/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "reg_alloc_lsra.h"
#include "loop.h"
#include <queue>

namespace maplebe {
namespace {
constexpr uint32 kSpilled = 1;
constexpr uint32 kMinLiveIntervalLength = 20;
constexpr uint32 kPrintedActiveListLength = 10;
/* Here, kLoopWeight is a fine-tuned empirical parameter */
constexpr uint32 kLoopWeight = 4;
} // namespace

/*
 * This LSRA implementation is an interpretation of the [Poletto97] paper.
 * BFS BB ordering is used to order the instructions. The live intervals are based on
 * this instruction order. All vreg defs should come before a use; otherwise a warning
 * is given.
 * Live intervals are traversed in order, from lower instruction order to higher order.
 * When a live interval is encountered for the first time, it is assumed to be live and placed
 * inside the 'active' structure until the vreg's last access. While a vreg
 * is in 'active', it occupies a physical register and no other vreg can
 * be allocated the same physical register.
 */
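/*
 * Illustrative example (hypothetical vreg numbers, not taken from real output):
 *   1: R100 <- ...          // first def of R100
 *   2: R101 <- R100 + 1     // last use of R100, first def of R101
 *   3: ...   <- R101        // last use of R101
 * R100 is live over [1, 2] and R101 over [2, 3]. After instruction 2 is
 * processed, R100 retires from 'active' and its physical register can be
 * reused, e.g. for R101.
 */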
void LSRALinearScanRegAllocator::PrintRegSet(const MapleSet<uint32> &set, const std::string &str) const
{
    LogInfo::MapleLogger() << str;
    for (auto reg : set) {
        LogInfo::MapleLogger() << " " << reg;
    }
    LogInfo::MapleLogger() << "\n";
}

bool LSRALinearScanRegAllocator::CheckForReg(Operand &opnd, const Insn &insn, const LiveInterval &li, regno_t regNO,
                                             bool isDef) const
{
    if (!opnd.IsRegister()) {
        return false;
    }
    auto &regOpnd = static_cast<RegOperand &>(opnd);
    if (regOpnd.GetRegisterType() == kRegTyCc || regOpnd.GetRegisterType() == kRegTyVary) {
        return false;
    }
    if (regOpnd.GetRegisterNumber() == regNO) {
        LogInfo::MapleLogger() << "set object circle at " << insn.GetId() << "," << li.GetRegNO()
                               << " size 5 fillcolor rgb \"";
        if (isDef) {
            LogInfo::MapleLogger() << "black\"\n";
        } else {
            LogInfo::MapleLogger() << "orange\"\n";
        }
    }
    return true;
}

void LSRALinearScanRegAllocator::PrintLiveRanges(const LiveInterval &li) const
{
    if (li.GetAssignedReg() != 0) {
        uint32 base = (li.GetRegType() == kRegTyInt) ? firstIntReg : firstFpReg;
        char regType = (li.GetRegType() == kRegTyInt) ? 'R' : 'V';
        LogInfo::MapleLogger() << "(assigned " << regType << (li.GetAssignedReg() - base) << ")";
    }
    if (li.GetStackSlot() == kSpilled) {
        LogInfo::MapleLogger() << "(spill)";
    }
    if (li.IsShouldSave()) {
        LogInfo::MapleLogger() << "(ShouldSave)";
    }
    for (auto range : li.GetRanges()) {
        LogInfo::MapleLogger() << "[" << range.GetStart() << ", " << range.GetEnd() << "]"
                               << " ";
    }
    if (li.GetSplitNext() != nullptr) {
        LogInfo::MapleLogger() << "### SPLIT ### ";
        PrintLiveRanges(*li.GetSplitNext());
    }
    LogInfo::MapleLogger() << "\n";
}

void LSRALinearScanRegAllocator::PrintAllLiveRanges() const
{
    LogInfo::MapleLogger() << "func: " << cgFunc->GetName() << "\n";
    for (auto *li : liveIntervalsArray) {
        if (li == nullptr || li->GetRegNO() == 0) {
            continue;
        }
        LogInfo::MapleLogger() << "vreg" << li->GetRegNO() << ": ";
        if (li->GetSplitParent() != nullptr) {
            PrintLiveRanges(*li->GetSplitParent());
        } else {
            PrintLiveRanges(*li);
        }
    }
}

/*
 * This is a support routine to compute the overlapping live intervals in graph form.
 * The output file can be viewed with gnuplot.
 * Despite the 'LiveRanges' in the name, this works on live intervals.
 */
void LSRALinearScanRegAllocator::PrintLiveRangesGraph() const
{
    /* ================= Output to plot.pg =============== */
    std::ofstream out("plot.pg");
    CHECK_FATAL(out.is_open(), "Failed to open output file: plot.pg");
    std::streambuf *coutBuf = LogInfo::MapleLogger().rdbuf(); /* old buf */
    LogInfo::MapleLogger().rdbuf(out.rdbuf());                /* new buf */

    LogInfo::MapleLogger() << "#!/usr/bin/gnuplot\n";
    LogInfo::MapleLogger() << "#maxInsnNum " << maxInsnNum << "\n";
    LogInfo::MapleLogger() << "#minVregNum " << minVregNum << "\n";
    LogInfo::MapleLogger() << "#maxVregNum " << maxVregNum << "\n";
    LogInfo::MapleLogger() << "reset\nset terminal png\n";
    LogInfo::MapleLogger() << "set xrange [1:" << maxInsnNum << "]\n";
    LogInfo::MapleLogger() << "set grid\nset style data linespoints\n";
    LogInfo::MapleLogger() << "set datafile missing '0'\n";
    std::vector<std::vector<uint32>> graph(maxVregNum, std::vector<uint32>(maxInsnNum, 0));

    uint32 minY = 0xFFFFFFFF;
    uint32 maxY = 0;
    for (auto *li : liveIntervalsArray) {
        if (li == nullptr || li->GetRegNO() == 0) {
            continue;
        }
        uint32 regNO = li->GetRegNO();
        if ((li->GetLastUse() - li->GetFirstDef()) < kMinLiveIntervalLength) {
            continue;
        }
        if (regNO < minY) {
            minY = regNO;
        }
        if (regNO > maxY) {
            maxY = regNO;
        }
        uint32 n;
        CHECK_FATAL(li->GetFirstDef() > 0, "must not be zero");
        for (n = 0; n <= (li->GetFirstDef() - 1); ++n) {
            graph[regNO - minVregNum][n] = 0;
        }
        if (li->GetLastUse() >= n) {
            for (; n <= (li->GetLastUse() - 1); ++n) {
                graph[regNO - minVregNum][n] = regNO;
            }
        }
        for (; n < maxInsnNum; ++n) {
            graph[regNO - minVregNum][n] = 0;
        }

        for (auto *bb : bfs->sortedBBs) {
            FOR_BB_INSNS(insn, bb)
            {
                const InsnDesc *md = insn->GetDesc();
                uint32 opndNum = insn->GetOperandSize();
                for (uint32 iSecond = 0; iSecond < opndNum; ++iSecond) {
                    Operand &opnd = insn->GetOperand(iSecond);
                    const OpndDesc *regProp = md->GetOpndDes(iSecond);
                    DEBUG_ASSERT(regProp != nullptr,
                                 "pointer is null in LSRALinearScanRegAllocator::PrintLiveRangesGraph");
                    bool isDef = regProp->IsRegDef();
                    if (opnd.IsList()) {
                        auto &listOpnd = static_cast<ListOperand &>(opnd);
                        for (auto op : listOpnd.GetOperands()) {
                            (void)CheckForReg(*op, *insn, *li, regNO, isDef);
                        }
                    } else if (opnd.IsMemoryAccessOperand()) {
                        auto &memOpnd = static_cast<MemOperand &>(opnd);
                        Operand *base = memOpnd.GetBaseRegister();
                        Operand *offset = memOpnd.GetIndexRegister();
                        if (base != nullptr && !CheckForReg(*base, *insn, *li, regNO, false)) {
                            continue;
                        }
                        if (offset != nullptr && !CheckForReg(*offset, *insn, *li, regNO, false)) {
                            continue;
                        }
                    } else {
                        (void)CheckForReg(opnd, *insn, *li, regNO, isDef);
                    }
                }
            }
        }
    }
    LogInfo::MapleLogger() << "set yrange [" << (minY - 1) << ":" << (maxY + 1) << "]\n";

    LogInfo::MapleLogger() << "plot \"plot.dat\" using 1:2 title \"R" << minVregNum << "\"";
    for (uint32 i = 1; i < ((maxVregNum - minVregNum) + 1); ++i) {
        LogInfo::MapleLogger() << ", \\\n\t\"\" using 1:" << (i + kDivide2) << " title \"R" << (minVregNum + i) << "\"";
    }
    LogInfo::MapleLogger() << ";\n";

    /* ================= Output to plot.dat =============== */
    std::ofstream out2("plot.dat");
    CHECK_FATAL(out2.is_open(), "Failed to open output file: plot.dat");
    LogInfo::MapleLogger().rdbuf(out2.rdbuf()); /* new buf */
    LogInfo::MapleLogger() << "##reg";
    for (uint32 i = minVregNum; i <= maxVregNum; ++i) {
        LogInfo::MapleLogger() << " R" << i;
    }
    LogInfo::MapleLogger() << "\n";
    for (uint32 n = 0; n < maxInsnNum; ++n) {
        LogInfo::MapleLogger() << (n + 1);
        for (uint32 i = minVregNum; i <= maxVregNum; ++i) {
            LogInfo::MapleLogger() << " " << graph[i - minVregNum][n];
        }
        LogInfo::MapleLogger() << "\n";
    }
    LogInfo::MapleLogger().rdbuf(coutBuf);
}

void LSRALinearScanRegAllocator::SpillStackMapInfo()
{
    RA_TIMER_REGISTER(lsra, "LSRA SpillStackMapInfo");
    const auto &referenceMapInsns = cgFunc->GetStackMapInsns();

    for (auto *insn : referenceMapInsns) {
        auto &deoptInfo = insn->GetStackMap()->GetDeoptInfo();
        const auto &deoptBundleInfo = deoptInfo.GetDeoptBundleInfo();
        for (const auto &item : deoptBundleInfo) {
            const auto *opnd = item.second;
            if (!opnd->IsRegister()) {
                continue;
            }
            auto *li = liveIntervalsArray[(static_cast<const RegOperand *>(opnd))->GetRegisterNumber()];
            if (li != nullptr) {
                if (needDump) {
                    PrintLiveInterval(*li, "Spill Deopt:");
                }
                li->SetStackSlot(kSpilled);
                li->SetShouldSave(false);
            }
        }

        for (auto regNO : insn->GetStackMapLiveIn()) {
            if (!cgFunc->IsRegReference(regNO)) {
                continue;
            }
            auto *li = liveIntervalsArray[regNO];
            if (li == nullptr) {
                continue;
            }
            auto itr = derivedRef2Base.find(regNO);
            if (itr != derivedRef2Base.end()) {
                auto baseRegNum = (itr->second)->GetRegisterNumber();
                DEBUG_ASSERT(liveIntervalsArray[baseRegNum] != nullptr, "empty li");
                if (needDump) {
                    PrintLiveInterval(*liveIntervalsArray[baseRegNum], "Spill StackMap derivedRef:");
                }
                liveIntervalsArray[baseRegNum]->SetStackSlot(kSpilled);
                liveIntervalsArray[baseRegNum]->SetShouldSave(false);
            }
            if (needDump) {
                PrintLiveInterval(*li, "Spill StackMap:");
            }
            li->SetStackSlot(kSpilled);
            li->SetShouldSave(false);
        }
    }
}

void LSRALinearScanRegAllocator::PrintLiveInterval(const LiveInterval &li, const std::string &str) const
{
    LogInfo::MapleLogger() << str << "\n";
    if (li.GetIsCall() != nullptr) {
        LogInfo::MapleLogger() << " firstDef " << li.GetFirstDef();
        LogInfo::MapleLogger() << " isCall";
    } else if (li.GetPhysUse() > 0) {
        LogInfo::MapleLogger() << "\tregNO " << li.GetRegNO();
        LogInfo::MapleLogger() << " firstDef " << li.GetFirstDef();
        LogInfo::MapleLogger() << " physUse " << li.GetPhysUse();
        LogInfo::MapleLogger() << " endByCall " << li.IsEndByCall();
    } else {
        LogInfo::MapleLogger() << "\tregNO " << li.GetRegNO();
        LogInfo::MapleLogger() << " firstDef " << li.GetFirstDef();
        LogInfo::MapleLogger() << " lastUse " << li.GetLastUse();
        LogInfo::MapleLogger() << " assigned " << li.GetAssignedReg();
        LogInfo::MapleLogger() << " refCount " << li.GetRefCount();
        LogInfo::MapleLogger() << " priority " << li.GetPriority();
    }
    LogInfo::MapleLogger() << "\n";
}

void LSRALinearScanRegAllocator::PrintParamQueue(const std::string &str)
{
    LogInfo::MapleLogger() << str << "\n";
    for (SingleQue &que : intParamQueue) {
        if (que.empty()) {
            continue;
        }
        LiveInterval *li = que.front();
        LiveInterval *last = que.back();
        PrintLiveInterval(*li, "");
        while (li != last) {
            que.pop_front();
            que.push_back(li);
            li = que.front();
            PrintLiveInterval(*li, "");
        }
        que.pop_front();
        que.push_back(li);
    }
}

void LSRALinearScanRegAllocator::PrintCallQueue(const std::string &str) const
{
    LogInfo::MapleLogger() << str << "\n";
    for (auto callInsnID : callQueue) {
        LogInfo::MapleLogger() << callInsnID << " ";
    }
    LogInfo::MapleLogger() << "\n";
}

void LSRALinearScanRegAllocator::PrintActiveList(const std::string &str, uint32 len) const
{
    uint32 count = 0;
    LogInfo::MapleLogger() << str << " " << active.size() << "\n";
    for (auto *li : active) {
        PrintLiveInterval(*li, "");
        ++count;
        if ((len != 0) && (count == len)) {
            break;
        }
    }
}

void LSRALinearScanRegAllocator::PrintActiveListSimple() const
{
    for (const auto *li : active) {
        uint32 assignedReg = li->GetAssignedReg();
        LogInfo::MapleLogger() << li->GetRegNO() << "(" << assignedReg << ", ";
        if (li->GetPhysUse() > 0) {
            LogInfo::MapleLogger() << "p) ";
        } else {
            LogInfo::MapleLogger() << li->GetFirstAcrossedCall();
        }
        LogInfo::MapleLogger() << "<" << li->GetFirstDef() << "," << li->GetLastUse() << ">) ";
    }
    LogInfo::MapleLogger() << "\n";
}

void LSRALinearScanRegAllocator::PrintLiveIntervals() const
{
    /* vreg LogInfo */
    for (auto *li : liveIntervalsArray) {
        if (li == nullptr || li->GetRegNO() == 0) {
            continue;
        }
        PrintLiveInterval(*li, "");
    }
    LogInfo::MapleLogger() << "\n";
    /* preg LogInfo */
    for (auto param : intParamQueue) {
        for (auto *li : param) {
            if (li == nullptr || li->GetRegNO() == 0) {
                continue;
            }
            PrintLiveInterval(*li, "");
        }
    }
    LogInfo::MapleLogger() << "\n";
}

void LSRALinearScanRegAllocator::DebugCheckActiveList() const
{
    LiveInterval *prev = nullptr;
    for (auto *li : active) {
        if (prev != nullptr) {
            if ((li->GetRegNO() <= regInfo->GetLastParamsFpReg()) &&
                (prev->GetRegNO() > regInfo->GetLastParamsFpReg())) {
                if (li->GetFirstDef() < prev->GetFirstDef()) {
                    LogInfo::MapleLogger() << "ERROR: active list with out-of-order phys + vreg\n";
                    PrintLiveInterval(*prev, "prev");
                    PrintLiveInterval(*li, "current");
                    PrintActiveList("Active", kPrintedActiveListLength);
                }
            }
            if ((li->GetRegNO() <= regInfo->GetLastParamsFpReg()) &&
                (prev->GetRegNO() <= regInfo->GetLastParamsFpReg())) {
                if (li->GetFirstDef() < prev->GetFirstDef()) {
                    LogInfo::MapleLogger() << "ERROR: active list with out-of-order phys reg use\n";
                    PrintLiveInterval(*prev, "prev");
                    PrintLiveInterval(*li, "current");
                    PrintActiveList("Active", kPrintedActiveListLength);
                }
            }
        }
        /* advance prev every iteration so adjacent pairs are compared */
        prev = li;
    }
}

/*
 * Prepare the free physical register pool for allocation.
 * When a physical register is allocated, it is removed from the pool.
 * The physical register is re-inserted into the pool when the associated live
 * interval has ended.
 */
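/*
 * For example (architecture-dependent; register numbers are only
 * illustrative): a callee-saved GP register R19 with firstIntReg == R0
 * lands in intCalleeRegSet as 19 and sets bit 19 of intCalleeMask, while a
 * caller-saved register R9 goes into intCallerRegSet/intCallerMask instead.
 */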
void LSRALinearScanRegAllocator::InitFreeRegPool()
{
    for (regno_t regNO : regInfo->GetAllRegs()) {
        if (!regInfo->IsAvailableReg(regNO)) {
            continue;
        }
        if (regInfo->IsGPRegister(regNO)) {
            if (regInfo->IsYieldPointReg(regNO)) {
                continue;
            }
            /* ExtraSpillReg */
            if (regInfo->IsSpillRegInRA(regNO, needExtraSpillReg)) {
                intSpillRegSet.push_back(regNO - firstIntReg);
                continue;
            }
            if (regInfo->IsCalleeSavedReg(regNO)) {
                /* callee-saved registers */
                (void)intCalleeRegSet.insert(regNO - firstIntReg);
                intCalleeMask |= 1u << (regNO - firstIntReg);
            } else {
                /* caller-saved registers */
                (void)intCallerRegSet.insert(regNO - firstIntReg);
                intCallerMask |= 1u << (regNO - firstIntReg);
            }
        } else {
            /* fp ExtraSpillReg */
            if (regInfo->IsSpillRegInRA(regNO, needExtraSpillReg)) {
                fpSpillRegSet.push_back(regNO - firstFpReg);
                continue;
            }
            if (regInfo->IsCalleeSavedReg(regNO)) {
                /* fp callee-saved registers */
                (void)fpCalleeRegSet.insert(regNO - firstFpReg);
                fpCalleeMask |= 1u << (regNO - firstFpReg);
            } else {
                /* fp caller-saved registers */
                (void)fpCallerRegSet.insert(regNO - firstFpReg);
                fpCallerMask |= 1u << (regNO - firstFpReg);
            }
        }
    }
    DEBUG_ASSERT(intSpillRegSet.size() >= 2U, "too few spill regs");

    if (needDump) {
        PrintRegSet(intCallerRegSet, "ALLOCATABLE_INT_CALLER");
        PrintRegSet(intCalleeRegSet, "ALLOCATABLE_INT_CALLEE");
        PrintRegSet(intParamRegSet, "ALLOCATABLE_INT_PARAM");
        PrintRegSet(fpCallerRegSet, "ALLOCATABLE_FP_CALLER");
        PrintRegSet(fpCalleeRegSet, "ALLOCATABLE_FP_CALLEE");
        PrintRegSet(fpParamRegSet, "ALLOCATABLE_FP_PARAM");
        LogInfo::MapleLogger() << "INT_SPILL_REGS";
        for (uint32 intSpillRegNO : intSpillRegSet) {
            LogInfo::MapleLogger() << " " << intSpillRegNO;
        }
        LogInfo::MapleLogger() << "\n";
        LogInfo::MapleLogger() << "FP_SPILL_REGS";
        for (uint32 fpSpillRegNO : fpSpillRegSet) {
            LogInfo::MapleLogger() << " " << fpSpillRegNO;
        }
        LogInfo::MapleLogger() << "\n";
        LogInfo::MapleLogger() << std::hex;
        LogInfo::MapleLogger() << "INT_CALLER_MASK " << intCallerMask << "\n";
        LogInfo::MapleLogger() << "INT_CALLEE_MASK " << intCalleeMask << "\n";
        LogInfo::MapleLogger() << "INT_PARAM_MASK " << intParamMask << "\n";
        LogInfo::MapleLogger() << "FP_CALLER_FP_MASK " << fpCallerMask << "\n";
        LogInfo::MapleLogger() << "FP_CALLEE_FP_MASK " << fpCalleeMask << "\n";
        LogInfo::MapleLogger() << "FP_PARAM_FP_MASK " << fpParamMask << "\n";
        LogInfo::MapleLogger() << std::dec;
    }
}

void LSRALinearScanRegAllocator::RecordPhysRegs(const RegOperand &regOpnd, uint32 insnNum, bool isDef)
{
    RegType regType = regOpnd.GetRegisterType();
    uint32 regNO = regOpnd.GetRegisterNumber();
    if (regType == kRegTyCc || regType == kRegTyVary) {
        return;
    }
    if (regInfo->IsUnconcernedReg(regNO) || regInfo->IsVirtualRegister(regNO)) {
        return;
    }
    if (isDef) {
        /* parameter/return register def is assumed to be live until a call. */
        auto *li = memPool->New<LiveInterval>(*memPool);
        li->SetRegNO(regNO);
        li->SetRegType(regType);
        li->SetStackSlot(0xFFFFFFFF);
        li->SetFirstDef(insnNum);
        li->SetPhysUse(insnNum);
        li->SetAssignedReg(regNO);

        if (regType == kRegTyInt) {
            intParamQueue[regInfo->GetIntParamRegIdx(regNO)].push_back(li);
        } else {
            fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].push_back(li);
        }
    } else {
        if (regType == kRegTyInt) {
            CHECK_FATAL(!intParamQueue[regInfo->GetIntParamRegIdx(regNO)].empty(),
                        "was not defined before use, impossible");
            LiveInterval *li = intParamQueue[regInfo->GetIntParamRegIdx(regNO)].back();
            li->SetPhysUse(insnNum);
        } else {
            CHECK_FATAL(!fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].empty(),
                        "was not defined before use, impossible");
            LiveInterval *li = fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].back();
            li->SetPhysUse(insnNum);
        }
    }
}

void LSRALinearScanRegAllocator::UpdateLiveIntervalState(const BB &bb, LiveInterval &li) const
{
    if (bb.IsCatch()) {
        li.SetInCatchState();
    } else {
        li.SetNotInCatchState();
    }

    if (bb.GetInternalFlag1() != 0) {
        li.SetInCleanupState();
    } else {
        li.SetNotInCleanupState(bb.GetId() == 1);
    }
}

void LSRALinearScanRegAllocator::UpdateRegUsedInfo(LiveInterval &li, regno_t regNO)
{
    uint32 index = regNO / (sizeof(uint64) * k8ByteSize);
    uint64 bit = regNO % (sizeof(uint64) * k8ByteSize);
    if ((regUsedInBB[index] & (static_cast<uint64>(1) << bit)) != 0) {
        li.SetMultiUseInBB(true);
    }
    regUsedInBB[index] |= (static_cast<uint64>(1) << bit);

    if (minVregNum > regNO) {
        minVregNum = regNO;
    }
    if (maxVregNum < regNO) {
        maxVregNum = regNO;
    }
}
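
/*
 * Worked example for UpdateRegUsedInfo (assuming k8ByteSize == 8, so each
 * regUsedInBB word holds 64 bits): regNO 70 maps to index 70 / 64 == 1 and
 * bit 70 % 64 == 6, i.e. bit 6 of regUsedInBB[1]. A second use of the same
 * vreg in this BB finds the bit already set and marks the interval
 * MultiUseInBB.
 */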

/* main entry function for live interval computation. */
void LSRALinearScanRegAllocator::SetupLiveInterval(Operand &opnd, Insn &insn, bool isDef, uint32 &nUses, uint32 regSize)
{
    if (!opnd.IsRegister()) {
        return;
    }
    auto &regOpnd = static_cast<RegOperand &>(opnd);
    uint32 insnNum = insn.GetId();
    if (!regInfo->IsVirtualRegister(regOpnd)) {
        RecordPhysRegs(regOpnd, insnNum, isDef);
        return;
    }
    RegType regType = regOpnd.GetRegisterType();
    if (regType == kRegTyCc || regType == kRegTyVary) {
        return;
    }

    LiveInterval *li = nullptr;
    uint32 regNO = regOpnd.GetRegisterNumber();
    if (liveIntervalsArray[regNO] == nullptr) {
        li = memPool->New<LiveInterval>(*memPool);
        li->SetRegNO(regNO);
        li->SetStackSlot(0xFFFFFFFF);
        liveIntervalsArray[regNO] = li;
        liQue.push_back(li);
    } else {
        li = liveIntervalsArray[regNO];
    }
    li->SetRegType(regType);

    BB *curBB = insn.GetBB();
    if (isDef) {
        /* never set to 0 before; why consider this condition? */
        if (li->GetFirstDef() == 0) {
            li->SetFirstDef(insnNum);
            li->SetLastUse(insnNum + 1);
        } else if (!curBB->IsUnreachable()) {
            if (li->GetLastUse() < insnNum || li->IsUseBeforeDef()) {
                li->SetLastUse(insnNum + 1);
            }
        }
        /*
         * try-catch related
         * Not set when extending live interval with bb's livein in ComputeLiveInterval.
         */
        li->SetResultCount(li->GetResultCount() + 1);
        // add range
        li->AddRange(insn.GetId(), insn.GetId());
    } else {
        if (li->GetFirstDef() == 0) {
            DEBUG_ASSERT(false, "SetupLiveInterval: use before def");
        }
        /*
         * In ComputeLiveInterval, when extending a live interval using
         * live-out information, the li created does not have a type.
         */
        if (!curBB->IsUnreachable()) {
            li->SetLastUse(insnNum);
        }
        ++nUses;
        // update range
        if (li->GetRanges().empty()) {
            li->AddRange(insn.GetId(), insn.GetId());
        } else {
            li->GetRanges().back().SetEnd(insn.GetId());
        }
    }
    UpdateLiveIntervalState(*curBB, *li);
    if (isDef) {
        li->SetMaxDefSize(std::max(regSize, li->GetMaxDefSize()));
    } else {
        li->SetMaxUseSize(std::max(regSize, li->GetMaxUseSize()));
    }
    if (li->GetMaxDefSize() == 0) {
        li->SetSpillSize(li->GetMaxUseSize());
    } else if (li->GetMaxUseSize() == 0) {
        li->SetSpillSize(li->GetMaxDefSize());
    } else {
        li->SetSpillSize(std::min(li->GetMaxDefSize(), li->GetMaxUseSize()));
    }
    li->SetRefCount(li->GetRefCount() + 1);
    li->AddUsePositions(insnNum);
    UpdateRegUsedInfo(*li, regNO);

    /* setup the def/use point for it */
    DEBUG_ASSERT(regNO < liveIntervalsArray.size(), "out of range of vector liveIntervalsArray");
}

/*
 * Support 'hole' in LSRA.
 * For a live interval, there might be multiple segments of live ranges,
 * with a 'hole' between these segments.
 * Some other short-lived vreg can go into these 'holes'.
 *
 * from : starting instruction sequence id
 * to   : ending instruction sequence id
 */
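/*
 * Illustrative example (hypothetical sequence ids): on a forward traversal,
 * AddRange(5, 5) creates [5, 5]; a later AddRange(20, 30) appends [20, 30]
 * because 20 > 5. The interval is then [5, 5] [20, 30], and the gap between
 * instructions 5 and 20 is a hole another short-lived vreg may occupy.
 */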
void LSRALinearScanRegAllocator::LiveInterval::AddRange(uint32 from, uint32 to)
{
    if (ranges.empty()) {
        ranges.emplace_back(from, to);
        return;
    }
    /* create a new range */
    if (from > ranges.back().GetEnd()) {
        ranges.emplace_back(from, to);
        return;
    }
    DEBUG_ASSERT(to >= ranges.back().GetStart(), "Not possible on forward traversal.");
    if (to >= ranges.back().GetEnd() && from < ranges.back().GetStart()) {
        ranges.back().SetStart(from);
        ranges.back().SetEnd(to);
        return;
    }
    /* extend its range forward, e.g. def-use opnd */
    if (to >= ranges.back().GetEnd() && from < ranges.back().GetEnd()) {
        ranges.back().SetEnd(to);
        return;
    }
    return;
}

MapleVector<LSRALinearScanRegAllocator::LinearRange>::iterator LSRALinearScanRegAllocator::LiveInterval::FindPosRange(
    uint32 pos)
{
    while (rangeFinder != ranges.end()) {
        if (rangeFinder->GetEnd() >= pos) {
            break;
        }
        ++rangeFinder;
    }
    return rangeFinder;
}

/* Extend live interval with live-in info */
void LSRALinearScanRegAllocator::UpdateLiveIntervalByLiveIn(const BB &bb, uint32 insnNum)
{
    for (const auto &regNO : bb.GetLiveInRegNO()) {
        if (!regInfo->IsVirtualRegister(regNO)) {
            /* Do not consider physical regs. */
            continue;
        }
        DEBUG_ASSERT(regNO < liveIntervalsArray.size(), "index out of range.");
        LiveInterval *liOuter = liveIntervalsArray[regNO];
        if (liOuter != nullptr) {
            // add live interval range
            liOuter->AddRange(insnNum, insnNum);
            continue;
        }
        if (bb.IsEmpty() && bb.GetId() != 1) {
            continue;
        }
        /*
         * try-catch related
         * Since it is live-in but not seen before, it's a use before def;
         * spill it temporarily
         */
        auto *li = memPool->New<LiveInterval>(*memPool);
        li->SetRegNO(regNO);
        li->SetStackSlot(kSpilled);
        liveIntervalsArray[regNO] = li;
        li->SetFirstDef(insnNum);
        liQue.push_back(li);

        li->SetUseBeforeDef(true);

        // add live interval range
        li->AddRange(insnNum, insnNum);

        if (!bb.IsUnreachable()) {
            if (bb.GetId() != 1) {
                LogInfo::MapleLogger() << "ERROR: " << regNO << " use before def in bb " << bb.GetId() << " : "
                                       << cgFunc->GetName() << "\n";
                DEBUG_ASSERT(false, "There should only be [use before def in bb 1], temporarily.");
            }
            LogInfo::MapleLogger() << "WARNING: " << regNO << " use before def in bb " << bb.GetId() << " : "
                                   << cgFunc->GetName() << "\n";
        }
        UpdateLiveIntervalState(bb, *li);
    }
}

/* traverse live-in regNOs; for each physical live-in regNO, create a new live interval */
void LSRALinearScanRegAllocator::UpdateParamLiveIntervalByLiveIn(const BB &bb, uint32 insnNum)
{
    for (const auto regNO : bb.GetLiveInRegNO()) {
        if (regInfo->IsUnconcernedReg(regNO) || regInfo->IsVirtualRegister(regNO)) {
            continue;
        }
        auto *li = memPool->New<LiveInterval>(*memPool);
        li->SetRegNO(regNO);
        li->SetStackSlot(0xFFFFFFFF);
        li->SetFirstDef(insnNum);
        li->SetPhysUse(insnNum);
        li->SetAssignedReg(regNO);

        if (regInfo->IsGPRegister(regNO)) {
            li->SetRegType(kRegTyInt);
            intParamQueue[regInfo->GetIntParamRegIdx(regNO)].push_back(li);
        } else {
            li->SetRegType(kRegTyFloat);
            fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].push_back(li);
        }
        UpdateLiveIntervalState(bb, *li);
    }
}

void LSRALinearScanRegAllocator::ComputeLiveIn(BB &bb, uint32 insnNum)
{
    if (bb.IsEmpty() && bb.GetId() != 1) {
        return;
    }
    if (needDump) {
        LogInfo::MapleLogger() << "bb(" << bb.GetId() << ")LIVEOUT:";
        for (const auto &liveOutRegNO : bb.GetLiveOutRegNO()) {
            LogInfo::MapleLogger() << " " << liveOutRegNO;
        }
        LogInfo::MapleLogger() << ".\n";
        LogInfo::MapleLogger() << "bb(" << bb.GetId() << ")LIVEIN:";
        for (const auto &liveInRegNO : bb.GetLiveInRegNO()) {
            LogInfo::MapleLogger() << " " << liveInRegNO;
        }
        LogInfo::MapleLogger() << ".\n";
    }

    UpdateLiveIntervalByLiveIn(bb, insnNum);

    if (bb.GetFirstInsn() == nullptr) {
        return;
    }
    UpdateParamLiveIntervalByLiveIn(bb, insnNum);
}

void LSRALinearScanRegAllocator::ComputeLiveOut(BB &bb, uint32 insnNum)
{
    auto updateLiveIntervalByLiveOut = [this, &bb, insnNum](regno_t regNO) {
        /* Extend live interval with live-out info */
        LiveInterval *li = liveIntervalsArray[regNO];
        if (li != nullptr && !bb.IsEmpty()) {
            li->SetLastUse(bb.GetLastInsn()->GetId());
            UpdateLiveIntervalState(bb, *li);
            if (bb.GetKind() == BB::kBBRangeGoto) {
                li->SetSplitForbid(true);
            }

            // update live interval range
            if (li->GetRanges().empty()) {
                li->AddRange(insnNum, insnNum);
            } else {
                li->GetRanges().back().SetEnd(insnNum);
            }
        }
    };

    /*
     * traverse live-out regNOs;
     * for each live-out regNO, if the last corresponding live interval was
     * created within this bb, update that li's lastUse to the end of the BB
     */
    for (const auto regNO : bb.GetLiveOutRegNO()) {
        if (regInfo->IsUnconcernedReg(regNO)) {
            continue;
        }
        if (!regInfo->IsVirtualRegister(regNO)) {
            LiveInterval *liOut = nullptr;
            if (regInfo->IsGPRegister(regNO)) {
                if (intParamQueue[regInfo->GetIntParamRegIdx(regNO)].empty()) {
                    continue;
                }
                liOut = intParamQueue[regInfo->GetIntParamRegIdx(regNO)].back();
                if (bb.GetFirstInsn() && liOut->GetFirstDef() >= bb.GetFirstInsn()->GetId()) {
                    liOut->SetPhysUse(insnNum);
                }
            } else {
                if (fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].empty()) {
                    continue;
                }
                liOut = fpParamQueue[regInfo->GetFpParamRegIdx(regNO)].back();
                if (bb.GetFirstInsn() && liOut->GetFirstDef() >= bb.GetFirstInsn()->GetId()) {
                    liOut->SetPhysUse(insnNum);
                }
            }
            continue;
        }
        updateLiveIntervalByLiveOut(regNO);
        auto iter = derivedRef2Base.find(regNO);
        if (iter != derivedRef2Base.end()) {
            updateLiveIntervalByLiveOut(iter->second->GetRegisterNumber());
        }
    }
}

struct RegOpndInfo {
    RegOpndInfo(RegOperand &opnd, uint32 size, uint32 idx, bool def)
        : regOpnd(opnd), regSize(size), opndIdx(idx), isDef(def)
    {
    }
    RegOperand &regOpnd;
    uint32 regSize = 0;
    uint32 opndIdx = -1;
    bool isDef = false;
};

void LSRALinearScanRegAllocator::ComputeLiveIntervalForEachOperand(Insn &insn)
{
    std::vector<RegOpndInfo> allRegOpndInfo;
    const InsnDesc *md = insn.GetDesc();
    for (uint32 i = 0; i < insn.GetOperandSize(); ++i) {
        Operand &opnd = insn.GetOperand(i);
        const OpndDesc *opndDesc = md->GetOpndDes(i);
        DEBUG_ASSERT(opndDesc != nullptr, "ptr null check.");
        if (opnd.IsRegister()) {
            auto &regOpnd = static_cast<RegOperand &>(opnd);
            // Specifically, the "use-def" opnd is treated as a "use" opnd
            bool isUse = opndDesc->IsRegUse();
            // Fixup: The size in the insn md of x64 is not accurate, and the size in the float opnd
            // does not change, so we use the size in the opnd first.
            auto regSize = (regOpnd.GetRegisterType() == kRegTyInt ? opndDesc->GetSize() : regOpnd.GetSize());
            allRegOpndInfo.emplace_back(regOpnd, regSize, i, !isUse);
        } else if (opnd.IsMemoryAccessOperand()) {
            auto &memOpnd = static_cast<MemOperand &>(opnd);
            RegOperand *base = memOpnd.GetBaseRegister();
            RegOperand *offset = memOpnd.GetIndexRegister();
            if (base != nullptr) {
                allRegOpndInfo.emplace_back(*base, k64BitSize, i, false);
            }
            if (offset != nullptr) {
                allRegOpndInfo.emplace_back(*offset, k64BitSize, i, false);
            }
        } else if (opnd.IsList()) {
            auto &listOpnd = static_cast<ListOperand &>(opnd);
            for (auto op : listOpnd.GetOperands()) {
                allRegOpndInfo.emplace_back(*op, op->GetSize(), i, opndDesc->IsDef());
            }
        }
    }

    uint32 numUses = 0;
    for (auto &regOpndInfo : allRegOpndInfo) {
        if (!regOpndInfo.isDef) {
            SetupLiveInterval(regOpndInfo.regOpnd, insn, false, numUses, regOpndInfo.regSize);
        }
    }

    for (auto &regOpndInfo : allRegOpndInfo) {
        if (regOpndInfo.isDef) {
            SetupLiveInterval(regOpndInfo.regOpnd, insn, true, numUses, regOpndInfo.regSize);
        }
    }

    if (numUses >= regInfo->GetNormalUseOperandNum()) {
        needExtraSpillReg = true;
    }
}

void LSRALinearScanRegAllocator::ComputeLoopLiveIntervalPriority(const LoopDesc &loop)
{
    for (const auto *lp : loop.GetChildLoops()) {
        /* handle nested loops */
        ComputeLoopLiveIntervalPriority(*lp);
    }
    for (auto bbId : loop.GetLoopBBs()) {
        auto *bb = cgFunc->GetBBFromID(bbId);
        if (bb->IsEmpty()) {
            continue;
        }
        FOR_BB_INSNS(insn, bb)
        {
            ComputeLoopLiveIntervalPriorityInInsn(*insn);
        }
        loopBBRegSet.clear();
    }
}

void LSRALinearScanRegAllocator::ComputeLoopLiveIntervalPriorityInInsn(const Insn &insn)
{
    uint32 opndNum = insn.GetOperandSize();
    for (uint32 i = 0; i < opndNum; ++i) {
        Operand &opnd = insn.GetOperand(i);
        if (!opnd.IsRegister()) {
            continue;
        }
        auto &regOpnd = static_cast<RegOperand &>(opnd);
        if (!regInfo->IsVirtualRegister(regOpnd)) {
            continue;
        }
        uint32 regNo = regOpnd.GetRegisterNumber();
        LiveInterval *li = liveIntervalsArray[regNo];
        if (li == nullptr || loopBBRegSet.find(regNo) != loopBBRegSet.end()) {
            continue;
        }
        li->SetPriority(kLoopWeight * li->GetPriority());
        (void)loopBBRegSet.insert(regNo);
    }
    return;
}

void LSRALinearScanRegAllocator::ComputeLiveIntervalForCall(Insn &insn)
{
    uint32 cnt = 0;
    // referencemap info
    for (auto regNO : insn.GetStackMapLiveIn()) {
        if (derivedRef2Base.find(regNO) != derivedRef2Base.end()) {
            auto baseOpnd = derivedRef2Base[regNO];
            auto regSize = baseOpnd->GetSize();
            SetupLiveInterval(*baseOpnd, insn, false, cnt, regSize);
        }
    }
    // deoptinfo
    if (insn.GetStackMap() != nullptr) {
        for (auto [_, opnd] : insn.GetStackMap()->GetDeoptInfo().GetDeoptBundleInfo()) {
            auto regSize = opnd->GetSize();
            SetupLiveInterval(*opnd, insn, false, cnt, regSize);
        }
    }
}

void LSRALinearScanRegAllocator::ComputeLiveInterval()
{
    RA_TIMER_REGISTER(lsra, "LSRA ComputeLiveInterval");
    liQue.clear();
    uint32 regUsedInBBSz = (cgFunc->GetMaxVReg() / (sizeof(uint64) * k8ByteSize) + 1);
    regUsedInBB.resize(regUsedInBBSz, 0);
    uint32 insnNum = 1;
    for (BB *bb : bfs->sortedBBs) {
        ComputeLiveIn(*bb, insnNum);
        FOR_BB_INSNS(insn, bb)
        {
            insn->SetId(insnNum);
            /* skip comment and debug insn */
            if (!insn->IsMachineInstruction()) {
                continue;
            }

            /* RecordCall, remember calls for caller/callee allocation. */
            if (insn->IsCall()) {
                if (!insn->GetIsThrow()) {
                    callQueue.emplace_back(insn->GetId());
                }
                if (!cgFunc->IsStackMapComputed()) {
                    ComputeLiveIntervalForCall(*insn);
                }
            }

            ComputeLiveIntervalForEachOperand(*insn);

            /* handle return value for call insn */
            if (insn->IsCall()) {
                /* For all backend architectures so far, treat all RetRegs as defined
                 * by this insn, so their live ranges begin here.
                 * As a next optimization, one could determine which return registers
                 * are actually used.
                 */
                for (uint32 i = 0; i < regInfo->GetIntRetRegsNum(); i++) {
                    auto *retReg = regInfo->GetOrCreatePhyRegOperand(regInfo->GetIntRetReg(i), k64BitSize, kRegTyInt);
                    RecordPhysRegs(*retReg, insnNum, true);
                }
                for (uint32 i = 0; i < regInfo->GetFpRetRegsNum(); i++) {
                    auto *retReg = regInfo->GetOrCreatePhyRegOperand(regInfo->GetFpRetReg(i), k64BitSize, kRegTyFloat);
                    RecordPhysRegs(*retReg, insnNum, true);
                }
            }
            ++insnNum;
        }

        ComputeLiveOut(*bb, insnNum);
    }

    maxInsnNum = insnNum - 1; /* insnNum started from 1 */
    regUsedInBB.clear();
    /* calculate live interval weight */
    for (auto *li : liveIntervalsArray) {
        if (li == nullptr || li->GetRegNO() == 0) {
            continue;
        }
        if (li->GetIsCall() != nullptr || (li->GetPhysUse() > 0)) {
            continue;
        }
        if (li->GetLastUse() > li->GetFirstDef()) {
            li->SetPriority(static_cast<float>(li->GetRefCount()) /
                            static_cast<float>(li->GetLastUse() - li->GetFirstDef()));
        } else {
            li->SetPriority(static_cast<float>(li->GetRefCount()) /
                            static_cast<float>(li->GetFirstDef() - li->GetLastUse()));
        }
    }
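    /*
     * Worked example (hypothetical numbers): refCount 4 with firstDef 10 and
     * lastUse 30 gives priority 4 / 20 = 0.2; a denser interval, refCount 4
     * over [10, 14], gives 1.0 and is less likely to be chosen for spilling.
     * A vreg used inside a loop then has its priority scaled by kLoopWeight
     * below, and nested loops compound the effect.
     */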

    // enhance loop live interval priority
    for (const auto *lp : loopInfo.GetLoops()) {
        ComputeLoopLiveIntervalPriority(*lp);
    }

    if (needDump) {
        PrintLiveIntervals();
    }
}

/* Calculate the weight of a live interval for pre-spill and flexible spill */
void LSRALinearScanRegAllocator::LiveIntervalAnalysis()
{
    RA_TIMER_REGISTER(lsra, "LSRA BuildIntervalRanges");
    for (uint32 bbIdx = 0; bbIdx < bfs->sortedBBs.size(); ++bbIdx) {
        BB *bb = bfs->sortedBBs[bbIdx];

        FOR_BB_INSNS(insn, bb)
        {
            /* 1 calculate live interference */
            if (!insn->IsMachineInstruction() || insn->GetId() == 0) {
                /* New instruction inserted by reg alloc (i.e. spill) */
                continue;
            }
            /* 1.1 simple retire from active */
            MapleSet<LiveInterval *, ActiveCmp>::iterator it;
            for (it = active.begin(); it != active.end(); /* erase will update */) {
                auto *li = static_cast<LiveInterval *>(*it);
                if (li->GetLastUse() > insn->GetId()) {
                    break;
                }
                it = active.erase(it);
            }
            const InsnDesc *md = insn->GetDesc();
            uint32 opndNum = insn->GetOperandSize();
            for (uint32 i = 0; i < opndNum; ++i) {
                const OpndDesc *regProp = md->GetOpndDes(i);
                DEBUG_ASSERT(regProp != nullptr, "pointer is null in LSRALinearScanRegAllocator::LiveIntervalAnalysis");
                bool isDef = regProp->IsRegDef();
                Operand &opnd = insn->GetOperand(i);
                if (isDef) {
                    auto &regOpnd = static_cast<RegOperand &>(opnd);
                    if (regOpnd.IsVirtualRegister() && regOpnd.GetRegisterType() != kRegTyCc) {
                        /* 1.2 simple insert to active */
                        uint32 regNO = regOpnd.GetRegisterNumber();
                        LiveInterval *li = liveIntervalsArray[regNO];
                        if (li->GetFirstDef() == insn->GetId() && li->GetStackSlot() != kSpilled) {
                            (void)active.insert(li);
                        }
                    }
                }
            }

            /* 2 get interference info and analyze */
            uint32 interNum = active.size();
            if (needDump) {
                LogInfo::MapleLogger() << "In insn " << insn->GetId() << ", " << interNum
                                       << " overlap live intervals.\n";
                LogInfo::MapleLogger() << "\n";
            }

            /* 2.2 if too many intervals interfere with each other, decide which to spill */
            while (interNum > CGOptions::GetOverlapNum()) {
                LiveInterval *lowestLi = nullptr;
                FindLowestPrioInActive(lowestLi);
                if (lowestLi != nullptr) {
                    if (needDump) {
                        PrintLiveInterval(*lowestLi, "Pre spilled: ");
                    }
                    lowestLi->SetStackSlot(kSpilled);
                    lowestLi->SetShouldSave(false);
                    active.erase(itFinded);
                    interNum = active.size();
                } else {
                    break;
                }
            }
        }
    }
    active.clear();
}


void LSRALinearScanRegAllocator::UpdateCallQueueAtRetirement(uint32 insnID)
{
    /*
     * The active list is sorted based on increasing lastUse.
     * Any operand whose use is greater than the current
     * instruction number is still in use.
     * If the use is less than or equal to the instruction number,
     * then it is possible to retire this live interval and
     * reclaim the physical register associated with it.
     */
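    /*
     * Example (hypothetical ids): with callQueue {8, 14, 22} and insnID 12,
     * the call at 8 is already behind us and is popped below; 14 and 22
     * remain as future calls that live intervals may still cross.
     */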
    if (needDump) {
        LogInfo::MapleLogger() << "RetireActiveByInsn instr_num " << insnID << "\n";
    }
    /* Retire invalidated call from call queue */
    while (!callQueue.empty() && callQueue.front() <= insnID) {
        callQueue.pop_front();
    }
}

/* update allocate info by active queue */
void LSRALinearScanRegAllocator::UpdateActiveAllocateInfo(const LiveInterval &li)
{
    uint32 start = li.GetFirstDef();
    uint32 end = li.GetLastUse();
    if (li.GetSplitParent() != nullptr || li.IsUseBeforeDef()) {
        --start;
    }
    for (auto *activeLi : active) {
        uint32 regNO = activeLi->GetAssignedReg();
        uint32 rangeStartPos;
        auto posRange = activeLi->FindPosRange(start);
        if (posRange == activeLi->GetRanges().end()) {
            /* handle split li */
            uint32 splitSafePos = activeLi->GetSplitPos();
            if (splitSafePos == li.GetFirstDef() && (li.GetSplitParent() != nullptr || li.IsUseBeforeDef())) {
                rangeStartPos = 0;
            } else if (splitSafePos > li.GetFirstDef()) {
                DEBUG_ASSERT(splitSafePos > 0, "must not be zero");
                rangeStartPos = splitSafePos - 1;
            } else {
                rangeStartPos = UINT32_MAX;
            }
        } else if (posRange->GetEhStart() != 0 && posRange->GetEhStart() < posRange->GetStart()) {
            rangeStartPos = posRange->GetEhStart();
        } else {
            rangeStartPos = posRange->GetStart();
        }
        if (rangeStartPos > start) {
            if (rangeStartPos < end) {
                blockForbiddenMask |= (1UL << activeLi->GetAssignedReg());
            }
            if (rangeStartPos < freeUntilPos[regNO]) {
                freeUntilPos[regNO] = rangeStartPos;
            }
        } else {
            freeUntilPos[regNO] = 0;
        }
    }
}

/* update allocate info by param queue */
void LSRALinearScanRegAllocator::UpdateParamAllocateInfo(const LiveInterval &li)
{
    bool isInt = (li.GetRegType() == kRegTyInt);
    MapleVector<SingleQue> &paramQueue = isInt ? intParamQueue : fpParamQueue;
    uint32 baseReg = isInt ? firstIntReg : firstFpReg;
    uint32 paramNum = isInt ? regInfo->GetIntRegs().size() : regInfo->GetFpRegs().size();
    uint32 start = li.GetFirstDef();
    uint32 end = li.GetLastUse();
    for (uint32 i = 0; i < paramNum; ++i) {
        while (!paramQueue[i].empty() && paramQueue[i].front()->GetPhysUse() <= start) {
            if (paramQueue[i].front()->GetPhysUse() == start && li.GetSplitParent() != nullptr) {
                break;
            }
            paramQueue[i].pop_front();
        }
        if (paramQueue[i].empty()) {
            continue;
        }
        auto regNo = paramQueue[i].front()->GetRegNO();
        uint32 startPos = paramQueue[i].front()->GetFirstDef();
        if (startPos <= start) {
            freeUntilPos[regNo] = 0;
        } else {
            if (startPos < end) {
                blockForbiddenMask |= (1UL << (i + baseReg));
            }
            if (startPos < freeUntilPos[regNo]) {
                freeUntilPos[regNo] = startPos;
            }
        }
    }
}

/* update active in retire */
void LSRALinearScanRegAllocator::RetireActive(LiveInterval &li, uint32 insnID)
{
    /* Retire live intervals from active list */
    MapleSet<LiveInterval *, ActiveCmp>::iterator it;
    for (it = active.begin(); it != active.end(); /* erase will update */) {
        auto *activeLi = static_cast<LiveInterval *>(*it);
        if (activeLi->GetLastUse() > insnID) {
            ++it;
            continue;
        }
        if (activeLi->GetLastUse() == insnID) {
            if (li.GetSplitParent() != nullptr || activeLi->GetSplitNext() != nullptr) {
                ++it;
                continue;
            }
            if (activeLi->IsEndByMov() && activeLi->GetRegType() == li.GetRegType()) {
                li.SetPrefer(activeLi->GetAssignedReg());
            }
        }
        /* reserve split li in active */
        if (activeLi->GetSplitPos() >= insnID) {
            ++it;
            continue;
        }
        /*
         * live interval ended for this reg in active;
         * release the physical reg assigned to it back to the free reg pool
         */
        if (needDump) {
            LogInfo::MapleLogger() << "Removing "
                                   << "(" << activeLi->GetAssignedReg() << ")"
                                   << "from regmask\n";
            PrintLiveInterval(*activeLi, "\tRemoving virt_reg li\n");
        }
        it = active.erase(it);
    }
}

/* find the best physical reg by freeUntilPos */
uint32 LSRALinearScanRegAllocator::GetRegFromMask(uint32 mask, regno_t offset, const LiveInterval &li)
{
    uint32 prefer = li.GetPrefer();
    if (prefer != 0) {
        uint32 preg = li.GetPrefer() - offset;
        if ((mask & (1u << preg)) != 0 && freeUntilPos[prefer] >= li.GetLastUse()) {
            return prefer;
        }
    }
    uint32 bestReg = 0;
    uint32 maxFreeUntilPos = 0;
    for (uint32 preg = 0; preg < k32BitSize; ++preg) {
        if ((mask & (1u << preg)) == 0) {
            continue;
        }
        uint32 regNO = preg + offset;
        if (freeUntilPos[regNO] >= li.GetLastUse()) {
            return regNO;
        }
        if (freeUntilPos[regNO] > maxFreeUntilPos) {
            maxFreeUntilPos = freeUntilPos[regNO];
            bestReg = regNO;
        }
    }
    return bestReg;
}
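
/*
 * Example (illustrative mask and positions): with mask 0b0110 and offset R0,
 * only R1 and R2 are candidates. If li's lastUse is 25 while
 * freeUntilPos[R1] == 18 and freeUntilPos[R2] == 30, R2 is returned
 * immediately; if every candidate were occupied again before 25, the one
 * that stays free the longest would be returned instead.
 */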

uint32 LSRALinearScanRegAllocator::FindAvailablePhyRegByFastAlloc(LiveInterval &li)
{
    uint32 regNO = 0;
    if (li.GetRegType() == kRegTyInt) {
        regNO = GetRegFromMask(intCalleeMask, firstIntReg, li);
        li.SetShouldSave(false);
        if (regNO == 0 || freeUntilPos[regNO] < li.GetLastUse()) {
            regNO = GetRegFromMask(intCallerMask, firstIntReg, li);
            li.SetShouldSave(true);
        }
    } else if (li.GetRegType() == kRegTyFloat) {
        regNO = GetRegFromMask(fpCalleeMask, firstFpReg, li);
        li.SetShouldSave(false);
        if (regNO == 0 || freeUntilPos[regNO] < li.GetLastUse()) {
            regNO = GetRegFromMask(fpCallerMask, firstFpReg, li);
            li.SetShouldSave(true);
        }
    }
    return regNO;
}

/* Determine if the live interval crosses a call */
bool LSRALinearScanRegAllocator::NeedSaveAcrossCall(LiveInterval &li)
{
    bool saveAcrossCall = false;
    for (uint32 callInsnID : callQueue) {
        if (callInsnID > li.GetLastUse()) {
            break;
        }
        if (callInsnID < li.GetFirstDef()) {
            continue;
        }
        /* Need to spill/fill around this call */
        for (auto range : li.GetRanges()) {
            uint32 start;
            if (range.GetEhStart() != 0 && range.GetEhStart() < range.GetStart()) {
                start = range.GetEhStart();
            } else {
                start = range.GetStart();
            }
            if (callInsnID >= start && callInsnID < range.GetEnd()) {
                saveAcrossCall = true;
                break;
            }
        }
        if (saveAcrossCall) {
            break;
        }
    }
    if (needDump) {
        if (saveAcrossCall) {
            LogInfo::MapleLogger() << "\t\tlive interval crosses a call\n";
        } else {
            LogInfo::MapleLogger() << "\t\tlive interval does not cross a call\n";
        }
    }
    return saveAcrossCall;
}
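
/*
 * Example (hypothetical ids): an interval with a range covering [10, 20) and
 * a call at insn 15 must be saved across the call, since a caller-saved
 * assignment would be clobbered; a call at insn 25 is past the range and is
 * ignored.
 */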

/* Return a phys register number for the live interval. */
uint32 LSRALinearScanRegAllocator::FindAvailablePhyReg(LiveInterval &li)
{
    if (fastAlloc) {
        return FindAvailablePhyRegByFastAlloc(li);
    }
    uint32 regNO = 0;
    if (li.GetRegType() == kRegTyInt) {
        regNO = FindAvailablePhyReg(li, true);
    } else {
        DEBUG_ASSERT(li.GetRegType() == kRegTyFloat, "impossible register type");
        regNO = FindAvailablePhyReg(li, false);
    }
    return regNO;
}

/* Spill and reload for caller-saved registers. */
void LSRALinearScanRegAllocator::InsertCallerSave(Insn &insn, Operand &opnd, bool isDef, uint32 spillIdx)
{
    auto &regOpnd = static_cast<RegOperand &>(opnd);
    uint32 vRegNO = regOpnd.GetRegisterNumber();
    if (vRegNO >= liveIntervalsArray.size()) {
        CHECK_FATAL(false, "index out of range in LSRALinearScanRegAllocator::InsertCallerSave");
    }
    LiveInterval *rli = liveIntervalsArray[vRegNO];
    auto regType = rli->GetRegType();

    if (!isDef && rli->IsNoNeedReloadPosition(insn.GetId())) {
        if (needDump) {
            LogInfo::MapleLogger() << "InsertCallerSave R" << rli->GetRegNO() << " assigned " << rli->GetAssignedReg()
                                   << " skipping\n";
        }
        return;
    }

    if (!rli->IsShouldSave()) {
        return;
    }

    uint32 regSize = rli->GetSpillSize();
    PrimType spType;

    if (regType == kRegTyInt) {
        spType = (regSize <= k32BitSize) ? PTY_u32 : PTY_u64;
    } else {
        spType = (regSize <= k32BitSize) ? PTY_f32 : PTY_f64;
    }

    if (needDump) {
        LogInfo::MapleLogger() << "InsertCallerSave R" << vRegNO << (isDef ? " def" : " use") << "\n";
    }

    if (!isDef && !rli->IsCallerSpilled()) {
        LogInfo::MapleLogger() << "WARNING: " << vRegNO << " caller restore without spill in bb "
                               << insn.GetBB()->GetId() << " : " << cgFunc->GetName() << "\n";
    }
    rli->SetIsCallerSpilled(true);

    MemOperand *memOpnd = nullptr;
    RegOperand *phyOpnd = nullptr;

    phyOpnd = regInfo->GetOrCreatePhyRegOperand(static_cast<regno_t>(rli->GetAssignedReg()), regSize, regType);
    std::string comment;
    bool isOutOfRange = false;
    auto tmpReg = static_cast<regno_t>(intSpillRegSet[spillIdx] + firstIntReg);
    if (isDef) {
        Insn *nextInsn = insn.GetNext();
        memOpnd = GetSpillMem(vRegNO, true, insn, tmpReg, isOutOfRange, regSize);
        Insn *stInsn = regInfo->BuildStrInsn(regSize, spType, *phyOpnd, *memOpnd);
        comment = " SPILL for caller_save " + std::to_string(vRegNO);
        ++callerSaveSpillCount;
        if (rli->GetLastUse() == insn.GetId()) {
            regInfo->FreeSpillRegMem(vRegNO);
            comment += " end";
        }
        stInsn->SetComment(comment);
        if (isOutOfRange && nextInsn != nullptr) {
            insn.GetBB()->InsertInsnBefore(*nextInsn, *stInsn);
        } else if (isOutOfRange && nextInsn == nullptr) {
            insn.GetBB()->AppendInsn(*stInsn);
        } else {
            insn.GetBB()->InsertInsnAfter(insn, *stInsn);
        }
    } else {
        memOpnd = GetSpillMem(vRegNO, false, insn, tmpReg, isOutOfRange, regSize);
        Insn *ldInsn = regInfo->BuildLdrInsn(regSize, spType, *phyOpnd, *memOpnd);
        comment = " RELOAD for caller_save " + std::to_string(vRegNO);
        ++callerSaveReloadCount;
        if (rli->GetLastUse() == insn.GetId()) {
            regInfo->FreeSpillRegMem(vRegNO);
            comment += " end";
        }
        ldInsn->SetComment(comment);
        insn.GetBB()->InsertInsnBefore(insn, *ldInsn);
    }
}

MemOperand *LSRALinearScanRegAllocator::GetSpillMem(uint32 vRegNO, bool isDest, Insn &insn, regno_t regNO,
                                                    bool &isOutOfRange, uint32 bitSize) const
{
    MemOperand *memOpnd = regInfo->GetOrCreatSpillMem(vRegNO, bitSize);
    return regInfo->AdjustMemOperandIfOffsetOutOfRange(memOpnd, std::make_pair(vRegNO, regNO), isDest, insn,
                                                       isOutOfRange);
}

/*
 * Generate spill/reload for an operand.
 * spillIdx : one of the 3 phys regs set aside for the purpose of spills.
 */
void LSRALinearScanRegAllocator::SpillOperand(Insn &insn, Operand &opnd, bool isDef, uint32 spillIdx)
{
    /*
     * Insert spill (def) and fill (use) instructions for the operand.
     * Keep track of the 'slot' (base 0). The actual slot on the stack
     * will be some 'base_slot_offset' + 'slot' off FP.
     * For simplification, the entire 64bit register is spilled/filled.
     *
     * For example, say a virtual register's home 'slot' on the stack is
     * location 5. This represents a 64bit slot (8 bytes). The base_slot_offset
     * off FP is determined by whoever lays out the frame, so the
     * stack address is ( FP - (5 * 8) + base_slot_offset ).
     * So the algorithm is simple: each virtual register that is not
     * allocated has to have a home address on the stack (a slot).
     * A class variable is used, starting from 0, incremented by 1.
     * Since a LiveInterval already represents unique regNO information,
     * just add a slot number to it. A subsequent reference to a regNO
     * will either get an allocated physical register or a slot number
     * for computing the stack location.
     *
     * This function will also determine whether the operand is a def or use.
     * For a def, spill instruction(s) are appended after the insn.
     * For a use, reload instruction(s) are prepended before the insn.
     * Use FP - (slot# * 8) for now. Will recompute if base_slot_offset
     * is not 0.
     *
     * The total number of slots used will be used to compute the stack
     * frame size. This will require some interface external to LSRA.
     *
     * For a normal instruction, two spill regs should be enough. The caller
     * controls which ones to use.
     * For more complex operations, the instruction needs to be broken down.
     * eg. store v1 -> [v2 + v3] // 3 regs needed
     *     => p1 <- v2           // address part 1
     *        p2 <- v3           // address part 2
     *        p1 <- p1 + p2      // freeing up p2
     *        p2 <- v1
     *        store p2 -> [p1]
     * or we can allocate more registers to the spill register set.
     * For store multiple, it needs to be broken down into two or more instrs.
     */
    auto &regOpnd = static_cast<RegOperand &>(opnd);
    uint32 regNO = regOpnd.GetRegisterNumber();
    if (needDump) {
        LogInfo::MapleLogger() << "SpillOperand " << regNO << "\n";
    }

    regno_t spReg;
    PrimType spType;
    CHECK_FATAL(regNO < liveIntervalsArray.size(), "index out of range in LSRALinearScanRegAllocator::SpillOperand");
    LiveInterval *li = liveIntervalsArray[regNO];
    DEBUG_ASSERT(!li->IsShouldSave(), "SpillOperand: Should not be caller");
    uint32 regSize = li->GetSpillSize();
    RegType regType = regOpnd.GetRegisterType();

    if (li->GetRegType() == kRegTyInt) {
        DEBUG_ASSERT((spillIdx < intSpillRegSet.size()), "SpillOperand: ran out of int spill regs");
        spReg = intSpillRegSet[spillIdx] + firstIntReg;
        spType = (regSize <= k32BitSize) ? PTY_u32 : PTY_u64;
    } else if (li->GetRegType() == kRegTyFloat) {
        DEBUG_ASSERT((spillIdx < fpSpillRegSet.size()), "SpillOperand: ran out of fp spill regs");
        spReg = fpSpillRegSet[spillIdx] + firstFpReg;
        spType = (regSize <= k32BitSize) ? PTY_f32 : PTY_f64;
    } else {
        CHECK_FATAL(false, "SpillOperand: Should be int or float type");
    }
    auto tmpReg = static_cast<regno_t>(intSpillRegSet[spillIdx] + firstIntReg);

    bool isOutOfRange = false;
    auto *phyOpnd = regInfo->GetOrCreatePhyRegOperand(static_cast<regno_t>(spReg), regSize, regType);
    li->SetAssignedReg(phyOpnd->GetRegisterNumber());

    MemOperand *memOpnd = nullptr;
    if (isDef) {
        /*
         * Need to assign spReg (one of the two spill regs) to the destination of the insn.
         *    spill_vreg <- opn1 op opn2
         * to
         *    spReg <- opn1 op opn2
         *    store spReg -> spillmem
         */
        li->SetStackSlot(kSpilled);

        ++spillCount;
        Insn *nextInsn = insn.GetNext();
        memOpnd = GetSpillMem(regNO, true, insn, tmpReg, isOutOfRange, regSize);
        Insn *stInsn = regInfo->BuildStrInsn(regSize, spType, *phyOpnd, *memOpnd);
        if (li->GetLastUse() == insn.GetId()) {
            regInfo->FreeSpillRegMem(regNO);
        }
        if (CGOptions::GetInstance().GenerateVerboseCG()) {
            std::string comment = " SPILL vreg:" + std::to_string(regNO);
            stInsn->SetComment(comment);
        }

        if (isOutOfRange && nextInsn != nullptr) {
            insn.GetBB()->InsertInsnBefore(*nextInsn, *stInsn);
        } else if (isOutOfRange && nextInsn == nullptr) {
            insn.GetBB()->AppendInsn(*stInsn);
        } else {
            insn.GetBB()->InsertInsnAfter(insn, *stInsn);
        }
    } else {
        /* Here, the reverse of isDef: change either opn1 or opn2 to the spReg. */
        if (li->GetStackSlot() == 0xFFFFFFFF) {
            LogInfo::MapleLogger() << "WARNING: " << regNO << " assigned " << li->GetAssignedReg()
                                   << " restore without spill in bb " << insn.GetBB()->GetId() << " : "
                                   << cgFunc->GetName() << "\n";
        }
        ++reloadCount;
        memOpnd = GetSpillMem(regNO, false, insn, tmpReg, isOutOfRange, regSize);
        Insn *ldInsn = regInfo->BuildLdrInsn(regSize, spType, *phyOpnd, *memOpnd);
        if (li->GetLastUse() == insn.GetId()) {
            regInfo->FreeSpillRegMem(regNO);
        }
        if (CGOptions::GetInstance().GenerateVerboseCG()) {
            std::string comment = " RELOAD vreg" + std::to_string(regNO);
            ldInsn->SetComment(comment);
        }
        insn.GetBB()->InsertInsnBefore(insn, *ldInsn);
    }
}

/* find the lowest-priority li, among the current active set, that meets the constraints related to li0 */
void LSRALinearScanRegAllocator::FindLowestPrioInActive(LiveInterval *&targetLi, LiveInterval *li0, RegType regType)
{
    std::map<regno_t, uint32> activeLiAssignedRegCnt;
    for (auto *li : active) {
        if (li->GetAssignedReg() != 0) {
            ++activeLiAssignedRegCnt[li->GetAssignedReg()];
        }
    }

    constexpr float kBiggestFloat = 1000.0;
    float lowestPrio = (li0 != nullptr ? li0->GetPriority() : kBiggestFloat);
    bool found = false;
    bool hintCalleeSavedReg = li0 && NeedSaveAcrossCall(*li0);
    MapleSet<LiveInterval *, ActiveCmp>::iterator lowestIt;
    for (auto it = active.begin(); it != active.end(); ++it) {
        LiveInterval *li = static_cast<LiveInterval *>(*it);
        regno_t regNO = li->GetAssignedReg();
        /* 1. Basic constraints */
        if (li->GetPriority() >= lowestPrio || li->GetRegType() != regType || li->GetLiParent() || li->GetLiChild()) {
            continue;
        }
        /* 2. If li was originally pre-assigned to a physical register, ignore it. */
        if (!regInfo->IsVirtualRegister(li->GetRegNO())) {
            continue;
        }
        /* 3. A callee-saved reg is preferred here. If li is assigned to a non-callee-saved reg, ignore it. */
        if (hintCalleeSavedReg && !regInfo->IsCalleeSavedReg(regNO - firstIntReg)) {
            continue;
        }
        /* 4. If regNO is assigned to multiple active li, ignore it. */
        if (activeLiAssignedRegCnt[regNO] > 1) {
            continue;
        }
        lowestPrio = li->GetPriority();
        lowestIt = it;
        found = true;
    }
    if (found) {
        targetLi = *lowestIt;
        itFinded = lowestIt;
    }
    return;
}

/* Mark a vreg's live interval as spilled. */
void LSRALinearScanRegAllocator::SetLiSpill(LiveInterval &li)
{
    uint32 regNO = li.GetRegNO();
    if (needDump) {
        LogInfo::MapleLogger() << "SetLiSpill " << regNO;
        LogInfo::MapleLogger() << "(" << li.GetFirstAcrossedCall();
        LogInfo::MapleLogger() << ", refCount " << li.GetRefCount() << ")\n";
    }
    li.SetStackSlot(kSpilled);
    li.SetShouldSave(false);
}

uint32 LSRALinearScanRegAllocator::HandleSpillForLi(LiveInterval &li)
{
    /* Choose the lowest-priority li to spill. */
    RegType regType = li.GetRegType();
    LiveInterval *spillLi = nullptr;
    FindLowestPrioInActive(spillLi, &li, regType);

    /*
     * Compare spillLi with the current li.
     * spillLi is null (and li's stack slot is already kSpilled) when li was spilled due to
     * LiveIntervalAnalysis.
     */
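    /*
     * Spill li itself (rather than evicting spillLi) when any of the following holds:
     *   - no eviction candidate was found;
     *   - li was already marked spilled by LiveIntervalAnalysis;
     *   - li is referenced no more often than spillLi;
     *   - spillLi's register does not stay free long enough to cover li's last use.
     */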
    if (!li.IsMustAllocate()) {
        if (spillLi == nullptr || li.GetStackSlot() == kSpilled || li.GetRefCount() <= spillLi->GetRefCount() ||
            freeUntilPos[spillLi->GetAssignedReg()] < li.GetLastUse()) {
            /* Spill the current li. */
            if (needDump) {
                LogInfo::MapleLogger() << "Flexible Spill: still spill " << li.GetRegNO() << ".\n";
            }
            SetLiSpill(li);
            return 0;
        }
    }
    DEBUG_ASSERT(spillLi != nullptr, "spillLi is null in LSRALinearScanRegAllocator::HandleSpillForLi");

    uint32 newRegNO = spillLi->GetAssignedReg();
    DEBUG_ASSERT(freeUntilPos[newRegNO] >= li.GetLastUse(), "phyReg has small free range.");

    if (needDump) {
        LogInfo::MapleLogger() << "Flexible Spill: " << spillLi->GetRegNO() << " instead of " << li.GetRegNO() << ".\n";
        PrintLiveInterval(*spillLi, "TO spill: ");
        PrintLiveInterval(li, "Instead of: ");
    }

    li.SetAssignedReg(newRegNO);
    if (!regInfo->IsCalleeSavedReg(newRegNO) && NeedSaveAcrossCall(li)) {
        li.SetShouldSave(true);
    }

    /* Spill the evicted live interval. */
    (void)active.erase(itFinded);
    SetLiSpill(*spillLi);
    spillLi->SetAssignedReg(0);

    (void)active.insert(&li);
    return newRegNO;
}

uint32 LSRALinearScanRegAllocator::FindAvailablePhyRegAcrossCall(LiveInterval &li, bool isIntReg)
{
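    /*
     * The interval crosses at least one call. A callee-saved register is the first choice: the
     * prologue/epilogue preserves it, so no save/restore is needed around each call. Failing
     * that, a caller-saved register can still be used, at the cost of marking the interval
     * ShouldSave so its value is saved and restored around calls.
     */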
    uint32 callerRegMask = isIntReg ? intCallerMask : fpCallerMask;
    uint32 calleeRegMask = isIntReg ? intCalleeMask : fpCalleeMask;
    regno_t reg0 = isIntReg ? firstIntReg : firstFpReg;

    /* A call lies in the live interval; use a callee-saved register if one is available. */
    auto bestReg = GetRegFromMask(calleeRegMask, reg0, li);
    if (bestReg != 0 && freeUntilPos[bestReg] >= li.GetLastUse()) {
        li.SetShouldSave(false);
        return bestReg;
    }
    /* This could be refined to consider multiple uses between calls rather than within a BB. */
    if (bestReg == 0 || li.IsMultiUseInBB()) {
        auto secondReg = GetRegFromMask(callerRegMask, reg0, li);
        if (freeUntilPos[secondReg] >= li.GetLastUse()) {
            li.SetShouldSave(true);
            return secondReg;
        }
    }
    return 0;
}

uint32 LSRALinearScanRegAllocator::FindAvailablePhyReg(LiveInterval &li, bool isIntReg)
{
    /* Check whether the register is live across a call. */
    bool liAcrossCall = NeedSaveAcrossCall(li);
    if (liAcrossCall && !regInfo->IsPrefCallerSaveRegs(li.GetRegType(), li.GetSpillSize())) {
        return FindAvailablePhyRegAcrossCall(li, isIntReg);
    }

    uint32 callerRegMask = isIntReg ? intCallerMask : fpCallerMask;
    uint32 calleeRegMask = isIntReg ? intCalleeMask : fpCalleeMask;
    regno_t reg0 = isIntReg ? firstIntReg : firstFpReg;
    auto bestReg = GetRegFromMask(callerRegMask, reg0, li);
    if (bestReg == 0) {
        bestReg = GetRegFromMask(calleeRegMask, reg0, li);
    } else if (freeUntilPos[bestReg] < li.GetLastUse()) {
        auto secondReg = GetRegFromMask(calleeRegMask, reg0, li);
        if (secondReg != 0) {
            bestReg = (freeUntilPos[bestReg] > freeUntilPos[secondReg]) ? bestReg : secondReg;
        }
    }

    if (bestReg != 0 && freeUntilPos[bestReg] < li.GetLastUse()) {
        DEBUG_ASSERT(freeUntilPos[bestReg] != 0, "impossible");
        bestReg = 0;
    }
    /* try to fill in holes */
    /* try first split if no hole exists */
    return bestReg;
}

/* Wrapper to find and assign a physical register for a live interval. */
uint32 LSRALinearScanRegAllocator::AssignPhysRegs(LiveInterval &li)
{
    if (spillAll && !li.IsMustAllocate()) {
        return 0;
    }

    /* Pre-spilled (stack slot already set, e.g. by LiveIntervalAnalysis): do not allocate unless forced. */
    if (li.GetStackSlot() != 0xFFFFFFFF && !li.IsMustAllocate()) {
        return 0;
    }

    if (needDump) {
        uint32 activeSz = active.size();
        LogInfo::MapleLogger() << "\tAssignPhysRegs-active_sz " << activeSz << "\n";
    }

    uint32 regNO = FindAvailablePhyReg(li);
    if (regNO != 0) {
        li.SetAssignedReg(regNO);
        if (regInfo->IsCalleeSavedReg(regNO)) {
            if (needDump) {
                LogInfo::MapleLogger() << "\tCallee-saved register for save/restore in prologue/epilogue: " << regNO
                                       << "\n";
            }
            cgFunc->AddtoCalleeSaved(regNO);
        }
    }
    return regNO;
}

void LSRALinearScanRegAllocator::AssignPhysRegsForLi(LiveInterval &li)
{
    uint32 newRegNO = AssignPhysRegs(li);
    if (newRegNO == 0) {
        newRegNO = HandleSpillForLi(li);
    }

    if (newRegNO != 0) {
        (void)active.insert(&li);
    }
}

/* Replace a use-def operand with its assigned physical register, inserting save/restore code as needed. */
RegOperand *LSRALinearScanRegAllocator::GetReplaceUdOpnd(Insn &insn, Operand &opnd, uint32 &spillIdx)
{
    if (!opnd.IsRegister()) {
        return nullptr;
    }
    const auto *regOpnd = static_cast<RegOperand *>(&opnd);

    uint32 vRegNO = regOpnd->GetRegisterNumber();
    RegType regType = regOpnd->GetRegisterType();
    if (regType == kRegTyCc || regType == kRegTyVary) {
        return nullptr;
    }
    if (regInfo->IsUnconcernedReg(vRegNO) || !regInfo->IsVirtualRegister(vRegNO)) {
        return nullptr;
    }

    DEBUG_ASSERT(vRegNO < liveIntervalsArray.size(),
                 "index out of range of MapleVector in LSRALinearScanRegAllocator::GetReplaceUdOpnd");
    LiveInterval *li = liveIntervalsArray[vRegNO];

    regno_t regNO = li->GetAssignedReg();
    if (regInfo->IsCalleeSavedReg(regNO)) {
        cgFunc->AddtoCalleeSaved(regNO);
    }

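    /*
     * A use-def operand is both read and written by the insn, so spill code is inserted on both
     * sides: a reload before the insn (isDef == false) and a store after it (isDef == true).
     */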
    if (li->IsShouldSave()) {
        InsertCallerSave(insn, opnd, false, spillIdx);
        InsertCallerSave(insn, opnd, true, spillIdx);
    } else if (li->GetStackSlot() == kSpilled) {
        SpillOperand(insn, opnd, false, spillIdx);
        SpillOperand(insn, opnd, true, spillIdx);
        ++spillIdx;
    }
    RegOperand *phyOpnd =
        regInfo->GetOrCreatePhyRegOperand(static_cast<regno_t>(li->GetAssignedReg()), opnd.GetSize(), regType);

    return phyOpnd;
}

/*
 * Create an operand with the assigned physical register, or with a spill register
 * in the case where a physical register could not be assigned.
 */
RegOperand *LSRALinearScanRegAllocator::GetReplaceOpnd(Insn &insn, Operand &opnd, uint32 &spillIdx, bool isDef)
{
    if (!opnd.IsRegister()) {
        return nullptr;
    }
    const auto *regOpnd = static_cast<RegOperand *>(&opnd);

    uint32 vRegNO = regOpnd->GetRegisterNumber();
    RegType regType = regOpnd->GetRegisterType();
    if (regType == kRegTyCc || regType == kRegTyVary) {
        return nullptr;
    }
    if (regInfo->IsUnconcernedReg(vRegNO) || !regInfo->IsVirtualRegister(vRegNO)) {
        return nullptr;
    }

    DEBUG_ASSERT(vRegNO < liveIntervalsArray.size(),
                 "index out of range of MapleVector in LSRALinearScanRegAllocator::GetReplaceOpnd");
    LiveInterval *li = liveIntervalsArray[vRegNO];

    regno_t regNO = li->GetAssignedReg();
    if (regInfo->IsCalleeSavedReg(regNO)) {
        cgFunc->AddtoCalleeSaved(regNO);
    }

    if (li->IsShouldSave() || li->GetStackSlot() == kSpilled) {
        spillIdx = isDef ? 0 : spillIdx;
        if (li->IsShouldSave()) {
            InsertCallerSave(insn, opnd, isDef, spillIdx);
        } else {
            SpillOperand(insn, opnd, isDef, spillIdx);
        }
        if (!isDef) {
            ++spillIdx;
        }
    }
    RegOperand *phyOpnd =
        regInfo->GetOrCreatePhyRegOperand(static_cast<regno_t>(li->GetAssignedReg()), opnd.GetSize(), regType);

    return phyOpnd;
}

bool LSRALinearScanRegAllocator::CallerSaveOpt::UpdateLocalDefWithBBLiveIn(const BB &bb)
{
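    /*
     * If a caller-saved value is live-in to bb and every predecessor still holds a valid
     * definition after its last call (recorded in defAfterInsn), the value is already in its
     * register at the entry of bb, so bb itself becomes a "local def" block for this interval.
     */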
    bool changed = false;
    auto bbId = bb.GetId();
    for (uint32 i = 0; i < callerSaves.size(); ++i) {
        auto *li = callerSaves[i];
        auto &defLiveIn = localDefLiveIn[i];
        auto &defAfterInsn = localDefAfterInsn[i];

        if (bb.GetLiveInRegNO().count(li->GetRegNO()) == 0) {
            continue;
        }
        if (defLiveIn.count(bbId) != 0) {
            continue;
        }
        bool isLocalDef = true;
        for (auto *pred : bb.GetPreds()) {
            if (defAfterInsn.count(pred->GetId()) == 0) {
                isLocalDef = false;
                break;
            }
        }
        if (isLocalDef) {
            defLiveIn.insert(bb.GetId());
            defAfterInsn.emplace(bb.GetId(), 0);
            changed = true;
        }
    }
    return changed;
}

void LSRALinearScanRegAllocator::CallerSaveOpt::CollectCallerNoNeedReloadByInsn(const Insn &insn)
{
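    /*
     * Walk one insn: a call invalidates the local def in this BB (the caller-saved register is
     * clobbered), while a use at a position already covered by a valid def needs no reload and
     * is recorded via AddNoNeedReloadPosition.
     */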
    auto curId = insn.GetId();
    for (uint32 i = 0; i < callerSaves.size(); ++i) {
        auto *li = callerSaves[i];
        auto &defLiveIn = localDefLiveIn[i];
        auto &defAfterInsn = localDefAfterInsn[i];
        auto posRange = li->FindPosRange(curId);
        if (posRange == li->GetRanges().end() || posRange->GetStart() > curId) {
            continue;
        }
        if (insn.IsCall()) {
            defAfterInsn.erase(insn.GetBB()->GetId());
            continue;
        }
        auto &usePositions = li->GetUsePositions();
        if (std::binary_search(usePositions.begin(), usePositions.end(), curId)) {
            auto iter = defAfterInsn.find(insn.GetBB()->GetId());
            if (iter == defAfterInsn.end() || iter->second > curId) {
                defAfterInsn.insert_or_assign(insn.GetBB()->GetId(), curId);
                continue;
            }
            if (iter->second == curId && defLiveIn.count(insn.GetBB()->GetId()) == 0) {
                continue;
            }
            li->AddNoNeedReloadPosition(curId);
        }
    }
}

void LSRALinearScanRegAllocator::CallerSaveOpt::Run()
{
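    /*
     * Iterate to a fixed point: local-def information flows forward across BBs (a BB can become
     * a local-def block only after all of its predecessors have), so keep sweeping the
     * BFS-ordered BBs until no UpdateLocalDefWithBBLiveIn call changes anything.
     */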
    bool changed;
    do {
        changed = false;
        for (auto *li : callerSaves) {
            li->InitRangeFinder();
        }
        for (BB *bb : bfs->sortedBBs) {
            if (!UpdateLocalDefWithBBLiveIn(*bb) && !firstTime) {
                continue;
            }
            changed = true;
            FOR_BB_INSNS(insn, bb)
            {
                if (!insn->IsMachineInstruction() || insn->GetId() == 0) {
                    continue;
                }
                CollectCallerNoNeedReloadByInsn(*insn);
            }
        }
        firstTime = false;
    } while (changed);
}

void LSRALinearScanRegAllocator::FinalizeUseRegisters(Insn &insn, uint32 &spillIdx)
{
    const InsnDesc *md = insn.GetDesc();

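    /*
     * Only use (source) operands are rewritten here. Pure def operands are skipped, with one
     * exception: a memory-access operand is always walked, because its base and index registers
     * are reads even when the operand as a whole is a def.
     */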
    for (uint32 i = 0; i < insn.GetOperandSize(); ++i) {
        const OpndDesc *opndDesc = md->GetOpndDes(i);
        DEBUG_ASSERT(opndDesc != nullptr, "pointer is null in LSRALinearScanRegAllocator::FinalizeUseRegisters");
        Operand &opnd = insn.GetOperand(i);
        if (opndDesc->IsDef() && !opnd.IsMemoryAccessOperand()) {
            continue;
        }
        if (opnd.IsList()) {
            auto &listOpnd = static_cast<ListOperand &>(opnd);
            auto *newList = &cgFunc->GetOpndBuilder()->CreateList(cgFunc->GetFuncScopeAllocator()->GetMemPool());
            for (auto *regOpnd : listOpnd.GetOperands()) {
                RegOperand *phyOpnd =
                    regInfo->IsVirtualRegister(*regOpnd) ? GetReplaceOpnd(insn, *regOpnd, spillIdx, false) : regOpnd;
                newList->PushOpnd(*phyOpnd);
            }
            insn.SetOperand(i, *newList);
        } else if (opnd.IsMemoryAccessOperand()) {
            auto *memOpnd = static_cast<MemOperand *>(static_cast<MemOperand &>(opnd).Clone(*cgFunc->GetMemoryPool()));
            DEBUG_ASSERT(memOpnd != nullptr, "memopnd is null in LSRALinearScanRegAllocator::FinalizeUseRegisters");
            insn.SetOperand(i, *memOpnd);
            Operand *base = memOpnd->GetBaseRegister();
            Operand *offset = memOpnd->GetIndexRegister();
            if (base != nullptr) {
                auto *phyOpnd = GetReplaceOpnd(insn, *base, spillIdx, false);
                if (phyOpnd != nullptr) {
                    memOpnd->SetBaseRegister(*phyOpnd);
                }
            }
            if (offset != nullptr) {
                auto *phyOpnd = GetReplaceOpnd(insn, *offset, spillIdx, false);
                if (phyOpnd != nullptr) {
                    memOpnd->SetIndexRegister(*phyOpnd);
                }
            }
        } else {
            auto *phyOpnd = GetReplaceOpnd(insn, opnd, spillIdx, false);
            if (phyOpnd != nullptr) {
                insn.SetOperand(i, *phyOpnd);
            }
        }
    }
}

void LSRALinearScanRegAllocator::FinalizeUseDefRegisters(Insn &insn, uint32 &spillIdx)
{
    const InsnDesc *md = insn.GetDesc();

    for (uint32 i = 0; i < insn.GetOperandSize(); ++i) {
        const OpndDesc *opndDesc = md->GetOpndDes(i);
        DEBUG_ASSERT(opndDesc != nullptr, "pointer is null in LSRALinearScanRegAllocator::FinalizeUseDefRegisters");
        Operand &opnd = insn.GetOperand(i);
        bool isUseDef = opndDesc->IsRegDef() && opndDesc->IsRegUse();
        if (!isUseDef) {
            continue;
        }
        RegOperand *phyOpnd = GetReplaceUdOpnd(insn, opnd, spillIdx);
        if (phyOpnd != nullptr) {
            insn.SetOperand(i, *phyOpnd);
        }
    }
}

void LSRALinearScanRegAllocator::FinalizeDefRegisters(Insn &insn, uint32 &spillIdx)
{
    const InsnDesc *md = insn.GetDesc();

    for (uint32 i = 0; i < insn.GetOperandSize(); ++i) {
        const OpndDesc *opndDesc = md->GetOpndDes(i);
        DEBUG_ASSERT(opndDesc != nullptr, "pointer is null in LSRALinearScanRegAllocator::FinalizeDefRegisters");
        Operand &opnd = insn.GetOperand(i);
        if (opndDesc->IsUse() || opnd.IsMemoryAccessOperand()) {
            continue;
        }
        if (opnd.IsRegister()) {
            RegOperand *phyOpnd = GetReplaceOpnd(insn, opnd, spillIdx, true);
            if (phyOpnd != nullptr) {
                insn.SetOperand(i, *phyOpnd);
            }
        } else if (opnd.IsList()) {
            auto &listOpnd = static_cast<ListOperand &>(opnd);
            auto *newList = &cgFunc->GetOpndBuilder()->CreateList(cgFunc->GetFuncScopeAllocator()->GetMemPool());
            for (auto *regOpnd : listOpnd.GetOperands()) {
                RegOperand *phyOpnd =
                    regInfo->IsVirtualRegister(*regOpnd) ? GetReplaceOpnd(insn, *regOpnd, spillIdx, true) : regOpnd;
                newList->PushOpnd(*phyOpnd);
            }
            insn.SetOperand(i, *newList);
        }
    }
}

void LSRALinearScanRegAllocator::FinalizeFreeReferenceSpillStack(Insn &insn)
{
    if (!insn.IsCall()) {
        return;
    }

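    /*
     * At a call site, release the spill slot of any reference vreg whose live interval has
     * ended: once insn.GetId() is past the interval's last use, the slot can be reused by
     * later spills.
     */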
    auto freeSpillStack = [this, &insn](regno_t regno) {
        auto *li = liveIntervalsArray[regno];
        if (li != nullptr && insn.GetId() >= li->GetLastUse()) {
            regInfo->FreeSpillRegMem(regno);
        }
    };

    // reference map info
    for (auto regNO : insn.GetStackMapLiveIn()) {
        freeSpillStack(regNO);
        auto iter = derivedRef2Base.find(regNO);
        if (iter != derivedRef2Base.end()) {
            freeSpillStack(iter->second->GetRegisterNumber());
        }
    }
    // deopt info
    if (insn.GetStackMap() == nullptr) {
        return;
    }
    for (auto [_, opnd] : insn.GetStackMap()->GetDeoptInfo().GetDeoptBundleInfo()) {
        if (!opnd->IsRegister()) {
            continue;
        }
        freeSpillStack(static_cast<RegOperand *>(opnd)->GetRegisterNumber());
    }
}

/* Iterate through all instructions and replace each vreg with its assigned preg. */
void LSRALinearScanRegAllocator::FinalizeRegisters()
{
    RA_TIMER_REGISTER(lsra, "LSRA FinalizeRegisters");
    CallerSaveOpt opt(liveIntervalsArray, bfs);
    opt.Run();
    for (BB *bb : bfs->sortedBBs) {
        FOR_BB_INSNS(insn, bb)
        {
            if (!insn->IsMachineInstruction() || insn->GetId() == 0) {
                continue;
            }

            uint32 spillIdx = 0;
            // Handle source (use) operands first
            FinalizeUseRegisters(*insn, spillIdx);

            // Handle use-def operands
            FinalizeUseDefRegisters(*insn, spillIdx);

            // Handle destination (def) operands last
            FinalizeDefRegisters(*insn, spillIdx);

            // Free spill stack slots for reference regs
            FinalizeFreeReferenceSpillStack(*insn);
        }
    }
}

void LSRALinearScanRegAllocator::SetStackMapDerivedInfo()
{
    for (BB *bb : bfs->sortedBBs) {
        FOR_BB_INSNS(insn, bb) {
            if (!insn->IsMachineInstruction()) {
                continue;
            }
            const InsnDesc *md = insn->GetDesc();
            for (uint32 i = 0; i < insn->GetOperandSize(); ++i) {
                if (!md->GetOpndDes(i)->IsRegDef()) {
                    continue;
                }
                auto &regOpnd = static_cast<RegOperand &>(insn->GetOperand(i));
                if (regOpnd.GetBaseRefOpnd() != nullptr) {
                    // record the base reference of a derived reference for the stackmap
                    derivedRef2Base[regOpnd.GetRegisterNumber()] = regOpnd.GetBaseRefOpnd();
                }
            }
        }
    }
}

void LSRALinearScanRegAllocator::CollectReferenceMap()
{
    RA_TIMER_REGISTER(lsra, "LSRA CollectReferenceMap");
    const auto &referenceMapInsns = cgFunc->GetStackMapInsns();
    if (needDump) {
        LogInfo::MapleLogger() << "===========reference map stack info================\n";
    }

    for (auto *insn : referenceMapInsns) {
        for (auto regNO : insn->GetStackMapLiveIn()) {
            if (!cgFunc->IsRegReference(regNO)) {
                continue;
            }

            auto *li = liveIntervalsArray[regNO];
            if (li == nullptr) {
                continue;
            }

            if (li->IsShouldSave() || li->GetStackSlot() == kSpilled) {
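                /*
                 * The reference lives in a stack slot at this insn. For a derived reference, the
                 * base reference's slot is recorded as well, so the GC can relocate both together.
                 */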
                auto itr = derivedRef2Base.find(regNO);
                if (itr != derivedRef2Base.end()) {
                    auto baseRegNum = (itr->second)->GetRegisterNumber();
                    MemOperand *baseRegMemOpnd = cgFunc->GetOrCreatSpillMem(baseRegNum, k64BitSize);
                    int64 baseRefMemoffset = baseRegMemOpnd->GetOffsetImmediate()->GetOffsetValue();
                    insn->GetStackMap()->GetReferenceMap().ReocordStackRoots(baseRefMemoffset);
                    if (needDump) {
                        LogInfo::MapleLogger() << "--------insn id: " << insn->GetId()
                                               << " base regNO: " << baseRegNum
                                               << " offset: " << baseRefMemoffset << std::endl;
                    }
                }
                MemOperand *memOperand = cgFunc->GetOrCreatSpillMem(regNO, k64BitSize);
                int64 offset = memOperand->GetOffsetImmediate()->GetOffsetValue();
                insn->GetStackMap()->GetReferenceMap().ReocordStackRoots(offset);
                if (itr == derivedRef2Base.end()) {
                    insn->GetStackMap()->GetReferenceMap().ReocordStackRoots(offset);
                }
                if (needDump) {
                    LogInfo::MapleLogger() << "--------insn id: " << insn->GetId() << " regNO: " << regNO
                                           << " offset: " << offset << std::endl;
                }
            } else {
                // li->GetAssignedReg - R0/RAX?
                CHECK_FATAL(false, "not supported currently");
                insn->GetStackMap()->GetReferenceMap().ReocordRegisterRoots(li->GetAssignedReg());
            }
        }
    }

    if (needDump) {
        LogInfo::MapleLogger() << "===========reference map stack info end================\n";
        LogInfo::MapleLogger() << "===========reference map info================\n";
        for (auto *insn : referenceMapInsns) {
            LogInfo::MapleLogger() << " referenceMap insn: ";
            insn->Dump();
            insn->GetStackMap()->GetReferenceMap().Dump();
        }
    }
}

void LSRALinearScanRegAllocator::SolveRegOpndDeoptInfo(const RegOperand &regOpnd, DeoptInfo &deoptInfo,
                                                       int32 deoptVregNO) const
{
    if (!regInfo->IsVirtualRegister(regOpnd)) {
        /* Physical register: record the register location directly. */
        deoptInfo.RecordDeoptVreg2LocationInfo(deoptVregNO, LocationInfo({kInRegister, 0}));
        return;
    }
    /* Process a virtual RegOperand. */
    regno_t vRegNO = regOpnd.GetRegisterNumber();
    LiveInterval *li = liveIntervalsArray[vRegNO];
    if (li->IsShouldSave() || li->GetStackSlot() == kSpilled) {
        MemOperand *memOpnd = cgFunc->GetOrCreatSpillMem(vRegNO, regOpnd.GetSize());
        SolveMemOpndDeoptInfo(*(static_cast<const MemOperand *>(memOpnd)), deoptInfo, deoptVregNO);
    } else {
        /* The value lives in its assigned register. */
        deoptInfo.RecordDeoptVreg2LocationInfo(deoptVregNO, LocationInfo({kInRegister, li->GetAssignedReg()}));
    }
}

void LSRALinearScanRegAllocator::SolveMemOpndDeoptInfo(const MemOperand &memOpnd, DeoptInfo &deoptInfo,
                                                       int32 deoptVregNO) const
{
    int64 offset = memOpnd.GetOffsetImmediate()->GetOffsetValue();
    deoptInfo.RecordDeoptVreg2LocationInfo(deoptVregNO, LocationInfo({kOnStack, offset}));
}

void LSRALinearScanRegAllocator::CollectDeoptInfo()
{
    RA_TIMER_REGISTER(lsra, "LSRA CollectDeoptInfo");
    const auto &referenceMapInsns = cgFunc->GetStackMapInsns();
    for (auto *insn : referenceMapInsns) {
        auto &deoptInfo = insn->GetStackMap()->GetDeoptInfo();
        const auto &deoptBundleInfo = deoptInfo.GetDeoptBundleInfo();
        if (deoptBundleInfo.empty()) {
            continue;
        }
        for (const auto &item : deoptBundleInfo) {
            const auto *opnd = item.second;
            if (opnd->IsRegister()) {
                SolveRegOpndDeoptInfo(*static_cast<const RegOperand *>(opnd), deoptInfo, item.first);
                continue;
            }
            if (opnd->IsImmediate()) {
                const auto *immOpnd = static_cast<const ImmOperand *>(opnd);
                deoptInfo.RecordDeoptVreg2LocationInfo(item.first, LocationInfo({kInConstValue, immOpnd->GetValue()}));
                continue;
            }
            if (opnd->IsMemoryAccessOperand()) {
                SolveMemOpndDeoptInfo(*(static_cast<const MemOperand *>(opnd)), deoptInfo, item.first);
                continue;
            }
            DEBUG_ASSERT(false, "can't reach here!");
        }
    }
    if (needDump) {
        LogInfo::MapleLogger() << "===========deopt info================\n";
        for (auto *insn : referenceMapInsns) {
            LogInfo::MapleLogger() << "---- deoptInfo insn: ";
            insn->Dump();
            insn->GetStackMap()->GetDeoptInfo().Dump();
        }
    }
}

void LSRALinearScanRegAllocator::SetAllocMode()
{
    if (CGOptions::IsFastAlloc()) {
        if (CGOptions::GetFastAllocMode() == 0) {
            fastAlloc = true;
        } else {
            spillAll = true;
        }
        /* An in-range spill range can still be specified (only works with --dump-func=). */
    } else if (cgFunc->NumBBs() > CGOptions::GetLSRABBOptSize()) {
        /* The instruction count is checked in ComputeLiveInterval(). */
        fastAlloc = true;
    }

    if (needDump) {
        if (fastAlloc) {
            LogInfo::MapleLogger() << "fastAlloc mode on\n";
        }
        if (spillAll) {
            LogInfo::MapleLogger() << "spillAll mode on\n";
        }
    }
}

void LSRALinearScanRegAllocator::LinearScanRegAllocator()
{
    RA_TIMER_REGISTER(lsra, "LSRA LinearScanRegAllocator");
    if (needDump) {
        PrintParamQueue("Initial param queue");
        PrintCallQueue("Initial call queue");
    }
    freeUntilPos.resize(regInfo->GetAllRegNum(), UINT32_MAX);
    MapleVector<uint32> initialPosVec(freeUntilPos);
    uint32 curInsnID = 0;

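    /*
     * Main scan: live intervals are popped in increasing order of their first definition. For
     * each interval, expired actives are retired, freeUntilPos is recomputed from the remaining
     * active/param/call information, and only then is a physical register chosen.
     */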
    while (!liQue.empty()) {
        LiveInterval *li = liQue.front();
        liQue.pop_front();
        if (li->GetRangesSize() == 0) {
            /* Range building has been skipped, so build one range from first def to last use. */
            li->AddRange(li->GetFirstDef(), li->GetLastUse());
        }
        li->InitRangeFinder();
        if (needDump) {
            LogInfo::MapleLogger() << "======Alloc R" << li->GetRegNO() << "======"
                                   << "\n";
        }
        blockForbiddenMask = 0;
        freeUntilPos = initialPosVec;
        DEBUG_ASSERT(li->GetFirstDef() >= curInsnID, "wrong li order");
        curInsnID = li->GetFirstDef();
        RetireActive(*li, curInsnID);
        UpdateCallQueueAtRetirement(curInsnID);
        UpdateActiveAllocateInfo(*li);
        UpdateParamAllocateInfo(*li);
        if (needDump) {
            DebugCheckActiveList();
            LogInfo::MapleLogger() << "freeUntilPos:";
            for (uint32 i = 0; i < freeUntilPos.size(); ++i) {
                LogInfo::MapleLogger() << "[" << i << "," << freeUntilPos[i] << "], ";
            }
            LogInfo::MapleLogger() << "\n";
        }
        AssignPhysRegsForLi(*li);
    }
}

void LSRALinearScanRegAllocator::CollectStackMapInfo()
{
    CollectReferenceMap();
    CollectDeoptInfo();
}

/* Main entrance for the LSRA register allocator. */
bool LSRALinearScanRegAllocator::AllocateRegisters()
{
    cgFunc->SetIsAfterRegAlloc();
    liveIntervalsArray.resize(cgFunc->GetMaxVReg());
    regInfo->Fini();
    SetAllocMode();
    if (needDump) {
        const MIRModule &mirModule = cgFunc->GetMirModule();
        DotGenerator::GenerateDot("RA", *cgFunc, mirModule);
        DotGenerator::GenerateDot("RAe", *cgFunc, mirModule);
        LogInfo::MapleLogger() << "Entering LinearScanRegAllocator: " << cgFunc->GetName() << "\n";
    }

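    /*
     * Phase order: record derived-reference bases (for stackmaps), build live intervals,
     * pre-spill stackmap-live vregs, run the LiveIntervalAnalysis pre-spill pass, allocate with
     * the linear scan, rewrite all operands to physical registers, and finally collect
     * stackmap/deopt locations.
     */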
    if (!cgFunc->IsStackMapComputed()) {
        SetStackMapDerivedInfo();
    }

    ComputeLiveInterval();

    if (!cgFunc->IsStackMapComputed()) {
        SpillStackMapInfo();
    }

    if (needDump) {
        PrintLiveRangesGraph();
    }

    bool enableDoLSRAPreSpill = true;
    if (enableDoLSRAPreSpill) {
        LiveIntervalAnalysis();
    }

    InitFreeRegPool();

    LinearScanRegAllocator();

    if (needDump) {
        PrintAllLiveRanges();
    }

    FinalizeRegisters();

    if (!cgFunc->IsStackMapComputed()) {
        CollectStackMapInfo();
    }

    if (needDump) {
        LogInfo::MapleLogger() << "Total " << spillCount << " spillCount in " << cgFunc->GetName() << " \n";
        LogInfo::MapleLogger() << "Total " << reloadCount << " reloadCount\n";
        LogInfo::MapleLogger() << "Total "
                               << "(" << spillCount << " + " << callerSaveSpillCount
                               << ") = " << (spillCount + callerSaveSpillCount) << " SPILL\n";
        LogInfo::MapleLogger() << "Total "
                               << "(" << reloadCount << " + " << callerSaveReloadCount
                               << ") = " << (reloadCount + callerSaveReloadCount) << " RELOAD\n";
        uint32_t insertInsn = spillCount + callerSaveSpillCount + reloadCount + callerSaveReloadCount;
        float rate = (float(insertInsn) / float(maxInsnNum));
        LogInfo::MapleLogger() << "insn num before RA: " << maxInsnNum << ", inserted " << insertInsn
                               << " insns, insertInsn/insnNumBeforeRA: " << rate << "\n";
    }

    bfs = nullptr; /* bfs is not used outside this function. */

    return true;
}

} /* namespace maplebe */