//===- RegisterScavenging.cpp - Machine register scavenging ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file implements the machine register scavenger. It can provide
/// information, such as unused registers, at any point in a machine basic
/// block. It also provides a mechanism to make registers available by evicting
/// them to spill slots.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/RegisterScavenging.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/LiveRegUnits.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/InitializePasses.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <iterator>
#include <limits>
#include <string>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "reg-scavenging"

STATISTIC(NumScavengedRegs, "Number of frame index regs scavenged");

void RegScavenger::setRegUsed(Register Reg, LaneBitmask LaneMask) {
  LiveUnits.addRegMasked(Reg, LaneMask);
}

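/// (Re)initialize scavenger state for the function containing \p MBB: cache
/// TII/TRI/MRI, size the register-unit bitvectors on first use, clear any
/// previously claimed scavenging slot entries, and disable liveness tracking
/// until enterBasicBlock()/enterBasicBlockEnd() seeds it.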
void RegScavenger::init(MachineBasicBlock &MBB) {
  MachineFunction &MF = *MBB.getParent();
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getSubtarget().getRegisterInfo();
  MRI = &MF.getRegInfo();
  LiveUnits.init(*TRI);

  assert((NumRegUnits == 0 || NumRegUnits == TRI->getNumRegUnits()) &&
         "Target changed?");

  // Self-initialize.
  if (!this->MBB) {
    NumRegUnits = TRI->getNumRegUnits();
    KillRegUnits.resize(NumRegUnits);
    DefRegUnits.resize(NumRegUnits);
    TmpRegUnits.resize(NumRegUnits);
  }
  this->MBB = &MBB;

  for (ScavengedInfo &SI : Scavenged) {
    SI.Reg = 0;
    SI.Restore = nullptr;
  }

  Tracking = false;
}

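/// Prepare to scavenge forward through \p MBB: reset per-block state and seed
/// the live set with the block's live-in registers.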
void RegScavenger::enterBasicBlock(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveIns(MBB);
}

void RegScavenger::enterBasicBlockEnd(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveOuts(MBB);

  // Move the internal iterator to the last instruction of the block.
  if (MBB.begin() != MBB.end()) {
    MBBI = std::prev(MBB.end());
    Tracking = true;
  }
}

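/// Set the bit of every register unit of \p Reg in \p BV.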
void RegScavenger::addRegUnits(BitVector &BV, MCRegister Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.set(*RUI);
}

void RegScavenger::removeRegUnits(BitVector &BV, MCRegister Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.reset(*RUI);
}

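/// Compute the register units killed and defined by the instruction at the
/// current position (MBBI), recording them in KillRegUnits and DefRegUnits.
/// A regmask operand kills every register unit that has a clobbered root
/// register.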
void RegScavenger::determineKillsAndDefs() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  MachineInstr &MI = *MBBI;
  assert(!MI.isDebugInstr() && "Debug values have no kills or defs");

  // Find out which registers are early clobbered, killed, defined, and marked
  // def-dead in this instruction.
  KillRegUnits.reset();
  DefRegUnits.reset();
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isRegMask()) {
      TmpRegUnits.clear();
      for (unsigned RU = 0, RUEnd = TRI->getNumRegUnits(); RU != RUEnd; ++RU) {
        for (MCRegUnitRootIterator RURI(RU, TRI); RURI.isValid(); ++RURI) {
          if (MO.clobbersPhysReg(*RURI)) {
            TmpRegUnits.set(RU);
            break;
          }
        }
      }

      // Apply the mask.
      KillRegUnits |= TmpRegUnits;
    }
    if (!MO.isReg())
      continue;
    if (!MO.getReg().isPhysical() || isReserved(MO.getReg()))
      continue;
    MCRegister Reg = MO.getReg().asMCReg();

    if (MO.isUse()) {
      // Ignore undef uses.
      if (MO.isUndef())
        continue;
      if (MO.isKill())
        addRegUnits(KillRegUnits, Reg);
    } else {
      assert(MO.isDef());
      if (MO.isDead())
        addRegUnits(KillRegUnits, Reg);
      else
        addRegUnits(DefRegUnits, Reg);
    }
  }
}

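/// Move the internal position forward by one instruction and update the
/// tracked liveness: units killed by the new instruction become unused, units
/// it defines become used. Scavenging slot entries whose restore point is
/// reached are expired.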
void RegScavenger::forward() {
  // Move ptr forward.
  if (!Tracking) {
    MBBI = MBB->begin();
    Tracking = true;
  } else {
    assert(MBBI != MBB->end() && "Already past the end of the basic block!");
    MBBI = std::next(MBBI);
  }
  assert(MBBI != MBB->end() && "Already at the end of the basic block!");

  MachineInstr &MI = *MBBI;

  for (SmallVectorImpl<ScavengedInfo>::iterator I = Scavenged.begin(),
         IE = Scavenged.end(); I != IE; ++I) {
    if (I->Restore != &MI)
      continue;

    I->Reg = 0;
    I->Restore = nullptr;
  }

  if (MI.isDebugInstr())
    return;

  determineKillsAndDefs();

  // Verify uses and defs.
#ifndef NDEBUG
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg())
      continue;
    Register Reg = MO.getReg();
    if (!Register::isPhysicalRegister(Reg) || isReserved(Reg))
      continue;
    if (MO.isUse()) {
      if (MO.isUndef())
        continue;
      if (!isRegUsed(Reg)) {
        // Check if it's partially live: e.g.
        // D0 = insert_subreg undef D0, S0
        // ... D0
        // The problem is the insert_subreg could be eliminated. The use of
        // D0 is using a partially undef value. This is not *incorrect* since
        // S1 can be freely clobbered.
        // Ideally we would like a way to model this, but leaving the
        // insert_subreg around causes both correctness and performance issues.
        bool SubUsed = false;
        for (const MCPhysReg &SubReg : TRI->subregs(Reg))
          if (isRegUsed(SubReg)) {
            SubUsed = true;
            break;
          }
        bool SuperUsed = false;
        for (MCSuperRegIterator SR(Reg, TRI); SR.isValid(); ++SR) {
          if (isRegUsed(*SR)) {
            SuperUsed = true;
            break;
          }
        }
        if (!SubUsed && !SuperUsed) {
          MBB->getParent()->verify(nullptr, "In Register Scavenger");
          llvm_unreachable("Using an undefined register!");
        }
        (void)SubUsed;
        (void)SuperUsed;
      }
    } else {
      assert(MO.isDef());
#if 0
      // FIXME: Enable this once we've figured out how to correctly transfer
      // implicit kills during codegen passes like the coalescer.
      assert((KillRegs.test(Reg) || isUnused(Reg) ||
              isLiveInButUnusedBefore(Reg, MI, MBB, TRI, MRI)) &&
             "Re-defining a live register!");
#endif
    }
  }
#endif // NDEBUG

  // Commit the changes.
  setUnused(KillRegUnits);
  setUsed(DefRegUnits);
}

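/// Step the internal position backwards over the current instruction,
/// updating the tracked liveness and expiring any scavenging slot entry whose
/// restore point is the instruction stepped over. Tracking stops once the
/// beginning of the block is reached.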
void RegScavenger::backward() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  const MachineInstr &MI = *MBBI;
  LiveUnits.stepBackward(MI);

  // Expire scavenge spill frameindex uses.
  for (ScavengedInfo &I : Scavenged) {
    if (I.Restore == &MI) {
      I.Reg = 0;
      I.Restore = nullptr;
    }
  }

  if (MBBI == MBB->begin()) {
    MBBI = MachineBasicBlock::iterator(nullptr);
    Tracking = false;
  } else
    --MBBI;
}

bool RegScavenger::isRegUsed(Register Reg, bool includeReserved) const {
  if (isReserved(Reg))
    return includeReserved;
  return !LiveUnits.available(Reg);
}

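/// Return the first register in \p RC that is neither reserved nor currently
/// used, or 0 if all registers of the class are in use.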
Register RegScavenger::FindUnusedReg(const TargetRegisterClass *RC) const {
  for (Register Reg : *RC) {
    if (!isRegUsed(Reg)) {
      LLVM_DEBUG(dbgs() << "Scavenger found unused reg: " << printReg(Reg, TRI)
                        << "\n");
      return Reg;
    }
  }
  return 0;
}

BitVector RegScavenger::getRegsAvailable(const TargetRegisterClass *RC) {
  BitVector Mask(TRI->getNumRegs());
  for (Register Reg : *RC)
    if (!isRegUsed(Reg))
      Mask.set(Reg);
  return Mask;
}

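/// Walk forward from \p StartMI for up to \p InstrLimit instructions (stopping
/// at the first terminator), removing every candidate register touched along
/// the way. Returns the candidate that survives the longest and sets \p UseMI
/// to the position where a spilled candidate would have to be restored.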
Register RegScavenger::findSurvivorReg(MachineBasicBlock::iterator StartMI,
                                       BitVector &Candidates,
                                       unsigned InstrLimit,
                                       MachineBasicBlock::iterator &UseMI) {
  int Survivor = Candidates.find_first();
  assert(Survivor > 0 && "No candidates for scavenging");

  MachineBasicBlock::iterator ME = MBB->getFirstTerminator();
  assert(StartMI != ME && "MI already at terminator");
  MachineBasicBlock::iterator RestorePointMI = StartMI;
  MachineBasicBlock::iterator MI = StartMI;

  bool inVirtLiveRange = false;
  for (++MI; InstrLimit > 0 && MI != ME; ++MI, --InstrLimit) {
    if (MI->isDebugInstr()) {
      ++InstrLimit; // Don't count debug instructions
      continue;
    }
    bool isVirtKillInsn = false;
    bool isVirtDefInsn = false;
    // Remove any candidates touched by instruction.
    for (const MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        Candidates.clearBitsNotInMask(MO.getRegMask());
      if (!MO.isReg() || MO.isUndef() || !MO.getReg())
        continue;
      if (Register::isVirtualRegister(MO.getReg())) {
        if (MO.isDef())
          isVirtDefInsn = true;
        else if (MO.isKill())
          isVirtKillInsn = true;
        continue;
      }
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
    }
    // If we're not in a virtual reg's live range, this is a valid
    // restore point.
    if (!inVirtLiveRange) RestorePointMI = MI;

    // Update whether we're in the live range of a virtual register
    if (isVirtKillInsn) inVirtLiveRange = false;
    if (isVirtDefInsn) inVirtLiveRange = true;

    // Was our survivor untouched by this instruction?
    if (Candidates.test(Survivor))
      continue;

    // All candidates gone?
    if (Candidates.none())
      break;

    Survivor = Candidates.find_first();
  }
  // If we ran off the end, that's where we want to restore.
  if (MI == ME) RestorePointMI = ME;
  assert(RestorePointMI != StartMI &&
         "No available scavenger restore location!");

  // We ran out of candidates, so stop the search.
  UseMI = RestorePointMI;
  return Survivor;
}

/// Given the live-out register units \p LiveOut, search backwards from
/// \p From to \p To for a register in \p AllocationOrder that is not reserved
/// and not used or clobbered in that range. If no such register exists, keep
/// searching backwards past \p To and pick the candidate that remains
/// unused/unclobbered for the longest time.
/// Returns the chosen register together with the earliest position we know it
/// to be free; the position is MBB.end() when the register is free over the
/// whole range and no spill is needed.
static std::pair<MCPhysReg, MachineBasicBlock::iterator>
findSurvivorBackwards(const MachineRegisterInfo &MRI,
    MachineBasicBlock::iterator From, MachineBasicBlock::iterator To,
    const LiveRegUnits &LiveOut, ArrayRef<MCPhysReg> AllocationOrder,
    bool RestoreAfter) {
  bool FoundTo = false;
  MCPhysReg Survivor = 0;
  MachineBasicBlock::iterator Pos;
  MachineBasicBlock &MBB = *From->getParent();
  unsigned InstrLimit = 25;
  unsigned InstrCountDown = InstrLimit;
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  LiveRegUnits Used(TRI);

  for (MachineBasicBlock::iterator I = From;; --I) {
    const MachineInstr &MI = *I;

    Used.accumulate(MI);

    if (I == To) {
      // See if one of the registers in the allocation order wasn't used so
      // far.
      for (MCPhysReg Reg : AllocationOrder) {
        if (!MRI.isReserved(Reg) && Used.available(Reg) &&
            LiveOut.available(Reg))
          return std::make_pair(Reg, MBB.end());
      }
      // Otherwise we will continue up to InstrLimit instructions to find
      // the register which is not defined/used for the longest time.
      FoundTo = true;
      Pos = To;
      // Note: It was fine so far to start our search at From. However, now
      // that we have to spill and can only place the restore after From, add
      // the registers used/defined by std::next(From) to the set.
      if (RestoreAfter)
        Used.accumulate(*std::next(From));
    }
    if (FoundTo) {
      if (Survivor == 0 || !Used.available(Survivor)) {
        MCPhysReg AvailableReg = 0;
        for (MCPhysReg Reg : AllocationOrder) {
          if (!MRI.isReserved(Reg) && Used.available(Reg)) {
            AvailableReg = Reg;
            break;
          }
        }
        if (AvailableReg == 0)
          break;
        Survivor = AvailableReg;
      }
      if (--InstrCountDown == 0)
        break;

      // Keep searching when we find a vreg since the spilled register will
      // be useful for this other vreg as well later.
      bool FoundVReg = false;
      for (const MachineOperand &MO : MI.operands()) {
        if (MO.isReg() && Register::isVirtualRegister(MO.getReg())) {
          FoundVReg = true;
          break;
        }
      }
      if (FoundVReg) {
        InstrCountDown = InstrLimit;
        Pos = I;
      }
      if (I == MBB.begin())
        break;
    }
  }

  return std::make_pair(Survivor, Pos);
}

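/// Return the index of the first frame-index operand of \p MI; asserts if the
/// instruction has none.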
static unsigned getFrameIndexOperandNum(MachineInstr &MI) {
  unsigned i = 0;
  while (!MI.getOperand(i).isFI()) {
    ++i;
    assert(i < MI.getNumOperands() && "Instr doesn't have FrameIndex operand!");
  }
  return i;
}

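/// Make \p Reg of class \p RC available by saving it before \p Before and
/// restoring it before \p UseMI. If the target does not handle the
/// save/restore itself via saveScavengerRegister(), the best-fitting free
/// emergency scavenging slot is used (a fatal error is reported if none
/// exists). Returns the ScavengedInfo entry that was claimed for \p Reg.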
RegScavenger::ScavengedInfo &
RegScavenger::spill(Register Reg, const TargetRegisterClass &RC, int SPAdj,
                    MachineBasicBlock::iterator Before,
                    MachineBasicBlock::iterator &UseMI) {
  // Find an available scavenging slot with size and alignment matching
  // the requirements of the class RC.
  const MachineFunction &MF = *Before->getMF();
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned NeedSize = TRI->getSpillSize(RC);
  Align NeedAlign = TRI->getSpillAlign(RC);

  unsigned SI = Scavenged.size(), Diff = std::numeric_limits<unsigned>::max();
  int FIB = MFI.getObjectIndexBegin(), FIE = MFI.getObjectIndexEnd();
  for (unsigned I = 0; I < Scavenged.size(); ++I) {
    if (Scavenged[I].Reg != 0)
      continue;
    // Verify that this slot is valid for this register.
    int FI = Scavenged[I].FrameIndex;
    if (FI < FIB || FI >= FIE)
      continue;
    unsigned S = MFI.getObjectSize(FI);
    Align A = MFI.getObjectAlign(FI);
    if (NeedSize > S || NeedAlign > A)
      continue;
    // Avoid wasting slots with large size and/or large alignment. Pick one
    // that is the best fit for this register class (in street metric).
    // Picking a larger slot than necessary could happen if a slot for a
    // larger register is reserved before a slot for a smaller one. When
    // trying to spill a smaller register, the large slot would be found
    // first, thus making it impossible to spill the larger register later.
    unsigned D = (S - NeedSize) + (A.value() - NeedAlign.value());
    if (D < Diff) {
      SI = I;
      Diff = D;
    }
  }

  if (SI == Scavenged.size()) {
    // We need to scavenge a register but have no spill slot; the target
    // must know how to do it (if not, we'll assert below).
    Scavenged.push_back(ScavengedInfo(FIE));
  }

  // Avoid infinite regress.
  Scavenged[SI].Reg = Reg;

  // If the target knows how to save/restore the register, let it do so;
  // otherwise, use the emergency stack spill slot.
  if (!TRI->saveScavengerRegister(*MBB, Before, UseMI, &RC, Reg)) {
    // Spill the scavenged register before \p Before.
    int FI = Scavenged[SI].FrameIndex;
    if (FI < FIB || FI >= FIE) {
      std::string Msg = std::string("Error while trying to spill ") +
          TRI->getName(Reg) + " from class " + TRI->getRegClassName(&RC) +
          ": Cannot scavenge register without an emergency spill slot!";
      report_fatal_error(Msg.c_str());
    }
    TII->storeRegToStackSlot(*MBB, Before, Reg, true, Scavenged[SI].FrameIndex,
                             &RC, TRI);
    MachineBasicBlock::iterator II = std::prev(Before);

    unsigned FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);

    // Restore the scavenged register before its use (or first terminator).
    TII->loadRegFromStackSlot(*MBB, UseMI, Reg, Scavenged[SI].FrameIndex,
                              &RC, TRI);
    II = std::prev(UseMI);

    FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);
  }
  return Scavenged[SI];
}

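/// Scavenge a register of class \p RC usable at instruction \p I, preferring
/// a register that is currently unused. If every candidate is live, the one
/// whose next use is furthest away is spilled (unless \p AllowSpill is false,
/// in which case 0 is returned) and scheduled to be restored before that use.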
Register RegScavenger::scavengeRegister(const TargetRegisterClass *RC,
                                        MachineBasicBlock::iterator I,
                                        int SPAdj, bool AllowSpill) {
  MachineInstr &MI = *I;
  const MachineFunction &MF = *MI.getMF();
  // Consider all allocatable registers in the register class initially
  BitVector Candidates = TRI->getAllocatableSet(MF, RC);

  // Exclude all the registers being used by the instruction.
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isReg() && MO.getReg() != 0 && !(MO.isUse() && MO.isUndef()) &&
        !Register::isVirtualRegister(MO.getReg()))
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
  }

  // Try to find a register that's unused if there is one, as then we won't
  // have to spill.
  BitVector Available = getRegsAvailable(RC);
  Available &= Candidates;
  if (Available.any())
    Candidates = Available;

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  Register SReg = findSurvivorReg(I, Candidates, 25, UseMI);

  // If we found an unused register there is no reason to spill it.
  if (!isRegUsed(SReg)) {
    LLVM_DEBUG(dbgs() << "Scavenged register: " << printReg(SReg, TRI) << "\n");
    return SReg;
  }

  if (!AllowSpill)
    return 0;

  ScavengedInfo &Scavenged = spill(SReg, *RC, SPAdj, I, UseMI);
  Scavenged.Restore = &*std::prev(UseMI);

  LLVM_DEBUG(dbgs() << "Scavenged register (with spill): "
                    << printReg(SReg, TRI) << "\n");

  return SReg;
}

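/// Scavenge a register of class \p RC that is available between the current
/// position of the scavenger and \p To, searching backwards. If no register
/// is free over the whole range and \p AllowSpill is true, the chosen
/// register is spilled at the earliest point it is known to be free and
/// reloaded after the current position.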
Register RegScavenger::scavengeRegisterBackwards(const TargetRegisterClass &RC,
                                                 MachineBasicBlock::iterator To,
                                                 bool RestoreAfter, int SPAdj,
                                                 bool AllowSpill) {
  const MachineBasicBlock &MBB = *To->getParent();
  const MachineFunction &MF = *MBB.getParent();

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  ArrayRef<MCPhysReg> AllocationOrder = RC.getRawAllocationOrder(MF);
  std::pair<MCPhysReg, MachineBasicBlock::iterator> P =
      findSurvivorBackwards(*MRI, MBBI, To, LiveUnits, AllocationOrder,
                            RestoreAfter);
  MCPhysReg Reg = P.first;
  MachineBasicBlock::iterator SpillBefore = P.second;
  assert(Reg != 0 && "No register left to scavenge!");
  // Found an available register?
  if (SpillBefore == MBB.end()) {
    LLVM_DEBUG(dbgs() << "Scavenged free register: " << printReg(Reg, TRI)
                      << '\n');
    return Reg;
  }

  if (!AllowSpill)
    return 0;

  MachineBasicBlock::iterator ReloadAfter =
      RestoreAfter ? std::next(MBBI) : MBBI;
  MachineBasicBlock::iterator ReloadBefore = std::next(ReloadAfter);
  if (ReloadBefore != MBB.end())
    LLVM_DEBUG(dbgs() << "Reload before: " << *ReloadBefore << '\n');
  ScavengedInfo &Scavenged = spill(Reg, RC, SPAdj, SpillBefore, ReloadBefore);
  Scavenged.Restore = &*std::prev(SpillBefore);
  LiveUnits.removeReg(Reg);
  LLVM_DEBUG(dbgs() << "Scavenged register with spill: " << printReg(Reg, TRI)
                    << " until " << *SpillBefore);
  return Reg;
}

/// Allocate a register for the virtual register \p VReg. The last use of
/// \p VReg is around the current position of the register scavenger \p RS.
/// \p ReserveAfter controls whether the scavenged register needs to be
/// reserved after the current instruction; otherwise it will only be reserved
/// before the current instruction.
static Register scavengeVReg(MachineRegisterInfo &MRI, RegScavenger &RS,
                             Register VReg, bool ReserveAfter) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
#ifndef NDEBUG
  // Verify that all definitions and uses are in the same basic block.
  const MachineBasicBlock *CommonMBB = nullptr;
  // Real definition for the reg, re-definitions are not considered.
  const MachineInstr *RealDef = nullptr;
  for (MachineOperand &MO : MRI.reg_nodbg_operands(VReg)) {
    MachineBasicBlock *MBB = MO.getParent()->getParent();
    if (CommonMBB == nullptr)
      CommonMBB = MBB;
    assert(MBB == CommonMBB && "All defs+uses must be in the same basic block");
    if (MO.isDef()) {
      const MachineInstr &MI = *MO.getParent();
      if (!MI.readsRegister(VReg, &TRI)) {
        assert((!RealDef || RealDef == &MI) &&
               "Can have at most one definition which is not a redefinition");
        RealDef = &MI;
      }
    }
  }
  assert(RealDef != nullptr && "Must have at least 1 Def");
#endif

  // We should only have one definition of the register. However to accommodate
  // the requirements of two address code we also allow definitions in
  // subsequent instructions provided they also read the register. That way
  // we get a single contiguous lifetime.
  //
  // Definitions in MRI.def_begin() are unordered, search for the first.
  MachineRegisterInfo::def_iterator FirstDef =
      std::find_if(MRI.def_begin(VReg), MRI.def_end(),
                   [VReg, &TRI](const MachineOperand &MO) {
                     return !MO.getParent()->readsRegister(VReg, &TRI);
                   });
  assert(FirstDef != MRI.def_end() &&
         "Must have one definition that does not redefine vreg");
  MachineInstr &DefMI = *FirstDef->getParent();

  // The register scavenger will report a free register, inserting an emergency
  // spill/reload if necessary.
  int SPAdj = 0;
  const TargetRegisterClass &RC = *MRI.getRegClass(VReg);
  Register SReg = RS.scavengeRegisterBackwards(RC, DefMI.getIterator(),
                                               ReserveAfter, SPAdj);
  MRI.replaceRegWith(VReg, SReg);
  ++NumScavengedRegs;
  return SReg;
}

/// Allocate (scavenge) vregs inside a single basic block.
/// Returns true if the target spill callback created new vregs and a 2nd pass
/// is necessary.
static bool scavengeFrameVirtualRegsInBlock(MachineRegisterInfo &MRI,
                                            RegScavenger &RS,
                                            MachineBasicBlock &MBB) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  RS.enterBasicBlockEnd(MBB);

  unsigned InitialNumVirtRegs = MRI.getNumVirtRegs();
  bool NextInstructionReadsVReg = false;
  for (MachineBasicBlock::iterator I = MBB.end(); I != MBB.begin(); ) {
    --I;
    // Move RegScavenger to the position between *I and *std::next(I).
    RS.backward(I);

    // Look for unassigned vregs in the uses of *std::next(I).
    if (NextInstructionReadsVReg) {
      MachineBasicBlock::iterator N = std::next(I);
      const MachineInstr &NMI = *N;
      for (const MachineOperand &MO : NMI.operands()) {
        if (!MO.isReg())
          continue;
        Register Reg = MO.getReg();
        // We only care about virtual registers and ignore virtual registers
        // created by the target callbacks in the process (those will be
        // handled in a scavenging round).
        if (!Register::isVirtualRegister(Reg) ||
            Register::virtReg2Index(Reg) >= InitialNumVirtRegs)
          continue;
        if (!MO.readsReg())
          continue;

        Register SReg = scavengeVReg(MRI, RS, Reg, true);
        N->addRegisterKilled(SReg, &TRI, false);
        RS.setRegUsed(SReg);
      }
    }

    // Look for unassigned vregs in the defs of *I.
    NextInstructionReadsVReg = false;
    const MachineInstr &MI = *I;
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg())
        continue;
      Register Reg = MO.getReg();
      // Only vregs, no newly created vregs (see above).
      if (!Register::isVirtualRegister(Reg) ||
          Register::virtReg2Index(Reg) >= InitialNumVirtRegs)
        continue;
      // We have to look at all operands anyway so we can precalculate here
      // whether there is a reading operand. This allows us to skip the use
      // step in the next iteration if there was none.
      assert(!MO.isInternalRead() && "Cannot assign inside bundles");
      assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
      if (MO.readsReg()) {
        NextInstructionReadsVReg = true;
      }
      if (MO.isDef()) {
        Register SReg = scavengeVReg(MRI, RS, Reg, false);
        I->addRegisterDead(SReg, &TRI, false);
      }
    }
  }
#ifndef NDEBUG
  for (const MachineOperand &MO : MBB.front().operands()) {
    if (!MO.isReg() || !Register::isVirtualRegister(MO.getReg()))
      continue;
    assert(!MO.isInternalRead() && "Cannot assign inside bundles");
    assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
    assert(!MO.readsReg() && "Vreg use in first instruction not allowed");
  }
#endif

  return MRI.getNumVirtRegs() != InitialNumVirtRegs;
}

void llvm::scavengeFrameVirtualRegs(MachineFunction &MF, RegScavenger &RS) {
  // FIXME: Iterating over the instruction stream is unnecessary. We can simply
  // iterate over the vreg use list, which at this point only contains machine
  // operands for which eliminateFrameIndex needs a new scratch reg.
  MachineRegisterInfo &MRI = MF.getRegInfo();
  // Shortcut.
  if (MRI.getNumVirtRegs() == 0) {
    MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
    return;
  }

  // Run through the instructions and find any virtual registers.
  for (MachineBasicBlock &MBB : MF) {
    if (MBB.empty())
      continue;

    bool Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
    if (Again) {
      LLVM_DEBUG(dbgs() << "Warning: Required two scavenging passes for block "
                        << MBB.getName() << '\n');
      Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
      // The target required a 2nd run (because it created new vregs while
      // spilling). Refuse to do another pass to keep compile time in check.
      if (Again)
        report_fatal_error("Incomplete scavenging after 2nd pass");
    }
  }

  MRI.clearVirtRegs();
  MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
}

namespace {

/// This class runs register scavenging independent of the PrologEpilogInserter.
/// It is used for testing.
class ScavengerTest : public MachineFunctionPass {
public:
  static char ID;

  ScavengerTest() : MachineFunctionPass(ID) {}

  bool runOnMachineFunction(MachineFunction &MF) override {
    const TargetSubtargetInfo &STI = MF.getSubtarget();
    const TargetFrameLowering &TFL = *STI.getFrameLowering();

    RegScavenger RS;
    // Let's hope that calling those outside of PrologEpilogInserter works
    // well enough to initialize the scavenger with some emergency spill slots
    // for the target.
    BitVector SavedRegs;
    TFL.determineCalleeSaves(MF, SavedRegs, &RS);
    TFL.processFunctionBeforeFrameFinalized(MF, &RS);

    // Let's scavenge the current function.
    scavengeFrameVirtualRegs(MF, RS);
    return true;
  }
};

} // end anonymous namespace

char ScavengerTest::ID;

INITIALIZE_PASS(ScavengerTest, "scavenger-test",
                "Scavenge virtual registers inside basic blocks", false, false)