//===- llvm/CodeGen/VirtRegMap.cpp - Virtual Register Map -----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the VirtRegMap class.
//
// It also contains implementations of the Spiller interface, which, given a
// virtual register map and a machine function, eliminates all virtual
// references by replacing them with physical register references - adding spill
// code as necessary.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/VirtRegMap.h"
#include "LiveDebugVariables.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/LiveInterval.h"
#include "llvm/CodeGen/LiveIntervals.h"
#include "llvm/CodeGen/LiveStacks.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/SlotIndexes.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/Config/llvm-config.h"
#include "llvm/MC/LaneBitmask.h"
#include "llvm/Pass.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <iterator>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "regalloc"

STATISTIC(NumSpillSlots, "Number of spill slots allocated");
STATISTIC(NumIdCopies, "Number of identity moves eliminated after rewriting");

//===----------------------------------------------------------------------===//
// VirtRegMap implementation
//===----------------------------------------------------------------------===//

char VirtRegMap::ID = 0;

INITIALIZE_PASS(VirtRegMap, "virtregmap", "Virtual Register Map", false, false)

bool VirtRegMap::runOnMachineFunction(MachineFunction &mf) {
  MRI = &mf.getRegInfo();
  TII = mf.getSubtarget().getInstrInfo();
  TRI = mf.getSubtarget().getRegisterInfo();
  MF = &mf;

  Virt2PhysMap.clear();
  Virt2StackSlotMap.clear();
  Virt2SplitMap.clear();

  grow();
  return false;
}

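/// Resize the Virt2Phys, Virt2StackSlot and Virt2Split maps so they have an
/// entry for every virtual register currently known to MachineRegisterInfo.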
void VirtRegMap::grow() {
  unsigned NumRegs = MF->getRegInfo().getNumVirtRegs();
  Virt2PhysMap.resize(NumRegs);
  Virt2StackSlotMap.resize(NumRegs);
  Virt2SplitMap.resize(NumRegs);
}

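/// Record that \p virtReg has been assigned to \p physReg. The virtual
/// register must not already be mapped, and the physical register must not be
/// reserved.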
void VirtRegMap::assignVirt2Phys(Register virtReg, MCPhysReg physReg) {
  assert(virtReg.isVirtual() && Register::isPhysicalRegister(physReg));
  assert(Virt2PhysMap[virtReg.id()] == NO_PHYS_REG &&
         "attempt to assign physical register to already mapped "
         "virtual register");
  assert(!getRegInfo().isReserved(physReg) &&
         "Attempt to map virtReg to a reserved physReg");
  Virt2PhysMap[virtReg.id()] = physReg;
}

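/// Create a stack object large and aligned enough to hold a spilled register
/// of class \p RC, and return its frame index.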
unsigned VirtRegMap::createSpillSlot(const TargetRegisterClass *RC) {
  unsigned Size = TRI->getSpillSize(*RC);
  unsigned Align = TRI->getSpillAlignment(*RC);
  int SS = MF->getFrameInfo().CreateSpillStackObject(Size, Align);
  ++NumSpillSlots;
  return SS;
}

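/// Return true if \p VirtReg ended up in the physical register it was hinted
/// towards. A virtual register hint is followed through to that register's own
/// assignment.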
bool VirtRegMap::hasPreferredPhys(Register VirtReg) {
  Register Hint = MRI->getSimpleHint(VirtReg);
  if (!Hint.isValid())
    return false;
  if (Hint.isVirtual())
    Hint = getPhys(Hint);
  return getPhys(VirtReg) == Hint;
}

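/// Return true if \p VirtReg has an allocation hint that already resolves to a
/// concrete physical register.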
bool VirtRegMap::hasKnownPreference(Register VirtReg) {
  std::pair<unsigned, unsigned> Hint = MRI->getRegAllocationHint(VirtReg);
  if (Register::isPhysicalRegister(Hint.second))
    return true;
  if (Register::isVirtualRegister(Hint.second))
    return hasPhys(Hint.second);
  return false;
}

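/// Allocate a fresh spill slot for \p virtReg based on its register class,
/// record the mapping, and return the new frame index.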
int VirtRegMap::assignVirt2StackSlot(Register virtReg) {
  assert(virtReg.isVirtual());
  assert(Virt2StackSlotMap[virtReg.id()] == NO_STACK_SLOT &&
         "attempt to assign stack slot to already spilled register");
  const TargetRegisterClass* RC = MF->getRegInfo().getRegClass(virtReg);
  return Virt2StackSlotMap[virtReg.id()] = createSpillSlot(RC);
}

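/// Record an existing (possibly fixed) frame index \p SS as the spill slot for
/// \p virtReg.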
void VirtRegMap::assignVirt2StackSlot(Register virtReg, int SS) {
  assert(virtReg.isVirtual());
  assert(Virt2StackSlotMap[virtReg.id()] == NO_STACK_SLOT &&
         "attempt to assign stack slot to already spilled register");
  assert((SS >= 0 ||
          (SS >= MF->getFrameInfo().getObjectIndexBegin())) &&
         "illegal fixed frame index");
  Virt2StackSlotMap[virtReg.id()] = SS;
}

void VirtRegMap::print(raw_ostream &OS, const Module*) const {
  OS << "********** REGISTER MAP **********\n";
  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = Register::index2VirtReg(i);
    if (Virt2PhysMap[Reg] != (unsigned)VirtRegMap::NO_PHYS_REG) {
      OS << '[' << printReg(Reg, TRI) << " -> "
         << printReg(Virt2PhysMap[Reg], TRI) << "] "
         << TRI->getRegClassName(MRI->getRegClass(Reg)) << "\n";
    }
  }

  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = Register::index2VirtReg(i);
    if (Virt2StackSlotMap[Reg] != VirtRegMap::NO_STACK_SLOT) {
      OS << '[' << printReg(Reg, TRI) << " -> fi#" << Virt2StackSlotMap[Reg]
         << "] " << TRI->getRegClassName(MRI->getRegClass(Reg)) << "\n";
    }
  }
  OS << '\n';
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void VirtRegMap::dump() const {
  print(dbgs());
}
#endif

//===----------------------------------------------------------------------===//
// VirtRegRewriter
//===----------------------------------------------------------------------===//
//
// The VirtRegRewriter is the last of the register allocator passes.
// It rewrites virtual registers to physical registers as specified in the
// VirtRegMap analysis. It also updates live-in information on basic blocks
// according to LiveIntervals.
//
namespace {

class VirtRegRewriter : public MachineFunctionPass {
  MachineFunction *MF;
  const TargetRegisterInfo *TRI;
  const TargetInstrInfo *TII;
  MachineRegisterInfo *MRI;
  SlotIndexes *Indexes;
  LiveIntervals *LIS;
  VirtRegMap *VRM;

  void rewrite();
  void addMBBLiveIns();
  bool readsUndefSubreg(const MachineOperand &MO) const;
  void addLiveInsForSubRanges(const LiveInterval &LI, Register PhysReg) const;
  void handleIdentityCopy(MachineInstr &MI) const;
  void expandCopyBundle(MachineInstr &MI) const;
  bool subRegLiveThrough(const MachineInstr &MI, Register SuperPhysReg) const;

public:
  static char ID;

  VirtRegRewriter() : MachineFunctionPass(ID) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool runOnMachineFunction(MachineFunction&) override;

  MachineFunctionProperties getSetProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoVRegs);
  }
};

} // end anonymous namespace

char VirtRegRewriter::ID = 0;

char &llvm::VirtRegRewriterID = VirtRegRewriter::ID;

INITIALIZE_PASS_BEGIN(VirtRegRewriter, "virtregrewriter",
                      "Virtual Register Rewriter", false, false)
INITIALIZE_PASS_DEPENDENCY(SlotIndexes)
INITIALIZE_PASS_DEPENDENCY(LiveIntervals)
INITIALIZE_PASS_DEPENDENCY(LiveDebugVariables)
INITIALIZE_PASS_DEPENDENCY(LiveStacks)
INITIALIZE_PASS_DEPENDENCY(VirtRegMap)
INITIALIZE_PASS_END(VirtRegRewriter, "virtregrewriter",
                    "Virtual Register Rewriter", false, false)

void VirtRegRewriter::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<LiveIntervals>();
  AU.addRequired<SlotIndexes>();
  AU.addPreserved<SlotIndexes>();
  AU.addRequired<LiveDebugVariables>();
  AU.addRequired<LiveStacks>();
  AU.addPreserved<LiveStacks>();
  AU.addRequired<VirtRegMap>();
  MachineFunctionPass::getAnalysisUsage(AU);
}

bool VirtRegRewriter::runOnMachineFunction(MachineFunction &fn) {
  MF = &fn;
  TRI = MF->getSubtarget().getRegisterInfo();
  TII = MF->getSubtarget().getInstrInfo();
  MRI = &MF->getRegInfo();
  Indexes = &getAnalysis<SlotIndexes>();
  LIS = &getAnalysis<LiveIntervals>();
  VRM = &getAnalysis<VirtRegMap>();
  LLVM_DEBUG(dbgs() << "********** REWRITE VIRTUAL REGISTERS **********\n"
                    << "********** Function: " << MF->getName() << '\n');
  LLVM_DEBUG(VRM->dump());

  // Add kill flags while we still have virtual registers.
  LIS->addKillFlags(VRM);

  // Live-in lists on basic blocks are required for physregs.
  addMBBLiveIns();

  // Rewrite virtual registers.
  rewrite();

  // Write out new DBG_VALUE instructions.
  getAnalysis<LiveDebugVariables>().emitDebugValues(VRM);

  // All machine operands and other references to virtual registers have been
  // replaced. Remove the virtual registers and release all the transient data.
  VRM->clearAllVirt();
  MRI->clearVirtRegs();
  return true;
}

void VirtRegRewriter::addLiveInsForSubRanges(const LiveInterval &LI,
                                             Register PhysReg) const {
  assert(!LI.empty());
  assert(LI.hasSubRanges());

  using SubRangeIteratorPair =
      std::pair<const LiveInterval::SubRange *, LiveInterval::const_iterator>;

  SmallVector<SubRangeIteratorPair, 4> SubRanges;
  SlotIndex First;
  SlotIndex Last;
  for (const LiveInterval::SubRange &SR : LI.subranges()) {
    SubRanges.push_back(std::make_pair(&SR, SR.begin()));
    if (!First.isValid() || SR.segments.front().start < First)
      First = SR.segments.front().start;
    if (!Last.isValid() || SR.segments.back().end > Last)
      Last = SR.segments.back().end;
  }

  // Check all mbb start positions between First and Last while
  // simultaneously advancing an iterator for each subrange.
  for (SlotIndexes::MBBIndexIterator MBBI = Indexes->findMBBIndex(First);
       MBBI != Indexes->MBBIndexEnd() && MBBI->first <= Last; ++MBBI) {
    SlotIndex MBBBegin = MBBI->first;
    // Advance all subrange iterators so that their end position is just
    // behind MBBBegin (or the iterator is at the end).
    LaneBitmask LaneMask;
    for (auto &RangeIterPair : SubRanges) {
      const LiveInterval::SubRange *SR = RangeIterPair.first;
      LiveInterval::const_iterator &SRI = RangeIterPair.second;
      while (SRI != SR->end() && SRI->end <= MBBBegin)
        ++SRI;
      if (SRI == SR->end())
        continue;
      if (SRI->start <= MBBBegin)
        LaneMask |= SR->LaneMask;
    }
    if (LaneMask.none())
      continue;
    MachineBasicBlock *MBB = MBBI->second;
    MBB->addLiveIn(PhysReg, LaneMask);
  }
}

// Compute MBB live-in lists from virtual register live ranges and their
// assignments.
void VirtRegRewriter::addMBBLiveIns() {
  for (unsigned Idx = 0, IdxE = MRI->getNumVirtRegs(); Idx != IdxE; ++Idx) {
    Register VirtReg = Register::index2VirtReg(Idx);
    if (MRI->reg_nodbg_empty(VirtReg))
      continue;
    LiveInterval &LI = LIS->getInterval(VirtReg);
    if (LI.empty() || LIS->intervalIsInOneMBB(LI))
      continue;
    // This is a virtual register that is live across basic blocks. Its
    // assigned PhysReg must be marked as live-in to those blocks.
    Register PhysReg = VRM->getPhys(VirtReg);
    assert(PhysReg != VirtRegMap::NO_PHYS_REG && "Unmapped virtual register.");

    if (LI.hasSubRanges()) {
      addLiveInsForSubRanges(LI, PhysReg);
    } else {
      // Go over MBB begin positions and see if we have segments covering them.
      // The following works because segments and the MBBIndex list are both
      // sorted by slot indexes.
      SlotIndexes::MBBIndexIterator I = Indexes->MBBIndexBegin();
      for (const auto &Seg : LI) {
        I = Indexes->advanceMBBIndex(I, Seg.start);
        for (; I != Indexes->MBBIndexEnd() && I->first < Seg.end; ++I) {
          MachineBasicBlock *MBB = I->second;
          MBB->addLiveIn(PhysReg);
        }
      }
    }
  }

  // Sort and unique MBB LiveIns as we've not checked if SubReg/PhysReg were in
  // each MBB's LiveIns set before calling addLiveIn on them.
  for (MachineBasicBlock &MBB : *MF)
    MBB.sortUniqueLiveIns();
}

/// Returns true if the given machine operand \p MO only reads undefined lanes.
/// The function only works for use operands with a subregister set.
bool VirtRegRewriter::readsUndefSubreg(const MachineOperand &MO) const {
  // Shortcut if the operand is already marked undef.
  if (MO.isUndef())
    return true;

  Register Reg = MO.getReg();
  const LiveInterval &LI = LIS->getInterval(Reg);
  const MachineInstr &MI = *MO.getParent();
  SlotIndex BaseIndex = LIS->getInstructionIndex(MI);
  // This code is only meant to handle reading undefined subregisters which
  // we couldn't properly detect before.
  assert(LI.liveAt(BaseIndex) &&
         "Reads of completely dead register should be marked undef already");
  unsigned SubRegIdx = MO.getSubReg();
  assert(SubRegIdx != 0 && LI.hasSubRanges());
  LaneBitmask UseMask = TRI->getSubRegIndexLaneMask(SubRegIdx);
  // See if any of the relevant subregister live ranges is defined at this
  // point.
  for (const LiveInterval::SubRange &SR : LI.subranges()) {
    if ((SR.LaneMask & UseMask).any() && SR.liveAt(BaseIndex))
      return false;
  }
  return true;
}

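/// Deal with a COPY whose source and destination ended up as the same physical
/// register. Such a copy moves no data: it is either turned into a KILL (when
/// it still carries useful liveness information) or erased entirely.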
void VirtRegRewriter::handleIdentityCopy(MachineInstr &MI) const {
  if (!MI.isIdentityCopy())
    return;
  LLVM_DEBUG(dbgs() << "Identity copy: " << MI);
  ++NumIdCopies;

  // Copies like:
  //    %r0 = COPY undef %r0
  //    %al = COPY %al, implicit-def %eax
  // give us additional liveness information: The target (super-)register
  // must not be valid before this point. Replace the COPY with a KILL
  // instruction to maintain this information.
  if (MI.getOperand(1).isUndef() || MI.getNumOperands() > 2) {
    MI.setDesc(TII->get(TargetOpcode::KILL));
    LLVM_DEBUG(dbgs() << " replace by: " << MI);
    return;
  }

  if (Indexes)
    Indexes->removeSingleMachineInstrFromMaps(MI);
  MI.eraseFromBundle();
  LLVM_DEBUG(dbgs() << " deleted.\n");
}

/// The live-range splitting logic sometimes produces bundles of copies when
/// subregisters are involved. Expand these into a sequence of copy instructions
/// after processing the last in the bundle. Does not update LiveIntervals
/// which we shouldn't need for this instruction anymore.
void VirtRegRewriter::expandCopyBundle(MachineInstr &MI) const {
  if (!MI.isCopy())
    return;

  if (MI.isBundledWithPred() && !MI.isBundledWithSucc()) {
    SmallVector<MachineInstr *, 2> MIs({&MI});

    // Only do this when the complete bundle is made out of COPYs.
    MachineBasicBlock &MBB = *MI.getParent();
    for (MachineBasicBlock::reverse_instr_iterator I =
             std::next(MI.getReverseIterator()), E = MBB.instr_rend();
         I != E && I->isBundledWithSucc(); ++I) {
      if (!I->isCopy())
        return;
      MIs.push_back(&*I);
    }
    MachineInstr *FirstMI = MIs.back();

    auto anyRegsAlias = [](const MachineInstr *Dst,
                           ArrayRef<MachineInstr *> Srcs,
                           const TargetRegisterInfo *TRI) {
      for (const MachineInstr *Src : Srcs)
        if (Src != Dst)
          if (TRI->regsOverlap(Dst->getOperand(0).getReg(),
                               Src->getOperand(1).getReg()))
            return true;
      return false;
    };

    // If any of the destination registers in the bundle of copies alias any of
    // the source registers, try to schedule the instructions to avoid any
    // clobbering.
    for (int E = MIs.size(), PrevE = E; E > 1; PrevE = E) {
      for (int I = E; I--; )
        if (!anyRegsAlias(MIs[I], makeArrayRef(MIs).take_front(E), TRI)) {
          if (I + 1 != E)
            std::swap(MIs[I], MIs[E - 1]);
          --E;
        }
      if (PrevE == E) {
        MF->getFunction().getContext().emitError(
            "register rewriting failed: cycle in copy bundle");
        break;
      }
    }

    MachineInstr *BundleStart = FirstMI;
    for (MachineInstr *BundledMI : llvm::reverse(MIs)) {
      // If instruction is in the middle of the bundle, move it before the
      // bundle starts, otherwise, just unbundle it. When we get to the last
      // instruction, the bundle will have been completely undone.
      if (BundledMI != BundleStart) {
        BundledMI->removeFromBundle();
        MBB.insert(FirstMI, BundledMI);
      } else if (BundledMI->isBundledWithSucc()) {
        BundledMI->unbundleFromSucc();
        BundleStart = &*std::next(BundledMI->getIterator());
      }

      if (Indexes && BundledMI != FirstMI)
        Indexes->insertMachineInstrInMaps(*BundledMI);
    }
  }
}

/// Check whether (part of) \p SuperPhysReg is live through \p MI.
/// \pre \p MI defines a subregister of a virtual register that
/// has been assigned to \p SuperPhysReg.
bool VirtRegRewriter::subRegLiveThrough(const MachineInstr &MI,
                                        Register SuperPhysReg) const {
  SlotIndex MIIndex = LIS->getInstructionIndex(MI);
  SlotIndex BeforeMIUses = MIIndex.getBaseIndex();
  SlotIndex AfterMIDefs = MIIndex.getBoundaryIndex();
  for (MCRegUnitIterator Unit(SuperPhysReg, TRI); Unit.isValid(); ++Unit) {
    const LiveRange &UnitRange = LIS->getRegUnit(*Unit);
    // If the regunit is live both before and after MI,
    // we assume it is live through.
    // Generally speaking, this is not true, because something like
    // "RU = op RU" would match that description.
    // However, we know that we are trying to assess whether
    // a def of a virtual reg, vreg, is live at the same time of RU.
    // If we are in the "RU = op RU" situation, that means that vreg
    // is defined at the same time as RU (i.e., "vreg, RU = op RU").
    // Thus, vreg and RU interfere and vreg cannot be assigned to
    // SuperPhysReg. Therefore, this situation cannot happen.
    if (UnitRange.liveAt(AfterMIDefs) && UnitRange.liveAt(BeforeMIUses))
      return true;
  }
  return false;
}

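/// Walk every instruction in the function and replace each virtual register
/// operand with the physical register assigned in the VirtRegMap, adding
/// implicit super-register operands where sub-register liveness is not
/// tracked, then clean up copy bundles and identity copies.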
void VirtRegRewriter::rewrite() {
  bool NoSubRegLiveness = !MRI->subRegLivenessEnabled();
  SmallVector<Register, 8> SuperDeads;
  SmallVector<Register, 8> SuperDefs;
  SmallVector<Register, 8> SuperKills;
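  // Implicit super-register operands collected while rewriting the operands of
  // the current instruction; they are attached to it once all explicit
  // operands have been rewritten.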

  for (MachineFunction::iterator MBBI = MF->begin(), MBBE = MF->end();
       MBBI != MBBE; ++MBBI) {
    LLVM_DEBUG(MBBI->print(dbgs(), Indexes));
    for (MachineBasicBlock::instr_iterator
           MII = MBBI->instr_begin(), MIE = MBBI->instr_end(); MII != MIE;) {
      MachineInstr *MI = &*MII;
      ++MII;

      for (MachineInstr::mop_iterator MOI = MI->operands_begin(),
           MOE = MI->operands_end(); MOI != MOE; ++MOI) {
        MachineOperand &MO = *MOI;

        // Make sure MRI knows about registers clobbered by regmasks.
        if (MO.isRegMask())
          MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());

        if (!MO.isReg() || !MO.getReg().isVirtual())
          continue;
        Register VirtReg = MO.getReg();
        Register PhysReg = VRM->getPhys(VirtReg);
        assert(PhysReg != VirtRegMap::NO_PHYS_REG &&
               "Instruction uses unmapped VirtReg");
        assert(!MRI->isReserved(PhysReg) && "Reserved register assignment");

        // Preserve semantics of sub-register operands.
        unsigned SubReg = MO.getSubReg();
        if (SubReg != 0) {
          if (NoSubRegLiveness || !MRI->shouldTrackSubRegLiveness(VirtReg)) {
            // A virtual register kill refers to the whole register, so we may
            // have to add implicit killed operands for the super-register. A
            // partial redef always kills and redefines the super-register.
            if ((MO.readsReg() && (MO.isDef() || MO.isKill())) ||
                (MO.isDef() && subRegLiveThrough(*MI, PhysReg)))
              SuperKills.push_back(PhysReg);

            if (MO.isDef()) {
              // Also add implicit defs for the super-register.
              if (MO.isDead())
                SuperDeads.push_back(PhysReg);
              else
                SuperDefs.push_back(PhysReg);
            }
          } else {
            if (MO.isUse()) {
              if (readsUndefSubreg(MO))
                // We need to add an <undef> flag if the subregister is
                // completely undefined (and we are not adding super-register
                // defs).
                MO.setIsUndef(true);
            } else if (!MO.isDead()) {
              assert(MO.isDef());
            }
          }

          // The def undef and def internal flags only make sense for
          // sub-register defs, and we are substituting a full physreg. An
          // implicit killed operand from the SuperKills list will represent the
          // partial read of the super-register.
          if (MO.isDef()) {
            MO.setIsUndef(false);
            MO.setIsInternalRead(false);
          }

          // PhysReg operands cannot have subregister indexes.
          PhysReg = TRI->getSubReg(PhysReg, SubReg);
          assert(PhysReg.isValid() && "Invalid SubReg for physical register");
          MO.setSubReg(0);
        }
        // Rewrite. Note we could have used MachineOperand::substPhysReg(), but
        // we need the inlining here.
        MO.setReg(PhysReg);
        MO.setIsRenamable(true);
      }

      // Add any missing super-register kills after rewriting the whole
      // instruction.
      while (!SuperKills.empty())
        MI->addRegisterKilled(SuperKills.pop_back_val(), TRI, true);

      while (!SuperDeads.empty())
        MI->addRegisterDead(SuperDeads.pop_back_val(), TRI, true);

      while (!SuperDefs.empty())
        MI->addRegisterDefined(SuperDefs.pop_back_val(), TRI);

      LLVM_DEBUG(dbgs() << "> " << *MI);

      expandCopyBundle(*MI);

      // We can remove identity copies right now.
      handleIdentityCopy(*MI);
    }
  }
}