//===----- AggressiveAntiDepBreaker.cpp - Anti-dep breaker ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the AggressiveAntiDepBreaker class, which
// implements register anti-dependence breaking during post-RA
// scheduling. It attempts to break all anti-dependencies within a
// block.
//
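// As an illustrative (hypothetical) example, in a sequence such as
//
//   R1 = A + B      ; def of R1
//   C  = R1 * 2     ; last use of R1
//   R1 = D + E      ; redefinition creates an anti-dependence on the use above
//
// the second definition of R1 cannot be moved above the preceding use.
// Renaming that definition (and its later uses) to a free register removes
// the anti-dependence and gives the post-RA scheduler more freedom.
//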
//===----------------------------------------------------------------------===//

#include "AggressiveAntiDepBreaker.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/RegisterClassInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegisterInfo.h"
using namespace llvm;

#define DEBUG_TYPE "post-RA-sched"

// If DebugDiv > 0 then only break antidep with (ID % DebugDiv) == DebugMod
static cl::opt<int>
DebugDiv("agg-antidep-debugdiv",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);
static cl::opt<int>
DebugMod("agg-antidep-debugmod",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);

AggressiveAntiDepState::AggressiveAntiDepState(const unsigned TargetRegs,
                                               MachineBasicBlock *BB) :
  NumTargetRegs(TargetRegs), GroupNodes(TargetRegs, 0),
  GroupNodeIndices(TargetRegs, 0),
  KillIndices(TargetRegs, 0),
  DefIndices(TargetRegs, 0)
{
  const unsigned BBSize = BB->size();
  for (unsigned i = 0; i < NumTargetRegs; ++i) {
    // Initialize all registers to be in their own group. Initially we
    // assign the register to the same-indexed GroupNode.
    GroupNodeIndices[i] = i;
    // Initialize the indices to indicate that no registers are live.
    KillIndices[i] = ~0u;
    DefIndices[i] = BBSize;
  }
}

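// GetGroup follows GroupNode parent links, union-find style, until it reaches
// a self-referential node, which is the representative of Reg's group.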
unsigned AggressiveAntiDepState::GetGroup(unsigned Reg) {
  unsigned Node = GroupNodeIndices[Reg];
  while (GroupNodes[Node] != Node)
    Node = GroupNodes[Node];

  return Node;
}

void AggressiveAntiDepState::GetGroupRegs(
  unsigned Group,
  std::vector<unsigned> &Regs,
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference> *RegRefs)
{
  for (unsigned Reg = 0; Reg != NumTargetRegs; ++Reg) {
    if ((GetGroup(Reg) == Group) && (RegRefs->count(Reg) > 0))
      Regs.push_back(Reg);
  }
}

unsigned AggressiveAntiDepState::UnionGroups(unsigned Reg1, unsigned Reg2)
{
  assert(GroupNodes[0] == 0 && "GroupNode 0 not parent!");
  assert(GroupNodeIndices[0] == 0 && "Reg 0 not in Group 0!");

  // find group for each register
  unsigned Group1 = GetGroup(Reg1);
  unsigned Group2 = GetGroup(Reg2);

  // if either group is 0, then that must become the parent
  unsigned Parent = (Group1 == 0) ? Group1 : Group2;
  unsigned Other = (Parent == Group1) ? Group2 : Group1;
  GroupNodes.at(Other) = Parent;
  return Parent;
}

unsigned AggressiveAntiDepState::LeaveGroup(unsigned Reg)
{
  // Create a new GroupNode for Reg. Reg's existing GroupNode must
  // stay as is because there could be other GroupNodes referring to
  // it.
  unsigned idx = GroupNodes.size();
  GroupNodes.push_back(idx);
  GroupNodeIndices[Reg] = idx;
  return idx;
}

bool AggressiveAntiDepState::IsLive(unsigned Reg)
{
  // KillIndex must be defined and DefIndex not defined for a register
  // to be live.
  return((KillIndices[Reg] != ~0u) && (DefIndices[Reg] == ~0u));
}


AggressiveAntiDepBreaker::
AggressiveAntiDepBreaker(MachineFunction& MFi,
                         const RegisterClassInfo &RCI,
                         TargetSubtargetInfo::RegClassVector& CriticalPathRCs) :
  AntiDepBreaker(), MF(MFi),
  MRI(MF.getRegInfo()),
  TII(MF.getTarget().getInstrInfo()),
  TRI(MF.getTarget().getRegisterInfo()),
  RegClassInfo(RCI),
  State(nullptr) {
  /* Collect a bitset of all registers that are only broken if they
     are on the critical path. */
  for (unsigned i = 0, e = CriticalPathRCs.size(); i < e; ++i) {
    BitVector CPSet = TRI->getAllocatableSet(MF, CriticalPathRCs[i]);
    if (CriticalPathSet.none())
      CriticalPathSet = CPSet;
    else
      CriticalPathSet |= CPSet;
  }

  DEBUG(dbgs() << "AntiDep Critical-Path Registers:");
  DEBUG(for (int r = CriticalPathSet.find_first(); r != -1;
             r = CriticalPathSet.find_next(r))
          dbgs() << " " << TRI->getName(r));
  DEBUG(dbgs() << '\n');
}

AggressiveAntiDepBreaker::~AggressiveAntiDepBreaker() {
  delete State;
}

void AggressiveAntiDepBreaker::StartBlock(MachineBasicBlock *BB) {
  assert(!State);
  State = new AggressiveAntiDepState(TRI->getNumRegs(), BB);

  bool IsReturnBlock = (!BB->empty() && BB->back().isReturn());
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();

  // Examine the live-in regs of all successors.
  for (MachineBasicBlock::succ_iterator SI = BB->succ_begin(),
         SE = BB->succ_end(); SI != SE; ++SI)
    for (MachineBasicBlock::livein_iterator I = (*SI)->livein_begin(),
           E = (*SI)->livein_end(); I != E; ++I) {
      for (MCRegAliasIterator AI(*I, TRI, true); AI.isValid(); ++AI) {
        unsigned Reg = *AI;
        State->UnionGroups(Reg, 0);
        KillIndices[Reg] = BB->size();
        DefIndices[Reg] = ~0u;
      }
    }

  // Mark live-out callee-saved registers. In a return block this is
  // all callee-saved registers. In a non-return block this is any
  // callee-saved register that is not saved in the prolog.
  const MachineFrameInfo *MFI = MF.getFrameInfo();
  BitVector Pristine = MFI->getPristineRegs(BB);
  for (const MCPhysReg *I = TRI->getCalleeSavedRegs(&MF); *I; ++I) {
    unsigned Reg = *I;
    if (!IsReturnBlock && !Pristine.test(Reg)) continue;
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      State->UnionGroups(AliasReg, 0);
      KillIndices[AliasReg] = BB->size();
      DefIndices[AliasReg] = ~0u;
    }
  }
}

void AggressiveAntiDepBreaker::FinishBlock() {
  delete State;
  State = nullptr;
}

void AggressiveAntiDepBreaker::Observe(MachineInstr *MI, unsigned Count,
                                       unsigned InsertPosIndex) {
  assert(Count < InsertPosIndex && "Instruction index out of expected range!");

  std::set<unsigned> PassthruRegs;
  GetPassthruRegs(MI, PassthruRegs);
  PrescanInstruction(MI, Count, PassthruRegs);
  ScanInstruction(MI, Count);

  DEBUG(dbgs() << "Observe: ");
  DEBUG(MI->dump());
  DEBUG(dbgs() << "\tRegs:");

  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  for (unsigned Reg = 0; Reg != TRI->getNumRegs(); ++Reg) {
    // If Reg is currently live, then mark that it can't be renamed as
    // we don't know the extent of its live-range anymore (now that it
    // has been scheduled). If it is not live but was defined in the
    // previous schedule region, then set its def index to the most
    // conservative location (i.e. the beginning of the previous
    // schedule region).
    if (State->IsLive(Reg)) {
      DEBUG(if (State->GetGroup(Reg) != 0)
              dbgs() << " " << TRI->getName(Reg) << "=g" <<
                State->GetGroup(Reg) << "->g0(region live-out)");
      State->UnionGroups(Reg, 0);
    } else if ((DefIndices[Reg] < InsertPosIndex)
               && (DefIndices[Reg] >= Count)) {
      DefIndices[Reg] = Count;
    }
  }
  DEBUG(dbgs() << '\n');
}

bool AggressiveAntiDepBreaker::IsImplicitDefUse(MachineInstr *MI,
                                                MachineOperand& MO)
{
  if (!MO.isReg() || !MO.isImplicit())
    return false;

  unsigned Reg = MO.getReg();
  if (Reg == 0)
    return false;

  MachineOperand *Op = nullptr;
  if (MO.isDef())
    Op = MI->findRegisterUseOperand(Reg, true);
  else
    Op = MI->findRegisterDefOperand(Reg);

  return(Op && Op->isImplicit());
}

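// GetPassthruRegs collects the registers whose liveness "passes through" MI:
// defs tied to a use operand and implicit def/use pairs. Such registers (and
// their subregisters) cannot be renamed at this def independently of the
// corresponding use.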
void AggressiveAntiDepBreaker::GetPassthruRegs(MachineInstr *MI,
                                               std::set<unsigned>& PassthruRegs) {
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    if ((MO.isDef() && MI->isRegTiedToUseOperand(i)) ||
        IsImplicitDefUse(MI, MO)) {
      const unsigned Reg = MO.getReg();
      for (MCSubRegIterator SubRegs(Reg, TRI, /*IncludeSelf=*/true);
           SubRegs.isValid(); ++SubRegs)
        PassthruRegs.insert(*SubRegs);
    }
  }
}

/// AntiDepEdges - Return in Edges the anti- and output- dependencies
/// in SU that we want to consider for breaking.
static void AntiDepEdges(const SUnit *SU, std::vector<const SDep*>& Edges) {
  SmallSet<unsigned, 4> RegSet;
  for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
       P != PE; ++P) {
    if ((P->getKind() == SDep::Anti) || (P->getKind() == SDep::Output)) {
      unsigned Reg = P->getReg();
      if (RegSet.count(Reg) == 0) {
        Edges.push_back(&*P);
        RegSet.insert(Reg);
      }
    }
  }
}

/// CriticalPathStep - Return the next SUnit after SU on the bottom-up
/// critical path.
static const SUnit *CriticalPathStep(const SUnit *SU) {
  const SDep *Next = nullptr;
  unsigned NextDepth = 0;
  // Find the predecessor edge with the greatest depth.
  if (SU) {
    for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
         P != PE; ++P) {
      const SUnit *PredSU = P->getSUnit();
      unsigned PredLatency = P->getLatency();
      unsigned PredTotalLatency = PredSU->getDepth() + PredLatency;
      // In the case of a latency tie, prefer an anti-dependency edge over
      // other types of edges.
      if (NextDepth < PredTotalLatency ||
          (NextDepth == PredTotalLatency && P->getKind() == SDep::Anti)) {
        NextDepth = PredTotalLatency;
        Next = &*P;
      }
    }
  }

  return (Next) ? Next->getSUnit() : nullptr;
}

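// HandleLastUse begins a new live range for Reg at KillIdx when Reg is not
// currently live: it records the kill index, clears the def index and any
// stale references, and moves the register into a fresh group so it can be
// renamed independently of earlier live ranges. The same is done for each
// subregister of Reg that is not live.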
void AggressiveAntiDepBreaker::HandleLastUse(unsigned Reg, unsigned KillIdx,
                                             const char *tag,
                                             const char *header,
                                             const char *footer) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  if (!State->IsLive(Reg)) {
    KillIndices[Reg] = KillIdx;
    DefIndices[Reg] = ~0u;
    RegRefs.erase(Reg);
    State->LeaveGroup(Reg);
    DEBUG(if (header) {
        dbgs() << header << TRI->getName(Reg); header = nullptr; });
    DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << tag);
  }
  // Repeat for subregisters.
  for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs) {
    unsigned SubregReg = *SubRegs;
    if (!State->IsLive(SubregReg)) {
      KillIndices[SubregReg] = KillIdx;
      DefIndices[SubregReg] = ~0u;
      RegRefs.erase(SubregReg);
      State->LeaveGroup(SubregReg);
      DEBUG(if (header) {
          dbgs() << header << TRI->getName(Reg); header = nullptr; });
      DEBUG(dbgs() << " " << TRI->getName(SubregReg) << "->g" <<
            State->GetGroup(SubregReg) << tag);
    }
  }

  DEBUG(if (!header && footer) dbgs() << footer);
}

void AggressiveAntiDepBreaker::PrescanInstruction(MachineInstr *MI,
                                                  unsigned Count,
                                                  std::set<unsigned>& PassthruRegs) {
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Handle dead defs by simulating a last-use of the register just
  // after the def. A dead def can occur because the def is truly
  // dead, or because only a subregister is live at the def. If we
  // don't do this the dead def will be incorrectly merged into the
  // previous def.
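  //
  // Illustrative (hypothetical) example of the subregister case, using
  // x86-style register names:
  //
  //   %EAX = ...       ; def of EAX, but only AX is used below
  //   ...  = %AX       ; use of the subregister only
  //
  // With respect to the full register, the def of EAX is dead. Simulating a
  // last-use of EAX just after the def keeps that def in its own group
  // rather than merging it with an earlier def of EAX.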
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    HandleLastUse(Reg, Count + 1, "", "\tDead Def: ", "\n");
  }

  DEBUG(dbgs() << "\tDef Groups:");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" << State->GetGroup(Reg));

    // If MI's defs have a special allocation requirement, don't allow
    // any def registers to be changed. Also assume all registers
    // defined in a call must not be changed (ABI).
    if (MI->isCall() || MI->hasExtraDefRegAllocReq() ||
        TII->isPredicated(MI)) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Any aliases that are live at this point are completely or
    // partially defined here, so group those aliases with Reg.
    for (MCRegAliasIterator AI(Reg, TRI, false); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      if (State->IsLive(AliasReg)) {
        State->UnionGroups(Reg, AliasReg);
        DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << "(via " <<
              TRI->getName(AliasReg) << ")");
      }
    }

    // Note register reference...
    const TargetRegisterClass *RC = nullptr;
    if (i < MI->getDesc().getNumOperands())
      RC = TII->getRegClass(MI->getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Scan the register defs for this instruction and update
  // live-ranges.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;
    // Ignore KILLs and passthru registers for liveness...
    if (MI->isKill() || (PassthruRegs.count(Reg) != 0))
      continue;

    // Update def for Reg and aliases.
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      // We need to be careful here not to define already-live super registers.
      // If the super register is already live, then this definition is not
      // a definition of the whole super register (just a partial insertion
      // into it). Earlier subregister definitions (which we've not yet visited
      // because we're iterating bottom-up) need to be linked to the same group
      // as this definition.
      if (TRI->isSuperRegister(Reg, *AI) && State->IsLive(*AI))
        continue;

      DefIndices[*AI] = Count;
    }
  }
}

void AggressiveAntiDepBreaker::ScanInstruction(MachineInstr *MI,
                                               unsigned Count) {
  DEBUG(dbgs() << "\tUse Groups:");
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // If MI's uses have a special allocation requirement, don't allow
  // any use registers to be changed. Also assume all registers
  // used in a call must not be changed (ABI).
  // FIXME: The issue with predicated instructions is more complex. We are
  // being conservative here because the kill markers cannot be trusted after
  // if-conversion:
  // %R6<def> = LDR %SP, %reg0, 92, pred:14, pred:%reg0; mem:LD4[FixedStack14]
  // ...
  // STR %R0, %R6<kill>, %reg0, 0, pred:0, pred:%CPSR; mem:ST4[%395]
  // %R6<def> = LDR %SP, %reg0, 100, pred:0, pred:%CPSR; mem:LD4[FixedStack12]
  // STR %R0, %R6<kill>, %reg0, 0, pred:14, pred:%reg0; mem:ST4[%396](align=8)
  //
  // The first R6 kill is not really a kill since it's killed by a predicated
  // instruction which may not be executed. The second R6 def may or may not
  // re-define R6 so it's not safe to change it since the last R6 use cannot be
  // changed.
  bool Special = MI->isCall() ||
    MI->hasExtraSrcRegAllocReq() ||
    TII->isPredicated(MI);

  // Scan the register uses for this instruction and update
  // live-ranges, groups and RegRefs.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isUse()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" <<
          State->GetGroup(Reg));

    // If the register wasn't live before this use, then this use is a kill
    // (the last use of a live range). Forget the previous live-range
    // information and start a new live-range for the register.
    HandleLastUse(Reg, Count, "(last-use)");

    if (Special) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Note register reference...
    const TargetRegisterClass *RC = nullptr;
    if (i < MI->getDesc().getNumOperands())
      RC = TII->getRegClass(MI->getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Form a group of all defs and uses of a KILL instruction to ensure
  // that all registers are renamed as a group.
  if (MI->isKill()) {
    DEBUG(dbgs() << "\tKill Group:");

    unsigned FirstReg = 0;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (Reg == 0) continue;

      if (FirstReg != 0) {
        DEBUG(dbgs() << "=" << TRI->getName(Reg));
        State->UnionGroups(FirstReg, Reg);
      } else {
        DEBUG(dbgs() << " " << TRI->getName(Reg));
        FirstReg = Reg;
      }
    }

    DEBUG(dbgs() << "->g" << State->GetGroup(FirstReg) << '\n');
  }
}

BitVector AggressiveAntiDepBreaker::GetRenameRegisters(unsigned Reg) {
  BitVector BV(TRI->getNumRegs(), false);
  bool first = true;

  // Check all references that need rewriting for Reg. For each, use
  // the corresponding register class to narrow the set of registers
  // that are appropriate for renaming.
  std::pair<std::multimap<unsigned,
                          AggressiveAntiDepState::RegisterReference>::iterator,
            std::multimap<unsigned,
                          AggressiveAntiDepState::RegisterReference>::iterator>
    Range = State->GetRegRefs().equal_range(Reg);
  for (std::multimap<unsigned,
         AggressiveAntiDepState::RegisterReference>::iterator Q = Range.first,
         QE = Range.second; Q != QE; ++Q) {
    const TargetRegisterClass *RC = Q->second.RC;
    if (!RC) continue;

    BitVector RCBV = TRI->getAllocatableSet(MF, RC);
    if (first) {
      BV |= RCBV;
      first = false;
    } else {
      BV &= RCBV;
    }

    DEBUG(dbgs() << " " << RC->getName());
  }

  return BV;
}

bool AggressiveAntiDepBreaker::FindSuitableFreeRegisters(
                              unsigned AntiDepGroupIndex,
                              RenameOrderType& RenameOrder,
                              std::map<unsigned, unsigned> &RenameMap) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Collect all referenced registers in the same group as
  // AntiDepReg. These all need to be renamed together if we are to
  // break the anti-dependence.
  std::vector<unsigned> Regs;
  State->GetGroupRegs(AntiDepGroupIndex, Regs, &RegRefs);
  assert(Regs.size() > 0 && "Empty register group!");
  if (Regs.size() == 0)
    return false;

  // Find the "superest" register in the group. At the same time,
  // collect the BitVector of registers that can be used to rename
  // each register.
  DEBUG(dbgs() << "\tRename Candidates for Group g" << AntiDepGroupIndex
        << ":\n");
  std::map<unsigned, BitVector> RenameRegisterMap;
  unsigned SuperReg = 0;
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if ((SuperReg == 0) || TRI->isSuperRegister(SuperReg, Reg))
      SuperReg = Reg;

    // If Reg has any references, then collect possible rename regs
    if (RegRefs.count(Reg) > 0) {
      DEBUG(dbgs() << "\t\t" << TRI->getName(Reg) << ":");

      BitVector BV = GetRenameRegisters(Reg);
      RenameRegisterMap.insert(std::pair<unsigned, BitVector>(Reg, BV));

      DEBUG(dbgs() << " ::");
      DEBUG(for (int r = BV.find_first(); r != -1; r = BV.find_next(r))
              dbgs() << " " << TRI->getName(r));
      DEBUG(dbgs() << "\n");
    }
  }

  // All group registers should be a subreg of SuperReg.
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if (Reg == SuperReg) continue;
    bool IsSub = TRI->isSubRegister(SuperReg, Reg);
    assert(IsSub && "Expecting group subregister");
    if (!IsSub)
      return false;
  }

#ifndef NDEBUG
  // If DebugDiv > 0 then only rename (renamecnt % DebugDiv) == DebugMod
  if (DebugDiv > 0) {
    static int renamecnt = 0;
    if (renamecnt++ % DebugDiv != DebugMod)
      return false;

    dbgs() << "*** Performing rename " << TRI->getName(SuperReg) <<
      " for debug ***\n";
  }
#endif

  // Check each possible rename register for SuperReg in round-robin
  // order. If that register is available, and the corresponding
  // registers are available for the other group subregisters, then we
  // can use those registers to rename.

  // FIXME: Using getMinimalPhysRegClass is very conservative. We should
  // check every use of the register and find the largest register class
  // that can be used in all of them.
  const TargetRegisterClass *SuperRC =
    TRI->getMinimalPhysRegClass(SuperReg, MVT::Other);

  ArrayRef<MCPhysReg> Order = RegClassInfo.getOrder(SuperRC);
  if (Order.empty()) {
    DEBUG(dbgs() << "\tEmpty Super Regclass!!\n");
    return false;
  }

  DEBUG(dbgs() << "\tFind Registers:");

  if (RenameOrder.count(SuperRC) == 0)
    RenameOrder.insert(RenameOrderType::value_type(SuperRC, Order.size()));

  unsigned OrigR = RenameOrder[SuperRC];
  unsigned EndR = ((OrigR == Order.size()) ? 0 : OrigR);
  unsigned R = OrigR;
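  // Walk the allocation order backwards, starting from where the previous
  // rename for this register class stopped and wrapping around, so that
  // successive renames cycle through the whole order before reusing a
  // register.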
  do {
    if (R == 0) R = Order.size();
    --R;
    const unsigned NewSuperReg = Order[R];
    // Don't consider non-allocatable registers
    if (!MRI.isAllocatable(NewSuperReg)) continue;
    // Don't replace a register with itself.
    if (NewSuperReg == SuperReg) continue;

    DEBUG(dbgs() << " [" << TRI->getName(NewSuperReg) << ':');
    RenameMap.clear();

    // For each referenced group register (which must be a SuperReg or
    // a subregister of SuperReg), find the corresponding subregister
    // of NewSuperReg and make sure it is free to be renamed.
    for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
      unsigned Reg = Regs[i];
      unsigned NewReg = 0;
      if (Reg == SuperReg) {
        NewReg = NewSuperReg;
      } else {
        unsigned NewSubRegIdx = TRI->getSubRegIndex(SuperReg, Reg);
        if (NewSubRegIdx != 0)
          NewReg = TRI->getSubReg(NewSuperReg, NewSubRegIdx);
      }

      DEBUG(dbgs() << " " << TRI->getName(NewReg));

      // Check if Reg can be renamed to NewReg.
      BitVector BV = RenameRegisterMap[Reg];
      if (!BV.test(NewReg)) {
        DEBUG(dbgs() << "(no rename)");
        goto next_super_reg;
      }

      // If NewReg is dead and NewReg's most recent def is not before
      // Reg's kill, it's safe to replace Reg with NewReg. We
      // must also check all aliases of NewReg, because we can't define a
      // register when any sub or super is already live.
      if (State->IsLive(NewReg) || (KillIndices[Reg] > DefIndices[NewReg])) {
        DEBUG(dbgs() << "(live)");
        goto next_super_reg;
      } else {
        bool found = false;
        for (MCRegAliasIterator AI(NewReg, TRI, false); AI.isValid(); ++AI) {
          unsigned AliasReg = *AI;
          if (State->IsLive(AliasReg) ||
              (KillIndices[Reg] > DefIndices[AliasReg])) {
            DEBUG(dbgs() << "(alias " << TRI->getName(AliasReg) << " live)");
            found = true;
            break;
          }
        }
        if (found)
          goto next_super_reg;
      }

      // Record that 'Reg' can be renamed to 'NewReg'.
      RenameMap.insert(std::pair<unsigned, unsigned>(Reg, NewReg));
    }

    // If we fall out here, then every register in the group can be
    // renamed, as recorded in RenameMap.
    RenameOrder.erase(SuperRC);
    RenameOrder.insert(RenameOrderType::value_type(SuperRC, R));
    DEBUG(dbgs() << "]\n");
    return true;

  next_super_reg:
    DEBUG(dbgs() << ']');
  } while (R != EndR);

  DEBUG(dbgs() << '\n');

  // No registers are free and available!
  return false;
}

/// BreakAntiDependencies - Identify anti-dependencies within the
/// ScheduleDAG and break them by renaming registers.
///
unsigned AggressiveAntiDepBreaker::BreakAntiDependencies(
                              const std::vector<SUnit>& SUnits,
                              MachineBasicBlock::iterator Begin,
                              MachineBasicBlock::iterator End,
                              unsigned InsertPosIndex,
                              DbgValueVector &DbgValues) {

  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // The code below assumes that there is at least one instruction,
  // so just duck out immediately if the block is empty.
  if (SUnits.empty()) return 0;

  // For each regclass the next register to use for renaming.
  RenameOrderType RenameOrder;

  // ...need a map from MI to SUnit.
  std::map<MachineInstr *, const SUnit *> MISUnitMap;
  for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
    const SUnit *SU = &SUnits[i];
    MISUnitMap.insert(std::pair<MachineInstr *, const SUnit *>(SU->getInstr(),
                                                               SU));
  }

  // Track progress along the critical path through the SUnit graph as
  // we walk the instructions. This is needed for regclasses that only
  // break critical-path anti-dependencies.
  const SUnit *CriticalPathSU = nullptr;
  MachineInstr *CriticalPathMI = nullptr;
  if (CriticalPathSet.any()) {
    for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
      const SUnit *SU = &SUnits[i];
      if (!CriticalPathSU ||
          ((SU->getDepth() + SU->Latency) >
           (CriticalPathSU->getDepth() + CriticalPathSU->Latency))) {
        CriticalPathSU = SU;
      }
    }

    CriticalPathMI = CriticalPathSU->getInstr();
  }

#ifndef NDEBUG
  DEBUG(dbgs() << "\n===== Aggressive anti-dependency breaking\n");
  DEBUG(dbgs() << "Available regs:");
  for (unsigned Reg = 0; Reg < TRI->getNumRegs(); ++Reg) {
    if (!State->IsLive(Reg))
      DEBUG(dbgs() << " " << TRI->getName(Reg));
  }
  DEBUG(dbgs() << '\n');
#endif

  // Attempt to break anti-dependence edges. Walk the instructions
  // from the bottom up, tracking information about liveness as we go
  // to help determine which registers are available.
  unsigned Broken = 0;
  unsigned Count = InsertPosIndex - 1;
  for (MachineBasicBlock::iterator I = End, E = Begin;
       I != E; --Count) {
    MachineInstr *MI = --I;

    if (MI->isDebugValue())
      continue;

    DEBUG(dbgs() << "Anti: ");
    DEBUG(MI->dump());

    std::set<unsigned> PassthruRegs;
    GetPassthruRegs(MI, PassthruRegs);

    // Process the defs in MI...
    PrescanInstruction(MI, Count, PassthruRegs);

    // The dependence edges that represent anti- and output-
    // dependencies that are candidates for breaking.
    std::vector<const SDep *> Edges;
    const SUnit *PathSU = MISUnitMap[MI];
    AntiDepEdges(PathSU, Edges);

    // If MI is not on the critical path, then we don't rename
    // registers in the CriticalPathSet.
    BitVector *ExcludeRegs = nullptr;
    if (MI == CriticalPathMI) {
      CriticalPathSU = CriticalPathStep(CriticalPathSU);
      CriticalPathMI = (CriticalPathSU) ? CriticalPathSU->getInstr() : nullptr;
    } else if (CriticalPathSet.any()) {
      ExcludeRegs = &CriticalPathSet;
    }

    // Ignore KILL instructions (they form a group in ScanInstruction
    // but don't cause any anti-dependence breaking themselves)
    if (!MI->isKill()) {
      // Attempt to break each anti-dependency...
      for (unsigned i = 0, e = Edges.size(); i != e; ++i) {
        const SDep *Edge = Edges[i];
        SUnit *NextSU = Edge->getSUnit();

        if ((Edge->getKind() != SDep::Anti) &&
            (Edge->getKind() != SDep::Output)) continue;

        unsigned AntiDepReg = Edge->getReg();
        DEBUG(dbgs() << "\tAntidep reg: " << TRI->getName(AntiDepReg));
        assert(AntiDepReg != 0 && "Anti-dependence on reg0?");

        if (!MRI.isAllocatable(AntiDepReg)) {
          // Don't break anti-dependencies on non-allocatable registers.
          DEBUG(dbgs() << " (non-allocatable)\n");
          continue;
        } else if (ExcludeRegs && ExcludeRegs->test(AntiDepReg)) {
          // Don't break anti-dependencies for critical path registers
          // if not on the critical path
          DEBUG(dbgs() << " (not critical-path)\n");
          continue;
        } else if (PassthruRegs.count(AntiDepReg) != 0) {
          // If the anti-dep register liveness "passes-thru", then
          // don't try to change it. It will be changed along with
          // the use if required to break an earlier antidep.
          DEBUG(dbgs() << " (passthru)\n");
          continue;
        } else {
          // No anti-dep breaking for implicit deps
          MachineOperand *AntiDepOp = MI->findRegisterDefOperand(AntiDepReg);
          assert(AntiDepOp && "Can't find index for defined register operand");
          if (!AntiDepOp || AntiDepOp->isImplicit()) {
            DEBUG(dbgs() << " (implicit)\n");
            continue;
          }

          // If the SUnit has other dependencies on the SUnit that
          // it anti-depends on, don't bother breaking the
          // anti-dependency since those edges would prevent such
          // units from being scheduled past each other
          // regardless.
          //
          // Also, if there are dependencies on other SUnits with the
          // same register as the anti-dependency, don't attempt to
          // break it.
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if (P->getSUnit() == NextSU ?
                (P->getKind() != SDep::Anti || P->getReg() != AntiDepReg) :
                (P->getKind() == SDep::Data && P->getReg() == AntiDepReg)) {
              AntiDepReg = 0;
              break;
            }
          }
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if ((P->getSUnit() == NextSU) && (P->getKind() != SDep::Anti) &&
                (P->getKind() != SDep::Output)) {
              DEBUG(dbgs() << " (real dependency)\n");
              AntiDepReg = 0;
              break;
            } else if ((P->getSUnit() != NextSU) &&
                       (P->getKind() == SDep::Data) &&
                       (P->getReg() == AntiDepReg)) {
              DEBUG(dbgs() << " (other dependency)\n");
              AntiDepReg = 0;
              break;
            }
          }

          if (AntiDepReg == 0) continue;
        }

        assert(AntiDepReg != 0);
        if (AntiDepReg == 0) continue;

        // Determine AntiDepReg's register group.
        const unsigned GroupIndex = State->GetGroup(AntiDepReg);
        if (GroupIndex == 0) {
          DEBUG(dbgs() << " (zero group)\n");
          continue;
        }

        DEBUG(dbgs() << '\n');

        // Look for a suitable register to use to break the anti-dependence.
        std::map<unsigned, unsigned> RenameMap;
        if (FindSuitableFreeRegisters(GroupIndex, RenameOrder, RenameMap)) {
          DEBUG(dbgs() << "\tBreaking anti-dependence edge on "
                << TRI->getName(AntiDepReg) << ":");

          // Handle each group register...
          for (std::map<unsigned, unsigned>::iterator
                 S = RenameMap.begin(), E = RenameMap.end(); S != E; ++S) {
            unsigned CurrReg = S->first;
            unsigned NewReg = S->second;

            DEBUG(dbgs() << " " << TRI->getName(CurrReg) << "->" <<
                  TRI->getName(NewReg) << "(" <<
                  RegRefs.count(CurrReg) << " refs)");

            // Update the references to the old register CurrReg to
            // refer to the new register NewReg.
            std::pair<std::multimap<unsigned,
                        AggressiveAntiDepState::RegisterReference>::iterator,
                      std::multimap<unsigned,
                        AggressiveAntiDepState::RegisterReference>::iterator>
              Range = RegRefs.equal_range(CurrReg);
            for (std::multimap<unsigned,
                   AggressiveAntiDepState::RegisterReference>::iterator
                   Q = Range.first, QE = Range.second; Q != QE; ++Q) {
              Q->second.Operand->setReg(NewReg);
              // If the SU for the instruction being updated has debug
              // information related to the anti-dependency register, make
              // sure to update that as well.
              const SUnit *SU = MISUnitMap[Q->second.Operand->getParent()];
              if (!SU) continue;
              for (DbgValueVector::iterator DVI = DbgValues.begin(),
                     DVE = DbgValues.end(); DVI != DVE; ++DVI)
                if (DVI->second == Q->second.Operand->getParent())
                  UpdateDbgValue(DVI->first, AntiDepReg, NewReg);
            }

            // We just went back in time and modified history; the
            // liveness information for CurrReg is now inconsistent. Set
            // the state as if it were dead.
            State->UnionGroups(NewReg, 0);
            RegRefs.erase(NewReg);
            DefIndices[NewReg] = DefIndices[CurrReg];
            KillIndices[NewReg] = KillIndices[CurrReg];

            State->UnionGroups(CurrReg, 0);
            RegRefs.erase(CurrReg);
            DefIndices[CurrReg] = KillIndices[CurrReg];
            KillIndices[CurrReg] = ~0u;
            assert(((KillIndices[CurrReg] == ~0u) !=
                    (DefIndices[CurrReg] == ~0u)) &&
                   "Kill and Def maps aren't consistent for AntiDepReg!");
          }

          ++Broken;
          DEBUG(dbgs() << '\n');
        }
      }
    }

    ScanInstruction(MI, Count);
  }

  return Broken;
}