1 //===----- AggressiveAntiDepBreaker.cpp - Anti-dep breaker ----------------===//
2 //
3 //                     The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file implements the AggressiveAntiDepBreaker class, which
11 // implements register anti-dependence breaking during post-RA
12 // scheduling. It attempts to break all anti-dependencies within a
13 // block.
14 //
15 //===----------------------------------------------------------------------===//
16 
17 #define DEBUG_TYPE "post-RA-sched"
18 #include "AggressiveAntiDepBreaker.h"
19 #include "RegisterClassInfo.h"
20 #include "llvm/CodeGen/MachineBasicBlock.h"
21 #include "llvm/CodeGen/MachineFrameInfo.h"
22 #include "llvm/CodeGen/MachineInstr.h"
23 #include "llvm/Target/TargetInstrInfo.h"
24 #include "llvm/Target/TargetMachine.h"
25 #include "llvm/Target/TargetInstrInfo.h"
26 #include "llvm/Target/TargetRegisterInfo.h"
27 #include "llvm/Support/CommandLine.h"
28 #include "llvm/Support/Debug.h"
29 #include "llvm/Support/ErrorHandling.h"
30 #include "llvm/Support/raw_ostream.h"
31 using namespace llvm;
32 
33 // If DebugDiv > 0, only break anti-deps with (ID % DebugDiv) == DebugMod
34 static cl::opt<int>
35 DebugDiv("agg-antidep-debugdiv",
36          cl::desc("Debug control for aggressive anti-dep breaker"),
37          cl::init(0), cl::Hidden);
38 static cl::opt<int>
39 DebugMod("agg-antidep-debugmod",
40          cl::desc("Debug control for aggressive anti-dep breaker"),
41          cl::init(0), cl::Hidden);
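// For example, in a non-release build, passing -agg-antidep-debugdiv=4 and
// -agg-antidep-debugmod=1 limits renaming to the candidates whose renamecnt
// satisfies renamecnt % 4 == 1 (see FindSuitableFreeRegisters), which can
// help narrow a miscompilation down to a particular rename.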
42 
43 AggressiveAntiDepState::AggressiveAntiDepState(const unsigned TargetRegs,
44                                                MachineBasicBlock *BB) :
45   NumTargetRegs(TargetRegs), GroupNodes(TargetRegs, 0),
46   GroupNodeIndices(TargetRegs, 0),
47   KillIndices(TargetRegs, 0),
48   DefIndices(TargetRegs, 0)
49 {
50   const unsigned BBSize = BB->size();
51   for (unsigned i = 0; i < NumTargetRegs; ++i) {
52     // Initialize all registers to be in their own group. Initially we
53     // assign the register to the same-indexed GroupNode.
54     GroupNodeIndices[i] = i;
55     // Initialize the indices to indicate that no registers are live.
56     KillIndices[i] = ~0u;
57     DefIndices[i] = BBSize;
58   }
59 }
60 
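// GroupNodes and GroupNodeIndices together implement a simple union-find
// (disjoint-set) forest: GroupNodeIndices maps each register to a node, and
// GetGroup follows parent links through GroupNodes until it reaches a
// self-referential root. Group 0 is the special "never rename" group.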
61 unsigned AggressiveAntiDepState::GetGroup(unsigned Reg) {
62   unsigned Node = GroupNodeIndices[Reg];
63   while (GroupNodes[Node] != Node)
64     Node = GroupNodes[Node];
65 
66   return Node;
67 }
68 
69 void AggressiveAntiDepState::GetGroupRegs(
70   unsigned Group,
71   std::vector<unsigned> &Regs,
72   std::multimap<unsigned, AggressiveAntiDepState::RegisterReference> *RegRefs)
73 {
74   for (unsigned Reg = 0; Reg != NumTargetRegs; ++Reg) {
75     if ((GetGroup(Reg) == Group) && (RegRefs->count(Reg) > 0))
76       Regs.push_back(Reg);
77   }
78 }
79 
80 unsigned AggressiveAntiDepState::UnionGroups(unsigned Reg1, unsigned Reg2)
81 {
82   assert(GroupNodes[0] == 0 && "GroupNode 0 not parent!");
83   assert(GroupNodeIndices[0] == 0 && "Reg 0 not in Group 0!");
84 
85   // find group for each register
86   unsigned Group1 = GetGroup(Reg1);
87   unsigned Group2 = GetGroup(Reg2);
88 
89   // if either group is 0, then that must become the parent
90   unsigned Parent = (Group1 == 0) ? Group1 : Group2;
91   unsigned Other = (Parent == Group1) ? Group2 : Group1;
92   GroupNodes.at(Other) = Parent;
93   return Parent;
94 }
95 
96 unsigned AggressiveAntiDepState::LeaveGroup(unsigned Reg)
97 {
98   // Create a new GroupNode for Reg. Reg's existing GroupNode must
99   // stay as is because there could be other GroupNodes referring to
100   // it.
101   unsigned idx = GroupNodes.size();
102   GroupNodes.push_back(idx);
103   GroupNodeIndices[Reg] = idx;
104   return idx;
105 }
106 
107 bool AggressiveAntiDepState::IsLive(unsigned Reg)
108 {
109   // KillIndex must be defined and DefIndex not defined for a register
110   // to be live.
111   return((KillIndices[Reg] != ~0u) && (DefIndices[Reg] == ~0u));
112 }
113 
114 
115 
116 AggressiveAntiDepBreaker::
117 AggressiveAntiDepBreaker(MachineFunction& MFi,
118                          const RegisterClassInfo &RCI,
119                          TargetSubtargetInfo::RegClassVector& CriticalPathRCs) :
120   AntiDepBreaker(), MF(MFi),
121   MRI(MF.getRegInfo()),
122   TII(MF.getTarget().getInstrInfo()),
123   TRI(MF.getTarget().getRegisterInfo()),
124   RegClassInfo(RCI),
125   State(NULL) {
126   /* Collect a bitset of all registers whose anti-dependencies are
127      broken only when the register is on the critical path. */
128   for (unsigned i = 0, e = CriticalPathRCs.size(); i < e; ++i) {
129     BitVector CPSet = TRI->getAllocatableSet(MF, CriticalPathRCs[i]);
130     if (CriticalPathSet.none())
131       CriticalPathSet = CPSet;
132     else
133       CriticalPathSet |= CPSet;
134    }
135 
136   DEBUG(dbgs() << "AntiDep Critical-Path Registers:");
137   DEBUG(for (int r = CriticalPathSet.find_first(); r != -1;
138              r = CriticalPathSet.find_next(r))
139           dbgs() << " " << TRI->getName(r));
140   DEBUG(dbgs() << '\n');
141 }
142 
143 AggressiveAntiDepBreaker::~AggressiveAntiDepBreaker() {
144   delete State;
145 }
146 
147 void AggressiveAntiDepBreaker::StartBlock(MachineBasicBlock *BB) {
148   assert(State == NULL);
149   State = new AggressiveAntiDepState(TRI->getNumRegs(), BB);
150 
151   bool IsReturnBlock = (!BB->empty() && BB->back().getDesc().isReturn());
152   std::vector<unsigned> &KillIndices = State->GetKillIndices();
153   std::vector<unsigned> &DefIndices = State->GetDefIndices();
154 
155   // Determine the live-out physregs for this block.
156   if (IsReturnBlock) {
157     // In a return block, examine the function live-out regs.
158     for (MachineRegisterInfo::liveout_iterator I = MRI.liveout_begin(),
159          E = MRI.liveout_end(); I != E; ++I) {
160       for (const unsigned *Alias = TRI->getOverlaps(*I);
161            unsigned Reg = *Alias; ++Alias) {
162         State->UnionGroups(Reg, 0);
163         KillIndices[Reg] = BB->size();
164         DefIndices[Reg] = ~0u;
165       }
166     }
167   }
168 
169   // In a non-return block, examine the live-in regs of all successors.
170   // Note a return block can have successors if the return instruction is
171   // predicated.
172   for (MachineBasicBlock::succ_iterator SI = BB->succ_begin(),
173          SE = BB->succ_end(); SI != SE; ++SI)
174     for (MachineBasicBlock::livein_iterator I = (*SI)->livein_begin(),
175            E = (*SI)->livein_end(); I != E; ++I) {
176       for (const unsigned *Alias = TRI->getOverlaps(*I);
177            unsigned Reg = *Alias; ++Alias) {
178         State->UnionGroups(Reg, 0);
179         KillIndices[Reg] = BB->size();
180         DefIndices[Reg] = ~0u;
181       }
182     }
183 
184   // Mark live-out callee-saved registers. In a return block this is
185   // all callee-saved registers. In a non-return block this is any
186   // callee-saved register that is not saved in the prolog.
187   const MachineFrameInfo *MFI = MF.getFrameInfo();
188   BitVector Pristine = MFI->getPristineRegs(BB);
189   for (const unsigned *I = TRI->getCalleeSavedRegs(); *I; ++I) {
190     unsigned Reg = *I;
191     if (!IsReturnBlock && !Pristine.test(Reg)) continue;
192     for (const unsigned *Alias = TRI->getOverlaps(Reg);
193          unsigned AliasReg = *Alias; ++Alias) {
194       State->UnionGroups(AliasReg, 0);
195       KillIndices[AliasReg] = BB->size();
196       DefIndices[AliasReg] = ~0u;
197     }
198   }
199 }
200 
201 void AggressiveAntiDepBreaker::FinishBlock() {
202   delete State;
203   State = NULL;
204 }
205 
206 void AggressiveAntiDepBreaker::Observe(MachineInstr *MI, unsigned Count,
207                                        unsigned InsertPosIndex) {
208   assert(Count < InsertPosIndex && "Instruction index out of expected range!");
209 
210   std::set<unsigned> PassthruRegs;
211   GetPassthruRegs(MI, PassthruRegs);
212   PrescanInstruction(MI, Count, PassthruRegs);
213   ScanInstruction(MI, Count);
214 
215   DEBUG(dbgs() << "Observe: ");
216   DEBUG(MI->dump());
217   DEBUG(dbgs() << "\tRegs:");
218 
219   std::vector<unsigned> &DefIndices = State->GetDefIndices();
220   for (unsigned Reg = 0; Reg != TRI->getNumRegs(); ++Reg) {
221     // If Reg is currently live, then mark that it can't be renamed as
222     // we don't know the extent of its live-range anymore (now that it
223     // has been scheduled). If it is not live but was defined in the
224     // previous schedule region, then set its def index to the most
225     // conservative location (i.e. the beginning of the previous
226     // schedule region).
227     if (State->IsLive(Reg)) {
228       DEBUG(if (State->GetGroup(Reg) != 0)
229               dbgs() << " " << TRI->getName(Reg) << "=g" <<
230                 State->GetGroup(Reg) << "->g0(region live-out)");
231       State->UnionGroups(Reg, 0);
232     } else if ((DefIndices[Reg] < InsertPosIndex)
233                && (DefIndices[Reg] >= Count)) {
234       DefIndices[Reg] = Count;
235     }
236   }
237   DEBUG(dbgs() << '\n');
238 }
239 
240 bool AggressiveAntiDepBreaker::IsImplicitDefUse(MachineInstr *MI,
241                                                 MachineOperand& MO)
242 {
243   if (!MO.isReg() || !MO.isImplicit())
244     return false;
245 
246   unsigned Reg = MO.getReg();
247   if (Reg == 0)
248     return false;
249 
250   MachineOperand *Op = NULL;
251   if (MO.isDef())
252     Op = MI->findRegisterUseOperand(Reg, true);
253   else
254     Op = MI->findRegisterDefOperand(Reg);
255 
256   return((Op != NULL) && Op->isImplicit());
257 }
258 
259 void AggressiveAntiDepBreaker::GetPassthruRegs(MachineInstr *MI,
260                                            std::set<unsigned>& PassthruRegs) {
261   for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
262     MachineOperand &MO = MI->getOperand(i);
263     if (!MO.isReg()) continue;
264     if ((MO.isDef() && MI->isRegTiedToUseOperand(i)) ||
265         IsImplicitDefUse(MI, MO)) {
266       const unsigned Reg = MO.getReg();
267       PassthruRegs.insert(Reg);
268       for (const unsigned *Subreg = TRI->getSubRegisters(Reg);
269            *Subreg; ++Subreg) {
270         PassthruRegs.insert(*Subreg);
271       }
272     }
273   }
274 }
275 
276 /// AntiDepEdges - Return in Edges the anti- and output- dependencies
277 /// in SU that we want to consider for breaking.
278 static void AntiDepEdges(const SUnit *SU, std::vector<const SDep*>& Edges) {
279   SmallSet<unsigned, 4> RegSet;
280   for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
281        P != PE; ++P) {
282     if ((P->getKind() == SDep::Anti) || (P->getKind() == SDep::Output)) {
283       unsigned Reg = P->getReg();
284       if (RegSet.count(Reg) == 0) {
285         Edges.push_back(&*P);
286         RegSet.insert(Reg);
287       }
288     }
289   }
290 }
291 
292 /// CriticalPathStep - Return the next SUnit after SU on the bottom-up
293 /// critical path.
294 static const SUnit *CriticalPathStep(const SUnit *SU) {
295   const SDep *Next = 0;
296   unsigned NextDepth = 0;
297   // Find the predecessor edge with the greatest depth.
298   if (SU != 0) {
299     for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
300          P != PE; ++P) {
301       const SUnit *PredSU = P->getSUnit();
302       unsigned PredLatency = P->getLatency();
303       unsigned PredTotalLatency = PredSU->getDepth() + PredLatency;
304       // In the case of a latency tie, prefer an anti-dependency edge over
305       // other types of edges.
306       if (NextDepth < PredTotalLatency ||
307           (NextDepth == PredTotalLatency && P->getKind() == SDep::Anti)) {
308         NextDepth = PredTotalLatency;
309         Next = &*P;
310       }
311     }
312   }
313 
314   return (Next) ? Next->getSUnit() : 0;
315 }
316 
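// HandleLastUse - If Reg (or any of its subregisters) is not currently live,
// record KillIdx as its kill index, mark its def index as undefined, drop its
// recorded references, and move it into a fresh group so that it can be
// renamed independently of its previous group.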
317 void AggressiveAntiDepBreaker::HandleLastUse(unsigned Reg, unsigned KillIdx,
318                                              const char *tag,
319                                              const char *header,
320                                              const char *footer) {
321   std::vector<unsigned> &KillIndices = State->GetKillIndices();
322   std::vector<unsigned> &DefIndices = State->GetDefIndices();
323   std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
324     RegRefs = State->GetRegRefs();
325 
326   if (!State->IsLive(Reg)) {
327     KillIndices[Reg] = KillIdx;
328     DefIndices[Reg] = ~0u;
329     RegRefs.erase(Reg);
330     State->LeaveGroup(Reg);
331     DEBUG(if (header != NULL) {
332         dbgs() << header << TRI->getName(Reg); header = NULL; });
333     DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << tag);
334   }
335   // Repeat for subregisters.
336   for (const unsigned *Subreg = TRI->getSubRegisters(Reg);
337        *Subreg; ++Subreg) {
338     unsigned SubregReg = *Subreg;
339     if (!State->IsLive(SubregReg)) {
340       KillIndices[SubregReg] = KillIdx;
341       DefIndices[SubregReg] = ~0u;
342       RegRefs.erase(SubregReg);
343       State->LeaveGroup(SubregReg);
344       DEBUG(if (header != NULL) {
345           dbgs() << header << TRI->getName(Reg); header = NULL; });
346       DEBUG(dbgs() << " " << TRI->getName(SubregReg) << "->g" <<
347             State->GetGroup(SubregReg) << tag);
348     }
349   }
350 
351   DEBUG(if ((header == NULL) && (footer != NULL)) dbgs() << footer);
352 }
353 
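// PrescanInstruction - Process the register defs of MI: simulate a last-use
// for dead defs, merge the groups of defs that must not be renamed (calls,
// special allocation requirements, predicated instructions) or that alias a
// live register, record each def operand in RegRefs, and update DefIndices.
// The uses of MI are handled afterwards by ScanInstruction.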
354 void AggressiveAntiDepBreaker::PrescanInstruction(MachineInstr *MI,
355                                                   unsigned Count,
356                                              std::set<unsigned>& PassthruRegs) {
357   std::vector<unsigned> &DefIndices = State->GetDefIndices();
358   std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
359     RegRefs = State->GetRegRefs();
360 
361   // Handle dead defs by simulating a last-use of the register just
362   // after the def. A dead def can occur because the def is truly
363   // dead, or because only a subregister is live at the def. If we
364   // don't do this the dead def will be incorrectly merged into the
365   // previous def.
366   for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
367     MachineOperand &MO = MI->getOperand(i);
368     if (!MO.isReg() || !MO.isDef()) continue;
369     unsigned Reg = MO.getReg();
370     if (Reg == 0) continue;
371 
372     HandleLastUse(Reg, Count + 1, "", "\tDead Def: ", "\n");
373   }
374 
375   DEBUG(dbgs() << "\tDef Groups:");
376   for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
377     MachineOperand &MO = MI->getOperand(i);
378     if (!MO.isReg() || !MO.isDef()) continue;
379     unsigned Reg = MO.getReg();
380     if (Reg == 0) continue;
381 
382     DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" << State->GetGroup(Reg));
383 
384     // If MI's defs have a special allocation requirement, don't allow
385     // any def registers to be changed. Also assume all registers
386     // defined in a call must not be changed (ABI).
387     if (MI->getDesc().isCall() || MI->getDesc().hasExtraDefRegAllocReq() ||
388         TII->isPredicated(MI)) {
389       DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
390       State->UnionGroups(Reg, 0);
391     }
392 
393     // Any aliases that are live at this point are completely or
394     // partially defined here, so group those aliases with Reg.
395     for (const unsigned *Alias = TRI->getAliasSet(Reg); *Alias; ++Alias) {
396       unsigned AliasReg = *Alias;
397       if (State->IsLive(AliasReg)) {
398         State->UnionGroups(Reg, AliasReg);
399         DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << "(via " <<
400               TRI->getName(AliasReg) << ")");
401       }
402     }
403 
404     // Note register reference...
405     const TargetRegisterClass *RC = NULL;
406     if (i < MI->getDesc().getNumOperands())
407       RC = TII->getRegClass(MI->getDesc(), i, TRI);
408     AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
409     RegRefs.insert(std::make_pair(Reg, RR));
410   }
411 
412   DEBUG(dbgs() << '\n');
413 
414   // Scan the register defs for this instruction and update
415   // live-ranges.
416   for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
417     MachineOperand &MO = MI->getOperand(i);
418     if (!MO.isReg() || !MO.isDef()) continue;
419     unsigned Reg = MO.getReg();
420     if (Reg == 0) continue;
421     // Ignore KILLs and passthru registers for liveness...
422     if (MI->isKill() || (PassthruRegs.count(Reg) != 0))
423       continue;
424 
425     // Update def for Reg and aliases.
426     for (const unsigned *Alias = TRI->getOverlaps(Reg);
427          unsigned AliasReg = *Alias; ++Alias)
428       DefIndices[AliasReg] = Count;
429   }
430 }
431 
432 void AggressiveAntiDepBreaker::ScanInstruction(MachineInstr *MI,
433                                                unsigned Count) {
434   DEBUG(dbgs() << "\tUse Groups:");
435   std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
436     RegRefs = State->GetRegRefs();
437 
438   // If MI's uses have a special allocation requirement, don't allow
439   // any use registers to be changed. Also assume all registers
440   // used in a call must not be changed (ABI).
441   // FIXME: The issue with predicated instructions is more complex. We are
442   // being conservative here because the kill markers cannot be trusted after
443   // if-conversion:
444   // %R6<def> = LDR %SP, %reg0, 92, pred:14, pred:%reg0; mem:LD4[FixedStack14]
445   // ...
446   // STR %R0, %R6<kill>, %reg0, 0, pred:0, pred:%CPSR; mem:ST4[%395]
447   // %R6<def> = LDR %SP, %reg0, 100, pred:0, pred:%CPSR; mem:LD4[FixedStack12]
448   // STR %R0, %R6<kill>, %reg0, 0, pred:14, pred:%reg0; mem:ST4[%396](align=8)
449   //
450   // The first R6 kill is not really a kill since it's killed by a predicated
451   // instruction which may not be executed. The second R6 def may or may not
452   // re-define R6 so it's not safe to change it since the last R6 use cannot be
453   // changed.
454   bool Special = MI->getDesc().isCall() ||
455     MI->getDesc().hasExtraSrcRegAllocReq() ||
456     TII->isPredicated(MI);
457 
458   // Scan the register uses for this instruction and update
459   // live-ranges, groups and RegRefs.
460   for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
461     MachineOperand &MO = MI->getOperand(i);
462     if (!MO.isReg() || !MO.isUse()) continue;
463     unsigned Reg = MO.getReg();
464     if (Reg == 0) continue;
465 
466     DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" <<
467           State->GetGroup(Reg));
468 
469     // It wasn't previously live but now it is; this is a kill. Forget
470     // the previous live-range information and start a new live-range
471     // for the register.
472     HandleLastUse(Reg, Count, "(last-use)");
473 
474     if (Special) {
475       DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
476       State->UnionGroups(Reg, 0);
477     }
478 
479     // Note register reference...
480     const TargetRegisterClass *RC = NULL;
481     if (i < MI->getDesc().getNumOperands())
482       RC = TII->getRegClass(MI->getDesc(), i, TRI);
483     AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
484     RegRefs.insert(std::make_pair(Reg, RR));
485   }
486 
487   DEBUG(dbgs() << '\n');
488 
489   // Form a group of all defs and uses of a KILL instruction to ensure
490   // that all registers are renamed as a group.
491   if (MI->isKill()) {
492     DEBUG(dbgs() << "\tKill Group:");
493 
494     unsigned FirstReg = 0;
495     for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
496       MachineOperand &MO = MI->getOperand(i);
497       if (!MO.isReg()) continue;
498       unsigned Reg = MO.getReg();
499       if (Reg == 0) continue;
500 
501       if (FirstReg != 0) {
502         DEBUG(dbgs() << "=" << TRI->getName(Reg));
503         State->UnionGroups(FirstReg, Reg);
504       } else {
505         DEBUG(dbgs() << " " << TRI->getName(Reg));
506         FirstReg = Reg;
507       }
508     }
509 
510     DEBUG(dbgs() << "->g" << State->GetGroup(FirstReg) << '\n');
511   }
512 }
513 
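// GetRenameRegisters - Compute the set of registers Reg may be renamed to:
// the intersection of the allocatable registers of the register class of
// every operand in RegRefs that refers to Reg (references whose register
// class is unknown are ignored).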
514 BitVector AggressiveAntiDepBreaker::GetRenameRegisters(unsigned Reg) {
515   BitVector BV(TRI->getNumRegs(), false);
516   bool first = true;
517 
518   // Check all references that need rewriting for Reg. For each, use
519   // the corresponding register class to narrow the set of registers
520   // that are appropriate for renaming.
521   std::pair<std::multimap<unsigned,
522                      AggressiveAntiDepState::RegisterReference>::iterator,
523             std::multimap<unsigned,
524                      AggressiveAntiDepState::RegisterReference>::iterator>
525     Range = State->GetRegRefs().equal_range(Reg);
526   for (std::multimap<unsigned,
527        AggressiveAntiDepState::RegisterReference>::iterator Q = Range.first,
528        QE = Range.second; Q != QE; ++Q) {
529     const TargetRegisterClass *RC = Q->second.RC;
530     if (RC == NULL) continue;
531 
532     BitVector RCBV = TRI->getAllocatableSet(MF, RC);
533     if (first) {
534       BV |= RCBV;
535       first = false;
536     } else {
537       BV &= RCBV;
538     }
539 
540     DEBUG(dbgs() << " " << RC->getName());
541   }
542 
543   return BV;
544 }
545 
546 bool AggressiveAntiDepBreaker::FindSuitableFreeRegisters(
547                                 unsigned AntiDepGroupIndex,
548                                 RenameOrderType& RenameOrder,
549                                 std::map<unsigned, unsigned> &RenameMap) {
550   std::vector<unsigned> &KillIndices = State->GetKillIndices();
551   std::vector<unsigned> &DefIndices = State->GetDefIndices();
552   std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
553     RegRefs = State->GetRegRefs();
554 
555   // Collect all referenced registers in the same group as
556   // AntiDepReg. These all need to be renamed together if we are to
557   // break the anti-dependence.
558   std::vector<unsigned> Regs;
559   State->GetGroupRegs(AntiDepGroupIndex, Regs, &RegRefs);
560   assert(Regs.size() > 0 && "Empty register group!");
561   if (Regs.size() == 0)
562     return false;
563 
564   // Find the "superest" register in the group. At the same time,
565   // collect the BitVector of registers that can be used to rename
566   // each register.
567   DEBUG(dbgs() << "\tRename Candidates for Group g" << AntiDepGroupIndex
568         << ":\n");
569   std::map<unsigned, BitVector> RenameRegisterMap;
570   unsigned SuperReg = 0;
571   for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
572     unsigned Reg = Regs[i];
573     if ((SuperReg == 0) || TRI->isSuperRegister(SuperReg, Reg))
574       SuperReg = Reg;
575 
576     // If Reg has any references, then collect possible rename regs
577     if (RegRefs.count(Reg) > 0) {
578       DEBUG(dbgs() << "\t\t" << TRI->getName(Reg) << ":");
579 
580       BitVector BV = GetRenameRegisters(Reg);
581       RenameRegisterMap.insert(std::pair<unsigned, BitVector>(Reg, BV));
582 
583       DEBUG(dbgs() << " ::");
584       DEBUG(for (int r = BV.find_first(); r != -1; r = BV.find_next(r))
585               dbgs() << " " << TRI->getName(r));
586       DEBUG(dbgs() << "\n");
587     }
588   }
589 
590   // All group registers should be subregisters of SuperReg.
591   for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
592     unsigned Reg = Regs[i];
593     if (Reg == SuperReg) continue;
594     bool IsSub = TRI->isSubRegister(SuperReg, Reg);
595     assert(IsSub && "Expecting group subregister");
596     if (!IsSub)
597       return false;
598   }
599 
600 #ifndef NDEBUG
601   // If DebugDiv > 0, only rename when (renamecnt % DebugDiv) == DebugMod
602   if (DebugDiv > 0) {
603     static int renamecnt = 0;
604     if (renamecnt++ % DebugDiv != DebugMod)
605       return false;
606 
607     dbgs() << "*** Performing rename " << TRI->getName(SuperReg) <<
608       " for debug ***\n";
609   }
610 #endif
611 
612   // Check each possible rename register for SuperReg in round-robin
613   // order. If that register is available, and the corresponding
614   // registers are available for the other group subregisters, then we
615   // can use those registers to rename.
616 
617   // FIXME: Using getMinimalPhysRegClass is very conservative. We should
618   // check every use of the register and find the largest register class
619   // that can be used in all of them.
620   const TargetRegisterClass *SuperRC =
621     TRI->getMinimalPhysRegClass(SuperReg, MVT::Other);
622 
623   ArrayRef<unsigned> Order = RegClassInfo.getOrder(SuperRC);
624   if (Order.empty()) {
625     DEBUG(dbgs() << "\tEmpty Super Regclass!!\n");
626     return false;
627   }
628 
629   DEBUG(dbgs() << "\tFind Registers:");
630 
631   if (RenameOrder.count(SuperRC) == 0)
632     RenameOrder.insert(RenameOrderType::value_type(SuperRC, Order.size()));
633 
634   unsigned OrigR = RenameOrder[SuperRC];
635   unsigned EndR = ((OrigR == Order.size()) ? 0 : OrigR);
636   unsigned R = OrigR;
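  // Walk the allocation order downward in round-robin fashion, starting just
  // below the position recorded for this register class by the previous
  // successful rename and wrapping around at the front, until the walk gets
  // back to its starting position.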
637   do {
638     if (R == 0) R = Order.size();
639     --R;
640     const unsigned NewSuperReg = Order[R];
641     // Don't consider non-allocatable registers
642     if (!RegClassInfo.isAllocatable(NewSuperReg)) continue;
643     // Don't replace a register with itself.
644     if (NewSuperReg == SuperReg) continue;
645 
646     DEBUG(dbgs() << " [" << TRI->getName(NewSuperReg) << ':');
647     RenameMap.clear();
648 
649     // For each referenced group register (which must be a SuperReg or
650     // a subregister of SuperReg), find the corresponding subregister
651     // of NewSuperReg and make sure it is free to be renamed.
652     for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
653       unsigned Reg = Regs[i];
654       unsigned NewReg = 0;
655       if (Reg == SuperReg) {
656         NewReg = NewSuperReg;
657       } else {
658         unsigned NewSubRegIdx = TRI->getSubRegIndex(SuperReg, Reg);
659         if (NewSubRegIdx != 0)
660           NewReg = TRI->getSubReg(NewSuperReg, NewSubRegIdx);
661       }
662 
663       DEBUG(dbgs() << " " << TRI->getName(NewReg));
664 
665       // Check if Reg can be renamed to NewReg.
666       BitVector BV = RenameRegisterMap[Reg];
667       if (!BV.test(NewReg)) {
668         DEBUG(dbgs() << "(no rename)");
669         goto next_super_reg;
670       }
671 
672       // If NewReg is dead and NewReg's most recent def is not before
673       // Reg's kill, it's safe to replace Reg with NewReg. We
674       // must also check all aliases of NewReg, because we can't define a
675       // register when any sub or super is already live.
676       if (State->IsLive(NewReg) || (KillIndices[Reg] > DefIndices[NewReg])) {
677         DEBUG(dbgs() << "(live)");
678         goto next_super_reg;
679       } else {
680         bool found = false;
681         for (const unsigned *Alias = TRI->getAliasSet(NewReg);
682              *Alias; ++Alias) {
683           unsigned AliasReg = *Alias;
684           if (State->IsLive(AliasReg) ||
685               (KillIndices[Reg] > DefIndices[AliasReg])) {
686             DEBUG(dbgs() << "(alias " << TRI->getName(AliasReg) << " live)");
687             found = true;
688             break;
689           }
690         }
691         if (found)
692           goto next_super_reg;
693       }
694 
695       // Record that 'Reg' can be renamed to 'NewReg'.
696       RenameMap.insert(std::pair<unsigned, unsigned>(Reg, NewReg));
697     }
698 
699     // If we fall-out here, then every register in the group can be
700     // renamed, as recorded in RenameMap.
701     RenameOrder.erase(SuperRC);
702     RenameOrder.insert(RenameOrderType::value_type(SuperRC, R));
703     DEBUG(dbgs() << "]\n");
704     return true;
705 
706   next_super_reg:
707     DEBUG(dbgs() << ']');
708   } while (R != EndR);
709 
710   DEBUG(dbgs() << '\n');
711 
712   // No registers are free and available!
713   return false;
714 }
715 
716 /// BreakAntiDependencies - Identify anti-dependencies within the
717 /// ScheduleDAG and break them by renaming registers.
718 ///
719 unsigned AggressiveAntiDepBreaker::BreakAntiDependencies(
720                               const std::vector<SUnit>& SUnits,
721                               MachineBasicBlock::iterator Begin,
722                               MachineBasicBlock::iterator End,
723                               unsigned InsertPosIndex,
724                               DbgValueVector &DbgValues) {
725 
726   std::vector<unsigned> &KillIndices = State->GetKillIndices();
727   std::vector<unsigned> &DefIndices = State->GetDefIndices();
728   std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
729     RegRefs = State->GetRegRefs();
730 
731   // The code below assumes that there is at least one instruction,
732   // so just duck out immediately if the block is empty.
733   if (SUnits.empty()) return 0;
734 
735   // For each regclass the next register to use for renaming.
736   RenameOrderType RenameOrder;
737 
738   // ...need a map from MI to SUnit.
739   std::map<MachineInstr *, const SUnit *> MISUnitMap;
740   for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
741     const SUnit *SU = &SUnits[i];
742     MISUnitMap.insert(std::pair<MachineInstr *, const SUnit *>(SU->getInstr(),
743                                                                SU));
744   }
745 
746   // Track progress along the critical path through the SUnit graph as
747   // we walk the instructions. This is needed for regclasses that only
748   // break critical-path anti-dependencies.
749   const SUnit *CriticalPathSU = 0;
750   MachineInstr *CriticalPathMI = 0;
751   if (CriticalPathSet.any()) {
752     for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
753       const SUnit *SU = &SUnits[i];
754       if (!CriticalPathSU ||
755           ((SU->getDepth() + SU->Latency) >
756            (CriticalPathSU->getDepth() + CriticalPathSU->Latency))) {
757         CriticalPathSU = SU;
758       }
759     }
760 
761     CriticalPathMI = CriticalPathSU->getInstr();
762   }
763 
764 #ifndef NDEBUG
765   DEBUG(dbgs() << "\n===== Aggressive anti-dependency breaking\n");
766   DEBUG(dbgs() << "Available regs:");
767   for (unsigned Reg = 0; Reg < TRI->getNumRegs(); ++Reg) {
768     if (!State->IsLive(Reg))
769       DEBUG(dbgs() << " " << TRI->getName(Reg));
770   }
771   DEBUG(dbgs() << '\n');
772 #endif
773 
774   // Attempt to break anti-dependence edges. Walk the instructions
775   // from the bottom up, tracking information about liveness as we go
776   // to help determine which registers are available.
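  // Because the walk is bottom-up, Count decreases as we move toward the top
  // of the block, so larger KillIndices/DefIndices values correspond to
  // points closer to the end of the block.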
777   unsigned Broken = 0;
778   unsigned Count = InsertPosIndex - 1;
779   for (MachineBasicBlock::iterator I = End, E = Begin;
780        I != E; --Count) {
781     MachineInstr *MI = --I;
782 
783     DEBUG(dbgs() << "Anti: ");
784     DEBUG(MI->dump());
785 
786     std::set<unsigned> PassthruRegs;
787     GetPassthruRegs(MI, PassthruRegs);
788 
789     // Process the defs in MI...
790     PrescanInstruction(MI, Count, PassthruRegs);
791 
792     // The dependence edges that represent anti- and output-
793     // dependencies that are candidates for breaking.
794     std::vector<const SDep *> Edges;
795     const SUnit *PathSU = MISUnitMap[MI];
796     AntiDepEdges(PathSU, Edges);
797 
798     // If MI is not on the critical path, then we don't rename
799     // registers in the CriticalPathSet.
800     BitVector *ExcludeRegs = NULL;
801     if (MI == CriticalPathMI) {
802       CriticalPathSU = CriticalPathStep(CriticalPathSU);
803       CriticalPathMI = (CriticalPathSU) ? CriticalPathSU->getInstr() : 0;
804     } else {
805       ExcludeRegs = &CriticalPathSet;
806     }
807 
808     // Ignore KILL instructions (they form a group in ScanInstruction
809     // but don't cause any anti-dependence breaking themselves)
810     if (!MI->isKill()) {
811       // Attempt to break each anti-dependency...
812       for (unsigned i = 0, e = Edges.size(); i != e; ++i) {
813         const SDep *Edge = Edges[i];
814         SUnit *NextSU = Edge->getSUnit();
815 
816         if ((Edge->getKind() != SDep::Anti) &&
817             (Edge->getKind() != SDep::Output)) continue;
818 
819         unsigned AntiDepReg = Edge->getReg();
820         DEBUG(dbgs() << "\tAntidep reg: " << TRI->getName(AntiDepReg));
821         assert(AntiDepReg != 0 && "Anti-dependence on reg0?");
822 
823         if (!RegClassInfo.isAllocatable(AntiDepReg)) {
824           // Don't break anti-dependencies on non-allocatable registers.
825           DEBUG(dbgs() << " (non-allocatable)\n");
826           continue;
827         } else if ((ExcludeRegs != NULL) && ExcludeRegs->test(AntiDepReg)) {
828           // Don't break anti-dependencies for critical path registers
829           // if not on the critical path
830           DEBUG(dbgs() << " (not critical-path)\n");
831           continue;
832         } else if (PassthruRegs.count(AntiDepReg) != 0) {
833           // If the anti-dep register liveness "passes-thru", then
834           // don't try to change it. It will be changed along with
835           // the use if required to break an earlier antidep.
836           DEBUG(dbgs() << " (passthru)\n");
837           continue;
838         } else {
839           // No anti-dep breaking for implicit deps
840           MachineOperand *AntiDepOp = MI->findRegisterDefOperand(AntiDepReg);
841           assert(AntiDepOp != NULL &&
842                  "Can't find index for defined register operand");
843           if ((AntiDepOp == NULL) || AntiDepOp->isImplicit()) {
844             DEBUG(dbgs() << " (implicit)\n");
845             continue;
846           }
847 
848           // If the SUnit has other dependencies on the SUnit that
849           // it anti-depends on, don't bother breaking the
850           // anti-dependency since those edges would prevent such
851           // units from being scheduled past each other
852           // regardless.
853           //
854           // Also, if there are dependencies on other SUnits with the
855           // same register as the anti-dependency, don't attempt to
856           // break it.
857           for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
858                  PE = PathSU->Preds.end(); P != PE; ++P) {
859             if (P->getSUnit() == NextSU ?
860                 (P->getKind() != SDep::Anti || P->getReg() != AntiDepReg) :
861                 (P->getKind() == SDep::Data && P->getReg() == AntiDepReg)) {
862               AntiDepReg = 0;
863               break;
864             }
865           }
866           for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
867                  PE = PathSU->Preds.end(); P != PE; ++P) {
868             if ((P->getSUnit() == NextSU) && (P->getKind() != SDep::Anti) &&
869                 (P->getKind() != SDep::Output)) {
870               DEBUG(dbgs() << " (real dependency)\n");
871               AntiDepReg = 0;
872               break;
873             } else if ((P->getSUnit() != NextSU) &&
874                        (P->getKind() == SDep::Data) &&
875                        (P->getReg() == AntiDepReg)) {
876               DEBUG(dbgs() << " (other dependency)\n");
877               AntiDepReg = 0;
878               break;
879             }
880           }
881 
882           if (AntiDepReg == 0) continue;
883         }
884 
885         assert(AntiDepReg != 0);
886         if (AntiDepReg == 0) continue;
887 
888         // Determine AntiDepReg's register group.
889         const unsigned GroupIndex = State->GetGroup(AntiDepReg);
890         if (GroupIndex == 0) {
891           DEBUG(dbgs() << " (zero group)\n");
892           continue;
893         }
894 
895         DEBUG(dbgs() << '\n');
896 
897         // Look for a suitable register to use to break the anti-dependence.
898         std::map<unsigned, unsigned> RenameMap;
899         if (FindSuitableFreeRegisters(GroupIndex, RenameOrder, RenameMap)) {
900           DEBUG(dbgs() << "\tBreaking anti-dependence edge on "
901                 << TRI->getName(AntiDepReg) << ":");
902 
903           // Handle each group register...
904           for (std::map<unsigned, unsigned>::iterator
905                  S = RenameMap.begin(), E = RenameMap.end(); S != E; ++S) {
906             unsigned CurrReg = S->first;
907             unsigned NewReg = S->second;
908 
909             DEBUG(dbgs() << " " << TRI->getName(CurrReg) << "->" <<
910                   TRI->getName(NewReg) << "(" <<
911                   RegRefs.count(CurrReg) << " refs)");
912 
913             // Update the references to the old register CurrReg to
914             // refer to the new register NewReg.
915             std::pair<std::multimap<unsigned,
916                            AggressiveAntiDepState::RegisterReference>::iterator,
917                       std::multimap<unsigned,
918                            AggressiveAntiDepState::RegisterReference>::iterator>
919               Range = RegRefs.equal_range(CurrReg);
920             for (std::multimap<unsigned,
921                  AggressiveAntiDepState::RegisterReference>::iterator
922                    Q = Range.first, QE = Range.second; Q != QE; ++Q) {
923               Q->second.Operand->setReg(NewReg);
924               // If the SU for the instruction being updated has debug
925               // information related to the anti-dependency register, make
926               // sure to update that as well.
927               const SUnit *SU = MISUnitMap[Q->second.Operand->getParent()];
928               if (!SU) continue;
929               for (DbgValueVector::iterator DVI = DbgValues.begin(),
930                      DVE = DbgValues.end(); DVI != DVE; ++DVI)
931                 if (DVI->second == Q->second.Operand->getParent())
932                   UpdateDbgValue(DVI->first, AntiDepReg, NewReg);
933             }
934 
935             // We just went back in time and modified history; the
936             // liveness information for CurrReg is now inconsistent. Set
937             // the state as if it were dead.
938             State->UnionGroups(NewReg, 0);
939             RegRefs.erase(NewReg);
940             DefIndices[NewReg] = DefIndices[CurrReg];
941             KillIndices[NewReg] = KillIndices[CurrReg];
942 
943             State->UnionGroups(CurrReg, 0);
944             RegRefs.erase(CurrReg);
945             DefIndices[CurrReg] = KillIndices[CurrReg];
946             KillIndices[CurrReg] = ~0u;
947             assert(((KillIndices[CurrReg] == ~0u) !=
948                     (DefIndices[CurrReg] == ~0u)) &&
949                    "Kill and Def maps aren't consistent for AntiDepReg!");
950           }
951 
952           ++Broken;
953           DEBUG(dbgs() << '\n');
954         }
955       }
956     }
957 
958     ScanInstruction(MI, Count);
959   }
960 
961   return Broken;
962 }
963