//===- lib/CodeGen/MachineTraceMetrics.cpp ----------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "machine-trace-metrics"
#include "MachineTraceMetrics.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineBranchProbabilityInfo.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/MC/MCInstrItineraries.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SparseSet.h"

using namespace llvm;

char MachineTraceMetrics::ID = 0;
char &llvm::MachineTraceMetricsID = MachineTraceMetrics::ID;

INITIALIZE_PASS_BEGIN(MachineTraceMetrics,
                      "machine-trace-metrics", "Machine Trace Metrics",
                      false, true)
INITIALIZE_PASS_DEPENDENCY(MachineBranchProbabilityInfo)
INITIALIZE_PASS_DEPENDENCY(MachineLoopInfo)
INITIALIZE_PASS_END(MachineTraceMetrics,
                    "machine-trace-metrics", "Machine Trace Metrics",
                    false, true)

MachineTraceMetrics::MachineTraceMetrics()
  : MachineFunctionPass(ID), MF(0), TII(0), TRI(0), MRI(0), Loops(0) {
  std::fill(Ensembles, array_endof(Ensembles), (Ensemble*)0);
}

void MachineTraceMetrics::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequired<MachineBranchProbabilityInfo>();
  AU.addRequired<MachineLoopInfo>();
  MachineFunctionPass::getAnalysisUsage(AU);
}

bool MachineTraceMetrics::runOnMachineFunction(MachineFunction &Func) {
  MF = &Func;
  TII = MF->getTarget().getInstrInfo();
  TRI = MF->getTarget().getRegisterInfo();
  ItinData = MF->getTarget().getInstrItineraryData();
  MRI = &MF->getRegInfo();
  Loops = &getAnalysis<MachineLoopInfo>();
  BlockInfo.resize(MF->getNumBlockIDs());
  return false;
}

void MachineTraceMetrics::releaseMemory() {
  MF = 0;
  BlockInfo.clear();
  for (unsigned i = 0; i != TS_NumStrategies; ++i) {
    delete Ensembles[i];
    Ensembles[i] = 0;
  }
}

//===----------------------------------------------------------------------===//
//                          Fixed block information
//===----------------------------------------------------------------------===//
//
// The number of instructions in a basic block and the CPU resources used by
// those instructions don't depend on any given trace strategy.

/// Compute the resource usage in basic block MBB.
const MachineTraceMetrics::FixedBlockInfo*
MachineTraceMetrics::getResources(const MachineBasicBlock *MBB) {
  assert(MBB && "No basic block");
  FixedBlockInfo *FBI = &BlockInfo[MBB->getNumber()];
  if (FBI->hasResources())
    return FBI;

  // Compute resource usage in the block.
  // FIXME: Compute per-functional unit counts.
  FBI->HasCalls = false;
  unsigned InstrCount = 0;
  for (MachineBasicBlock::const_iterator I = MBB->begin(), E = MBB->end();
       I != E; ++I) {
    const MachineInstr *MI = I;
    if (MI->isTransient())
      continue;
    ++InstrCount;
    if (MI->isCall())
      FBI->HasCalls = true;
  }
  FBI->InstrCount = InstrCount;
  return FBI;
}

//===----------------------------------------------------------------------===//
//                         Ensemble utility functions
//===----------------------------------------------------------------------===//

MachineTraceMetrics::Ensemble::Ensemble(MachineTraceMetrics *ct)
  : MTM(*ct) {
  BlockInfo.resize(MTM.BlockInfo.size());
}

// Virtual destructor serves as an anchor.
MachineTraceMetrics::Ensemble::~Ensemble() {}

const MachineLoop*
MachineTraceMetrics::Ensemble::getLoopFor(const MachineBasicBlock *MBB) const {
  return MTM.Loops->getLoopFor(MBB);
}

// Update resource-related information in the TraceBlockInfo for MBB.
// Only update resources related to the trace above MBB.
void MachineTraceMetrics::Ensemble::
computeDepthResources(const MachineBasicBlock *MBB) {
  TraceBlockInfo *TBI = &BlockInfo[MBB->getNumber()];

  // Compute resources from trace above. The top block is simple.
  if (!TBI->Pred) {
    TBI->InstrDepth = 0;
    TBI->Head = MBB->getNumber();
    return;
  }

  // Compute from the block above. A post-order traversal ensures the
  // predecessor is always computed first.
  TraceBlockInfo *PredTBI = &BlockInfo[TBI->Pred->getNumber()];
  assert(PredTBI->hasValidDepth() && "Trace above has not been computed yet");
  const FixedBlockInfo *PredFBI = MTM.getResources(TBI->Pred);
  TBI->InstrDepth = PredTBI->InstrDepth + PredFBI->InstrCount;
  TBI->Head = PredTBI->Head;
}

// Update resource-related information in the TraceBlockInfo for MBB.
// Only update resources related to the trace below MBB.
void MachineTraceMetrics::Ensemble::
computeHeightResources(const MachineBasicBlock *MBB) {
  TraceBlockInfo *TBI = &BlockInfo[MBB->getNumber()];

  // Compute resources for the current block.
  TBI->InstrHeight = MTM.getResources(MBB)->InstrCount;

  // The trace tail is done.
  if (!TBI->Succ) {
    TBI->Tail = MBB->getNumber();
    return;
  }

  // Compute from the block below. A post-order traversal ensures the
  // successor is always computed first.
  TraceBlockInfo *SuccTBI = &BlockInfo[TBI->Succ->getNumber()];
  assert(SuccTBI->hasValidHeight() && "Trace below has not been computed yet");
  TBI->InstrHeight += SuccTBI->InstrHeight;
  TBI->Tail = SuccTBI->Tail;
}

// Check if depth resources for MBB are valid and return the TBI.
// Return NULL if the resources have been invalidated.
const MachineTraceMetrics::TraceBlockInfo*
MachineTraceMetrics::Ensemble::
getDepthResources(const MachineBasicBlock *MBB) const {
  const TraceBlockInfo *TBI = &BlockInfo[MBB->getNumber()];
  return TBI->hasValidDepth() ? TBI : 0;
}

// Check if height resources for MBB are valid and return the TBI.
// Return NULL if the resources have been invalidated.
const MachineTraceMetrics::TraceBlockInfo*
MachineTraceMetrics::Ensemble::
getHeightResources(const MachineBasicBlock *MBB) const {
  const TraceBlockInfo *TBI = &BlockInfo[MBB->getNumber()];
  return TBI->hasValidHeight() ? TBI : 0;
}

//===----------------------------------------------------------------------===//
//                        Trace Selection Strategies
//===----------------------------------------------------------------------===//
//
// A trace selection strategy is implemented as a sub-class of Ensemble. The
// trace through a block B is computed by two DFS traversals of the CFG
// starting from B. One upwards, and one downwards. During the upwards DFS,
// pickTracePred() is called on the post-ordered blocks. During the downwards
// DFS, pickTraceSucc() is called on the post-ordered blocks.
//

// We never allow traces that leave loops, but we do allow traces to enter
// nested loops. We also never allow traces to contain back-edges.
//
// This means that a loop header can never appear above the center block of a
// trace, except as the trace head. Below the center block, loop exiting edges
// are banned.
//
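// For example (illustrative, block numbers made up): in the diamond
//
//        BB#0
//       /    \
//    BB#1    BB#2
//       \    /
//        BB#3
//
// a MinInstr trace centered on BB#3 could be BB#0 -> BB#1 -> BB#3 when BB#1
// contains fewer instructions than BB#2. If BB#3 also branched back to BB#0
// (a loop), the edge BB#3 -> BB#0 could never appear in any trace because it
// is a back-edge.
//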
// Return true if an edge from the From loop to the To loop is leaving a loop.
// Either of To and From can be null.
static bool isExitingLoop(const MachineLoop *From, const MachineLoop *To) {
  return From && !From->contains(To);
}

// MinInstrCountEnsemble - Pick the trace that executes the least number of
// instructions.
namespace {
class MinInstrCountEnsemble : public MachineTraceMetrics::Ensemble {
  const char *getName() const { return "MinInstr"; }
  const MachineBasicBlock *pickTracePred(const MachineBasicBlock*);
  const MachineBasicBlock *pickTraceSucc(const MachineBasicBlock*);

public:
  MinInstrCountEnsemble(MachineTraceMetrics *mtm)
    : MachineTraceMetrics::Ensemble(mtm) {}
};
}

// Select the preferred predecessor for MBB.
const MachineBasicBlock*
MinInstrCountEnsemble::pickTracePred(const MachineBasicBlock *MBB) {
  if (MBB->pred_empty())
    return 0;
  const MachineLoop *CurLoop = getLoopFor(MBB);
  // Don't leave loops, and never follow back-edges.
  if (CurLoop && MBB == CurLoop->getHeader())
    return 0;
  unsigned CurCount = MTM.getResources(MBB)->InstrCount;
  const MachineBasicBlock *Best = 0;
  unsigned BestDepth = 0;
  for (MachineBasicBlock::const_pred_iterator
       I = MBB->pred_begin(), E = MBB->pred_end(); I != E; ++I) {
    const MachineBasicBlock *Pred = *I;
    const MachineTraceMetrics::TraceBlockInfo *PredTBI =
      getDepthResources(Pred);
    // Ignore cycles that aren't natural loops.
    if (!PredTBI)
      continue;
    // Pick the predecessor that would give this block the smallest InstrDepth.
    unsigned Depth = PredTBI->InstrDepth + CurCount;
    if (!Best || Depth < BestDepth)
      Best = Pred, BestDepth = Depth;
  }
  return Best;
}

// Select the preferred successor for MBB.
const MachineBasicBlock*
MinInstrCountEnsemble::pickTraceSucc(const MachineBasicBlock *MBB) {
  if (MBB->succ_empty())
    return 0;
  const MachineLoop *CurLoop = getLoopFor(MBB);
  const MachineBasicBlock *Best = 0;
  unsigned BestHeight = 0;
  for (MachineBasicBlock::const_succ_iterator
       I = MBB->succ_begin(), E = MBB->succ_end(); I != E; ++I) {
    const MachineBasicBlock *Succ = *I;
    // Don't consider back-edges.
    if (CurLoop && Succ == CurLoop->getHeader())
      continue;
    // Don't consider successors exiting CurLoop.
    if (isExitingLoop(CurLoop, getLoopFor(Succ)))
      continue;
    const MachineTraceMetrics::TraceBlockInfo *SuccTBI =
      getHeightResources(Succ);
    // Ignore cycles that aren't natural loops.
    if (!SuccTBI)
      continue;
    // Pick the successor that would give this block the smallest InstrHeight.
    unsigned Height = SuccTBI->InstrHeight;
    if (!Best || Height < BestHeight)
      Best = Succ, BestHeight = Height;
  }
  return Best;
}

// Get an Ensemble sub-class for the requested trace strategy.
MachineTraceMetrics::Ensemble *
MachineTraceMetrics::getEnsemble(MachineTraceMetrics::Strategy strategy) {
  assert(strategy < TS_NumStrategies && "Invalid trace strategy enum");
  Ensemble *&E = Ensembles[strategy];
  if (E)
    return E;

  // Allocate new Ensemble on demand.
  switch (strategy) {
  case TS_MinInstrCount: return (E = new MinInstrCountEnsemble(this));
  default: llvm_unreachable("Invalid trace strategy enum");
  }
}
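
// Illustrative usage sketch (not part of this pass): a client pass that has
// added MachineTraceMetrics to its AnalysisUsage would typically do
//
//   MachineTraceMetrics &MTM = getAnalysis<MachineTraceMetrics>();
//   MachineTraceMetrics::Ensemble *MinInstr =
//     MTM.getEnsemble(MachineTraceMetrics::TS_MinInstrCount);
//   MachineTraceMetrics::Trace Trace = MinInstr->getTrace(MBB);
//   unsigned Slack = Trace.getInstrSlack(MI);
//
// and call MTM.invalidate(MBB) whenever it changes the CFG or the
// instructions in MBB.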

void MachineTraceMetrics::invalidate(const MachineBasicBlock *MBB) {
  DEBUG(dbgs() << "Invalidate traces through BB#" << MBB->getNumber() << '\n');
  BlockInfo[MBB->getNumber()].invalidate();
  for (unsigned i = 0; i != TS_NumStrategies; ++i)
    if (Ensembles[i])
      Ensembles[i]->invalidate(MBB);
}

void MachineTraceMetrics::verifyAnalysis() const {
  if (!MF)
    return;
#ifndef NDEBUG
  assert(BlockInfo.size() == MF->getNumBlockIDs() && "Outdated BlockInfo size");
  for (unsigned i = 0; i != TS_NumStrategies; ++i)
    if (Ensembles[i])
      Ensembles[i]->verify();
#endif
}

//===----------------------------------------------------------------------===//
//                               Trace building
//===----------------------------------------------------------------------===//
//
// Traces are built by two CFG traversals. To avoid recomputing too much, use a
// set abstraction that confines the search to the current loop, and doesn't
// revisit blocks.

namespace {
struct LoopBounds {
  MutableArrayRef<MachineTraceMetrics::TraceBlockInfo> Blocks;
  SmallPtrSet<const MachineBasicBlock*, 8> Visited;
  const MachineLoopInfo *Loops;
  bool Downward;
  LoopBounds(MutableArrayRef<MachineTraceMetrics::TraceBlockInfo> blocks,
             const MachineLoopInfo *loops)
    : Blocks(blocks), Loops(loops), Downward(false) {}
};
}

// Specialize po_iterator_storage in order to prune the post-order traversal so
// it is limited to the current loop and doesn't traverse the loop back edges.
namespace llvm {
template<>
class po_iterator_storage<LoopBounds, true> {
  LoopBounds &LB;
public:
  po_iterator_storage(LoopBounds &lb) : LB(lb) {}
  void finishPostorder(const MachineBasicBlock*) {}

  bool insertEdge(const MachineBasicBlock *From, const MachineBasicBlock *To) {
    // Skip already visited To blocks.
    MachineTraceMetrics::TraceBlockInfo &TBI = LB.Blocks[To->getNumber()];
    if (LB.Downward ? TBI.hasValidHeight() : TBI.hasValidDepth())
      return false;
    // From is null once when To is the trace center block.
    if (From) {
      if (const MachineLoop *FromLoop = LB.Loops->getLoopFor(From)) {
        // Don't follow backedges, don't leave FromLoop when going upwards.
        if ((LB.Downward ? To : From) == FromLoop->getHeader())
          return false;
        // Don't leave FromLoop.
        if (isExitingLoop(FromLoop, LB.Loops->getLoopFor(To)))
          return false;
      }
    }
    // To is a new block. Mark the block as visited in case the CFG has cycles
    // that MachineLoopInfo didn't recognize as a natural loop.
    return LB.Visited.insert(To);
  }
};
}
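
// With the specialization above, the ipo_ext_begin()/po_ext_begin() walks in
// computeTrace() below stay inside the current loop region and never follow
// loop back-edges, so each traversal only visits blocks that may legally
// belong to the trace.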

/// Compute the trace through MBB.
void MachineTraceMetrics::Ensemble::computeTrace(const MachineBasicBlock *MBB) {
  DEBUG(dbgs() << "Computing " << getName() << " trace through BB#"
               << MBB->getNumber() << '\n');
  // Set up loop bounds for the backwards post-order traversal.
  LoopBounds Bounds(BlockInfo, MTM.Loops);

  // Run an upwards post-order search for the trace start.
  Bounds.Downward = false;
  Bounds.Visited.clear();
  typedef ipo_ext_iterator<const MachineBasicBlock*, LoopBounds> UpwardPO;
  for (UpwardPO I = ipo_ext_begin(MBB, Bounds), E = ipo_ext_end(MBB, Bounds);
       I != E; ++I) {
    DEBUG(dbgs() << " pred for BB#" << I->getNumber() << ": ");
    TraceBlockInfo &TBI = BlockInfo[I->getNumber()];
    // All the predecessors have been visited, pick the preferred one.
    TBI.Pred = pickTracePred(*I);
    DEBUG({
      if (TBI.Pred)
        dbgs() << "BB#" << TBI.Pred->getNumber() << '\n';
      else
        dbgs() << "null\n";
    });
    // The trace leading to I is now known, compute the depth resources.
    computeDepthResources(*I);
  }

  // Run a downwards post-order search for the trace end.
  Bounds.Downward = true;
  Bounds.Visited.clear();
  typedef po_ext_iterator<const MachineBasicBlock*, LoopBounds> DownwardPO;
  for (DownwardPO I = po_ext_begin(MBB, Bounds), E = po_ext_end(MBB, Bounds);
       I != E; ++I) {
    DEBUG(dbgs() << " succ for BB#" << I->getNumber() << ": ");
    TraceBlockInfo &TBI = BlockInfo[I->getNumber()];
    // All the successors have been visited, pick the preferred one.
    TBI.Succ = pickTraceSucc(*I);
    DEBUG({
      if (TBI.Succ)
        dbgs() << "BB#" << TBI.Succ->getNumber() << '\n';
      else
        dbgs() << "null\n";
    });
    // The trace leaving I is now known, compute the height resources.
    computeHeightResources(*I);
  }
}

/// Invalidate traces through BadMBB.
void
MachineTraceMetrics::Ensemble::invalidate(const MachineBasicBlock *BadMBB) {
  SmallVector<const MachineBasicBlock*, 16> WorkList;
  TraceBlockInfo &BadTBI = BlockInfo[BadMBB->getNumber()];

  // Invalidate height resources of blocks above MBB.
  if (BadTBI.hasValidHeight()) {
    BadTBI.invalidateHeight();
    WorkList.push_back(BadMBB);
    do {
      const MachineBasicBlock *MBB = WorkList.pop_back_val();
      DEBUG(dbgs() << "Invalidate BB#" << MBB->getNumber() << ' ' << getName()
                   << " height.\n");
      // Find any MBB predecessors that have MBB as their preferred successor.
      // They are the only ones that need to be invalidated.
      for (MachineBasicBlock::const_pred_iterator
           I = MBB->pred_begin(), E = MBB->pred_end(); I != E; ++I) {
        TraceBlockInfo &TBI = BlockInfo[(*I)->getNumber()];
        if (!TBI.hasValidHeight())
          continue;
        if (TBI.Succ == MBB) {
          TBI.invalidateHeight();
          WorkList.push_back(*I);
          continue;
        }
        // Verify that TBI.Succ is actually a *I successor.
        assert((!TBI.Succ || (*I)->isSuccessor(TBI.Succ)) && "CFG changed");
      }
    } while (!WorkList.empty());
  }

  // Invalidate depth resources of blocks below MBB.
  if (BadTBI.hasValidDepth()) {
    BadTBI.invalidateDepth();
    WorkList.push_back(BadMBB);
    do {
      const MachineBasicBlock *MBB = WorkList.pop_back_val();
      DEBUG(dbgs() << "Invalidate BB#" << MBB->getNumber() << ' ' << getName()
                   << " depth.\n");
      // Find any MBB successors that have MBB as their preferred predecessor.
      // They are the only ones that need to be invalidated.
      for (MachineBasicBlock::const_succ_iterator
           I = MBB->succ_begin(), E = MBB->succ_end(); I != E; ++I) {
        TraceBlockInfo &TBI = BlockInfo[(*I)->getNumber()];
        if (!TBI.hasValidDepth())
          continue;
        if (TBI.Pred == MBB) {
          TBI.invalidateDepth();
          WorkList.push_back(*I);
          continue;
        }
        // Verify that TBI.Pred is actually a *I predecessor.
        assert((!TBI.Pred || (*I)->isPredecessor(TBI.Pred)) && "CFG changed");
      }
    } while (!WorkList.empty());
  }

  // Clear any per-instruction data. We only have to do this for BadMBB itself
  // because the instructions in that block may change. Other blocks may be
  // invalidated, but their instructions will stay the same, so there is no
  // need to erase the Cycle entries. They will be overwritten when we
  // recompute.
  for (MachineBasicBlock::const_iterator I = BadMBB->begin(), E = BadMBB->end();
       I != E; ++I)
    Cycles.erase(I);
}

void MachineTraceMetrics::Ensemble::verify() const {
#ifndef NDEBUG
  assert(BlockInfo.size() == MTM.MF->getNumBlockIDs() &&
         "Outdated BlockInfo size");
  for (unsigned Num = 0, e = BlockInfo.size(); Num != e; ++Num) {
    const TraceBlockInfo &TBI = BlockInfo[Num];
    if (TBI.hasValidDepth() && TBI.Pred) {
      const MachineBasicBlock *MBB = MTM.MF->getBlockNumbered(Num);
      assert(MBB->isPredecessor(TBI.Pred) && "CFG doesn't match trace");
      assert(BlockInfo[TBI.Pred->getNumber()].hasValidDepth() &&
             "Trace is broken, depth should have been invalidated.");
      const MachineLoop *Loop = getLoopFor(MBB);
      assert(!(Loop && MBB == Loop->getHeader()) && "Trace contains backedge");
    }
    if (TBI.hasValidHeight() && TBI.Succ) {
      const MachineBasicBlock *MBB = MTM.MF->getBlockNumbered(Num);
      assert(MBB->isSuccessor(TBI.Succ) && "CFG doesn't match trace");
      assert(BlockInfo[TBI.Succ->getNumber()].hasValidHeight() &&
             "Trace is broken, height should have been invalidated.");
      const MachineLoop *Loop = getLoopFor(MBB);
      const MachineLoop *SuccLoop = getLoopFor(TBI.Succ);
      assert(!(Loop && Loop == SuccLoop && TBI.Succ == Loop->getHeader()) &&
             "Trace contains backedge");
    }
  }
#endif
}

//===----------------------------------------------------------------------===//
//                             Data Dependencies
//===----------------------------------------------------------------------===//
//
// Compute the depth and height of each instruction based on data dependencies
// and instruction latencies. These cycle numbers assume that the CPU can issue
// an infinite number of instructions per cycle as long as their dependencies
// are ready.
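//
// As a made-up example with made-up latencies, consider the chain
//
//   %a = ADD %x, %y    (latency 1)
//   %b = MUL %a, %z    (latency 3)
//   %c = SUB %b, %w    (latency 1)
//
// The depths (earliest issue cycles) are 0, 1, and 4, the heights are 4, 3,
// and 0, and depth + height is 4 for every instruction in the chain, which is
// also the chain's critical path length.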

// A data dependency is represented as a defining MI and operand numbers on the
// defining and using MI.
namespace {
struct DataDep {
  const MachineInstr *DefMI;
  unsigned DefOp;
  unsigned UseOp;

  DataDep(const MachineInstr *DefMI, unsigned DefOp, unsigned UseOp)
    : DefMI(DefMI), DefOp(DefOp), UseOp(UseOp) {}

  /// Create a DataDep from an SSA form virtual register.
  DataDep(const MachineRegisterInfo *MRI, unsigned VirtReg, unsigned UseOp)
    : UseOp(UseOp) {
    assert(TargetRegisterInfo::isVirtualRegister(VirtReg));
    MachineRegisterInfo::def_iterator DefI = MRI->def_begin(VirtReg);
    assert(!DefI.atEnd() && "Register has no defs");
    DefMI = &*DefI;
    DefOp = DefI.getOperandNo();
    assert((++DefI).atEnd() && "Register has multiple defs");
  }
};
}

// Get the input data dependencies that must be ready before UseMI can issue.
// Return true if UseMI has any physreg operands.
static bool getDataDeps(const MachineInstr *UseMI,
                        SmallVectorImpl<DataDep> &Deps,
                        const MachineRegisterInfo *MRI) {
  bool HasPhysRegs = false;
  for (ConstMIOperands MO(UseMI); MO.isValid(); ++MO) {
    if (!MO->isReg())
      continue;
    unsigned Reg = MO->getReg();
    if (!Reg)
      continue;
    if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
      HasPhysRegs = true;
      continue;
    }
    // Collect virtual register reads.
    if (MO->readsReg())
      Deps.push_back(DataDep(MRI, Reg, MO.getOperandNo()));
  }
  return HasPhysRegs;
}

// Get the input data dependencies of a PHI instruction, using Pred as the
// preferred predecessor.
// This will add at most one dependency to Deps.
static void getPHIDeps(const MachineInstr *UseMI,
                       SmallVectorImpl<DataDep> &Deps,
                       const MachineBasicBlock *Pred,
                       const MachineRegisterInfo *MRI) {
  // No predecessor at the beginning of a trace. Ignore dependencies.
  if (!Pred)
    return;
  assert(UseMI->isPHI() && UseMI->getNumOperands() % 2 && "Bad PHI");
  for (unsigned i = 1; i != UseMI->getNumOperands(); i += 2) {
    if (UseMI->getOperand(i + 1).getMBB() == Pred) {
      unsigned Reg = UseMI->getOperand(i).getReg();
      Deps.push_back(DataDep(MRI, Reg, i));
      return;
    }
  }
}

// Keep track of physreg data dependencies by recording each live register
// unit. Associate each regunit with an instruction operand. Depending on the
// direction instructions are scanned, it could be the operand that defined the
// regunit, or the highest operand to read the regunit.
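// Concretely, updatePhysDepsDownwards() records the defining operand while
// computeInstrDepths() scans down the trace, and updatePhysDepsUpwards()
// records the highest reading operand while computeInstrHeights() scans up.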
namespace {
struct LiveRegUnit {
  unsigned RegUnit;
  unsigned Cycle;
  const MachineInstr *MI;
  unsigned Op;

  unsigned getSparseSetIndex() const { return RegUnit; }

  LiveRegUnit(unsigned RU) : RegUnit(RU), Cycle(0), MI(0), Op(0) {}
};
}

// Identify physreg dependencies for UseMI, and update the live regunit
// tracking set when scanning instructions downwards.
static void updatePhysDepsDownwards(const MachineInstr *UseMI,
                                    SmallVectorImpl<DataDep> &Deps,
                                    SparseSet<LiveRegUnit> &RegUnits,
                                    const TargetRegisterInfo *TRI) {
  SmallVector<unsigned, 8> Kills;
  SmallVector<unsigned, 8> LiveDefOps;

  for (ConstMIOperands MO(UseMI); MO.isValid(); ++MO) {
    if (!MO->isReg())
      continue;
    unsigned Reg = MO->getReg();
    if (!TargetRegisterInfo::isPhysicalRegister(Reg))
      continue;
    // Track live defs and kills for updating RegUnits.
    if (MO->isDef()) {
      if (MO->isDead())
        Kills.push_back(Reg);
      else
        LiveDefOps.push_back(MO.getOperandNo());
    } else if (MO->isKill())
      Kills.push_back(Reg);
    // Identify dependencies.
    if (!MO->readsReg())
      continue;
    for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
      SparseSet<LiveRegUnit>::iterator I = RegUnits.find(*Units);
      if (I == RegUnits.end())
        continue;
      Deps.push_back(DataDep(I->MI, I->Op, MO.getOperandNo()));
      break;
    }
  }

  // Update RegUnits to reflect live registers after UseMI.
  // First kills.
  for (unsigned i = 0, e = Kills.size(); i != e; ++i)
    for (MCRegUnitIterator Units(Kills[i], TRI); Units.isValid(); ++Units)
      RegUnits.erase(*Units);

  // Second, live defs.
  for (unsigned i = 0, e = LiveDefOps.size(); i != e; ++i) {
    unsigned DefOp = LiveDefOps[i];
    for (MCRegUnitIterator Units(UseMI->getOperand(DefOp).getReg(), TRI);
         Units.isValid(); ++Units) {
      LiveRegUnit &LRU = RegUnits[*Units];
      LRU.MI = UseMI;
      LRU.Op = DefOp;
    }
  }
}

/// The length of the critical path through a trace is the maximum of two path
/// lengths:
///
/// 1. The maximum height+depth over all instructions in the trace center block.
///
/// 2. The longest cross-block dependency chain. For small blocks, it is
///    possible that the critical path through the trace doesn't include any
///    instructions in the block.
///
/// This function computes the second number from the live-in list of the
/// center block.
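///
/// For example (illustrative numbers): if a live-in virtual register of the
/// center block has Height 7 and its defining instruction higher up in the
/// same trace has Depth 2, the dependency chain through that register is
/// 2 + 7 = 9 cycles long.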
unsigned MachineTraceMetrics::Ensemble::
computeCrossBlockCriticalPath(const TraceBlockInfo &TBI) {
  assert(TBI.HasValidInstrDepths && "Missing depth info");
  assert(TBI.HasValidInstrHeights && "Missing height info");
  unsigned MaxLen = 0;
  for (unsigned i = 0, e = TBI.LiveIns.size(); i != e; ++i) {
    const LiveInReg &LIR = TBI.LiveIns[i];
    if (!TargetRegisterInfo::isVirtualRegister(LIR.Reg))
      continue;
    const MachineInstr *DefMI = MTM.MRI->getVRegDef(LIR.Reg);
    // Ignore dependencies outside the current trace.
    const TraceBlockInfo &DefTBI = BlockInfo[DefMI->getParent()->getNumber()];
    if (!DefTBI.hasValidDepth() || DefTBI.Head != TBI.Head)
      continue;
    unsigned Len = LIR.Height + Cycles[DefMI].Depth;
    MaxLen = std::max(MaxLen, Len);
  }
  return MaxLen;
}

/// Compute instruction depths for all instructions above or in MBB in its
/// trace. This assumes that the trace through MBB has already been computed.
void MachineTraceMetrics::Ensemble::
computeInstrDepths(const MachineBasicBlock *MBB) {
  // The top of the trace may already be computed, and HasValidInstrDepths
  // implies Head->HasValidInstrDepths, so we only need to start from the first
  // block in the trace that needs to be recomputed.
  SmallVector<const MachineBasicBlock*, 8> Stack;
  do {
    TraceBlockInfo &TBI = BlockInfo[MBB->getNumber()];
    assert(TBI.hasValidDepth() && "Incomplete trace");
    if (TBI.HasValidInstrDepths)
      break;
    Stack.push_back(MBB);
    MBB = TBI.Pred;
  } while (MBB);

  // FIXME: If MBB is non-null at this point, it is the last pre-computed block
  // in the trace. We should track any live-out physregs that were defined in
  // the trace. This is quite rare in SSA form, typically created by CSE
  // hoisting a compare.
  SparseSet<LiveRegUnit> RegUnits;
  RegUnits.setUniverse(MTM.TRI->getNumRegUnits());

  // Go through trace blocks in top-down order, stopping after the center block.
  SmallVector<DataDep, 8> Deps;
  while (!Stack.empty()) {
    MBB = Stack.pop_back_val();
    DEBUG(dbgs() << "Depths for BB#" << MBB->getNumber() << ":\n");
    TraceBlockInfo &TBI = BlockInfo[MBB->getNumber()];
    TBI.HasValidInstrDepths = true;
    TBI.CriticalPath = 0;

    // Also compute the critical path length through MBB when possible.
    if (TBI.HasValidInstrHeights)
      TBI.CriticalPath = computeCrossBlockCriticalPath(TBI);

    for (MachineBasicBlock::const_iterator I = MBB->begin(), E = MBB->end();
         I != E; ++I) {
      const MachineInstr *UseMI = I;

      // Collect all data dependencies.
      Deps.clear();
      if (UseMI->isPHI())
        getPHIDeps(UseMI, Deps, TBI.Pred, MTM.MRI);
      else if (getDataDeps(UseMI, Deps, MTM.MRI))
        updatePhysDepsDownwards(UseMI, Deps, RegUnits, MTM.TRI);

      // Filter and process dependencies, computing the earliest issue cycle.
      unsigned Cycle = 0;
      for (unsigned i = 0, e = Deps.size(); i != e; ++i) {
        const DataDep &Dep = Deps[i];
        const TraceBlockInfo &DepTBI =
          BlockInfo[Dep.DefMI->getParent()->getNumber()];
        // Ignore dependencies from outside the current trace.
        if (!DepTBI.hasValidDepth() || DepTBI.Head != TBI.Head)
          continue;
        assert(DepTBI.HasValidInstrDepths && "Inconsistent dependency");
        unsigned DepCycle = Cycles.lookup(Dep.DefMI).Depth;
        // Add latency if DefMI is a real instruction. Transients get latency 0.
        if (!Dep.DefMI->isTransient())
          DepCycle += MTM.TII->computeOperandLatency(MTM.ItinData,
                                                     Dep.DefMI, Dep.DefOp,
                                                     UseMI, Dep.UseOp,
                                                     /* FindMin = */ false);
        Cycle = std::max(Cycle, DepCycle);
      }
      // Remember the instruction depth.
      InstrCycles &MICycles = Cycles[UseMI];
      MICycles.Depth = Cycle;

      if (!TBI.HasValidInstrHeights) {
        DEBUG(dbgs() << Cycle << '\t' << *UseMI);
        continue;
      }
      // Update critical path length.
      TBI.CriticalPath = std::max(TBI.CriticalPath, Cycle + MICycles.Height);
      DEBUG(dbgs() << TBI.CriticalPath << '\t' << Cycle << '\t' << *UseMI);
    }
  }
}

// Identify physreg dependencies for MI when scanning instructions upwards.
// Return the issue height of MI after considering any live regunits.
// The Height argument is the issue height computed from virtual register
// dependencies alone.
static unsigned updatePhysDepsUpwards(const MachineInstr *MI, unsigned Height,
                                      SparseSet<LiveRegUnit> &RegUnits,
                                      const InstrItineraryData *ItinData,
                                      const TargetInstrInfo *TII,
                                      const TargetRegisterInfo *TRI) {
  SmallVector<unsigned, 8> ReadOps;
  for (ConstMIOperands MO(MI); MO.isValid(); ++MO) {
    if (!MO->isReg())
      continue;
    unsigned Reg = MO->getReg();
    if (!TargetRegisterInfo::isPhysicalRegister(Reg))
      continue;
    if (MO->readsReg())
      ReadOps.push_back(MO.getOperandNo());
    if (!MO->isDef())
      continue;
    // This is a def of Reg. Remove corresponding entries from RegUnits, and
    // update MI Height to consider the physreg dependencies.
    for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
      SparseSet<LiveRegUnit>::iterator I = RegUnits.find(*Units);
      if (I == RegUnits.end())
        continue;
      unsigned DepHeight = I->Cycle;
      if (!MI->isTransient()) {
        // We may not know the UseMI of this dependency, if it came from the
        // live-in list.
        if (I->MI)
          DepHeight += TII->computeOperandLatency(ItinData,
                                                  MI, MO.getOperandNo(),
                                                  I->MI, I->Op);
        else
          // No UseMI. Just use the MI latency instead.
          DepHeight += TII->getInstrLatency(ItinData, MI);
      }
      Height = std::max(Height, DepHeight);
      // This regunit is dead above MI.
      RegUnits.erase(I);
    }
  }

  // Now we know the height of MI. Update any regunits read.
  for (unsigned i = 0, e = ReadOps.size(); i != e; ++i) {
    unsigned Reg = MI->getOperand(ReadOps[i]).getReg();
    for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
      LiveRegUnit &LRU = RegUnits[*Units];
      // Set the height to the highest reader of the unit.
      if (LRU.Cycle <= Height && LRU.MI != MI) {
        LRU.Cycle = Height;
        LRU.MI = MI;
        LRU.Op = ReadOps[i];
      }
    }
  }

  return Height;
}


typedef DenseMap<const MachineInstr *, unsigned> MIHeightMap;

// Push the height of DefMI upwards if required to match UseMI.
// Return true if this is the first time DefMI was seen.
static bool pushDepHeight(const DataDep &Dep,
                          const MachineInstr *UseMI, unsigned UseHeight,
                          MIHeightMap &Heights,
                          const InstrItineraryData *ItinData,
                          const TargetInstrInfo *TII) {
  // Adjust height by Dep.DefMI latency.
  if (!Dep.DefMI->isTransient())
    UseHeight += TII->computeOperandLatency(ItinData, Dep.DefMI, Dep.DefOp,
                                            UseMI, Dep.UseOp);

  // Update Heights[DefMI] to be the maximum height seen.
  MIHeightMap::iterator I;
  bool New;
  tie(I, New) = Heights.insert(std::make_pair(Dep.DefMI, UseHeight));
  if (New)
    return true;

  // DefMI has been pushed before. Give it the max height.
  if (I->second < UseHeight)
    I->second = UseHeight;
  return false;
}

/// Assuming that DefMI was used by Trace.back(), add it to the live-in lists
/// of all the blocks in Trace. Stop when reaching the block that contains
/// DefMI.
void MachineTraceMetrics::Ensemble::
addLiveIns(const MachineInstr *DefMI,
           ArrayRef<const MachineBasicBlock*> Trace) {
  assert(!Trace.empty() && "Trace should contain at least one block");
  unsigned Reg = DefMI->getOperand(0).getReg();
  assert(TargetRegisterInfo::isVirtualRegister(Reg));
  const MachineBasicBlock *DefMBB = DefMI->getParent();

  // Reg is live-in to all blocks in Trace that follow DefMBB.
  for (unsigned i = Trace.size(); i; --i) {
    const MachineBasicBlock *MBB = Trace[i-1];
    if (MBB == DefMBB)
      return;
    TraceBlockInfo &TBI = BlockInfo[MBB->getNumber()];
    // Just add the register. The height will be updated later.
    TBI.LiveIns.push_back(Reg);
  }
}

/// Compute instruction heights in the trace through MBB. This updates MBB and
/// the blocks below it in the trace. It is assumed that the trace has already
/// been computed.
void MachineTraceMetrics::Ensemble::
computeInstrHeights(const MachineBasicBlock *MBB) {
  // The bottom of the trace may already be computed.
  // Find the blocks that need updating.
  SmallVector<const MachineBasicBlock*, 8> Stack;
  do {
    TraceBlockInfo &TBI = BlockInfo[MBB->getNumber()];
    assert(TBI.hasValidHeight() && "Incomplete trace");
    if (TBI.HasValidInstrHeights)
      break;
    Stack.push_back(MBB);
    TBI.LiveIns.clear();
    MBB = TBI.Succ;
  } while (MBB);

  // As we move upwards in the trace, keep track of instructions that are
  // required by deeper trace instructions. Map MI -> height required so far.
  MIHeightMap Heights;

  // For physregs, the def isn't known when we see the use.
  // Instead, keep track of the highest use of each regunit.
  SparseSet<LiveRegUnit> RegUnits;
  RegUnits.setUniverse(MTM.TRI->getNumRegUnits());

  // If the bottom of the trace was already precomputed, initialize heights
  // from its live-in list.
  // MBB is the highest precomputed block in the trace.
  if (MBB) {
    TraceBlockInfo &TBI = BlockInfo[MBB->getNumber()];
    for (unsigned i = 0, e = TBI.LiveIns.size(); i != e; ++i) {
      LiveInReg LI = TBI.LiveIns[i];
      if (TargetRegisterInfo::isVirtualRegister(LI.Reg)) {
        // For virtual registers, the def latency is included.
        unsigned &Height = Heights[MTM.MRI->getVRegDef(LI.Reg)];
        if (Height < LI.Height)
          Height = LI.Height;
      } else {
        // For register units, the def latency is not included because we don't
        // know the def yet.
        RegUnits[LI.Reg].Cycle = LI.Height;
      }
    }
  }

  // Go through the trace blocks in bottom-up order.
  SmallVector<DataDep, 8> Deps;
  for (; !Stack.empty(); Stack.pop_back()) {
    MBB = Stack.back();
    DEBUG(dbgs() << "Heights for BB#" << MBB->getNumber() << ":\n");
    TraceBlockInfo &TBI = BlockInfo[MBB->getNumber()];
    TBI.HasValidInstrHeights = true;
    TBI.CriticalPath = 0;

    // Get dependencies from PHIs in the trace successor.
    const MachineBasicBlock *Succ = TBI.Succ;
    // If MBB is the last block in the trace, and it has a back-edge to the
    // loop header, get loop-carried dependencies from PHIs in the header. For
    // that purpose, pretend that all the loop header PHIs have height 0.
    if (!Succ)
      if (const MachineLoop *Loop = getLoopFor(MBB))
        if (MBB->isSuccessor(Loop->getHeader()))
          Succ = Loop->getHeader();

    if (Succ) {
      for (MachineBasicBlock::const_iterator I = Succ->begin(), E = Succ->end();
           I != E && I->isPHI(); ++I) {
        const MachineInstr *PHI = I;
        Deps.clear();
        getPHIDeps(PHI, Deps, MBB, MTM.MRI);
        if (!Deps.empty()) {
          // Loop header PHI heights are all 0.
          unsigned Height = TBI.Succ ? Cycles.lookup(PHI).Height : 0;
          DEBUG(dbgs() << "pred\t" << Height << '\t' << *PHI);
          if (pushDepHeight(Deps.front(), PHI, Height,
                            Heights, MTM.ItinData, MTM.TII))
            addLiveIns(Deps.front().DefMI, Stack);
        }
      }
    }

    // Go through the block backwards.
    for (MachineBasicBlock::const_iterator BI = MBB->end(), BB = MBB->begin();
         BI != BB;) {
      const MachineInstr *MI = --BI;

      // Find the MI height as determined by virtual register uses in the
      // trace below.
      unsigned Cycle = 0;
      MIHeightMap::iterator HeightI = Heights.find(MI);
      if (HeightI != Heights.end()) {
        Cycle = HeightI->second;
        // We won't be seeing any more MI uses.
        Heights.erase(HeightI);
      }

      // Don't process PHI deps. They depend on the specific predecessor, and
      // we'll get them when visiting the predecessor.
      Deps.clear();
      bool HasPhysRegs = !MI->isPHI() && getDataDeps(MI, Deps, MTM.MRI);

      // There may also be regunit dependencies to include in the height.
      if (HasPhysRegs)
        Cycle = updatePhysDepsUpwards(MI, Cycle, RegUnits,
                                      MTM.ItinData, MTM.TII, MTM.TRI);

      // Update the required height of any virtual registers read by MI.
      for (unsigned i = 0, e = Deps.size(); i != e; ++i)
        if (pushDepHeight(Deps[i], MI, Cycle, Heights, MTM.ItinData, MTM.TII))
          addLiveIns(Deps[i].DefMI, Stack);

      InstrCycles &MICycles = Cycles[MI];
      MICycles.Height = Cycle;
      if (!TBI.HasValidInstrDepths) {
        DEBUG(dbgs() << Cycle << '\t' << *MI);
        continue;
      }
      // Update critical path length.
      TBI.CriticalPath = std::max(TBI.CriticalPath, Cycle + MICycles.Depth);
      DEBUG(dbgs() << TBI.CriticalPath << '\t' << Cycle << '\t' << *MI);
    }

    // Update virtual live-in heights. They were added by addLiveIns() with a 0
    // height because the final height isn't known until now.
    DEBUG(dbgs() << "BB#" << MBB->getNumber() << " Live-ins:");
    for (unsigned i = 0, e = TBI.LiveIns.size(); i != e; ++i) {
      LiveInReg &LIR = TBI.LiveIns[i];
      const MachineInstr *DefMI = MTM.MRI->getVRegDef(LIR.Reg);
      LIR.Height = Heights.lookup(DefMI);
      DEBUG(dbgs() << ' ' << PrintReg(LIR.Reg) << '@' << LIR.Height);
    }

    // Transfer the live regunits to the live-in list.
    for (SparseSet<LiveRegUnit>::const_iterator
         RI = RegUnits.begin(), RE = RegUnits.end(); RI != RE; ++RI) {
      TBI.LiveIns.push_back(LiveInReg(RI->RegUnit, RI->Cycle));
      DEBUG(dbgs() << ' ' << PrintRegUnit(RI->RegUnit, MTM.TRI)
                   << '@' << RI->Cycle);
    }
    DEBUG(dbgs() << '\n');

    if (!TBI.HasValidInstrDepths)
      continue;
    // Add live-ins to the critical path length.
    TBI.CriticalPath = std::max(TBI.CriticalPath,
                                computeCrossBlockCriticalPath(TBI));
    DEBUG(dbgs() << "Critical path: " << TBI.CriticalPath << '\n');
  }
}

MachineTraceMetrics::Trace
MachineTraceMetrics::Ensemble::getTrace(const MachineBasicBlock *MBB) {
  // FIXME: Check cache tags, recompute as needed.
  computeTrace(MBB);
  computeInstrDepths(MBB);
  computeInstrHeights(MBB);
  return Trace(*this, BlockInfo[MBB->getNumber()]);
}

unsigned
MachineTraceMetrics::Trace::getInstrSlack(const MachineInstr *MI) const {
  assert(MI && "Not an instruction.");
  assert(getBlockNum() == unsigned(MI->getParent()->getNumber()) &&
         "MI must be in the trace center block");
  InstrCycles Cyc = getInstrCycles(MI);
  return getCriticalPath() - (Cyc.Depth + Cyc.Height);
}

unsigned
MachineTraceMetrics::Trace::getPHIDepth(const MachineInstr *PHI) const {
  const MachineBasicBlock *MBB = TE.MTM.MF->getBlockNumbered(getBlockNum());
  SmallVector<DataDep, 1> Deps;
  getPHIDeps(PHI, Deps, MBB, TE.MTM.MRI);
  assert(Deps.size() == 1 && "PHI doesn't have MBB as a predecessor");
  DataDep &Dep = Deps.front();
  unsigned DepCycle = getInstrCycles(Dep.DefMI).Depth;
  // Add latency if DefMI is a real instruction. Transients get latency 0.
  if (!Dep.DefMI->isTransient())
    DepCycle += TE.MTM.TII->computeOperandLatency(TE.MTM.ItinData,
                                                  Dep.DefMI, Dep.DefOp,
                                                  PHI, Dep.UseOp,
                                                  /* FindMin = */ false);
  return DepCycle;
}

unsigned MachineTraceMetrics::Trace::getResourceDepth(bool Bottom) const {
  // For now, we compute the resource depth from instruction count / issue
  // width. Eventually, we should compute resource depth per functional unit
  // and return the max.
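  // For example (illustrative numbers): 40 instructions above the center
  // block on a machine with IssueWidth 2 give a resource depth of 20 cycles.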
  unsigned Instrs = TBI.InstrDepth;
  if (Bottom)
    Instrs += TE.MTM.BlockInfo[getBlockNum()].InstrCount;
  if (const MCSchedModel *Model = TE.MTM.ItinData->SchedModel)
    if (Model->IssueWidth != 0)
      return Instrs / Model->IssueWidth;
  // Assume issue width 1 without a schedule model.
  return Instrs;
}

unsigned MachineTraceMetrics::Trace::
getResourceLength(ArrayRef<const MachineBasicBlock*> Extrablocks) const {
  unsigned Instrs = TBI.InstrDepth + TBI.InstrHeight;
  for (unsigned i = 0, e = Extrablocks.size(); i != e; ++i)
    Instrs += TE.MTM.getResources(Extrablocks[i])->InstrCount;
  if (const MCSchedModel *Model = TE.MTM.ItinData->SchedModel)
    if (Model->IssueWidth != 0)
      return Instrs / Model->IssueWidth;
  // Assume issue width 1 without a schedule model.
  return Instrs;
}

void MachineTraceMetrics::Ensemble::print(raw_ostream &OS) const {
  OS << getName() << " ensemble:\n";
  for (unsigned i = 0, e = BlockInfo.size(); i != e; ++i) {
    OS << " BB#" << i << '\t';
    BlockInfo[i].print(OS);
    OS << '\n';
  }
}

void MachineTraceMetrics::TraceBlockInfo::print(raw_ostream &OS) const {
  if (hasValidDepth()) {
    OS << "depth=" << InstrDepth;
    if (Pred)
      OS << " pred=BB#" << Pred->getNumber();
    else
      OS << " pred=null";
    OS << " head=BB#" << Head;
    if (HasValidInstrDepths)
      OS << " +instrs";
  } else
    OS << "depth invalid";
  OS << ", ";
  if (hasValidHeight()) {
    OS << "height=" << InstrHeight;
    if (Succ)
      OS << " succ=BB#" << Succ->getNumber();
    else
      OS << " succ=null";
    OS << " tail=BB#" << Tail;
    if (HasValidInstrHeights)
      OS << " +instrs";
  } else
    OS << "height invalid";
  if (HasValidInstrDepths && HasValidInstrHeights)
    OS << ", crit=" << CriticalPath;
}

void MachineTraceMetrics::Trace::print(raw_ostream &OS) const {
  unsigned MBBNum = &TBI - &TE.BlockInfo[0];

  OS << TE.getName() << " trace BB#" << TBI.Head << " --> BB#" << MBBNum
     << " --> BB#" << TBI.Tail << ':';
  if (TBI.hasValidHeight() && TBI.hasValidDepth())
    OS << ' ' << getInstrCount() << " instrs.";
  if (TBI.HasValidInstrDepths && TBI.HasValidInstrHeights)
    OS << ' ' << TBI.CriticalPath << " cycles.";

  const MachineTraceMetrics::TraceBlockInfo *Block = &TBI;
  OS << "\nBB#" << MBBNum;
  while (Block->hasValidDepth() && Block->Pred) {
    unsigned Num = Block->Pred->getNumber();
    OS << " <- BB#" << Num;
    Block = &TE.BlockInfo[Num];
  }

  Block = &TBI;
  OS << "\n ";
  while (Block->hasValidHeight() && Block->Succ) {
    unsigned Num = Block->Succ->getNumber();
    OS << " -> BB#" << Num;
    Block = &TE.BlockInfo[Num];
  }
  OS << '\n';
}