/external/swiftshader/third_party/llvm-7.0/llvm/lib/Target/AMDGPU/ |
D | GCNRegPressure.cpp |
    357  NextMI = &MI;  in reset()
    358  NextMI = skipDebugInstructionsForward(NextMI, MBBEnd);  in reset()
    359  if (NextMI == MBBEnd)  in reset()
    361  GCNRPTracker::reset(*NextMI, LiveRegsCopy, false);  in reset()
    368  NextMI = skipDebugInstructionsForward(NextMI, MBBEnd);  in advanceBeforeNext()
    369  if (NextMI == MBBEnd)  in advanceBeforeNext()
    372  SlotIndex SI = LIS.getInstructionIndex(*NextMI).getBaseIndex();  in advanceBeforeNext()
    401  LastTrackedMI = &*NextMI++;  in advanceToNext()
    421  if ((NextMI == MBBEnd) || (LastTrackedMI && !advanceBeforeNext()))  in advance()
    428  while (NextMI != End)  in advance()
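
The GCN register-pressure tracker uses NextMI as a cursor into the basic block and keeps it parked on the next non-debug instruction, so DBG_VALUEs never influence pressure bookkeeping. Below is a minimal standalone sketch of that cursor discipline; Instr, Tracker and skipDebugForward are illustrative stand-ins for MachineInstr, the GCNRPTracker machinery and LLVM's skipDebugInstructionsForward, not the real implementation.

```cpp
#include <vector>

// Toy stand-in for llvm::MachineInstr: only what the cursor logic needs.
struct Instr {
  bool IsDebug; // e.g. a DBG_VALUE
};

using InstrIter = std::vector<Instr>::const_iterator;

// Analogue of skipDebugInstructionsForward(): advance until a non-debug
// instruction or the end of the block is reached.
InstrIter skipDebugForward(InstrIter It, InstrIter End) {
  while (It != End && It->IsDebug)
    ++It;
  return It;
}

// Analogue of the tracker's advance step: remember the instruction just
// tracked, bump the cursor, and park it on the next real instruction.
struct Tracker {
  InstrIter NextMI, MBBEnd;
  const Instr *LastTrackedMI = nullptr;

  bool advanceToNext() {
    NextMI = skipDebugForward(NextMI, MBBEnd);
    if (NextMI == MBBEnd)
      return false; // nothing left to track in this block
    LastTrackedMI = &*NextMI++;
    return true;
  }
};
```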
|
D | GCNRegPressure.h |
    152  MachineBasicBlock::const_iterator NextMI;  variable
    159  const MachineBasicBlock::const_iterator getNext() const { return NextMI; }  in getNext()
|
D | SIShrinkInstructions.cpp |
    333  MachineInstr &NextMI = *Next;  in runOnMachineFunction()  local
    338  uint8_t Nop1 = NextMI.getOperand(0).getImm() + 1;  in runOnMachineFunction()
    342  NextMI.getOperand(0).setImm(Nop0 + Nop1 - 1);  in runOnMachineFunction()
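
These lines fold two adjacent s_nop instructions into one: the s_nop immediate encodes the wait count minus one, so Nop0 = imm0 + 1 and Nop1 = imm1 + 1 wait states combine into a single nop with immediate Nop0 + Nop1 - 1. A standalone sketch of that arithmetic follows; the SNop type and the 8-wait-state field limit are assumptions of this sketch, not the backend's exact representation.

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

// Toy stand-in for an AMDGPU s_nop: the immediate encodes "wait states
// minus one", i.e. imm N means N + 1 wait states.
struct SNop {
  uint8_t Imm;
};

// Fold runs of adjacent s_nops, mirroring the arithmetic visible above:
// Nop0 = imm0 + 1, Nop1 = imm1 + 1, merged imm = Nop0 + Nop1 - 1.
// The "<= 8" field limit is an assumption for this sketch.
void mergeAdjacentNops(std::vector<SNop> &Block) {
  for (std::size_t I = 0; I + 1 < Block.size();) {
    unsigned Nop0 = Block[I].Imm + 1;
    unsigned Nop1 = Block[I + 1].Imm + 1;
    if (Nop0 + Nop1 <= 8) {
      // The second nop absorbs both counts; the first becomes redundant.
      Block[I + 1].Imm = uint8_t(Nop0 + Nop1 - 1);
      Block.erase(Block.begin() + I);
    } else {
      ++I; // too many wait states for one nop; leave the pair alone
    }
  }
}
```

Because the loop does not advance after a merge, a run of three or more short nops collapses into a single instruction in one pass.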
|
/external/swiftshader/third_party/llvm-7.0/llvm/lib/CodeGen/ |
D | StackSlotColoring.cpp |
    443  MachineBasicBlock::iterator NextMI = std::next(I);  in RemoveDeadStores()  local
    453  while ((NextMI != E) && NextMI->isDebugInstr()) {  in RemoveDeadStores()
    454  ++NextMI;  in RemoveDeadStores()
    457  if (NextMI == E) continue;  in RemoveDeadStores()
    458  if (!(StoreReg = TII->isStoreToStackSlot(*NextMI, SecondSS, StoreSize)))  in RemoveDeadStores()
    467  if (NextMI->findRegisterUseOperandIdx(LoadReg, true, nullptr) != -1) {  in RemoveDeadStores()
    472  toErase.push_back(&*NextMI);  in RemoveDeadStores()
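
RemoveDeadStores() looks for a reload from a stack slot that is immediately followed, ignoring debug instructions, by a store of the same register back to the same slot; such a store writes a value the slot already holds and can be erased. A self-contained sketch of the pattern, with toy Instr records standing in for MachineInstr and the TargetInstrInfo queries (isStoreToStackSlot, findRegisterUseOperandIdx) shown above:

```cpp
#include <cstddef>
#include <vector>

// Toy model of a reload/spill pair: a load of register Reg from stack slot
// Slot, followed (ignoring debug instructions) by a store of the same Reg
// back to the same Slot, makes the store dead.
struct Instr {
  enum Kind { Load, Store, Debug, Other } K;
  int Reg;
  int Slot;
};

std::vector<std::size_t> findDeadStores(const std::vector<Instr> &Block) {
  std::vector<std::size_t> ToErase;
  for (std::size_t I = 0; I < Block.size(); ++I) {
    if (Block[I].K != Instr::Load)
      continue;
    // Skip debug instructions to reach the next real instruction.
    std::size_t Next = I + 1;
    while (Next < Block.size() && Block[Next].K == Instr::Debug)
      ++Next;
    if (Next == Block.size() || Block[Next].K != Instr::Store)
      continue;
    // Same register written back to the same slot: the store is redundant.
    if (Block[Next].Reg == Block[I].Reg && Block[Next].Slot == Block[I].Slot)
      ToErase.push_back(Next);
  }
  return ToErase;
}
```

The findRegisterUseOperandIdx(LoadReg, true, ...) check in the listing additionally asks whether the store kills the reloaded register, in which case the load itself is also dead; the sketch omits that refinement.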
|
D | SlotIndexes.cpp |
    138  MachineInstr &NextMI = *Next;  in removeSingleMachineInstrFromMaps()  local
    139  MIEntry.setInstr(&NextMI);  in removeSingleMachineInstrFromMaps()
    140  mi2iMap.insert(std::make_pair(&NextMI, MIIndex));  in removeSingleMachineInstrFromMaps()
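
Here removeSingleMachineInstrFromMaps() does not simply drop the erased instruction's slot index: in the case shown, the index entry is re-pointed at the following instruction and mi2iMap is updated so that NextMI inherits the index. A toy sketch of that re-keying, with a plain std::map standing in for the instruction-to-index map (the Instr type, the function name, and the "only if it has no index yet" condition are assumptions of this sketch):

```cpp
#include <map>

// Illustrative stand-in for MachineInstr; only identity matters here.
struct Instr {};

using IndexMap = std::map<const Instr *, unsigned>;

// Remove 'Erased' from the map, but hand its index over to 'Next' (the
// following instruction) if that one does not already have an index.
void removeAndTransferIndex(IndexMap &Mi2i, const Instr *Erased,
                            const Instr *Next) {
  auto It = Mi2i.find(Erased);
  if (It == Mi2i.end())
    return;
  unsigned Index = It->second;
  Mi2i.erase(It);
  if (Next && !Mi2i.count(Next))
    Mi2i.emplace(Next, Index);
}
```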
|
/external/swiftshader/third_party/LLVM/lib/Target/ARM/ |
D | MLxExpansionPass.cpp |
    186  MachineInstr *NextMI = LastMIs[Idx];  in FindMLxHazard()  local
    187  if (!NextMI)  in FindMLxHazard()
    190  if (TII->canCauseFpMLxStall(NextMI->getOpcode())) {  in FindMLxHazard()
    196  if (i <= Limit2 && hasRAWHazard(getDefReg(MI), NextMI))  in FindMLxHazard()
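
FindMLxHazard() walks a small window of recently visited instructions (LastMIs) and reports a hazard if one of them is of a kind that can cause an FP MLx stall, or if, within a tighter limit, there is a read-after-write dependence on the MLA's result. A rough standalone sketch of such a windowed scan follows; the Instr type, the newest-first vector, and the limit parameters are stand-ins, not the ARM backend's exact bookkeeping.

```cpp
#include <algorithm>
#include <vector>

// Toy stand-in for a machine instruction as the hazard scan sees it.
struct Instr {
  bool CanCauseFpMLxStall;   // opcode class that stalls a following VMLA
  std::vector<int> Uses;     // registers this instruction reads
  bool reads(int R) const {
    return std::find(Uses.begin(), Uses.end(), R) != Uses.end();
  }
};

// LastMIs holds the most recently visited instructions, newest first; null
// entries mean the window has not filled up yet.
bool findMLxHazard(const std::vector<const Instr *> &LastMIs, int MlaDefReg,
                   unsigned StallLimit, unsigned RawLimit) {
  for (unsigned i = 0; i < LastMIs.size(); ++i) {
    const Instr *NextMI = LastMIs[i];
    if (!NextMI)
      continue;
    if (i <= StallLimit && NextMI->CanCauseFpMLxStall)
      return true;   // a stall-prone instruction sits too close
    if (i <= RawLimit && NextMI->reads(MlaDefReg))
      return true;   // the MLA result is consumed too soon (RAW hazard)
  }
  return false;
}
```

When the scan reports a hazard, the pass expands the fused multiply-accumulate into a separate multiply and add so the stall is avoided.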
|
/external/llvm/lib/CodeGen/ |
D | StackSlotColoring.cpp |
    394  MachineBasicBlock::iterator NextMI = std::next(I);  in RemoveDeadStores()  local
    395  if (NextMI == MBB->end()) continue;  in RemoveDeadStores()
    401  if (!(StoreReg = TII->isStoreToStackSlot(*NextMI, SecondSS)))  in RemoveDeadStores()
    408  if (NextMI->findRegisterUseOperandIdx(LoadReg, true, nullptr) != -1) {  in RemoveDeadStores()
    413  toErase.push_back(&*NextMI);  in RemoveDeadStores()
|
/external/swiftshader/third_party/llvm-7.0/llvm/lib/Target/ARM/ |
D | MLxExpansionPass.cpp |
    252  MachineInstr *NextMI = LastMIs[Idx];  in FindMLxHazard()  local
    253  if (!NextMI)  in FindMLxHazard()
    256  if (TII->canCauseFpMLxStall(NextMI->getOpcode())) {  in FindMLxHazard()
    262  if (i <= Limit2 && hasRAWHazard(getDefReg(MI), NextMI))  in FindMLxHazard()
|
/external/llvm/lib/Target/ARM/ |
D | MLxExpansionPass.cpp |
    252  MachineInstr *NextMI = LastMIs[Idx];  in FindMLxHazard()  local
    253  if (!NextMI)  in FindMLxHazard()
    256  if (TII->canCauseFpMLxStall(NextMI->getOpcode())) {  in FindMLxHazard()
    262  if (i <= Limit2 && hasRAWHazard(getDefReg(MI), NextMI))  in FindMLxHazard()
|
/external/llvm/lib/Target/AMDGPU/ |
D | SIShrinkInstructions.cpp |
    252  MachineInstr &NextMI = *Next;  in runOnMachineFunction()  local
    257  uint8_t Nop1 = NextMI.getOperand(0).getImm() + 1;  in runOnMachineFunction()
    261  NextMI.getOperand(0).setImm(Nop0 + Nop1 - 1);  in runOnMachineFunction()
|
/external/swiftshader/third_party/llvm-7.0/llvm/lib/Target/AVR/ |
D | AVRFrameLowering.cpp |
    313  MachineBasicBlock::iterator NextMI = std::next(I);  in fixStackStores()  local
    319  I = NextMI;  in fixStackStores()
    345  I = NextMI;  in fixStackStores()
    357  I = NextMI;  in fixStackStores()
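
Each of these assignments restores the loop iterator from a copy taken with std::next(I) before the current instruction was rewritten or erased, so the walk over the block survives the mutation. A minimal sketch of that idiom on a toy instruction list (the Instr type and the NeedsRewrite predicate are illustrative only):

```cpp
#include <iterator>
#include <list>

// Toy stand-in for a machine instruction.
struct Instr {
  bool NeedsRewrite;
};

void rewriteInstructions(std::list<Instr> &Block) {
  for (auto I = Block.begin(), E = Block.end(); I != E;) {
    auto NextMI = std::next(I); // saved before I can be invalidated
    if (I->NeedsRewrite) {
      // Replace the instruction: insert the rewritten form, then erase the
      // original. Other iterators into the std::list stay valid.
      Block.insert(I, Instr{false});
      Block.erase(I);
    }
    I = NextMI; // resume from the saved iterator
  }
}
```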
|
/external/swiftshader/third_party/LLVM/lib/CodeGen/ |
D | StackSlotColoring.cpp |
    683  MachineBasicBlock::iterator NextMI = llvm::next(I);  in RemoveDeadStores()  local
    684  if (NextMI == MBB->end()) continue;  in RemoveDeadStores()
    690  if (!(StoreReg = TII->isStoreToStackSlot(NextMI, SecondSS))) continue;  in RemoveDeadStores()
    696  if (NextMI->findRegisterUseOperandIdx(LoadReg, true, 0) != -1) {  in RemoveDeadStores()
    701  toErase.push_back(NextMI);  in RemoveDeadStores()
|
D | VirtRegRewriter.cpp |
    1336  MachineInstr &NextMI = *NextMII;  in OptimizeByUnfold2()  local
    1339  if (!TII->unfoldMemoryOperand(MF, &NextMI, VirtReg, false, false, NewMIs))  in OptimizeByUnfold2()
    1343  VRM->transferRestorePts(&NextMI, NewMIs[0]);  in OptimizeByUnfold2()
    1345  InvalidateKills(NextMI, TRI, RegKills, KillOps);  in OptimizeByUnfold2()
    1346  EraseInstr(&NextMI);  in OptimizeByUnfold2()
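
OptimizeByUnfold2() asks the target to "unfold" NextMI: an instruction with a folded stack-slot operand is rewritten as an explicit reload plus a register-only operation, the rewriter's bookkeeping is transferred to the new instructions, and the folded original is erased. A toy sketch of the unfolding step on a made-up instruction encoding (none of these types or opcodes are LLVM's):

```cpp
#include <list>

// Made-up instruction encoding: LoadAndAdd folds a stack-slot load into an
// add; Load and Add are the explicit forms it unfolds into.
struct Instr {
  enum Kind { LoadAndAdd, Load, Add } K;
  int Dst, Src, Slot;
};

// Returns false if the instruction has no folded memory operand to split.
bool unfoldMemoryOperand(std::list<Instr> &Block,
                         std::list<Instr>::iterator It, int ScratchReg) {
  if (It->K != Instr::LoadAndAdd)
    return false;
  // Reload the slot into a scratch register...
  Block.insert(It, Instr{Instr::Load, ScratchReg, -1, It->Slot});
  // ...then redo the arithmetic on registers only.
  Block.insert(It, Instr{Instr::Add, It->Dst, ScratchReg, -1});
  Block.erase(It); // the folded instruction is now dead
  return true;
}
```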
|
/external/llvm/lib/Target/Hexagon/ |
D | HexagonVLIWPacketizer.cpp |
    1168  MachineInstr &NextMI = *NextMII;  in isLegalToPacketizeTogether()  local
    1171  const MachineOperand &NOp0 = NextMI.getOperand(0);  in isLegalToPacketizeTogether()
    1172  const MachineOperand &NOp1 = NextMI.getOperand(1);  in isLegalToPacketizeTogether()
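
isLegalToPacketizeTogether() decides whether the current instruction and NextMI may share a VLIW packet; the lines above inspect NextMI's first two operands as part of that check. The sketch below shows only the general shape of such a legality test, a register-dependence scan between the two candidates; it is deliberately generic and does not reproduce Hexagon's actual rules (new-value forwarding, dot-new predicates, resource tracking).

```cpp
#include <vector>

// Generic stand-ins for machine operands and instructions.
struct Operand {
  bool IsDef;
  int Reg;
};

struct Instr {
  std::vector<Operand> Ops;
};

// Two instructions may share a packet unless the second reads a register the
// first defines; a target that can forward values inside a packet may still
// allow the pair. This is a sketch of the shape of the check, not Hexagon's.
bool isLegalToPacketizeTogether(const Instr &MI, const Instr &NextMI,
                                bool TargetForwardsInPacket) {
  for (const Operand &Def : MI.Ops) {
    if (!Def.IsDef)
      continue;
    for (const Operand &Use : NextMI.Ops) {
      if (!Use.IsDef && Use.Reg == Def.Reg)
        return TargetForwardsInPacket; // dependent pair needs forwarding
    }
  }
  return true; // no register dependence between the two candidates
}
```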
|
/external/swiftshader/third_party/llvm-7.0/llvm/lib/Target/Hexagon/ |
D | HexagonVLIWPacketizer.cpp |
    1351  MachineInstr &NextMI = *NextMII;  in isLegalToPacketizeTogether()  local
    1354  const MachineOperand &NOp0 = NextMI.getOperand(0);  in isLegalToPacketizeTogether()
    1355  const MachineOperand &NOp1 = NextMI.getOperand(1);  in isLegalToPacketizeTogether()
|
/external/swiftshader/third_party/llvm-7.0/llvm/lib/CodeGen/AsmPrinter/ |
D | CodeViewDebug.cpp |
    2630  for (const auto &NextMI : *MI->getParent()) {  in beginInstruction()
    2631  if (NextMI.isDebugInstr())  in beginInstruction()
    2633  DL = NextMI.getDebugLoc();  in beginInstruction()
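
In beginInstruction(), when the current instruction has no usable source location, the CodeView emitter scans the instruction's parent block for the first non-debug instruction that does carry one and reuses that location. A self-contained sketch of that fallback (Instr and DebugLoc are stand-ins for the LLVM types):

```cpp
#include <vector>

// Minimal stand-in for llvm::DebugLoc.
struct DebugLoc {
  unsigned Line = 0;
  bool isValid() const { return Line != 0; }
};

// Minimal stand-in for llvm::MachineInstr.
struct Instr {
  bool IsDebug;
  DebugLoc Loc;
};

// Borrow a location from the first real instruction in the block that has
// one; otherwise return an empty location and let the caller cope.
DebugLoc findFallbackLocation(const std::vector<Instr> &Block) {
  for (const Instr &NextMI : Block) {
    if (NextMI.IsDebug)
      continue; // DBG_VALUEs never supply a usable location
    if (NextMI.Loc.isValid())
      return NextMI.Loc;
  }
  return DebugLoc{};
}
```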
|