/external/swiftshader/third_party/LLVM/lib/CodeGen/ |
D | VirtRegRewriter.cpp |
    419  AvailableSpills &Spills,
    438  AvailableSpills &Spills, in GetRegForReload() argument
    446  return GetRegForReload(RC, PhysReg, MF, MI, Spills, MaybeDeadStores, in GetRegForReload()
    868  MachineInstr *MI, AvailableSpills &Spills, in GetRegForReload() argument
    875  const TargetRegisterInfo *TRI = Spills.getRegInfo(); in GetRegForReload()
    892  return GetRegForReload(RC, NewReg, MF, MI, Spills, MaybeDeadStores, in GetRegForReload()
    929  MF, MI, Spills, MaybeDeadStores, in GetRegForReload()
    953  Spills.ClobberPhysReg(NewPhysReg); in GetRegForReload()
    954  Spills.ClobberPhysReg(NewOp.PhysRegReused); in GetRegForReload()
    960  Spills.addAvailable(NewOp.StackSlotOrReMat, NewPhysReg); in GetRegForReload()
    [all …]
|
/external/swiftshader/third_party/llvm-7.0/llvm/lib/Transforms/Coroutines/ |
D | CoroFrame.cpp |
    299  static void dump(StringRef Title, SpillInfo const &Spills) { in dump() argument
    302  for (auto const &E : Spills) { in dump()
    373  SpillInfo &Spills) { in buildFrameType() argument
    397  for (auto &S : Spills) { in buildFrameType()
    472  static Instruction *insertSpills(SpillInfo &Spills, coro::Shape &Shape) { in insertSpills() argument
    509  for (auto const &E : Spills) { in insertSpills()
    763  SpillInfo const &Spills) { in rewriteMaterializableInstructions() argument
    768  for (auto const &E : Spills) { in rewriteMaterializableInstructions()
    808  static void moveSpillUsesAfterCoroBegin(Function &F, SpillInfo const &Spills, in moveSpillUsesAfterCoroBegin() argument
    815  for (auto const &E : Spills) { in moveSpillUsesAfterCoroBegin()
    [all …]
|
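The CoroFrame.cpp matches above all iterate a SpillInfo collection, which in this LLVM version pairs each value that must survive a coroutine suspend point with an instruction that uses it after the suspend; buildFrameType() then reserves a slot in the coroutine frame for every spilled definition. A minimal sketch of that slot-assignment idea, using stand-in types rather than LLVM's (Value is only a placeholder, SpillInfo here is a plain vector of {def, user} pairs, and layoutFrameFields is a name of my own):

    #include <map>
    #include <utility>
    #include <vector>

    struct Value;                                                // placeholder for llvm::Value
    using SpillInfo = std::vector<std::pair<Value *, Value *>>;  // {definition, cross-suspend user}

    // Give every distinct spilled definition its own field index in the frame
    // struct; multiple cross-suspend users of the same definition share a field.
    std::map<Value *, unsigned> layoutFrameFields(const SpillInfo &Spills) {
      std::map<Value *, unsigned> FieldOf;
      unsigned NextField = 0;
      for (const auto &E : Spills)
        if (FieldOf.emplace(E.first, NextField).second)
          ++NextField;                                           // first time this def is seen
      return FieldOf;
    }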
/external/llvm/lib/CodeGen/ |
D | LiveInterval.cpp |
    979   for (unsigned I = 0, E = Spills.size(); I != E; ++I) in print()
    980   OS << ' ' << Spills[I]; in print()
    1018  assert(Spills.empty() && "Leftover spilled segments"); in add()
    1059  if (!Spills.empty() && coalescable(Spills.back(), Seg)) { in add()
    1060  Seg.start = Spills.back().start; in add()
    1061  Seg.end = std::max(Spills.back().end, Seg.end); in add()
    1062  Spills.pop_back(); in add()
    1082  Spills.push_back(Seg); in add()
    1090  size_t NumMoved = std::min(Spills.size(), GapSize); in mergeSpills()
    1093  LiveRange::iterator SpillSrc = Spills.end(); in mergeSpills()
    [all …]
|
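The add() and mergeSpills() matches in LiveInterval.cpp come from code that parks out-of-order segments in the Spills vector (declared in LiveInterval.h further down this listing) and folds a new segment into the previously parked one when the two touch. A simplified, self-contained sketch of just that coalescing step, with a stand-in Segment type instead of LLVM's LiveRange machinery:

    #include <algorithm>
    #include <vector>

    struct Segment { unsigned start, end; };        // half-open [start, end)

    // Two segments can be merged when they overlap or abut.
    static bool coalescable(const Segment &A, const Segment &B) {
      return A.end >= B.start && B.end >= A.start;
    }

    // Mirrors the pattern in the matches: if the incoming segment touches the
    // last parked one, merge them into a single segment before storing it.
    void addSpillSegment(std::vector<Segment> &Spills, Segment Seg) {
      if (!Spills.empty() && coalescable(Spills.back(), Seg)) {
        Seg.start = std::min(Spills.back().start, Seg.start);
        Seg.end = std::max(Spills.back().end, Seg.end);
        Spills.pop_back();
      }
      Spills.push_back(Seg);
    }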
D | InlineSpiller.cpp |
    93    SmallPtrSet<MachineInstr *, 16> &Spills,
    98    MachineBasicBlock *Root, SmallPtrSet<MachineInstr *, 16> &Spills,
    105   SmallPtrSet<MachineInstr *, 16> &Spills,
    1108  SmallPtrSet<MachineInstr *, 16> &Spills, in rmRedundantSpills() argument
    1114  for (const auto CurrentSpill : Spills) { in rmRedundantSpills()
    1130  Spills.erase(SpillToRm); in rmRedundantSpills()
    1141  MachineBasicBlock *Root, SmallPtrSet<MachineInstr *, 16> &Spills, in getVisitOrders() argument
    1164  for (const auto Spill : Spills) { in getVisitOrders()
    1229  unsigned OrigReg, VNInfo &OrigVNI, SmallPtrSet<MachineInstr *, 16> &Spills, in runHoistSpills() argument
    1243  rmRedundantSpills(Spills, SpillsToRm, SpillBBToSpill); in runHoistSpills()
    [all …]
|
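The rmRedundantSpills() and runHoistSpills() matches belong to InlineSpiller's spill-hoisting logic, which starts by discarding spills made redundant by another spill of the same value. The real pass reasons with the dominator tree across blocks; the sketch below only covers the simpler same-block case and uses stand-in Spill/Block types with an explicit ordering field of my own, not the MachineInstr machinery:

    #include <map>
    #include <set>
    #include <vector>

    struct Block;                 // placeholder for a basic block
    struct Spill {
      Block *Parent;              // block containing the spill instruction
      unsigned Order;             // position of the spill within that block
    };

    // Keep only the earliest spill in each block; a later spill in the same
    // block stores the same value to the same slot and is therefore redundant.
    void pruneRedundantSpills(std::set<Spill *> &Spills) {
      std::map<Block *, Spill *> FirstInBlock;
      std::vector<Spill *> ToRemove;
      for (Spill *S : Spills) {
        auto [It, Inserted] = FirstInBlock.emplace(S->Parent, S);
        if (Inserted)
          continue;                               // first spill seen in this block
        Spill *&Kept = It->second;
        if (S->Order < Kept->Order) {             // S is earlier: demote the old one
          ToRemove.push_back(Kept);
          Kept = S;
        } else {
          ToRemove.push_back(S);
        }
      }
      for (Spill *S : ToRemove)
        Spills.erase(S);
    }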
/external/swiftshader/third_party/llvm-7.0/llvm/lib/CodeGen/ |
D | LiveInterval.cpp |
    1098  for (unsigned I = 0, E = Spills.size(); I != E; ++I) in print()
    1099  OS << ' ' << Spills[I]; in print()
    1138  assert(Spills.empty() && "Leftover spilled segments"); in add()
    1179  if (!Spills.empty() && coalescable(Spills.back(), Seg)) { in add()
    1180  Seg.start = Spills.back().start; in add()
    1181  Seg.end = std::max(Spills.back().end, Seg.end); in add()
    1182  Spills.pop_back(); in add()
    1202  Spills.push_back(Seg); in add()
    1210  size_t NumMoved = std::min(Spills.size(), GapSize); in mergeSpills()
    1213  LiveRange::iterator SpillSrc = Spills.end(); in mergeSpills()
    [all …]
|
D | InlineSpiller.cpp |
    119   SmallPtrSet<MachineInstr *, 16> &Spills,
    124   MachineBasicBlock *Root, SmallPtrSet<MachineInstr *, 16> &Spills,
    131   SmallPtrSet<MachineInstr *, 16> &Spills,
    1165  SmallPtrSet<MachineInstr *, 16> &Spills, in rmRedundantSpills() argument
    1171  for (const auto CurrentSpill : Spills) { in rmRedundantSpills()
    1187  Spills.erase(SpillToRm); in rmRedundantSpills()
    1197  MachineBasicBlock *Root, SmallPtrSet<MachineInstr *, 16> &Spills, in getVisitOrders() argument
    1220  for (const auto Spill : Spills) { in getVisitOrders()
    1285  SmallPtrSet<MachineInstr *, 16> &Spills, in runHoistSpills() argument
    1299  rmRedundantSpills(Spills, SpillsToRm, SpillBBToSpill); in runHoistSpills()
    [all …]
|
D | RegAllocGreedy.cpp |
    542   unsigned &FoldedReloads, unsigned &Spills,
    548   unsigned Reloads, FoldedReloads, Spills, FoldedSpills; in reportNumberOfSplillsReloads() local
    549   reportNumberOfSplillsReloads(L, Reloads, FoldedReloads, Spills, in reportNumberOfSplillsReloads()
    3093  unsigned &Spills, in reportNumberOfSplillsReloads() argument
    3097  Spills = 0; in reportNumberOfSplillsReloads()
    3111  Spills += SubSpills; in reportNumberOfSplillsReloads()
    3132  ++Spills; in reportNumberOfSplillsReloads()
    3138  if (Reloads || FoldedReloads || Spills || FoldedSpills) { in reportNumberOfSplillsReloads()
    3144  if (Spills) in reportNumberOfSplillsReloads()
    3145  R << NV("NumSpills", Spills) << " spills "; in reportNumberOfSplillsReloads()
|
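reportNumberOfSplillsReloads() (the misspelling is in the LLVM source) tallies spills and reloads per loop and emits the totals as an optimization remark. The core of the count is deciding, per machine instruction, whether it reloads from or stores to a spill slot. A hedged sketch of just that classification: the helper name is my own, but isLoadFromStackSlot, isStoreToStackSlot, and isSpillSlotObjectIndex are existing LLVM APIs.

    #include "llvm/CodeGen/MachineBasicBlock.h"
    #include "llvm/CodeGen/MachineFrameInfo.h"
    #include "llvm/CodeGen/TargetInstrInfo.h"

    using namespace llvm;

    // Count reloads and spills in one block by checking whether each
    // instruction loads from or stores to a stack spill slot.
    static void countSpillsReloads(const MachineBasicBlock &MBB,
                                   const TargetInstrInfo &TII,
                                   const MachineFrameInfo &MFI,
                                   unsigned &Reloads, unsigned &Spills) {
      for (const MachineInstr &MI : MBB) {
        int FI = 0;
        if (TII.isLoadFromStackSlot(MI, FI) && MFI.isSpillSlotObjectIndex(FI))
          ++Reloads;                      // load from a spill slot: a reload
        else if (TII.isStoreToStackSlot(MI, FI) && MFI.isSpillSlotObjectIndex(FI))
          ++Spills;                       // store to a spill slot: a spill
      }
    }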
/external/swiftshader/third_party/llvm-7.0/llvm/test/CodeGen/X86/ |
D | x86-32-intrcc.ll |
    8   ; Spills eax, putting original esp at +4.
    28  ; Spills eax and ecx, putting original esp at +8. Stack is adjusted up another 4 bytes
|
D | x86-64-intrcc.ll |
    8   ; Spills rax, putting original esp at +8.
    28  ; Spills rax and rcx, putting original rsp at +16. Stack is adjusted up another 8 bytes
|
/external/llvm/test/CodeGen/X86/ |
D | x86-32-intrcc.ll |
    8   ; Spills eax, putting original esp at +4.
    28  ; Spills eax and ecx, putting original esp at +8. Stack is adjusted up another 4 bytes
|
D | x86-64-intrcc.ll |
    8   ; Spills rax, putting original esp at +8.
    28  ; Spills rax and rcx, putting original rsp at +16. Stack is adjusted up another 8 bytes
|
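The comments matched in these intrcc tests describe how the x86_intrcc (interrupt) calling convention treats the stack: the prologue spills whatever general-purpose registers the handler body clobbers, which shifts where the original stack pointer sits relative to the frame, and they are restored before iret. As a rough illustration of the kind of source that ends up with this convention (clang lowers x86 __attribute__((interrupt)) handlers to x86_intrcc; the struct layout and names below are illustrative and assume a 32-bit target):

    // Illustrative 32-bit interrupt frame: roughly what the CPU pushes on entry.
    struct interrupt_frame {
      unsigned int ip, cs, flags, sp, ss;
    };

    // Clobbering eax in the body forces the compiler to spill and restore it
    // in the handler's prologue/epilogue, as the test comments above describe.
    __attribute__((interrupt))
    void isr_no_error_code(interrupt_frame *frame) {
      asm volatile("" ::: "eax");
    }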
/external/llvm/include/llvm/CodeGen/ |
D | LiveInterval.h | 795 SmallVector<LiveRange::Segment, 16> Spills; variable
|
/external/swiftshader/third_party/llvm-7.0/llvm/include/llvm/CodeGen/ |
D | LiveInterval.h | 859 SmallVector<LiveRange::Segment, 16> Spills; variable
|