/external/llvm/lib/CodeGen/
Cross-reference matches for the symbol VirtReg in this directory. Each match shows the source line number, the matched line, and (where the index provides it) the enclosing function and the role of the symbol.
LiveIntervalUnion.cpp
   28  void LiveIntervalUnion::unify(LiveInterval &VirtReg) {   [in unify(), argument]
   29  if (VirtReg.empty())   [in unify()]
   34  LiveInterval::iterator RegPos = VirtReg.begin();   [in unify()]
   35  LiveInterval::iterator RegEnd = VirtReg.end();   [in unify()]
   39  SegPos.insert(RegPos->start, RegPos->end, &VirtReg);   [in unify()]
   49  SegPos.insert(RegEnd->start, RegEnd->end, &VirtReg);   [in unify()]
   51  SegPos.insert(RegPos->start, RegPos->end, &VirtReg);   [in unify()]
   55  void LiveIntervalUnion::extract(LiveInterval &VirtReg) {   [in extract(), argument]
   56  if (VirtReg.empty())   [in extract()]
   61  LiveInterval::iterator RegPos = VirtReg.begin();   [in extract()]
       ... (more matches in this file not shown)
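The matches above show unify() walking a virtual register's live segments and inserting each [start, end) segment into the per-physreg union keyed back to &VirtReg, with extract() undoing that. Below is a minimal standalone sketch of the same pattern; the Segment, LiveInterval and LiveIntervalUnionSketch types are simplified stand-ins for the LLVM classes (which use an IntervalMap), not the real API.

    #include <cassert>
    #include <cstdio>
    #include <map>
    #include <utility>
    #include <vector>

    // Hypothetical stand-ins for LLVM's LiveInterval; illustration only.
    struct Segment { unsigned Start, End; };          // half-open [Start, End)
    struct LiveInterval {
      unsigned Reg;
      std::vector<Segment> Segments;
    };

    // One "union" per physical register: start index -> (end index, owning vreg).
    struct LiveIntervalUnionSketch {
      std::map<unsigned, std::pair<unsigned, const LiveInterval *>> Segs;

      // Mirror of unify(): add every segment of VirtReg, tagged with &VirtReg.
      void unify(const LiveInterval &VirtReg) {
        for (const Segment &S : VirtReg.Segments)
          Segs[S.Start] = std::make_pair(S.End, &VirtReg);
      }

      // Mirror of extract(): drop the segments that were added for VirtReg.
      void extract(const LiveInterval &VirtReg) {
        for (const Segment &S : VirtReg.Segments) {
          auto It = Segs.find(S.Start);
          if (It != Segs.end() && It->second.second == &VirtReg)
            Segs.erase(It);
        }
      }
    };

    int main() {
      LiveInterval VReg{5, {{10, 20}, {30, 40}}};
      LiveIntervalUnionSketch Union;
      Union.unify(VReg);
      assert(Union.Segs.size() == 2);
      Union.extract(VReg);
      assert(Union.Segs.empty());
      std::printf("ok\n");
    }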
RegAllocBasic.cpp
  126  virtual unsigned selectOrSplit(LiveInterval &VirtReg,
  269  LiveInterval &VirtReg = *I->second;   [in seedLiveRegs(), local]
  271  PhysReg2LiveUnion[RegNum].unify(VirtReg);   [in seedLiveRegs()]
  273  enqueue(&VirtReg);   [in seedLiveRegs()]
  277  void RegAllocBase::assign(LiveInterval &VirtReg, unsigned PhysReg) {   [in assign(), argument]
  278  DEBUG(dbgs() << "assigning " << PrintReg(VirtReg.reg, TRI)   [in assign()]
  280  assert(!VRM->hasPhys(VirtReg.reg) && "Duplicate VirtReg assignment");   [in assign()]
  281  VRM->assignVirt2Phys(VirtReg.reg, PhysReg);   [in assign()]
  283  PhysReg2LiveUnion[PhysReg].unify(VirtReg);   [in assign()]
  287  void RegAllocBase::unassign(LiveInterval &VirtReg, unsigned PhysReg) {   [in unassign(), argument]
       ... (more matches in this file not shown)
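assign() and unassign() in these matches keep two structures in step: the VirtRegMap entry for the vreg (VRM->assignVirt2Phys) and the per-physreg live union (PhysReg2LiveUnion[PhysReg].unify/extract). A simplified standalone model of that bookkeeping follows, assuming plain std::map and std::set containers in place of the real data structures.

    #include <cassert>
    #include <map>
    #include <set>
    #include <vector>

    // Hypothetical minimal model of the state assign()/unassign() keep in sync.
    struct AllocatorState {
      std::map<unsigned, unsigned> Virt2Phys;            // stands in for VirtRegMap
      std::vector<std::set<unsigned>> PhysReg2LiveUnion; // vregs joined per physreg

      explicit AllocatorState(unsigned NumPhysRegs)
          : PhysReg2LiveUnion(NumPhysRegs) {}

      void assign(unsigned VirtReg, unsigned PhysReg) {
        assert(!Virt2Phys.count(VirtReg) && "Duplicate VirtReg assignment");
        Virt2Phys[VirtReg] = PhysReg;                // VRM->assignVirt2Phys(...)
        PhysReg2LiveUnion[PhysReg].insert(VirtReg);  // ...unify(VirtReg)
      }

      void unassign(unsigned VirtReg, unsigned PhysReg) {
        assert(Virt2Phys.count(VirtReg) && Virt2Phys[VirtReg] == PhysReg);
        Virt2Phys.erase(VirtReg);                    // undo the mapping
        PhysReg2LiveUnion[PhysReg].erase(VirtReg);   // counterpart of unify()
      }
    };

    int main() {
      AllocatorState S(8);
      S.assign(1001, 3);
      S.unassign(1001, 3);
      assert(S.Virt2Phys.empty());
    }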
RegAllocGreedy.cpp
  115  LiveRangeStage getStage(const LiveInterval &VirtReg) const {   [in getStage()]
  116  return ExtraRegInfo[VirtReg.reg].Stage;   [in getStage()]
  119  void setStage(const LiveInterval &VirtReg, LiveRangeStage Stage) {   [in setStage(), argument]
  121  ExtraRegInfo[VirtReg.reg].Stage = Stage;   [in setStage()]
  306  bool RAGreedy::LRE_CanEraseVirtReg(unsigned VirtReg) {   [in LRE_CanEraseVirtReg(), argument]
  307  if (unsigned PhysReg = VRM->getPhys(VirtReg)) {   [in LRE_CanEraseVirtReg()]
  308  unassign(LIS->getInterval(VirtReg), PhysReg);   [in LRE_CanEraseVirtReg()]
  316  void RAGreedy::LRE_WillShrinkVirtReg(unsigned VirtReg) {   [in LRE_WillShrinkVirtReg(), argument]
  317  unsigned PhysReg = VRM->getPhys(VirtReg);   [in LRE_WillShrinkVirtReg()]
  322  LiveInterval &LI = LIS->getInterval(VirtReg);   [in LRE_WillShrinkVirtReg()]
       ... (more matches in this file not shown)
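getStage()/setStage() record per-vreg allocator state in ExtraRegInfo, indexed by VirtReg.reg. Here is a hedged sketch of that bookkeeping; the stage names (RS_New, RS_Spill) and the std::map container are placeholders for illustration, not RAGreedy's actual enum or container.

    #include <cassert>
    #include <map>

    // Placeholder stages; the real pass defines its own LiveRangeStage values.
    enum LiveRangeStage { RS_New, RS_Spill };

    struct ExtraInfo { LiveRangeStage Stage = RS_New; };

    struct GreedySketch {
      std::map<unsigned, ExtraInfo> ExtraRegInfo;   // indexed by VirtReg.reg

      LiveRangeStage getStage(unsigned VirtRegNum) const {
        auto It = ExtraRegInfo.find(VirtRegNum);
        return It == ExtraRegInfo.end() ? RS_New : It->second.Stage;
      }
      void setStage(unsigned VirtRegNum, LiveRangeStage Stage) {
        ExtraRegInfo[VirtRegNum].Stage = Stage;
      }
    };

    int main() {
      GreedySketch G;
      assert(G.getStage(1001) == RS_New);   // unseen vregs start in the first stage
      G.setStage(1001, RS_Spill);
      assert(G.getStage(1001) == RS_Spill);
    }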
VirtRegRewriter.cpp
  369  unsigned VirtReg;   [member]
  374  AssignedPhysReg(apr), VirtReg(vreg) {}   [in ReusedOp()]
  396  unsigned VirtReg) {   [in addReuse(), argument]
  403  AssignedPhysReg, VirtReg));   [in addReuse()]
  437  unsigned GetRegForReload(unsigned VirtReg, unsigned PhysReg, MachineInstr *MI,   [in GetRegForReload(), argument]
  445  const TargetRegisterClass* RC = MF.getRegInfo().getRegClass(VirtReg);   [in GetRegForReload()]
  692  unsigned VirtReg = MO.getReg();   [in ReMaterialize(), local]
  693  if (TargetRegisterInfo::isPhysicalRegister(VirtReg))   [in ReMaterialize()]
  696  unsigned Phys = VRM.getPhys(VirtReg);   [in ReMaterialize()]
  905  MBB->getParent()->getRegInfo().getRegClass(Op.VirtReg);   [in GetRegForReload()]
       ... (more matches in this file not shown)
RegAllocBase.h
  113  LiveIntervalUnion::Query &query(LiveInterval &VirtReg, unsigned PhysReg) {   [in query(), argument]
  114  Queries[PhysReg].init(UserTag, &VirtReg, &PhysReg2LiveUnion[PhysReg]);   [in query()]
  143  virtual unsigned selectOrSplit(LiveInterval &VirtReg,
  152  unsigned checkPhysRegInterference(LiveInterval& VirtReg, unsigned PhysReg);
  156  void assign(LiveInterval &VirtReg, unsigned PhysReg);
  161  void unassign(LiveInterval &VirtReg, unsigned PhysReg);
  166  bool spillInterferences(LiveInterval &VirtReg, unsigned PhysReg,
  187  void spillReg(LiveInterval &VirtReg, unsigned PhysReg,
LiveIntervalUnion.h
   93  void unify(LiveInterval &VirtReg);
   96  void extract(LiveInterval &VirtReg);
  163  LiveInterval *VirtReg;   [variable]
  172  Query(): LiveUnion(), VirtReg(), Tag(0), UserTag(0) {}   [in Query()]
  175  LiveUnion(LIU), VirtReg(VReg), CheckedFirstInterference(false),   [in Query()]
  181  VirtReg = NULL;   [in clear()]
  192  if (UserTag == UTag && VirtReg == VReg &&   [in init()]
  199  VirtReg = VReg;   [in init()]
  205  assert(VirtReg && "uninitialized");   [in virtReg()]
  206  return *VirtReg;   [in virtReg()]
       ... (more matches in this file not shown)
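Query::init() in these matches skips re-initialization when both the user tag and the queried vreg are unchanged (the UserTag == UTag && VirtReg == VReg test at line 192), so repeated interference checks against the same pair stay cheap. A reduced standalone illustration of that caching test; the field names are kept, everything else (including the InitCount counter) is an invention for the example.

    #include <cassert>
    #include <cstdio>

    struct QuerySketch {
      unsigned UserTag = 0;
      const void *VirtReg = nullptr;
      bool CheckedFirstInterference = false;
      unsigned InitCount = 0;   // only here so main() can observe the caching

      void init(unsigned UTag, const void *VReg) {
        if (UserTag == UTag && VirtReg == VReg)
          return;                         // still valid, keep cached results
        UserTag = UTag;
        VirtReg = VReg;
        CheckedFirstInterference = false; // force interference to be recomputed
        ++InitCount;
      }
    };

    int main() {
      int SomeVReg = 0;
      QuerySketch Q;
      Q.init(1, &SomeVReg);
      Q.init(1, &SomeVReg);   // same tag and vreg: cached, no re-init
      Q.init(2, &SomeVReg);   // tag changed: re-init
      assert(Q.InitCount == 2);
      std::printf("ok\n");
    }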
RegAllocFast.cpp
  150  int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC);
  155  void killVirtReg(unsigned VirtReg);
  157  void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);
  165  unsigned VirtReg, unsigned Hint);
  167  unsigned VirtReg, unsigned Hint);
  176  int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) {   [in getStackSpaceFor(), argument]
  178  int SS = StackSlotForVirtReg[VirtReg];   [in getStackSpaceFor()]
  187  StackSlotForVirtReg[VirtReg] = FrameIdx;   [in getStackSpaceFor()]
  235  void RAFast::killVirtReg(unsigned VirtReg) {   [in killVirtReg(), argument]
  236  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&   [in killVirtReg()]
       ... (more matches in this file not shown)
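getStackSpaceFor() assigns each vreg a spill slot lazily: the lookup at line 178 reuses an existing slot, otherwise a new frame index is recorded at line 187. A simplified standalone version of that lookup-or-create pattern, with a plain counter standing in for the frame indexes the real pass obtains from the machine frame information.

    #include <cassert>
    #include <map>

    struct FastAllocSketch {
      std::map<unsigned, int> StackSlotForVirtReg;  // vreg -> frame index
      int NextFrameIdx = 0;

      int getStackSpaceFor(unsigned VirtReg) {
        auto It = StackSlotForVirtReg.find(VirtReg);
        if (It != StackSlotForVirtReg.end())
          return It->second;             // this vreg already has a spill slot
        int FrameIdx = NextFrameIdx++;   // the real code asks MachineFrameInfo here
        StackSlotForVirtReg[VirtReg] = FrameIdx;
        return FrameIdx;
      }
    };

    int main() {
      FastAllocSketch F;
      int A = F.getStackSpaceFor(1001);
      assert(F.getStackSpaceFor(1001) == A);   // reused, not a new slot
      assert(F.getStackSpaceFor(1002) != A);   // a different vreg gets its own slot
    }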
VirtRegMap.h
  212  bool hasPreferredPhys(unsigned VirtReg) {   [in hasPreferredPhys(), argument]
  213  return getPhys(VirtReg) == getRegAllocPref(VirtReg);   [in hasPreferredPhys()]
  230  unsigned getOriginal(unsigned VirtReg) const {   [in getOriginal(), argument]
  231  unsigned Orig = getPreSplitReg(VirtReg);   [in getOriginal()]
  232  return Orig ? Orig : VirtReg;   [in getOriginal()]
  449  void setIsImplicitlyDefined(unsigned VirtReg) {   [in setIsImplicitlyDefined(), argument]
  450  ImplicitDefed.set(TargetRegisterInfo::virtReg2Index(VirtReg));   [in setIsImplicitlyDefined()]
  454  bool isImplicitlyDefined(unsigned VirtReg) const {   [in isImplicitlyDefined(), argument]
  455  return ImplicitDefed[TargetRegisterInfo::virtReg2Index(VirtReg)];   [in isImplicitlyDefined()]
  460  void virtFolded(unsigned VirtReg, MachineInstr *OldMI, MachineInstr *NewMI,
       ... (more matches in this file not shown)
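Two of the small helpers matched above: hasPreferredPhys() compares the assigned physreg with the allocation preference, and getOriginal() returns the pre-split register when there is one, else the vreg itself. A standalone sketch under the assumption that getPhys, getRegAllocPref and getPreSplitReg can all be modeled as plain map lookups.

    #include <cassert>
    #include <map>

    struct VirtRegMapSketch {
      std::map<unsigned, unsigned> PreSplitReg;   // split vreg -> original vreg
      std::map<unsigned, unsigned> Virt2Phys;     // vreg -> assigned physreg
      std::map<unsigned, unsigned> AllocPref;     // vreg -> preferred physreg

      unsigned getOriginal(unsigned VirtReg) const {
        auto It = PreSplitReg.find(VirtReg);
        unsigned Orig = It == PreSplitReg.end() ? 0 : It->second;
        return Orig ? Orig : VirtReg;             // mirrors "Orig ? Orig : VirtReg"
      }
      bool hasPreferredPhys(unsigned VirtReg) const {
        auto P = Virt2Phys.find(VirtReg);
        auto H = AllocPref.find(VirtReg);
        return P != Virt2Phys.end() && H != AllocPref.end() &&
               P->second == H->second;            // getPhys == getRegAllocPref
      }
    };

    int main() {
      VirtRegMapSketch V;
      V.PreSplitReg[2001] = 1001;
      assert(V.getOriginal(2001) == 1001);  // split vreg maps back to its parent
      assert(V.getOriginal(1001) == 1001);  // unsplit vreg is its own original
      V.Virt2Phys[1001] = 7;
      V.AllocPref[1001] = 7;
      assert(V.hasPreferredPhys(1001));
    }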
AllocationOrder.cpp
   25  AllocationOrder::AllocationOrder(unsigned VirtReg,   [in AllocationOrder(), argument]
   29  const TargetRegisterClass *RC = VRM.getRegInfo().getRegClass(VirtReg);   [in AllocationOrder()]
   31  VRM.getRegInfo().getRegAllocationHint(VirtReg);   [in AllocationOrder()]
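The constructor matched here looks up the vreg's register class and its allocation hint. The exact policy of the real AllocationOrder is not visible in these matches; the following standalone sketch only illustrates one plausible use of those two lookups, trying the hinted physreg before the rest of the class's order. The makeOrder name and the numeric register ids are inventions for the example.

    #include <cassert>
    #include <vector>

    // Build a candidate order from a class's allocation order plus an optional
    // hinted physreg (0 meaning "no hint"). Purely illustrative.
    static std::vector<unsigned> makeOrder(const std::vector<unsigned> &ClassOrder,
                                           unsigned HintPhysReg) {
      std::vector<unsigned> Order;
      if (HintPhysReg)
        Order.push_back(HintPhysReg);    // preferred register is tried first
      for (unsigned Reg : ClassOrder)
        if (Reg != HintPhysReg)          // avoid listing the hint twice
          Order.push_back(Reg);
      return Order;
    }

    int main() {
      std::vector<unsigned> GPRs = {1, 2, 3, 4};
      std::vector<unsigned> Hinted = makeOrder(GPRs, 3);
      assert(Hinted.size() == 4 && Hinted[0] == 3);
      std::vector<unsigned> Plain = makeOrder(GPRs, 0);
      assert(Plain[0] == 1);
    }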
LiveDebugVariables.cpp
  268  UserValue *lookupVirtReg(unsigned VirtReg);
  299  void mapVirtReg(unsigned VirtReg, UserValue *EC);
  391  void LDVImpl::mapVirtReg(unsigned VirtReg, UserValue *EC) {   [in mapVirtReg(), argument]
  392  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && "Only map VirtRegs");   [in mapVirtReg()]
  393  UserValue *&Leader = virtRegToEqClass[VirtReg];   [in mapVirtReg()]
  397  UserValue *LDVImpl::lookupVirtReg(unsigned VirtReg) {   [in lookupVirtReg(), argument]
  398  if (UserValue *UV = virtRegToEqClass.lookup(VirtReg))   [in lookupVirtReg()]
  840  unsigned VirtReg = Loc.getReg();   [in rewriteLocations(), local]
  841  if (VRM.isAssignedReg(VirtReg) &&   [in rewriteLocations()]
  842  TargetRegisterInfo::isPhysicalRegister(VRM.getPhys(VirtReg))) {   [in rewriteLocations()]
       ... (more matches in this file not shown)
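mapVirtReg()/lookupVirtReg() maintain a vreg-to-UserValue table: mapping takes a reference to the table slot (the Leader at line 393), lookup returns the stored UserValue or null. What the real code does with Leader beyond line 393 is not shown in these matches, so this standalone reduction simply stores EC; the UserValue body is a placeholder.

    #include <cassert>
    #include <map>

    struct UserValue { int Id; };   // placeholder for the real debug-value class

    struct LDVSketch {
      std::map<unsigned, UserValue *> virtRegToEqClass;

      void mapVirtReg(unsigned VirtReg, UserValue *EC) {
        UserValue *&Leader = virtRegToEqClass[VirtReg];
        Leader = EC;   // sketch only; the real pass may merge equivalence classes
      }
      UserValue *lookupVirtReg(unsigned VirtReg) const {
        auto It = virtRegToEqClass.find(VirtReg);
        return It == virtRegToEqClass.end() ? nullptr : It->second;
      }
    };

    int main() {
      UserValue UV{1};
      LDVSketch L;
      assert(L.lookupVirtReg(1001) == nullptr);  // unmapped vreg: no user value
      L.mapVirtReg(1001, &UV);
      assert(L.lookupVirtReg(1001) == &UV);
    }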
VirtRegMap.cpp
  183  void VirtRegMap::virtFolded(unsigned VirtReg, MachineInstr *OldMI,   [in virtFolded(), argument]
  194  MI2VirtMap.insert(IP, std::make_pair(NewMI, std::make_pair(VirtReg, MRInfo)));   [in virtFolded()]
  197  void VirtRegMap::virtFolded(unsigned VirtReg, MachineInstr *MI, ModRef MRInfo) {   [in virtFolded(), argument]
  199  MI2VirtMap.insert(IP, std::make_pair(MI, std::make_pair(VirtReg, MRInfo)));   [in virtFolded()]
  281  unsigned VirtReg = MO.getReg();   [in rewrite(), local]
  282  unsigned PhysReg = getPhys(VirtReg);   [in rewrite()]
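The rewrite() matches show the final substitution step: for each virtual register operand, getPhys(VirtReg) supplies the physical register that replaces it. A self-contained sketch of that loop, with a plain struct standing in for MachineOperand and an arbitrary numeric cutoff (1024) standing in for TargetRegisterInfo::isVirtualRegister().

    #include <cassert>
    #include <map>
    #include <vector>

    struct Operand { unsigned Reg; };   // stand-in for a register MachineOperand

    // Replace every virtual register operand with its assigned physical register.
    static void rewrite(std::vector<Operand> &Operands,
                        const std::map<unsigned, unsigned> &Virt2Phys) {
      for (Operand &MO : Operands) {
        if (MO.Reg < 1024)                 // physical (or no) register: leave it
          continue;
        auto It = Virt2Phys.find(MO.Reg);  // getPhys(VirtReg) in the real code
        assert(It != Virt2Phys.end() && "vreg was never assigned");
        MO.Reg = It->second;
      }
    }

    int main() {
      std::vector<Operand> Ops = {{3}, {2001}};
      std::map<unsigned, unsigned> Map = {{2001, 7}};
      rewrite(Ops, Map);
      assert(Ops[0].Reg == 3);   // physreg untouched
      assert(Ops[1].Reg == 7);   // vreg rewritten to its physreg
    }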
AllocationOrder.h
   39  AllocationOrder(unsigned VirtReg,
InlineSpiller.cpp
  637  bool InlineSpiller::reMaterializeFor(LiveInterval &VirtReg,   [in reMaterializeFor(), argument]
  640  VNInfo *ParentVNI = VirtReg.getVNInfoAt(UseIdx.getBaseIndex());   [in reMaterializeFor()]
  646  if (MO.isReg() && MO.isUse() && MO.getReg() == VirtReg.reg)   [in reMaterializeFor()]
  662  markValueUsed(&VirtReg, ParentVNI);   [in reMaterializeFor()]
  671  tie(Reads, Writes) = MI->readsWritesVirtualRegister(VirtReg.reg, &Ops);   [in reMaterializeFor()]
  676  markValueUsed(&VirtReg, ParentVNI);   [in reMaterializeFor()]
  705  if (MO.isReg() && MO.isUse() && MO.getReg() == VirtReg.reg) {   [in reMaterializeFor()]
PrologEpilogInserter.cpp
  810  unsigned VirtReg = 0;   [in scavengeFrameVirtualRegs(), local]
  830  if (Reg != VirtReg) {   [in scavengeFrameVirtualRegs()]
  836  VirtReg = Reg;   [in scavengeFrameVirtualRegs()]