/external/llvm/lib/Target/PowerPC/ |
D | PPCVSXCopy.cpp |
      57   MachineRegisterInfo &MRI) {  in IsRegInClass()
      59   return RC->hasSubClassEq(MRI.getRegClass(Reg));  in IsRegInClass()
      67   bool IsVSReg(unsigned Reg, MachineRegisterInfo &MRI) {  in IsVSReg()
      68   return IsRegInClass(Reg, &PPC::VSRCRegClass, MRI);  in IsVSReg()
      71   bool IsVRReg(unsigned Reg, MachineRegisterInfo &MRI) {  in IsVRReg()
      72   return IsRegInClass(Reg, &PPC::VRRCRegClass, MRI);  in IsVRReg()
      75   bool IsF8Reg(unsigned Reg, MachineRegisterInfo &MRI) {  in IsF8Reg()
      76   return IsRegInClass(Reg, &PPC::F8RCRegClass, MRI);  in IsF8Reg()
      79   bool IsVSFReg(unsigned Reg, MachineRegisterInfo &MRI) {  in IsVSFReg()
      80   return IsRegInClass(Reg, &PPC::VSFRCRegClass, MRI);  in IsVSFReg()
      [all …]
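
The PPCVSXCopy helpers listed above (IsVSReg, IsVRReg, IsF8Reg, IsVSFReg) all reduce to one MachineRegisterInfo query: is the class of a virtual register equal to, or a subclass of, a given register class? A minimal sketch of that pattern, assuming only the generic CodeGen headers; the real helpers pass the PPC target's generated classes (for example &PPC::VSRCRegClass) as RC and also handle physical registers, which is omitted here.

    #include "llvm/CodeGen/MachineRegisterInfo.h"
    #include "llvm/Target/TargetRegisterInfo.h"
    using namespace llvm;

    // Does virtual register Reg belong to RC or one of RC's subclasses?
    static bool IsRegInClass(unsigned Reg, const TargetRegisterClass *RC,
                             MachineRegisterInfo &MRI) {
      // MRI only records a register class for virtual registers.
      if (!TargetRegisterInfo::isVirtualRegister(Reg))
        return false;
      return RC->hasSubClassEq(MRI.getRegClass(Reg));
    }
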
|
/external/llvm/lib/Target/AArch64/ |
D | AArch64AdvSIMDScalarPass.cpp |
      72   MachineRegisterInfo *MRI;  member in __anonc0d6275b0111::AArch64AdvSIMDScalar
      113  const MachineRegisterInfo *MRI) {  in isGPR64() (argument)
      117  return MRI->getRegClass(Reg)->hasSuperClassEq(&AArch64::GPR64RegClass);  in isGPR64()
      122  const MachineRegisterInfo *MRI) {  in isFPR64() (argument)
      124  return (MRI->getRegClass(Reg)->hasSuperClassEq(&AArch64::FPR64RegClass) &&  in isFPR64()
      126  (MRI->getRegClass(Reg)->hasSuperClassEq(&AArch64::FPR128RegClass) &&  in isFPR64()
      136  const MachineRegisterInfo *MRI,  in getSrcFromCopy() (argument)
      153  MRI) &&  in getSrcFromCopy()
      154  isGPR64(MI->getOperand(1).getReg(), MI->getOperand(1).getSubReg(), MRI))  in getSrcFromCopy()
      157  MRI) &&  in getSrcFromCopy()
      [all …]
|
/external/llvm/lib/Target/AMDGPU/ |
D | SIFixSGPRCopies.cpp |
      116  const MachineRegisterInfo &MRI = MI.getParent()->getParent()->getRegInfo();  in hasVGPROperands() (local)
      122  if (TRI->hasVGPRs(MRI.getRegClass(MI.getOperand(i).getReg())))  in hasVGPROperands()
      131  const MachineRegisterInfo &MRI) {  in getCopyRegClasses() (argument)
      137  MRI.getRegClass(SrcReg) :  in getCopyRegClasses()
      145  MRI.getRegClass(DstReg) :  in getCopyRegClasses()
      179  MachineRegisterInfo &MRI) {  in foldVGPRCopyIntoRegSequence() (argument)
      183  if (!TRI->isSGPRClass(MRI.getRegClass(DstReg)))  in foldVGPRCopyIntoRegSequence()
      186  if (!MRI.hasOneUse(DstReg))  in foldVGPRCopyIntoRegSequence()
      189  MachineInstr &CopyUse = *MRI.use_instr_begin(DstReg);  in foldVGPRCopyIntoRegSequence()
      194  std::tie(SrcRC, DstRC) = getCopyRegClasses(CopyUse, *TRI, MRI);  in foldVGPRCopyIntoRegSequence()
      [all …]
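
The getCopyRegClasses() matches show the usual split when classifying a COPY's operands: a virtual register's class comes from MRI, while a physical register has to be resolved through the target register info. A sketch of that step, with the generic getMinimalPhysRegClass() standing in for the AMDGPU-specific lookup the real pass uses:

    #include <utility>
    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    #include "llvm/Target/TargetRegisterInfo.h"
    using namespace llvm;

    static std::pair<const TargetRegisterClass *, const TargetRegisterClass *>
    getCopyRegClasses(const MachineInstr &Copy, const TargetRegisterInfo &TRI,
                      const MachineRegisterInfo &MRI) {
      unsigned DstReg = Copy.getOperand(0).getReg();
      unsigned SrcReg = Copy.getOperand(1).getReg();

      // Virtual registers carry their class in MRI; physical registers are
      // mapped to a class by the target register info.
      const TargetRegisterClass *SrcRC =
          TargetRegisterInfo::isVirtualRegister(SrcReg)
              ? MRI.getRegClass(SrcReg)
              : TRI.getMinimalPhysRegClass(SrcReg);
      const TargetRegisterClass *DstRC =
          TargetRegisterInfo::isVirtualRegister(DstReg)
              ? MRI.getRegClass(DstReg)
              : TRI.getMinimalPhysRegClass(DstReg);
      return std::make_pair(SrcRC, DstRC);
    }
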
|
D | SIShrinkInstructions.cpp |
      70   const MachineRegisterInfo &MRI) {  in isVGPR() (argument)
      75   return TRI.hasVGPRs(MRI.getRegClass(MO->getReg()));  in isVGPR()
      82   const MachineRegisterInfo &MRI) {  in canShrink() (argument)
      96   if (!isVGPR(Src2, TRI, MRI) ||  in canShrink()
      110  if (Src1 && (!isVGPR(Src1, TRI, MRI) || (Src1Mod && Src1Mod->getImm() != 0)))  in canShrink()
      130  MachineRegisterInfo &MRI, bool TryToCommute = true) {  in foldImmediates() (argument)
      132  if (!MRI.isSSA())  in foldImmediates()
      150  if (Src0.isReg() && !isVGPR(&Src0, TRI, MRI))  in foldImmediates()
      154  if (Src0.isReg() && MRI.hasOneUse(Src0.getReg())) {  in foldImmediates()
      156  MachineInstr *Def = MRI.getUniqueVRegDef(Reg);  in foldImmediates()
      [all …]
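
The foldImmediates() matches turn on a common SSA question: is this register operand fed by a single-use def that merely materializes an immediate, so the immediate can be folded in and the def erased? A hedged skeleton of that check using only generic MachineInstr queries (the helper name is illustrative); the real pass also knows which SI encodings accept an inline constant, which is not modelled here.

    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    #include "llvm/Target/TargetRegisterInfo.h"
    using namespace llvm;

    // Illustrative helper, not taken from the pass itself.
    static bool tryFoldImmediateOperand(MachineOperand &UseMO,
                                        MachineRegisterInfo &MRI) {
      if (!MRI.isSSA() || !UseMO.isReg() || UseMO.isTied() || UseMO.getSubReg())
        return false;
      unsigned Reg = UseMO.getReg();
      if (!TargetRegisterInfo::isVirtualRegister(Reg) || !MRI.hasOneUse(Reg))
        return false;

      MachineInstr *Def = MRI.getUniqueVRegDef(Reg);
      if (!Def || !Def->isMoveImmediate() || !Def->getOperand(1).isImm())
        return false;

      UseMO.ChangeToImmediate(Def->getOperand(1).getImm());
      Def->eraseFromParent(); // its only use is gone, so the def is dead now
      return true;
    }
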
|
D | SIInstrInfo.cpp |
      334   const MachineRegisterInfo &MRI =  in shouldClusterMemOps() (local)
      336   const TargetRegisterClass *DstRC = MRI.getRegClass(FirstDst->getReg());  in shouldClusterMemOps()
      594   MachineRegisterInfo &MRI = MF->getRegInfo();  in storeRegToStackSlot() (local)
      595   MRI.constrainRegClass(SrcReg, &AMDGPU::SReg_32_XM0RegClass);  in storeRegToStackSlot()
      694   MachineRegisterInfo &MRI = MF->getRegInfo();  in loadRegFromStackSlot() (local)
      695   MRI.constrainRegClass(DestReg, &AMDGPU::SReg_32_XM0RegClass);  in loadRegFromStackSlot()
      977   const MachineRegisterInfo &MRI = MI.getParent()->getParent()->getRegInfo();  in commuteInstructionImpl() (local)
      978   if (!isLegalRegOperand(MRI, InstrDesc.OpInfo[Src1Idx], Src0))  in commuteInstructionImpl()
      1217  unsigned Reg, MachineRegisterInfo *MRI) const {  in FoldImmediate()
      1218  if (!MRI->hasOneNonDBGUse(Reg))  in FoldImmediate()
      [all …]
|
/external/llvm/lib/Target/WebAssembly/ |
D | WebAssemblyRegColoring.cpp |
      63   static float computeWeight(const MachineRegisterInfo *MRI,  in computeWeight() (argument)
      67   for (MachineOperand &MO : MRI->reg_nodbg_operands(VReg))  in computeWeight()
      86   MachineRegisterInfo *MRI = &MF.getRegInfo();  in runOnMachineFunction() (local)
      93   unsigned NumVRegs = MRI->getNumVirtRegs();  in runOnMachineFunction()
      103  if (MRI->use_empty(VReg))  in runOnMachineFunction()
      108  LI->weight = computeWeight(MRI, MBFI, VReg);  in runOnMachineFunction()
      119  [MRI](LiveInterval *LHS, LiveInterval *RHS) {  in runOnMachineFunction()
      120  if (MRI->isLiveIn(LHS->reg) != MRI->isLiveIn(RHS->reg))  in runOnMachineFunction()
      121  return MRI->isLiveIn(LHS->reg);  in runOnMachineFunction()
      139  const TargetRegisterClass *RC = MRI->getRegClass(Old);  in runOnMachineFunction()
      [all …]
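
The register-coloring matches show the canonical way to visit every virtual register in a function: indices 0 through getNumVirtRegs() map to vreg numbers via index2VirtReg(), and unused vregs are skipped before any work is done. A minimal sketch of that loop (the helper name is illustrative), with the pass's spill-weight computation reduced to a plain operand count:

    #include <utility>
    #include <vector>
    #include "llvm/CodeGen/MachineFunction.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    #include "llvm/Target/TargetRegisterInfo.h"
    using namespace llvm;

    // Illustrative helper: returns (vreg, non-debug operand count) for every
    // virtual register that is actually used.
    static std::vector<std::pair<unsigned, unsigned>>
    collectLiveVRegs(MachineFunction &MF) {
      MachineRegisterInfo &MRI = MF.getRegInfo();
      std::vector<std::pair<unsigned, unsigned>> Out;
      for (unsigned I = 0, E = MRI.getNumVirtRegs(); I != E; ++I) {
        unsigned VReg = TargetRegisterInfo::index2VirtReg(I);
        if (MRI.use_empty(VReg))
          continue; // never read: not worth tracking
        unsigned NumOps = 0;
        for (const MachineOperand &MO : MRI.reg_nodbg_operands(VReg))
          NumOps += MO.isReg(); // every entry on a vreg's chain is a reg operand
        Out.emplace_back(VReg, NumOps);
      }
      return Out;
    }
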
|
D | WebAssemblyPeephole.cpp |
      60   MachineRegisterInfo &MRI) {  in MaybeRewriteToDrop() (argument)
      64   unsigned NewReg = MRI.createVirtualRegister(MRI.getRegClass(OldReg));  in MaybeRewriteToDrop()
      75   MachineRegisterInfo &MRI,  in MaybeRewriteToFallthrough() (argument)
      91   unsigned NewReg = MRI.createVirtualRegister(MRI.getRegClass(Reg));  in MaybeRewriteToFallthrough()
      109  MachineRegisterInfo &MRI = MF.getRegInfo();  in runOnMachineFunction() (local)
      138  Changed |= MaybeRewriteToDrop(OldReg, NewReg, MO, MFI, MRI);  in runOnMachineFunction()
      159  if (MRI.getRegClass(NewReg) != MRI.getRegClass(OldReg))  in runOnMachineFunction()
      162  Changed |= MaybeRewriteToDrop(OldReg, NewReg, MO, MFI, MRI);  in runOnMachineFunction()
      171  MI, MBB, MF, MFI, MRI, TII, WebAssembly::FALLTHROUGH_RETURN_I32,  in runOnMachineFunction()
      176  MI, MBB, MF, MFI, MRI, TII, WebAssembly::FALLTHROUGH_RETURN_I64,  in runOnMachineFunction()
      [all …]
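
Both MaybeRewriteToDrop() and MaybeRewriteToFallthrough() above lean on the same move: detach a result nobody needs by giving the def operand a brand-new virtual register of the same class and marking it dead. A sketch of just that step (the helper name is illustrative; the real pass additionally records the new register with WebAssemblyFunctionInfo):

    #include "llvm/CodeGen/MachineOperand.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    using namespace llvm;

    // Illustrative helper: point a def operand at a fresh vreg of the same
    // class and mark it dead, so the previously defined value is dropped.
    static unsigned rewriteDefToFreshDeadReg(MachineOperand &DefMO,
                                             MachineRegisterInfo &MRI) {
      unsigned OldReg = DefMO.getReg();
      unsigned NewReg = MRI.createVirtualRegister(MRI.getRegClass(OldReg));
      DefMO.setReg(NewReg);
      DefMO.setIsDead(); // nothing will ever read the new register
      return NewReg;
    }
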
|
D | WebAssemblyStoreResults.cpp |
      80   const MachineRegisterInfo &MRI,  in ReplaceDominatedUses() (argument)
      93   for (auto I = MRI.use_begin(FromReg), E = MRI.use_end(); I != E;) {  in ReplaceDominatedUses()
      143  const MachineRegisterInfo &MRI,  in optimizeStore() (argument)
      148  return ReplaceDominatedUses(MBB, MI, FromReg, ToReg, MRI, MDT, LIS);  in optimizeStore()
      152  const MachineRegisterInfo &MRI,  in optimizeCall() (argument)
      174  if (MRI.getRegClass(FromReg) != MRI.getRegClass(ToReg))  in optimizeCall()
      177  return ReplaceDominatedUses(MBB, MI, FromReg, ToReg, MRI, MDT, LIS);  in optimizeCall()
      186  MachineRegisterInfo &MRI = MF.getRegInfo();  in runOnMachineFunction() (local)
      195  MRI.leaveSSA();  in runOnMachineFunction()
      197  assert(MRI.tracksLiveness() && "StoreResults expects liveness tracking");  in runOnMachineFunction()
      [all …]
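
The core of these matches is ReplaceDominatedUses(): only uses of FromReg that the rewriting instruction dominates may be redirected to ToReg, and the use iterator has to be advanced before each operand is rewritten, because setReg() moves the operand off FromReg's use list. A sketch of that loop without the LiveIntervals bookkeeping the real pass performs:

    #include "llvm/CodeGen/MachineDominators.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    using namespace llvm;

    static bool replaceDominatedUses(MachineInstr &MI, unsigned FromReg,
                                     unsigned ToReg, MachineRegisterInfo &MRI,
                                     MachineDominatorTree &MDT) {
      bool Changed = false;
      for (auto I = MRI.use_begin(FromReg), E = MRI.use_end(); I != E;) {
        MachineOperand &O = *I++;        // advance first: setReg unlinks O
        MachineInstr *Where = O.getParent();
        if (Where != &MI && !MDT.dominates(&MI, Where))
          continue;                      // this use may still see the old value
        O.setReg(ToReg);
        Changed = true;
      }
      return Changed;
    }
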
|
D | WebAssemblyRegStackify.cpp |
      228  const MachineRegisterInfo &MRI,  in GetVRegDef() (argument)
      232  if (MachineInstr *Def = MRI.getUniqueVRegDef(Reg))  in GetVRegDef()
      247  MachineRegisterInfo &MRI, MachineDominatorTree &MDT,  in HasOneUse() (argument)
      250  if (MRI.hasOneUse(Reg))  in HasOneUse()
      258  for (auto I : MRI.use_nodbg_operands(Reg)) {  in HasOneUse()
      278  const MachineRegisterInfo &MRI) {  in IsSafeToMove() (argument)
      298  if (!MRI.isPhysRegModified(Reg))  in IsSafeToMove()
      354  const MachineRegisterInfo &MRI,  in OneUseDominatesOtherUses() (argument)
      363  for (const MachineOperand &Use : MRI.use_operands(Reg)) {  in OneUseDominatesOtherUses()
      396  assert(MRI.hasOneUse(DefReg));  in OneUseDominatesOtherUses()
      [all …]
|
D | WebAssemblyFrameLowering.cpp |
      87   MachineRegisterInfo &MRI = MF.getRegInfo();  in writeSPToMemory() (local)
      89   MRI.getTargetRegisterInfo()->getPointerRegClass(MF);  in writeSPToMemory()
      90   unsigned Zero = MRI.createVirtualRegister(PtrRC);  in writeSPToMemory()
      91   unsigned Drop = MRI.createVirtualRegister(PtrRC);  in writeSPToMemory()
      133  auto &MRI = MF.getRegInfo();  in emitPrologue() (local)
      139  MRI.getTargetRegisterInfo()->getPointerRegClass(MF);  in emitPrologue()
      140  unsigned Zero = MRI.createVirtualRegister(PtrRC);  in emitPrologue()
      141  unsigned SPReg = MRI.createVirtualRegister(PtrRC);  in emitPrologue()
      159  unsigned OffsetReg = MRI.createVirtualRegister(PtrRC);  in emitPrologue()
      186  auto &MRI = MF.getRegInfo();  in emitEpilogue() (local)
      [all …]
|
D | WebAssemblyPrepareForLiveIntervals.cpp |
      75   static bool HasArgumentDef(unsigned Reg, const MachineRegisterInfo &MRI) {  in HasArgumentDef() (argument)
      76   for (auto &Def : MRI.def_instructions(Reg))  in HasArgumentDef()
      89   MachineRegisterInfo &MRI = MF.getRegInfo();  in runOnMachineFunction() (local)
      97   MRI.leaveSSA();  in runOnMachineFunction()
      106  for (unsigned i = 0, e = MRI.getNumVirtRegs(); i < e; ++i) {  in runOnMachineFunction()
      110  if (MRI.use_nodbg_empty(Reg))  in runOnMachineFunction()
      114  if (HasArgumentDef(Reg, MRI))  in runOnMachineFunction()
|
D | WebAssemblyReplacePhysRegs.cpp |
      66   MachineRegisterInfo &MRI = MF.getRegInfo();  in runOnMachineFunction() (local)
      73   MRI.leaveSSA();  in runOnMachineFunction()
      74   MRI.invalidateLiveness();  in runOnMachineFunction()
      85   for (auto I = MRI.reg_begin(PReg), E = MRI.reg_end(); I != E; ) {  in runOnMachineFunction()
      89   VReg = MRI.createVirtualRegister(RC);  in runOnMachineFunction()
|
/external/llvm/lib/CodeGen/ |
D | PeepholeOptimizer.cpp |
      126  MachineRegisterInfo *MRI;  member in __anonc6b70f480111::PeepholeOptimizer
      310  const MachineRegisterInfo &MRI;  member in __anonc6b70f480111::ValueTracker
      351  const MachineRegisterInfo &MRI,  in ValueTracker() (argument)
      355  UseAdvancedTracking(UseAdvancedTracking), MRI(MRI), TII(TII) {  in ValueTracker()
      357  Def = MRI.getVRegDef(Reg);  in ValueTracker()
      358  DefIdx = MRI.def_begin(Reg).getOperandNo();  in ValueTracker()
      370  const MachineRegisterInfo &MRI,  in ValueTracker() (argument)
      374  UseAdvancedTracking(UseAdvancedTracking), MRI(MRI), TII(TII) {  in ValueTracker()
      422  if (MRI->hasOneNonDBGUse(SrcReg))  in INITIALIZE_PASS_DEPENDENCY()
      428  const TargetRegisterClass *DstRC = MRI->getRegClass(DstReg);  in INITIALIZE_PASS_DEPENDENCY()
      [all …]
|
D | DetectDeadLanes.cpp |
      111  const MachineRegisterInfo *MRI;  member in __anon257b486d0111::DetectDeadLanes
      155  static bool isCrossCopy(const MachineRegisterInfo &MRI,  in isCrossCopy() (argument)
      161  const TargetRegisterClass *SrcRC = MRI.getRegClass(SrcReg);  in isCrossCopy()
      167  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();  in isCrossCopy()
      207  UsedLanes &= MRI->getMaxLaneMaskForVReg(MOReg);  in addUsedLanesOnOperand()
      257  const TargetRegisterClass *RC = MRI->getRegClass(DefReg);  in transferUsedLanes()
      351  DefinedLanes &= MRI->getMaxLaneMaskForVReg(Def.getReg());  in transferDefinedLanes()
      358  if (!MRI->hasOneDef(Reg))  in determineInitialDefinedLanes()
      361  const MachineOperand &Def = *MRI->def_begin(Reg);  in determineInitialDefinedLanes()
      376  const TargetRegisterClass *DefRC = MRI->getRegClass(Reg);  in determineInitialDefinedLanes()
      [all …]
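
The DetectDeadLanes matches keep intersecting lane masks obtained from MRI->getMaxLaneMaskForVReg(). The underlying question is which lanes of a virtual register an operand actually touches: all lanes of its class, or only those covered by a subregister index. A small sketch of that query (the helper name is illustrative); this is an assumed reading of how the pass seeds its masks, not a copy of its code:

    #include <cassert>
    #include "llvm/CodeGen/MachineOperand.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    #include "llvm/Target/TargetRegisterInfo.h"
    using namespace llvm;

    // Illustrative helper: lanes of MO.getReg() that operand MO can touch.
    static LaneBitmask operandLaneMask(const MachineOperand &MO,
                                       const MachineRegisterInfo &MRI) {
      assert(TargetRegisterInfo::isVirtualRegister(MO.getReg()) &&
             "lane masks are only tracked for virtual registers");
      const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
      if (unsigned SubIdx = MO.getSubReg())
        return TRI.getSubRegIndexLaneMask(SubIdx); // only the subreg's lanes
      return MRI.getMaxLaneMaskForVReg(MO.getReg()); // the whole register
    }
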
|
D | RegAllocBase.cpp |
      58   MRI = &vrm.getRegInfo();  in init()
      62   MRI->freezeReservedRegs(vrm.getMachineFunction());  in init()
      71   for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {  in seedLiveRegs()
      73   if (MRI->reg_nodbg_empty(Reg))  in seedLiveRegs()
      89   if (MRI->reg_nodbg_empty(VirtReg->reg)) {  in allocatePhysRegs()
      103  << TRI->getRegClassName(MRI->getRegClass(VirtReg->reg))  in allocatePhysRegs()
      114  I = MRI->reg_instr_begin(VirtReg->reg), E = MRI->reg_instr_end();  in allocatePhysRegs()
      128  RegClassInfo.getOrder(MRI->getRegClass(VirtReg->reg)).front());  in allocatePhysRegs()
      139  if (MRI->reg_nodbg_empty(SplitVirtReg->reg)) {  in allocatePhysRegs()
|
D | RegisterPressure.cpp |
      27   const MachineRegisterInfo &MRI, unsigned Reg,  in increaseSetPressure() (argument)
      33   PSetIterator PSetI = MRI.getPressureSets(Reg);  in increaseSetPressure()
      41   const MachineRegisterInfo &MRI, unsigned Reg,  in decreaseSetPressure() (argument)
      47   PSetIterator PSetI = MRI.getPressureSets(Reg);  in decreaseSetPressure()
      118  PSetIterator PSetI = MRI->getPressureSets(RegUnit);  in increaseRegPressure()
      130  decreaseSetPressure(CurrSetPressure, *MRI, RegUnit, PreviousMask, NewMask);  in decreaseRegPressure()
      182  void LiveRegSet::init(const MachineRegisterInfo &MRI) {  in init() (argument)
      183  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();  in init()
      185  unsigned NumVirtRegs = MRI.getNumVirtRegs();  in init()
      231  MRI = &MF->getRegInfo();  in init()
      [all …]
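
The RegisterPressure matches all revolve around MRI.getPressureSets(): a register or register unit belongs to several pressure sets, and raising or lowering pressure means adding or subtracting the iterator's weight in each of those sets. A sketch of the increase side, leaving out the lane-mask refinement the real code applies; the decrease side is symmetric, subtracting the same weight.

    #include <vector>
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    using namespace llvm;

    // CurrSetPressure must be sized to the target's number of pressure sets.
    static void increaseSetPressure(std::vector<unsigned> &CurrSetPressure,
                                    const MachineRegisterInfo &MRI,
                                    unsigned Reg) {
      PSetIterator PSetI = MRI.getPressureSets(Reg);
      unsigned Weight = PSetI.getWeight();
      for (; PSetI.isValid(); ++PSetI)
        CurrSetPressure[*PSetI] += Weight; // one bump per set Reg belongs to
    }
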
|
D | MachineCSE.cpp |
      48   MachineRegisterInfo *MRI;  member in __anone126a1830111::MachineCSE
      131  bool OnlyOneUse = MRI->hasOneNonDBGUse(Reg);  in INITIALIZE_PASS_DEPENDENCY()
      132  MachineInstr *DefMI = MRI->getVRegDef(Reg);  in INITIALIZE_PASS_DEPENDENCY()
      154  const TargetRegisterClass *RC = MRI->getRegClass(Reg);  in INITIALIZE_PASS_DEPENDENCY()
      155  if (!MRI->constrainRegClass(SrcReg, RC))  in INITIALIZE_PASS_DEPENDENCY()
      161  MRI->clearKillFlags(SrcReg);  in INITIALIZE_PASS_DEPENDENCY()
      230  if (!MRI->isConstantPhysReg(Reg, *MBB->getParent()))  in hasLivePhysRegDefUses()
      281  if (MRI->isAllocatable(PhysDefs[i]) || MRI->isReserved(PhysDefs[i]))  in PhysRegDefsReach()
      377  for (MachineInstr &MI : MRI->use_nodbg_instructions(CSReg)) {  in isProfitableToCSE()
      380  for (MachineInstr &MI : MRI->use_nodbg_instructions(Reg)) {  in isProfitableToCSE()
      [all …]
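
The MachineCSE matches in the 131 to 161 range are the classic look-through-a-COPY propagation: if a use is fed by a plain vreg-to-vreg COPY, try to constrain the copy's source to the class the use needs and read the source directly; if that was the copy's only non-debug use, the copy dies. A hedged sketch of that step (the helper name is illustrative); the real pass performs further legality checks that are omitted here:

    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    #include "llvm/Target/TargetRegisterInfo.h"
    using namespace llvm;

    // Illustrative helper, not taken from the pass itself.
    static bool propagateTrivialCopy(MachineOperand &UseMO,
                                     MachineRegisterInfo &MRI) {
      if (!UseMO.isReg() || !UseMO.isUse())
        return false;
      unsigned Reg = UseMO.getReg();
      if (!TargetRegisterInfo::isVirtualRegister(Reg))
        return false;

      bool OnlyOneUse = MRI.hasOneNonDBGUse(Reg);
      MachineInstr *DefMI = MRI.getVRegDef(Reg);
      if (!DefMI || !DefMI->isCopy())
        return false;
      unsigned SrcReg = DefMI->getOperand(1).getReg();
      if (!TargetRegisterInfo::isVirtualRegister(SrcReg) ||
          DefMI->getOperand(0).getSubReg() || DefMI->getOperand(1).getSubReg())
        return false;

      // The source must be able to play the role the copy's result plays here.
      if (!MRI.constrainRegClass(SrcReg, MRI.getRegClass(Reg)))
        return false;

      UseMO.setReg(SrcReg);
      MRI.clearKillFlags(SrcReg);   // old kill flags may now be wrong
      if (OnlyOneUse)
        DefMI->eraseFromParent();   // the copy just became dead
      return true;
    }
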
|
/external/llvm/lib/Target/Hexagon/ |
D | HexagonBitSimplify.cpp |
      166  MachineRegisterInfo &MRI);
      168  unsigned &Begin, unsigned &Width, MachineRegisterInfo &MRI);
      170  unsigned NewSR, MachineRegisterInfo &MRI);
      172  unsigned NewR, unsigned NewSR, MachineRegisterInfo &MRI);
      182  const BitTracker::RegisterRef &RR, MachineRegisterInfo &MRI);
      184  const BitTracker::RegisterRef &RS, MachineRegisterInfo &MRI);
      312  MachineRegisterInfo &MRI) {  in replaceReg() (argument)
      316  auto Begin = MRI.use_begin(OldR), End = MRI.use_end();  in replaceReg()
      327  unsigned NewSR, MachineRegisterInfo &MRI) {  in replaceRegWithSub() (argument)
      331  auto Begin = MRI.use_begin(OldR), End = MRI.use_end();  in replaceRegWithSub()
      [all …]
|
/external/swiftshader/third_party/LLVM/lib/CodeGen/ |
D | PeepholeOptimizer.cpp |
      77   MachineRegisterInfo *MRI;  member in __anon8eacc2f40111::PeepholeOptimizer
      142  MachineRegisterInfo::use_nodbg_iterator UI = MRI->use_nodbg_begin(SrcReg);  in OptimizeExtInstr()
      143  if (++UI == MRI->use_nodbg_end())  in OptimizeExtInstr()
      150  UI = MRI->use_nodbg_begin(DstReg);  in OptimizeExtInstr()
      151  for (MachineRegisterInfo::use_nodbg_iterator UE = MRI->use_nodbg_end();  in OptimizeExtInstr()
      162  UI = MRI->use_nodbg_begin(SrcReg);  in OptimizeExtInstr()
      163  for (MachineRegisterInfo::use_nodbg_iterator UE = MRI->use_nodbg_end();  in OptimizeExtInstr()
      229  UI = MRI->use_nodbg_begin(DstReg);  in OptimizeExtInstr()
      231  UE = MRI->use_nodbg_end(); UI != UE; ++UI)  in OptimizeExtInstr()
      235  const TargetRegisterClass *RC = MRI->getRegClass(SrcReg);  in OptimizeExtInstr()
      [all …]
|
D | TwoAddressInstructionPass.cpp |
      64   MachineRegisterInfo *MRI;  member in __anondebe4d580111::TwoAddressInstructionPass
      215  UI = MRI->use_nodbg_begin(SavedReg),  in Sink3AddrInstruction()
      216  UE = MRI->use_nodbg_end(); UI != UE; ++UI) {  in Sink3AddrInstruction()
      311  for (MachineRegisterInfo::use_nodbg_iterator UI = MRI->use_nodbg_begin(Reg),  in isProfitableToReMat()
      312  UE = MRI->use_nodbg_end(); UI != UE; ++UI) {  in isProfitableToReMat()
      346  for (MachineRegisterInfo::reg_iterator I = MRI->reg_begin(Reg),  in NoUseAfterLastDef()
      347  E = MRI->reg_end(); I != E; ++I) {  in NoUseAfterLastDef()
      369  for (MachineRegisterInfo::reg_iterator I = MRI->reg_begin(Reg),  in FindLastUseInMBB()
      370  E = MRI->reg_end(); I != E; ++I) {  in FindLastUseInMBB()
      427  const MachineRegisterInfo *MRI,  in isKilled() (argument)
      [all …]
|
D | MachineSink.cpp |
      48   MachineRegisterInfo *MRI; // Machine register information  member in __anonddc2ccc90111::MachineSinking
      118  !MRI->hasOneNonDBGUse(SrcReg))  in PerformTrivialForwardCoalescing()
      121  const TargetRegisterClass *SRC = MRI->getRegClass(SrcReg);  in PerformTrivialForwardCoalescing()
      122  const TargetRegisterClass *DRC = MRI->getRegClass(DstReg);  in PerformTrivialForwardCoalescing()
      126  MachineInstr *DefMI = MRI->getVRegDef(SrcReg);  in PerformTrivialForwardCoalescing()
      131  MRI->replaceRegWith(DstReg, SrcReg);  in PerformTrivialForwardCoalescing()
      150  if (MRI->use_nodbg_empty(Reg))  in AllUsesDominatedByBlock()
      175  I = MRI->use_nodbg_begin(Reg), E = MRI->use_nodbg_end();  in AllUsesDominatedByBlock()
      189  I = MRI->use_nodbg_begin(Reg), E = MRI->use_nodbg_end();  in AllUsesDominatedByBlock()
      217  MRI = &MF.getRegInfo();  in runOnMachineFunction()
      [all …]
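
The PerformTrivialForwardCoalescing() matches describe the cheapest form of copy elimination: when a COPY's source vreg has no other non-debug use and both vregs share a register class, every reader of the destination can read the source instead, and the COPY is deleted. A sketch of that transformation under exactly those assumptions (the helper name is illustrative):

    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    #include "llvm/Target/TargetRegisterInfo.h"
    using namespace llvm;

    // Illustrative helper, not taken from the pass itself.
    static bool coalesceTrivialCopy(MachineInstr &Copy,
                                    MachineRegisterInfo &MRI) {
      if (!Copy.isCopy())
        return false;
      unsigned SrcReg = Copy.getOperand(1).getReg();
      unsigned DstReg = Copy.getOperand(0).getReg();
      if (!TargetRegisterInfo::isVirtualRegister(SrcReg) ||
          !TargetRegisterInfo::isVirtualRegister(DstReg) ||
          SrcReg == DstReg || !MRI.hasOneNonDBGUse(SrcReg) ||
          MRI.getRegClass(SrcReg) != MRI.getRegClass(DstReg))
        return false;

      MRI.replaceRegWith(DstReg, SrcReg); // all readers of Dst now read Src
      MRI.clearKillFlags(SrcReg);         // Src now lives past its old kill point
      Copy.eraseFromParent();             // the copy itself is dead
      return true;
    }
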
|
/external/llvm/lib/Target/ARM/ |
D | A15SDOptimizer.cpp |
      62   MachineRegisterInfo *MRI;  member
      142  return MRI->getRegClass(Reg)->hasSuperClassEq(TRC);  in usesRegClass()
      160  MachineInstr *MI = MRI->getVRegDef(SReg);  in getPrefSPRLane()
      227  II = MRI->use_instr_begin(Reg), EE = MRI->use_instr_end();  in eraseInstrWithNoUses()
      259  MachineInstr *DPRMI = MRI->getVRegDef(MI->getOperand(1).getReg());  in optimizeSDPattern()
      260  MachineInstr *SPRMI = MRI->getVRegDef(MI->getOperand(2).getReg());  in optimizeSDPattern()
      279  MRI->getRegClass(MI->getOperand(1).getReg());  in optimizeSDPattern()
      280  if (TRC->hasSuperClassEq(MRI->getRegClass(FullReg))) {  in optimizeSDPattern()
      311  MachineInstr *Def = MRI->getVRegDef(OpReg);  in optimizeSDPattern()
      354  MachineInstr *Def = MRI->getVRegDef(MI->getOperand(1).getReg());  in elideCopies()
      [all …]
|
D | MLxExpansionPass.cpp |
      53   MachineRegisterInfo *MRI;  member
      95   MachineInstr *DefMI = MRI->getVRegDef(Reg);  in getAccDefMI()
      102  DefMI = MRI->getVRegDef(Reg);  in getAccDefMI()
      108  DefMI = MRI->getVRegDef(Reg);  in getAccDefMI()
      120  !MRI->hasOneNonDBGUse(Reg))  in getDefReg()
      124  MachineInstr *UseMI = &*MRI->use_instr_nodbg_begin(Reg);  in getDefReg()
      131  !MRI->hasOneNonDBGUse(Reg))  in getDefReg()
      133  UseMI = &*MRI->use_instr_nodbg_begin(Reg);  in getDefReg()
      149  MachineInstr *DefMI = MRI->getVRegDef(Reg);  in hasLoopHazard()
      160  DefMI = MRI->getVRegDef(SrcReg);  in hasLoopHazard()
      [all …]
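
The getAccDefMI() and hasLoopHazard() matches call MRI->getVRegDef() in a loop: starting from a register use, the pass walks back through same-block copies to the instruction that really produces the value. A sketch of that walk (the helper name is illustrative); the real ARM pass also looks through INSERT_SUBREG and other copy-like instructions, which are skipped here:

    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    #include "llvm/Target/TargetRegisterInfo.h"
    using namespace llvm;

    // Illustrative helper: follow plain COPYs within MBB back to the producer.
    static MachineInstr *lookThroughCopies(unsigned Reg, MachineBasicBlock *MBB,
                                           MachineRegisterInfo &MRI) {
      if (!TargetRegisterInfo::isVirtualRegister(Reg))
        return nullptr;
      MachineInstr *DefMI = MRI.getVRegDef(Reg);
      while (DefMI && DefMI->getParent() == MBB && DefMI->isCopy()) {
        unsigned SrcReg = DefMI->getOperand(1).getReg();
        if (!TargetRegisterInfo::isVirtualRegister(SrcReg))
          break;              // reached a physical register: stop the walk
        DefMI = MRI.getVRegDef(SrcReg);
      }
      return DefMI;
    }
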
|
/external/llvm/lib/Target/NVPTX/ |
D | NVPTXPeephole.cpp |
      83   const auto &MRI = MF.getRegInfo();  in isCVTAToLocalCombinationCandidate() (local)
      86   GenericAddrDef = MRI.getUniqueVRegDef(Op.getReg());  in isCVTAToLocalCombinationCandidate()
      108  const auto &MRI = MF.getRegInfo();  in CombineCVTAToLocal() (local)
      110  auto &Prev = *MRI.getUniqueVRegDef(Root.getOperand(1).getReg());  in CombineCVTAToLocal()
      121  if (MRI.hasOneNonDBGUse(Prev.getOperand(0).getReg())) {  in CombineCVTAToLocal()
      147  const auto &MRI = MF.getRegInfo();  in runOnMachineFunction() (local)
      148  if (MRI.use_empty(NVPTX::VRFrame)) {  in runOnMachineFunction()
      149  if (auto MI = MRI.getUniqueVRegDef(NVPTX::VRFrame)) {  in runOnMachineFunction()
|
/external/llvm/lib/Target/Mips/MCTargetDesc/ |
D | MipsMCTargetDesc.h |
      39   const MCRegisterInfo &MRI,
      42   const MCRegisterInfo &MRI,
      46   const MCRegisterInfo &MRI,
      49   const MCRegisterInfo &MRI,
      52   const MCRegisterInfo &MRI,
      55   const MCRegisterInfo &MRI,
|