Lines matching refs:MRI in SIInstrInfo.cpp (LLVM AMDGPU backend)
334 const MachineRegisterInfo &MRI = in shouldClusterMemOps() local
336 const TargetRegisterClass *DstRC = MRI.getRegClass(FirstDst->getReg()); in shouldClusterMemOps()
594 MachineRegisterInfo &MRI = MF->getRegInfo(); in storeRegToStackSlot() local
595 MRI.constrainRegClass(SrcReg, &AMDGPU::SReg_32_XM0RegClass); in storeRegToStackSlot()
694 MachineRegisterInfo &MRI = MF->getRegInfo(); in loadRegFromStackSlot() local
695 MRI.constrainRegClass(DestReg, &AMDGPU::SReg_32_XM0RegClass); in loadRegFromStackSlot()
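The two stack-slot groups above share a single pattern: fetch the function's MachineRegisterInfo and constrain the spilled virtual register to a scalar class. A minimal sketch of that pattern, with a hypothetical helper name not taken from the source:

    #include "llvm/CodeGen/MachineFunction.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"

    // Sketch only: constrain a virtual register's class before spilling or
    // reloading, mirroring the storeRegToStackSlot/loadRegFromStackSlot
    // entries above (which use SReg_32_XM0RegClass).
    static void constrainForScalarSpill(llvm::MachineFunction &MF, unsigned Reg,
                                        const llvm::TargetRegisterClass *RC) {
      llvm::MachineRegisterInfo &MRI = MF.getRegInfo();
      // constrainRegClass returns null if Reg cannot be narrowed to RC.
      MRI.constrainRegClass(Reg, RC);
    }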
977 const MachineRegisterInfo &MRI = MI.getParent()->getParent()->getRegInfo(); in commuteInstructionImpl() local
978 if (!isLegalRegOperand(MRI, InstrDesc.OpInfo[Src1Idx], Src0)) in commuteInstructionImpl()
1217 unsigned Reg, MachineRegisterInfo *MRI) const { in FoldImmediate()
1218 if (!MRI->hasOneNonDBGUse(Reg)) in FoldImmediate()
1246 if (!Src1->isReg() || RI.isSGPRClass(MRI->getRegClass(Src1->getReg()))) in FoldImmediate()
1249 if (!Src2->isReg() || RI.isSGPRClass(MRI->getRegClass(Src2->getReg()))) in FoldImmediate()
1281 bool DeleteDef = MRI->hasOneNonDBGUse(Reg); in FoldImmediate()
1293 (Src0->isReg() && RI.isSGPRClass(MRI->getRegClass(Src0->getReg())))) in FoldImmediate()
1296 if (!Src1->isReg() || RI.isSGPRClass(MRI->getRegClass(Src1->getReg()))) in FoldImmediate()
1322 bool DeleteDef = MRI->hasOneNonDBGUse(Reg); in FoldImmediate()
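The FoldImmediate entries revolve around two MRI queries: hasOneNonDBGUse, which gates whether the defining move can be folded and deleted, and getRegClass, which rejects SGPR sources. A hedged sketch of that check; the helper name is illustrative and the surrounding fold logic is elided:

    #include "SIRegisterInfo.h"
    #include "llvm/CodeGen/MachineOperand.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"

    // Sketch of the checks the FoldImmediate entries above perform.
    static bool foldableSingleUseSrc(const llvm::MachineRegisterInfo &MRI,
                                     const llvm::SIRegisterInfo &RI,
                                     const llvm::MachineOperand &Src,
                                     unsigned Reg) {
      if (!MRI.hasOneNonDBGUse(Reg))   // fold only when Reg has a single real use
        return false;
      // The fold needs a VGPR register source: reject non-register operands
      // and SGPR-class registers.
      if (!Src.isReg() || RI.isSGPRClass(MRI.getRegClass(Src.getReg())))
        return false;
      return true;
    }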
1577 bool SIInstrInfo::usesConstantBus(const MachineRegisterInfo &MRI, in usesConstantBus() argument
1588 return RI.isSGPRClass(MRI.getRegClass(MO.getReg())); in usesConstantBus()
1651 const MachineRegisterInfo &MRI = MI.getParent()->getParent()->getRegInfo(); in verifyInstruction() local
1741 if (usesConstantBus(MRI, MO, getOpSize(Opcode, OpIdx))) { in verifyInstruction()
1849 const MachineRegisterInfo &MRI = MI.getParent()->getParent()->getRegInfo(); in getOpRegClass() local
1856 return MRI.getRegClass(Reg); in getOpRegClass()
1880 MachineRegisterInfo &MRI = MBB->getParent()->getRegInfo(); in legalizeOpWithMove() local
1895 unsigned Reg = MRI.createVirtualRegister(VRC); in legalizeOpWithMove()
1902 MachineRegisterInfo &MRI, in buildExtractSubReg() argument
1910 unsigned SubReg = MRI.createVirtualRegister(SubRC); in buildExtractSubReg()
1922 unsigned NewSuperReg = MRI.createVirtualRegister(SuperRC); in buildExtractSubReg()
1935 MachineRegisterInfo &MRI, in buildExtractSubRegOrImm() argument
1950 unsigned SubReg = buildExtractSubReg(MII, MRI, Op, SuperRC, in buildExtractSubRegOrImm()
1963 bool SIInstrInfo::isLegalRegOperand(const MachineRegisterInfo &MRI, in isLegalRegOperand() argument
1972 MRI.getRegClass(Reg) : in isLegalRegOperand()
1976 static_cast<const SIRegisterInfo*>(MRI.getTargetRegisterInfo()); in isLegalRegOperand()
1991 bool SIInstrInfo::isLegalVSrcOperand(const MachineRegisterInfo &MRI, in isLegalVSrcOperand() argument
1995 return isLegalRegOperand(MRI, OpInfo, MO); in isLegalVSrcOperand()
2004 const MachineRegisterInfo &MRI = MI.getParent()->getParent()->getRegInfo(); in isOperandLegal() local
2012 if (isVALU(MI) && usesConstantBus(MRI, *MO, DefinedRC->getSize())) { in isOperandLegal()
2024 usesConstantBus(MRI, Op, getOpSize(MI, i))) { in isOperandLegal()
2035 return isLegalRegOperand(MRI, OpInfo, *MO); in isOperandLegal()
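isLegalRegOperand and isOperandLegal both need a register class whether the operand is still virtual or already physical; the listing shows MRI.getRegClass for the virtual case and the register info being fetched back out of MRI otherwise. A sketch of that split using only generic MachineRegisterInfo/TargetRegisterInfo calls (the backend itself goes through its own SIRegisterInfo helper for the physical case):

    #include "llvm/CodeGen/MachineRegisterInfo.h"
    #include "llvm/Target/TargetRegisterInfo.h"   // llvm/CodeGen/... in newer trees

    // Sketch: resolve a register class for a virtual or physical register.
    static const llvm::TargetRegisterClass *
    resolveRegClass(const llvm::MachineRegisterInfo &MRI, unsigned Reg) {
      if (llvm::TargetRegisterInfo::isVirtualRegister(Reg))
        return MRI.getRegClass(Reg);               // class tracked by MRI
      const llvm::TargetRegisterInfo *TRI = MRI.getTargetRegisterInfo();
      return TRI->getMinimalPhysRegClass(Reg);     // smallest class holding the physreg
    }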
2049 void SIInstrInfo::legalizeOperandsVOP2(MachineRegisterInfo &MRI, in legalizeOperandsVOP2() argument
2068 if (Src0.isReg() && RI.isSGPRReg(MRI, Src0.getReg())) in legalizeOperandsVOP2()
2074 if (isLegalRegOperand(MRI, InstrDesc.OpInfo[Src1Idx], Src1)) in legalizeOperandsVOP2()
2095 !isLegalRegOperand(MRI, InstrDesc.OpInfo[Src1Idx], Src0)) { in legalizeOperandsVOP2()
2128 void SIInstrInfo::legalizeOperandsVOP3(MachineRegisterInfo &MRI, in legalizeOperandsVOP3() argument
2151 if (!RI.isSGPRClass(MRI.getRegClass(MO.getReg()))) in legalizeOperandsVOP3()
2167 MachineRegisterInfo &MRI) const { in readlaneVGPRToSGPR()
2168 const TargetRegisterClass *VRC = MRI.getRegClass(SrcReg); in readlaneVGPRToSGPR()
2170 unsigned DstReg = MRI.createVirtualRegister(SRC); in readlaneVGPRToSGPR()
2175 unsigned SGPR = MRI.createVirtualRegister(&AMDGPU::SGPR_32RegClass); in readlaneVGPRToSGPR()
2192 void SIInstrInfo::legalizeOperandsSMRD(MachineRegisterInfo &MRI, in legalizeOperandsSMRD() argument
2200 if (SBase && !RI.isSGPRClass(MRI.getRegClass(SBase->getReg()))) { in legalizeOperandsSMRD()
2201 unsigned SGPR = readlaneVGPRToSGPR(SBase->getReg(), MI, MRI); in legalizeOperandsSMRD()
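readlaneVGPRToSGPR and legalizeOperandsSMRD cooperate to turn an illegal VGPR operand of a scalar memory instruction into an SGPR. A hypothetical single-register sketch of that pattern, assuming the value is uniform; the real helper also splits wider register classes into 32-bit subregisters:

    #include "SIInstrInfo.h"
    #include "llvm/CodeGen/MachineInstrBuilder.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"

    // Sketch: copy a 32-bit VGPR value into a fresh SGPR via V_READFIRSTLANE_B32.
    static unsigned readFirstLaneToSGPR(const llvm::SIInstrInfo &TII,
                                        llvm::MachineRegisterInfo &MRI,
                                        llvm::MachineInstr &UseMI, unsigned VGPR) {
      unsigned SGPR = MRI.createVirtualRegister(&llvm::AMDGPU::SGPR_32RegClass);
      llvm::BuildMI(*UseMI.getParent(), UseMI, UseMI.getDebugLoc(),
                    TII.get(llvm::AMDGPU::V_READFIRSTLANE_B32), SGPR)
          .addReg(VGPR);
      return SGPR;
    }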
2207 MachineRegisterInfo &MRI = MI.getParent()->getParent()->getRegInfo(); in legalizeOperands() local
2211 legalizeOperandsVOP2(MRI, MI); in legalizeOperands()
2217 legalizeOperandsVOP3(MRI, MI); in legalizeOperands()
2223 legalizeOperandsSMRD(MRI, MI); in legalizeOperands()
2237 MRI.getRegClass(MI.getOperand(i).getReg()); in legalizeOperands()
2263 unsigned DstReg = MRI.createVirtualRegister(RC); in legalizeOperands()
2290 const TargetRegisterClass *OpRC = MRI.getRegClass(Op.getReg()); in legalizeOperands()
2295 unsigned DstReg = MRI.createVirtualRegister(VRC); in legalizeOperands()
2313 const TargetRegisterClass *DstRC = MRI.getRegClass(Dst); in legalizeOperands()
2314 const TargetRegisterClass *Src0RC = MRI.getRegClass(Src0); in legalizeOperands()
2317 unsigned NewSrc0 = MRI.createVirtualRegister(DstRC); in legalizeOperands()
2328 if (SRsrc && !RI.isSGPRClass(MRI.getRegClass(SRsrc->getReg()))) { in legalizeOperands()
2329 unsigned SGPR = readlaneVGPRToSGPR(SRsrc->getReg(), MI, MRI); in legalizeOperands()
2334 if (SSamp && !RI.isSGPRClass(MRI.getRegClass(SSamp->getReg()))) { in legalizeOperands()
2335 unsigned SGPR = readlaneVGPRToSGPR(SSamp->getReg(), MI, MRI); in legalizeOperands()
2350 if (RI.getCommonSubClass(MRI.getRegClass(SRsrc->getReg()), in legalizeOperands()
2360 unsigned SRsrcPtr = buildExtractSubReg(MI, MRI, *SRsrc, in legalizeOperands()
2364 unsigned Zero64 = MRI.createVirtualRegister(&AMDGPU::SReg_64RegClass); in legalizeOperands()
2365 unsigned SRsrcFormatLo = MRI.createVirtualRegister(&AMDGPU::SGPR_32RegClass); in legalizeOperands()
2366 unsigned SRsrcFormatHi = MRI.createVirtualRegister(&AMDGPU::SGPR_32RegClass); in legalizeOperands()
2367 unsigned NewSRsrc = MRI.createVirtualRegister(&AMDGPU::SReg_128RegClass); in legalizeOperands()
2392 unsigned NewVAddr = MRI.createVirtualRegister(&AMDGPU::VReg_64RegClass); in legalizeOperands()
2396 unsigned NewVAddrLo = MRI.createVirtualRegister(&AMDGPU::VGPR_32RegClass); in legalizeOperands()
2397 unsigned NewVAddrHi = MRI.createVirtualRegister(&AMDGPU::VGPR_32RegClass); in legalizeOperands()
2503 MachineRegisterInfo &MRI = MBB->getParent()->getRegInfo(); in moveToVALU() local
2660 NewDstReg = MRI.createVirtualRegister(NewDstRC); in moveToVALU()
2661 MRI.replaceRegWith(DstReg, NewDstReg); in moveToVALU()
2668 addUsersToMoveToVALUWorklist(NewDstReg, MRI, Worklist); in moveToVALU()
2675 MachineRegisterInfo &MRI = MBB.getParent()->getRegInfo(); in lowerScalarAbs() local
2681 unsigned TmpReg = MRI.createVirtualRegister(&AMDGPU::VGPR_32RegClass); in lowerScalarAbs()
2682 unsigned ResultReg = MRI.createVirtualRegister(&AMDGPU::VGPR_32RegClass); in lowerScalarAbs()
2692 MRI.replaceRegWith(Dest.getReg(), ResultReg); in lowerScalarAbs()
2693 addUsersToMoveToVALUWorklist(ResultReg, MRI, Worklist); in lowerScalarAbs()
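lowerScalarAbs and the splitScalar64Bit* helpers below all finish the same way: the result is computed into fresh VGPR virtual registers, the old scalar destination is rewired with replaceRegWith, and the new register's users are queued for VALU conversion. A minimal, hypothetical sketch of that tail (the VALU arithmetic itself is elided):

    #include "SIRegisterInfo.h"   // for the generated AMDGPU register classes
    #include "llvm/CodeGen/MachineRegisterInfo.h"

    // Sketch: remap a scalar destination onto a freshly created VGPR result.
    static unsigned remapDestToVGPR(llvm::MachineRegisterInfo &MRI,
                                    unsigned OldDstReg) {
      unsigned ResultReg =
          MRI.createVirtualRegister(&llvm::AMDGPU::VGPR_32RegClass);
      // ...VALU instructions defining ResultReg would be emitted here...
      MRI.replaceRegWith(OldDstReg, ResultReg);  // every use now reads the VGPR
      return ResultReg;
    }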
2700 MachineRegisterInfo &MRI = MBB.getParent()->getRegInfo(); in splitScalar64BitUnaryOp() local
2710 MRI.getRegClass(Src0.getReg()) : in splitScalar64BitUnaryOp()
2715 MachineOperand SrcReg0Sub0 = buildExtractSubRegOrImm(MII, MRI, Src0, Src0RC, in splitScalar64BitUnaryOp()
2718 const TargetRegisterClass *DestRC = MRI.getRegClass(Dest.getReg()); in splitScalar64BitUnaryOp()
2722 unsigned DestSub0 = MRI.createVirtualRegister(NewDestSubRC); in splitScalar64BitUnaryOp()
2726 MachineOperand SrcReg0Sub1 = buildExtractSubRegOrImm(MII, MRI, Src0, Src0RC, in splitScalar64BitUnaryOp()
2729 unsigned DestSub1 = MRI.createVirtualRegister(NewDestSubRC); in splitScalar64BitUnaryOp()
2733 unsigned FullDestReg = MRI.createVirtualRegister(NewDestRC); in splitScalar64BitUnaryOp()
2740 MRI.replaceRegWith(Dest.getReg(), FullDestReg); in splitScalar64BitUnaryOp()
2746 addUsersToMoveToVALUWorklist(FullDestReg, MRI, Worklist); in splitScalar64BitUnaryOp()
2753 MachineRegisterInfo &MRI = MBB.getParent()->getRegInfo(); in splitScalar64BitBinaryOp() local
2764 MRI.getRegClass(Src0.getReg()) : in splitScalar64BitBinaryOp()
2769 MRI.getRegClass(Src1.getReg()) : in splitScalar64BitBinaryOp()
2774 MachineOperand SrcReg0Sub0 = buildExtractSubRegOrImm(MII, MRI, Src0, Src0RC, in splitScalar64BitBinaryOp()
2776 MachineOperand SrcReg1Sub0 = buildExtractSubRegOrImm(MII, MRI, Src1, Src1RC, in splitScalar64BitBinaryOp()
2779 const TargetRegisterClass *DestRC = MRI.getRegClass(Dest.getReg()); in splitScalar64BitBinaryOp()
2783 unsigned DestSub0 = MRI.createVirtualRegister(NewDestSubRC); in splitScalar64BitBinaryOp()
2788 MachineOperand SrcReg0Sub1 = buildExtractSubRegOrImm(MII, MRI, Src0, Src0RC, in splitScalar64BitBinaryOp()
2790 MachineOperand SrcReg1Sub1 = buildExtractSubRegOrImm(MII, MRI, Src1, Src1RC, in splitScalar64BitBinaryOp()
2793 unsigned DestSub1 = MRI.createVirtualRegister(NewDestSubRC); in splitScalar64BitBinaryOp()
2798 unsigned FullDestReg = MRI.createVirtualRegister(NewDestRC); in splitScalar64BitBinaryOp()
2805 MRI.replaceRegWith(Dest.getReg(), FullDestReg); in splitScalar64BitBinaryOp()
2813 addUsersToMoveToVALUWorklist(FullDestReg, MRI, Worklist); in splitScalar64BitBinaryOp()
2819 MachineRegisterInfo &MRI = MBB.getParent()->getRegInfo(); in splitScalar64BitBCNT() local
2829 MRI.getRegClass(Src.getReg()) : in splitScalar64BitBCNT()
2832 unsigned MidReg = MRI.createVirtualRegister(&AMDGPU::VGPR_32RegClass); in splitScalar64BitBCNT()
2833 unsigned ResultReg = MRI.createVirtualRegister(&AMDGPU::VGPR_32RegClass); in splitScalar64BitBCNT()
2837 MachineOperand SrcRegSub0 = buildExtractSubRegOrImm(MII, MRI, Src, SrcRC, in splitScalar64BitBCNT()
2839 MachineOperand SrcRegSub1 = buildExtractSubRegOrImm(MII, MRI, Src, SrcRC, in splitScalar64BitBCNT()
2850 MRI.replaceRegWith(Dest.getReg(), ResultReg); in splitScalar64BitBCNT()
2854 addUsersToMoveToVALUWorklist(ResultReg, MRI, Worklist); in splitScalar64BitBCNT()
2860 MachineRegisterInfo &MRI = MBB.getParent()->getRegInfo(); in splitScalar64BitBFE() local
2876 unsigned MidRegLo = MRI.createVirtualRegister(&AMDGPU::VGPR_32RegClass); in splitScalar64BitBFE()
2877 unsigned MidRegHi = MRI.createVirtualRegister(&AMDGPU::VGPR_32RegClass); in splitScalar64BitBFE()
2878 unsigned ResultReg = MRI.createVirtualRegister(&AMDGPU::VReg_64RegClass); in splitScalar64BitBFE()
2895 MRI.replaceRegWith(Dest.getReg(), ResultReg); in splitScalar64BitBFE()
2896 addUsersToMoveToVALUWorklist(ResultReg, MRI, Worklist); in splitScalar64BitBFE()
2901 unsigned TmpReg = MRI.createVirtualRegister(&AMDGPU::VGPR_32RegClass); in splitScalar64BitBFE()
2902 unsigned ResultReg = MRI.createVirtualRegister(&AMDGPU::VReg_64RegClass); in splitScalar64BitBFE()
2914 MRI.replaceRegWith(Dest.getReg(), ResultReg); in splitScalar64BitBFE()
2915 addUsersToMoveToVALUWorklist(ResultReg, MRI, Worklist); in splitScalar64BitBFE()
2920 MachineRegisterInfo &MRI, in addUsersToMoveToVALUWorklist() argument
2922 for (MachineRegisterInfo::use_iterator I = MRI.use_begin(DstReg), in addUsersToMoveToVALUWorklist()
2923 E = MRI.use_end(); I != E; ++I) { in addUsersToMoveToVALUWorklist()
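The worklist helper walks MRI's use list for the rewritten register and queues each using instruction; the filtering the real code applies is elided in this sketch, and the worklist type is an assumption:

    #include "llvm/ADT/SmallVector.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"

    // Sketch of the traversal behind the addUsersToMoveToVALUWorklist entries above.
    static void collectUsers(llvm::MachineRegisterInfo &MRI, unsigned DstReg,
                             llvm::SmallVectorImpl<llvm::MachineInstr *> &Worklist) {
      for (llvm::MachineRegisterInfo::use_iterator I = MRI.use_begin(DstReg),
                                                   E = MRI.use_end();
           I != E; ++I) {
        llvm::MachineInstr &UseMI = *I->getParent();  // operand -> owning instruction
        Worklist.push_back(&UseMI);
      }
    }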
2990 const MachineRegisterInfo &MRI = MI.getParent()->getParent()->getRegInfo(); in findUsedSGPR() local
3010 const TargetRegisterClass *RegRC = MRI.getRegClass(Reg); in findUsedSGPR()