/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AMDGPU/
  SIOptimizeExecMaskingPreRA.cpp
    215  Register CmpReg = AndCC->getReg();  in optimizeVcndVcmpPair() local
    217  if (CmpReg == ExecReg) {  in optimizeVcndVcmpPair()
    219  CmpReg = AndCC->getReg();  in optimizeVcndVcmpPair()
    225  auto *Cmp = TRI->findReachingDef(CmpReg, CmpSubReg, *And, MRI, LIS);  in optimizeVcndVcmpPair()
    271  if ((Register::isVirtualRegister(CmpReg) && MRI.use_nodbg_empty(CmpReg)) ||  in optimizeVcndVcmpPair()
    272  (CmpReg == CondReg &&  in optimizeVcndVcmpPair()

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/
  AArch64RedundantCopyElimination.cpp
    405  MCPhysReg CmpReg = KnownReg.Reg;  in optimizeBlock() local
    406  if (any_of(MI->implicit_operands(), [CmpReg](MachineOperand &O) {  in optimizeBlock()
    408  O.getReg() != CmpReg;  in optimizeBlock()

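The AArch64 hit at line 406 uses the llvm::any_of idiom over a machine instruction's implicit operands. A minimal sketch of that idiom against the LLVM 10 headers; the helper name touchesOtherReg and the exact predicate are illustrative only, not the check performed in AArch64RedundantCopyElimination.cpp:

    #include "llvm/ADT/STLExtras.h"
    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineOperand.h"
    #include "llvm/MC/MCRegisterInfo.h"

    using namespace llvm;

    // Hypothetical helper: return true if any implicit operand of MI is a
    // register other than CmpReg. The real pass applies a stricter predicate.
    static bool touchesOtherReg(const MachineInstr &MI, MCPhysReg CmpReg) {
      return any_of(MI.implicit_operands(), [CmpReg](const MachineOperand &O) {
        return O.isReg() && O.getReg() != CmpReg;
      });
    }
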
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/WebAssembly/
  WebAssemblyISelLowering.cpp
    387  unsigned Tmp0, Tmp1, CmpReg, EqzReg, FalseReg, TrueReg;  in LowerFPToInt() local
    390  CmpReg = MRI.createVirtualRegister(&WebAssembly::I32RegClass);  in LowerFPToInt()
    405  BuildMI(BB, DL, TII.get(LT), CmpReg).addReg(Tmp0).addReg(Tmp1);  in LowerFPToInt()
    416  BuildMI(BB, DL, TII.get(And), AndReg).addReg(CmpReg).addReg(SecondCmpReg);  in LowerFPToInt()
    417  CmpReg = AndReg;  in LowerFPToInt()
    420  BuildMI(BB, DL, TII.get(Eqz), EqzReg).addReg(CmpReg);  in LowerFPToInt()

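The WebAssemblyISelLowering.cpp hits above follow the standard MachineInstr-building pattern: allocate a fresh virtual register for the compare result, emit the compare with BuildMI, and feed CmpReg into follow-up instructions. A minimal sketch of that pattern against the LLVM 10 CodeGen API; the helper name and the opcode/register-class parameters are placeholders, not the actual values used in LowerFPToInt():

    #include "llvm/CodeGen/MachineBasicBlock.h"
    #include "llvm/CodeGen/MachineInstrBuilder.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    #include "llvm/CodeGen/TargetInstrInfo.h"
    #include "llvm/CodeGen/TargetRegisterInfo.h"
    #include "llvm/IR/DebugLoc.h"

    using namespace llvm;

    // Hypothetical helper: compare LHS with RHS, then invert the result with an
    // "eqz"-style opcode. Each value lives in a freshly created virtual register.
    static Register emitCmpThenEqz(MachineBasicBlock *BB, const DebugLoc &DL,
                                   const TargetInstrInfo &TII,
                                   MachineRegisterInfo &MRI,
                                   const TargetRegisterClass *RC,
                                   unsigned CmpOpc, unsigned EqzOpc,
                                   Register LHS, Register RHS) {
      // Compare result goes into a new virtual register of class RC.
      Register CmpReg = MRI.createVirtualRegister(RC);
      BuildMI(BB, DL, TII.get(CmpOpc), CmpReg).addReg(LHS).addReg(RHS);

      // Feed CmpReg into the next instruction, again producing a new register.
      Register EqzReg = MRI.createVirtualRegister(RC);
      BuildMI(BB, DL, TII.get(EqzOpc), EqzReg).addReg(CmpReg);
      return EqzReg;
    }
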
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/X86/
  X86FastISel.cpp
   2210  unsigned CmpReg = fastEmitInst_rri(CmpOpcode, VK1, CmpLHSReg, CmpLHSIsKill,  in X86FastEmitSSESelect() local
   2224  CmpReg, true, ImplicitDefReg, true,  in X86FastEmitSSESelect()
   2244  unsigned CmpReg = fastEmitInst_rri(CmpOpcode, RC, CmpLHSReg, CmpLHSIsKill,  in X86FastEmitSSESelect() local
   2247  LHSReg, LHSIsKill, CmpReg, true);  in X86FastEmitSSESelect()
   2266  unsigned CmpReg = fastEmitInst_rri(Opc[0], RC, CmpLHSReg, CmpLHSIsKill,  in X86FastEmitSSESelect() local
   2268  unsigned AndReg = fastEmitInst_rr(Opc[1], VR128, CmpReg, /*IsKill=*/false,  in X86FastEmitSSESelect()
   2270  unsigned AndNReg = fastEmitInst_rr(Opc[2], VR128, CmpReg, /*IsKill=*/true,  in X86FastEmitSSESelect()

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/ARM/
  ARMFastISel.cpp
   1303  unsigned CmpReg = getRegForValue(BI->getCondition());  in SelectBranch() local
   1304  if (CmpReg == 0) return false;  in SelectBranch()
   1314  CmpReg = constrainOperandRegClass(TII.get(TstOpc), CmpReg, 0);  in SelectBranch()
   1317  .addReg(CmpReg)  in SelectBranch()

/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/PowerPC/
  PPCISelLowering.cpp
  10885  unsigned CmpReg = Incr2Reg;  in EmitPartwordAtomicBinary() local
  10895  CmpReg = incr;  in EmitPartwordAtomicBinary()
  10898  .addReg(CmpReg)  in EmitPartwordAtomicBinary()
  11348  Register CmpReg = RegInfo.createVirtualRegister(&PPC::CRRCRegClass);  in EmitInstrWithCustomInserter() local
  11350  BuildMI(BB, dl, TII->get(PPC::CMPW), CmpReg)  in EmitInstrWithCustomInserter()
  11355  .addReg(CmpReg)  in EmitInstrWithCustomInserter()