Home
last modified time | relevance | path

Searched refs:SecondLdSt (Results 1 – 10 of 10) sorted by relevance

/external/llvm/lib/Target/AArch64/
AArch64InstrInfo.h:108   bool shouldClusterMemOps(MachineInstr &FirstLdSt, MachineInstr &SecondLdSt,
AArch64InstrInfo.cpp:1762   MachineInstr &SecondLdSt, in shouldClusterMemOps() argument
AArch64InstrInfo.cpp:1770   unsigned SecondOpc = SecondLdSt.getOpcode(); in shouldClusterMemOps()
AArch64InstrInfo.cpp:1777   !isCandidateToMergeOrPair(SecondLdSt)) in shouldClusterMemOps()
AArch64InstrInfo.cpp:1785   int64_t Offset2 = SecondLdSt.getOperand(2).getImm(); in shouldClusterMemOps()
/external/llvm/lib/Target/AMDGPU/
SIInstrInfo.cpp:300   MachineInstr &SecondLdSt, in shouldClusterMemOps() argument
SIInstrInfo.cpp:305   if (isDS(FirstLdSt) && isDS(SecondLdSt)) { in shouldClusterMemOps()
SIInstrInfo.cpp:307   SecondDst = getNamedOperand(SecondLdSt, AMDGPU::OpName::vdst); in shouldClusterMemOps()
SIInstrInfo.cpp:310   if (isSMRD(FirstLdSt) && isSMRD(SecondLdSt)) { in shouldClusterMemOps()
SIInstrInfo.cpp:312   SecondDst = getNamedOperand(SecondLdSt, AMDGPU::OpName::sdst); in shouldClusterMemOps()
SIInstrInfo.cpp:315   if ((isMUBUF(FirstLdSt) && isMUBUF(SecondLdSt)) || in shouldClusterMemOps()
SIInstrInfo.cpp:316   (isMTBUF(FirstLdSt) && isMTBUF(SecondLdSt))) { in shouldClusterMemOps()
SIInstrInfo.cpp:318   SecondDst = getNamedOperand(SecondLdSt, AMDGPU::OpName::vdata); in shouldClusterMemOps()
SIInstrInfo.h:117   bool shouldClusterMemOps(MachineInstr &FirstLdSt, MachineInstr &SecondLdSt,
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AMDGPU/
SIInstrInfo.cpp:440   const MachineInstr &SecondLdSt = *BaseOp2.getParent(); in shouldClusterMemOps() local
SIInstrInfo.cpp:442   if (!memOpsHaveSameBasePtr(FirstLdSt, BaseOp1, SecondLdSt, BaseOp2)) in shouldClusterMemOps()
SIInstrInfo.cpp:448   if ((isMUBUF(FirstLdSt) && isMUBUF(SecondLdSt)) || in shouldClusterMemOps()
SIInstrInfo.cpp:449   (isMTBUF(FirstLdSt) && isMTBUF(SecondLdSt)) || in shouldClusterMemOps()
SIInstrInfo.cpp:450   (isFLAT(FirstLdSt) && isFLAT(SecondLdSt))) { in shouldClusterMemOps()
SIInstrInfo.cpp:458   SecondDst = getNamedOperand(SecondLdSt, AMDGPU::OpName::vdata); in shouldClusterMemOps()
SIInstrInfo.cpp:460   SecondDst = getNamedOperand(SecondLdSt, AMDGPU::OpName::vdst); in shouldClusterMemOps()
SIInstrInfo.cpp:461   } else if (isSMRD(FirstLdSt) && isSMRD(SecondLdSt)) { in shouldClusterMemOps()
SIInstrInfo.cpp:463   SecondDst = getNamedOperand(SecondLdSt, AMDGPU::OpName::sdst); in shouldClusterMemOps()
SIInstrInfo.cpp:464   } else if (isDS(FirstLdSt) && isDS(SecondLdSt)) { in shouldClusterMemOps()
[all …]
/external/llvm/include/llvm/Target/
TargetInstrInfo.h:1046   MachineInstr &SecondLdSt, in shouldClusterMemOps() argument
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/
AArch64InstrInfo.cpp:2370   const MachineInstr &SecondLdSt = *BaseOp2.getParent(); in shouldClusterMemOps() local
AArch64InstrInfo.cpp:2385   if (!isPairableLdStInst(FirstLdSt) || !isPairableLdStInst(SecondLdSt)) in shouldClusterMemOps()
AArch64InstrInfo.cpp:2390   unsigned SecondOpc = SecondLdSt.getOpcode(); in shouldClusterMemOps()
AArch64InstrInfo.cpp:2397   !isCandidateToMergeOrPair(SecondLdSt)) in shouldClusterMemOps()
AArch64InstrInfo.cpp:2405   int64_t Offset2 = SecondLdSt.getOperand(2).getImm(); in shouldClusterMemOps()
/external/llvm-project/llvm/lib/Target/PowerPC/
PPCInstrInfo.cpp:2370   const MachineInstr &SecondLdSt = *BaseOp2.getParent(); in shouldClusterMemOps() local
PPCInstrInfo.cpp:2372   unsigned SecondOpc = SecondLdSt.getOpcode(); in shouldClusterMemOps()
PPCInstrInfo.cpp:2381   !isLdStSafeToCluster(SecondLdSt, TRI)) in shouldClusterMemOps()
PPCInstrInfo.cpp:2388   !getMemOperandWithOffsetWidth(SecondLdSt, Base2, Offset2, Width2, TRI) || in shouldClusterMemOps()
/external/llvm-project/llvm/lib/Target/AArch64/
AArch64InstrInfo.cpp:2635   const MachineInstr &SecondLdSt = *BaseOp2.getParent(); in shouldClusterMemOps() local
AArch64InstrInfo.cpp:2650   if (!isPairableLdStInst(FirstLdSt) || !isPairableLdStInst(SecondLdSt)) in shouldClusterMemOps()
AArch64InstrInfo.cpp:2655   unsigned SecondOpc = SecondLdSt.getOpcode(); in shouldClusterMemOps()
AArch64InstrInfo.cpp:2662   !isCandidateToMergeOrPair(SecondLdSt)) in shouldClusterMemOps()
AArch64InstrInfo.cpp:2670   int64_t Offset2 = SecondLdSt.getOperand(2).getImm(); in shouldClusterMemOps()
/external/llvm-project/llvm/lib/Target/AMDGPU/
SIInstrInfo.cpp:480   const MachineInstr &SecondLdSt = *BaseOps2.front()->getParent(); in shouldClusterMemOps() local
SIInstrInfo.cpp:481   if (!memOpsHaveSameBasePtr(FirstLdSt, BaseOps1, SecondLdSt, BaseOps2)) in shouldClusterMemOps()