/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/Hexagon/MCTargetDesc/ |
D | HexagonMCCompound.cpp | 334 static bool isOrderedCompoundPair(MCInst const &MIa, bool IsExtendedA, in isOrderedCompoundPair() argument
    336 unsigned MIaG = getCompoundCandidateGroup(MIa, IsExtendedA); in isOrderedCompoundPair()
    340 unsigned Opca = MIa.getOpcode(); in isOrderedCompoundPair()
    345 (MIa.getOperand(0).getReg() == MIb.getOperand(0).getReg())); in isOrderedCompoundPair()
|
D | HexagonMCDuplexInfo.cpp | 579 MCInst const &MIa, bool ExtendedA, in isOrderedDuplexPair() argument
    592 unsigned MIaG = HexagonMCInstrInfo::getDuplexCandidateGroup(MIa), in isOrderedDuplexPair()
    601 MCInst SubInst0 = HexagonMCInstrInfo::deriveSubInst(MIa); in isOrderedDuplexPair()
    623 if (subInstWouldBeExtended(MIa)) in isOrderedDuplexPair()
    657 bool HexagonMCInstrInfo::isDuplexPair(MCInst const &MIa, MCInst const &MIb) { in isDuplexPair() argument
    658 unsigned MIaG = getDuplexCandidateGroup(MIa), in isDuplexPair()
|
D | HexagonMCInstrInfo.h | 224 bool isDuplexPair(MCInst const &MIa, MCInst const &MIb);
    258 bool isOrderedDuplexPair(MCInstrInfo const &MCII, MCInst const &MIa,
|
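The three Hexagon MC-layer hits above share one shape: each MCInst is first mapped to a candidate group (getCompoundCandidateGroup / getDuplexCandidateGroup), and a compound or duplex is formed only when the two groups make a legal ordered combination. Below is a minimal, self-contained C++ sketch of that pattern; the CandidateGroup values, the classify() opcodes, and the isLegalGroupPair() table are hypothetical stand-ins, not the real Hexagon grouping rules.

#include <cstdint>
#include <utility>

// Hypothetical stand-in for the Hexagon duplex/compound candidate groups;
// the real enumeration is richer and lives with HexagonMCInstrInfo.
enum class CandidateGroup : uint8_t { None, L1, L2, S1, S2, A };

// Hypothetical classifier; the real code derives the group from an MCInst's
// opcode and operands (getCompoundCandidateGroup / getDuplexCandidateGroup).
static CandidateGroup classify(unsigned Opcode) {
  switch (Opcode) {
  case 0x10: return CandidateGroup::L1; // illustrative "small load" opcode
  case 0x20: return CandidateGroup::S1; // illustrative "small store" opcode
  case 0x30: return CandidateGroup::A;  // illustrative simple ALU opcode
  default:   return CandidateGroup::None;
  }
}

// Only certain ordered group combinations are encodable; this table is
// illustrative, not the Hexagon encoding rules.
static bool isLegalGroupPair(CandidateGroup G0, CandidateGroup G1) {
  using CG = CandidateGroup;
  static const std::pair<CG, CG> Legal[] = {
      {CG::L1, CG::L1}, {CG::L1, CG::A}, {CG::S1, CG::A}, {CG::A, CG::A}};
  for (const auto &P : Legal)
    if (P.first == G0 && P.second == G1)
      return true;
  return false;
}

// Shape of isOrderedCompoundPair / isOrderedDuplexPair: classify both
// instructions, then accept only a legal ordered combination of groups.
bool isOrderedPair(unsigned OpcodeA, unsigned OpcodeB) {
  CandidateGroup GA = classify(OpcodeA);
  CandidateGroup GB = classify(OpcodeB);
  if (GA == CandidateGroup::None || GB == CandidateGroup::None)
    return false;
  return isLegalGroupPair(GA, GB);
}
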
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/RISCV/ |
D | RISCVInstrInfo.cpp | 586 const MachineInstr &MIa, const MachineInstr &MIb) const { in areMemAccessesTriviallyDisjoint() argument
    587 assert(MIa.mayLoadOrStore() && "MIa must be a load or store."); in areMemAccessesTriviallyDisjoint()
    590 if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects() || in areMemAccessesTriviallyDisjoint()
    591 MIa.hasOrderedMemoryRef() || MIb.hasOrderedMemoryRef()) in areMemAccessesTriviallyDisjoint()
    603 if (getMemOperandWithOffsetWidth(MIa, BaseOpA, OffsetA, WidthA, TRI) && in areMemAccessesTriviallyDisjoint()
|
D | RISCVInstrInfo.h | 94 bool areMemAccessesTriviallyDisjoint(const MachineInstr &MIa,
|
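The RISC-V hit above, and the Lanai, AArch64 and PowerPC versions further down, implement the TargetInstrInfo hook with the same three steps: bail out conservatively when either instruction has unmodeled side effects or an ordered memory reference, recover a (base, offset, width) triple for each access via getMemOperandWithOffsetWidth, and report disjointness only when both accesses use the same base register and their byte ranges cannot overlap. The sketch below mirrors that flow with simplified stand-in types; Instr and MemAccess are hypothetical, and only the overall structure and the names in the comments come from the listing.

#include <algorithm>
#include <cstdint>
#include <optional>

// Hypothetical, simplified view of one memory access: a base register id,
// a byte offset from that base, and the access width in bytes. In the real
// hook this triple comes from getMemOperandWithOffsetWidth().
struct MemAccess {
  unsigned BaseReg;
  int64_t Offset;
  int64_t Width;
};

// Stand-in for the MachineInstr queries the hook performs.
struct Instr {
  bool HasUnmodeledSideEffects;
  bool HasOrderedMemoryRef;        // volatile/atomic-like orderings
  std::optional<MemAccess> Access; // empty if the address can't be decomposed
};

// "true" is a proof of disjointness; "false" only means "cannot prove it".
bool areMemAccessesTriviallyDisjoint(const Instr &A, const Instr &B) {
  // Conservative bail-outs, mirroring the hasUnmodeledSideEffects() /
  // hasOrderedMemoryRef() checks visible in the listing.
  if (A.HasUnmodeledSideEffects || B.HasUnmodeledSideEffects ||
      A.HasOrderedMemoryRef || B.HasOrderedMemoryRef)
    return false;

  if (!A.Access || !B.Access)
    return false; // address not in base + immediate-offset form

  if (A.Access->BaseReg != B.Access->BaseReg)
    return false; // different bases: relation unknown without more analysis

  // Same base: disjoint iff the byte ranges [Offset, Offset + Width) do not
  // overlap, i.e. the lower access ends at or before the higher one begins.
  int64_t LowOffset  = std::min(A.Access->Offset, B.Access->Offset);
  int64_t HighOffset = std::max(A.Access->Offset, B.Access->Offset);
  int64_t LowWidth   = (LowOffset == A.Access->Offset) ? A.Access->Width
                                                       : B.Access->Width;
  return LowOffset + LowWidth <= HighOffset;
}
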
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/Lanai/ |
D | LanaiInstrInfo.cpp | 89 const MachineInstr &MIa, const MachineInstr &MIb) const { in areMemAccessesTriviallyDisjoint() argument
    90 assert(MIa.mayLoadOrStore() && "MIa must be a load or store."); in areMemAccessesTriviallyDisjoint()
    93 if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects() || in areMemAccessesTriviallyDisjoint()
    94 MIa.hasOrderedMemoryRef() || MIb.hasOrderedMemoryRef()) in areMemAccessesTriviallyDisjoint()
    106 if (getMemOperandWithOffsetWidth(MIa, BaseOpA, OffsetA, WidthA, TRI) && in areMemAccessesTriviallyDisjoint()
|
D | LanaiInstrInfo.h | 38 bool areMemAccessesTriviallyDisjoint(const MachineInstr &MIa,
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/Hexagon/ |
D | HexagonInstrInfo.h | 290 areMemAccessesTriviallyDisjoint(const MachineInstr &MIa,
    356 bool isDuplexPair(const MachineInstr &MIa, const MachineInstr &MIb) const;
|
D | HexagonInstrInfo.cpp | 1871 const MachineInstr &MIa, const MachineInstr &MIb) const { in areMemAccessesTriviallyDisjoint() argument
    1872 if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects() || in areMemAccessesTriviallyDisjoint()
    1873 MIa.hasOrderedMemoryRef() || MIb.hasOrderedMemoryRef()) in areMemAccessesTriviallyDisjoint()
    1878 if (MIa.mayLoad() && !isMemOp(MIa) && MIb.mayLoad() && !isMemOp(MIb)) in areMemAccessesTriviallyDisjoint()
    1883 if (!getBaseAndOffsetPosition(MIa, BasePosA, OffsetPosA)) in areMemAccessesTriviallyDisjoint()
    1885 const MachineOperand &BaseA = MIa.getOperand(BasePosA); in areMemAccessesTriviallyDisjoint()
    1901 unsigned SizeA = getMemAccessSize(MIa); in areMemAccessesTriviallyDisjoint()
    1905 const MachineOperand &OffA = MIa.getOperand(OffsetPosA); in areMemAccessesTriviallyDisjoint()
    1907 if (!MIa.getOperand(OffsetPosA).isImm() || in areMemAccessesTriviallyDisjoint()
    1910 int OffsetA = isPostIncrement(MIa) ? 0 : OffA.getImm(); in areMemAccessesTriviallyDisjoint()
    [all …]
|
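The Hexagon override above goes a different route: it locates the base and offset operand positions directly (getBaseAndOffsetPosition), takes the access size from getMemAccessSize, and treats a post-increment access as having an effective offset of zero, since the access itself uses the un-incremented address. A short, hedged sketch of that offset handling, with a hypothetical HexAccess summary type:

#include <cstdint>

// Hypothetical summary of what the Hexagon hook extracts for one access via
// getBaseAndOffsetPosition(), getMemAccessSize() and the offset operand.
struct HexAccess {
  bool IsPostIncrement; // post-increment addressing form
  int64_t ImmOffset;    // immediate offset operand, when it is an immediate
  int64_t AccessSize;   // bytes read or written
};

// The access itself happens at the un-incremented address, so a
// post-increment form contributes an effective offset of 0 (mirroring
// "isPostIncrement(MIa) ? 0 : OffA.getImm()" in the listing).
static int64_t effectiveOffset(const HexAccess &A) {
  return A.IsPostIncrement ? 0 : A.ImmOffset;
}

// Same-base overlap test on the [offset, offset + size) byte intervals.
bool offsetsDoNotOverlap(const HexAccess &A, const HexAccess &B) {
  int64_t OffA = effectiveOffset(A);
  int64_t OffB = effectiveOffset(B);
  return OffA + A.AccessSize <= OffB || OffB + B.AccessSize <= OffA;
}
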
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/ |
D | AArch64LoadStoreOptimizer.cpp | 1147 static bool mayAlias(MachineInstr &MIa, MachineInstr &MIb, in mayAlias() argument
    1150 if (!MIa.mayStore() && !MIb.mayStore()) in mayAlias()
    1154 if (!MIa.mayLoadOrStore() && !MIb.mayLoadOrStore()) in mayAlias()
    1157 return MIa.mayAlias(AA, MIb, /*UseTBAA*/false); in mayAlias()
    1160 static bool mayAlias(MachineInstr &MIa, in mayAlias() argument
    1164 if (mayAlias(MIa, *MIb, AA)) in mayAlias()
|
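The AArch64 load/store optimizer hit is not the target hook itself but a cheap gate in front of the alias query: if neither instruction stores there is nothing to conflict, the mayLoadOrStore check filters out non-memory instructions, and only then is the more expensive MachineInstr::mayAlias / alias-analysis query made. A self-contained sketch of that gating with stand-in types (MemInstr and preciseMayAlias are hypothetical):

// Hypothetical stand-ins for the MachineInstr properties the wrapper checks.
struct MemInstr {
  bool MayStore;
  bool MayLoadOrStore;
};

// Conservative stub standing in for the AA-backed MachineInstr::mayAlias()
// query the real wrapper falls back to.
static bool preciseMayAlias(const MemInstr &, const MemInstr &) {
  return true;
}

bool mayAlias(const MemInstr &A, const MemInstr &B) {
  // Two instructions that only read can never conflict.
  if (!A.MayStore && !B.MayStore)
    return false;
  // Nothing to alias when neither instruction touches memory
  // (mirrors the mayLoadOrStore() check at line 1154 above).
  if (!A.MayLoadOrStore && !B.MayLoadOrStore)
    return false;
  // Otherwise pay for the precise query.
  return preciseMayAlias(A, B);
}
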
D | AArch64InstrInfo.h | 58 areMemAccessesTriviallyDisjoint(const MachineInstr &MIa,
|
D | AArch64InstrInfo.cpp | 933 const MachineInstr &MIa, const MachineInstr &MIb) const { in areMemAccessesTriviallyDisjoint() argument
    939 assert(MIa.mayLoadOrStore() && "MIa must be a load or store."); in areMemAccessesTriviallyDisjoint()
    942 if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects() || in areMemAccessesTriviallyDisjoint()
    943 MIa.hasOrderedMemoryRef() || MIb.hasOrderedMemoryRef()) in areMemAccessesTriviallyDisjoint()
    951 if (getMemOperandWithOffsetWidth(MIa, BaseOpA, OffsetA, WidthA, TRI) && in areMemAccessesTriviallyDisjoint()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/SystemZ/ |
D | SystemZInstrInfo.h | 335 areMemAccessesTriviallyDisjoint(const MachineInstr &MIa,
|
D | SystemZInstrInfo.cpp | 1782 areMemAccessesTriviallyDisjoint(const MachineInstr &MIa, in areMemAccessesTriviallyDisjoint() argument
    1785 if (!MIa.hasOneMemOperand() || !MIb.hasOneMemOperand()) in areMemAccessesTriviallyDisjoint()
    1792 MachineMemOperand *MMOa = *MIa.memoperands_begin(); in areMemAccessesTriviallyDisjoint()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AMDGPU/ |
D | SIInstrInfo.cpp | 2528 bool SIInstrInfo::checkInstOffsetsDoNotOverlap(const MachineInstr &MIa, in checkInstOffsetsDoNotOverlap() argument
    2533 if (getMemOperandWithOffset(MIa, BaseOp0, Offset0, &RI) && in checkInstOffsetsDoNotOverlap()
    2538 if (!MIa.hasOneMemOperand() || !MIb.hasOneMemOperand()) { in checkInstOffsetsDoNotOverlap()
    2542 unsigned Width0 = (*MIa.memoperands_begin())->getSize(); in checkInstOffsetsDoNotOverlap()
    2552 bool SIInstrInfo::areMemAccessesTriviallyDisjoint(const MachineInstr &MIa, in areMemAccessesTriviallyDisjoint() argument
    2554 assert(MIa.mayLoadOrStore() && in areMemAccessesTriviallyDisjoint()
    2559 if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects()) in areMemAccessesTriviallyDisjoint()
    2563 if (MIa.hasOrderedMemoryRef() || MIb.hasOrderedMemoryRef()) in areMemAccessesTriviallyDisjoint()
    2571 if (isDS(MIa)) { in areMemAccessesTriviallyDisjoint()
    2573 return checkInstOffsetsDoNotOverlap(MIa, MIb); in areMemAccessesTriviallyDisjoint()
    [all …]
|
D | SIInstrInfo.h | 132 bool checkInstOffsetsDoNotOverlap(const MachineInstr &MIa,
    315 areMemAccessesTriviallyDisjoint(const MachineInstr &MIa,
|
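The AMDGPU hits show a two-level check: checkInstOffsetsDoNotOverlap compares offsets and widths (via getMemOperandWithOffset and the sole memoperand), while areMemAccessesTriviallyDisjoint first asks what kind of access each instruction is (the isDS(...) test above), because accesses known to live in different address spaces cannot collide at all. The sketch below is an illustrative simplification with a hypothetical MemKind enum rather than the real isDS/isMUBUF/isSMRD/isFLAT predicates, and it is not the exact SIInstrInfo logic.

#include <cstdint>

// Hypothetical classification of an AMDGPU memory access: LDS (the isDS()
// case in the listing), global memory, or a flat access that may address
// either space.
enum class MemKind { LDS, Global, Flat };

struct GpuAccess {
  MemKind Kind;
  int64_t Offset; // offset relative to a common base
  int64_t Width;  // access size in bytes
};

// Same-space accesses: a plain interval-overlap test, the role
// checkInstOffsetsDoNotOverlap() plays in the listing.
static bool offsetsDoNotOverlap(const GpuAccess &A, const GpuAccess &B) {
  return A.Offset + A.Width <= B.Offset || B.Offset + B.Width <= A.Offset;
}

bool triviallyDisjoint(const GpuAccess &A, const GpuAccess &B) {
  // A flat access may target either space, so kind alone proves nothing.
  if (A.Kind == MemKind::Flat || B.Kind == MemKind::Flat)
    return false;
  // LDS and global memory are distinct address spaces and cannot overlap.
  if (A.Kind != B.Kind)
    return true;
  // Same space: fall back to the interval check on offsets.
  return offsetsDoNotOverlap(A, B);
}
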
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/include/llvm/CodeGen/ |
D | TargetInstrInfo.h | 1650 areMemAccessesTriviallyDisjoint(const MachineInstr &MIa, in areMemAccessesTriviallyDisjoint() argument
    1652 assert(MIa.mayLoadOrStore() && in areMemAccessesTriviallyDisjoint()
|
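The TargetInstrInfo.h hit is the base-class hook that the target InstrInfo files above override: the visible lines assert that both instructions really are memory accesses, and without an override the hook answers conservatively, since false only means "not provably disjoint". A tiny sketch of that default shape, with a stand-in instruction type:

#include <cassert>

// Stand-in for the only MachineInstr property the default hook inspects.
struct Instr {
  bool MayLoadOrStore;
};

// Shape of the base-class default: callers must only hand it memory
// instructions, and a target without its own override never claims
// disjointness.
bool areMemAccessesTriviallyDisjointDefault(const Instr &A, const Instr &B) {
  assert(A.MayLoadOrStore && "MIa must be a load or store.");
  assert(B.MayLoadOrStore && "MIb must be a load or store.");
  return false; // conservative: nothing was proven
}
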
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/PowerPC/ |
D | PPCInstrInfo.h | 370 areMemAccessesTriviallyDisjoint(const MachineInstr &MIa,
|
D | PPCInstrInfo.cpp | 4281 const MachineInstr &MIa, const MachineInstr &MIb) const { in areMemAccessesTriviallyDisjoint() argument
    4282 assert(MIa.mayLoadOrStore() && "MIa must be a load or store."); in areMemAccessesTriviallyDisjoint()
    4285 if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects() || in areMemAccessesTriviallyDisjoint()
    4286 MIa.hasOrderedMemoryRef() || MIb.hasOrderedMemoryRef()) in areMemAccessesTriviallyDisjoint()
    4298 if (getMemOperandWithOffsetWidth(MIa, BaseOpA, OffsetA, WidthA, TRI) && in areMemAccessesTriviallyDisjoint()
|
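Across these targets the contract is the same: callers may only pass two load/store instructions (hence the asserts), true licenses treating the accesses as independent, and false proves nothing. A hedged, caller-side sketch of how such a hook is typically consulted when deciding whether two instructions can be reordered with respect to memory; the canReorder helper and types are hypothetical, not an LLVM API, and register dependences are out of scope here.

struct Instr {
  bool MayLoadOrStore;
  bool MayStore;
};

// Conservative stub for the target hook surveyed above; a real target would
// perform the base + offset + width analysis shown earlier.
static bool areMemAccessesTriviallyDisjoint(const Instr &, const Instr &) {
  return false;
}

// Hypothetical caller, considering only the memory-dependence side: two
// instructions may be swapped if at most one of them writes memory, or if
// the target proves the accesses never overlap.
bool canReorder(const Instr &A, const Instr &B) {
  if (!A.MayLoadOrStore || !B.MayLoadOrStore)
    return true; // at least one instruction does not touch memory
  if (!A.MayStore && !B.MayStore)
    return true; // two loads never conflict
  return areMemAccessesTriviallyDisjoint(A, B);
}
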