/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AMDGPU/ |
D | R600ControlFlowFinalizer.cpp |
  352  MachineInstr *MIb = BuildMI(MBB, ClauseHead, MBB.findDebugLoc(ClauseHead),  in MakeFetchClause() local
  356  return ClauseFile(MIb, std::move(ClauseContent));  in MakeFetchClause()
  571  MachineInstr *MIb = BuildMI(MBB, MI, MBB.findDebugLoc(MI),  in runOnMachineFunction() local
  576  Pair.second.insert(MIb);  in runOnMachineFunction()
  596  MachineInstr *MIb = BuildMI(MBB, MI, MBB.findDebugLoc(MI),  in runOnMachineFunction() local
  600  IfThenElseStack.push_back(MIb);  in runOnMachineFunction()
  601  LLVM_DEBUG(dbgs() << CfCount << ":"; MIb->dump(););  in runOnMachineFunction()
  610  MachineInstr *MIb = BuildMI(MBB, MI, MBB.findDebugLoc(MI),  in runOnMachineFunction() local
  614  LLVM_DEBUG(dbgs() << CfCount << ":"; MIb->dump(););  in runOnMachineFunction()
  615  IfThenElseStack.push_back(MIb);  in runOnMachineFunction()
  [all …]
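The R600 hits above all follow one pattern: a fresh control-flow MachineInstr is created in place with BuildMI(), reusing the debug location of the instruction it is inserted before, and the returned pointer (MIb) is then stashed in a clause or if/then/else stack for later fix-up. A minimal sketch of that pattern, assuming only the generic CodeGen API; the opcode and container names are illustrative placeholders, not names from R600ControlFlowFinalizer.cpp:

    #include <vector>

    #include "llvm/CodeGen/MachineBasicBlock.h"
    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineInstrBuilder.h"
    #include "llvm/CodeGen/TargetInstrInfo.h"

    using namespace llvm;

    // Build a new instruction of `Opcode` in MBB right before MI, reusing MI's
    // debug location, and remember the pointer for later patching. `Stack`
    // stands in for structures such as IfThenElseStack in the listing above.
    static MachineInstr *emitControlFlowBefore(const TargetInstrInfo &TII,
                                               MachineBasicBlock &MBB,
                                               MachineBasicBlock::iterator MI,
                                               unsigned Opcode,
                                               std::vector<MachineInstr *> &Stack) {
      MachineInstr *MIb =
          BuildMI(MBB, MI, MBB.findDebugLoc(MI), TII.get(Opcode)).getInstr();
      Stack.push_back(MIb);
      return MIb;
    }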
|
D | SIInstrInfo.cpp |
  2529  const MachineInstr &MIb) const {  in checkInstOffsetsDoNotOverlap()
  2534  getMemOperandWithOffset(MIb, BaseOp1, Offset1, &RI)) {  in checkInstOffsetsDoNotOverlap()
  2538  if (!MIa.hasOneMemOperand() || !MIb.hasOneMemOperand()) {  in checkInstOffsetsDoNotOverlap()
  2543  unsigned Width1 = (*MIb.memoperands_begin())->getSize();  in checkInstOffsetsDoNotOverlap()
  2553  const MachineInstr &MIb) const {  in areMemAccessesTriviallyDisjoint()
  2556  assert(MIb.mayLoadOrStore() &&  in areMemAccessesTriviallyDisjoint()
  2559  if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects())  in areMemAccessesTriviallyDisjoint()
  2563  if (MIa.hasOrderedMemoryRef() || MIb.hasOrderedMemoryRef())  in areMemAccessesTriviallyDisjoint()
  2572  if (isDS(MIb))  in areMemAccessesTriviallyDisjoint()
  2573  return checkInstOffsetsDoNotOverlap(MIa, MIb);  in areMemAccessesTriviallyDisjoint()
  [all …]
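The two functions hit in SIInstrInfo.cpp form a two-stage answer: areMemAccessesTriviallyDisjoint() first bails out on anything with unmodeled side effects or ordered memory references, then, for accesses in the same address space (e.g. two DS accesses), asks checkInstOffsetsDoNotOverlap() whether the byte ranges can collide. A hedged sketch of that offset test using only generic LLVM 10 hooks; the helper name and exact structure are illustrative, not a transcription of the AMDGPU code:

    #include <algorithm>
    #include <cstdint>

    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineMemOperand.h"
    #include "llvm/CodeGen/TargetInstrInfo.h"
    #include "llvm/CodeGen/TargetRegisterInfo.h"

    using namespace llvm;

    // Two accesses with the same base operand are trivially disjoint when the
    // byte ranges [Offset, Offset + Width) cannot intersect. The width comes
    // from the single MachineMemOperand, mirroring the getSize() hit above.
    static bool offsetsDoNotOverlap(const TargetInstrInfo &TII,
                                    const TargetRegisterInfo &TRI,
                                    const MachineInstr &MIa,
                                    const MachineInstr &MIb) {
      const MachineOperand *BaseOp0 = nullptr, *BaseOp1 = nullptr;
      int64_t Offset0 = 0, Offset1 = 0;
      if (!TII.getMemOperandWithOffset(MIa, BaseOp0, Offset0, &TRI) ||
          !TII.getMemOperandWithOffset(MIb, BaseOp1, Offset1, &TRI))
        return false;                   // Addressing not understood: give up.
      if (!BaseOp0->isIdenticalTo(*BaseOp1))
        return false;                   // Different bases: cannot compare offsets.
      if (!MIa.hasOneMemOperand() || !MIb.hasOneMemOperand())
        return false;                   // Need exactly one access of known width.
      uint64_t Width0 = (*MIa.memoperands_begin())->getSize();
      uint64_t Width1 = (*MIb.memoperands_begin())->getSize();
      int64_t LowOffset = std::min(Offset0, Offset1);
      int64_t HighOffset = std::max(Offset0, Offset1);
      uint64_t LowWidth = (LowOffset == Offset0) ? Width0 : Width1;
      // Disjoint iff the lower range ends at or before the higher one begins.
      return LowOffset + static_cast<int64_t>(LowWidth) <= HighOffset;
    }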
|
D | SIInstrInfo.h |
  133  const MachineInstr &MIb) const;
  316  const MachineInstr &MIb) const override;
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/Hexagon/MCTargetDesc/ |
D | HexagonMCDuplexInfo.cpp |
  580  MCInst const &MIb, bool ExtendedB,  in isOrderedDuplexPair() argument
  588  unsigned Opcode = MIb.getOpcode();  in isOrderedDuplexPair()
  593  MIbG = HexagonMCInstrInfo::getDuplexCandidateGroup(MIb);  in isOrderedDuplexPair()
  602  MCInst SubInst1 = HexagonMCInstrInfo::deriveSubInst(MIb);  in isOrderedDuplexPair()
  616  if (MIb.getOpcode() == Hexagon::S2_allocframe)  in isOrderedDuplexPair()
  628  if (subInstWouldBeExtended(MIb) && !ExtendedB)  in isOrderedDuplexPair()
  634  if ((MIb.getNumOperands() > 1) && MIb.getOperand(1).isReg() &&  in isOrderedDuplexPair()
  635  (MIb.getOperand(1).getReg() == Hexagon::R31))  in isOrderedDuplexPair()
  637  if ((MIb.getNumOperands() > 0) && MIb.getOperand(0).isReg() &&  in isOrderedDuplexPair()
  638  (MIb.getOperand(0).getReg() == Hexagon::R31))  in isOrderedDuplexPair()
  [all …]
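isOrderedDuplexPair() decides whether two MCInsts may be packed as a Hexagon duplex: it classifies both into duplex candidate groups, derives the would-be sub-instructions, and rejects candidates that would need an extender or that read or write the link register R31 in operand 0 or 1. A small illustrative check of that last screening step using the public MCInst API; the register number is a parameter here only because Hexagon::R31 lives in target-internal generated headers:

    #include "llvm/MC/MCInst.h"

    using namespace llvm;

    // Reject a duplex/sub-instruction candidate whose first or second operand
    // is the link register. `LinkReg` is an illustrative stand-in for
    // Hexagon::R31; the function name is not from HexagonMCDuplexInfo.cpp.
    static bool usesLinkRegister(const MCInst &MIb, unsigned LinkReg) {
      if (MIb.getNumOperands() > 1 && MIb.getOperand(1).isReg() &&
          MIb.getOperand(1).getReg() == LinkReg)
        return true;
      if (MIb.getNumOperands() > 0 && MIb.getOperand(0).isReg() &&
          MIb.getOperand(0).getReg() == LinkReg)
        return true;
      return false;
    }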
|
D | HexagonMCCompound.cpp |
  335  MCInst const &MIb, bool IsExtendedB) {  in isOrderedCompoundPair() argument
  337  unsigned MIbG = getCompoundCandidateGroup(MIb, IsExtendedB);  in isOrderedCompoundPair()
  345  (MIa.getOperand(0).getReg() == MIb.getOperand(0).getReg()));  in isOrderedCompoundPair()
|
D | HexagonMCInstrInfo.h |
  224  bool isDuplexPair(MCInst const &MIa, MCInst const &MIb);
  259  bool ExtendedA, MCInst const &MIb, bool ExtendedB,
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/RISCV/ |
D | RISCVInstrInfo.cpp |
  586  const MachineInstr &MIa, const MachineInstr &MIb) const {  in areMemAccessesTriviallyDisjoint()
  588  assert(MIb.mayLoadOrStore() && "MIb must be a load or store.");  in areMemAccessesTriviallyDisjoint()
  590  if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects() ||  in areMemAccessesTriviallyDisjoint()
  591  MIa.hasOrderedMemoryRef() || MIb.hasOrderedMemoryRef())  in areMemAccessesTriviallyDisjoint()
  604  getMemOperandWithOffsetWidth(MIb, BaseOpB, OffsetB, WidthB, TRI)) {  in areMemAccessesTriviallyDisjoint()
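The RISCV hits above, and the structurally identical Lanai, AArch64 and PowerPC entries further down, all reduce to the same arithmetic once a common base register has been established: compare the ranges [OffsetA, OffsetA + WidthA) and [OffsetB, OffsetB + WidthB). A standalone model of that test; the names and the example offsets are illustrative, not LLVM's:

    #include <cstdint>
    #include <cstdio>

    // Two same-base accesses are trivially disjoint when the lower byte range
    // ends at or before the higher one begins.
    static bool rangesDisjoint(int64_t OffA, uint64_t WidthA,
                               int64_t OffB, uint64_t WidthB) {
      int64_t LowOff = OffA <= OffB ? OffA : OffB;
      uint64_t LowWidth = OffA <= OffB ? WidthA : WidthB;
      int64_t HighOff = OffA <= OffB ? OffB : OffA;
      return LowOff + static_cast<int64_t>(LowWidth) <= HighOff;
    }

    int main() {
      // 4-byte store at base+0 vs. 4-byte load at base+4: disjoint.
      std::printf("%d\n", rangesDisjoint(0, 4, 4, 4));   // prints 1
      // 8-byte store at base+0 vs. 4-byte load at base+4: overlapping.
      std::printf("%d\n", rangesDisjoint(0, 8, 4, 4));   // prints 0
    }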
|
D | RISCVInstrInfo.h | 95 const MachineInstr &MIb) const override;
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/Lanai/ |
D | LanaiInstrInfo.cpp |
  89  const MachineInstr &MIa, const MachineInstr &MIb) const {  in areMemAccessesTriviallyDisjoint()
  91  assert(MIb.mayLoadOrStore() && "MIb must be a load or store.");  in areMemAccessesTriviallyDisjoint()
  93  if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects() ||  in areMemAccessesTriviallyDisjoint()
  94  MIa.hasOrderedMemoryRef() || MIb.hasOrderedMemoryRef())  in areMemAccessesTriviallyDisjoint()
  107  getMemOperandWithOffsetWidth(MIb, BaseOpB, OffsetB, WidthB, TRI)) {  in areMemAccessesTriviallyDisjoint()
|
D | LanaiInstrInfo.h | 39 const MachineInstr &MIb) const override;
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/Hexagon/ |
D | HexagonInstrInfo.h |
  291  const MachineInstr &MIb) const override;
  356  bool isDuplexPair(const MachineInstr &MIa, const MachineInstr &MIb) const;
|
D | HexagonInstrInfo.cpp |
  1871  const MachineInstr &MIa, const MachineInstr &MIb) const {  in areMemAccessesTriviallyDisjoint()
  1872  if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects() ||  in areMemAccessesTriviallyDisjoint()
  1873  MIa.hasOrderedMemoryRef() || MIb.hasOrderedMemoryRef())  in areMemAccessesTriviallyDisjoint()
  1878  if (MIa.mayLoad() && !isMemOp(MIa) && MIb.mayLoad() && !isMemOp(MIb))  in areMemAccessesTriviallyDisjoint()
  1891  if (!getBaseAndOffsetPosition(MIb, BasePosB, OffsetPosB))  in areMemAccessesTriviallyDisjoint()
  1893  const MachineOperand &BaseB = MIb.getOperand(BasePosB);  in areMemAccessesTriviallyDisjoint()
  1902  unsigned SizeB = getMemAccessSize(MIb);  in areMemAccessesTriviallyDisjoint()
  1906  const MachineOperand &OffB = MIb.getOperand(OffsetPosB);  in areMemAccessesTriviallyDisjoint()
  1908  !MIb.getOperand(OffsetPosB).isImm())  in areMemAccessesTriviallyDisjoint()
  1911  int OffsetB = isPostIncrement(MIb) ? 0 : OffB.getImm();  in areMemAccessesTriviallyDisjoint()
  [all …]
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/ |
D | AArch64LoadStoreOptimizer.cpp |
  1147  static bool mayAlias(MachineInstr &MIa, MachineInstr &MIb,  in mayAlias() argument
  1150  if (!MIa.mayStore() && !MIb.mayStore())  in mayAlias()
  1154  if (!MIa.mayLoadOrStore() && !MIb.mayLoadOrStore())  in mayAlias()
  1157  return MIa.mayAlias(AA, MIb, /*UseTBAA*/false);  in mayAlias()
  1163  for (MachineInstr *MIb : MemInsns)  in mayAlias()
  1164  if (mayAlias(MIa, *MIb, AA))  in mayAlias()
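The load/store optimizer's static mayAlias() helper hit above short-circuits the cheap cases before paying for an alias-analysis query: two loads can never conflict, instructions that do not touch memory are ignored, and everything else is handed to MachineInstr::mayAlias() with TBAA disabled. A restatement of that logic as a sketch; the wrapper name here is invented:

    #include "llvm/Analysis/AliasAnalysis.h"
    #include "llvm/CodeGen/MachineInstr.h"

    using namespace llvm;

    // Cheap filters first, then the full MachineInstr::mayAlias() query.
    static bool instrsMayAlias(MachineInstr &MIa, MachineInstr &MIb,
                               AliasAnalysis *AA) {
      // If neither instruction writes memory, the two cannot conflict.
      if (!MIa.mayStore() && !MIb.mayStore())
        return false;
      // If neither instruction touches memory, there is nothing to alias.
      if (!MIa.mayLoadOrStore() && !MIb.mayLoadOrStore())
        return false;
      // Otherwise defer to the generic query (TBAA deliberately not used).
      return MIa.mayAlias(AA, MIb, /*UseTBAA=*/false);
    }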
|
D | AArch64InstrInfo.h | 59 const MachineInstr &MIb) const override;
|
D | AArch64InstrInfo.cpp |
  933  const MachineInstr &MIa, const MachineInstr &MIb) const {  in areMemAccessesTriviallyDisjoint()
  940  assert(MIb.mayLoadOrStore() && "MIb must be a load or store.");  in areMemAccessesTriviallyDisjoint()
  942  if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects() ||  in areMemAccessesTriviallyDisjoint()
  943  MIa.hasOrderedMemoryRef() || MIb.hasOrderedMemoryRef())  in areMemAccessesTriviallyDisjoint()
  952  getMemOperandWithOffsetWidth(MIb, BaseOpB, OffsetB, WidthB, TRI)) {  in areMemAccessesTriviallyDisjoint()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/SystemZ/ |
D | SystemZInstrInfo.h | 336 const MachineInstr &MIb) const override;
|
D | SystemZInstrInfo.cpp |
  1783  const MachineInstr &MIb) const {  in areMemAccessesTriviallyDisjoint()
  1785  if (!MIa.hasOneMemOperand() || !MIb.hasOneMemOperand())  in areMemAccessesTriviallyDisjoint()
  1793  MachineMemOperand *MMOb = *MIb.memoperands_begin();  in areMemAccessesTriviallyDisjoint()
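The SystemZ variant takes a different route from the base-register comparisons above: it insists that each instruction carry exactly one MachineMemOperand and reasons about those operands directly. One plausible way to use such per-operand information, sketched here as an assumption rather than a transcription of SystemZInstrInfo.cpp, is to compare accesses rooted at the same underlying IR value by their offset and size:

    #include <cstdint>

    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineMemOperand.h"

    using namespace llvm;

    // Assumption-level sketch: with exactly one memoperand per instruction,
    // two accesses on the same IR value are disjoint when their byte ranges
    // [getOffset(), getOffset() + getSize()) do not intersect.
    static bool singleMemOperandsDisjoint(const MachineInstr &MIa,
                                          const MachineInstr &MIb) {
      if (!MIa.hasOneMemOperand() || !MIb.hasOneMemOperand())
        return false;                           // Not enough information.
      const MachineMemOperand *MMOa = *MIa.memoperands_begin();
      const MachineMemOperand *MMOb = *MIb.memoperands_begin();
      if (!MMOa->getValue() || MMOa->getValue() != MMOb->getValue())
        return false;                           // Different or unknown roots.
      int64_t OffA = MMOa->getOffset(), OffB = MMOb->getOffset();
      uint64_t SizeA = MMOa->getSize(), SizeB = MMOb->getSize();
      if (OffA <= OffB)
        return OffA + static_cast<int64_t>(SizeA) <= OffB;
      return OffB + static_cast<int64_t>(SizeB) <= OffA;
    }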
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/include/llvm/CodeGen/ |
D | TargetInstrInfo.h |
  1651  const MachineInstr &MIb) const {  in areMemAccessesTriviallyDisjoint() argument
  1654  assert(MIb.mayLoadOrStore() &&  in areMemAccessesTriviallyDisjoint()
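All of the target files in this listing override the single hook declared here. The base version in TargetInstrInfo.h only asserts that both instructions are loads or stores and conservatively answers false, i.e. "may alias"; a backend returns true only when it can prove the two accesses are disjoint. A skeletal illustration of such an override; the class name is invented for the example:

    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/TargetInstrInfo.h"

    using namespace llvm;

    // Skeleton only: real backends derive from their TableGen-generated
    // <Target>GenInstrInfo class, which in turn derives from TargetInstrInfo.
    class ExampleInstrInfo : public TargetInstrInfo {
    public:
      bool areMemAccessesTriviallyDisjoint(const MachineInstr &MIa,
                                           const MachineInstr &MIb) const override {
        // A real implementation compares base operands, offsets and access
        // widths; returning false ("may alias") is always the safe answer.
        return false;
      }
    };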
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/PowerPC/ |
D | PPCInstrInfo.h | 371 const MachineInstr &MIb) const override;
|
D | PPCInstrInfo.cpp |
  4281  const MachineInstr &MIa, const MachineInstr &MIb) const {  in areMemAccessesTriviallyDisjoint()
  4283  assert(MIb.mayLoadOrStore() && "MIb must be a load or store.");  in areMemAccessesTriviallyDisjoint()
  4285  if (MIa.hasUnmodeledSideEffects() || MIb.hasUnmodeledSideEffects() ||  in areMemAccessesTriviallyDisjoint()
  4286  MIa.hasOrderedMemoryRef() || MIb.hasOrderedMemoryRef())  in areMemAccessesTriviallyDisjoint()
  4299  getMemOperandWithOffsetWidth(MIb, BaseOpB, OffsetB, WidthB, TRI)) {  in areMemAccessesTriviallyDisjoint()
|