/external/llvm/lib/CodeGen/
D | ImplicitNullChecks.cpp | 485 ImplicitNullChecks::insertFaultingLoad(MachineInstr *LoadMI, in insertFaultingLoad()
|
D | InlineSpiller.cpp | 728 MachineInstr *LoadMI) { in foldMemoryOperand()
|
D | TargetInstrInfo.cpp | 780 MachineInstr &LoadMI, in foldMemoryOperand()
|
/external/swiftshader/third_party/LLVM/lib/CodeGen/ |
D | VirtRegRewriter.cpp | 947 MachineInstr *LoadMI = prior(InsertLoc); in GetRegForReload() local
| 1770 MachineInstr *LoadMI = prior(InsertLoc); in InsertEmergencySpills() local
| 1872 MachineInstr *LoadMI = prior(InsertLoc); in InsertRestores() local
| 2217 MachineInstr *LoadMI = prior(InsertLoc); in ProcessUses() local
|
D | InlineSpiller.cpp | 1018 MachineInstr *LoadMI) { in foldMemoryOperand()
|
/external/swiftshader/third_party/LLVM/include/llvm/Target/ |
D | TargetInstrInfo.h | 449 MachineInstr* LoadMI) const { in foldMemoryOperandImpl()
|
/external/swiftshader/third_party/llvm-7.0/llvm/lib/Target/PowerPC/ |
D | PPCMIPeephole.cpp | 319 MachineInstr *LoadMI = MRI->getVRegDef(DefReg); in simplifyCode() local
|
/external/swiftshader/third_party/llvm-7.0/llvm/lib/Target/AArch64/ |
D | AArch64LoadStoreOptimizer.cpp | 1047 MachineInstr &LoadMI = *I; in findMatchingStore() local
|
D | AArch64InstrInfo.cpp | 3177 MachineInstr &LoadMI = *--InsertPt; in foldMemoryOperandImpl() local
|
D | AArch64FastISel.cpp | 4457 const auto *LoadMI = MI; in optimizeIntExtLoad() local
|
/external/llvm/lib/Target/X86/ |
D | X86MCInstLower.cpp | 908 MCInst LoadMI; in LowerFAULTING_LOAD_OP() local
|
D | X86InstrInfo.cpp | 6165 static bool isNonFoldablePartialRegisterLoad(const MachineInstr &LoadMI, in isNonFoldablePartialRegisterLoad()
| 6220 MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI, in foldMemoryOperandImpl()
|
D | X86FastISel.cpp | 778 MachineInstrBuilder LoadMI = in handleConstantAddresses() local
|
/external/llvm/lib/Target/AArch64/ |
D | AArch64LoadStoreOptimizer.cpp | 1103 MachineInstr &LoadMI = *I; in findMatchingStore() local
|
D | AArch64FastISel.cpp | 4371 const auto *LoadMI = MI; in optimizeIntExtLoad() local
|
/external/llvm/lib/Target/SystemZ/ |
D | SystemZInstrInfo.cpp | 1029 MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI, in foldMemoryOperandImpl()
|
/external/swiftshader/third_party/llvm-7.0/llvm/lib/CodeGen/ |
D | TargetInstrInfo.cpp | 624 MachineInstr &LoadMI, in foldMemoryOperand()
|
D | InlineSpiller.cpp | 757 MachineInstr *LoadMI) { in foldMemoryOperand()
|
/external/swiftshader/third_party/LLVM/lib/Target/X86/ |
D | X86FastISel.cpp | 548 MachineInstrBuilder LoadMI = in X86SelectAddress() local
|
/external/swiftshader/third_party/llvm-7.0/llvm/lib/Target/SystemZ/ |
D | SystemZInstrInfo.cpp | 1320 MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI, in foldMemoryOperandImpl()
|
/external/swiftshader/third_party/llvm-7.0/llvm/lib/Target/X86/ |
D | X86InstrInfo.cpp | 5074 static bool isNonFoldablePartialRegisterLoad(const MachineInstr &LoadMI, in isNonFoldablePartialRegisterLoad()
| 5191 MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI, in foldMemoryOperandImpl()
|
D | X86FastISel.cpp | 805 MachineInstrBuilder LoadMI = in handleConstantAddresses() local
|
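Note: most of the hits above cluster around TargetInstrInfo's load-folding path: the lib/CodeGen foldMemoryOperand() entries call into the target-overridable foldMemoryOperandImpl() hook (the X86, AArch64, and SystemZ entries), which tries to fuse the load named LoadMI into another instruction. As a rough sketch only, assuming the llvm-7.0 signature shown in the listing and a hypothetical target class MyTargetInstrInfo, such an override has this shape:

#include "llvm/ADT/ArrayRef.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/TargetInstrInfo.h"

using namespace llvm;

// Hypothetical target for illustration; the parameter list mirrors the
// foldMemoryOperandImpl() lines listed above for the llvm-7.0 trees.
struct MyTargetInstrInfo : public TargetInstrInfo {
  MachineInstr *foldMemoryOperandImpl(MachineFunction &MF, MachineInstr &MI,
                                      ArrayRef<unsigned> Ops,
                                      MachineBasicBlock::iterator InsertPt,
                                      MachineInstr &LoadMI,
                                      LiveIntervals *LIS = nullptr) const override;
};

MachineInstr *MyTargetInstrInfo::foldMemoryOperandImpl(
    MachineFunction &MF, MachineInstr &MI, ArrayRef<unsigned> Ops,
    MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,
    LiveIntervals *LIS) const {
  // Only plain, side-effect-free loads are candidates for folding; X86,
  // for example, additionally rejects partial-register loads (see the
  // isNonFoldablePartialRegisterLoad hits above).
  if (!LoadMI.mayLoad() || LoadMI.hasUnmodeledSideEffects())
    return nullptr;

  // Target-specific matching would go here: emit a single combined
  // load-and-operate instruction at InsertPt and return it, or return
  // nullptr to leave LoadMI and MI as separate instructions.
  return nullptr;
}

Callers do not invoke the Impl hook directly; they go through TargetInstrInfo::foldMemoryOperand() (the lib/CodeGen entries above), which, roughly speaking, validates the fold and transfers memory operands onto the newly created instruction.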