/external/llvm/lib/CodeGen/

D | ImplicitNullChecks.cpp
      113  MachineInstr *insertFaultingLoad(MachineInstr *LoadMI, MachineBasicBlock *MBB,
      485  ImplicitNullChecks::insertFaultingLoad(MachineInstr *LoadMI,    in insertFaultingLoad() argument
      492  unsigned NumDefs = LoadMI->getDesc().getNumDefs();    in insertFaultingLoad()
      497  DefReg = LoadMI->defs().begin()->getReg();    in insertFaultingLoad()
      498  assert(std::distance(LoadMI->defs().begin(), LoadMI->defs().end()) == 1 &&    in insertFaultingLoad()
      504  .addImm(LoadMI->getOpcode());    in insertFaultingLoad()
      506  for (auto &MO : LoadMI->uses())    in insertFaultingLoad()
      509  MIB.setMemRefs(LoadMI->memoperands_begin(), LoadMI->memoperands_end());    in insertFaultingLoad()
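Read in sequence, these hits outline how the pass wraps a load in a FAULTING_LOAD_OP pseudo: reuse the load's def register, stash the original opcode as an immediate, copy the use (address) operands, and inherit the memory operands. A minimal reconstruction of that shape — a sketch, not the verbatim pass code; the HandlerLabel and TII parameters are assumptions about the surrounding context, and the usual LLVM CodeGen headers plus `using namespace llvm;` are taken as given:

    // Sketch reconstructed from the insertFaultingLoad() hits above.
    MachineInstr *insertFaultingLoadSketch(MachineInstr *LoadMI,
                                           MachineBasicBlock *MBB,
                                           MCSymbol *HandlerLabel,
                                           const TargetInstrInfo *TII) {
      const unsigned NoRegister = 0;  // loads with no def still need a def slot
      DebugLoc DL;
      unsigned NumDefs = LoadMI->getDesc().getNumDefs();
      assert(NumDefs <= 1 && "multiple defs not handled in this sketch");

      unsigned DefReg = NoRegister;
      if (NumDefs != 0) {
        DefReg = LoadMI->defs().begin()->getReg();
        assert(std::distance(LoadMI->defs().begin(), LoadMI->defs().end()) == 1 &&
               "expected exactly one def");
      }

      auto MIB = BuildMI(MBB, DL, TII->get(TargetOpcode::FAULTING_LOAD_OP), DefReg)
                     .addSym(HandlerLabel)          // where execution resumes on a fault
                     .addImm(LoadMI->getOpcode());  // original opcode, kept as an imm

      for (auto &MO : LoadMI->uses())
        MIB.addOperand(MO);                         // carry over the address operands

      // Keep the load's memory operands so alias analysis still sees the access.
      MIB.setMemRefs(LoadMI->memoperands_begin(), LoadMI->memoperands_end());
      return MIB;
    }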

D | TargetInstrInfo.cpp
      780  MachineInstr &LoadMI,    in foldMemoryOperand() argument
      782  assert(LoadMI.canFoldAsLoad() && "LoadMI isn't foldable!");    in foldMemoryOperand()
      796  isLoadFromStackSlot(LoadMI, FrameIndex)) {    in foldMemoryOperand()
      803  NewMI = foldMemoryOperandImpl(MF, MI, Ops, MI, LoadMI, LIS);    in foldMemoryOperand()
      810  NewMI->setMemRefs(LoadMI.memoperands_begin(), LoadMI.memoperands_end());    in foldMemoryOperand()
      815  for (MachineInstr::mmo_iterator I = LoadMI.memoperands_begin(),    in foldMemoryOperand()
      816  E = LoadMI.memoperands_end();    in foldMemoryOperand()
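Together these hits give the contract of the generic driver: the load must be marked foldable, the target hook does the real rewriting, and the folded instruction inherits the load's memory operands so alias information survives the fold. A condensed reading of the function — the STACKMAP/PATCHPOINT special case behind the isLoadFromStackSlot() check at line 796, the debug-only checks, and the liveness updates are all elided:

    // Condensed sketch of TargetInstrInfo::foldMemoryOperand(MI, Ops, LoadMI,
    // LIS), per the hits above; not line-accurate.
    MachineInstr *TargetInstrInfo::foldMemoryOperand(MachineInstr &MI,
                                                     ArrayRef<unsigned> Ops,
                                                     MachineInstr &LoadMI,
                                                     LiveIntervals *LIS) const {
      // Only loads the target marked foldable may be folded at all.
      assert(LoadMI.canFoldAsLoad() && "LoadMI isn't foldable!");
      MachineFunction &MF = *MI.getParent()->getParent();

      // Ask the target-specific hook to rewrite MI to read from LoadMI's address.
      MachineInstr *NewMI = foldMemoryOperandImpl(MF, MI, Ops, MI, LoadMI, LIS);
      if (!NewMI)
        return nullptr;

      // Give the folded instruction the load's memory operands so later passes
      // still see the original memory access (line 810).
      NewMI->setMemRefs(LoadMI.memoperands_begin(), LoadMI.memoperands_end());
      return NewMI;
    }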

D | InlineSpiller.cpp
      203  MachineInstr *LoadMI = nullptr);
      728  MachineInstr *LoadMI) {    in foldMemoryOperand() argument
      758  if (LoadMI && MO.isDef())    in foldMemoryOperand()
      768  LoadMI ? TII.foldMemoryOperand(*MI, FoldOps, *LoadMI, &LIS)    in foldMemoryOperand()
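The spiller shows why the LoadMI parameter is optional: without it the fold targets the stack slot itself, with it the fold targets a rematerialized load, and in the remat case only uses may be folded, since a def would need a store-back that remat cannot provide. A condensed sketch of that decision; the helper name, the FoldOps/StackSlot parameters, and the success handling are assumptions standing in for the surrounding spiller code:

    // Condensed sketch of the spiller's use of the fold API. FoldOps holds the
    // operand indices that currently read the spilled register; StackSlot is
    // the slot assigned to it.
    bool foldOrKeep(MachineInstr *MI, SmallVectorImpl<unsigned> &FoldOps,
                    MachineInstr *LoadMI, int StackSlot,
                    const TargetInstrInfo &TII, LiveIntervals &LIS) {
      // Folding a rematerialized load can replace a use, never a def.
      for (unsigned Idx : FoldOps)
        if (LoadMI && MI->getOperand(Idx).isDef())
          return false;

      MachineInstr *FoldMI =
          LoadMI ? TII.foldMemoryOperand(*MI, FoldOps, *LoadMI, &LIS)
                 : TII.foldMemoryOperand(*MI, FoldOps, StackSlot, &LIS);
      return FoldMI != nullptr;  // caller erases the original MI on success
    }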

/external/swiftshader/third_party/LLVM/lib/CodeGen/

D | TargetInstrInfoImpl.cpp
      334  MachineInstr* LoadMI) const {    in foldMemoryOperand()
      335  assert(LoadMI->getDesc().canFoldAsLoad() && "LoadMI isn't foldable!");    in foldMemoryOperand()
      344  MachineInstr *NewMI = foldMemoryOperandImpl(MF, MI, Ops, LoadMI);    in foldMemoryOperand()
      350  NewMI->setMemRefs(LoadMI->memoperands_begin(),    in foldMemoryOperand()
      351  LoadMI->memoperands_end());    in foldMemoryOperand()

D | InlineSpiller.cpp
      178  MachineInstr *LoadMI = 0);
     1018  MachineInstr *LoadMI) {    in foldMemoryOperand() argument
     1032  if (LoadMI && MO.isDef())    in foldMemoryOperand()
     1040  LoadMI ? TII.foldMemoryOperand(MI, FoldOps, LoadMI)    in foldMemoryOperand()
     1045  if (!LoadMI)    in foldMemoryOperand()

D | VirtRegRewriter.cpp
      947  MachineInstr *LoadMI = prior(InsertLoc);    in GetRegForReload() local
      948  VRM.addSpillSlotUse(NewOp.StackSlotOrReMat, LoadMI);    in GetRegForReload()
     1770  MachineInstr *LoadMI = prior(InsertLoc);    in InsertEmergencySpills() local
     1771  VRM->addSpillSlotUse(SS, LoadMI);    in InsertEmergencySpills()
     1773  DistanceMap.insert(std::make_pair(LoadMI, DistanceMap.size()));    in InsertEmergencySpills()
     1872  MachineInstr *LoadMI = prior(InsertLoc);    in InsertRestores() local
     1873  VRM->addSpillSlotUse(SSorRMId, LoadMI);    in InsertRestores()
     1875  DistanceMap.insert(std::make_pair(LoadMI, DistanceMap.size()));    in InsertRestores()
     2217  MachineInstr *LoadMI = prior(InsertLoc);    in ProcessUses() local
     2218  VRM->addSpillSlotUse(SSorRMId, LoadMI);    in ProcessUses()
     [all …]
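All of these hits repeat one idiom: emit a reload in front of InsertLoc, then recover the freshly created instruction with prior(InsertLoc) so it can be recorded against the spill slot and the distance map. A sketch of the idiom; the loadRegFromStackSlot() call and its parameter list are my assumption about the surrounding context in this old rewriter, with prior() being that LLVM era's std::prev:

    // Sketch of the reload-bookkeeping idiom from the VirtRegRewriter hits.
    static void emitReloadAndRecord(MachineBasicBlock *MBB,
                                    MachineBasicBlock::iterator InsertLoc,
                                    unsigned PhysReg, int SSorRMId,
                                    const TargetRegisterClass *RC,
                                    const TargetInstrInfo *TII,
                                    const TargetRegisterInfo *TRI,
                                    VirtRegMap *VRM,
                                    DenseMap<MachineInstr*, unsigned> &DistanceMap) {
      TII->loadRegFromStackSlot(*MBB, InsertLoc, PhysReg, SSorRMId, RC, TRI);
      MachineInstr *LoadMI = prior(InsertLoc);  // the reload just emitted
      VRM->addSpillSlotUse(SSorRMId, LoadMI);   // record: the slot is read here
      DistanceMap.insert(std::make_pair(LoadMI, DistanceMap.size()));
    }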

/external/swiftshader/third_party/llvm-7.0/llvm/lib/CodeGen/

D | TargetInstrInfo.cpp
      624  MachineInstr &LoadMI,    in foldMemoryOperand() argument
      626  assert(LoadMI.canFoldAsLoad() && "LoadMI isn't foldable!");    in foldMemoryOperand()
      642  isLoadFromStackSlot(LoadMI, FrameIndex)) {    in foldMemoryOperand()
      649  NewMI = foldMemoryOperandImpl(MF, MI, Ops, MI, LoadMI, LIS);    in foldMemoryOperand()
      657  NewMI->setMemRefs(LoadMI.memoperands_begin(), LoadMI.memoperands_end());    in foldMemoryOperand()
      661  for (MachineInstr::mmo_iterator I = LoadMI.memoperands_begin(),    in foldMemoryOperand()
      662  E = LoadMI.memoperands_end();    in foldMemoryOperand()

D | InlineSpiller.cpp
      223  MachineInstr *LoadMI = nullptr);
      757  MachineInstr *LoadMI) {    in foldMemoryOperand() argument
      790  if (LoadMI && MO.isDef())    in foldMemoryOperand()
      805  LoadMI ? TII.foldMemoryOperand(*MI, FoldOps, *LoadMI, &LIS)    in foldMemoryOperand()

/external/swiftshader/third_party/LLVM/include/llvm/Target/

D | TargetInstrInfo.h
      430  MachineInstr* LoadMI) const;
      449  MachineInstr* LoadMI) const {    in foldMemoryOperandImpl() argument

/external/swiftshader/third_party/llvm-7.0/llvm/lib/Target/AArch64/

D | AArch64LoadStoreOptimizer.cpp
     1047  MachineInstr &LoadMI = *I;    in findMatchingStore() local
     1048  unsigned BaseReg = getLdStBaseOp(LoadMI).getReg();    in findMatchingStore()
     1074  if (MI.mayStore() && isMatchingStore(LoadMI, MI) &&    in findMatchingStore()
     1076  isLdOffsetInRangeOfSt(LoadMI, MI, TII) &&    in findMatchingStore()
     1094  if (MI.mayStore() && mayAlias(LoadMI, MI, AA))    in findMatchingStore()
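These hits trace the backward scan behind store-to-load forwarding: starting at the load, walk up the block for a store to the same base register whose stored bytes cover the loaded bytes, and give up as soon as a possibly aliasing store intervenes. A schematic version, assuming the file-local helpers named in the hits (getLdStBaseOp, isMatchingStore, isLdOffsetInRangeOfSt, mayAlias); the scan limit and the base-register modification checks of the real function are elided:

    // Schematic sketch of findMatchingStore(): scan backwards from the load
    // for a store it can forward from, bailing out on any aliasing store.
    bool findMatchingStoreSketch(MachineBasicBlock::iterator I,
                                 MachineBasicBlock::iterator Begin,
                                 MachineBasicBlock::iterator &StoreI,
                                 const AArch64InstrInfo *TII, AliasAnalysis *AA) {
      MachineInstr &LoadMI = *I;
      unsigned BaseReg = getLdStBaseOp(LoadMI).getReg();

      while (I != Begin) {
        MachineInstr &MI = *--I;

        // A store of the right kind, to the same base register, whose stored
        // bytes cover the loaded bytes: forward from it.
        if (MI.mayStore() && isMatchingStore(LoadMI, MI) &&
            BaseReg == getLdStBaseOp(MI).getReg() &&
            isLdOffsetInRangeOfSt(LoadMI, MI, TII)) {
          StoreI = I;
          return true;
        }

        // Any other store that may alias the load makes forwarding unsafe.
        if (MI.mayStore() && mayAlias(LoadMI, MI, AA))
          return false;
      }
      return false;
    }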

D | AArch64FastISel.cpp
     4457  const auto *LoadMI = MI;    in optimizeIntExtLoad() local
     4458  if (LoadMI->getOpcode() == TargetOpcode::COPY &&    in optimizeIntExtLoad()
     4459  LoadMI->getOperand(1).getSubReg() == AArch64::sub_32) {    in optimizeIntExtLoad()
     4461  LoadMI = MRI.getUniqueVRegDef(LoadReg);    in optimizeIntExtLoad()
     4462  assert(LoadMI && "Expected valid instruction");    in optimizeIntExtLoad()
     4464  if (!(IsZExt && isZExtLoad(LoadMI)) && !(!IsZExt && isSExtLoad(LoadMI)))    in optimizeIntExtLoad()
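Here the extend-elimination looks through a COPY of the low 32 bits to find the actual defining load, which in SSA machine IR is a single getUniqueVRegDef() lookup; the fold is legal only when the load already performs the requested kind of extension. Sketched below — the wrapper function and its return convention are mine; isZExtLoad/isSExtLoad are the file's own helpers:

    // Sketch of the look-through-COPY step in optimizeIntExtLoad(): returns
    // the defining load when the extension kinds line up, else nullptr.
    const MachineInstr *lookThroughCopySketch(const MachineInstr *MI,
                                              MachineRegisterInfo &MRI,
                                              bool IsZExt) {
      const auto *LoadMI = MI;
      // A 32->64-bit extend often appears as a COPY of the sub_32 subregister.
      if (LoadMI->getOpcode() == TargetOpcode::COPY &&
          LoadMI->getOperand(1).getSubReg() == AArch64::sub_32) {
        unsigned LoadReg = MI->getOperand(1).getReg();
        LoadMI = MRI.getUniqueVRegDef(LoadReg);
        assert(LoadMI && "Expected valid instruction");
      }
      // The load must already perform the same kind of extension.
      if (!(IsZExt && isZExtLoad(LoadMI)) && !(!IsZExt && isSExtLoad(LoadMI)))
        return nullptr;
      return LoadMI;
    }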

/external/llvm/lib/Target/SystemZ/

D | SystemZInstrInfo.h
      212  MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,

/external/llvm/lib/Target/AArch64/

D | AArch64LoadStoreOptimizer.cpp
     1103  MachineInstr &LoadMI = *I;    in findMatchingStore() local
     1104  unsigned BaseReg = getLdStBaseOp(LoadMI).getReg();    in findMatchingStore()
     1129  if (MI.mayStore() && isMatchingStore(LoadMI, MI) &&    in findMatchingStore()
     1131  isLdOffsetInRangeOfSt(LoadMI, MI, TII) &&    in findMatchingStore()
     1149  if (MI.mayStore() && mayAlias(LoadMI, MI, TII))    in findMatchingStore()

/external/swiftshader/third_party/llvm-7.0/llvm/lib/Target/SystemZ/

D | SystemZInstrInfo.h
      258  MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,

/external/llvm/include/llvm/Target/

D | TargetInstrInfo.h
      827  MachineInstr &LoadMI,
      920  MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,

/external/llvm/lib/Target/X86/

D | X86MCInstLower.cpp
      908  MCInst LoadMI;    in LowerFAULTING_LOAD_OP() local
      909  LoadMI.setOpcode(LoadOpcode);    in LowerFAULTING_LOAD_OP()
      912  LoadMI.addOperand(MCOperand::createReg(LoadDefRegister));    in LowerFAULTING_LOAD_OP()
      918  LoadMI.addOperand(MaybeOperand.getValue());    in LowerFAULTING_LOAD_OP()
      920  OutStreamer->EmitInstruction(LoadMI, getSubtargetInfo());    in LowerFAULTING_LOAD_OP()
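Lowering is the mirror image of the insertion shown under /external/llvm/lib/CodeGen/ above: the def register and the real load opcode were stashed as operands of the FAULTING_LOAD_OP pseudo, so the lowering rebuilds a plain MCInst from them and emits it. A compressed sketch; the operand layout (def reg, handler label, opcode immediate, then the address operands) and the fault-map bookkeeping of the real function are stated from my reading, not from the hits:

    // Compressed sketch of LowerFAULTING_LOAD_OP's MCInst reconstruction.
    void lowerFaultingLoadSketch(const MachineInstr &FaultingMI,
                                 X86MCInstLower &MCIL, MCStreamer &OS,
                                 const MCSubtargetInfo &STI) {
      unsigned LoadDefRegister = FaultingMI.getOperand(0).getReg();
      unsigned LoadOpcode = FaultingMI.getOperand(2).getImm();

      MCInst LoadMI;
      LoadMI.setOpcode(LoadOpcode);
      LoadMI.addOperand(MCOperand::createReg(LoadDefRegister));

      // Lower the remaining machine operands (the address) into MC operands.
      for (auto I = FaultingMI.operands_begin() + 3,
                E = FaultingMI.operands_end();
           I != E; ++I)
        if (auto MaybeOperand = MCIL.LowerMachineOperand(&FaultingMI, *I))
          LoadMI.addOperand(MaybeOperand.getValue());

      OS.EmitInstruction(LoadMI, STI);
    }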

D | X86InstrInfo.h
      377  MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,

D | X86InstrInfo.cpp
     6165  static bool isNonFoldablePartialRegisterLoad(const MachineInstr &LoadMI,    in isNonFoldablePartialRegisterLoad() argument
     6168  unsigned Opc = LoadMI.getOpcode();    in isNonFoldablePartialRegisterLoad()
     6171  MF.getRegInfo().getRegClass(LoadMI.getOperand(0).getReg())->getSize();    in isNonFoldablePartialRegisterLoad()
     6220  MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,    in foldMemoryOperandImpl() argument
     6223  unsigned NumOps = LoadMI.getDesc().getNumOperands();    in foldMemoryOperandImpl()
     6225  if (isLoadFromStackSlot(LoadMI, FrameIndex)) {    in foldMemoryOperandImpl()
     6226  if (isNonFoldablePartialRegisterLoad(LoadMI, MI, MF))    in foldMemoryOperandImpl()
     6240  if (LoadMI.hasOneMemOperand())    in foldMemoryOperandImpl()
     6241  Alignment = (*LoadMI.memoperands_begin())->getAlignment();    in foldMemoryOperandImpl()
     6243  switch (LoadMI.getOpcode()) {    in foldMemoryOperandImpl()
     [all …]
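The hook's first concern here is legality rather than encoding: a load that writes only part of its register must not be folded, stack-slot reloads divert to the frame-index overload, and the load's alignment gates which folded forms are allowed. A condensed reading, with the long per-opcode switch reduced to a comment:

    // Condensed sketch of the load-folding foldMemoryOperandImpl() overload,
    // per the hits above; the per-opcode dispatch (constant-pool zeros,
    // scalar_to_vector loads, and the generic table lookup) is elided.
    MachineInstr *X86InstrInfo::foldMemoryOperandImpl(
        MachineFunction &MF, MachineInstr &MI, ArrayRef<unsigned> Ops,
        MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,
        LiveIntervals *LIS) const {
      // Stack-slot reloads fold through the frame-index overload, but only
      // when the load defines the whole register (line 6226).
      int FrameIndex = 0;
      if (isLoadFromStackSlot(LoadMI, FrameIndex)) {
        if (isNonFoldablePartialRegisterLoad(LoadMI, MI, MF))
          return nullptr;
        return foldMemoryOperandImpl(MF, MI, Ops, InsertPt, FrameIndex, LIS);
      }

      // The folded memory form may require a minimum alignment; take it from
      // the load's single memory operand when present (lines 6240-6241).
      unsigned Alignment = 0;
      if (LoadMI.hasOneMemOperand())
        Alignment = (*LoadMI.memoperands_begin())->getAlignment();

      switch (LoadMI.getOpcode()) {
      default:
        // The real file dispatches here on the load opcode, using Alignment
        // to decide which folded forms are legal.
        (void)Alignment;
        return nullptr;
      }
    }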

/external/swiftshader/third_party/LLVM/lib/Target/X86/

D | X86InstrInfo.h
      276  MachineInstr* LoadMI) const;

D | X86InstrInfo.cpp
     2701  MachineInstr *LoadMI) const {    in foldMemoryOperandImpl()
     2713  if (LoadMI->hasOneMemOperand())    in foldMemoryOperandImpl()
     2714  Alignment = (*LoadMI->memoperands_begin())->getAlignment();    in foldMemoryOperandImpl()
     2716  switch (LoadMI->getOpcode()) {    in foldMemoryOperandImpl()
     2752  if (LoadMI->getOperand(0).getSubReg() != MI->getOperand(Ops[0]).getSubReg())    in foldMemoryOperandImpl()
     2756  switch (LoadMI->getOpcode()) {    in foldMemoryOperandImpl()
     2788  unsigned Opc = LoadMI->getOpcode();    in foldMemoryOperandImpl()
     2813  unsigned NumOps = LoadMI->getDesc().getNumOperands();    in foldMemoryOperandImpl()
     2815  MOs.push_back(LoadMI->getOperand(i));    in foldMemoryOperandImpl()

/external/swiftshader/third_party/llvm-7.0/llvm/include/llvm/CodeGen/

D | TargetInstrInfo.h
      918  MachineInstr &LoadMI,
     1014  MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,

/external/swiftshader/third_party/llvm-7.0/llvm/lib/Target/PowerPC/

D | PPCMIPeephole.cpp
      319  MachineInstr *LoadMI = MRI->getVRegDef(DefReg);    in simplifyCode() local
      320  if (LoadMI && LoadMI->getOpcode() == PPC::LXVDSX)    in simplifyCode()
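The PowerPC peephole needs only these two lines for its pattern test: in SSA machine IR every virtual register has one definition, so getVRegDef() hands back the instruction that feeds the splat, and if that is already LXVDSX (load VSX vector doubleword and splat), the explicit splat is redundant. As a tiny sketch, with the wrapper function name being mine:

    // Sketch of the def-chasing test from PPCMIPeephole::simplifyCode().
    bool feedsFromLoadAndSplat(unsigned DefReg, const MachineRegisterInfo *MRI) {
      MachineInstr *LoadMI = MRI->getVRegDef(DefReg);
      return LoadMI && LoadMI->getOpcode() == PPC::LXVDSX;
    }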

/external/swiftshader/third_party/LLVM/lib/Target/MBlaze/

D | MBlazeInstrInfo.td
      256  class LoadMI<bits<6> op, string instr_asm, PatFrag OpNode> :
      460  def LBUI : LoadMI<0x38, "lbui ", zextloadi8>;
      461  def LHUI : LoadMI<0x39, "lhui ", zextloadi16>;
      462  def LWI : LoadMI<0x3A, "lwi ", load>;

/external/swiftshader/third_party/llvm-7.0/llvm/lib/Target/X86/

D | X86InstrInfo.cpp
     5074  static bool isNonFoldablePartialRegisterLoad(const MachineInstr &LoadMI,    in isNonFoldablePartialRegisterLoad() argument
     5077  unsigned Opc = LoadMI.getOpcode();    in isNonFoldablePartialRegisterLoad()
     5081  MF.getRegInfo().getRegClass(LoadMI.getOperand(0).getReg());    in isNonFoldablePartialRegisterLoad()
     5191  MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,    in foldMemoryOperandImpl() argument
     5202  unsigned NumOps = LoadMI.getDesc().getNumOperands();    in foldMemoryOperandImpl()
     5204  if (isLoadFromStackSlot(LoadMI, FrameIndex)) {    in foldMemoryOperandImpl()
     5205  if (isNonFoldablePartialRegisterLoad(LoadMI, MI, MF))    in foldMemoryOperandImpl()
     5221  if (LoadMI.hasOneMemOperand())    in foldMemoryOperandImpl()
     5222  Alignment = (*LoadMI.memoperands_begin())->getAlignment();    in foldMemoryOperandImpl()
     5224  switch (LoadMI.getOpcode()) {    in foldMemoryOperandImpl()
     [all …]

D | X86InstrInfo.h
      401  MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,