/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AMDGPU/ |
D | SIFixupVectorISel.cpp |
    87  unsigned &IndexReg,  in findSRegBaseAndIndex() argument
    118  IndexReg = DefInst->getOperand(3).getReg();  in findSRegBaseAndIndex()
    120  MachineInstr *MI = MRI.getUniqueVRegDef(IndexReg);  in findSRegBaseAndIndex()
    130  IndexReg = MI->getOperand(1).getReg();  in findSRegBaseAndIndex()
    143  if (!TRI->hasVGPRs(MRI.getRegClass(IndexReg)))  in findSRegBaseAndIndex()
    146  MRI.clearKillFlags(IndexReg);  in findSRegBaseAndIndex()
    176  unsigned IndexReg = 0;  in fixupGlobalSaddr() local
    178  if (!findSRegBaseAndIndex(Op, BaseReg, IndexReg, MRI, TRI))  in fixupGlobalSaddr()
    189  NewGlob->addOperand(MF, MachineOperand::CreateReg(IndexReg, false));  in fixupGlobalSaddr()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/X86/ |
D | X86FixupLEAs.cpp |
    369  Register IndexReg = Index.getReg();  in optTwoAddrLEA() local
    379  if (IndexReg != 0)  in optTwoAddrLEA()
    380  IndexReg = TRI->getSubReg(IndexReg, X86::sub_32bit);  in optTwoAddrLEA()
    387  if (BaseReg != 0 && IndexReg != 0 && Disp.getImm() == 0 &&  in optTwoAddrLEA()
    388  (DestReg == BaseReg || DestReg == IndexReg)) {  in optTwoAddrLEA()
    391  std::swap(BaseReg, IndexReg);  in optTwoAddrLEA()
    396  .addReg(BaseReg).addReg(IndexReg)  in optTwoAddrLEA()
    401  .addReg(BaseReg).addReg(IndexReg);  in optTwoAddrLEA()
    403  } else if (DestReg == BaseReg && IndexReg == 0) {  in optTwoAddrLEA()
    549  Register IndexReg = Index.getReg();  in processInstrForSlow3OpLEA() local
    [all …]
|
D | X86InstrBuilder.h |
    54  unsigned IndexReg;  member
    60  : BaseType(RegBase), Scale(1), IndexReg(0), Disp(0), GV(nullptr),  in X86AddressMode()
    77  MO.push_back(MachineOperand::CreateReg(IndexReg, false, false, false, false,  in getFullAddress()
    108  AM.IndexReg = Op2.getReg();  in getAddressFromInstr()
    183  MIB.addImm(AM.Scale).addReg(AM.IndexReg);  in addFullAddress()
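Note: the X86InstrBuilder.h hits above define the X86AddressMode structure (base, Scale, IndexReg, Disp) and show addFullAddress() appending the five x86 memory operands in order: base register, scale immediate, index register, displacement, segment. A minimal self-contained sketch of that layout follows; the struct and function names are illustrative, not LLVM's API.

// Minimal self-contained sketch of how an X86AddressMode-style structure maps
// onto the five x86 memory operands (base, scale, index, displacement,
// segment).  The struct and function names are illustrative, not LLVM's API.
#include <cstdint>
#include <iostream>

struct AddressMode {
  unsigned BaseReg = 0;   // 0 means "no base register"
  unsigned Scale = 1;     // 1, 2, 4 or 8
  unsigned IndexReg = 0;  // 0 means "no index register"
  int32_t Disp = 0;       // signed displacement
};

// Compute base + index*scale + disp, mirroring the operand order that
// addFullAddress() emits: addReg(Base).addImm(Scale).addReg(Index).addImm(Disp).
uint64_t effectiveAddress(const AddressMode &AM, const uint64_t Regs[16]) {
  uint64_t EA = static_cast<int64_t>(AM.Disp);
  if (AM.BaseReg)
    EA += Regs[AM.BaseReg];
  if (AM.IndexReg)
    EA += Regs[AM.IndexReg] * AM.Scale;
  return EA;
}

int main() {
  uint64_t Regs[16] = {};
  Regs[3] = 0x1000;  // pretend register #3 holds the base address
  Regs[5] = 4;       // pretend register #5 holds the index
  AddressMode AM{/*BaseReg*/ 3, /*Scale*/ 8, /*IndexReg*/ 5, /*Disp*/ 16};
  std::cout << std::hex << effectiveAddress(AM, Regs) << "\n";  // prints 1030
  return 0;
}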
|
D | X86InsertPrefetch.cpp |
    83  Register IndexReg = MI.getOperand(Op + X86::AddrIndexReg).getReg();  in IsMemOpCompatibleWithPrefetch() local
    87  (IndexReg == 0 ||  in IsMemOpCompatibleWithPrefetch()
    88  X86MCRegisterClasses[X86::GR64RegClassID].contains(IndexReg) ||  in IsMemOpCompatibleWithPrefetch()
    89  X86MCRegisterClasses[X86::GR32RegClassID].contains(IndexReg));  in IsMemOpCompatibleWithPrefetch()
|
D | X86AsmPrinter.cpp |
    286  const MachineOperand &IndexReg = MI->getOperand(OpNo + X86::AddrIndexReg);  in PrintLeaMemReference() local
    296  bool HasParenPart = IndexReg.getReg() || HasBaseReg;  in PrintLeaMemReference()
    317  assert(IndexReg.getReg() != X86::ESP &&  in PrintLeaMemReference()
    324  if (IndexReg.getReg()) {  in PrintLeaMemReference()
    352  const MachineOperand &IndexReg = MI->getOperand(OpNo + X86::AddrIndexReg);  in PrintIntelMemReference() local
    376  if (IndexReg.getReg()) {  in PrintIntelMemReference()
    389  if (DispVal || (!IndexReg.getReg() && !HasBaseReg)) {  in PrintIntelMemReference()
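Note: the PrintLeaMemReference()/PrintIntelMemReference() hits above encode the AT&T-syntax printing rule: the parenthesized (base,index,scale) part is emitted only when a base or index register is present, and the displacement only when it is non-zero or stands alone. A self-contained sketch of that rule follows; names are illustrative, not the AsmPrinter API.

// Self-contained sketch of the AT&T-syntax rule visible in
// PrintLeaMemReference(): print "disp(base,index,scale)", emitting the
// parenthesized part only if a base or index register exists, and the
// displacement only if it is non-zero or stands alone.  Illustrative only.
#include <iostream>
#include <string>

std::string printATTMem(long Disp, const std::string &Base,
                        const std::string &Index, unsigned Scale) {
  std::string Out;
  bool HasParenPart = !Base.empty() || !Index.empty();
  if (Disp != 0 || !HasParenPart)
    Out += std::to_string(Disp);
  if (HasParenPart) {
    Out += "(";
    if (!Base.empty())
      Out += "%" + Base;
    if (!Index.empty())
      Out += ",%" + Index + "," + std::to_string(Scale);
    Out += ")";
  }
  return Out;
}

int main() {
  std::cout << printATTMem(16, "rbx", "rcx", 8) << "\n";  // 16(%rbx,%rcx,8)
  std::cout << printATTMem(0, "rbp", "", 1) << "\n";      // (%rbp)
  std::cout << printATTMem(0, "", "", 1) << "\n";         // 0
  return 0;
}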
|
D | X86ISelDAGToDAG.cpp |
    66  SDValue IndexReg;  member
    80  : BaseType(RegBase), Base_FrameIndex(0), Scale(1), IndexReg(), Disp(0),  in X86ISelAddressMode()
    91  IndexReg.getNode() != nullptr || Base_Reg.getNode() != nullptr;  in hasBaseOrIndexReg()
    122  if (IndexReg.getNode())  in dump()
    123  IndexReg.getNode()->dump(DAG);  in dump()
    287  AM.IndexReg), 0);  in getAddressOperands()
    288  AM.IndexReg = Neg;  in getAddressOperands()
    291  if (AM.IndexReg.getNode())  in getAddressOperands()
    292  Index = AM.IndexReg;  in getAddressOperands()
    1549  AM.Base_Reg = AM.IndexReg;  in matchAddress()
    [all …]
|
D | X86FastISel.cpp |
    230  AM.IndexReg = constrainOperandRegClass(MIB->getDesc(), AM.IndexReg,  in addFullAddress()
    736  (AM.Base.Reg == 0 && AM.IndexReg == 0)) {  in handleConstantAddresses()
    755  assert(AM.Base.Reg == 0 && AM.IndexReg == 0);  in handleConstantAddresses()
    817  if (AM.IndexReg == 0) {  in handleConstantAddresses()
    819  AM.IndexReg = getRegForValue(V);  in handleConstantAddresses()
    820  return AM.IndexReg != 0;  in handleConstantAddresses()
    904  unsigned IndexReg = AM.IndexReg;  in X86SelectAddress() local
    936  if (IndexReg == 0 &&  in X86SelectAddress()
    941  IndexReg = getRegForGEPIndex(Op).first;  in X86SelectAddress()
    942  if (IndexReg == 0)  in X86SelectAddress()
    [all …]
|
D | X86SpeculativeLoadHardening.cpp |
    1719  unsigned BaseReg = 0, IndexReg = 0;  in tracePredStateThroughBlocksAndHarden() local
    1724  IndexReg = IndexMO.getReg();  in tracePredStateThroughBlocksAndHarden()
    1726  if (!BaseReg && !IndexReg)  in tracePredStateThroughBlocksAndHarden()
    1735  (IndexReg && LoadDepRegs.test(IndexReg)))  in tracePredStateThroughBlocksAndHarden()
    1747  !HardenedAddrRegs.count(IndexReg)) {  in tracePredStateThroughBlocksAndHarden()
    1758  if (IndexReg)  in tracePredStateThroughBlocksAndHarden()
    1759  HardenedAddrRegs.insert(IndexReg);  in tracePredStateThroughBlocksAndHarden()
|
D | X86MCInstLower.cpp |
    1062  unsigned Opc, BaseReg, ScaleVal, IndexReg, Displacement, SegmentReg;  in EmitNop() local
    1063  IndexReg = Displacement = SegmentReg = 0;  in EmitNop()
    1091  IndexReg = X86::RAX;  in EmitNop()
    1097  IndexReg = X86::RAX;  in EmitNop()
    1108  IndexReg = X86::RAX;  in EmitNop()
    1114  IndexReg = X86::RAX;  in EmitNop()
    1120  IndexReg = X86::RAX;  in EmitNop()
    1143  .addReg(IndexReg)  in EmitNop()
|
D | X86InstructionSelector.cpp | 616 assert(AM.Base.Reg == 0 && AM.IndexReg == 0); in selectGlobalValue()
|
D | X86ISelLowering.cpp | 32443 if (AM.IndexReg == X86::NoRegister) in EmitInstrWithCustomInserter()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/X86/AsmParser/ |
D | X86AsmParser.cpp |
    345  unsigned BaseReg, IndexReg, TmpReg, Scale;  member in __anonc3356ce30111::X86AsmParser::IntelExprStateMachine
    368  : State(IES_INIT), PrevState(IES_ERROR), BaseReg(0), IndexReg(0),  in IntelExprStateMachine()
    378  unsigned getIndexReg() { return IndexReg; }  in getIndexReg()
    482  if (IndexReg) {  in onPlus()
    486  IndexReg = TmpReg;  in onPlus()
    537  if (IndexReg) {  in onMinus()
    541  IndexReg = TmpReg;  in onMinus()
    592  if (IndexReg) {  in onRegister()
    597  IndexReg = Reg;  in onRegister()
    666  if (IndexReg) {  in onInteger()
    [all …]
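Note: the IntelExprStateMachine hits above implement the register-assignment rule used when parsing Intel-syntax memory operands such as [rbx + rcx*4 + 8]: roughly, the first plain register becomes the base, a scaled register (or a second plain register) becomes the index, and any further register is rejected. A rough self-contained model of that rule follows; it simplifies the real state machine and all names are illustrative.

// Rough self-contained model of the register-assignment rule in the Intel
// operand parser's IntelExprStateMachine: the first plain register becomes
// the base, a scaled register (or a second plain register) becomes the index,
// and anything beyond that is rejected.  This simplifies the real state
// machine; all names are illustrative.
#include <cstdio>

struct MemExpr {
  unsigned BaseReg = 0, IndexReg = 0, Scale = 1;
  long Disp = 0;

  // Returns false on "too many registers", mirroring the parser's error path.
  bool addRegTerm(unsigned Reg, unsigned ScaleIfAny /* 0 = unscaled */) {
    if (ScaleIfAny || BaseReg) {   // scaled register, or base already taken
      if (IndexReg)
        return false;              // e.g. [eax + ebx + ecx] is not encodable
      IndexReg = Reg;
      Scale = ScaleIfAny ? ScaleIfAny : 1;
      return true;
    }
    BaseReg = Reg;
    return true;
  }
};

int main() {
  MemExpr M;                    // models parsing of: [rbx + rcx*4 + 8]
  M.addRegTerm(/*rbx*/ 3, 0);   // first register -> base
  M.addRegTerm(/*rcx*/ 1, 4);   // scaled register -> index
  M.Disp = 8;
  std::printf("base=%u index=%u scale=%u disp=%ld\n",
              M.BaseReg, M.IndexReg, M.Scale, M.Disp);
  return 0;
}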
|
D | X86Operand.h |
    63  unsigned IndexReg;  member
    137  if (Mem.IndexReg)  in print()
    139  << X86IntelInstPrinter::getRegisterName(Mem.IndexReg);  in print()
    189  return Mem.IndexReg;  in getMemIndexReg()
    317  return Mem.IndexReg >= LowR && Mem.IndexReg <= HighR;  in isMemIndexReg()
    636  Res->Mem.IndexReg = 0;
    650  unsigned BaseReg, unsigned IndexReg, unsigned Scale, SMLoc StartLoc,
    655  assert((SegReg || BaseReg || IndexReg) && "Invalid memory operand!");
    664  Res->Mem.IndexReg = IndexReg;
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/X86/MCTargetDesc/ |
D | X86MCCodeEmitter.cpp |
    187  const MCOperand &IndexReg = MI.getOperand(Op + X86::AddrIndexReg);  in is16BitMemOperand() local
    195  (IndexReg.getReg() != 0 &&  in is16BitMemOperand()
    196  X86MCRegisterClasses[X86::GR16RegClassID].contains(IndexReg.getReg()))  in is16BitMemOperand()
    206  const MCOperand &IndexReg = MI.getOperand(Op + X86::AddrIndexReg);  in is32BitMemOperand() local
    210  (IndexReg.getReg() != 0 &&  in is32BitMemOperand()
    211  X86MCRegisterClasses[X86::GR32RegClassID].contains(IndexReg.getReg()))  in is32BitMemOperand()
    214  assert(IndexReg.getReg() == 0 && "Invalid eip-based address.");  in is32BitMemOperand()
    217  if (IndexReg.getReg() == X86::EIZ)  in is32BitMemOperand()
    228  const MCOperand &IndexReg = MI.getOperand(Op + X86::AddrIndexReg);  in is64BitMemOperand() local
    232  (IndexReg.getReg() != 0 &&  in is64BitMemOperand()
    [all …]
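Note: the is16BitMemOperand()/is32BitMemOperand()/is64BitMemOperand() hits above all apply the same test: the address size of a memory operand is deduced from the register class of its base and index registers (with the EIZ/RIZ pseudo index registers counting for their width). A self-contained sketch of that classification follows; the register "classes" below are toy string sets standing in for X86MCRegisterClasses.

// Self-contained sketch of the address-size check done by
// is16BitMemOperand()/is32BitMemOperand()/is64BitMemOperand(): the address
// size follows the register class of the base and index registers.  The
// register "class" below is a toy string set, not X86MCRegisterClasses.
#include <iostream>
#include <set>
#include <string>

static const std::set<std::string> GR32 = {"eax", "ebx", "ecx", "edx",
                                           "esi", "edi", "ebp", "esp", "eiz"};

// An empty string stands for "no register" (reg == 0 in the MCOperand form).
bool is32BitMemOperand(const std::string &BaseReg, const std::string &IndexReg) {
  return (!BaseReg.empty() && GR32.count(BaseReg)) ||
         (!IndexReg.empty() && GR32.count(IndexReg));
}

int main() {
  std::cout << is32BitMemOperand("eax", "") << "\n";     // 1: 32-bit base
  std::cout << is32BitMemOperand("rax", "riz") << "\n";  // 0: 64-bit regs
  return 0;
}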
|
D | X86ATTInstPrinter.cpp |
    389  const MCOperand &IndexReg = MI->getOperand(Op + X86::AddrIndexReg);  in printMemReference() local
    399  if (DispVal || (!IndexReg.getReg() && !BaseReg.getReg()))  in printMemReference()
    406  if (IndexReg.getReg() || BaseReg.getReg()) {  in printMemReference()
    411  if (IndexReg.getReg()) {  in printMemReference()
|
D | X86IntelInstPrinter.cpp |
    348  const MCOperand &IndexReg = MI->getOperand(Op+X86::AddrIndexReg);  in printMemReference() local
    362  if (IndexReg.getReg()) {  in printMemReference()
    376  if (DispVal || (!IndexReg.getReg() && !BaseReg.getReg())) {  in printMemReference()
|
D | X86MCTargetDesc.cpp |
    532  const MCOperand &IndexReg = Inst.getOperand(MemOpStart + X86::AddrIndexReg);  in evaluateMemoryOperandAddress() local
    535  if (SegReg.getReg() != 0 || IndexReg.getReg() != 0 || ScaleAmt.getImm() != 1 ||  in evaluateMemoryOperandAddress()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/include/llvm/MC/MCParser/ |
D | MCTargetAsmParser.h |
    68  StringRef IndexReg;  member
    73  : NeedBracs(false), Imm(0), BaseReg(StringRef()), IndexReg(StringRef()),  in IntelExpr()
    78  : NeedBracs(needBracs), Imm(imm), BaseReg(baseReg), IndexReg(indexReg),  in IntelExpr()
    84  bool hasIndexReg() const { return !IndexReg.empty(); }  in hasIndexReg()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/PowerPC/ |
D | PPCFastISel.cpp |
    171  unsigned &IndexReg);
    425  unsigned &IndexReg) {  in PPCSimplifyAddress() argument
    446  IndexReg = PPCMaterializeInt(Offset, MVT::i64);  in PPCSimplifyAddress()
    447  assert(IndexReg && "Unexpected error in PPCMaterializeInt!");  in PPCSimplifyAddress()
    510  unsigned IndexReg = 0;  in PPCEmitLoad() local
    511  PPCSimplifyAddress(Addr, UseOffset, IndexReg);  in PPCEmitLoad()
    583  if (IndexReg)  in PPCEmitLoad()
    584  MIB.addReg(Addr.Base.Reg).addReg(IndexReg);  in PPCEmitLoad()
    659  unsigned IndexReg = 0;  in PPCEmitStore() local
    660  PPCSimplifyAddress(Addr, UseOffset, IndexReg);  in PPCEmitStore()
    [all …]
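Note: the PPCFastISel hits above show the D-form vs X-form decision: PPCSimplifyAddress() materializes an offset that does not fit the displacement field into IndexReg via PPCMaterializeInt(), and PPCEmitLoad()/PPCEmitStore() then add the base and index registers instead of an immediate. A self-contained sketch of that decision follows; the signed 16-bit D-form limit and all names here are illustrative assumptions.

// Self-contained sketch of the D-form vs X-form decision visible in
// PPCSimplifyAddress()/PPCEmitLoad(): if the offset fits a signed 16-bit
// D-form displacement, keep it as an immediate; otherwise materialize it
// into an index register and use the indexed (X-form) encoding.
// Names and the 16-bit limit are illustrative assumptions.
#include <cstdint>
#include <cstdio>

struct Address {
  unsigned BaseReg = 0;
  int64_t Offset = 0;
};

static bool isInt16(int64_t V) { return V >= -32768 && V <= 32767; }

// Pretend "materialization": in PPCFastISel this is PPCMaterializeInt().
static unsigned materializeInt(int64_t V, unsigned &NextVReg) {
  std::printf("  li/lis sequence for %lld into vreg%u\n",
              static_cast<long long>(V), NextVReg);
  return NextVReg++;
}

void emitLoad(Address Addr, unsigned &NextVReg) {
  unsigned IndexReg = 0;
  bool UseOffset = isInt16(Addr.Offset);
  if (!UseOffset)
    IndexReg = materializeInt(Addr.Offset, NextVReg);  // X-form path
  if (IndexReg)
    std::printf("  ldx  rT, r%u, vreg%u\n", Addr.BaseReg, IndexReg);
  else
    std::printf("  ld   rT, %lld(r%u)\n",
                static_cast<long long>(Addr.Offset), Addr.BaseReg);
}

int main() {
  unsigned NextVReg = 100;
  emitLoad({/*BaseReg*/ 3, /*Offset*/ 16}, NextVReg);      // D-form
  emitLoad({/*BaseReg*/ 3, /*Offset*/ 100000}, NextVReg);  // X-form
  return 0;
}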
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/CodeGen/GlobalISel/ |
D | MachineIRBuilder.cpp |
    264  Register IndexReg) {  in buildBrJT() argument
    270  .addUse(IndexReg);  in buildBrJT()
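Note: these are the GlobalISel hits where an IndexReg is consumed: MachineIRBuilder::buildBrJT() emits a G_BRJT that branches through a jump table using a table pointer, a jump-table index, and the index register. A hedged sketch of a call site follows; the (TablePtr, JTI, IndexReg) parameter order is inferred from the snippet and the declaration listed below, so verify it against MachineIRBuilder.h before relying on it.

// Hedged sketch: how an IndexReg feeds a GlobalISel jump-table branch through
// MachineIRBuilder::buildBrJT(), which the hits above show appending the index
// register with .addUse(IndexReg).  The (TablePtr, JTI, IndexReg) parameter
// order is an assumption inferred from the snippets; verify before use.
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"

using namespace llvm;

// Emits: G_BRJT <table pointer>, <jump table index>, <index register>
void emitJumpTableBranch(MachineIRBuilder &MIRBuilder, Register TablePtr,
                         unsigned JTI, Register IndexReg) {
  MIRBuilder.buildBrJT(TablePtr, JTI, IndexReg);
}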
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/include/llvm/CodeGen/GlobalISel/ |
D | MachineIRBuilder.h | 661 Register IndexReg);
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/SystemZ/ |
D | SystemZISelLowering.cpp |
    845  bool IndexReg;  member
    848  LongDisplacement(LongDispl), IndexReg(IdxReg) {}  in AddressingMode()
    954  if (!SupportedAM.IndexReg)  in isLegalAddressingMode()
    6949  Register IndexReg = MI.getOperand(3).getReg();  in emitCondStore() local
    6959  if (STOCOpcode && !IndexReg && Subtarget.hasLoadStoreOnCond()) {  in emitCondStore()
    7016  .addReg(IndexReg);  in emitCondStore()
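Note: in the SystemZ hits above, AddressingMode::IndexReg is a boolean capability flag: isLegalAddressingMode() rejects any address that needs an index register when the supported instruction form has none. A small self-contained sketch of that check follows; the AddressingMode field names mirror the snippet, the displacement ranges are the usual SystemZ short/long forms, and everything else is illustrative.

// Self-contained sketch of the capability check visible in SystemZ's
// isLegalAddressingMode(): if the address needs an index register but the
// supported form has none (SupportedAM.IndexReg == false), reject it.  The
// AddressingMode field names mirror the snippet; the displacement ranges are
// the usual SystemZ 12-bit unsigned / 20-bit signed forms.
#include <iostream>

struct AddressingMode {
  bool LongDisplacement;  // 20-bit signed vs 12-bit unsigned displacement
  bool IndexReg;          // does this form allow base + index?
  AddressingMode(bool LongDispl, bool IdxReg)
      : LongDisplacement(LongDispl), IndexReg(IdxReg) {}
};

struct WantedAddress {
  bool HasIndexReg;
  long Disp;
};

bool isLegalAddressingMode(const AddressingMode &SupportedAM,
                           const WantedAddress &AM) {
  if (AM.HasIndexReg && !SupportedAM.IndexReg)
    return false;  // this instruction form cannot encode an index register
  if (SupportedAM.LongDisplacement)
    return AM.Disp >= -(1 << 19) && AM.Disp < (1 << 19);
  return AM.Disp >= 0 && AM.Disp < (1 << 12);
}

int main() {
  AddressingMode ShortNoIndex(/*LongDispl*/ false, /*IdxReg*/ false);
  std::cout << isLegalAddressingMode(ShortNoIndex, {true, 0}) << "\n";    // 0
  std::cout << isLegalAddressingMode(ShortNoIndex, {false, 100}) << "\n"; // 1
  return 0;
}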
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/MC/MCParser/ |
D | AsmParser.cpp | 5931 << AR.IntelExp.IndexReg; in parseMSInlineAsm()
|