Searched refs:AddrNumOperands (Results 1 – 15 of 15) sorted by relevance
/external/swiftshader/third_party/LLVM/lib/Target/X86/
X86CodeEmitter.cpp
      211  unsigned e = (isTwoAddr ? X86::AddrNumOperands+1 : X86::AddrNumOperands);  in determineREX()
      866  X86_MC::getX86RegNum(MI.getOperand(CurOp + X86::AddrNumOperands)  in emitInstruction()
      868  CurOp += X86::AddrNumOperands + 1;  in emitInstruction()
      886  int AddrOperands = X86::AddrNumOperands;  in emitInstruction()
      940  intptr_t PCAdj = (CurOp + X86::AddrNumOperands != NumOps) ?  in emitInstruction()
      941  (MI.getOperand(CurOp+X86::AddrNumOperands).isImm() ?  in emitInstruction()
      947  CurOp += X86::AddrNumOperands;  in emitInstruction()

X86InstrInfo.cpp
     1072  if (MI->getOperand(X86::AddrNumOperands).getSubReg() == 0 &&  in isStoreToStackSlot()
     1074  return MI->getOperand(X86::AddrNumOperands).getReg();  in isStoreToStackSlot()
     2755  SmallVector<MachineOperand,X86::AddrNumOperands> MOs;  in foldMemoryOperandImpl()
     2814  for (unsigned i = NumOps - X86::AddrNumOperands; i != NumOps; ++i)  in foldMemoryOperandImpl()
     2907  SmallVector<MachineOperand, X86::AddrNumOperands> AddrOps;  in unfoldMemoryOperand()
     2913  if (i >= Index && i < Index + X86::AddrNumOperands)  in unfoldMemoryOperand()
     2932  for (unsigned i = 1; i != 1 + X86::AddrNumOperands; ++i) {  in unfoldMemoryOperand()
     3029  if (i >= Index-NumDefs && i < Index-NumDefs + X86::AddrNumOperands)  in unfoldMemoryOperand()

X86MCInstLower.cpp
      353  assert(OutMI.getNumOperands() == 1+X86::AddrNumOperands &&  in Lower()

X86ISelLowering.cpp
    10944  assert(bInstr->getNumOperands() < X86::AddrNumOperands + 4 &&  in EmitAtomicBitwiseWithCustomInserter()
    10948  MachineOperand* argOpers[2 + X86::AddrNumOperands];  in EmitAtomicBitwiseWithCustomInserter()
    10954  int lastAddrIndx = X86::AddrNumOperands - 1; // [0,3]  in EmitAtomicBitwiseWithCustomInserter()
    11057  assert(bInstr->getNumOperands() < X86::AddrNumOperands + 14 &&  in EmitAtomicBit6432WithCustomInserter()
    11061  MachineOperand* argOpers[2 + X86::AddrNumOperands];  in EmitAtomicBit6432WithCustomInserter()
    11062  for (int i=0; i < 2 + X86::AddrNumOperands; ++i) {  in EmitAtomicBit6432WithCustomInserter()
    11072  int lastAddrIndx = X86::AddrNumOperands - 1; // [0,3]  in EmitAtomicBit6432WithCustomInserter()
    11211  assert(mInstr->getNumOperands() < X86::AddrNumOperands + 4 &&  in EmitAtomicMinMaxWithCustomInserter()
    11214  MachineOperand* argOpers[2 + X86::AddrNumOperands];  in EmitAtomicMinMaxWithCustomInserter()
    11220  int lastAddrIndx = X86::AddrNumOperands - 1; // [0,3]  in EmitAtomicMinMaxWithCustomInserter()
    [all …]

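These hits all follow one layout convention: the atomic pseudo-instruction carries its 5-operand memory address first, so the expanders size their operand arrays as 2 + AddrNumOperands and treat AddrNumOperands - 1 as the index of the last address operand. A minimal sketch of that gathering loop, assuming (as the asserts above bound) that operand 0 is the def and the address starts at operand 1; the function name is hypothetical, and the header is the in-tree target-internal one:

    #include "MCTargetDesc/X86BaseInfo.h"   // X86::AddrNumOperands (in-tree header)
    #include "llvm/CodeGen/MachineInstr.h"

    using namespace llvm;

    // Gather the explicit operands of an atomic pseudo laid out as
    // [def, base, scale, index, disp, segment, ...extra args], as the
    // argOpers loops above do. Returns the index of the last address
    // operand (the segment register) within argOpers.
    static int gatherAtomicOperands(MachineInstr *bInstr,
                                    MachineOperand *argOpers[2 + X86::AddrNumOperands]) {
      int numArgs = bInstr->getNumOperands() - 1;  // everything after the def
      for (int i = 0; i < numArgs; ++i)
        argOpers[i] = &bInstr->getOperand(i + 1);  // skip the def at operand 0
      return X86::AddrNumOperands - 1;             // last piece of the address
    }
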
X86FloatingPoint.cpp
      985  assert((NumOps == X86::AddrNumOperands + 1 || NumOps == 1) &&  in handleOneArgFP()

/external/swiftshader/third_party/LLVM/lib/Target/X86/MCTargetDesc/
X86MCCodeEmitter.cpp
      510  CurOp = X86::AddrNumOperands;  in EmitVEXOpcodePrefix()
      683  unsigned e = (isTwoAddr ? X86::AddrNumOperands+1 : X86::AddrNumOperands);  in DetermineREXPrefix()
      925  SrcRegNum = CurOp + X86::AddrNumOperands;  in EncodeInstruction()
      949  int AddrOperands = X86::AddrNumOperands;  in EncodeInstruction()
      982  CurOp += X86::AddrNumOperands;  in EncodeInstruction()

X86BaseInfo.h
       41  AddrNumOperands = 5  (enumerator)

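For context, AddrNumOperands terminates the memory-operand layout enum: every X86 memory reference in a MachineInstr is a run of five consecutive operands. A sketch of that enum as it appears in X86BaseInfo.h (enumerator names and values as upstream; the comments are added here):

    namespace X86 {
    enum {
      AddrBaseReg     = 0,  // base register
      AddrScaleAmt    = 1,  // scale amount: 1, 2, 4, or 8
      AddrIndexReg    = 2,  // index register
      AddrDisp        = 3,  // displacement (imm, global, CP index, ...)
      AddrSegmentReg  = 4,  // segment register
      AddrNumOperands = 5   // total operands in a memory reference
    };
    } // namespace X86
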
/external/llvm/lib/Target/X86/MCTargetDesc/
X86MCCodeEmitter.cpp
      727  CurOp += X86::AddrNumOperands;  in EmitVEXOpcodePrefix()
      779  VEX_4V = ~getX86RegEncoding(MI, CurOp + X86::AddrNumOperands) & 0xf;  in EmitVEXOpcodePrefix()
      998  CurOp += X86::AddrNumOperands;  in DetermineREXPrefix()
     1008  CurOp += X86::AddrNumOperands;  in DetermineREXPrefix()
     1292  unsigned SrcRegNum = CurOp + X86::AddrNumOperands;  in encodeInstruction()
     1346  CurOp = FirstMemOp + X86::AddrNumOperands;  in encodeInstruction()
     1383  CurOp += X86::AddrNumOperands;  in encodeInstruction()

X86BaseInfo.h
       42  AddrNumOperands = 5  (enumerator)

/external/llvm/lib/Target/X86/
X86CallFrameOptimization.cpp
      467  MachineOperand PushOp = MOV->getOperand(X86::AddrNumOperands);  in adjustCallSequence()
      516  for (unsigned i = NumOps - X86::AddrNumOperands; i != NumOps; ++i)  in adjustCallSequence()

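The first hit relies on the same layout: in a MOV*mr store the five address operands come first, so the value being stored (the future PUSH operand) sits immediately after them. A hypothetical helper making that explicit:

    #include "MCTargetDesc/X86BaseInfo.h"
    #include "llvm/CodeGen/MachineInstr.h"

    using namespace llvm;

    // For a MOV*mr-style store the operand order is
    // [base, scale, index, disp, segment, value], so the stored value sits
    // at index X86::AddrNumOperands. The helper name is illustrative only.
    static const MachineOperand &storedValue(const MachineInstr &Store) {
      return Store.getOperand(X86::AddrNumOperands);
    }
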
X86InstrInfo.h
      137  return Op + X86::AddrNumOperands <= MI.getNumOperands() &&  in isMem()

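Here AddrNumOperands serves as a bounds check: the five operands starting at Op must all exist before they can be interpreted as an address. A simplified approximation of that check, assuming the in-tree include path; the real isMem() delegates part of the work to further predicates, and looksLikeMemOperand is a hypothetical name:

    #include "MCTargetDesc/X86BaseInfo.h"
    #include "llvm/CodeGen/MachineInstr.h"

    using namespace llvm;

    // The five operands starting at Op must exist and have the expected
    // kinds: [BaseReg, ScaleAmt, IndexReg, Disp, SegmentReg]. Disp is left
    // unchecked because it may be an immediate, a global address, or
    // another symbolic operand.
    static bool looksLikeMemOperand(const MachineInstr &MI, unsigned Op) {
      if (Op + X86::AddrNumOperands > MI.getNumOperands())
        return false;
      return MI.getOperand(Op + X86::AddrBaseReg).isReg() &&
             MI.getOperand(Op + X86::AddrScaleAmt).isImm() &&
             MI.getOperand(Op + X86::AddrIndexReg).isReg() &&
             MI.getOperand(Op + X86::AddrSegmentReg).isReg();
    }
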
X86InstrInfo.cpp
     2275  if (MI.getOperand(X86::AddrNumOperands).getSubReg() == 0 &&  in isStoreToStackSlot()
     2277  return MI.getOperand(X86::AddrNumOperands).getReg();  in isStoreToStackSlot()
     6287  SmallVector<MachineOperand,X86::AddrNumOperands> MOs;  in foldMemoryOperandImpl()
     6355  MOs.append(LoadMI.operands_begin() + NumOps - X86::AddrNumOperands,  in foldMemoryOperandImpl()
     6390  SmallVector<MachineOperand, X86::AddrNumOperands> AddrOps;  in unfoldMemoryOperand()
     6396  if (i >= Index && i < Index + X86::AddrNumOperands)  in unfoldMemoryOperand()
     6413  for (unsigned i = 1; i != 1 + X86::AddrNumOperands; ++i) {  in unfoldMemoryOperand()
     6507  if (i >= Index-NumDefs && i < Index-NumDefs + X86::AddrNumOperands)  in unfoldMemoryOperand()

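The unfoldMemoryOperand() hits show the inverse of folding: the operands in the half-open window [Index, Index + AddrNumOperands) are the folded memory reference, and everything around the window is kept for the register form. A simplified sketch of that split; the upstream loop also routes implicit operands separately, and the function name here is hypothetical:

    #include "MCTargetDesc/X86BaseInfo.h"
    #include "llvm/ADT/SmallVector.h"
    #include "llvm/CodeGen/MachineInstr.h"

    using namespace llvm;

    // Partition MI's operands around the folded memory reference, which
    // occupies operands [Index, Index + AddrNumOperands).
    static void splitFoldedOperands(const MachineInstr &MI, unsigned Index,
                                    SmallVectorImpl<MachineOperand> &AddrOps,
                                    SmallVectorImpl<MachineOperand> &BeforeOps,
                                    SmallVectorImpl<MachineOperand> &AfterOps) {
      for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
        const MachineOperand &Op = MI.getOperand(i);
        if (i >= Index && i < Index + X86::AddrNumOperands)
          AddrOps.push_back(Op);    // part of the memory reference
        else if (i < Index)
          BeforeOps.push_back(Op);  // e.g. the def of a folded load
        else
          AfterOps.push_back(Op);   // trailing operands
      }
    }
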
X86FloatingPoint.cpp
     1061  assert((NumOps == X86::AddrNumOperands + 1 || NumOps == 1) &&  in handleOneArgFP()

X86MCInstLower.cpp
      404  assert(OutMI.getNumOperands() == 1+X86::AddrNumOperands &&  in Lower()

X86ISelLowering.cpp
    22717  for (int i = 0; i < X86::AddrNumOperands; ++i)  in emitMonitor()
    22720  unsigned ValOps = X86::AddrNumOperands;  in emitMonitor()
    22747  static_assert(X86::AddrNumOperands == 5,  in EmitVAARG64WithCustomInserter()
    23431  unsigned ValOpIdx = X86::AddrNumOperands;  in EmitLoweredAtomicFP()
    23437  for (int i = 0; i < X86::AddrNumOperands; ++i) {  in EmitLoweredAtomicFP()
    23447  for (int i = 0; i < X86::AddrNumOperands; ++i)  in EmitLoweredAtomicFP()
    23824  for (unsigned i = 0; i < X86::AddrNumOperands; ++i) {  in emitEHSjLjSetJmp()
    23910  for (unsigned i = 0; i < X86::AddrNumOperands; ++i)  in emitEHSjLjLongJmp()
    23915  for (unsigned i = 0; i < X86::AddrNumOperands; ++i) {  in emitEHSjLjLongJmp()
    23924  for (unsigned i = 0; i < X86::AddrNumOperands; ++i) {  in emitEHSjLjLongJmp()
    [all …]

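Nearly every hit here is the same idiom: when a custom inserter rewrites a pseudo whose first AddrNumOperands operands describe an address, it copies those five operands verbatim onto the instruction it builds. A minimal sketch under that assumption; the opcode, insertion point, and helper name are placeholders, and MIB.add() is the modern spelling (older trees use addOperand()):

    #include "MCTargetDesc/X86BaseInfo.h"
    #include "llvm/CodeGen/MachineInstrBuilder.h"

    using namespace llvm;

    // Copy the 5-operand address from MI (operands [0, AddrNumOperands))
    // onto a newly built instruction -- the pattern shared by emitMonitor,
    // EmitLoweredAtomicFP, and the SjLj emitters above. Desc would come
    // from TII->get(Opcode).
    static MachineInstrBuilder copyAddressOperands(MachineBasicBlock &BB,
                                                   MachineInstr &MI,
                                                   const DebugLoc &DL,
                                                   const MCInstrDesc &Desc) {
      MachineInstrBuilder MIB = BuildMI(BB, MI, DL, Desc);
      for (int i = 0; i < X86::AddrNumOperands; ++i)
        MIB.add(MI.getOperand(i));  // base, scale, index, disp, segment
      return MIB;
    }
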