Searched refs:AddrNumOperands (Results 1 – 13 of 13) sorted by relevance
291 MachineOperand ImmOp = MI->getOperand(X86::AddrNumOperands); in classifyInstruction()
297 MachineOperand ImmOp = MI->getOperand(X86::AddrNumOperands); in classifyInstruction()
509 MachineOperand PushOp = Store->getOperand(X86::AddrNumOperands); in adjustCallSequence()
563 for (unsigned i = NumOps - X86::AddrNumOperands; i != NumOps; ++i) in adjustCallSequence()
197 MachineOperand &StackAdjust = MBBI->getOperand(isMem ? X86::AddrNumOperands in ExpandMI()
263 for (unsigned i = 0; i != X86::AddrNumOperands; ++i) in ExpandMI()
479 assert(OutMI.getNumOperands() == 1 + X86::AddrNumOperands && in Lower()
768 assert(OutMI.getNumOperands() == X86::AddrNumOperands && in Lower()
2061 for (int i = 0; i < X86::AddrNumOperands; ++i) { in EmitInstruction()
2070 for (int i = 0; i < X86::AddrNumOperands; ++i) { in EmitInstruction()
2085 Register Reg = MI->getOperand(X86::AddrNumOperands).getReg(); in EmitInstruction()
2091 for (int i = 0; i < X86::AddrNumOperands; ++i) in EmitInstruction()
2098 for (int i = 0; i < X86::AddrNumOperands; ++i) { in EmitInstruction()
538 MemOpEnd = MemOpStart + X86::AddrNumOperands; in usedAsAddr()
573 OpIdx += (X86::AddrNumOperands - 1); in buildClosure()
430 MachineOperand &StoreSrcVReg = StoreInst->getOperand(X86::AddrNumOperands); in buildCopy()
432 NewStore->getOperand(X86::AddrNumOperands).setIsKill(StoreSrcVReg.isKill()); in buildCopy()
128 return Op + X86::AddrNumOperands <= MI.getNumOperands() && in isMem()
440 if (MI.getOperand(X86::AddrNumOperands).getSubReg() == 0 && in isStoreToStackSlot()
442 return MI.getOperand(X86::AddrNumOperands).getReg(); in isStoreToStackSlot()
4043 Register SrcReg = MIB->getOperand(X86::AddrNumOperands).getReg(); in expandNOVLXStore()
4053 MIB->getOperand(X86::AddrNumOperands).setReg(SrcReg); in expandNOVLXStore()
4857 if (MOs.size() == X86::AddrNumOperands && in foldMemoryOperandImpl()
5276 SmallVector<MachineOperand,X86::AddrNumOperands> MOs; in foldMemoryOperandImpl()
5355 MOs.append(LoadMI.operands_begin() + NumOps - X86::AddrNumOperands, in foldMemoryOperandImpl()
5480 SmallVector<MachineOperand, X86::AddrNumOperands> AddrOps; in unfoldMemoryOperand()
5486 if (i >= Index && i < Index + X86::AddrNumOperands) in unfoldMemoryOperand()
5518 for (unsigned i = 1; i != 1 + X86::AddrNumOperands; ++i) { in unfoldMemoryOperand()
[all …]
1114 for (int i = 0; i < X86::AddrNumOperands; ++i) in rewriteSetCC()
1122 assert((NumOps == X86::AddrNumOperands + 1 || NumOps == 1) && in handleOneArgFP()
30325 static_assert(X86::AddrNumOperands == 5, in EmitVAARG64WithCustomInserter()
31513 for (unsigned i = 0; i < X86::AddrNumOperands; ++i) { in emitSetJmpShadowStackFix()
31621 for (unsigned i = 0; i < X86::AddrNumOperands; ++i) { in emitEHSjLjSetJmp()
31774 for (unsigned i = 0; i < X86::AddrNumOperands; ++i) { in emitLongJmpShadowStackFix()
31900 for (unsigned i = 0; i < X86::AddrNumOperands; ++i) { in emitEHSjLjLongJmp()
31912 for (unsigned i = 0; i < X86::AddrNumOperands; ++i) { in emitEHSjLjLongJmp()
31926 for (unsigned i = 0; i < X86::AddrNumOperands; ++i) { in emitEHSjLjLongJmp()
32365 .addReg(MI.getOperand(X86::AddrNumOperands).getReg()); in EmitInstrWithCustomInserter()
887 CurOp += X86::AddrNumOperands; in emitVEXOpcodePrefix()
947 VEX_4V = ~getX86RegEncoding(MI, CurOp + X86::AddrNumOperands) & 0xf; in emitVEXOpcodePrefix()
1212 CurOp += X86::AddrNumOperands; in determineREXPrefix()
1221 CurOp += X86::AddrNumOperands; in determineREXPrefix()
1490 unsigned SrcRegNum = CurOp + X86::AddrNumOperands; in encodeInstruction()
1573 CurOp = FirstMemOp + X86::AddrNumOperands; in encodeInstruction()
1585 CurOp = FirstMemOp + X86::AddrNumOperands; in encodeInstruction()
1602 CurOp = FirstMemOp + X86::AddrNumOperands; in encodeInstruction()
1608 CurOp = FirstMemOp + X86::AddrNumOperands; in encodeInstruction()
1648 CurOp = FirstMemOp + X86::AddrNumOperands; in encodeInstruction()
[all …]
41 AddrNumOperands = 5 enumerator
3078 X86::AddrNumOperands - 1).getReg(); in validateInstruction()