
Lines Matching refs:X86
(cross-reference hits for the identifier X86 in LLVM's X86FastISel.cpp; each entry shows the source line number, the matched line, and the enclosing function)

161   bool foldX86XALUIntrinsic(X86::CondCode &CC, const Instruction *I,
207 static std::pair<X86::CondCode, bool>
209 X86::CondCode CC = X86::COND_INVALID; in getX86ConditionCode()
214 case CmpInst::FCMP_UEQ: CC = X86::COND_E; break; in getX86ConditionCode()
216 case CmpInst::FCMP_OGT: CC = X86::COND_A; break; in getX86ConditionCode()
218 case CmpInst::FCMP_OGE: CC = X86::COND_AE; break; in getX86ConditionCode()
220 case CmpInst::FCMP_ULT: CC = X86::COND_B; break; in getX86ConditionCode()
222 case CmpInst::FCMP_ULE: CC = X86::COND_BE; break; in getX86ConditionCode()
223 case CmpInst::FCMP_ONE: CC = X86::COND_NE; break; in getX86ConditionCode()
224 case CmpInst::FCMP_UNO: CC = X86::COND_P; break; in getX86ConditionCode()
225 case CmpInst::FCMP_ORD: CC = X86::COND_NP; break; in getX86ConditionCode()
227 case CmpInst::FCMP_UNE: CC = X86::COND_INVALID; break; in getX86ConditionCode()
230 case CmpInst::ICMP_EQ: CC = X86::COND_E; break; in getX86ConditionCode()
231 case CmpInst::ICMP_NE: CC = X86::COND_NE; break; in getX86ConditionCode()
232 case CmpInst::ICMP_UGT: CC = X86::COND_A; break; in getX86ConditionCode()
233 case CmpInst::ICMP_UGE: CC = X86::COND_AE; break; in getX86ConditionCode()
234 case CmpInst::ICMP_ULT: CC = X86::COND_B; break; in getX86ConditionCode()
235 case CmpInst::ICMP_ULE: CC = X86::COND_BE; break; in getX86ConditionCode()
236 case CmpInst::ICMP_SGT: CC = X86::COND_G; break; in getX86ConditionCode()
237 case CmpInst::ICMP_SGE: CC = X86::COND_GE; break; in getX86ConditionCode()
238 case CmpInst::ICMP_SLT: CC = X86::COND_L; break; in getX86ConditionCode()
239 case CmpInst::ICMP_SLE: CC = X86::COND_LE; break; in getX86ConditionCode()
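
The cluster above (source lines 209-239) is the body of getX86ConditionCode(), which maps IR compare predicates onto EFLAGS condition codes. UCOMISS/UCOMISD set ZF/PF/CF like an unsigned integer compare and set all three on an unordered (NaN) result, which is why the ordered float predicates land on the unsigned A/AE/B/BE family and PF distinguishes unordered. A minimal standalone sketch of that mapping: the Pred and Cond enums are hypothetical stand-ins for CmpInst::Predicate and X86::CondCode, and the second member of the real helper's return pair (see source line 207), which flags predicates handled by swapping operands, is kept in shape but always false here.

#include <cstdio>
#include <utility>

// Hypothetical stand-ins for CmpInst::Predicate and X86::CondCode; only
// the shape of the mapping is taken from the matched lines above.
enum Pred { FCMP_UEQ, FCMP_OGT, FCMP_ULT, FCMP_UNO, FCMP_ORD,
            ICMP_UGT, ICMP_SGT };
enum Cond { COND_E, COND_A, COND_B, COND_P, COND_NP, COND_G, COND_INVALID };

std::pair<Cond, bool> getCond(Pred P) {
  switch (P) {
  case FCMP_UEQ: return {COND_E,  false}; // ZF==1 (also set if unordered)
  case FCMP_OGT: return {COND_A,  false}; // CF==0 && ZF==0
  case FCMP_ULT: return {COND_B,  false}; // CF==1 (also set if unordered)
  case FCMP_UNO: return {COND_P,  false}; // PF==1 <=> unordered
  case FCMP_ORD: return {COND_NP, false}; // PF==0 <=> ordered
  case ICMP_UGT: return {COND_A,  false}; // unsigned ints read CF/ZF
  case ICMP_SGT: return {COND_G,  false}; // signed ints read SF/OF/ZF
  }
  return {COND_INVALID, false};
}

int main() { printf("FCMP_OGT -> %d\n", getCond(FCMP_OGT).first); }
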
282 bool X86FastISel::foldX86XALUIntrinsic(X86::CondCode &CC, const Instruction *I, in foldX86XALUIntrinsic()
302 X86::CondCode TmpCC; in foldX86XALUIntrinsic()
308 case Intrinsic::umul_with_overflow: TmpCC = X86::COND_O; break; in foldX86XALUIntrinsic()
310 case Intrinsic::usub_with_overflow: TmpCC = X86::COND_B; break; in foldX86XALUIntrinsic()
373 Opc = X86::MOV8rm; in X86FastEmitLoad()
374 RC = &X86::GR8RegClass; in X86FastEmitLoad()
377 Opc = X86::MOV16rm; in X86FastEmitLoad()
378 RC = &X86::GR16RegClass; in X86FastEmitLoad()
381 Opc = X86::MOV32rm; in X86FastEmitLoad()
382 RC = &X86::GR32RegClass; in X86FastEmitLoad()
386 Opc = X86::MOV64rm; in X86FastEmitLoad()
387 RC = &X86::GR64RegClass; in X86FastEmitLoad()
391 Opc = Subtarget->hasAVX() ? X86::VMOVSSrm : X86::MOVSSrm; in X86FastEmitLoad()
392 RC = &X86::FR32RegClass; in X86FastEmitLoad()
394 Opc = X86::LD_Fp32m; in X86FastEmitLoad()
395 RC = &X86::RFP32RegClass; in X86FastEmitLoad()
400 Opc = Subtarget->hasAVX() ? X86::VMOVSDrm : X86::MOVSDrm; in X86FastEmitLoad()
401 RC = &X86::FR64RegClass; in X86FastEmitLoad()
403 Opc = X86::LD_Fp64m; in X86FastEmitLoad()
404 RC = &X86::RFP64RegClass; in X86FastEmitLoad()
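
The X86FastEmitLoad matches (source lines 373-404) pick a load opcode and register class from the value type: plain MOVs for integers, then a three-way choice for scalar floats between AVX (VMOVSS/VMOVSD), plain SSE (MOVSS/MOVSD), and the x87 stack (LD_Fp32m/LD_Fp64m) when scalar SSE is unavailable; the store selection at source lines 442-473 mirrors it. A standalone sketch of the selection logic, with hypothetical enums standing in for MVT and the X86 opcode namespace:

#include <cstdio>

// Hypothetical mirrors of MVT and the X86 opcode enum; a single ScalarSSE
// flag stands in for the per-width X86ScalarSSEf32/X86ScalarSSEf64 flags.
enum VT { i8, i16, i32, i64, f32, f64 };
enum Opcode { MOV8rm, MOV16rm, MOV32rm, MOV64rm,
              VMOVSSrm, MOVSSrm, LD_Fp32m, VMOVSDrm, MOVSDrm, LD_Fp64m };

Opcode chooseLoadOpc(VT Ty, bool HasAVX, bool ScalarSSE) {
  switch (Ty) {
  case i8:  return MOV8rm;
  case i16: return MOV16rm;
  case i32: return MOV32rm;
  case i64: return MOV64rm;  // only legal when targeting x86-64
  // Scalar FP: AVX form, plain SSE form, or x87 stack load as fallback.
  case f32: return ScalarSSE ? (HasAVX ? VMOVSSrm : MOVSSrm) : LD_Fp32m;
  case f64: return ScalarSSE ? (HasAVX ? VMOVSDrm : MOVSDrm) : LD_Fp64m;
  }
  return MOV32rm;
}

int main() { printf("%d\n", chooseLoadOpc(f32, /*HasAVX=*/1, /*ScalarSSE=*/1)); }
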
435 unsigned AndResult = createResultReg(&X86::GR8RegClass); in X86FastEmitStore()
437 TII.get(X86::AND8ri), AndResult) in X86FastEmitStore()
442 case MVT::i8: Opc = X86::MOV8mr; break; in X86FastEmitStore()
443 case MVT::i16: Opc = X86::MOV16mr; break; in X86FastEmitStore()
444 case MVT::i32: Opc = X86::MOV32mr; break; in X86FastEmitStore()
445 case MVT::i64: Opc = X86::MOV64mr; break; // Must be in x86-64 mode. in X86FastEmitStore()
448 (Subtarget->hasAVX() ? X86::VMOVSSmr : X86::MOVSSmr) : X86::ST_Fp32m; in X86FastEmitStore()
452 (Subtarget->hasAVX() ? X86::VMOVSDmr : X86::MOVSDmr) : X86::ST_Fp64m; in X86FastEmitStore()
456 Opc = Subtarget->hasAVX() ? X86::VMOVAPSmr : X86::MOVAPSmr; in X86FastEmitStore()
458 Opc = Subtarget->hasAVX() ? X86::VMOVUPSmr : X86::MOVUPSmr; in X86FastEmitStore()
462 Opc = Subtarget->hasAVX() ? X86::VMOVAPDmr : X86::MOVAPDmr; in X86FastEmitStore()
464 Opc = Subtarget->hasAVX() ? X86::VMOVUPDmr : X86::MOVUPDmr; in X86FastEmitStore()
471 Opc = Subtarget->hasAVX() ? X86::VMOVDQAmr : X86::MOVDQAmr; in X86FastEmitStore()
473 Opc = Subtarget->hasAVX() ? X86::VMOVDQUmr : X86::MOVDQUmr; in X86FastEmitStore()
500 case MVT::i8: Opc = X86::MOV8mi; break; in X86FastEmitStore()
501 case MVT::i16: Opc = X86::MOV16mi; break; in X86FastEmitStore()
502 case MVT::i32: Opc = X86::MOV32mi; break; in X86FastEmitStore()
506 Opc = X86::MOV64mi32; in X86FastEmitStore()
579 AM.Base.Reg = X86::RIP; in handleConstantAddresses()
604 Opc = X86::MOV64rm; in handleConstantAddresses()
605 RC = &X86::GR64RegClass; in handleConstantAddresses()
608 StubAM.Base.Reg = X86::RIP; in handleConstantAddresses()
610 Opc = X86::MOV32rm; in handleConstantAddresses()
611 RC = &X86::GR32RegClass; in handleConstantAddresses()
902 AM.Base.Reg = X86::RIP; in X86SelectCallAddress()
1023 if (VA.getLocReg() == X86::ST0 || VA.getLocReg() == X86::ST1) in X86SelectRet()
1073 unsigned RetReg = Subtarget->is64Bit() ? X86::RAX : X86::EAX; in X86SelectRet()
1081 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(Subtarget->is64Bit() ? X86::RETQ : X86::RETL)); in X86SelectRet()
1121 case MVT::i8: return X86::CMP8rr; in X86ChooseCmpOpcode()
1122 case MVT::i16: return X86::CMP16rr; in X86ChooseCmpOpcode()
1123 case MVT::i32: return X86::CMP32rr; in X86ChooseCmpOpcode()
1124 case MVT::i64: return X86::CMP64rr; in X86ChooseCmpOpcode()
1126 return X86ScalarSSEf32 ? (HasAVX ? X86::VUCOMISSrr : X86::UCOMISSrr) : 0; in X86ChooseCmpOpcode()
1128 return X86ScalarSSEf64 ? (HasAVX ? X86::VUCOMISDrr : X86::UCOMISDrr) : 0; in X86ChooseCmpOpcode()
1139 case MVT::i8: return X86::CMP8ri; in X86ChooseCmpImmediateOpcode()
1140 case MVT::i16: return X86::CMP16ri; in X86ChooseCmpImmediateOpcode()
1141 case MVT::i32: return X86::CMP32ri; in X86ChooseCmpImmediateOpcode()
1146 return X86::CMP64ri32; in X86ChooseCmpImmediateOpcode()
1197 ResultReg = createResultReg(&X86::GR32RegClass); in X86SelectCmp()
1198 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::MOV32r0), in X86SelectCmp()
1201 X86::sub_8bit); in X86SelectCmp()
1207 ResultReg = createResultReg(&X86::GR8RegClass); in X86SelectCmp()
1208 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::MOV8ri), in X86SelectCmp()
1233 { X86::SETEr, X86::SETNPr, X86::AND8rr }, in X86SelectCmp()
1234 { X86::SETNEr, X86::SETPr, X86::OR8rr } in X86SelectCmp()
1243 ResultReg = createResultReg(&X86::GR8RegClass); in X86SelectCmp()
1248 unsigned FlagReg1 = createResultReg(&X86::GR8RegClass); in X86SelectCmp()
1249 unsigned FlagReg2 = createResultReg(&X86::GR8RegClass); in X86SelectCmp()
1260 X86::CondCode CC; in X86SelectCmp()
1263 assert(CC <= X86::LAST_VALID_COND && "Unexpected condition code."); in X86SelectCmp()
1264 unsigned Opc = X86::getSETFromCond(CC); in X86SelectCmp()
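
Within X86SelectCmp, source lines 1233-1249 handle the two float predicates that need two flag reads: FCMP_OEQ is true only when ZF=1 and PF=0 (SETEr and SETNPr combined with AND8rr), and FCMP_UNE when ZF=0 or PF=1 (SETNEr and SETPr combined with OR8rr), because an unordered UCOMISS result sets PF. A plain-C++ sketch of that flag logic (the Flags struct is an illustration, not an LLVM type):

#include <cstdio>

// Illustration only: UCOMISS leaves ZF/PF/CF; an unordered (NaN) result
// sets all three, so PF is the "was unordered" bit.
struct Flags { bool ZF, PF; };

bool fcmpOEQ(Flags F) { return F.ZF && !F.PF; } // SETEr & SETNPr via AND8rr
bool fcmpUNE(Flags F) { return !F.ZF || F.PF; } // SETNEr | SETPr via OR8rr

int main() {
  Flags Unordered = {true, true}; // what a NaN operand produces
  printf("%d %d\n", fcmpOEQ(Unordered), fcmpUNE(Unordered)); // prints: 0 1
}
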
1303 case MVT::i8: MovInst = X86::MOVZX32rr8; break; in X86SelectZExt()
1304 case MVT::i16: MovInst = X86::MOVZX32rr16; break; in X86SelectZExt()
1305 case MVT::i32: MovInst = X86::MOV32rr; break; in X86SelectZExt()
1309 unsigned Result32 = createResultReg(&X86::GR32RegClass); in X86SelectZExt()
1313 ResultReg = createResultReg(&X86::GR64RegClass); in X86SelectZExt()
1316 .addImm(0).addReg(Result32).addImm(X86::sub_32bit); in X86SelectZExt()
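
The X86SelectZExt matches show the i64 zero-extend trick: the value is first zero-extended into a 32-bit register (MOVZX32rr8/MOVZX32rr16, or a plain MOV32rr), and SUBREG_TO_REG then stitches that into a 64-bit register with no extra instruction, because any 32-bit write on x86-64 implicitly clears bits 63:32. The same bit behavior in plain C++:

#include <cstdint>
#include <cstdio>

uint64_t zext8to64(uint8_t V) {
  uint32_t Low = V;      // MOVZX32rr8: zero-extend 8 -> 32 bits
  return (uint64_t)Low;  // SUBREG_TO_REG: bits 63:32 are already zero
}

int main() { printf("%llu\n", (unsigned long long)zext8to64(0xFF)); } // 255
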
1339 X86::CondCode CC; in X86SelectBranch()
1390 assert(CC <= X86::LAST_VALID_COND && "Unexpected condition code."); in X86SelectBranch()
1392 BranchOpc = X86::GetCondBranchFromCond(CC); in X86SelectBranch()
1406 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::JP_4)) in X86SelectBranch()
1432 case MVT::i8: TestOpc = X86::TEST8ri; break; in X86SelectBranch()
1433 case MVT::i16: TestOpc = X86::TEST16ri; break; in X86SelectBranch()
1434 case MVT::i32: TestOpc = X86::TEST32ri; break; in X86SelectBranch()
1435 case MVT::i64: TestOpc = X86::TEST64ri32; break; in X86SelectBranch()
1443 unsigned JmpOpc = X86::JNE_4; in X86SelectBranch()
1446 JmpOpc = X86::JE_4; in X86SelectBranch()
1467 unsigned BranchOpc = X86::GetCondBranchFromCond(CC); in X86SelectBranch()
1486 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::TEST8ri)) in X86SelectBranch()
1488 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::JNE_4)) in X86SelectBranch()
1503 CReg = X86::CL; in X86SelectShift()
1504 RC = &X86::GR8RegClass; in X86SelectShift()
1506 case Instruction::LShr: OpReg = X86::SHR8rCL; break; in X86SelectShift()
1507 case Instruction::AShr: OpReg = X86::SAR8rCL; break; in X86SelectShift()
1508 case Instruction::Shl: OpReg = X86::SHL8rCL; break; in X86SelectShift()
1512 CReg = X86::CX; in X86SelectShift()
1513 RC = &X86::GR16RegClass; in X86SelectShift()
1515 case Instruction::LShr: OpReg = X86::SHR16rCL; break; in X86SelectShift()
1516 case Instruction::AShr: OpReg = X86::SAR16rCL; break; in X86SelectShift()
1517 case Instruction::Shl: OpReg = X86::SHL16rCL; break; in X86SelectShift()
1521 CReg = X86::ECX; in X86SelectShift()
1522 RC = &X86::GR32RegClass; in X86SelectShift()
1524 case Instruction::LShr: OpReg = X86::SHR32rCL; break; in X86SelectShift()
1525 case Instruction::AShr: OpReg = X86::SAR32rCL; break; in X86SelectShift()
1526 case Instruction::Shl: OpReg = X86::SHL32rCL; break; in X86SelectShift()
1530 CReg = X86::RCX; in X86SelectShift()
1531 RC = &X86::GR64RegClass; in X86SelectShift()
1533 case Instruction::LShr: OpReg = X86::SHR64rCL; break; in X86SelectShift()
1534 case Instruction::AShr: OpReg = X86::SAR64rCL; break; in X86SelectShift()
1535 case Instruction::Shl: OpReg = X86::SHL64rCL; break; in X86SelectShift()
1556 if (CReg != X86::CL) in X86SelectShift()
1558 TII.get(TargetOpcode::KILL), X86::CL) in X86SelectShift()
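
X86SelectShift keeps one opcode set per width because x86 variable shifts take their count only in CL: the count is copied into CL/CX/ECX/RCX and the SHx*rCL form is used, and the KILL at source lines 1556-1558 appears to be there so that a copy into a register wider than CL is still seen as defining CL itself. The hardware masks the count (to 5 bits for 32-bit shifts, 6 for 64-bit), which this C++ sketch makes explicit:

#include <cstdint>
#include <cstdio>

uint32_t shr32(uint32_t V, uint8_t Count) {
  uint8_t CL = Count & 31; // hardware masks a 32-bit shift count to 5 bits
  return V >> CL;          // SHR32rCL: shift by whatever is in CL
}

int main() { printf("%u\n", shr32(0x80u, 3)); } // prints: 16
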
1599 { &X86::GR8RegClass, X86::AX, 0, { in X86SelectDivRem()
1600 { X86::IDIV8r, 0, X86::MOVSX16rr8, X86::AL, S }, // SDiv in X86SelectDivRem()
1601 { X86::IDIV8r, 0, X86::MOVSX16rr8, X86::AH, S }, // SRem in X86SelectDivRem()
1602 { X86::DIV8r, 0, X86::MOVZX16rr8, X86::AL, U }, // UDiv in X86SelectDivRem()
1603 { X86::DIV8r, 0, X86::MOVZX16rr8, X86::AH, U }, // URem in X86SelectDivRem()
1606 { &X86::GR16RegClass, X86::AX, X86::DX, { in X86SelectDivRem()
1607 { X86::IDIV16r, X86::CWD, Copy, X86::AX, S }, // SDiv in X86SelectDivRem()
1608 { X86::IDIV16r, X86::CWD, Copy, X86::DX, S }, // SRem in X86SelectDivRem()
1609 { X86::DIV16r, X86::MOV32r0, Copy, X86::AX, U }, // UDiv in X86SelectDivRem()
1610 { X86::DIV16r, X86::MOV32r0, Copy, X86::DX, U }, // URem in X86SelectDivRem()
1613 { &X86::GR32RegClass, X86::EAX, X86::EDX, { in X86SelectDivRem()
1614 { X86::IDIV32r, X86::CDQ, Copy, X86::EAX, S }, // SDiv in X86SelectDivRem()
1615 { X86::IDIV32r, X86::CDQ, Copy, X86::EDX, S }, // SRem in X86SelectDivRem()
1616 { X86::DIV32r, X86::MOV32r0, Copy, X86::EAX, U }, // UDiv in X86SelectDivRem()
1617 { X86::DIV32r, X86::MOV32r0, Copy, X86::EDX, U }, // URem in X86SelectDivRem()
1620 { &X86::GR64RegClass, X86::RAX, X86::RDX, { in X86SelectDivRem()
1621 { X86::IDIV64r, X86::CQO, Copy, X86::RAX, S }, // SDiv in X86SelectDivRem()
1622 { X86::IDIV64r, X86::CQO, Copy, X86::RDX, S }, // SRem in X86SelectDivRem()
1623 { X86::DIV64r, X86::MOV32r0, Copy, X86::RAX, U }, // UDiv in X86SelectDivRem()
1624 { X86::DIV64r, X86::MOV32r0, Copy, X86::RDX, U }, // URem in X86SelectDivRem()
1671 unsigned Zero32 = createResultReg(&X86::GR32RegClass); in X86SelectDivRem()
1673 TII.get(X86::MOV32r0), Zero32); in X86SelectDivRem()
1681 .addReg(Zero32, 0, X86::sub_16bit); in X86SelectDivRem()
1689 .addImm(0).addReg(Zero32).addImm(X86::sub_32bit); in X86SelectDivRem()
1707 OpEntry.DivRemResultReg == X86::AH && Subtarget->is64Bit()) { in X86SelectDivRem()
1708 unsigned SourceSuperReg = createResultReg(&X86::GR16RegClass); in X86SelectDivRem()
1709 unsigned ResultSuperReg = createResultReg(&X86::GR16RegClass); in X86SelectDivRem()
1711 TII.get(Copy), SourceSuperReg).addReg(X86::AX); in X86SelectDivRem()
1714 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::SHR16ri), in X86SelectDivRem()
1719 /*Kill=*/true, X86::sub_8bit); in X86SelectDivRem()
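
The X86SelectDivRem table pairs each width with its divide opcode, its sign/zero-extension opcode, and the result register (quotient in AL/AX/EAX/RAX, remainder in AH/DX/EDX/RDX); source lines 1671-1689 zero the high half for unsigned division with a single MOV32r0, narrowed or widened through sub_16bit/sub_32bit. Lines 1707-1719 then work around a 64-bit-mode quirk: AH cannot be encoded in an instruction carrying a REX prefix, so the 8-bit remainder is recovered by copying AX, shifting right by 8 (SHR16ri), and taking sub_8bit instead of reading AH. The arithmetic of that workaround, sketched in plain C++:

#include <cstdint>
#include <cstdio>

uint8_t urem8(uint8_t A, uint8_t B) {
  uint16_t AX = A;  // DIV8r divides AX by an 8-bit operand
  // After DIV8r: AL holds the quotient, AH holds the remainder.
  uint16_t After = (uint16_t)(((AX % B) << 8) | (AX / B));
  return (uint8_t)(After >> 8); // copy AX, SHR16ri 8, take sub_8bit
}

int main() { printf("%u\n", urem8(23, 5)); } // prints: 3
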
1746 X86::CondCode CC = X86::COND_NE; in X86FastEmitCMoveSelect()
1757 { X86::SETNPr, X86::SETEr , X86::TEST8rr }, in X86FastEmitCMoveSelect()
1758 { X86::SETPr, X86::SETNEr, X86::OR8rr } in X86FastEmitCMoveSelect()
1775 assert(CC <= X86::LAST_VALID_COND && "Unexpected condition code."); in X86FastEmitCMoveSelect()
1788 unsigned FlagReg1 = createResultReg(&X86::GR8RegClass); in X86FastEmitCMoveSelect()
1789 unsigned FlagReg2 = createResultReg(&X86::GR8RegClass); in X86FastEmitCMoveSelect()
1796 unsigned TmpReg = createResultReg(&X86::GR8RegClass); in X86FastEmitCMoveSelect()
1826 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::TEST8ri)) in X86FastEmitCMoveSelect()
1842 unsigned Opc = X86::getCMovFromCond(CC, RC->getSize()); in X86FastEmitCMoveSelect()
1890 { { X86::CMPSSrr, X86::FsANDPSrr, X86::FsANDNPSrr, X86::FsORPSrr }, in X86FastEmitSSESelect()
1891 { X86::VCMPSSrr, X86::VFsANDPSrr, X86::VFsANDNPSrr, X86::VFsORPSrr } }, in X86FastEmitSSESelect()
1892 { { X86::CMPSDrr, X86::FsANDPDrr, X86::FsANDNPDrr, X86::FsORPDrr }, in X86FastEmitSSESelect()
1893 { X86::VCMPSDrr, X86::VFsANDPDrr, X86::VFsANDNPDrr, X86::VFsORPDrr } } in X86FastEmitSSESelect()
1941 case MVT::i8: Opc = X86::CMOV_GR8; break; in X86FastEmitPseudoSelect()
1942 case MVT::i16: Opc = X86::CMOV_GR16; break; in X86FastEmitPseudoSelect()
1943 case MVT::i32: Opc = X86::CMOV_GR32; break; in X86FastEmitPseudoSelect()
1944 case MVT::f32: Opc = X86::CMOV_FR32; break; in X86FastEmitPseudoSelect()
1945 case MVT::f64: Opc = X86::CMOV_FR64; break; in X86FastEmitPseudoSelect()
1949 X86::CondCode CC = X86::COND_NE; in X86FastEmitPseudoSelect()
1958 if (CC > X86::LAST_VALID_COND) in X86FastEmitPseudoSelect()
1975 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::TEST8ri)) in X86FastEmitPseudoSelect()
2053 unsigned ResultReg = createResultReg(&X86::FR64RegClass); in X86SelectFPExt()
2055 TII.get(X86::CVTSS2SDrr), ResultReg) in X86SelectFPExt()
2072 unsigned ResultReg = createResultReg(&X86::FR32RegClass); in X86SelectFPTrunc()
2074 TII.get(X86::CVTSD2SSrr), ResultReg) in X86SelectFPTrunc()
2110 (const TargetRegisterClass*)&X86::GR16_ABCDRegClass : in X86SelectTrunc()
2111 (const TargetRegisterClass*)&X86::GR32_ABCDRegClass; in X86SelectTrunc()
2121 X86::sub_8bit); in X86SelectTrunc()
2197 case MVT::i32: Opc = X86::MOV32rm; RC = &X86::GR32RegClass; break; in X86VisitIntrinsicCall()
2198 case MVT::i64: Opc = X86::MOV64rm; RC = &X86::GR64RegClass; break; in X86VisitIntrinsicCall()
2209 assert(((FrameReg == X86::RBP && VT == MVT::i64) || in X86VisitIntrinsicCall()
2210 (FrameReg == X86::EBP && VT == MVT::i32)) && in X86VisitIntrinsicCall()
2310 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::TRAP)); in X86VisitIntrinsicCall()
2327 {X86::SQRTSSr, X86::VSQRTSSr}, in X86VisitIntrinsicCall()
2328 {X86::SQRTSDr, X86::VSQRTSDr} in X86VisitIntrinsicCall()
2335 case MVT::f32: Opc = SqrtOpc[0][HasAVX]; RC = &X86::FR32RegClass; break; in X86VisitIntrinsicCall()
2336 case MVT::f64: Opc = SqrtOpc[1][HasAVX]; RC = &X86::FR64RegClass; break; in X86VisitIntrinsicCall()
2397 BaseOpc = ISD::ADD; CondOpc = X86::SETOr; break; in X86VisitIntrinsicCall()
2399 BaseOpc = ISD::ADD; CondOpc = X86::SETBr; break; in X86VisitIntrinsicCall()
2401 BaseOpc = ISD::SUB; CondOpc = X86::SETOr; break; in X86VisitIntrinsicCall()
2403 BaseOpc = ISD::SUB; CondOpc = X86::SETBr; break; in X86VisitIntrinsicCall()
2405 BaseOpc = X86ISD::SMUL; CondOpc = X86::SETOr; break; in X86VisitIntrinsicCall()
2407 BaseOpc = X86ISD::UMUL; CondOpc = X86::SETOr; break; in X86VisitIntrinsicCall()
2437 { X86::MUL8r, X86::MUL16r, X86::MUL32r, X86::MUL64r }; in X86VisitIntrinsicCall()
2438 static const unsigned Reg[] = { X86::AL, X86::AX, X86::EAX, X86::RAX }; in X86VisitIntrinsicCall()
2448 { X86::IMUL8r, X86::IMUL16rr, X86::IMUL32rr, X86::IMUL64rr }; in X86VisitIntrinsicCall()
2453 TII.get(TargetOpcode::COPY), X86::AL) in X86VisitIntrinsicCall()
2501 { { X86::CVTTSS2SIrr, X86::VCVTTSS2SIrr }, in X86VisitIntrinsicCall()
2502 { X86::CVTTSS2SI64rr, X86::VCVTTSS2SI64rr } }, in X86VisitIntrinsicCall()
2503 { { X86::CVTTSD2SIrr, X86::VCVTTSD2SIrr }, in X86VisitIntrinsicCall()
2504 { X86::CVTTSD2SI64rr, X86::VCVTTSD2SI64rr } } in X86VisitIntrinsicCall()
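
In the X86VisitIntrinsicCall matches, source lines 2397-2407 map the *.with.overflow intrinsics to flag reads: unsigned add/sub overflow is the carry flag (SETBr), while signed add/sub and both multiplies use the overflow flag (SETOr). A sketch of the unsigned-add case; the wrap test below is exactly the condition CF (and hence SETB) captures after the emitted ADD:

#include <cstdint>
#include <cstdio>

// Unsigned add overflow is a carry out of the top bit, which is exactly
// the CF that SETBr reads after the ADD emitted for uadd.with.overflow.
bool uaddWithOverflow(uint32_t A, uint32_t B, uint32_t &Sum) {
  Sum = A + B;     // wraps modulo 2^32, like the hardware ADD
  return Sum < A;  // wrapped <=> CF was set
}

int main() {
  uint32_t S;
  bool O = uaddWithOverflow(0xFFFFFFFFu, 2u, S);
  printf("%u %d\n", S, (int)O); // prints: 1 1
}
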
2602 X86::EDI, X86::ESI, X86::EDX, X86::ECX, X86::R8D, X86::R9D in FastLowerArguments()
2605 X86::RDI, X86::RSI, X86::RDX, X86::RCX, X86::R8 , X86::R9 in FastLowerArguments()
2608 X86::XMM0, X86::XMM1, X86::XMM2, X86::XMM3, in FastLowerArguments()
2609 X86::XMM4, X86::XMM5, X86::XMM6, X86::XMM7 in FastLowerArguments()
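
The FastLowerArguments tables (source lines 2602-2609) are the SysV AMD64 argument registers: the first six integer arguments in (E)DI, (E)SI, (E)DX, (E)CX, R8(D), R9(D), and the first eight FP/vector arguments in XMM0-XMM7. A toy lookup with string names standing in for the register enums, just to show how an argument index selects a register:

#include <cstdio>

// Toy mirror of the FastLowerArguments tables; string names stand in for
// the register enums. The i-th integer argument goes in GPR64ArgRegs[i],
// the i-th FP/vector argument in XMMArgRegs[i].
static const char *GPR64ArgRegs[] = { "RDI", "RSI", "RDX", "RCX", "R8", "R9" };
static const char *XMMArgRegs[]   = { "XMM0", "XMM1", "XMM2", "XMM3",
                                      "XMM4", "XMM5", "XMM6", "XMM7" };

int main() {
  // Third integer argument and first FP argument of a SysV AMD64 call:
  printf("%s %s\n", GPR64ArgRegs[2], XMMArgRegs[0]); // prints: RDX XMM0
}

Relatedly, the MOV8ri into AL at source lines 2957-2958 implements the SysV varargs rule: before a variadic call, AL must carry an upper bound on the number of XMM registers used for arguments.
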
2706 if (X86::isCalleePop(CC, Subtarget->is64Bit(), isVarArg, in DoSelectCall()
2947 TII.get(TargetOpcode::COPY), X86::EBX).addReg(Base); in DoSelectCall()
2953 X86::XMM0, X86::XMM1, X86::XMM2, X86::XMM3, in DoSelectCall()
2954 X86::XMM4, X86::XMM5, X86::XMM6, X86::XMM7 in DoSelectCall()
2957 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::MOV8ri), in DoSelectCall()
2958 X86::AL).addImm(NumXMMRegs); in DoSelectCall()
2967 CallOpc = X86::CALL64r; in DoSelectCall()
2969 CallOpc = X86::CALL32r; in DoSelectCall()
2978 CallOpc = X86::CALL64pcrel32; in DoSelectCall()
2980 CallOpc = X86::CALLpcrel32; in DoSelectCall()
3017 MIB.addReg(X86::EBX, RegState::Implicit); in DoSelectCall()
3020 MIB.addReg(X86::AL, RegState::Implicit); in DoSelectCall()
3069 if ((RVLocs[i].getLocReg() == X86::ST0 || in DoSelectCall()
3070 RVLocs[i].getLocReg() == X86::ST1)) { in DoSelectCall()
3073 CopyReg = createResultReg(&X86::RFP80RegClass); in DoSelectCall()
3076 TII.get(X86::FpPOP_RETVAL), CopyReg); in DoSelectCall()
3089 unsigned Opc = ResVT == MVT::f32 ? X86::ST_Fp80m32 : X86::ST_Fp80m64; in DoSelectCall()
3095 Opc = ResVT == MVT::f32 ? X86::MOVSSrm : X86::MOVSDrm; in DoSelectCall()
3180 Opc = X86::MOV8rm; in TargetMaterializeConstant()
3181 RC = &X86::GR8RegClass; in TargetMaterializeConstant()
3184 Opc = X86::MOV16rm; in TargetMaterializeConstant()
3185 RC = &X86::GR16RegClass; in TargetMaterializeConstant()
3188 Opc = X86::MOV32rm; in TargetMaterializeConstant()
3189 RC = &X86::GR32RegClass; in TargetMaterializeConstant()
3193 Opc = X86::MOV64rm; in TargetMaterializeConstant()
3194 RC = &X86::GR64RegClass; in TargetMaterializeConstant()
3198 Opc = Subtarget->hasAVX() ? X86::VMOVSSrm : X86::MOVSSrm; in TargetMaterializeConstant()
3199 RC = &X86::FR32RegClass; in TargetMaterializeConstant()
3201 Opc = X86::LD_Fp32m; in TargetMaterializeConstant()
3202 RC = &X86::RFP32RegClass; in TargetMaterializeConstant()
3207 Opc = Subtarget->hasAVX() ? X86::VMOVSDrm : X86::MOVSDrm; in TargetMaterializeConstant()
3208 RC = &X86::FR64RegClass; in TargetMaterializeConstant()
3210 Opc = X86::LD_Fp64m; in TargetMaterializeConstant()
3211 RC = &X86::RFP64RegClass; in TargetMaterializeConstant()
3234 Opc = X86::MOV64ri; in TargetMaterializeConstant()
3238 Opc = TLI.getPointerTy() == MVT::i32 ? X86::LEA32r : X86::LEA64r; in TargetMaterializeConstant()
3265 PICBase = X86::RIP; in TargetMaterializeConstant()
3293 unsigned Opc = Subtarget->is64Bit() ? X86::LEA64r : X86::LEA32r; in TargetMaterializeAlloca()
3313 Opc = X86::FsFLD0SS; in TargetMaterializeFloatZero()
3314 RC = &X86::FR32RegClass; in TargetMaterializeFloatZero()
3316 Opc = X86::LD_Fp032; in TargetMaterializeFloatZero()
3317 RC = &X86::RFP32RegClass; in TargetMaterializeFloatZero()
3322 Opc = X86::FsFLD0SD; in TargetMaterializeFloatZero()
3323 RC = &X86::FR64RegClass; in TargetMaterializeFloatZero()
3325 Opc = X86::LD_Fp064; in TargetMaterializeFloatZero()
3326 RC = &X86::RFP64RegClass; in TargetMaterializeFloatZero()
3371 FastISel *X86::createFastISel(FunctionLoweringInfo &funcInfo, in createFastISel()