Searched refs:X86ScalarSSEf64 (Results 1 – 9 of 9) sorted by relevance
925   return !X86ScalarSSEf64 || VT == MVT::f80;  in ShouldShrinkFPConstant()
936   return (VT == MVT::f64 && X86ScalarSSEf64) ||  // f64 is when SSE2  in isScalarFPTypeInSSEReg()
1016  bool X86ScalarSSEf64;  variable
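The two header hits above carry the flag's core contract: X86ScalarSSEf64 records whether scalar f64 arithmetic lives in SSE (XMM) registers rather than on the x87 stack. A minimal sketch of the two predicates, assuming the companion X86ScalarSSEf32 flag for the f32 case (it is absent from these results only because the search term was X86ScalarSSEf64):

  // Sketch of the X86TargetLowering members the hits point at.
  bool ShouldShrinkFPConstant(EVT VT) const {
    // With SSE2, reloading a shrunken f32 constant costs a cvtss2sd,
    // which is more expensive than a plain movsd; so only shrink when
    // on x87, or when the constant is f80 (fldt is very slow).
    return !X86ScalarSSEf64 || VT == MVT::f80;
  }
  bool isScalarFPTypeInSSEReg(EVT VT) const {
    return (VT == MVT::f64 && X86ScalarSSEf64) || // f64 is when SSE2
           (VT == MVT::f32 && X86ScalarSSEf32);   // f32 is when SSE1
  }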
57    bool X86ScalarSSEf64;  member in __anon392708c50111::X86FastISel
65    X86ScalarSSEf64 = Subtarget->hasSSE2();  in X86FastISel()
157   return (VT == MVT::f64 && X86ScalarSSEf64) ||  // f64 is when SSE2  in isScalarFPTypeInSSEReg()
329   if (VT == MVT::f64 && !X86ScalarSSEf64)  in isTypeLegal()
389   if (X86ScalarSSEf64) {  in X86FastEmitLoad()
1331  bool X86ScalarSSEf64 = Subtarget->hasSSE2();  in X86ChooseCmpOpcode()  local
1342  return X86ScalarSSEf64 ? (HasAVX ? X86::VUCOMISDrr : X86::UCOMISDrr) : 0;  in X86ChooseCmpOpcode()
2332  if (X86ScalarSSEf64 && I->getType()->isDoubleTy() &&  in X86SelectFPExt()
2343  if (X86ScalarSSEf64 && I->getType()->isFloatTy() &&  in X86SelectFPTrunc()
3559  if (X86ScalarSSEf64) {  in X86MaterializeFP()
[all …]
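The X86FastISel hits follow the same split: the flag gates whether fast-isel treats f64 as a legal XMM type and which load opcode it emits. A hedged sketch of the f64 case in X86FastEmitLoad, assuming the surrounding Opc/RC locals from that function's opcode-selection switch:

  // Sketch only; Opc and RC are the locals X86FastEmitLoad fills in.
  case MVT::f64:
    if (X86ScalarSSEf64) {
      // SSE2 available: load straight into an XMM register.
      Opc = Subtarget->hasAVX() ? X86::VMOVSDrm : X86::MOVSDrm;
      RC  = &X86::FR64RegClass;
    } else {
      // No SSE2: fall back to an x87 stack load.
      Opc = X86::LD_Fp64m;
      RC  = &X86::RFP64RegClass;
    }
    break;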
76    X86ScalarSSEf64 = Subtarget.hasSSE2();  in X86TargetLowering()
261   if (!X86ScalarSSEf64) {  in X86TargetLowering()
484   if (!Subtarget.useSoftFloat() && X86ScalarSSEf64) {  in X86TargetLowering()
1851  return X86ScalarSSEf64;  in isSafeMemOpType()
13712 if (SrcVT == MVT::i64 && DstVT == MVT::f64 && X86ScalarSSEf64)  in LowerUINT_TO_FP()
13714 if (SrcVT == MVT::i32 && X86ScalarSSEf64)  in LowerUINT_TO_FP()
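In X86ISelLowering.cpp the constructor derives the flag from the subtarget and uses it to pick register classes; the bare `if (!X86ScalarSSEf64)` hit guards legalization choices on the no-SSE2 (x87) path, and LowerUINT_TO_FP uses the flag to select its SSE2 fast paths. A condensed sketch of the constructor logic behind the first hits (the real constructor interleaves far more setup):

  X86ScalarSSEf64 = Subtarget.hasSSE2();  // scalar f64 in XMM registers?

  if (!Subtarget.useSoftFloat() && X86ScalarSSEf64) {
    // f32 and f64 use SSE: set up the XMM register classes.
    addRegisterClass(MVT::f32, &X86::FR32RegClass);
    addRegisterClass(MVT::f64, &X86::FR64RegClass);
  }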
1254  return !X86ScalarSSEf64 || VT == MVT::f80;  in ShouldShrinkFPConstant()
1265  return (VT == MVT::f64 && X86ScalarSSEf64) ||  // f64 is when SSE2  in isScalarFPTypeInSSEReg()
1423  bool X86ScalarSSEf64;  variable
56    bool X86ScalarSSEf64;  member in __anon6361a6b30111::X86FastISel
64    X86ScalarSSEf64 = Subtarget->hasSSE2();  in X86FastISel()
160   return (VT == MVT::f64 && X86ScalarSSEf64) ||  // f64 is when SSE2  in isScalarFPTypeInSSEReg()
300   if (VT == MVT::f64 && !X86ScalarSSEf64)  in isTypeLegal()
357   if (X86ScalarSSEf64)  in X86FastEmitLoad()
1359  bool X86ScalarSSEf64 = Subtarget->hasSSE2();  in X86ChooseCmpOpcode()  local
1373  return X86ScalarSSEf64  in X86ChooseCmpOpcode()
2519  if (X86ScalarSSEf64 && I->getType()->isDoubleTy() &&  in X86SelectFPExt()
2533  if (X86ScalarSSEf64 && I->getType()->isFloatTy() &&  in X86SelectFPTrunc()
3781  if (X86ScalarSSEf64)  in X86MaterializeFP()
[all …]
107   X86ScalarSSEf64 = Subtarget.hasSSE2();  in X86TargetLowering()
287   if (!X86ScalarSSEf64) {  in X86TargetLowering()
515   if (!Subtarget.useSoftFloat() && X86ScalarSSEf64) {  in X86TargetLowering()
2309  return X86ScalarSSEf64;  in isSafeMemOpType()
20299 if (SrcVT == MVT::i64 && DstVT == MVT::f64 && X86ScalarSSEf64 && !IsStrict)  in LowerUINT_TO_FP()
20301 if (SrcVT == MVT::i32 && X86ScalarSSEf64 && DstVT != MVT::f80)  in LowerUINT_TO_FP()
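This newer tree tightens the same LowerUINT_TO_FP dispatch: the i64-to-f64 fast path is skipped for strict FP nodes, and the i32 path is skipped when the destination is f80. A sketch of how these two hits likely read in context; LowerUINT_TO_FP_i64/_i32 are helpers in the same file, and SrcVT/DstVT/IsStrict come from the surrounding function:

  if (SrcVT == MVT::i64 && DstVT == MVT::f64 && X86ScalarSSEf64 && !IsStrict)
    return LowerUINT_TO_FP_i64(Op, DAG, Subtarget);
  if (SrcVT == MVT::i32 && X86ScalarSSEf64 && DstVT != MVT::f80)
    return LowerUINT_TO_FP_i32(Op, DAG, Subtarget);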
57    bool X86ScalarSSEf64;  member in __anonab3409270111::X86FastISel
65    X86ScalarSSEf64 = Subtarget->hasSSE2();  in X86FastISel()
161   return (VT == MVT::f64 && X86ScalarSSEf64) ||  // f64 is when SSE2  in isScalarFPTypeInSSEReg()
301   if (VT == MVT::f64 && !X86ScalarSSEf64)  in isTypeLegal()
358   if (X86ScalarSSEf64)  in X86FastEmitLoad()
1343  bool X86ScalarSSEf64 = Subtarget->hasSSE2();  in X86ChooseCmpOpcode()  local
1357  return X86ScalarSSEf64  in X86ChooseCmpOpcode()
2503  if (X86ScalarSSEf64 && I->getType()->isDoubleTy() &&  in X86SelectFPExt()
2517  if (X86ScalarSSEf64 && I->getType()->isFloatTy() &&  in X86SelectFPTrunc()
3759  if (X86ScalarSSEf64)  in X86MaterializeFP()
[all …]
1128  return !X86ScalarSSEf64 || VT == MVT::f80;  in ShouldShrinkFPConstant()
1139  return (VT == MVT::f64 && X86ScalarSSEf64) ||  // f64 is when SSE2  in isScalarFPTypeInSSEReg()
1292  bool X86ScalarSSEf64;  variable
113   X86ScalarSSEf64 = Subtarget.hasSSE2();  in X86TargetLowering()
280   if (!X86ScalarSSEf64) {  in X86TargetLowering()
507   if (!Subtarget.useSoftFloat() && X86ScalarSSEf64) {  in X86TargetLowering()
2306  return X86ScalarSSEf64;  in isSafeMemOpType()
19332 if (SrcVT == MVT::i64 && DstVT == MVT::f64 && X86ScalarSSEf64)  in LowerUINT_TO_FP()
19334 if (SrcVT == MVT::i32 && X86ScalarSSEf64 && DstVT != MVT::f80)  in LowerUINT_TO_FP()