• Home
  • Raw
  • Download

Lines Matching refs:__v4sf

339   return (__m512)__builtin_shufflevector((__v4sf) __A,  in _mm512_broadcastss_ps()
340 (__v4sf)_mm_undefined_ps(), in _mm512_broadcastss_ps()
935 return (__m128) __builtin_ia32_maxss_round_mask ((__v4sf) __A, in _mm_mask_max_ss()
936 (__v4sf) __B, in _mm_mask_max_ss()
937 (__v4sf) __W, in _mm_mask_max_ss()
944 return (__m128) __builtin_ia32_maxss_round_mask ((__v4sf) __A, in _mm_maskz_max_ss()
945 (__v4sf) __B, in _mm_maskz_max_ss()
946 (__v4sf) _mm_setzero_ps (), in _mm_maskz_max_ss()
952 (__m128)__builtin_ia32_maxss_round_mask((__v4sf)(__m128)(A), \
953 (__v4sf)(__m128)(B), \
954 (__v4sf)_mm_setzero_ps(), \
958 (__m128)__builtin_ia32_maxss_round_mask((__v4sf)(__m128)(A), \
959 (__v4sf)(__m128)(B), \
960 (__v4sf)(__m128)(W), (__mmask8)(U), \
964 (__m128)__builtin_ia32_maxss_round_mask((__v4sf)(__m128)(A), \
965 (__v4sf)(__m128)(B), \
966 (__v4sf)_mm_setzero_ps(), \
1220 return (__m128) __builtin_ia32_minss_round_mask ((__v4sf) __A, in _mm_mask_min_ss()
1221 (__v4sf) __B, in _mm_mask_min_ss()
1222 (__v4sf) __W, in _mm_mask_min_ss()
1229 return (__m128) __builtin_ia32_minss_round_mask ((__v4sf) __A, in _mm_maskz_min_ss()
1230 (__v4sf) __B, in _mm_maskz_min_ss()
1231 (__v4sf) _mm_setzero_ps (), in _mm_maskz_min_ss()
1237 (__m128)__builtin_ia32_minss_round_mask((__v4sf)(__m128)(A), \
1238 (__v4sf)(__m128)(B), \
1239 (__v4sf)_mm_setzero_ps(), \
1243 (__m128)__builtin_ia32_minss_round_mask((__v4sf)(__m128)(A), \
1244 (__v4sf)(__m128)(B), \
1245 (__v4sf)(__m128)(W), (__mmask8)(U), \
1249 (__m128)__builtin_ia32_minss_round_mask((__v4sf)(__m128)(A), \
1250 (__v4sf)(__m128)(B), \
1251 (__v4sf)_mm_setzero_ps(), \
1622 return (__m128) __builtin_ia32_rsqrt14ss_mask ((__v4sf) __A, in _mm_rsqrt14_ss()
1623 (__v4sf) __B, in _mm_rsqrt14_ss()
1624 (__v4sf) in _mm_rsqrt14_ss()
1632 return (__m128) __builtin_ia32_rsqrt14ss_mask ((__v4sf) __A, in _mm_mask_rsqrt14_ss()
1633 (__v4sf) __B, in _mm_mask_rsqrt14_ss()
1634 (__v4sf) __W, in _mm_mask_rsqrt14_ss()
1641 return (__m128) __builtin_ia32_rsqrt14ss_mask ((__v4sf) __A, in _mm_maskz_rsqrt14_ss()
1642 (__v4sf) __B, in _mm_maskz_rsqrt14_ss()
1643 (__v4sf) _mm_setzero_ps (), in _mm_maskz_rsqrt14_ss()
1730 return (__m128) __builtin_ia32_rcp14ss_mask ((__v4sf) __A, in _mm_rcp14_ss()
1731 (__v4sf) __B, in _mm_rcp14_ss()
1732 (__v4sf) in _mm_rcp14_ss()
1740 return (__m128) __builtin_ia32_rcp14ss_mask ((__v4sf) __A, in _mm_mask_rcp14_ss()
1741 (__v4sf) __B, in _mm_mask_rcp14_ss()
1742 (__v4sf) __W, in _mm_mask_rcp14_ss()
1749 return (__m128) __builtin_ia32_rcp14ss_mask ((__v4sf) __A, in _mm_maskz_rcp14_ss()
1750 (__v4sf) __B, in _mm_maskz_rcp14_ss()
1751 (__v4sf) _mm_setzero_ps (), in _mm_maskz_rcp14_ss()
1909 return (__m128) __builtin_ia32_addss_round_mask ((__v4sf) __A, in _mm_mask_add_ss()
1910 (__v4sf) __B, in _mm_mask_add_ss()
1911 (__v4sf) __W, in _mm_mask_add_ss()
1918 return (__m128) __builtin_ia32_addss_round_mask ((__v4sf) __A, in _mm_maskz_add_ss()
1919 (__v4sf) __B, in _mm_maskz_add_ss()
1920 (__v4sf) _mm_setzero_ps (), in _mm_maskz_add_ss()
1926 (__m128)__builtin_ia32_addss_round_mask((__v4sf)(__m128)(A), \
1927 (__v4sf)(__m128)(B), \
1928 (__v4sf)_mm_setzero_ps(), \
1932 (__m128)__builtin_ia32_addss_round_mask((__v4sf)(__m128)(A), \
1933 (__v4sf)(__m128)(B), \
1934 (__v4sf)(__m128)(W), (__mmask8)(U), \
1938 (__m128)__builtin_ia32_addss_round_mask((__v4sf)(__m128)(A), \
1939 (__v4sf)(__m128)(B), \
1940 (__v4sf)_mm_setzero_ps(), \
2052 return (__m128) __builtin_ia32_subss_round_mask ((__v4sf) __A, in _mm_mask_sub_ss()
2053 (__v4sf) __B, in _mm_mask_sub_ss()
2054 (__v4sf) __W, in _mm_mask_sub_ss()
2061 return (__m128) __builtin_ia32_subss_round_mask ((__v4sf) __A, in _mm_maskz_sub_ss()
2062 (__v4sf) __B, in _mm_maskz_sub_ss()
2063 (__v4sf) _mm_setzero_ps (), in _mm_maskz_sub_ss()
2068 (__m128)__builtin_ia32_subss_round_mask((__v4sf)(__m128)(A), \
2069 (__v4sf)(__m128)(B), \
2070 (__v4sf)_mm_setzero_ps(), \
2074 (__m128)__builtin_ia32_subss_round_mask((__v4sf)(__m128)(A), \
2075 (__v4sf)(__m128)(B), \
2076 (__v4sf)(__m128)(W), (__mmask8)(U), \
2080 (__m128)__builtin_ia32_subss_round_mask((__v4sf)(__m128)(A), \
2081 (__v4sf)(__m128)(B), \
2082 (__v4sf)_mm_setzero_ps(), \
2197 return (__m128) __builtin_ia32_mulss_round_mask ((__v4sf) __A, in _mm_mask_mul_ss()
2198 (__v4sf) __B, in _mm_mask_mul_ss()
2199 (__v4sf) __W, in _mm_mask_mul_ss()
2206 return (__m128) __builtin_ia32_mulss_round_mask ((__v4sf) __A, in _mm_maskz_mul_ss()
2207 (__v4sf) __B, in _mm_maskz_mul_ss()
2208 (__v4sf) _mm_setzero_ps (), in _mm_maskz_mul_ss()
2213 (__m128)__builtin_ia32_mulss_round_mask((__v4sf)(__m128)(A), \
2214 (__v4sf)(__m128)(B), \
2215 (__v4sf)_mm_setzero_ps(), \
2219 (__m128)__builtin_ia32_mulss_round_mask((__v4sf)(__m128)(A), \
2220 (__v4sf)(__m128)(B), \
2221 (__v4sf)(__m128)(W), (__mmask8)(U), \
2225 (__m128)__builtin_ia32_mulss_round_mask((__v4sf)(__m128)(A), \
2226 (__v4sf)(__m128)(B), \
2227 (__v4sf)_mm_setzero_ps(), \
2342 return (__m128) __builtin_ia32_divss_round_mask ((__v4sf) __A, in _mm_mask_div_ss()
2343 (__v4sf) __B, in _mm_mask_div_ss()
2344 (__v4sf) __W, in _mm_mask_div_ss()
2351 return (__m128) __builtin_ia32_divss_round_mask ((__v4sf) __A, in _mm_maskz_div_ss()
2352 (__v4sf) __B, in _mm_maskz_div_ss()
2353 (__v4sf) _mm_setzero_ps (), in _mm_maskz_div_ss()
2359 (__m128)__builtin_ia32_divss_round_mask((__v4sf)(__m128)(A), \
2360 (__v4sf)(__m128)(B), \
2361 (__v4sf)_mm_setzero_ps(), \
2365 (__m128)__builtin_ia32_divss_round_mask((__v4sf)(__m128)(A), \
2366 (__v4sf)(__m128)(B), \
2367 (__v4sf)(__m128)(W), (__mmask8)(U), \
2371 (__m128)__builtin_ia32_divss_round_mask((__v4sf)(__m128)(A), \
2372 (__v4sf)(__m128)(B), \
2373 (__v4sf)_mm_setzero_ps(), \
3499 (__v4sf)_mm_setzero_ps(), \
3504 (__v4sf)(__m128)(W), \
3509 (__v4sf)_mm_setzero_ps(), \
5681 (__m128)__builtin_ia32_fixupimmss_mask((__v4sf)(__m128)(A), \
5682 (__v4sf)(__m128)(B), \
5687 (__m128)__builtin_ia32_fixupimmss_mask((__v4sf)(__m128)(A), \
5688 (__v4sf)(__m128)(B), \
5693 (__m128)__builtin_ia32_fixupimmss_mask((__v4sf)(__m128)(A), \
5694 (__v4sf)(__m128)(B), \
5700 (__m128)__builtin_ia32_fixupimmss_mask((__v4sf)(__m128)(A), \
5701 (__v4sf)(__m128)(B), \
5707 (__m128)__builtin_ia32_fixupimmss_maskz((__v4sf)(__m128)(A), \
5708 (__v4sf)(__m128)(B), \
5713 (__m128)__builtin_ia32_fixupimmss_maskz((__v4sf)(__m128)(A), \
5714 (__v4sf)(__m128)(B), \
5766 (__m128)__builtin_ia32_getexpss128_round_mask((__v4sf)(__m128)(A), \
5767 (__v4sf)(__m128)(B), \
5768 (__v4sf)_mm_setzero_ps(), \
5774 return (__m128) __builtin_ia32_getexpss128_round_mask ((__v4sf) __A, in _mm_getexp_ss()
5775 (__v4sf) __B, (__v4sf) _mm_setzero_ps(), (__mmask8) -1, _MM_FROUND_CUR_DIRECTION); in _mm_getexp_ss()
5781 return (__m128) __builtin_ia32_getexpss128_round_mask ((__v4sf) __A, in _mm_mask_getexp_ss()
5782 (__v4sf) __B, in _mm_mask_getexp_ss()
5783 (__v4sf) __W, in _mm_mask_getexp_ss()
5789 (__m128)__builtin_ia32_getexpss128_round_mask((__v4sf)(__m128)(A), \
5790 (__v4sf)(__m128)(B), \
5791 (__v4sf)(__m128)(W), \
5797 return (__m128) __builtin_ia32_getexpss128_round_mask ((__v4sf) __A, in _mm_maskz_getexp_ss()
5798 (__v4sf) __B, in _mm_maskz_getexp_ss()
5799 (__v4sf) _mm_setzero_ps (), in _mm_maskz_getexp_ss()
5805 (__m128)__builtin_ia32_getexpss128_round_mask((__v4sf)(__m128)(A), \
5806 (__v4sf)(__m128)(B), \
5807 (__v4sf)_mm_setzero_ps(), \
5856 (__m128)__builtin_ia32_getmantss_round_mask((__v4sf)(__m128)(A), \
5857 (__v4sf)(__m128)(B), \
5859 (__v4sf)_mm_setzero_ps(), \
5863 (__m128)__builtin_ia32_getmantss_round_mask((__v4sf)(__m128)(A), \
5864 (__v4sf)(__m128)(B), \
5866 (__v4sf)_mm_setzero_ps(), \
5871 (__m128)__builtin_ia32_getmantss_round_mask((__v4sf)(__m128)(A), \
5872 (__v4sf)(__m128)(B), \
5874 (__v4sf)(__m128)(W), \
5879 (__m128)__builtin_ia32_getmantss_round_mask((__v4sf)(__m128)(A), \
5880 (__v4sf)(__m128)(B), \
5882 (__v4sf)(__m128)(W), \
5886 (__m128)__builtin_ia32_getmantss_round_mask((__v4sf)(__m128)(A), \
5887 (__v4sf)(__m128)(B), \
5889 (__v4sf)_mm_setzero_ps(), \
5894 (__m128)__builtin_ia32_getmantss_round_mask((__v4sf)(__m128)(A), \
5895 (__v4sf)(__m128)(B), \
5897 (__v4sf)_mm_setzero_ps(), \
5911 (int)__builtin_ia32_vcomiss((__v4sf)(__m128)(A), (__v4sf)(__m128)(B), \
6344 (int)__builtin_ia32_vcvtss2si32((__v4sf)(__m128)(A), (int)(R)); })
6347 (int)__builtin_ia32_vcvtss2si32((__v4sf)(__m128)(A), (int)(R)); })
6350 (long long)__builtin_ia32_vcvtss2si64((__v4sf)(__m128)(A), (int)(R)); })
6353 (long long)__builtin_ia32_vcvtss2si64((__v4sf)(__m128)(A), (int)(R)); })
6356 (unsigned int)__builtin_ia32_vcvtss2usi32((__v4sf)(__m128)(A), (int)(R)); })
6361 return (unsigned) __builtin_ia32_vcvtss2usi32 ((__v4sf) __A, in _mm_cvtss_u32()
6366 (unsigned long long)__builtin_ia32_vcvtss2usi64((__v4sf)(__m128)(A), \
6372 return (unsigned long long) __builtin_ia32_vcvtss2usi64 ((__v4sf) in _mm_cvtss_u64()
6426 (int)__builtin_ia32_vcvttss2si32((__v4sf)(__m128)(A), (int)(R)); })
6429 (int)__builtin_ia32_vcvttss2si32((__v4sf)(__m128)(A), (int)(R)); })
6434 return (int) __builtin_ia32_vcvttss2si32 ((__v4sf) __A, in _mm_cvttss_i32()
6439 (long long)__builtin_ia32_vcvttss2si64((__v4sf)(__m128)(A), (int)(R)); })
6442 (long long)__builtin_ia32_vcvttss2si64((__v4sf)(__m128)(A), (int)(R)); })
6447 return (long long) __builtin_ia32_vcvttss2si64 ((__v4sf) __A, in _mm_cvttss_i64()
6452 (unsigned int)__builtin_ia32_vcvttss2usi32((__v4sf)(__m128)(A), (int)(R)); })
6457 return (unsigned) __builtin_ia32_vcvttss2usi32 ((__v4sf) __A, in _mm_cvttss_u32()
6462 (unsigned long long)__builtin_ia32_vcvttss2usi64((__v4sf)(__m128)(A), \
6468 return (unsigned long long) __builtin_ia32_vcvttss2usi64 ((__v4sf) in _mm_cvttss_u64()
6795 (__m128)__builtin_ia32_rndscaless_round_mask((__v4sf)(__m128)(A), \
6796 (__v4sf)(__m128)(B), \
6797 (__v4sf)_mm_setzero_ps(), \
6802 (__m128)__builtin_ia32_rndscaless_round_mask((__v4sf)(__m128)(A), \
6803 (__v4sf)(__m128)(B), \
6804 (__v4sf)_mm_setzero_ps(), \
6809 (__m128)__builtin_ia32_rndscaless_round_mask((__v4sf)(__m128)(A), \
6810 (__v4sf)(__m128)(B), \
6811 (__v4sf)(__m128)(W), \
6816 (__m128)__builtin_ia32_rndscaless_round_mask((__v4sf)(__m128)(A), \
6817 (__v4sf)(__m128)(B), \
6818 (__v4sf)(__m128)(W), \
6823 (__m128)__builtin_ia32_rndscaless_round_mask((__v4sf)(__m128)(A), \
6824 (__v4sf)(__m128)(B), \
6825 (__v4sf)_mm_setzero_ps(), \
6830 (__m128)__builtin_ia32_rndscaless_round_mask((__v4sf)(__m128)(A), \
6831 (__v4sf)(__m128)(B), \
6832 (__v4sf)_mm_setzero_ps(), \
6984 (__m128)__builtin_ia32_scalefss_round_mask((__v4sf)(__m128)(A), \
6985 (__v4sf)(__m128)(B), \
6986 (__v4sf)_mm_setzero_ps(), \
6992 return (__m128) __builtin_ia32_scalefss_round_mask ((__v4sf) __A, in _mm_scalef_ss()
6993 (__v4sf)( __B), (__v4sf) _mm_setzero_ps(), in _mm_scalef_ss()
7001 return (__m128) __builtin_ia32_scalefss_round_mask ( (__v4sf) __A, in _mm_mask_scalef_ss()
7002 (__v4sf) __B, in _mm_mask_scalef_ss()
7003 (__v4sf) __W, in _mm_mask_scalef_ss()
7009 (__m128)__builtin_ia32_scalefss_round_mask((__v4sf)(__m128)(A), \
7010 (__v4sf)(__m128)(B), \
7011 (__v4sf)(__m128)(W), \
7017 return (__m128) __builtin_ia32_scalefss_round_mask ( (__v4sf) __A, in _mm_maskz_scalef_ss()
7018 (__v4sf) __B, in _mm_maskz_scalef_ss()
7019 (__v4sf) _mm_setzero_ps (), in _mm_maskz_scalef_ss()
7025 (__m128)__builtin_ia32_scalefss_round_mask((__v4sf)(__m128)(A), \
7026 (__v4sf)(__m128)(B), \
7027 (__v4sf)_mm_setzero_ps(), \
7224 (__m128)__builtin_ia32_sqrtss_round_mask((__v4sf)(__m128)(A), \
7225 (__v4sf)(__m128)(B), \
7226 (__v4sf)_mm_setzero_ps(), \
7232 return (__m128) __builtin_ia32_sqrtss_round_mask ( (__v4sf) __A, in _mm_mask_sqrt_ss()
7233 (__v4sf) __B, in _mm_mask_sqrt_ss()
7234 (__v4sf) __W, in _mm_mask_sqrt_ss()
7240 (__m128)__builtin_ia32_sqrtss_round_mask((__v4sf)(__m128)(A), \
7241 (__v4sf)(__m128)(B), \
7242 (__v4sf)(__m128)(W), (__mmask8)(U), \
7248 return (__m128) __builtin_ia32_sqrtss_round_mask ( (__v4sf) __A, in _mm_maskz_sqrt_ss()
7249 (__v4sf) __B, in _mm_maskz_sqrt_ss()
7250 (__v4sf) _mm_setzero_ps (), in _mm_maskz_sqrt_ss()
7256 (__m128)__builtin_ia32_sqrtss_round_mask((__v4sf)(__m128)(A), \
7257 (__v4sf)(__m128)(B), \
7258 (__v4sf)_mm_setzero_ps(), \
7264 return (__m512) __builtin_ia32_broadcastf32x4_512 ((__v4sf) __A, in _mm512_broadcast_f32x4()
7273 return (__m512) __builtin_ia32_broadcastf32x4_512 ((__v4sf) __A, in _mm512_mask_broadcast_f32x4()
7281 return (__m512) __builtin_ia32_broadcastf32x4_512 ((__v4sf) __A, in _mm512_maskz_broadcast_f32x4()
7904 (__v4sf)(__m128)(B), (int)(imm), \
7910 (__v4sf)(__m128)(B), (int)(imm), \
7916 (__v4sf)(__m128)(B), (int)(imm), \
8279 return (__m128) __builtin_ia32_vfmaddss3_mask ((__v4sf) __A, in _mm_mask_fmadd_ss()
8280 (__v4sf) __B, in _mm_mask_fmadd_ss()
8281 (__v4sf) __W, in _mm_mask_fmadd_ss()
8287 (__m128)__builtin_ia32_vfmaddss3_mask((__v4sf)(__m128)(A), \
8288 (__v4sf)(__m128)(B), \
8289 (__v4sf)(__m128)(W), (__mmask8)(U), \
8295 return (__m128) __builtin_ia32_vfmaddss3_maskz ((__v4sf) __A, in _mm_maskz_fmadd_ss()
8296 (__v4sf) __B, in _mm_maskz_fmadd_ss()
8297 (__v4sf) __C, in _mm_maskz_fmadd_ss()
8303 (__m128)__builtin_ia32_vfmaddss3_maskz((__v4sf)(__m128)(A), \
8304 (__v4sf)(__m128)(B), \
8305 (__v4sf)(__m128)(C), (__mmask8)(U), \
8311 return (__m128) __builtin_ia32_vfmaddss3_mask3 ((__v4sf) __W, in _mm_mask3_fmadd_ss()
8312 (__v4sf) __X, in _mm_mask3_fmadd_ss()
8313 (__v4sf) __Y, in _mm_mask3_fmadd_ss()
8319 (__m128)__builtin_ia32_vfmaddss3_mask3((__v4sf)(__m128)(W), \
8320 (__v4sf)(__m128)(X), \
8321 (__v4sf)(__m128)(Y), (__mmask8)(U), \
8327 return (__m128) __builtin_ia32_vfmaddss3_mask ((__v4sf) __A, in _mm_mask_fmsub_ss()
8328 -(__v4sf) __B, in _mm_mask_fmsub_ss()
8329 (__v4sf) __W, in _mm_mask_fmsub_ss()
8335 (__m128)__builtin_ia32_vfmaddss3_mask((__v4sf)(__m128)(A), \
8336 -(__v4sf)(__m128)(B), \
8337 (__v4sf)(__m128)(W), (__mmask8)(U), \
8343 return (__m128) __builtin_ia32_vfmaddss3_maskz ((__v4sf) __A, in _mm_maskz_fmsub_ss()
8344 (__v4sf) __B, in _mm_maskz_fmsub_ss()
8345 -(__v4sf) __C, in _mm_maskz_fmsub_ss()
8351 (__m128)__builtin_ia32_vfmaddss3_maskz((__v4sf)(__m128)(A), \
8352 (__v4sf)(__m128)(B), \
8353 -(__v4sf)(__m128)(C), (__mmask8)(U), \
8359 return (__m128) __builtin_ia32_vfmaddss3_mask3 ((__v4sf) __W, in _mm_mask3_fmsub_ss()
8360 (__v4sf) __X, in _mm_mask3_fmsub_ss()
8361 -(__v4sf) __Y, in _mm_mask3_fmsub_ss()
8367 (__m128)__builtin_ia32_vfmaddss3_mask3((__v4sf)(__m128)(W), \
8368 (__v4sf)(__m128)(X), \
8369 -(__v4sf)(__m128)(Y), (__mmask8)(U), \
8375 return (__m128) __builtin_ia32_vfmaddss3_mask (-(__v4sf) __A, in _mm_mask_fnmadd_ss()
8376 (__v4sf) __B, in _mm_mask_fnmadd_ss()
8377 (__v4sf) __W, in _mm_mask_fnmadd_ss()
8383 (__m128)__builtin_ia32_vfmaddss3_mask(-(__v4sf)(__m128)(A), \
8384 (__v4sf)(__m128)(B), \
8385 (__v4sf)(__m128)(W), (__mmask8)(U), \
8391 return (__m128) __builtin_ia32_vfmaddss3_maskz (-(__v4sf) __A, in _mm_maskz_fnmadd_ss()
8392 (__v4sf) __B, in _mm_maskz_fnmadd_ss()
8393 (__v4sf) __C, in _mm_maskz_fnmadd_ss()
8399 (__m128)__builtin_ia32_vfmaddss3_maskz(-(__v4sf)(__m128)(A), \
8400 (__v4sf)(__m128)(B), \
8401 (__v4sf)(__m128)(C), (__mmask8)(U), \
8407 return (__m128) __builtin_ia32_vfmaddss3_mask3 (-(__v4sf) __W, in _mm_mask3_fnmadd_ss()
8408 (__v4sf) __X, in _mm_mask3_fnmadd_ss()
8409 (__v4sf) __Y, in _mm_mask3_fnmadd_ss()
8415 (__m128)__builtin_ia32_vfmaddss3_mask3(-(__v4sf)(__m128)(W), \
8416 (__v4sf)(__m128)(X), \
8417 (__v4sf)(__m128)(Y), (__mmask8)(U), \
8423 return (__m128) __builtin_ia32_vfmaddss3_mask (-(__v4sf) __A, in _mm_mask_fnmsub_ss()
8424 -(__v4sf) __B, in _mm_mask_fnmsub_ss()
8425 (__v4sf) __W, in _mm_mask_fnmsub_ss()
8431 (__m128)__builtin_ia32_vfmaddss3_mask(-(__v4sf)(__m128)(A), \
8432 -(__v4sf)(__m128)(B), \
8433 (__v4sf)(__m128)(W), (__mmask8)(U), \
8439 return (__m128) __builtin_ia32_vfmaddss3_maskz (-(__v4sf) __A, in _mm_maskz_fnmsub_ss()
8440 (__v4sf) __B, in _mm_maskz_fnmsub_ss()
8441 -(__v4sf) __C, in _mm_maskz_fnmsub_ss()
8447 (__m128)__builtin_ia32_vfmaddss3_maskz(-(__v4sf)(__m128)(A), \
8448 (__v4sf)(__m128)(B), \
8449 -(__v4sf)(__m128)(C), (__mmask8)(U), \
8455 return (__m128) __builtin_ia32_vfmaddss3_mask3 (-(__v4sf) __W, in _mm_mask3_fnmsub_ss()
8456 (__v4sf) __X, in _mm_mask3_fnmsub_ss()
8457 -(__v4sf) __Y, in _mm_mask3_fnmsub_ss()
8463 (__m128)__builtin_ia32_vfmaddss3_mask3(-(__v4sf)(__m128)(W), \
8464 (__v4sf)(__m128)(X), \
8465 -(__v4sf)(__m128)(Y), (__mmask8)(U), \
8956 (__mmask8)__builtin_ia32_cmpss_mask((__v4sf)(__m128)(X), \
8957 (__v4sf)(__m128)(Y), (int)(P), \
8961 (__mmask8)__builtin_ia32_cmpss_mask((__v4sf)(__m128)(X), \
8962 (__v4sf)(__m128)(Y), (int)(P), \
8966 (__mmask8)__builtin_ia32_cmpss_mask((__v4sf)(__m128)(X), \
8967 (__v4sf)(__m128)(Y), (int)(P), \
8972 (__mmask8)__builtin_ia32_cmpss_mask((__v4sf)(__m128)(X), \
8973 (__v4sf)(__m128)(Y), (int)(P), \
9308 (__m128)__builtin_ia32_cvtsd2ss_round_mask((__v4sf)(__m128)(A), \
9310 (__v4sf)_mm_undefined_ps(), \
9314 (__m128)__builtin_ia32_cvtsd2ss_round_mask((__v4sf)(__m128)(A), \
9316 (__v4sf)(__m128)(W), \
9320 (__m128)__builtin_ia32_cvtsd2ss_round_mask((__v4sf)(__m128)(A), \
9322 (__v4sf)_mm_setzero_ps(), \
9328 return __builtin_ia32_cvtsd2ss_round_mask ((__v4sf)(__A), in _mm_mask_cvtsd_ss()
9330 (__v4sf)(__W), in _mm_mask_cvtsd_ss()
9337 return __builtin_ia32_cvtsd2ss_round_mask ((__v4sf)(__A), in _mm_maskz_cvtsd_ss()
9339 (__v4sf)_mm_setzero_ps(), in _mm_maskz_cvtsd_ss()
9361 (__m128)__builtin_ia32_cvtsi2ss32((__v4sf)(__m128)(A), (int)(B), (int)(R)); })
9364 (__m128)__builtin_ia32_cvtsi2ss32((__v4sf)(__m128)(A), (int)(B), (int)(R)); })
9367 (__m128)__builtin_ia32_cvtsi2ss64((__v4sf)(__m128)(A), (long long)(B), \
9371 (__m128)__builtin_ia32_cvtsi2ss64((__v4sf)(__m128)(A), (long long)(B), \
9376 (__v4sf)(__m128)(B), \
9382 (__v4sf)(__m128)(B), \
9388 (__v4sf)(__m128)(B), \
9396 (__v4sf)(__B), in _mm_mask_cvtss_sd()
9405 (__v4sf)(__B), in _mm_maskz_cvtss_sd()
9428 (__m128)__builtin_ia32_cvtusi2ss32((__v4sf)(__m128)(A), (unsigned int)(B), \
9434 return (__m128) __builtin_ia32_cvtusi2ss32 ((__v4sf) __A, __B, in _mm_cvtu32_ss()
9439 (__m128)__builtin_ia32_cvtusi2ss64((__v4sf)(__m128)(A), \
9445 return (__m128) __builtin_ia32_cvtusi2ss64 ((__v4sf) __A, __B, in _mm_cvtu64_ss()