Lines matching refs:__a — each hit below shows the original source line number, the matching line, and the enclosing function (with "argument" or "local" noting how __a is used there).
61 _mm_add_ss(__m128 __a, __m128 __b) in _mm_add_ss() argument
63 __a[0] += __b[0]; in _mm_add_ss()
64 return __a; in _mm_add_ss()
81 _mm_add_ps(__m128 __a, __m128 __b) in _mm_add_ps() argument
83 return (__m128)((__v4sf)__a + (__v4sf)__b); in _mm_add_ps()
103 _mm_sub_ss(__m128 __a, __m128 __b) in _mm_sub_ss() argument
105 __a[0] -= __b[0]; in _mm_sub_ss()
106 return __a; in _mm_sub_ss()
124 _mm_sub_ps(__m128 __a, __m128 __b) in _mm_sub_ps() argument
126 return (__m128)((__v4sf)__a - (__v4sf)__b); in _mm_sub_ps()
146 _mm_mul_ss(__m128 __a, __m128 __b) in _mm_mul_ss() argument
148 __a[0] *= __b[0]; in _mm_mul_ss()
149 return __a; in _mm_mul_ss()
166 _mm_mul_ps(__m128 __a, __m128 __b) in _mm_mul_ps() argument
168 return (__m128)((__v4sf)__a * (__v4sf)__b); in _mm_mul_ps()
188 _mm_div_ss(__m128 __a, __m128 __b) in _mm_div_ss() argument
190 __a[0] /= __b[0]; in _mm_div_ss()
191 return __a; in _mm_div_ss()
207 _mm_div_ps(__m128 __a, __m128 __b) in _mm_div_ps() argument
209 return (__m128)((__v4sf)__a / (__v4sf)__b); in _mm_div_ps()
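The _ss forms above operate only on the lowest lane (element 0) and pass lanes 1-3 of __a through unchanged, while the _ps forms operate on all four lanes at once. A minimal usage sketch (assuming SSE is enabled — the default on x86-64, -msse on 32-bit x86; variable names are illustrative):

    #include <stdio.h>
    #include <xmmintrin.h>

    int main(void) {
        __m128 a = _mm_setr_ps(1.0f, 2.0f, 3.0f, 4.0f);      /* lanes 0..3 */
        __m128 b = _mm_setr_ps(10.0f, 20.0f, 30.0f, 40.0f);
        float r[4];

        _mm_storeu_ps(r, _mm_add_ss(a, b));   /* lane 0 only: 11 2 3 4 */
        printf("add_ss: %g %g %g %g\n", r[0], r[1], r[2], r[3]);

        _mm_storeu_ps(r, _mm_mul_ps(a, b));   /* all lanes: 10 40 90 160 */
        printf("mul_ps: %g %g %g %g\n", r[0], r[1], r[2], r[3]);
        return 0;
    }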
225 _mm_sqrt_ss(__m128 __a) in _mm_sqrt_ss() argument
227 __m128 __c = __builtin_ia32_sqrtss((__v4sf)__a); in _mm_sqrt_ss()
228 return (__m128) { __c[0], __a[1], __a[2], __a[3] }; in _mm_sqrt_ss()
243 _mm_sqrt_ps(__m128 __a) in _mm_sqrt_ps() argument
245 return __builtin_ia32_sqrtps((__v4sf)__a); in _mm_sqrt_ps()
261 _mm_rcp_ss(__m128 __a) in _mm_rcp_ss() argument
263 __m128 __c = __builtin_ia32_rcpss((__v4sf)__a); in _mm_rcp_ss()
264 return (__m128) { __c[0], __a[1], __a[2], __a[3] }; in _mm_rcp_ss()
279 _mm_rcp_ps(__m128 __a) in _mm_rcp_ps() argument
281 return __builtin_ia32_rcpps((__v4sf)__a); in _mm_rcp_ps()
298 _mm_rsqrt_ss(__m128 __a) in _mm_rsqrt_ss() argument
300 __m128 __c = __builtin_ia32_rsqrtss((__v4sf)__a); in _mm_rsqrt_ss()
301 return (__m128) { __c[0], __a[1], __a[2], __a[3] }; in _mm_rsqrt_ss()
316 _mm_rsqrt_ps(__m128 __a) in _mm_rsqrt_ps() argument
318 return __builtin_ia32_rsqrtps((__v4sf)__a); in _mm_rsqrt_ps()
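_mm_sqrt_ss/_mm_sqrt_ps compute a full-precision square root, while _mm_rcp_* and _mm_rsqrt_* return fast hardware approximations (roughly 12 bits of relative accuracy) of 1/x and 1/sqrt(x); as the lines above show, the _ss variants keep lanes 1-3 from __a. A short sketch, same build assumptions as before:

    #include <stdio.h>
    #include <xmmintrin.h>

    int main(void) {
        __m128 v = _mm_setr_ps(4.0f, 9.0f, 16.0f, 25.0f);
        float r[4];

        _mm_storeu_ps(r, _mm_sqrt_ps(v));    /* 2 3 4 5 */
        printf("sqrt_ps : %g %g %g %g\n", r[0], r[1], r[2], r[3]);

        _mm_storeu_ps(r, _mm_rsqrt_ps(v));   /* ~0.5 ~0.333 ~0.25 ~0.2 (approximate) */
        printf("rsqrt_ps: %g %g %g %g\n", r[0], r[1], r[2], r[3]);
        return 0;
    }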
339 _mm_min_ss(__m128 __a, __m128 __b) in _mm_min_ss() argument
341 return __builtin_ia32_minss((__v4sf)__a, (__v4sf)__b); in _mm_min_ss()
358 _mm_min_ps(__m128 __a, __m128 __b) in _mm_min_ps() argument
360 return __builtin_ia32_minps((__v4sf)__a, (__v4sf)__b); in _mm_min_ps()
381 _mm_max_ss(__m128 __a, __m128 __b) in _mm_max_ss() argument
383 return __builtin_ia32_maxss((__v4sf)__a, (__v4sf)__b); in _mm_max_ss()
400 _mm_max_ps(__m128 __a, __m128 __b) in _mm_max_ps() argument
402 return __builtin_ia32_maxps((__v4sf)__a, (__v4sf)__b); in _mm_max_ps()
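The min/max intrinsics pick a per-lane minimum/maximum (again, the _ss forms touch only lane 0). Note the asymmetry the MINPS/MAXPS instructions have around NaN: when a lane compares unordered, the value from the second operand (__b) is returned. Sketch:

    #include <stdio.h>
    #include <xmmintrin.h>

    int main(void) {
        __m128 a = _mm_setr_ps(1.0f, 5.0f, -2.0f, 8.0f);
        __m128 b = _mm_setr_ps(3.0f, 4.0f, -7.0f, 9.0f);
        float r[4];

        _mm_storeu_ps(r, _mm_min_ps(a, b));   /* 1 4 -7 8 */
        printf("min_ps: %g %g %g %g\n", r[0], r[1], r[2], r[3]);

        _mm_storeu_ps(r, _mm_max_ps(a, b));   /* 3 5 -2 9 */
        printf("max_ps: %g %g %g %g\n", r[0], r[1], r[2], r[3]);
        return 0;
    }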
418 _mm_and_ps(__m128 __a, __m128 __b) in _mm_and_ps() argument
420 return (__m128)((__v4su)__a & (__v4su)__b); in _mm_and_ps()
440 _mm_andnot_ps(__m128 __a, __m128 __b) in _mm_andnot_ps() argument
442 return (__m128)(~(__v4su)__a & (__v4su)__b); in _mm_andnot_ps()
458 _mm_or_ps(__m128 __a, __m128 __b) in _mm_or_ps() argument
460 return (__m128)((__v4su)__a | (__v4su)__b); in _mm_or_ps()
477 _mm_xor_ps(__m128 __a, __m128 __b) in _mm_xor_ps() argument
479 return (__m128)((__v4su)__a ^ (__v4su)__b); in _mm_xor_ps()
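The logical intrinsics treat the 128 bits as a raw bit pattern (hence the __v4su casts in the header); _mm_andnot_ps computes ~__a & __b. Their classic use is a branch-free select against a compare mask. A sketch of that idiom — the helper name blend_by_mask is made up for this example:

    #include <stdio.h>
    #include <xmmintrin.h>

    /* Hypothetical helper: take lanes of x where mask is all-ones, else lanes of y.
       The mask is expected to come from a compare such as _mm_cmplt_ps. */
    static __m128 blend_by_mask(__m128 mask, __m128 x, __m128 y) {
        return _mm_or_ps(_mm_and_ps(mask, x), _mm_andnot_ps(mask, y));
    }

    int main(void) {
        __m128 a = _mm_setr_ps(1.0f, 5.0f, 3.0f, 7.0f);
        __m128 b = _mm_setr_ps(4.0f, 2.0f, 6.0f, 0.0f);
        __m128 lt = _mm_cmplt_ps(a, b);                 /* all-ones where a < b */
        float r[4];

        _mm_storeu_ps(r, blend_by_mask(lt, a, b));      /* per-lane min: 1 2 3 0 */
        printf("%g %g %g %g\n", r[0], r[1], r[2], r[3]);
        return 0;
    }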
499 _mm_cmpeq_ss(__m128 __a, __m128 __b) in _mm_cmpeq_ss() argument
501 return (__m128)__builtin_ia32_cmpeqss((__v4sf)__a, (__v4sf)__b); in _mm_cmpeq_ss()
517 _mm_cmpeq_ps(__m128 __a, __m128 __b) in _mm_cmpeq_ps() argument
519 return (__m128)__builtin_ia32_cmpeqps((__v4sf)__a, (__v4sf)__b); in _mm_cmpeq_ps()
540 _mm_cmplt_ss(__m128 __a, __m128 __b) in _mm_cmplt_ss() argument
542 return (__m128)__builtin_ia32_cmpltss((__v4sf)__a, (__v4sf)__b); in _mm_cmplt_ss()
559 _mm_cmplt_ps(__m128 __a, __m128 __b) in _mm_cmplt_ps() argument
561 return (__m128)__builtin_ia32_cmpltps((__v4sf)__a, (__v4sf)__b); in _mm_cmplt_ps()
583 _mm_cmple_ss(__m128 __a, __m128 __b) in _mm_cmple_ss() argument
585 return (__m128)__builtin_ia32_cmpless((__v4sf)__a, (__v4sf)__b); in _mm_cmple_ss()
602 _mm_cmple_ps(__m128 __a, __m128 __b) in _mm_cmple_ps() argument
604 return (__m128)__builtin_ia32_cmpleps((__v4sf)__a, (__v4sf)__b); in _mm_cmple_ps()
625 _mm_cmpgt_ss(__m128 __a, __m128 __b) in _mm_cmpgt_ss() argument
627 return (__m128)__builtin_shufflevector((__v4sf)__a, in _mm_cmpgt_ss()
628 (__v4sf)__builtin_ia32_cmpltss((__v4sf)__b, (__v4sf)__a), in _mm_cmpgt_ss()
646 _mm_cmpgt_ps(__m128 __a, __m128 __b) in _mm_cmpgt_ps() argument
648 return (__m128)__builtin_ia32_cmpltps((__v4sf)__b, (__v4sf)__a); in _mm_cmpgt_ps()
670 _mm_cmpge_ss(__m128 __a, __m128 __b) in _mm_cmpge_ss() argument
672 return (__m128)__builtin_shufflevector((__v4sf)__a, in _mm_cmpge_ss()
673 (__v4sf)__builtin_ia32_cmpless((__v4sf)__b, (__v4sf)__a), in _mm_cmpge_ss()
691 _mm_cmpge_ps(__m128 __a, __m128 __b) in _mm_cmpge_ps() argument
693 return (__m128)__builtin_ia32_cmpleps((__v4sf)__b, (__v4sf)__a); in _mm_cmpge_ps()
713 _mm_cmpneq_ss(__m128 __a, __m128 __b) in _mm_cmpneq_ss() argument
715 return (__m128)__builtin_ia32_cmpneqss((__v4sf)__a, (__v4sf)__b); in _mm_cmpneq_ss()
731 _mm_cmpneq_ps(__m128 __a, __m128 __b) in _mm_cmpneq_ps() argument
733 return (__m128)__builtin_ia32_cmpneqps((__v4sf)__a, (__v4sf)__b); in _mm_cmpneq_ps()
754 _mm_cmpnlt_ss(__m128 __a, __m128 __b) in _mm_cmpnlt_ss() argument
756 return (__m128)__builtin_ia32_cmpnltss((__v4sf)__a, (__v4sf)__b); in _mm_cmpnlt_ss()
773 _mm_cmpnlt_ps(__m128 __a, __m128 __b) in _mm_cmpnlt_ps() argument
775 return (__m128)__builtin_ia32_cmpnltps((__v4sf)__a, (__v4sf)__b); in _mm_cmpnlt_ps()
797 _mm_cmpnle_ss(__m128 __a, __m128 __b) in _mm_cmpnle_ss() argument
799 return (__m128)__builtin_ia32_cmpnless((__v4sf)__a, (__v4sf)__b); in _mm_cmpnle_ss()
816 _mm_cmpnle_ps(__m128 __a, __m128 __b) in _mm_cmpnle_ps() argument
818 return (__m128)__builtin_ia32_cmpnleps((__v4sf)__a, (__v4sf)__b); in _mm_cmpnle_ps()
840 _mm_cmpngt_ss(__m128 __a, __m128 __b) in _mm_cmpngt_ss() argument
842 return (__m128)__builtin_shufflevector((__v4sf)__a, in _mm_cmpngt_ss()
843 (__v4sf)__builtin_ia32_cmpnltss((__v4sf)__b, (__v4sf)__a), in _mm_cmpngt_ss()
861 _mm_cmpngt_ps(__m128 __a, __m128 __b) in _mm_cmpngt_ps() argument
863 return (__m128)__builtin_ia32_cmpnltps((__v4sf)__b, (__v4sf)__a); in _mm_cmpngt_ps()
885 _mm_cmpnge_ss(__m128 __a, __m128 __b) in _mm_cmpnge_ss() argument
887 return (__m128)__builtin_shufflevector((__v4sf)__a, in _mm_cmpnge_ss()
888 (__v4sf)__builtin_ia32_cmpnless((__v4sf)__b, (__v4sf)__a), in _mm_cmpnge_ss()
906 _mm_cmpnge_ps(__m128 __a, __m128 __b) in _mm_cmpnge_ps() argument
908 return (__m128)__builtin_ia32_cmpnleps((__v4sf)__b, (__v4sf)__a); in _mm_cmpnge_ps()
930 _mm_cmpord_ss(__m128 __a, __m128 __b) in _mm_cmpord_ss() argument
932 return (__m128)__builtin_ia32_cmpordss((__v4sf)__a, (__v4sf)__b); in _mm_cmpord_ss()
949 _mm_cmpord_ps(__m128 __a, __m128 __b) in _mm_cmpord_ps() argument
951 return (__m128)__builtin_ia32_cmpordps((__v4sf)__a, (__v4sf)__b); in _mm_cmpord_ps()
973 _mm_cmpunord_ss(__m128 __a, __m128 __b) in _mm_cmpunord_ss() argument
975 return (__m128)__builtin_ia32_cmpunordss((__v4sf)__a, (__v4sf)__b); in _mm_cmpunord_ss()
992 _mm_cmpunord_ps(__m128 __a, __m128 __b) in _mm_cmpunord_ps() argument
994 return (__m128)__builtin_ia32_cmpunordps((__v4sf)__a, (__v4sf)__b); in _mm_cmpunord_ps()
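The packed compares return a per-lane mask of all-ones (0xFFFFFFFF) or all-zeros rather than a boolean. Since SSE has no native greater-than predicates, the header synthesizes cmpgt/cmpge/cmpngt/cmpnge by swapping the operands into cmplt/cmple/cmpnlt/cmpnle, re-merging lane 0 via shufflevector for the _ss variants, as the lines above show; cmpord/cmpunord test whether neither/either input is NaN. A sketch that inspects the masks as integers:

    #include <math.h>
    #include <stdio.h>
    #include <xmmintrin.h>

    int main(void) {
        __m128 a = _mm_setr_ps(1.0f, 4.0f, 3.0f, NAN);
        __m128 b = _mm_setr_ps(2.0f, 2.0f, 3.0f, 1.0f);
        union { float f[4]; unsigned u[4]; } r;    /* view the mask bits as integers */

        _mm_storeu_ps(r.f, _mm_cmplt_ps(a, b));    /* ffffffff 0 0 0 */
        printf("cmplt_ps   : %08x %08x %08x %08x\n", r.u[0], r.u[1], r.u[2], r.u[3]);

        _mm_storeu_ps(r.f, _mm_cmpunord_ps(a, b)); /* 0 0 0 ffffffff (NaN lane) */
        printf("cmpunord_ps: %08x %08x %08x %08x\n", r.u[0], r.u[1], r.u[2], r.u[3]);
        return 0;
    }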
1012 _mm_comieq_ss(__m128 __a, __m128 __b) in _mm_comieq_ss() argument
1014 return __builtin_ia32_comieq((__v4sf)__a, (__v4sf)__b); in _mm_comieq_ss()
1033 _mm_comilt_ss(__m128 __a, __m128 __b) in _mm_comilt_ss() argument
1035 return __builtin_ia32_comilt((__v4sf)__a, (__v4sf)__b); in _mm_comilt_ss()
1054 _mm_comile_ss(__m128 __a, __m128 __b) in _mm_comile_ss() argument
1056 return __builtin_ia32_comile((__v4sf)__a, (__v4sf)__b); in _mm_comile_ss()
1075 _mm_comigt_ss(__m128 __a, __m128 __b) in _mm_comigt_ss() argument
1077 return __builtin_ia32_comigt((__v4sf)__a, (__v4sf)__b); in _mm_comigt_ss()
1096 _mm_comige_ss(__m128 __a, __m128 __b) in _mm_comige_ss() argument
1098 return __builtin_ia32_comige((__v4sf)__a, (__v4sf)__b); in _mm_comige_ss()
1117 _mm_comineq_ss(__m128 __a, __m128 __b) in _mm_comineq_ss() argument
1119 return __builtin_ia32_comineq((__v4sf)__a, (__v4sf)__b); in _mm_comineq_ss()
1138 _mm_ucomieq_ss(__m128 __a, __m128 __b) in _mm_ucomieq_ss() argument
1140 return __builtin_ia32_ucomieq((__v4sf)__a, (__v4sf)__b); in _mm_ucomieq_ss()
1159 _mm_ucomilt_ss(__m128 __a, __m128 __b) in _mm_ucomilt_ss() argument
1161 return __builtin_ia32_ucomilt((__v4sf)__a, (__v4sf)__b); in _mm_ucomilt_ss()
1181 _mm_ucomile_ss(__m128 __a, __m128 __b) in _mm_ucomile_ss() argument
1183 return __builtin_ia32_ucomile((__v4sf)__a, (__v4sf)__b); in _mm_ucomile_ss()
1203 _mm_ucomigt_ss(__m128 __a, __m128 __b) in _mm_ucomigt_ss() argument
1205 return __builtin_ia32_ucomigt((__v4sf)__a, (__v4sf)__b); in _mm_ucomigt_ss()
1225 _mm_ucomige_ss(__m128 __a, __m128 __b) in _mm_ucomige_ss() argument
1227 return __builtin_ia32_ucomige((__v4sf)__a, (__v4sf)__b); in _mm_ucomige_ss()
1246 _mm_ucomineq_ss(__m128 __a, __m128 __b) in _mm_ucomineq_ss() argument
1248 return __builtin_ia32_ucomineq((__v4sf)__a, (__v4sf)__b); in _mm_ucomineq_ss()
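Unlike the cmp* family, the comi*/ucomi* intrinsics compare only lane 0 and return a plain int (0 or 1), so they drop straight into an if. The difference between the two families is exception behavior: COMISS signals an invalid-operation exception for any NaN input, UCOMISS only for signaling NaNs; both report unordered results the same way. Sketch:

    #include <stdio.h>
    #include <xmmintrin.h>

    int main(void) {
        __m128 a = _mm_set_ss(1.5f);   /* lane 0 = 1.5, lanes 1-3 = 0 */
        __m128 b = _mm_set_ss(2.0f);

        if (_mm_comilt_ss(a, b))
            printf("a[0] < b[0]\n");
        printf("ucomieq: %d  ucomineq: %d\n",
               _mm_ucomieq_ss(a, b), _mm_ucomineq_ss(a, b));
        return 0;
    }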
1263 _mm_cvtss_si32(__m128 __a) in _mm_cvtss_si32() argument
1265 return __builtin_ia32_cvtss2si((__v4sf)__a); in _mm_cvtss_si32()
1280 _mm_cvt_ss2si(__m128 __a) in _mm_cvt_ss2si() argument
1282 return _mm_cvtss_si32(__a); in _mm_cvt_ss2si()
1299 _mm_cvtss_si64(__m128 __a) in _mm_cvtss_si64() argument
1301 return __builtin_ia32_cvtss2si64((__v4sf)__a); in _mm_cvtss_si64()
1317 _mm_cvtps_pi32(__m128 __a) in _mm_cvtps_pi32() argument
1319 return (__m64)__builtin_ia32_cvtps2pi((__v4sf)__a); in _mm_cvtps_pi32()
1333 _mm_cvt_ps2pi(__m128 __a) in _mm_cvt_ps2pi() argument
1335 return _mm_cvtps_pi32(__a); in _mm_cvt_ps2pi()
1351 _mm_cvttss_si32(__m128 __a) in _mm_cvttss_si32() argument
1353 return __a[0]; in _mm_cvttss_si32()
1369 _mm_cvtt_ss2si(__m128 __a) in _mm_cvtt_ss2si() argument
1371 return _mm_cvttss_si32(__a); in _mm_cvtt_ss2si()
1387 _mm_cvttss_si64(__m128 __a) in _mm_cvttss_si64() argument
1389 return __a[0]; in _mm_cvttss_si64()
1404 _mm_cvttps_pi32(__m128 __a) in _mm_cvttps_pi32() argument
1406 return (__m64)__builtin_ia32_cvttps2pi((__v4sf)__a); in _mm_cvttps_pi32()
1421 _mm_cvtt_ps2pi(__m128 __a) in _mm_cvtt_ps2pi() argument
1423 return _mm_cvttps_pi32(__a); in _mm_cvtt_ps2pi()
1443 _mm_cvtsi32_ss(__m128 __a, int __b) in _mm_cvtsi32_ss() argument
1445 __a[0] = __b; in _mm_cvtsi32_ss()
1446 return __a; in _mm_cvtsi32_ss()
1466 _mm_cvt_si2ss(__m128 __a, int __b) in _mm_cvt_si2ss() argument
1468 return _mm_cvtsi32_ss(__a, __b); in _mm_cvt_si2ss()
1490 _mm_cvtsi64_ss(__m128 __a, long long __b) in _mm_cvtsi64_ss() argument
1492 __a[0] = __b; in _mm_cvtsi64_ss()
1493 return __a; in _mm_cvtsi64_ss()
1516 _mm_cvtpi32_ps(__m128 __a, __m64 __b) in _mm_cvtpi32_ps() argument
1518 return __builtin_ia32_cvtpi2ps((__v4sf)__a, (__v2si)__b); in _mm_cvtpi32_ps()
1539 _mm_cvt_pi2ps(__m128 __a, __m64 __b) in _mm_cvt_pi2ps() argument
1541 return _mm_cvtpi32_ps(__a, __b); in _mm_cvt_pi2ps()
1556 _mm_cvtss_f32(__m128 __a) in _mm_cvtss_f32() argument
1558 return __a[0]; in _mm_cvtss_f32()
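_mm_cvtss_si32/_mm_cvtss_si64 convert lane 0 to an integer using the current MXCSR rounding mode (round-to-nearest by default), while the _mm_cvtt* variants truncate toward zero; _mm_cvtsi32_ss/_mm_cvtsi64_ss go the other way and replace only lane 0, and _mm_cvtss_f32 simply extracts lane 0. The *_pi32 forms do the same for two lanes at a time against an __m64. A sketch of the scalar cases:

    #include <stdio.h>
    #include <xmmintrin.h>

    int main(void) {
        __m128 v = _mm_set_ss(2.7f);

        printf("cvtss_si32 : %d\n", _mm_cvtss_si32(v));    /* 3 (round to nearest) */
        printf("cvttss_si32: %d\n", _mm_cvttss_si32(v));   /* 2 (truncate) */
        printf("cvtss_f32  : %g\n", _mm_cvtss_f32(v));     /* 2.7 */

        __m128 w = _mm_cvtsi32_ss(_mm_setzero_ps(), 42);   /* lane 0 becomes 42.0f */
        printf("cvtsi32_ss : %g\n", _mm_cvtss_f32(w));
        return 0;
    }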
1577 _mm_loadh_pi(__m128 __a, const __m64 *__p) in _mm_loadh_pi() argument
1585 return __builtin_shufflevector(__a, __bb, 0, 1, 4, 5); in _mm_loadh_pi()
1604 _mm_loadl_pi(__m128 __a, const __m64 *__p) in _mm_loadl_pi() argument
1612 return __builtin_shufflevector(__a, __bb, 4, 5, 2, 3); in _mm_loadl_pi()
1717 __m128 __a = _mm_load_ps(__p); in _mm_loadr_ps() local
1718 return __builtin_shufflevector((__v4sf)__a, (__v4sf)__a, 3, 2, 1, 0); in _mm_loadr_ps()
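_mm_loadh_pi and _mm_loadl_pi splice 64 bits from memory into the high or low half of an existing vector (the untouched half comes from __a), and _mm_loadr_ps loads four floats from a 16-byte-aligned address in reverse order. A sketch — the casts to __m64* are the usual way these are called:

    #include <stdio.h>
    #include <xmmintrin.h>

    int main(void) {
        float lo[2] = { 1.0f, 2.0f };
        float hi[2] = { 3.0f, 4.0f };
        __m128 v = _mm_setzero_ps();
        v = _mm_loadl_pi(v, (const __m64 *)lo);   /* lanes 0,1 <- lo */
        v = _mm_loadh_pi(v, (const __m64 *)hi);   /* lanes 2,3 <- hi */

        float r[4];
        _mm_storeu_ps(r, v);
        printf("spliced : %g %g %g %g\n", r[0], r[1], r[2], r[3]);    /* 1 2 3 4 */

        _Alignas(16) float buf[4] = { 1.0f, 2.0f, 3.0f, 4.0f };
        _mm_storeu_ps(r, _mm_loadr_ps(buf));
        printf("loadr_ps: %g %g %g %g\n", r[0], r[1], r[2], r[3]);    /* 4 3 2 1 */
        return 0;
    }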
1874 _mm_storeh_pi(__m64 *__p, __m128 __a) in _mm_storeh_pi() argument
1876 __builtin_ia32_storehps((__v2si *)__p, (__v4sf)__a); in _mm_storeh_pi()
1891 _mm_storel_pi(__m64 *__p, __m128 __a) in _mm_storel_pi() argument
1893 __builtin_ia32_storelps((__v2si *)__p, (__v4sf)__a); in _mm_storel_pi()
1908 _mm_store_ss(float *__p, __m128 __a) in _mm_store_ss() argument
1913 ((struct __mm_store_ss_struct*)__p)->__u = __a[0]; in _mm_store_ss()
1929 _mm_storeu_ps(float *__p, __m128 __a) in _mm_storeu_ps() argument
1934 ((struct __storeu_ps*)__p)->__v = __a; in _mm_storeu_ps()
1951 _mm_store_ps(float *__p, __m128 __a) in _mm_store_ps() argument
1953 *(__m128*)__p = __a; in _mm_store_ps()
1970 _mm_store1_ps(float *__p, __m128 __a) in _mm_store1_ps() argument
1972 __a = __builtin_shufflevector((__v4sf)__a, (__v4sf)__a, 0, 0, 0, 0); in _mm_store1_ps()
1973 _mm_store_ps(__p, __a); in _mm_store1_ps()
1989 _mm_store_ps1(float *__p, __m128 __a) in _mm_store_ps1() argument
1991 return _mm_store1_ps(__p, __a); in _mm_store_ps1()
2008 _mm_storer_ps(float *__p, __m128 __a) in _mm_storer_ps() argument
2010 __a = __builtin_shufflevector((__v4sf)__a, (__v4sf)__a, 3, 2, 1, 0); in _mm_storer_ps()
2011 _mm_store_ps(__p, __a); in _mm_storer_ps()
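On the store side: _mm_store_ss writes only lane 0, _mm_storeu_ps accepts any alignment, and _mm_store_ps/_mm_store1_ps/_mm_storer_ps require a 16-byte-aligned destination (store1 broadcasts lane 0 into all four slots, storer writes the lanes in reverse); _mm_storeh_pi/_mm_storel_pi write just the high or low 64 bits. Sketch:

    #include <stdio.h>
    #include <xmmintrin.h>

    int main(void) {
        __m128 v = _mm_setr_ps(1.0f, 2.0f, 3.0f, 4.0f);

        float s = 0.0f;
        _mm_store_ss(&s, v);                    /* s = 1 (lane 0 only) */

        _Alignas(16) float a[4];
        _mm_storer_ps(a, v);                    /* 4 3 2 1 */
        printf("store_ss: %g  storer_ps: %g %g %g %g\n", s, a[0], a[1], a[2], a[3]);

        float hi[2];
        _mm_storeh_pi((__m64 *)hi, v);          /* lanes 2,3 -> hi */
        printf("storeh_pi: %g %g\n", hi[0], hi[1]);
        return 0;
    }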
2062 _mm_stream_pi(__m64 *__p, __m64 __a) in _mm_stream_pi() argument
2064 __builtin_ia32_movntq(__p, __a); in _mm_stream_pi()
2081 _mm_stream_ps(float *__p, __m128 __a) in _mm_stream_ps() argument
2083 __builtin_nontemporal_store((__v4sf)__a, (__v4sf*)__p); in _mm_stream_ps()
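The stream variants store with a non-temporal hint so large write-only buffers don't churn the cache; _mm_stream_ps still needs a 16-byte-aligned destination, and an _mm_sfence() is the conventional way to make the weakly-ordered streaming stores visible before another thread is signaled. Minimal sketch:

    #include <stdio.h>
    #include <xmmintrin.h>

    int main(void) {
        _Alignas(16) float dst[4];
        __m128 v = _mm_set1_ps(3.0f);

        _mm_stream_ps(dst, v);   /* non-temporal 16-byte store */
        _mm_sfence();            /* order it before anything that publishes dst */

        printf("%g %g %g %g\n", dst[0], dst[1], dst[2], dst[3]);
        return 0;
    }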
2161 _mm_max_pi16(__m64 __a, __m64 __b) in _mm_max_pi16() argument
2163 return (__m64)__builtin_ia32_pmaxsw((__v4hi)__a, (__v4hi)__b); in _mm_max_pi16()
2180 _mm_max_pu8(__m64 __a, __m64 __b) in _mm_max_pu8() argument
2182 return (__m64)__builtin_ia32_pmaxub((__v8qi)__a, (__v8qi)__b); in _mm_max_pu8()
2199 _mm_min_pi16(__m64 __a, __m64 __b) in _mm_min_pi16() argument
2201 return (__m64)__builtin_ia32_pminsw((__v4hi)__a, (__v4hi)__b); in _mm_min_pi16()
2218 _mm_min_pu8(__m64 __a, __m64 __b) in _mm_min_pu8() argument
2220 return (__m64)__builtin_ia32_pminub((__v8qi)__a, (__v8qi)__b); in _mm_min_pu8()
2236 _mm_movemask_pi8(__m64 __a) in _mm_movemask_pi8() argument
2238 return __builtin_ia32_pmovmskb((__v8qi)__a); in _mm_movemask_pi8()
2255 _mm_mulhi_pu16(__m64 __a, __m64 __b) in _mm_mulhi_pu16() argument
2257 return (__m64)__builtin_ia32_pmulhuw((__v4hi)__a, (__v4hi)__b); in _mm_mulhi_pu16()
2331 _mm_avg_pu8(__m64 __a, __m64 __b) in _mm_avg_pu8() argument
2333 return (__m64)__builtin_ia32_pavgb((__v8qi)__a, (__v8qi)__b); in _mm_avg_pu8()
2350 _mm_avg_pu16(__m64 __a, __m64 __b) in _mm_avg_pu16() argument
2352 return (__m64)__builtin_ia32_pavgw((__v4hi)__a, (__v4hi)__b); in _mm_avg_pu16()
2372 _mm_sad_pu8(__m64 __a, __m64 __b) in _mm_sad_pu8() argument
2374 return (__m64)__builtin_ia32_psadbw((__v8qi)__a, (__v8qi)__b); in _mm_sad_pu8()
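These are the SSE additions to the 64-bit MMX integer set: signed 16-bit and unsigned 8-bit min/max, byte sign-bit movemask, unsigned high multiply, rounding averages, and sum of absolute differences. Classic MMX aliases the x87 register file, so _mm_empty() is conventionally issued before later x87 floating-point code. Sketch (assumes MMX and SSE are available, as on any x86-64 target):

    #include <stdio.h>
    #include <mmintrin.h>
    #include <xmmintrin.h>

    int main(void) {
        __m64 a = _mm_setr_pi16(1, 40, -3, 7);
        __m64 b = _mm_setr_pi16(2, 30, -9, 7);

        __m64 mx  = _mm_max_pi16(a, b);              /* 2, 40, -3, 7 */
        __m64 sad = _mm_sad_pu8(_mm_set1_pi8(5),     /* 8 * |5 - 1| = 32, in the low word */
                                _mm_set1_pi8(1));

        printf("max lane0: %d  sad: %d\n",
               _mm_extract_pi16(mx, 0), _mm_extract_pi16(sad, 0));
        _mm_empty();                                 /* leave MMX state (legacy requirement) */
        return 0;
    }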
2513 _mm_unpackhi_ps(__m128 __a, __m128 __b) in _mm_unpackhi_ps() argument
2515 return __builtin_shufflevector((__v4sf)__a, (__v4sf)__b, 2, 6, 3, 7); in _mm_unpackhi_ps()
2536 _mm_unpacklo_ps(__m128 __a, __m128 __b) in _mm_unpacklo_ps() argument
2538 return __builtin_shufflevector((__v4sf)__a, (__v4sf)__b, 0, 4, 1, 5); in _mm_unpacklo_ps()
2557 _mm_move_ss(__m128 __a, __m128 __b) in _mm_move_ss() argument
2559 return __builtin_shufflevector((__v4sf)__a, (__v4sf)__b, 4, 1, 2, 3); in _mm_move_ss()
2578 _mm_movehl_ps(__m128 __a, __m128 __b) in _mm_movehl_ps() argument
2580 return __builtin_shufflevector((__v4sf)__a, (__v4sf)__b, 6, 7, 2, 3); in _mm_movehl_ps()
2599 _mm_movelh_ps(__m128 __a, __m128 __b) in _mm_movelh_ps() argument
2601 return __builtin_shufflevector((__v4sf)__a, (__v4sf)__b, 0, 1, 4, 5); in _mm_movelh_ps()
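These are pure lane rearrangements built on __builtin_shufflevector: unpacklo/unpackhi interleave the low/high lane pairs of the two inputs, _mm_move_ss replaces lane 0 of __a with lane 0 of __b, and movehl/movelh copy 64-bit halves; together they are the ingredients of the classic 4x4 transpose (_MM_TRANSPOSE4_PS). Sketch:

    #include <stdio.h>
    #include <xmmintrin.h>

    int main(void) {
        __m128 a = _mm_setr_ps(0.0f, 1.0f, 2.0f, 3.0f);
        __m128 b = _mm_setr_ps(4.0f, 5.0f, 6.0f, 7.0f);
        float r[4];

        _mm_storeu_ps(r, _mm_unpacklo_ps(a, b));   /* 0 4 1 5 */
        printf("unpacklo: %g %g %g %g\n", r[0], r[1], r[2], r[3]);

        _mm_storeu_ps(r, _mm_movehl_ps(a, b));     /* 6 7 2 3 */
        printf("movehl  : %g %g %g %g\n", r[0], r[1], r[2], r[3]);

        _mm_storeu_ps(r, _mm_move_ss(a, b));       /* 4 1 2 3 */
        printf("move_ss : %g %g %g %g\n", r[0], r[1], r[2], r[3]);
        return 0;
    }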
2617 _mm_cvtpi16_ps(__m64 __a) in _mm_cvtpi16_ps() argument
2623 __b = _mm_cmpgt_pi16(__b, __a); in _mm_cvtpi16_ps()
2624 __c = _mm_unpackhi_pi16(__a, __b); in _mm_cvtpi16_ps()
2628 __c = _mm_unpacklo_pi16(__a, __b); in _mm_cvtpi16_ps()
2647 _mm_cvtpu16_ps(__m64 __a) in _mm_cvtpu16_ps() argument
2653 __c = _mm_unpackhi_pi16(__a, __b); in _mm_cvtpu16_ps()
2657 __c = _mm_unpacklo_pi16(__a, __b); in _mm_cvtpu16_ps()
2676 _mm_cvtpi8_ps(__m64 __a) in _mm_cvtpi8_ps() argument
2681 __b = _mm_cmpgt_pi8(__b, __a); in _mm_cvtpi8_ps()
2682 __b = _mm_unpacklo_pi8(__a, __b); in _mm_cvtpi8_ps()
2701 _mm_cvtpu8_ps(__m64 __a) in _mm_cvtpu8_ps() argument
2706 __b = _mm_unpacklo_pi8(__a, __b); in _mm_cvtpu8_ps()
2728 _mm_cvtpi32x2_ps(__m64 __a, __m64 __b) in _mm_cvtpi32x2_ps() argument
2736 return _mm_cvtpi32_ps(__c, __a); in _mm_cvtpi32x2_ps()
2756 _mm_cvtps_pi16(__m128 __a) in _mm_cvtps_pi16() argument
2760 __b = _mm_cvtps_pi32(__a); in _mm_cvtps_pi16()
2761 __a = _mm_movehl_ps(__a, __a); in _mm_cvtps_pi16()
2762 __c = _mm_cvtps_pi32(__a); in _mm_cvtps_pi16()
2785 _mm_cvtps_pi8(__m128 __a) in _mm_cvtps_pi8() argument
2789 __b = _mm_cvtps_pi16(__a); in _mm_cvtps_pi8()
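These composite helpers widen packed signed/unsigned 8- and 16-bit integers to floats, or narrow four floats back to packed integers, by chaining the unpack, sign-compare, cvtpi32_ps/cvtps_pi32 and pack steps visible above. A sketch of the 16-bit round trip (the narrowed values depend on the current rounding mode, round-to-nearest by default):

    #include <stdio.h>
    #include <mmintrin.h>
    #include <xmmintrin.h>

    int main(void) {
        __m64 w = _mm_setr_pi16(-2, 7, 100, -30000);

        __m128 f = _mm_cvtpi16_ps(w);          /* four signed shorts -> four floats */
        f = _mm_mul_ps(f, _mm_set1_ps(0.5f));
        __m64 back = _mm_cvtps_pi16(f);        /* -1 4 50 -15000 (nearest-even) */

        printf("%d %d %d %d\n",
               (short)_mm_extract_pi16(back, 0), (short)_mm_extract_pi16(back, 1),
               (short)_mm_extract_pi16(back, 2), (short)_mm_extract_pi16(back, 3));
        _mm_empty();
        return 0;
    }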
2810 _mm_movemask_ps(__m128 __a) in _mm_movemask_ps() argument
2812 return __builtin_ia32_movmskps((__v4sf)__a); in _mm_movemask_ps()
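_mm_movemask_ps gathers the sign bit of each lane into the low four bits of an int; paired with the cmp* masks it gives cheap "any lane / all lanes" tests. Sketch:

    #include <stdio.h>
    #include <xmmintrin.h>

    int main(void) {
        __m128 a = _mm_setr_ps(1.0f, -2.0f, 3.0f, -4.0f);
        __m128 neg = _mm_cmplt_ps(a, _mm_setzero_ps());

        int signs   = _mm_movemask_ps(a);            /* 0b1010 = 10 */
        int any_neg = _mm_movemask_ps(neg) != 0;
        int all_neg = _mm_movemask_ps(neg) == 0xF;

        printf("signs=%d any_neg=%d all_neg=%d\n", signs, any_neg, all_neg);
        return 0;
    }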