Lines Matching refs:vreinterpretq_m128_f32
163 #define vreinterpretq_m128_f32(x) (x) macro
433 return vreinterpretq_m128_f32(vdupq_n_f32(0)); in _mm_setzero_ps()
443 return vreinterpretq_m128_f32(vdupq_n_f32(_w)); in _mm_set1_ps()
450 return vreinterpretq_m128_f32(vdupq_n_f32(_w)); in _mm_set_ps1()
458 return vreinterpretq_m128_f32(vld1q_f32(data)); in _mm_set_ps()
467 return vreinterpretq_m128_f32(vld1q_f32(data)); in _mm_set_ss()
476 return vreinterpretq_m128_f32(vld1q_f32(data)); in _mm_setr_ps()
769 return vreinterpretq_m128_f32(vld1q_dup_f32(p)); in _mm_load1_ps()
796 return vreinterpretq_m128_f32( in _mm_loadl_pi()
813 return vreinterpretq_m128_f32(vextq_f32(v, v, 2)); in _mm_loadr_ps()
828 return vreinterpretq_m128_f32( in _mm_loadh_pi()
836 return vreinterpretq_m128_f32(vld1q_f32(p)); in _mm_load_ps()
845 return vreinterpretq_m128_f32(vld1q_f32(p)); in _mm_loadu_ps()
920 return vreinterpretq_m128_f32(vsetq_lane_f32(*p, vdupq_n_f32(0), 0)); in _mm_load_ss()
975 return vreinterpretq_m128_f32( in _mm_move_ss()
1148 return vreinterpretq_m128_f32(__builtin_shufflevector( in _mm_movehdup_ps()
1154 return vreinterpretq_m128_f32(vld1q_f32(data)); in _mm_movehdup_ps()
1164 return vreinterpretq_m128_f32(__builtin_shufflevector( in _mm_moveldup_ps()
1170 return vreinterpretq_m128_f32(vld1q_f32(data)); in _mm_moveldup_ps()
1184 return vreinterpretq_m128_f32(vcombine_f32(b32, a32)); in _mm_movehl_ps()
1197 return vreinterpretq_m128_f32(vcombine_f32(a10, b10)); in _mm_movelh_ps()
1290 return vreinterpretq_m128_f32(vcombine_f32(a32, b10)); in _mm_shuffle_ps_1032()
1300 return vreinterpretq_m128_f32(vcombine_f32(a01, b23)); in _mm_shuffle_ps_2301()
1309 return vreinterpretq_m128_f32(vcombine_f32(a21, b03)); in _mm_shuffle_ps_0321()
1318 return vreinterpretq_m128_f32(vcombine_f32(a03, b21)); in _mm_shuffle_ps_2103()
1325 return vreinterpretq_m128_f32(vcombine_f32(a10, b10)); in _mm_shuffle_ps_1010()
1332 return vreinterpretq_m128_f32(vcombine_f32(a01, b10)); in _mm_shuffle_ps_1001()
1339 return vreinterpretq_m128_f32(vcombine_f32(a01, b01)); in _mm_shuffle_ps_0101()
1348 return vreinterpretq_m128_f32(vcombine_f32(a10, b32)); in _mm_shuffle_ps_3210()
1355 return vreinterpretq_m128_f32(vcombine_f32(a11, b00)); in _mm_shuffle_ps_0011()
1363 return vreinterpretq_m128_f32(vcombine_f32(a22, b00)); in _mm_shuffle_ps_0022()
1371 return vreinterpretq_m128_f32(vcombine_f32(a00, b22)); in _mm_shuffle_ps_2200()
1381 return vreinterpretq_m128_f32(vcombine_f32(a02, b32)); in _mm_shuffle_ps_3202()
1389 return vreinterpretq_m128_f32(vcombine_f32(a33, b11)); in _mm_shuffle_ps_1133()
1398 return vreinterpretq_m128_f32(vcombine_f32(a10, b20)); in _mm_shuffle_ps_2010()
1407 return vreinterpretq_m128_f32(vcombine_f32(a01, b20)); in _mm_shuffle_ps_2001()
1416 return vreinterpretq_m128_f32(vcombine_f32(a32, b20)); in _mm_shuffle_ps_2032()
1447 vreinterpretq_m128_f32(ret); \
1460 vreinterpretq_m128_f32(_shuf); \
2369 return vreinterpretq_m128_f32( in _mm_sub_ps()
2824 return vreinterpretq_m128_f32( in _mm_add_ps()
2864 return vreinterpretq_m128_f32(vaddq_f32(a, value)); in _mm_add_ss()
2995 return vreinterpretq_m128_f32( in _mm_mul_ps()
3152 return vreinterpretq_m128_f32(vfmaq_f32(vreinterpretq_f32_m128(c), in _mm_fmadd_ps()
3224 return vreinterpretq_m128_f32( in _mm_div_ps()
3230 return vreinterpretq_m128_f32(vmulq_f32(vreinterpretq_f32_m128(a), recip1)); in _mm_div_ps()
3240 return vreinterpretq_m128_f32( in _mm_div_ss()
3251 return vreinterpretq_m128_f32(recip); in _mm_rcp_ps()
3281 return vreinterpretq_m128_f32(vsqrtq_f32(vreinterpretq_f32_m128(in))); in _mm_sqrt_ps()
3286 return vreinterpretq_m128_f32(sq); in _mm_sqrt_ps()
3297 return vreinterpretq_m128_f32( in _mm_sqrt_ss()
3306 return vreinterpretq_m128_f32(vrsqrteq_f32(vreinterpretq_f32_m128(in))); in _mm_rsqrt_ps()
3355 return vreinterpretq_m128_f32( in _mm_max_ps()
3422 return vreinterpretq_m128_f32( in _mm_min_ps()
3459 return vreinterpretq_m128_f32( in _mm_max_ss()
3469 return vreinterpretq_m128_f32( in _mm_min_ss()
3611 return vreinterpretq_m128_f32( in _mm_hadd_ps()
3618 return vreinterpretq_m128_f32( in _mm_hadd_ps()
3644 return vreinterpretq_m128_f32(vsubq_f32( in _mm_hsub_ps()
3650 return vreinterpretq_m128_f32(vsubq_f32(c.val[0], c.val[1])); in _mm_hsub_ps()
3809 return vreinterpretq_m128_f32(res); in _mm_dp_ps()
4294 return vreinterpretq_m128_f32( in _mm_cvt_pi2ps()
4310 return vreinterpretq_m128_f32( in _mm_cvt_si2ss()
4347 return vreinterpretq_m128_f32( in _mm_cvtpi16_ps()
4363 return vreinterpretq_m128_f32( in _mm_cvtpi32_ps()
4382 return vreinterpretq_m128_f32(vcvtq_f32_s32( in _mm_cvtpi32x2_ps()
4398 return vreinterpretq_m128_f32(vcvtq_f32_s32( in _mm_cvtpi8_ps()
4414 return vreinterpretq_m128_f32( in _mm_cvtpu16_ps()
4431 return vreinterpretq_m128_f32(vcvtq_f32_u32( in _mm_cvtpu8_ps()
4448 return vreinterpretq_m128_f32(vcvtq_f32_s32(vreinterpretq_s32_m128i(a))); in _mm_cvtepi32_ps()
4769 return vreinterpretq_m128_f32(vcombine_f32(tmp, vdup_n_f32(0))); in _mm_cvtpd_ps()
4827 return vreinterpretq_m128_f32(vbslq_f32(vreinterpretq_u32_m128(mask), in _mm_blendv_ps()
4841 return vreinterpretq_m128_f32(vrndnq_f32(vreinterpretq_f32_m128(a))); in _mm_round_ps()
4843 return vreinterpretq_m128_f32(vrndmq_f32(vreinterpretq_f32_m128(a))); in _mm_round_ps()
4845 return vreinterpretq_m128_f32(vrndpq_f32(vreinterpretq_f32_m128(a))); in _mm_round_ps()
4847 return vreinterpretq_m128_f32(vrndq_f32(vreinterpretq_f32_m128(a))); in _mm_round_ps()
4849 return vreinterpretq_m128_f32(vrndiq_f32(vreinterpretq_f32_m128(a))); in _mm_round_ps()
5102 return vreinterpretq_m128_f32( in _mm_unpacklo_ps()
5108 return vreinterpretq_m128_f32(vcombine_f32(result.val[0], result.val[1])); in _mm_unpacklo_ps()
5124 return vreinterpretq_m128_f32( in _mm_unpackhi_ps()
5130 return vreinterpretq_m128_f32(vcombine_f32(result.val[0], result.val[1])); in _mm_unpackhi_ps()
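
These hits all appear to come from sse2neon, the header that maps SSE intrinsics onto Arm NEON. There, __m128 is typedef'd directly to NEON's float32x4_t, so vreinterpretq_m128_f32 collapses to the identity macro shown at line 163; the name follows NEON's vreinterpretq_* convention so that every return site stays explicit about the float32x4_t -> __m128 conversion. A minimal sketch of the pattern, using the _mm_setzero_ps() hit at line 433 as the representative use site (the typedef and the companion unwrap macro are restated locally so the snippet stands alone):

#include <arm_neon.h>

/* Assumed configuration: __m128 is NEON's four-lane float vector,
 * which makes the "reinterpret" a compile-time no-op. */
typedef float32x4_t __m128;
#define vreinterpretq_m128_f32(x) (x) /* float32x4_t -> __m128 */
#define vreinterpretq_f32_m128(x) (x) /* __m128 -> float32x4_t */

/* Matches the hit at line 433: broadcast 0.0f into all four lanes. */
static inline __m128 _mm_setzero_ps(void)
{
    return vreinterpretq_m128_f32(vdupq_n_f32(0));
}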
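Most of the move/shuffle hits (lines 1184 through 1416) share one idiom: split each input into 64-bit float32x2_t halves with vget_low_f32/vget_high_f32 (or a rearranged pair), then stitch the result together with vcombine_f32. A sketch of the _mm_movelh_ps case from line 1197, assuming the usual sse2neon naming where a10 means lanes 1:0 of a (the vget_low_f32 setup is inferred from that convention, since the listing truncates the surrounding lines):

#include <arm_neon.h>

typedef float32x4_t __m128; /* as in the previous sketch */
#define vreinterpretq_m128_f32(x) (x)
#define vreinterpretq_f32_m128(x) (x)

/* MOVLHPS semantics: result = { a[0], a[1], b[0], b[1] }. */
static inline __m128 _mm_movelh_ps(__m128 a, __m128 b)
{
    float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(a)); /* lanes 1:0 of a */
    float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(b)); /* lanes 1:0 of b */
    return vreinterpretq_m128_f32(vcombine_f32(a10, b10));     /* line 1197 */
}

The _mm_movehl_ps hit at line 1184 is the mirror image: vcombine_f32(b32, a32) built from the two high halves.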
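The five consecutive _mm_round_ps hits (lines 4841-4849) map the SSE4.1 rounding modes one-to-one onto the ARMv8 vrnd*q_f32 family. A sketch of that dispatch, with the standard _MM_FROUND_* encodings restated locally; the real header also handles the _MM_FROUND_NO_EXC flag bits and pre-ARMv8 fallbacks, which this sketch ignores:

#include <arm_neon.h>

typedef float32x4_t __m128;
#define vreinterpretq_m128_f32(x) (x)
#define vreinterpretq_f32_m128(x) (x)

/* Standard SSE4.1 rounding-mode encodings, restated for the sketch. */
#define _MM_FROUND_TO_NEAREST_INT 0x00
#define _MM_FROUND_TO_NEG_INF     0x01
#define _MM_FROUND_TO_POS_INF     0x02
#define _MM_FROUND_TO_ZERO        0x03
#define _MM_FROUND_CUR_DIRECTION  0x04

static inline __m128 _mm_round_ps(__m128 a, int rounding)
{
    switch (rounding) {
    case _MM_FROUND_TO_NEAREST_INT: /* FRINTN: round to nearest, ties to even */
        return vreinterpretq_m128_f32(vrndnq_f32(vreinterpretq_f32_m128(a)));
    case _MM_FROUND_TO_NEG_INF:     /* FRINTM: floor */
        return vreinterpretq_m128_f32(vrndmq_f32(vreinterpretq_f32_m128(a)));
    case _MM_FROUND_TO_POS_INF:     /* FRINTP: ceil */
        return vreinterpretq_m128_f32(vrndpq_f32(vreinterpretq_f32_m128(a)));
    case _MM_FROUND_TO_ZERO:        /* FRINTZ: truncate */
        return vreinterpretq_m128_f32(vrndq_f32(vreinterpretq_f32_m128(a)));
    default:                        /* FRINTI: current FPCR rounding mode */
        return vreinterpretq_m128_f32(vrndiq_f32(vreinterpretq_f32_m128(a)));
    }
}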