Searched refs: _mm_load_sd (Results 1 – 13 of 13) sorted by relevance
82  __m128d m = _mm_load_sd(&x);  in _mesa_roundeven()
121  return _mm_cvtsd_si64(_mm_load_sd(&x));  in _mesa_lroundeven()
123  return _mm_cvtsd_si32(_mm_load_sd(&x));  in _mesa_lroundeven()
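The Mesa hits above work because _mm_cvtsd_si32() and _mm_cvtsd_si64() convert using the current SSE rounding mode, which defaults to round-to-nearest-even. A minimal sketch of the pattern, assuming SSE2 is available (lroundeven_sse2 is an illustrative name, not Mesa's):

    #include <emmintrin.h>  /* SSE2 */

    /* Round a double to the nearest integer, ties to even, using the
     * default MXCSR rounding mode instead of lrint()/fesetround(). */
    static long lroundeven_sse2(double x)
    {
        /* _mm_load_sd puts x in the low lane and zeroes the high lane. */
    #ifdef __x86_64__
        return _mm_cvtsd_si64(_mm_load_sd(&x));
    #else
        return _mm_cvtsd_si32(_mm_load_sd(&x));
    #endif
    }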
60  const __m128 vx = _mm_castpd_ps(_mm_load_sd((const double*) x));  in xnn_x32_zip_x3_ukernel__sse2()
63  const __m128 vy = _mm_castpd_ps(_mm_load_sd((const double*) y));  in xnn_x32_zip_x3_ukernel__sse2()
66  const __m128 vz = _mm_castpd_ps(_mm_load_sd((const double*) z));  in xnn_x32_zip_x3_ukernel__sse2()
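In the XNNPACK hits, _mm_load_sd() serves as a plain 64-bit load: the 8 bytes are then bit-cast so they read as two packed floats in the low half of a __m128, with the high half zeroed. A sketch of the idiom (load_2_floats is our name for it):

    #include <emmintrin.h>

    /* Load two contiguous floats into lanes 0-1 of a __m128; lanes 2-3
     * become zero. _mm_castpd_ps is a reinterpretation, not a conversion. */
    static __m128 load_2_floats(const float *p)
    {
        return _mm_castpd_ps(_mm_load_sd((const double *) p));
    }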
237  _mm_castpd_si128(_mm_load_sd((double *)(&sig[0])));  in vp8_denoiser_filter_uv_sse2()
241  _mm_castpd_si128(_mm_load_sd((double *)(&mc_running_avg[0])));  in vp8_denoiser_filter_uv_sse2()
319  _mm_castpd_si128(_mm_load_sd((double *)(&sig[0])));  in vp8_denoiser_filter_uv_sse2()
323  _mm_castpd_si128(_mm_load_sd((double *)(&mc_running_avg[0])));  in vp8_denoiser_filter_uv_sse2()
336  _mm_castpd_si128(_mm_load_sd((double *)(&running_avg[0])));  in vp8_denoiser_filter_uv_sse2()
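The libvpx denoiser uses the same trick to pull 8 bytes of pixel data into the low half of an integer vector; in effect it matches _mm_loadl_epi64(). A sketch (load_8_pixels is an illustrative name):

    #include <emmintrin.h>

    /* Load 8 bytes (e.g. one row of eight uint8_t pixels) into the low
     * 64 bits of an __m128i; the upper 64 bits are zeroed. */
    static __m128i load_8_pixels(const unsigned char *p)
    {
        return _mm_castpd_si128(_mm_load_sd((const double *) p));
    }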
1042  v0r = _mm_castpd_ps(_mm_load_sd((double *)v0[0]));  in calc_fixed_position()
1044  v1r = _mm_castpd_ps(_mm_load_sd((double *)v1[0]));  in calc_fixed_position()
1347 resd = _mm_load_sd(dp); in test_load()
347  …return vec4f_swizzle1(_mm_castpd_ps(_mm_load_sd(reinterpret_cast<const double*>(from))), 0, 0, 1, …
841  return _mm_move_sd(a, _mm_load_sd(&b));
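The second hit above combines _mm_load_sd() with _mm_move_sd(), which takes its low lane from the second operand and its high lane from the first. A sketch of the resulting "replace the low double" operation (set_low_lane is our name):

    #include <emmintrin.h>

    /* Return { b, a[1] }: load b into a fresh vector's low lane, then
     * merge it with a's high lane via _mm_move_sd. */
    static __m128d set_low_lane(__m128d a, double b)
    {
        return _mm_move_sd(a, _mm_load_sd(&b));
    }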
83  __m128 vLo = _mm_castpd_ps(_mm_load_sd((double*)pSrc));
203  __m128 vLo = _mm_castpd_ps(_mm_load_sd((double*)pSrc));
644 return _mm_load_sd(A); in test_mm_load_sd()
649 return _mm_load_sd(A); in test_mm_load_sd()
1724 _mm_load_sd(double const *__dp) in _mm_load_sd() function
519 _mm_load_sd(double const *__dp) in _mm_load_sd() function
149 _mm_load_sd (double const *__P) in _mm_load_sd() function
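The header definitions above all reduce to the same documented semantics: a two-element vector with *__dp in the low lane and +0.0 in the high lane. A hedged plain-C sketch of that expansion (the real headers add inline/target attributes and aliasing barriers that are omitted here):

    /* GNU C vector extension; accepted by GCC and Clang. */
    typedef double v2df __attribute__((vector_size(16)));

    static inline v2df load_sd_sketch(const double *dp)
    {
        return (v2df){ *dp, 0.0 };
    }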
880 FORCE_INLINE __m128d _mm_load_sd(const double *p) in _mm_load_sd() function
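The last hit is a NEON shim in the style of sse2neon. Assuming __m128d maps to float64x2_t on AArch64, as sse2neon does, an implementation could look like the sketch below (not sse2neon's verbatim code; m128d_sketch and mm_load_sd_sketch are stand-in names):

    #include <arm_neon.h>

    typedef float64x2_t m128d_sketch;  /* stand-in for the shim's __m128d */

    static inline m128d_sketch mm_load_sd_sketch(const double *p)
    {
        /* Low lane = *p, high lane = 0.0, matching Intel's semantics. */
        return vsetq_lane_f64(*p, vdupq_n_f64(0.0), 0);
    }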