Searched refs:_mm_loadu_si128 (Results 1 – 15 of 15) sorted by relevance
71   const __m128i out0 = _mm_loadu_si128((__m128i*)&out[0]);  in CollectHistogramSSE2()
72   const __m128i out1 = _mm_loadu_si128((__m128i*)&out[8]);  in CollectHistogramSSE2()
468  const __m128i a0 = _mm_loadu_si128((__m128i*)&a[BPS * 0]);  in SSE_Nx4SSE2()
469  const __m128i a1 = _mm_loadu_si128((__m128i*)&a[BPS * 1]);  in SSE_Nx4SSE2()
470  const __m128i a2 = _mm_loadu_si128((__m128i*)&a[BPS * 2]);  in SSE_Nx4SSE2()
471  const __m128i a3 = _mm_loadu_si128((__m128i*)&a[BPS * 3]);  in SSE_Nx4SSE2()
472  const __m128i b0 = _mm_loadu_si128((__m128i*)&b[BPS * 0]);  in SSE_Nx4SSE2()
473  const __m128i b1 = _mm_loadu_si128((__m128i*)&b[BPS * 1]);  in SSE_Nx4SSE2()
474  const __m128i b2 = _mm_loadu_si128((__m128i*)&b[BPS * 2]);  in SSE_Nx4SSE2()
475  const __m128i b3 = _mm_loadu_si128((__m128i*)&b[BPS * 3]);  in SSE_Nx4SSE2()
[all …]
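Note: the SSE_Nx4SSE2() hits above show the typical block-distortion pattern: four rows of each block are pulled in with unaligned loads at a fixed row stride (BPS), then reduced to a single metric. The sketch below is not libwebp's code; it reuses that load pattern but accumulates a sum of absolute differences with _mm_sad_epu8 instead of the squared error the real function computes, and takes the stride as a plain parameter.

#include <emmintrin.h>
#include <stdint.h>

// Hedged sketch: SAD over two 4-row, 16-byte-wide blocks with row stride 'bps'.
static uint32_t SadNx4_SSE2(const uint8_t* a, const uint8_t* b, int bps) {
  __m128i acc = _mm_setzero_si128();
  for (int y = 0; y < 4; ++y) {
    // Neither block is required to be 16-byte aligned, hence the loadu.
    const __m128i ra = _mm_loadu_si128((const __m128i*)&a[bps * y]);
    const __m128i rb = _mm_loadu_si128((const __m128i*)&b[bps * y]);
    // _mm_sad_epu8 yields two 64-bit partial sums of |ra - rb| per row.
    acc = _mm_add_epi64(acc, _mm_sad_epu8(ra, rb));
  }
  acc = _mm_add_epi64(acc, _mm_srli_si128(acc, 8));  // fold the two halves
  return (uint32_t)_mm_cvtsi128_si32(acc);
}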
65   const __m128i a = _mm_loadu_si128((__m128i*)&(r1)[0]); \
66   const __m128i b = _mm_loadu_si128((__m128i*)&(r1)[1]); \
67   const __m128i c = _mm_loadu_si128((__m128i*)&(r2)[0]); \
68   const __m128i d = _mm_loadu_si128((__m128i*)&(r2)[1]); \
599  __m128i p1 = _mm_loadu_si128((__m128i*)&p[-2 * stride]);  in SimpleVFilter16SSE2()
600  __m128i p0 = _mm_loadu_si128((__m128i*)&p[-stride]);  in SimpleVFilter16SSE2()
601  __m128i q0 = _mm_loadu_si128((__m128i*)&p[0]);  in SimpleVFilter16SSE2()
602  __m128i q1 = _mm_loadu_si128((__m128i*)&p[stride]);  in SimpleVFilter16SSE2()
653  e1 = _mm_loadu_si128((__m128i*)&(p)[0 * stride]); \
654  e2 = _mm_loadu_si128((__m128i*)&(p)[1 * stride]); \
655  e3 = _mm_loadu_si128((__m128i*)&(p)[2 * stride]); \
656  e4 = _mm_loadu_si128((__m128i*)&(p)[3 * stride]); \
722  const __m128i in = _mm_loadu_si128((__m128i*)&argb_data[i]);  in VP8LSubtractGreenFromBlueAndRed()
750  const __m128i in = _mm_loadu_si128((__m128i*)data);  in AddGreenToBlueAndRed()
269  m_XMM0 = _mm_loadu_si128( (__m128i *)&0[vec1L] );  in bbs_dotProduct_u128SSE2()
272  m_XMM2 = _mm_loadu_si128( (__m128i *)&0[vec2L] );  in bbs_dotProduct_u128SSE2()
274  m_XMM6 = _mm_loadu_si128( (__m128i *)&8[vec1L] );  in bbs_dotProduct_u128SSE2()
280  m_XMM3 = _mm_loadu_si128( (__m128i *)&8[vec2L] );  in bbs_dotProduct_u128SSE2()
292  m_XMM0 = _mm_loadu_si128( (__m128i *)&m_XMM5 );  in bbs_dotProduct_u128SSE2()
52  const __m128i v_sig = _mm_loadu_si128((__m128i *)(&sig[0]));  in vp8_denoiser_filter_sse2()
53  const __m128i v_mc_running_avg_y = _mm_loadu_si128(  in vp8_denoiser_filter_sse2()
48   __m128i src_pixel = _mm_loadu_si128(s);  in S32_Blend_BlitRow32_SSE2()
136  __m128i src_pixel = _mm_loadu_si128(s);  in S32A_Opaque_BlitRow32_SSE2()
184  __m128i src_pixel = _mm_loadu_si128(s);  in S32A_Opaque_BlitRow32_SSE2()
261  __m128i src_pixel = _mm_loadu_si128(s);  in S32A_Blend_BlitRow32_SSE2()
376  __m128i src_pixel = _mm_loadu_si128(s);  in Color32_SSE2()
49 const __m128i xx = _mm_loadu_si128(reinterpret_cast<const __m128i *>(xy)); in PrepareConstantsTwoPixelPairs()
49   __m128i src_pixel = _mm_loadu_si128(s);  in S32_Blend_BlitRow32_SSE2()
137  __m128i src_pixel = _mm_loadu_si128(s);  in S32A_Opaque_BlitRow32_SSE2()
185  __m128i src_pixel = _mm_loadu_si128(s);  in S32A_Opaque_BlitRow32_SSE2()
262  __m128i src_pixel = _mm_loadu_si128(s);  in S32A_Blend_BlitRow32_SSE2()
377  __m128i src_pixel = _mm_loadu_si128(s);  in Color32_SSE2()
49  const __m128i xx = _mm_loadu_si128(reinterpret_cast<const __m128i *>(xy));  in PrepareConstantsTwoPixelPairs()
95  _mm_loadu_si128(reinterpret_cast<const __m128i *>(xy));  in PrepareConstantsTwoPixelPairsDXDY()
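Note: the Skia blit-row hits (S32_Blend_BlitRow32_SSE2, S32A_Opaque_BlitRow32_SSE2, Color32_SSE2) all share the same loop shape: walk the source four 32-bit pixels at a time, pull each group in with an unaligned load, transform it, and store the result. The sketch below is not Skia's code; the function name and the trivial "force alpha opaque" transform are made up, and the pixel count is assumed to be a multiple of four to keep it short.

#include <emmintrin.h>
#include <stdint.h>

// Hedged sketch of the 4-pixels-per-iteration blit-row loop shape.
static void ForceOpaqueRow_SSE2(uint32_t* dst, const uint32_t* src, int count) {
  const __m128i alpha = _mm_set1_epi32((int)0xFF000000u);
  for (int i = 0; i < count; i += 4) {           // count assumed % 4 == 0
    __m128i src_pixel = _mm_loadu_si128((const __m128i*)(src + i));
    src_pixel = _mm_or_si128(src_pixel, alpha);  // set the top (alpha) byte
    _mm_storeu_si128((__m128i*)(dst + i), src_pixel);
  }
}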
222  …du<Packet4i>(const int* from) { EIGEN_DEBUG_UNALIGNED_LOAD return _mm_loadu_si128(reinterpret_c…
269  return _mm_loadu_si128(reinterpret_cast<const Packet4i*>(from));
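Note: the Eigen hits above come from its SSE packet wrappers, where the unaligned integer load (ploadu<Packet4i>) boils down to _mm_loadu_si128 while the aligned variant uses _mm_load_si128. The sketch below is not Eigen's source; the wrapper names are hypothetical, but it shows the usual aligned/unaligned split such wrappers expose.

#include <emmintrin.h>

typedef __m128i Packet4i;

// Hypothetical names; Eigen spells these pload<Packet4i> / ploadu<Packet4i>.
static inline Packet4i load4i_aligned(const int* from) {
  // Caller must guarantee 'from' is 16-byte aligned.
  return _mm_load_si128(reinterpret_cast<const Packet4i*>(from));
}

static inline Packet4i load4i_unaligned(const int* from) {
  // No alignment requirement; this is what the ploadu hit above compiles to.
  return _mm_loadu_si128(reinterpret_cast<const Packet4i*>(from));
}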
571   _mm_loadu_si128(reinterpret_cast<const __m128i *>(current));  in FindColonsAndParseIntoKeyValue()
1027  _mm_loadu_si128(const_cast<__m128i *>(  in ProcessHeaders()
267 __m128i rv = _mm_loadu_si128((const __m128i*)(rptr + d)); in icvFindStereoCorrespondenceBM_SSE2()
1061 _mm_loadu_si128(__m128i const *__p) in _mm_loadu_si128() function
17 _mm_loadu_si128((__m128 *) (___m128i_shift_right + offset)));
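Note: result line 1061 above is the definition of the intrinsic itself. _mm_loadu_si128 is the SSE2 unaligned 128-bit load declared in emmintrin.h; it takes a __m128i const * and, unlike _mm_load_si128, places no 16-byte alignment requirement on it. A minimal, self-contained usage sketch (not taken from any of the files above):

#include <emmintrin.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
  uint8_t buf[32];
  for (int i = 0; i < 32; ++i) buf[i] = (uint8_t)i;

  // buf + 1 is almost certainly not 16-byte aligned; _mm_loadu_si128 accepts
  // it, whereas _mm_load_si128 could fault here.
  const __m128i v = _mm_loadu_si128((const __m128i*)(buf + 1));

  uint8_t out[16];
  _mm_storeu_si128((__m128i*)out, v);
  for (int i = 0; i < 16; ++i) printf("%u ", (unsigned)out[i]);  // 1 2 ... 16
  printf("\n");
  return 0;
}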