Searched refs:xmm_tmp2 (Results 1 – 6 of 6) sorted by relevance
/external/libopus/silk/fixed/x86/burg_modified_FIX_sse4_1.c
    214  __m128i xmm_tmp1, xmm_tmp2;                                           in silk_burg_modified_sse4_1() local
    219  xmm_tmp2 = _mm_set1_epi32( tmp2 );                                    in silk_burg_modified_sse4_1()
    236  xmm_x_ptr_sub_x2x0 = _mm_mul_epi32( xmm_x_ptr_sub_x2x0, xmm_tmp2 );   in silk_burg_modified_sse4_1()
    237  xmm_x_ptr_sub_x3x1 = _mm_mul_epi32( xmm_x_ptr_sub_x3x1, xmm_tmp2 );   in silk_burg_modified_sse4_1()
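For context, the libopus hit above broadcasts the scalar tmp2 into every lane with _mm_set1_epi32 and then multiplies with _mm_mul_epi32 (SSE4.1), which takes only the signed 32-bit values in lanes 0 and 2 of each operand and produces two 64-bit products. A minimal standalone sketch of that pattern follows; the values and variable names are illustrative, this is not the libopus code itself:

    #include <smmintrin.h>  /* SSE4.1: _mm_mul_epi32 */
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        /* operands placed in the even (0 and 2) 32-bit lanes, as pmuldq reads them */
        int32_t x[4] = { 100000, 0, -70000, 0 };
        int32_t tmp2 = 300000;

        __m128i xmm_x    = _mm_loadu_si128((const __m128i *)x);
        __m128i xmm_tmp2 = _mm_set1_epi32(tmp2);        /* tmp2 in all four lanes */

        /* signed 32x32 -> 64-bit multiply of lanes 0 and 2 */
        __m128i prod = _mm_mul_epi32(xmm_x, xmm_tmp2);

        int64_t out[2];
        _mm_storeu_si128((__m128i *)out, prod);
        printf("%lld %lld\n", (long long)out[0], (long long)out[1]);
        return 0;
    }

The products (30000000000 and -21000000000) overflow 32 bits, which is why the widening form is used here rather than a plain 32-bit multiply.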
/external/angle/third_party/zlib/crc_folding.c
    235  __m128i xmm_shl, xmm_shr, xmm_tmp1, xmm_tmp2, xmm_tmp3;   in partial_fold() local
    250  xmm_tmp2 = _mm_shuffle_epi8(*xmm_crc2, xmm_shl);          in partial_fold()
    251  *xmm_crc1 = _mm_or_si128(*xmm_crc1, xmm_tmp2);            in partial_fold()
/external/rust/crates/libz-sys/src/zlib-ng/arch/x86/crc_folding.c
    191  __m128i xmm_shl, xmm_shr, xmm_tmp1, xmm_tmp2, xmm_tmp3;   in partial_fold() local
    206  xmm_tmp2 = _mm_shuffle_epi8(*xmm_crc2, xmm_shl);          in partial_fold()
    207  *xmm_crc1 = _mm_or_si128(*xmm_crc1, xmm_tmp2);            in partial_fold()
/external/zlib/crc_folding.c
    235  __m128i xmm_shl, xmm_shr, xmm_tmp1, xmm_tmp2, xmm_tmp3;   in partial_fold() local
    250  xmm_tmp2 = _mm_shuffle_epi8(*xmm_crc2, xmm_shl);          in partial_fold()
    251  *xmm_crc1 = _mm_or_si128(*xmm_crc1, xmm_tmp2);            in partial_fold()
/external/zlib/patches/0001-simd.patch
    281  + __m128i xmm_shl, xmm_shr, xmm_tmp1, xmm_tmp2, xmm_tmp3;
    296  + xmm_tmp2 = _mm_shuffle_epi8(*xmm_crc2, xmm_shl);
    297  + *xmm_crc1 = _mm_or_si128(*xmm_crc1, xmm_tmp2);
/external/angle/third_party/zlib/patches/0001-simd.patch
    281  + __m128i xmm_shl, xmm_shr, xmm_tmp1, xmm_tmp2, xmm_tmp3;
    296  + xmm_tmp2 = _mm_shuffle_epi8(*xmm_crc2, xmm_shl);
    297  + *xmm_crc1 = _mm_or_si128(*xmm_crc1, xmm_tmp2);
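All of the crc_folding.c hits above (and the two patch hunks, which add the same code) show one idiom: partial_fold shifts bytes across the 128-bit CRC accumulators with _mm_shuffle_epi8 masks (xmm_shl/xmm_shr) and merges the pieces with _mm_or_si128. The sketch below is an assumed, simplified illustration of that byte-splice technique, not the zlib code; the mask construction and variable names are illustrative only:

    #include <tmmintrin.h>  /* SSSE3: _mm_shuffle_epi8 */
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        const int len = 5;  /* take 'len' low bytes from b, fill the rest from a */
        uint8_t a[16], b[16], mask_lo[16], mask_hi[16], out[16];

        for (int i = 0; i < 16; i++) {
            a[i] = (uint8_t)(0xA0 + i);
            b[i] = (uint8_t)(0xB0 + i);
            /* keep b[0..len-1]; a mask byte with its high bit set yields zero */
            mask_lo[i] = (i < len) ? (uint8_t)i : 0x80;
            /* shift a up by 'len' bytes so it lands above the kept bytes of b */
            mask_hi[i] = (i >= len) ? (uint8_t)(i - len) : 0x80;
        }

        __m128i va   = _mm_loadu_si128((const __m128i *)a);
        __m128i vb   = _mm_loadu_si128((const __m128i *)b);
        __m128i lo   = _mm_shuffle_epi8(vb, _mm_loadu_si128((const __m128i *)mask_lo));
        __m128i hi   = _mm_shuffle_epi8(va, _mm_loadu_si128((const __m128i *)mask_hi));
        __m128i both = _mm_or_si128(lo, hi);   /* spliced 128-bit result */

        _mm_storeu_si128((__m128i *)out, both);
        for (int i = 0; i < 16; i++)
            printf("%02x ", out[i]);
        printf("\n");
        return 0;
    }

The OR works because each shuffle zeroes the lanes the other one fills, so the two partial registers can be combined without masking.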