
Searched refs: _mm256_unpackhi_epi64 (Results 1 – 16 of 16) sorted by relevance
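
For reference, _mm256_unpackhi_epi64 (VPUNPCKHQDQ) interleaves the upper 64-bit element of each 128-bit lane of its two operands, producing {a1, b1, a3, b3}; the matches below are mostly transpose and horizontal-reduction code built on this. A minimal sketch of the semantics (illustrative values only, requires AVX2, e.g. -mavx2):

    #include <immintrin.h>
    #include <stdio.h>

    int main(void) {
      __m256i a = _mm256_setr_epi64x(0, 1, 2, 3);     /* {a0, a1, a2, a3} */
      __m256i b = _mm256_setr_epi64x(10, 11, 12, 13); /* {b0, b1, b2, b3} */

      /* Within each 128-bit lane, pair the high 64-bit element of a with
         the high 64-bit element of b: the result is {a1, b1, a3, b3}. */
      __m256i hi = _mm256_unpackhi_epi64(a, b);

      long long out[4];
      _mm256_storeu_si256((__m256i *)out, hi);
      printf("%lld %lld %lld %lld\n", out[0], out[1], out[2], out[3]); /* 1 11 3 13 */
      return 0;
    }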

/external/libaom/libaom/aom_dsp/x86/
common_avx2.h:94  tr0_1 = _mm256_unpackhi_epi64(tr1_0, tr1_4); in mm256_transpose_16x16()
96 tr0_3 = _mm256_unpackhi_epi64(tr1_1, tr1_5); in mm256_transpose_16x16()
98 tr0_5 = _mm256_unpackhi_epi64(tr1_2, tr1_6); in mm256_transpose_16x16()
100 tr0_7 = _mm256_unpackhi_epi64(tr1_3, tr1_7); in mm256_transpose_16x16()
103 tr0_9 = _mm256_unpackhi_epi64(tr1_8, tr1_c); in mm256_transpose_16x16()
105 tr0_b = _mm256_unpackhi_epi64(tr1_9, tr1_d); in mm256_transpose_16x16()
107 tr0_d = _mm256_unpackhi_epi64(tr1_a, tr1_e); in mm256_transpose_16x16()
109 tr0_f = _mm256_unpackhi_epi64(tr1_b, tr1_f); in mm256_transpose_16x16()
txfm_common_avx2.h:148  c[i / 2 + 8] = _mm256_unpackhi_epi64(b[i], b[i + 1]); in transpose_16bit_16x16_avx2()
192 out[1] = _mm256_unpackhi_epi64(b0, b1); in transpose_16bit_16x8_avx2()
194 out[3] = _mm256_unpackhi_epi64(b4, b5); in transpose_16bit_16x8_avx2()
196 out[5] = _mm256_unpackhi_epi64(b2, b3); in transpose_16bit_16x8_avx2()
198 out[7] = _mm256_unpackhi_epi64(b6, b7); in transpose_16bit_16x8_avx2()
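
The common_avx2.h and txfm_common_avx2.h hits above use the 64-bit unpack as the last in-lane step of a register transpose: elements are interleaved at 16-, 32- and finally 64-bit granularity before a cross-lane permute. A hedged, self-contained sketch of the same ladder for a 4x4 block of 64-bit elements (not the library's own helper; names are illustrative):

    #include <immintrin.h>

    /* Transpose a 4x4 matrix of 64-bit integers held as four row registers. */
    static inline void transpose_4x4_epi64(__m256i r[4]) {
      /* In-lane interleave: t0/t2 collect even columns, t1/t3 odd columns. */
      __m256i t0 = _mm256_unpacklo_epi64(r[0], r[1]); /* {r00, r10, r02, r12} */
      __m256i t1 = _mm256_unpackhi_epi64(r[0], r[1]); /* {r01, r11, r03, r13} */
      __m256i t2 = _mm256_unpacklo_epi64(r[2], r[3]); /* {r20, r30, r22, r32} */
      __m256i t3 = _mm256_unpackhi_epi64(r[2], r[3]); /* {r21, r31, r23, r33} */

      /* Cross-lane step: combine matching 128-bit halves into full columns. */
      r[0] = _mm256_permute2x128_si256(t0, t2, 0x20); /* column 0 */
      r[1] = _mm256_permute2x128_si256(t1, t3, 0x20); /* column 1 */
      r[2] = _mm256_permute2x128_si256(t0, t2, 0x31); /* column 2 */
      r[3] = _mm256_permute2x128_si256(t1, t3, 0x31); /* column 3 */
    }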
avg_intrin_avx2.c:76  in[1] = _mm256_unpackhi_epi64(b0, b1); in hadamard_col8x2_avx2()
78 in[3] = _mm256_unpackhi_epi64(b2, b3); in hadamard_col8x2_avx2()
80 in[5] = _mm256_unpackhi_epi64(b4, b5); in hadamard_col8x2_avx2()
82 in[7] = _mm256_unpackhi_epi64(b6, b7); in hadamard_col8x2_avx2()
sad4d_avx2.c:72  sum_mhigh = _mm256_unpackhi_epi64(sum_ref0, sum_ref2); in aom_sad32x32x4d_avx2()
157 sum_mhigh = _mm256_unpackhi_epi64(sum_ref0, sum_ref2); in aom_sad64x64x4d_avx2()
intrapred_avx2.c:26  u0 = _mm256_unpackhi_epi64(y0, y0); in dc_sum_64()
36 u = _mm256_unpackhi_epi64(y, y); in dc_sum_32()
123 d[1] = _mm256_unpackhi_epi64(ww0, ww1); // 01 11 21 31 41 51 61 71 in highbd_transpose4x16_avx2()
129 d[3] = _mm256_unpackhi_epi64(ww0, ww1); // 03 13 23 33 43 53 63 73 in highbd_transpose4x16_avx2()
144 d[1] = _mm256_unpackhi_epi64(ww0, ww1); // 01 11 21 31 41 51 61 71 in highbd_transpose8x16_16x8_avx2()
150 d[3] = _mm256_unpackhi_epi64(ww0, ww1); // 03 13 23 33 43 53 63 73 in highbd_transpose8x16_16x8_avx2()
161 d[5] = _mm256_unpackhi_epi64(ww0, ww1); // 05 15 25 35 45 55 65 75 in highbd_transpose8x16_16x8_avx2()
167 d[7] = _mm256_unpackhi_epi64(ww0, ww1); // 07 17 27 37 47 57 67 77 in highbd_transpose8x16_16x8_avx2()
182 dd[1] = _mm256_unpackhi_epi64(ww0, ww1); in highbd_transpose16x16_avx2()
188 dd[3] = _mm256_unpackhi_epi64(ww0, ww1); in highbd_transpose16x16_avx2()
[all …]
sad_highbd_avx2.c:732  u3 = _mm256_unpackhi_epi64(u0, u2); in get_4d_sad_from_mm256_epi32()
/external/libvpx/libvpx/vpx_dsp/x86/
avg_intrin_avx2.c:68  b2 = _mm256_unpackhi_epi64(a0, a1); in highbd_hadamard_col8_avx2()
69 b3 = _mm256_unpackhi_epi64(a4, a5); in highbd_hadamard_col8_avx2()
72 b6 = _mm256_unpackhi_epi64(a2, a3); in highbd_hadamard_col8_avx2()
73 b7 = _mm256_unpackhi_epi64(a6, a7); in highbd_hadamard_col8_avx2()
278 in[1] = _mm256_unpackhi_epi64(b0, b1); in hadamard_col8x2_avx2()
280 in[3] = _mm256_unpackhi_epi64(b2, b3); in hadamard_col8x2_avx2()
282 in[5] = _mm256_unpackhi_epi64(b4, b5); in hadamard_col8x2_avx2()
284 in[7] = _mm256_unpackhi_epi64(b6, b7); in hadamard_col8x2_avx2()
fwd_dct32x32_impl_avx2.h:2833  __m256i tr2_1 = _mm256_unpackhi_epi64(tr1_0, tr1_4); in FDCT32x32_2D_AVX2()
2835 __m256i tr2_3 = _mm256_unpackhi_epi64(tr1_2, tr1_6); in FDCT32x32_2D_AVX2()
2837 __m256i tr2_5 = _mm256_unpackhi_epi64(tr1_1, tr1_5); in FDCT32x32_2D_AVX2()
2839 __m256i tr2_7 = _mm256_unpackhi_epi64(tr1_3, tr1_7); in FDCT32x32_2D_AVX2()
/external/libaom/libaom/av1/encoder/x86/
corner_match_avx2.c:67  __m256i unp_hig = _mm256_unpackhi_epi64(sumsq2_vec, cross_vec); in compute_cross_correlation_avx2()
highbd_fwd_txfm_avx2.c:168  x0 = _mm256_unpackhi_epi64(u0, u2); in av1_fwd_txfm_transpose_8x8_avx2()
169 x1 = _mm256_unpackhi_epi64(u4, u6); in av1_fwd_txfm_transpose_8x8_avx2()
178 x0 = _mm256_unpackhi_epi64(u1, u3); in av1_fwd_txfm_transpose_8x8_avx2()
179 x1 = _mm256_unpackhi_epi64(u5, u7); in av1_fwd_txfm_transpose_8x8_avx2()
/external/eigen/Eigen/src/Core/arch/CUDA/
PacketMathHalf.h:587  __m256i abcdefgh_23 = _mm256_unpackhi_epi64(abcd_03, efgh_03);
589 __m256i ijklmnop_23 = _mm256_unpackhi_epi64(ijkl_03, mnop_03);
591 __m256i abcdefgh_67 = _mm256_unpackhi_epi64(abcd_47, efgh_47);
593 __m256i ijklmnop_67 = _mm256_unpackhi_epi64(ijkl_47, mnop_47);
595 __m256i abcdefgh_ab = _mm256_unpackhi_epi64(abcd_8b, efgh_8b);
597 __m256i ijklmnop_ab = _mm256_unpackhi_epi64(ijkl_8b, mnop_8b);
599 __m256i abcdefgh_ef = _mm256_unpackhi_epi64(abcd_cf, efgh_cf);
601 __m256i ijklmnop_ef = _mm256_unpackhi_epi64(ijkl_cf, mnop_cf);
/external/libaom/libaom/aom_dsp/simd/
v256_intrinsics_x86.h:184  return _mm256_unpackhi_epi64( in v256_ziphi_64()
457 v256 t = _mm256_add_epi32(s, _mm256_unpackhi_epi64(s, s)); in v256_sad_u8_sum()
487 v256 t = _mm256_add_epi32(s, _mm256_unpackhi_epi64(s, s)); in v256_ssd_u8_sum()
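
v256_sad_u8_sum() and v256_ssd_u8_sum() above fold the upper 64 bits of each lane onto the lower with _mm256_add_epi32(s, _mm256_unpackhi_epi64(s, s)) as one step of a horizontal reduction. A hedged sketch of a complete 8-way 32-bit horizontal sum built around that step (an illustrative helper, not the library's own):

    #include <immintrin.h>

    static inline int hsum_epi32_avx2(__m256i v) {
      /* Fold the upper 64 bits of each 128-bit lane onto the lower 64 bits. */
      v = _mm256_add_epi32(v, _mm256_unpackhi_epi64(v, v));
      /* Fold the remaining 32-bit pair within each lane. */
      v = _mm256_add_epi32(v, _mm256_shuffle_epi32(v, _MM_SHUFFLE(1, 1, 1, 1)));
      /* Add the two 128-bit lanes and extract the scalar result. */
      __m128i lo = _mm256_castsi256_si128(v);
      __m128i hi = _mm256_extracti128_si256(v, 1);
      return _mm_cvtsi128_si32(_mm_add_epi32(lo, hi));
    }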
/external/libaom/libaom/av1/common/x86/
highbd_inv_txfm_avx2.c:140  x0 = _mm256_unpackhi_epi64(u0, u2); in transpose_8x8_avx2()
141 x1 = _mm256_unpackhi_epi64(u4, u6); in transpose_8x8_avx2()
150 x0 = _mm256_unpackhi_epi64(u1, u3); in transpose_8x8_avx2()
151 x1 = _mm256_unpackhi_epi64(u5, u7); in transpose_8x8_avx2()
177 x0 = _mm256_unpackhi_epi64(u0, u2); in transpose_8x8_flip_avx2()
178 x1 = _mm256_unpackhi_epi64(u4, u6); in transpose_8x8_flip_avx2()
187 x0 = _mm256_unpackhi_epi64(u1, u3); in transpose_8x8_flip_avx2()
188 x1 = _mm256_unpackhi_epi64(u5, u7); in transpose_8x8_flip_avx2()
/external/clang/test/CodeGen/
avx2-builtins.c:1195  return _mm256_unpackhi_epi64(a, b); in test_mm256_unpackhi_epi64()
/external/clang/lib/Headers/
avx2intrin.h:797  _mm256_unpackhi_epi64(__m256i __a, __m256i __b) in _mm256_unpackhi_epi64() function
avx512vlintrin.h:6911  (__v4di)_mm256_unpackhi_epi64(__A, __B), in _mm256_mask_unpackhi_epi64()
6919 (__v4di)_mm256_unpackhi_epi64(__A, __B), in _mm256_maskz_unpackhi_epi64()
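
The avx512vlintrin.h hits wrap the same unpack in the AVX-512VL mask/maskz forms: the interleaved result is blended with a source vector (mask form) or with zero (maskz form) under a per-qword mask. A hedged sketch, assuming AVX512F + AVX512VL are available (e.g. -mavx512vl); values are illustrative only:

    #include <immintrin.h>

    void masked_unpackhi_demo(void) {
      __m256i a   = _mm256_setr_epi64x(0, 1, 2, 3);
      __m256i b   = _mm256_setr_epi64x(10, 11, 12, 13);
      __m256i src = _mm256_set1_epi64x(-1);

      /* Plain form: {a1, b1, a3, b3} = {1, 11, 3, 13}. */
      __m256i u = _mm256_unpackhi_epi64(a, b);

      /* Mask form: lanes whose mask bit is 0 keep the element from src.
         With mask 0x5 (lanes 0 and 2 active): {1, -1, 3, -1}. */
      __m256i m = _mm256_mask_unpackhi_epi64(src, 0x5, a, b);

      /* Zero-mask form: lanes whose mask bit is 0 become zero: {1, 0, 3, 0}. */
      __m256i z = _mm256_maskz_unpackhi_epi64(0x5, a, b);

      (void)u; (void)m; (void)z;
    }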