/external/libvpx/libvpx/vpx_dsp/x86/
D | quantize_avx.c
      37  __m128i cmp_mask0, cmp_mask1;  in vpx_quantize_b_avx() local
      57  cmp_mask0 = _mm_cmpgt_epi16(qcoeff0, zbin);  in vpx_quantize_b_avx()
      61  all_zero = _mm_or_si128(cmp_mask0, cmp_mask1);  in vpx_quantize_b_avx()
      88  qcoeff0 = _mm_and_si128(qcoeff0, cmp_mask0);  in vpx_quantize_b_avx()
      99  scan_for_eob(&qcoeff0, &qcoeff1, cmp_mask0, cmp_mask1, iscan, 0, zero);  in vpx_quantize_b_avx()
     110  cmp_mask0 = _mm_cmpgt_epi16(qcoeff0, zbin);  in vpx_quantize_b_avx()
     113  all_zero = _mm_or_si128(cmp_mask0, cmp_mask1);  in vpx_quantize_b_avx()
     130  qcoeff0 = _mm_and_si128(qcoeff0, cmp_mask0);  in vpx_quantize_b_avx()
     139  eob0 = scan_for_eob(&qcoeff0, &qcoeff1, cmp_mask0, cmp_mask1, iscan, index,  in vpx_quantize_b_avx()
     163  __m128i cmp_mask0, cmp_mask1;  in vpx_quantize_b_32x32_avx() local
     [all …]
D | quantize_ssse3.c
      33  __m128i cmp_mask0, cmp_mask1;  in vpx_quantize_b_ssse3() local
      50  cmp_mask0 = _mm_cmpgt_epi16(qcoeff0, zbin);  in vpx_quantize_b_ssse3()
      65  qcoeff0 = _mm_and_si128(qcoeff0, cmp_mask0);  in vpx_quantize_b_ssse3()
      75  eob = scan_for_eob(&qcoeff0, &qcoeff1, cmp_mask0, cmp_mask1, iscan, 0, zero);  in vpx_quantize_b_ssse3()
      85  cmp_mask0 = _mm_cmpgt_epi16(qcoeff0, zbin);  in vpx_quantize_b_ssse3()
      94  qcoeff0 = _mm_and_si128(qcoeff0, cmp_mask0);  in vpx_quantize_b_ssse3()
     103  eob0 = scan_for_eob(&qcoeff0, &qcoeff1, cmp_mask0, cmp_mask1, iscan, index,  in vpx_quantize_b_ssse3()
     128  __m128i cmp_mask0, cmp_mask1;  in vpx_quantize_b_32x32_ssse3() local
     166  cmp_mask0 = _mm_cmpgt_epi16(qcoeff0, zbin);  in vpx_quantize_b_32x32_ssse3()
     170  all_zero = _mm_or_si128(cmp_mask0, cmp_mask1);  in vpx_quantize_b_32x32_ssse3()
     [all …]
D | quantize_sse2.c
      33  __m128i cmp_mask0, cmp_mask1;  in vpx_quantize_b_sse2() local
      54  cmp_mask0 = _mm_cmpgt_epi16(qcoeff0, zbin);  in vpx_quantize_b_sse2()
      71  qcoeff0 = _mm_and_si128(qcoeff0, cmp_mask0);  in vpx_quantize_b_sse2()
      81  eob = scan_for_eob(&qcoeff0, &qcoeff1, cmp_mask0, cmp_mask1, iscan, 0, zero);  in vpx_quantize_b_sse2()
      93  cmp_mask0 = _mm_cmpgt_epi16(qcoeff0, zbin);  in vpx_quantize_b_sse2()
     102  qcoeff0 = _mm_and_si128(qcoeff0, cmp_mask0);  in vpx_quantize_b_sse2()
     111  eob0 = scan_for_eob(&qcoeff0, &qcoeff1, cmp_mask0, cmp_mask1, iscan, index,  in vpx_quantize_b_sse2()
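All three libvpx kernels above share one zero-bin masking pattern: compare the 16-bit absolute coefficients against the zbin threshold, zero the lanes that fail, and OR the masks to decide whether the whole 16-coefficient group can be skipped. A minimal sketch of that pattern follows; the helper name mask_against_zbin is ours, not libvpx's, and it assumes qcoeff0/qcoeff1 already hold absolute coefficient values (the real kernels also keep a DC-specific threshold in lane 0 of zbin for the first group, which the sketch ignores).

    #include <emmintrin.h> /* SSE2 */

    /* Sketch only, not the libvpx code. Lanes at or below the zbin
     * threshold are forced to zero; the return value is nonzero iff at
     * least one of the 16 coefficients survives, which is what lets the
     * kernels skip the quantize/dequantize arithmetic for dead groups. */
    static int mask_against_zbin(__m128i *qcoeff0, __m128i *qcoeff1,
                                 __m128i zbin) {
      /* 0xFFFF in each 16-bit lane whose coefficient exceeds zbin. */
      const __m128i cmp_mask0 = _mm_cmpgt_epi16(*qcoeff0, zbin);
      const __m128i cmp_mask1 = _mm_cmpgt_epi16(*qcoeff1, zbin);

      /* Zero every coefficient that failed the test. */
      *qcoeff0 = _mm_and_si128(*qcoeff0, cmp_mask0);
      *qcoeff1 = _mm_and_si128(*qcoeff1, cmp_mask1);

      /* all_zero in the listings above: any set lane means work to do. */
      return _mm_movemask_epi8(_mm_or_si128(cmp_mask0, cmp_mask1));
    }

The same masks are then handed to scan_for_eob(), which combines them with iscan to locate the last surviving coefficient.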
/external/libaom/libaom/aom_dsp/x86/
D | highbd_adaptive_quantize_sse2.c
      57  static INLINE void highbd_update_mask1(__m128i *cmp_mask0,  in highbd_update_mask1() argument
      61  if (_mm_movemask_epi8(*cmp_mask0)) {  in highbd_update_mask1()
      63  __m128i mask0 = _mm_and_si128(*cmp_mask0, iscan0);  in highbd_update_mask1()
      74  __m128i coeff[2], cmp_mask0, cmp_mask1;  in highbd_update_mask0() local
      77  cmp_mask0 = _mm_cmpgt_epi32(coeff[0], threshold[0]);  in highbd_update_mask0()
      81  cmp_mask0 = _mm_packs_epi32(cmp_mask0, cmp_mask1);  in highbd_update_mask0()
      83  highbd_update_mask1(&cmp_mask0, iscan_ptr, is_found, mask);  in highbd_update_mask0()
     111  __m128i cmp_mask0, cmp_mask1, cmp_mask;  in aom_highbd_quantize_b_adaptive_sse2() local
     161  cmp_mask0 = _mm_cmpgt_epi32(qcoeff0, zbin);  in aom_highbd_quantize_b_adaptive_sse2()
     164  cmp_mask = _mm_packs_epi32(cmp_mask0, cmp_mask1);  in aom_highbd_quantize_b_adaptive_sse2()
     [all …]
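The high-bit-depth path does the same comparison on 32-bit coefficients, so each test yields epi32 masks that must be narrowed back to the 16-bit layout the shared mask handling expects. A sketch of just that narrowing step, assuming both comparison results are saturated masks (each 32-bit lane all-ones or all-zeros):

    #include <emmintrin.h> /* SSE2 */

    /* Sketch only. Because a comparison mask lane is 0 or -1, signed
     * saturation in _mm_packs_epi32 preserves it exactly (0 -> 0,
     * -1 -> -1) while halving the lane width, producing one 16-bit mask
     * lane per coefficient. */
    static __m128i pack_mask_epi32(__m128i coeff0, __m128i coeff1,
                                   __m128i threshold0, __m128i threshold1) {
      const __m128i cmp_mask0 = _mm_cmpgt_epi32(coeff0, threshold0);
      const __m128i cmp_mask1 = _mm_cmpgt_epi32(coeff1, threshold1);
      return _mm_packs_epi32(cmp_mask0, cmp_mask1);
    }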
D | adaptive_quantize_sse2.c
      34  __m128i cmp_mask0, cmp_mask1;  in aom_quantize_b_adaptive_sse2() local
      69  cmp_mask0 = _mm_cmpgt_epi16(qcoeff0, zbin);  in aom_quantize_b_adaptive_sse2()
      73  update_mask1(&cmp_mask0, &cmp_mask1, iscan, &is_found1, &mask1);  in aom_quantize_b_adaptive_sse2()
      76  all_zero = _mm_or_si128(cmp_mask0, cmp_mask1);  in aom_quantize_b_adaptive_sse2()
     104  qcoeff0 = _mm_and_si128(qcoeff0, cmp_mask0);  in aom_quantize_b_adaptive_sse2()
     131  cmp_mask0 = _mm_cmpgt_epi16(qcoeff0, zbin);  in aom_quantize_b_adaptive_sse2()
     134  update_mask1(&cmp_mask0, &cmp_mask1, iscan + index, &is_found1, &mask1);  in aom_quantize_b_adaptive_sse2()
     136  all_zero = _mm_or_si128(cmp_mask0, cmp_mask1);  in aom_quantize_b_adaptive_sse2()
     155  qcoeff0 = _mm_and_si128(qcoeff0, cmp_mask0);  in aom_quantize_b_adaptive_sse2()
     237  __m128i cmp_mask0, cmp_mask1;  in aom_quantize_b_32x32_adaptive_sse2() local
     [all …]
D | quantize_sse2.c
      34  __m128i cmp_mask0, cmp_mask1;  in aom_quantize_b_sse2() local
      53  cmp_mask0 = _mm_cmpgt_epi16(qcoeff0, zbin);  in aom_quantize_b_sse2()
      70  qcoeff0 = _mm_and_si128(qcoeff0, cmp_mask0);  in aom_quantize_b_sse2()
      84  scan_for_eob(&coeff0, &coeff1, cmp_mask0, cmp_mask1, iscan_ptr, 0, zero);  in aom_quantize_b_sse2()
      96  cmp_mask0 = _mm_cmpgt_epi16(qcoeff0, zbin);  in aom_quantize_b_sse2()
     105  qcoeff0 = _mm_and_si128(qcoeff0, cmp_mask0);  in aom_quantize_b_sse2()
     117  eob0 = scan_for_eob(&coeff0, &coeff1, cmp_mask0, cmp_mask1, iscan_ptr,  in aom_quantize_b_sse2()
D | quantize_ssse3.c
      77  __m128i cmp_mask0, cmp_mask1, all_zero;  in aom_quantize_b_64x64_ssse3() local
     103  cmp_mask0 = _mm_cmpgt_epi16(qcoeff0, zbin);  in aom_quantize_b_64x64_ssse3()
     106  all_zero = _mm_or_si128(cmp_mask0, cmp_mask1);  in aom_quantize_b_64x64_ssse3()
     132  qcoeff0 = _mm_and_si128(qcoeff0, cmp_mask0);  in aom_quantize_b_64x64_ssse3()
     143  scan_for_eob(&qcoeff0, &qcoeff1, cmp_mask0, cmp_mask1, iscan, 0, zero);  in aom_quantize_b_64x64_ssse3()
     154  cmp_mask0 = _mm_cmpgt_epi16(qcoeff0, zbin);  in aom_quantize_b_64x64_ssse3()
     157  all_zero = _mm_or_si128(cmp_mask0, cmp_mask1);  in aom_quantize_b_64x64_ssse3()
     175  qcoeff0 = _mm_and_si128(qcoeff0, cmp_mask0);  in aom_quantize_b_64x64_ssse3()
     186  eob0 = scan_for_eob(&qcoeff0, &qcoeff1, cmp_mask0, cmp_mask1, iscan, index,  in aom_quantize_b_64x64_ssse3()
D | quantize_x86.h
     147  static INLINE void update_mask1(__m128i *cmp_mask0, __m128i *cmp_mask1,  in update_mask1() argument
     152  all_zero = _mm_or_si128(*cmp_mask0, *cmp_mask1);  in update_mask1()
     155  __m128i mask0 = _mm_and_si128(*cmp_mask0, iscan0);  in update_mask1()
     168  __m128i coeff[4], cmp_mask0, cmp_mask1, cmp_mask2, cmp_mask3;  in update_mask0() local
     176  cmp_mask0 = _mm_cmpgt_epi32(coeff[0], threshold[0]);  in update_mask0()
     184  cmp_mask0 = _mm_packs_epi32(cmp_mask0, cmp_mask1);  in update_mask0()
     187  update_mask1(&cmp_mask0, &cmp_mask1, iscan_ptr, is_found, mask);  in update_mask0()
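update_mask1() is where the adaptive kernels turn comparison masks into an end-of-block candidate: the mask selects the iscan entries of the surviving coefficients, and a running per-lane maximum remembers the largest scan index seen so far. The sketch below captures that idea under the assumption that iscan points at 16 contiguous int16_t scan indices; the exact body in quantize_x86.h may differ in detail.

    #include <emmintrin.h> /* SSE2 */
    #include <stdint.h>

    /* Sketch only. After all groups are processed, a horizontal max over
     * *mask bounds the eob. */
    static void update_mask_sketch(const __m128i *cmp_mask0,
                                   const __m128i *cmp_mask1,
                                   const int16_t *iscan, int *is_found,
                                   __m128i *mask) {
      const __m128i all_zero = _mm_or_si128(*cmp_mask0, *cmp_mask1);
      if (_mm_movemask_epi8(all_zero)) { /* any coefficient survived? */
        const __m128i iscan0 = _mm_loadu_si128((const __m128i *)(iscan + 0));
        const __m128i iscan1 = _mm_loadu_si128((const __m128i *)(iscan + 8));
        /* Keep scan indices only where the mask is set... */
        const __m128i mask0 = _mm_and_si128(*cmp_mask0, iscan0);
        const __m128i mask1 = _mm_and_si128(*cmp_mask1, iscan1);
        /* ...and fold them into the running per-lane maximum. */
        *mask = _mm_max_epi16(*mask, _mm_max_epi16(mask0, mask1));
        *is_found = 1;
      }
    }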
D | adaptive_quantize_avx2.c
      61  __m256i coeff[2], cmp_mask0, cmp_mask1;  in update_mask0_avx2() local
      65  cmp_mask0 = _mm256_cmpgt_epi32(coeff[0], threshold[0]);  in update_mask0_avx2()
      68  cmp_mask0 =  in update_mask0_avx2()
      69  _mm256_permute4x64_epi64(_mm256_packs_epi32(cmp_mask0, cmp_mask1), 0xd8);  in update_mask0_avx2()
      70  update_mask1_avx2(&cmp_mask0, iscan_ptr, is_found, mask);  in update_mask0_avx2()
D | highbd_adaptive_quantize_avx2.c
      51  __m256i coeff[2], cmp_mask0, cmp_mask1;  in highbd_update_mask0_avx2() local
      53  cmp_mask0 = _mm256_cmpgt_epi32(coeff[0], threshold[0]);  in highbd_update_mask0_avx2()
      56  cmp_mask0 =  in highbd_update_mask0_avx2()
      57  _mm256_permute4x64_epi64(_mm256_packs_epi32(cmp_mask0, cmp_mask1), 0xd8);  in highbd_update_mask0_avx2()
      58  highbd_update_mask1_avx2(&cmp_mask0, iscan_ptr, is_found, mask);  in highbd_update_mask0_avx2()
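In both AVX2 variants, _mm256_packs_epi32 packs within each 128-bit lane, so the narrowed mask comes out with its 64-bit quadrants in the order [a_lo, b_lo, a_hi, b_hi] rather than coefficient order. The _mm256_permute4x64_epi64(..., 0xd8) seen at lines 56-57 and 68-69 fixes that: 0xd8 is binary 11 01 10 00, i.e. quadrant order 0, 2, 1, 3, which restores [a_lo, a_hi, b_lo, b_hi]. A sketch of the combined step:

    #include <immintrin.h> /* AVX2 */

    /* Sketch only: narrow two epi32 comparison masks to epi16 and put
     * the quadrants back into coefficient order in one shuffle. */
    static __m256i pack_mask_epi32_avx2(__m256i cmp_mask0, __m256i cmp_mask1) {
      return _mm256_permute4x64_epi64(_mm256_packs_epi32(cmp_mask0, cmp_mask1),
                                      0xd8);
    }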