
Searched refs:AOM_BLEND_A64_ROUND_BITS (Results 1 – 15 of 15) sorted by relevance

/external/libaom/libaom/aom_dsp/x86/
masked_sad_intrin_ssse3.c
102 const __m128i mask_max = _mm_set1_epi8((1 << AOM_BLEND_A64_ROUND_BITS)); in masked_sad_ssse3()
118 pred_l = xx_roundn_epu16(pred_l, AOM_BLEND_A64_ROUND_BITS); in masked_sad_ssse3()
123 pred_r = xx_roundn_epu16(pred_r, AOM_BLEND_A64_ROUND_BITS); in masked_sad_ssse3()
147 const __m128i mask_max = _mm_set1_epi8((1 << AOM_BLEND_A64_ROUND_BITS)); in aom_masked_sad8xh_ssse3()
165 pred_l = xx_roundn_epu16(pred_l, AOM_BLEND_A64_ROUND_BITS); in aom_masked_sad8xh_ssse3()
170 pred_r = xx_roundn_epu16(pred_r, AOM_BLEND_A64_ROUND_BITS); in aom_masked_sad8xh_ssse3()
192 const __m128i mask_max = _mm_set1_epi8((1 << AOM_BLEND_A64_ROUND_BITS)); in aom_masked_sad4xh_ssse3()
214 pred_16bit = xx_roundn_epu16(pred_16bit, AOM_BLEND_A64_ROUND_BITS); in aom_masked_sad4xh_ssse3()
295 const __m128i mask_max = _mm_set1_epi16((1 << AOM_BLEND_A64_ROUND_BITS)); in highbd_masked_sad_ssse3()
297 _mm_set1_epi32((1 << AOM_BLEND_A64_ROUND_BITS) >> 1); in highbd_masked_sad_ssse3()
[all …]
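
The SSSE3 hits above (and the AVX2 ones below) all vectorize the same scalar computation: blend two predictors with a 6-bit alpha mask, round, then accumulate absolute differences against the source. A minimal scalar sketch of that loop, assuming the standard AOM_BLEND_A64 semantics defined in blend.h further down (the helper name is illustrative, not libaom's):

    #include <stdint.h>
    #include <stdlib.h>

    // Masked SAD over n pixels: blend predictors a and b with alpha mask m
    // (values in [0, 64]), round to nearest, compare against src.
    static unsigned masked_sad_scalar(const uint8_t *src, const uint8_t *a,
                                      const uint8_t *b, const uint8_t *m,
                                      int n) {
      unsigned sad = 0;
      for (int i = 0; i < n; i++) {
        // pred = (m*a + (64 - m)*b + 32) >> 6
        const int pred = (m[i] * a[i] + (64 - m[i]) * b[i] + 32) >> 6;
        sad += abs(pred - src[i]);
      }
      return sad;
    }

The mask_max constant in these kernels is that 64 (1 << AOM_BLEND_A64_ROUND_BITS), used to derive the complementary weight 64 - m.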
masked_sad_intrin_avx2.c
28 const __m256i mask_max = _mm256_set1_epi8((1 << AOM_BLEND_A64_ROUND_BITS)); in masked_sad32xh_avx2()
30 _mm256_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)); in masked_sad32xh_avx2()
83 const __m256i mask_max = _mm256_set1_epi8((1 << AOM_BLEND_A64_ROUND_BITS)); in masked_sad16xh_avx2()
85 _mm256_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)); in masked_sad16xh_avx2()
211 const __m256i mask_max = _mm256_set1_epi16((1 << AOM_BLEND_A64_ROUND_BITS)); in highbd_masked_sad8xh_avx2()
213 _mm256_set1_epi32((1 << AOM_BLEND_A64_ROUND_BITS) >> 1); in highbd_masked_sad8xh_avx2()
230 AOM_BLEND_A64_ROUND_BITS); in highbd_masked_sad8xh_avx2()
236 AOM_BLEND_A64_ROUND_BITS); in highbd_masked_sad8xh_avx2()
268 const __m256i mask_max = _mm256_set1_epi16((1 << AOM_BLEND_A64_ROUND_BITS)); in highbd_masked_sad16xh_avx2()
270 _mm256_set1_epi32((1 << AOM_BLEND_A64_ROUND_BITS) >> 1); in highbd_masked_sad16xh_avx2()
[all …]
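
The high-bitdepth variants work in 32-bit lanes, where no PMULHRSW-style fused round-and-shift is available, so they add half the divisor explicitly before shifting, as the round_const = (1 << AOM_BLEND_A64_ROUND_BITS) >> 1 lines show. A scalar model of one lane (illustrative):

    #include <stdint.h>

    // Round-to-nearest shift by AOM_BLEND_A64_ROUND_BITS (6) on a 32-bit
    // lane: add half the divisor (32), then shift right.
    static inline int32_t round_shift6(int32_t x) {
      return (x + ((1 << 6) >> 1)) >> 6;
    }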
blend_sse4.h
36 const __m128i v_res_w = xx_roundn_epu16(v_sum_w, AOM_BLEND_A64_ROUND_BITS); in blend_4()
53 const __m128i v_res_w = xx_roundn_epu16(v_sum_w, AOM_BLEND_A64_ROUND_BITS); in blend_8()
116 const __m128i v_res_w = xx_roundn_epu16(v_sum_w, AOM_BLEND_A64_ROUND_BITS); in blend_4_b10()
131 const __m128i v_res_w = xx_roundn_epu16(v_sum_w, AOM_BLEND_A64_ROUND_BITS); in blend_8_b10()
150 _mm_srli_epi32(v_sum_d, AOM_BLEND_A64_ROUND_BITS - 1); in blend_4_b12()
178 _mm_srli_epi32(v_suml_d, AOM_BLEND_A64_ROUND_BITS - 1); in blend_8_b12()
180 _mm_srli_epi32(v_sumh_d, AOM_BLEND_A64_ROUND_BITS - 1); in blend_8_b12()
masked_variance_intrin_ssse3.h
29 _mm_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)); in comp_mask_pred_16_ssse3()
58 _mm_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)); in comp_mask_pred_8_ssse3()
blend_a64_mask_sse4.c
38 const __m128i _r = _mm_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)); in blend_a64_mask_w4_sse4_1()
59 const __m128i _r = _mm_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)); in blend_a64_mask_w8_sse4_1()
78 const __m128i _r = _mm_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)); in blend_a64_mask_w16n_sse4_1()
110 const __m128i _r = _mm_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)); in blend_a64_mask_sx_w4_sse4_1()
137 const __m128i _r = _mm_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)); in blend_a64_mask_sx_w8_sse4_1()
163 const __m128i _r = _mm_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)); in blend_a64_mask_sx_w16n_sse4_1()
200 const __m128i _r = _mm_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)); in blend_a64_mask_sy_w4_sse4_1()
226 const __m128i _r = _mm_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)); in blend_a64_mask_sy_w8_sse4_1()
248 const __m128i _r = _mm_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)); in blend_a64_mask_sy_w16n_sse4_1()
279 const __m128i _r = _mm_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)); in blend_a64_mask_sx_sy_w4_sse4_1()
[all …]
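
The recurring _r = _mm_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)) constant is the multiplier for PMULHRSW-based rounding (assuming these kernels pair it with _mm_mulhrs_epi16, as libaom's SSSE3 blend paths do): _mm_mulhrs_epi16 computes (x * r + (1 << 14)) >> 15, and with r = 1 << 9 that collapses to (x + 32) >> 6, i.e. a round-to-nearest shift by AOM_BLEND_A64_ROUND_BITS. A scalar check:

    #include <stdint.h>
    #include <stdio.h>

    // One lane of _mm_mulhrs_epi16: multiply, round, shift right by 15.
    static int16_t mulhrs(int16_t x, int16_t r) {
      return (int16_t)(((int32_t)x * r + (1 << 14)) >> 15);
    }

    int main(void) {
      const int16_t r = 1 << (15 - 6);  // the _r constant, 1 << 9
      // Both print 16: mulhrs with this r equals (x + 32) >> 6.
      printf("%d %d\n", mulhrs(1000, r), (1000 + 32) >> 6);
      return 0;
    }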
blend_a64_mask_avx2.c
296 << AOM_BLEND_A64_ROUND_BITS; in aom_lowbd_blend_a64_d16_mask_avx2()
298 const int shift = round_bits + AOM_BLEND_A64_ROUND_BITS; in aom_lowbd_blend_a64_d16_mask_avx2()
463 AOM_BLEND_A64_ROUND_BITS); in blend_a64_mask_sx_sy_w16_avx2()
504 src0 + c, src1 + c, &v_m0_b, &v_m1_b, AOM_BLEND_A64_ROUND_BITS); in blend_a64_mask_sx_sy_w32n_avx2()
521 const __m128i _r = _mm_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)); in blend_a64_mask_sx_sy_avx2()
596 AOM_BLEND_A64_ROUND_BITS); in blend_a64_mask_sx_w16_avx2()
629 src0 + c, src1 + c, &v_m0_b, &v_m1_b, AOM_BLEND_A64_ROUND_BITS); in blend_a64_mask_sx_w32n_avx2()
646 const __m128i _r = _mm_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)); in blend_a64_mask_sx_avx2()
701 const __m128i _r = _mm_set1_epi16(1 << (15 - AOM_BLEND_A64_ROUND_BITS)); in blend_a64_mask_sy_w16_avx2()
732 src0 + c, src1 + c, &v_m0_b, &v_m1_b, AOM_BLEND_A64_ROUND_BITS); in blend_a64_mask_sy_w32n_avx2()
[all …]
masked_variance_intrin_ssse3.c
400 const __m128i mask_max = _mm_set1_epi8((1 << AOM_BLEND_A64_ROUND_BITS)); in accumulate_block()
409 pred_l = xx_roundn_epu16(pred_l, AOM_BLEND_A64_ROUND_BITS); in accumulate_block()
414 pred_r = xx_roundn_epu16(pred_r, AOM_BLEND_A64_ROUND_BITS); in accumulate_block()
904 const __m128i mask_max = _mm_set1_epi16((1 << AOM_BLEND_A64_ROUND_BITS)); in highbd_masked_variance()
906 _mm_set1_epi32((1 << AOM_BLEND_A64_ROUND_BITS) >> 1); in highbd_masked_variance()
923 AOM_BLEND_A64_ROUND_BITS); in highbd_masked_variance()
929 AOM_BLEND_A64_ROUND_BITS); in highbd_masked_variance()
976 const __m128i mask_max = _mm_set1_epi16((1 << AOM_BLEND_A64_ROUND_BITS)); in highbd_masked_variance4xh()
978 _mm_set1_epi32((1 << AOM_BLEND_A64_ROUND_BITS) >> 1); in highbd_masked_variance4xh()
998 AOM_BLEND_A64_ROUND_BITS); in highbd_masked_variance4xh()
[all …]
variance_avx2.c
337 const int16_t round_bits = 15 - AOM_BLEND_A64_ROUND_BITS; in comp_mask_pred_line_avx2()
414 const __m256i alpha_max = _mm256_set1_epi16((1 << AOM_BLEND_A64_ROUND_BITS)); in highbd_comp_mask_pred_line_avx2()
416 _mm256_set1_epi32((1 << AOM_BLEND_A64_ROUND_BITS) >> 1); in highbd_comp_mask_pred_line_avx2()
423 _mm256_add_epi32(pred_lo, round_const), AOM_BLEND_A64_ROUND_BITS); in highbd_comp_mask_pred_line_avx2()
429 _mm256_add_epi32(pred_hi, round_const), AOM_BLEND_A64_ROUND_BITS); in highbd_comp_mask_pred_line_avx2()
variance_sse2.c
695 const __m128i alpha_max = _mm_set1_epi16((1 << AOM_BLEND_A64_ROUND_BITS)); in highbd_comp_mask_pred_line_sse2()
697 _mm_set1_epi32((1 << AOM_BLEND_A64_ROUND_BITS) >> 1); in highbd_comp_mask_pred_line_sse2()
704 AOM_BLEND_A64_ROUND_BITS); in highbd_comp_mask_pred_line_sse2()
710 AOM_BLEND_A64_ROUND_BITS); in highbd_comp_mask_pred_line_sse2()
/external/libaom/libaom/av1/common/arm/
blend_a64_hmask_neon.c
57 res_q = vcombine_u8(vrshrn_n_u16(res_low, AOM_BLEND_A64_ROUND_BITS), in aom_blend_a64_hmask_neon()
58 vrshrn_n_u16(res_high, AOM_BLEND_A64_ROUND_BITS)); in aom_blend_a64_hmask_neon()
80 vst1_u8(dst, vrshrn_n_u16(res, AOM_BLEND_A64_ROUND_BITS)); in aom_blend_a64_hmask_neon()
101 vreinterpret_u32_u8(vrshrn_n_u16(res, AOM_BLEND_A64_ROUND_BITS)), 0); in aom_blend_a64_hmask_neon()
104 vreinterpret_u32_u8(vrshrn_n_u16(res, AOM_BLEND_A64_ROUND_BITS)), 1); in aom_blend_a64_hmask_neon()
125 vreinterpret_u16_u8(vrshrn_n_u16(res, AOM_BLEND_A64_ROUND_BITS)), 0); in aom_blend_a64_hmask_neon()
128 vreinterpret_u16_u8(vrshrn_n_u16(res, AOM_BLEND_A64_ROUND_BITS)), 1); in aom_blend_a64_hmask_neon()
blend_a64_vmask_neon.c
53 res_q = vcombine_u8(vrshrn_n_u16(res_low, AOM_BLEND_A64_ROUND_BITS), in aom_blend_a64_vmask_neon()
54 vrshrn_n_u16(res_high, AOM_BLEND_A64_ROUND_BITS)); in aom_blend_a64_vmask_neon()
74 vst1_u8(dst, vrshrn_n_u16(res, AOM_BLEND_A64_ROUND_BITS)); in aom_blend_a64_vmask_neon()
100 vreinterpret_u32_u8(vrshrn_n_u16(res, AOM_BLEND_A64_ROUND_BITS)), 0); in aom_blend_a64_vmask_neon()
103 vreinterpret_u32_u8(vrshrn_n_u16(res, AOM_BLEND_A64_ROUND_BITS)), 1); in aom_blend_a64_vmask_neon()
132 vreinterpret_u16_u8(vrshrn_n_u16(res, AOM_BLEND_A64_ROUND_BITS)), 0); in aom_blend_a64_vmask_neon()
135 vreinterpret_u16_u8(vrshrn_n_u16(res, AOM_BLEND_A64_ROUND_BITS)), 1); in aom_blend_a64_vmask_neon()
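
On the NEON side the same rounding shift is a single instruction: vrshrn_n_u16(x, AOM_BLEND_A64_ROUND_BITS) performs the round-to-nearest shift by 6 and narrows each 16-bit lane to 8 bits in one step. A one-lane scalar model (illustrative):

    #include <stdint.h>

    // One lane of vrshrn_n_u16(x, 6): (x + 32) >> 6, narrowed to 8 bits.
    // The result fits because the blend sum is at most 64 * 255.
    static inline uint8_t rshrn6(uint16_t x) {
      return (uint8_t)((x + (1u << 5)) >> 6);
    }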
/external/libaom/libaom/aom_dsp/
blend.h
23 #define AOM_BLEND_A64_ROUND_BITS 6 macro
24 #define AOM_BLEND_A64_MAX_ALPHA (1 << AOM_BLEND_A64_ROUND_BITS) // 64
28 AOM_BLEND_A64_ROUND_BITS)
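
These blend.h lines are the definition site the rest of the results build on: the alpha has 6 fractional bits, so the maximum (opaque) alpha is 64 and every kernel finishes with a round-to-nearest shift by 6. A self-contained scalar sketch of the blend formula those definitions imply (macro body paraphrased, not copied from the partially shown line 28 above):

    #include <stdint.h>

    // Round-to-nearest right shift, as libaom's ROUND_POWER_OF_TWO does.
    #define ROUND_POWER_OF_TWO(value, n) (((value) + (1 << ((n)-1))) >> (n))

    // Blend v0 against v1 with alpha a in [0, 64].
    static inline uint8_t blend_a64(int a, uint8_t v0, uint8_t v1) {
      return (uint8_t)ROUND_POWER_OF_TWO(a * v0 + (64 - a) * v1, 6);
    }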
blend_a64_mask.c
65 AOM_BLEND_A64_ROUND_BITS); in aom_lowbd_blend_a64_d16_mask_c()
84 AOM_BLEND_A64_ROUND_BITS); in aom_lowbd_blend_a64_d16_mask_c()
99 AOM_BLEND_A64_ROUND_BITS); in aom_lowbd_blend_a64_d16_mask_c()
114 AOM_BLEND_A64_ROUND_BITS); in aom_lowbd_blend_a64_d16_mask_c()
159 AOM_BLEND_A64_ROUND_BITS); in aom_highbd_blend_a64_d16_mask_c()
178 AOM_BLEND_A64_ROUND_BITS; in aom_highbd_blend_a64_d16_mask_c()
194 AOM_BLEND_A64_ROUND_BITS; in aom_highbd_blend_a64_d16_mask_c()
210 AOM_BLEND_A64_ROUND_BITS; in aom_highbd_blend_a64_d16_mask_c()
/external/libaom/libaom/aom_dsp/arm/
blend_a64_mask_neon.c
35 *res = vcombine_s16(vshrn_n_s32(im_res_low, AOM_BLEND_A64_ROUND_BITS), in blend8x1()
36 vshrn_n_s32(im_res_high, AOM_BLEND_A64_ROUND_BITS)); in blend8x1()
/external/libaom/libaom/av1/encoder/
rdopt.c
13896 wsrc[col] = (wsrc[col] >> AOM_BLEND_A64_ROUND_BITS) * m0 + in calc_target_weighted_pred_left()
13897 (tmp[col] << AOM_BLEND_A64_ROUND_BITS) * m1; in calc_target_weighted_pred_left()
13898 mask[col] = (mask[col] >> AOM_BLEND_A64_ROUND_BITS) * m0; in calc_target_weighted_pred_left()
13911 wsrc[col] = (wsrc[col] >> AOM_BLEND_A64_ROUND_BITS) * m0 + in calc_target_weighted_pred_left()
13912 (tmp16[col] << AOM_BLEND_A64_ROUND_BITS) * m1; in calc_target_weighted_pred_left()
13913 mask[col] = (mask[col] >> AOM_BLEND_A64_ROUND_BITS) * m0; in calc_target_weighted_pred_left()
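
The rdopt.c hits are the encoder's OBMC target-weighted prediction: wsrc accumulates source pixels pre-multiplied by the running blend weight and mask accumulates that weight, both kept at a fixed-point scale of 64 (1 << AOM_BLEND_A64_ROUND_BITS). Each neighbor pass therefore renormalizes by one factor of 64 (the >> AOM_BLEND_A64_ROUND_BITS) before multiplying in the new weight pair, and pre-scales the new predictor (the << AOM_BLEND_A64_ROUND_BITS) to match. A one-column sketch of that invariant (variable names follow the lines above; the surrounding setup is assumed, not shown in these results):

    #include <stdint.h>

    // One column of one OBMC blending pass. Assumption from the blend.h
    // definitions above: m0 and m1 are complementary weights, m0 + m1 == 64.
    static void obmc_pass_col(int32_t *wsrc, int32_t *mask, int32_t tmp,
                              int m0, int m1) {
      *wsrc = (*wsrc >> 6) * m0 + (tmp << 6) * m1;  // stays at scale 64*weight
      *mask = (*mask >> 6) * m0;                    // running weight product
    }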