
Searched refs:_mm_adds_epu16 (Results 1 – 23 of 23) sorted by relevance
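
All 23 matching files below use (or emulate) the SSE2 intrinsic _mm_adds_epu16, declared in <emmintrin.h> and compiled to PADDUSW: it adds the eight unsigned 16-bit lanes of two vectors and saturates each result at 0xFFFF instead of wrapping. A minimal, self-contained sketch of that behaviour, not taken from any of the files below:

  #include <emmintrin.h>
  #include <stdint.h>
  #include <stdio.h>

  int main(void) {
    /* One lane near the top of the unsigned 16-bit range, one small lane. */
    __m128i a = _mm_set_epi16(0, 0, 0, 0, 0, 0, (short)0xFFF0, 100);
    __m128i b = _mm_set_epi16(0, 0, 0, 0, 0, 0, (short)0x0040, 200);

    __m128i sum = _mm_adds_epu16(a, b);  /* saturating unsigned add per lane */

    uint16_t out[8];
    _mm_storeu_si128((__m128i *)out, sum);
    printf("%u %u\n", (unsigned)out[1], (unsigned)out[0]);  /* prints 65535 (clamped) and 300 */
    return 0;
  }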

/external/webrtc/modules/desktop_capture/
differ_vector_sse2.cc  33 acc = _mm_adds_epu16(acc, sad); in VectorDifference_SSE2_W16()
37 acc = _mm_adds_epu16(acc, sad); in VectorDifference_SSE2_W16()
41 acc = _mm_adds_epu16(acc, sad); in VectorDifference_SSE2_W16()
45 acc = _mm_adds_epu16(acc, sad); in VectorDifference_SSE2_W16()
50 sad = _mm_adds_epu16(sad, acc); in VectorDifference_SSE2_W16()
65 acc = _mm_adds_epu16(acc, sad); in VectorDifference_SSE2_W32()
69 acc = _mm_adds_epu16(acc, sad); in VectorDifference_SSE2_W32()
73 acc = _mm_adds_epu16(acc, sad); in VectorDifference_SSE2_W32()
77 acc = _mm_adds_epu16(acc, sad); in VectorDifference_SSE2_W32()
81 acc = _mm_adds_epu16(acc, sad); in VectorDifference_SSE2_W32()
[all …]
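
The WebRTC differ above pairs _mm_sad_epu8 (sum of absolute byte differences, one 16-bit total per 8-byte half) with _mm_adds_epu16 so the running total saturates rather than wraps. A hedged sketch of that accumulation pattern; the function name and layout here are assumptions, not the actual VectorDifference_SSE2_W16 code:

  #include <emmintrin.h>
  #include <stdbool.h>
  #include <stdint.h>

  /* Returns true if any byte differs between two buffers of blocks * 16 bytes.
   * Sketch of the SAD-accumulate idiom only, not the WebRTC implementation. */
  static bool blocks_differ_sse2(const uint8_t *a, const uint8_t *b, int blocks) {
    __m128i acc = _mm_setzero_si128();
    for (int i = 0; i < blocks; ++i) {
      const __m128i va = _mm_loadu_si128((const __m128i *)(a + 16 * i));
      const __m128i vb = _mm_loadu_si128((const __m128i *)(b + 16 * i));
      const __m128i sad = _mm_sad_epu8(va, vb);  /* two 16-bit SADs in the 64-bit lanes */
      acc = _mm_adds_epu16(acc, sad);            /* saturate instead of wrapping */
    }
    /* Any nonzero lane in acc means at least one byte differed. */
    return _mm_movemask_epi8(_mm_cmpeq_epi16(acc, _mm_setzero_si128())) != 0xFFFF;
  }
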
/external/libvpx/libvpx/vpx_dsp/x86/
avg_intrin_sse2.c  102 s0 = _mm_adds_epu16(s0, s1); in vpx_avg_8x8_sse2()
104 s0 = _mm_adds_epu16(s0, s1); in vpx_avg_8x8_sse2()
106 s0 = _mm_adds_epu16(s0, s1); in vpx_avg_8x8_sse2()
108 s0 = _mm_adds_epu16(s0, s1); in vpx_avg_8x8_sse2()
110 s0 = _mm_adds_epu16(s0, s1); in vpx_avg_8x8_sse2()
112 s0 = _mm_adds_epu16(s0, s1); in vpx_avg_8x8_sse2()
114 s0 = _mm_adds_epu16(s0, s1); in vpx_avg_8x8_sse2()
116 s0 = _mm_adds_epu16(s0, _mm_srli_si128(s0, 8)); in vpx_avg_8x8_sse2()
117 s0 = _mm_adds_epu16(s0, _mm_srli_epi64(s0, 32)); in vpx_avg_8x8_sse2()
118 s0 = _mm_adds_epu16(s0, _mm_srli_epi64(s0, 16)); in vpx_avg_8x8_sse2()
[all …]
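
vpx_avg_8x8_sse2 above accumulates eight rows of 16-bit values with saturating adds and then reduces the vector horizontally with the three shift-and-add steps shown before averaging. A hedged sketch of that horizontal-sum step, assuming the true total fits in 16 bits (for an 8x8 block of 8-bit pixels it does):

  #include <emmintrin.h>
  #include <stdint.h>

  /* Horizontal sum of eight unsigned 16-bit lanes, mirroring the
   * shift-and-add reduction in vpx_avg_8x8_sse2 (sketch, not the library
   * code). If the true sum exceeded 0xFFFF the saturating adds would clamp it. */
  static uint32_t hsum_epu16(__m128i v) {
    v = _mm_adds_epu16(v, _mm_srli_si128(v, 8));   /* fold the high 64 bits onto the low */
    v = _mm_adds_epu16(v, _mm_srli_epi64(v, 32));  /* fold lanes 2,3 onto 0,1 */
    v = _mm_adds_epu16(v, _mm_srli_epi64(v, 16));  /* fold lane 1 onto lane 0 */
    return (uint32_t)_mm_extract_epi16(v, 0);      /* the low lane now holds the sum */
  }
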
highbd_loopfilter_sse2.c  117 abs_p0q0 = _mm_adds_epu16(abs_p0q0, abs_p0q0); // abs(p0 - q0) * 2 in vpx_highbd_lpf_horizontal_16_sse2()
119 mask = _mm_subs_epu16(_mm_adds_epu16(abs_p0q0, abs_p1q1), blimit_v); in vpx_highbd_lpf_horizontal_16_sse2()
121 mask = _mm_and_si128(mask, _mm_adds_epu16(limit_v, one)); in vpx_highbd_lpf_horizontal_16_sse2()
559 abs_p0q0 = _mm_adds_epu16(abs_p0q0, abs_p0q0); in vpx_highbd_lpf_horizontal_8_sse2()
561 mask = _mm_subs_epu16(_mm_adds_epu16(abs_p0q0, abs_p1q1), blimit_v); in vpx_highbd_lpf_horizontal_8_sse2()
565 mask = _mm_and_si128(mask, _mm_adds_epu16(limit_v, one)); in vpx_highbd_lpf_horizontal_8_sse2()
807 abs_p0q0 = _mm_adds_epu16(abs_p0q0, abs_p0q0); in vpx_highbd_lpf_horizontal_4_sse2()
809 mask = _mm_subs_epu16(_mm_adds_epu16(abs_p0q0, abs_p1q1), blimit_v); in vpx_highbd_lpf_horizontal_4_sse2()
813 mask = _mm_and_si128(mask, _mm_adds_epu16(limit_v, one)); in vpx_highbd_lpf_horizontal_4_sse2()
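
The high-bitdepth loop filters above rely on a standard SSE2 idiom: there is no unsigned 16-bit compare, so "a > b" is tested via a saturating subtract (nonzero exactly when a > b), and abs(p0 - q0) * 2 is formed by a saturating self-add. A hedged sketch of that comparison idiom, not the vpx/aom mask logic itself:

  #include <emmintrin.h>

  /* Per-lane unsigned 16-bit "a > b" mask built from saturating subtraction.
   * _mm_subs_epu16(a, b) is zero where a <= b, so comparing it against zero
   * and inverting yields the greater-than mask. */
  static __m128i cmpgt_epu16(__m128i a, __m128i b) {
    const __m128i zero = _mm_setzero_si128();
    const __m128i le_mask = _mm_cmpeq_epi16(_mm_subs_epu16(a, b), zero);  /* a <= b */
    return _mm_xor_si128(le_mask, _mm_set1_epi16((short)0xFFFF));         /* invert */
  }
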
/external/libvpx/libvpx/vp9/encoder/x86/
temporal_filter_sse4.c  88 sum = _mm_adds_epu16(sum, rounding_u16); in average_8()
111 count_u16 = _mm_adds_epu16(count_u16, sum_u16); in accumulate_and_store_8()
142 count_0_u16 = _mm_adds_epu16(count_0_u16, sum_0_u16); in accumulate_and_store_16()
145 count_1_u16 = _mm_adds_epu16(count_1_u16, sum_1_u16); in accumulate_and_store_16()
181 *sum = _mm_adds_epu16(dist_reg, dist_left); in get_sum_8()
182 *sum = _mm_adds_epu16(*sum, dist_right); in get_sum_8()
245 y_reg = _mm_adds_epu16(y_reg, y_tmp); in add_luma_dist_to_8_chroma_mod()
254 y_first = _mm_adds_epu16(y_first, y_tmp_0); in add_luma_dist_to_8_chroma_mod()
255 y_second = _mm_adds_epu16(y_second, y_tmp_1); in add_luma_dist_to_8_chroma_mod()
264 *u_mod = _mm_adds_epu16(*u_mod, y_reg); in add_luma_dist_to_8_chroma_mod()
[all …]
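
average_8 above adds a rounding constant to the accumulated sums with a saturating add before shifting right, i.e. a rounded division by a power of two. A hedged sketch of that round-then-shift pattern with an arbitrary shift of 4; the actual constant and shift in temporal_filter_sse4.c may differ:

  #include <emmintrin.h>

  /* (v + 8) >> 4 per unsigned 16-bit lane: rounded division by 16. The
   * saturating add keeps the +rounding step from wrapping. Sketch of the
   * pattern only, not the vp9 temporal filter's exact arithmetic. */
  static __m128i round_shift4_epu16(__m128i v) {
    const __m128i rounding = _mm_set1_epi16(1 << 3);
    v = _mm_adds_epu16(v, rounding);  /* add the rounding bias, saturating */
    return _mm_srli_epi16(v, 4);      /* logical shift completes the divide */
  }
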
highbd_temporal_filter_sse4.c  141 count_u16 = _mm_adds_epu16(count_u16, sum_u16); in highbd_accumulate_and_store_8()
/external/libaom/libaom/aom_dsp/x86/
avg_intrin_sse2.c  103 s0 = _mm_adds_epu16(s0, s1); in aom_avg_8x8_sse2()
105 s0 = _mm_adds_epu16(s0, s1); in aom_avg_8x8_sse2()
107 s0 = _mm_adds_epu16(s0, s1); in aom_avg_8x8_sse2()
109 s0 = _mm_adds_epu16(s0, s1); in aom_avg_8x8_sse2()
111 s0 = _mm_adds_epu16(s0, s1); in aom_avg_8x8_sse2()
113 s0 = _mm_adds_epu16(s0, s1); in aom_avg_8x8_sse2()
115 s0 = _mm_adds_epu16(s0, s1); in aom_avg_8x8_sse2()
117 s0 = _mm_adds_epu16(s0, _mm_srli_si128(s0, 8)); in aom_avg_8x8_sse2()
118 s0 = _mm_adds_epu16(s0, _mm_srli_epi64(s0, 32)); in aom_avg_8x8_sse2()
119 s0 = _mm_adds_epu16(s0, _mm_srli_epi64(s0, 16)); in aom_avg_8x8_sse2()
[all …]
highbd_loopfilter_sse2.c  88 abs_p0q0 = _mm_adds_epu16(abs_p0q0, abs_p0q0); in highbd_filter_mask_dual()
95 __m128i max = _mm_subs_epu16(_mm_adds_epu16(abs_p0q0, abs_p1q1), *bl); in highbd_filter_mask_dual()
97 max = _mm_and_si128(max, _mm_adds_epu16(*l, one)); in highbd_filter_mask_dual()
123 abs_p0q0 = _mm_adds_epu16(abs_p0q0_p1q1, abs_p0q0_p1q1); in highbd_hev_filter_mask_x_sse2()
129 max = _mm_subs_epu16(_mm_adds_epu16(abs_p0q0, abs_p1q1), *bl); in highbd_hev_filter_mask_x_sse2()
133 max = _mm_and_si128(max, _mm_adds_epu16(*l, one)); in highbd_hev_filter_mask_x_sse2()
852 abs_p0q0 = _mm_adds_epu16(abs_p0q0, abs_p0q0); in highbd_lpf_internal_6_dual_sse2()
854 mask = _mm_subs_epu16(_mm_adds_epu16(abs_p0q0, abs_p1q1), blimit0); in highbd_lpf_internal_6_dual_sse2()
858 mask = _mm_and_si128(mask, _mm_adds_epu16(limit0, one)); in highbd_lpf_internal_6_dual_sse2()
1114 abs_p0q0 = _mm_adds_epu16(abs_p0q0, abs_p0q0); in highbd_lpf_internal_8_dual_sse2()
[all …]
highbd_variance_sse2.c  751 __m128i sum = _mm_adds_epu16(mult0, mult1); in highbd_compute_dist_wtd_comp_avg()
752 __m128i round = _mm_adds_epu16(sum, *r); in highbd_compute_dist_wtd_comp_avg()
/external/libaom/libaom/av1/encoder/x86/
temporal_filter_sse4.c  92 sum = _mm_adds_epu16(sum, rounding_u16); in average_8()
119 sum = _mm_adds_epu16(sum, rounding_u16); in average_4_4()
144 input_0 = _mm_adds_epu16(input_0, rounding_u16); in average_16()
147 input_1 = _mm_adds_epu16(input_1, rounding_u16); in average_16()
171 count_u16 = _mm_adds_epu16(count_u16, sum_u16); in accumulate_and_store_8()
202 count_0_u16 = _mm_adds_epu16(count_0_u16, sum_0_u16); in accumulate_and_store_16()
205 count_1_u16 = _mm_adds_epu16(count_1_u16, sum_1_u16); in accumulate_and_store_16()
241 *sum = _mm_adds_epu16(dist_reg, dist_left); in get_sum_8()
242 *sum = _mm_adds_epu16(*sum, dist_right); in get_sum_8()
305 y_reg = _mm_adds_epu16(y_reg, y_tmp); in add_luma_dist_to_8_chroma_mod()
[all …]
/external/libaom/libaom/av1/common/x86/
reconinter_sse4.c  130 _mm_srli_epi16(_mm_adds_epu16(diff, round_const), round); in av1_build_compound_diffwtd_mask_d16_sse4_1()
highbd_convolve_2d_sse4.c  102 _mm_adds_epu16(res, offset_const_16b); in av1_highbd_dist_wtd_convolve_2d_copy_sse4_1()
157 _mm_adds_epu16(res, offset_const_16b); in av1_highbd_dist_wtd_convolve_2d_copy_sse4_1()
/external/llvm-project/clang/test/CodeGen/
ppc-emmintrin.c  44 resi = _mm_adds_epu16(mi1, mi2); in test_add()
/external/webp/src/dsp/
yuv_sse41.c  59 const __m128i B1 = _mm_adds_epu16(B0, Y1); in ConvertYUV444ToRGB_SSE41()
yuv_sse2.c  59 const __m128i B1 = _mm_adds_epu16(B0, Y1); in ConvertYUV444ToRGB_SSE2()
/external/clang/test/CodeGen/
sse2-builtins.c  71 return _mm_adds_epu16(A, B); in test_mm_adds_epu16()
/external/llvm-project/clang/test/CodeGen/X86/
sse2-builtins.c  72 return _mm_adds_epu16(A, B); in test_mm_adds_epu16()
/external/llvm-project/clang/lib/Headers/
emmintrin.h  2286 _mm_adds_epu16(__m128i __a, __m128i __b) in _mm_adds_epu16() function
avx512vlbwintrin.h  766 (__v8hi)_mm_adds_epu16(__A, __B), in _mm_mask_adds_epu16()
774 (__v8hi)_mm_adds_epu16(__A, __B), in _mm_maskz_adds_epu16()
/external/clang/lib/Headers/
emmintrin.h  701 _mm_adds_epu16(__m128i __a, __m128i __b) in _mm_adds_epu16() function
/external/libgav1/libgav1/src/dsp/x86/
loop_filter_sse4.cc  1202 const __m128i a = _mm_adds_epu16(abs_pmq, abs_pmq); in CheckOuterThreshF4()
1204 const __m128i c = _mm_adds_epu16(a, _mm_srli_si128(b, 8)); in CheckOuterThreshF4()
/external/llvm-project/clang/lib/Headers/ppc_wrappers/
emmintrin.h  1341 _mm_adds_epu16 (__m128i __A, __m128i __B) in _mm_adds_epu16() function
/external/pffft/
sse2neon.h  2489 FORCE_INLINE __m128i _mm_adds_epu16(__m128i a, __m128i b) in _mm_adds_epu16() function
/external/neon_2_sse/
NEON_2_SSE.h  3260 return64(_mm_adds_epu16(_pM128i(a),_pM128i(b))); in vqadd_u16()
3335 #define vqaddq_u16 _mm_adds_epu16
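
sse2neon.h and NEON_2_SSE.h above map between the two instruction sets in opposite directions, but the operation is identical on both sides: NEON's vqaddq_u16 and SSE2's _mm_adds_epu16 each perform a lane-wise unsigned 16-bit saturating add across eight lanes. A hedged sketch of that equivalence, not the wrapper headers' actual code:

  #if defined(__ARM_NEON) || defined(__aarch64__)
  #include <arm_neon.h>
  /* NEON side: unsigned saturating add of eight 16-bit lanes. */
  static inline uint16x8_t saturating_add_u16(uint16x8_t a, uint16x8_t b) {
    return vqaddq_u16(a, b);
  }
  #else
  #include <emmintrin.h>
  /* SSE2 side: the same operation, compiled to PADDUSW. */
  static inline __m128i saturating_add_u16(__m128i a, __m128i b) {
    return _mm_adds_epu16(a, b);
  }
  #endif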