Home
last modified time | relevance | path

Searched refs:_mm_adds_epi16 (Results 1 – 25 of 70) sorted by relevance

Pages: 1 2 3

/external/libaom/libaom/av1/encoder/x86/
av1_fwd_txfm_sse2.c:66 x1[0] = _mm_adds_epi16(input[0], input[3]); in fdct8x4_new_sse2()
68 x1[1] = _mm_adds_epi16(input[1], input[2]); in fdct8x4_new_sse2()
100 x1[0] = _mm_adds_epi16(input[0], input[7]); in fdct4x8_new_sse2()
102 x1[1] = _mm_adds_epi16(input[1], input[6]); in fdct4x8_new_sse2()
104 x1[2] = _mm_adds_epi16(input[2], input[5]); in fdct4x8_new_sse2()
106 x1[3] = _mm_adds_epi16(input[3], input[4]); in fdct4x8_new_sse2()
111 x2[0] = _mm_adds_epi16(x1[0], x1[3]); in fdct4x8_new_sse2()
113 x2[1] = _mm_adds_epi16(x1[1], x1[2]); in fdct4x8_new_sse2()
126 x3[4] = _mm_adds_epi16(x2[4], x2[5]); in fdct4x8_new_sse2()
129 x3[7] = _mm_adds_epi16(x2[7], x2[6]); in fdct4x8_new_sse2()
[all …]
av1_fwd_txfm_sse2.h:64 output[0] = _mm_adds_epi16(input[0], input[0]); in fidentity8x8_new_sse2()
65 output[1] = _mm_adds_epi16(input[1], input[1]); in fidentity8x8_new_sse2()
66 output[2] = _mm_adds_epi16(input[2], input[2]); in fidentity8x8_new_sse2()
67 output[3] = _mm_adds_epi16(input[3], input[3]); in fidentity8x8_new_sse2()
68 output[4] = _mm_adds_epi16(input[4], input[4]); in fidentity8x8_new_sse2()
69 output[5] = _mm_adds_epi16(input[5], input[5]); in fidentity8x8_new_sse2()
70 output[6] = _mm_adds_epi16(input[6], input[6]); in fidentity8x8_new_sse2()
71 output[7] = _mm_adds_epi16(input[7], input[7]); in fidentity8x8_new_sse2()
/external/libavc/common/x86/
ih264_weighted_pred_sse42.c:422 y_0_8x16b = _mm_adds_epi16(round_8x16b, y_0_8x16b); in ih264_weighted_pred_luma_sse42()
423 y_2_8x16b = _mm_adds_epi16(round_8x16b, y_2_8x16b); in ih264_weighted_pred_luma_sse42()
428 y_0_8x16b = _mm_adds_epi16(ofst_8x16b, y_0_8x16b); in ih264_weighted_pred_luma_sse42()
429 y_2_8x16b = _mm_adds_epi16(ofst_8x16b, y_2_8x16b); in ih264_weighted_pred_luma_sse42()
468 y_0_8x16b = _mm_adds_epi16(round_8x16b, y_0_8x16b); in ih264_weighted_pred_luma_sse42()
469 y_1_8x16b = _mm_adds_epi16(round_8x16b, y_1_8x16b); in ih264_weighted_pred_luma_sse42()
470 y_2_8x16b = _mm_adds_epi16(round_8x16b, y_2_8x16b); in ih264_weighted_pred_luma_sse42()
471 y_3_8x16b = _mm_adds_epi16(round_8x16b, y_3_8x16b); in ih264_weighted_pred_luma_sse42()
478 y_0_8x16b = _mm_adds_epi16(ofst_8x16b, y_0_8x16b); in ih264_weighted_pred_luma_sse42()
479 y_1_8x16b = _mm_adds_epi16(ofst_8x16b, y_1_8x16b); in ih264_weighted_pred_luma_sse42()
[all …]
/external/libvpx/libvpx/vp9/common/x86/
vp9_idct_intrin_sse2.c:88 in[0] = _mm_adds_epi16(in[0], final_rounding); in vp9_iht8x8_64_add_sse2()
89 in[1] = _mm_adds_epi16(in[1], final_rounding); in vp9_iht8x8_64_add_sse2()
90 in[2] = _mm_adds_epi16(in[2], final_rounding); in vp9_iht8x8_64_add_sse2()
91 in[3] = _mm_adds_epi16(in[3], final_rounding); in vp9_iht8x8_64_add_sse2()
92 in[4] = _mm_adds_epi16(in[4], final_rounding); in vp9_iht8x8_64_add_sse2()
93 in[5] = _mm_adds_epi16(in[5], final_rounding); in vp9_iht8x8_64_add_sse2()
94 in[6] = _mm_adds_epi16(in[6], final_rounding); in vp9_iht8x8_64_add_sse2()
95 in[7] = _mm_adds_epi16(in[7], final_rounding); in vp9_iht8x8_64_add_sse2()
141 in[0] = _mm_adds_epi16(in[0], final_rounding); in write_buffer_8x16()
142 in[1] = _mm_adds_epi16(in[1], final_rounding); in write_buffer_8x16()
[all …]
/external/libvpx/libvpx/vpx_dsp/x86/
convolve_ssse3.h:60 sum1 = _mm_adds_epi16(sum1, sum2); in convolve8_8_ssse3()
77 __m128i temp = _mm_adds_epi16(x0, x3); in convolve8_8_even_offset_ssse3()
78 temp = _mm_adds_epi16(temp, x1); in convolve8_8_even_offset_ssse3()
79 temp = _mm_adds_epi16(temp, x2); in convolve8_8_even_offset_ssse3()
80 temp = _mm_adds_epi16(temp, x4); in convolve8_8_even_offset_ssse3()
82 temp = _mm_adds_epi16(temp, k_64); in convolve8_8_even_offset_ssse3()
101 temp = _mm_adds_epi16(x0, x1); in convolve8_8_odd_offset_ssse3()
102 temp = _mm_adds_epi16(temp, x2); in convolve8_8_odd_offset_ssse3()
103 temp = _mm_adds_epi16(temp, x3); in convolve8_8_odd_offset_ssse3()
104 temp = _mm_adds_epi16(temp, x4); in convolve8_8_odd_offset_ssse3()
[all …]
highbd_loopfilter_sse2.c:159 filt = _mm_adds_epi16(filt, work_a); in vpx_highbd_lpf_horizontal_16_sse2()
160 filt = _mm_adds_epi16(filt, work_a); in vpx_highbd_lpf_horizontal_16_sse2()
161 filt = signed_char_clamp_bd_sse2(_mm_adds_epi16(filt, work_a), bd); in vpx_highbd_lpf_horizontal_16_sse2()
163 filter1 = signed_char_clamp_bd_sse2(_mm_adds_epi16(filt, t4), bd); in vpx_highbd_lpf_horizontal_16_sse2()
164 filter2 = signed_char_clamp_bd_sse2(_mm_adds_epi16(filt, t3), bd); in vpx_highbd_lpf_horizontal_16_sse2()
170 qs0 = _mm_adds_epi16( in vpx_highbd_lpf_horizontal_16_sse2()
172 ps0 = _mm_adds_epi16( in vpx_highbd_lpf_horizontal_16_sse2()
173 signed_char_clamp_bd_sse2(_mm_adds_epi16(ps0, filter2), bd), t80); in vpx_highbd_lpf_horizontal_16_sse2()
174 filt = _mm_adds_epi16(filter1, t1); in vpx_highbd_lpf_horizontal_16_sse2()
177 qs1 = _mm_adds_epi16(signed_char_clamp_bd_sse2(_mm_subs_epi16(qs1, filt), bd), in vpx_highbd_lpf_horizontal_16_sse2()
[all …]
vpx_subpixel_8t_intrin_ssse3.c:100 srcRegFilt1 = _mm_adds_epi16(srcRegFilt1, srcRegFilt2); in vpx_filter_block1d4_h8_intrin_ssse3()
251 dst_first = _mm_adds_epi16(tmp_0, tmp_1); in vpx_filter_block1d16_h4_ssse3()
262 dst_second = _mm_adds_epi16(tmp_0, tmp_1); in vpx_filter_block1d16_h4_ssse3()
350 res_reg_m1012_lo = _mm_adds_epi16(res_reg_m10_lo, res_reg_12_lo); in vpx_filter_block1d16_v4_ssse3()
351 res_reg_0123_lo = _mm_adds_epi16(res_reg_01_lo, res_reg_23_lo); in vpx_filter_block1d16_v4_ssse3()
361 res_reg_m1012_hi = _mm_adds_epi16(res_reg_m10_hi, res_reg_12_hi); in vpx_filter_block1d16_v4_ssse3()
362 res_reg_0123_hi = _mm_adds_epi16(res_reg_01_hi, res_reg_23_hi); in vpx_filter_block1d16_v4_ssse3()
436 dst_first = _mm_adds_epi16(tmp_0, tmp_1); in vpx_filter_block1d8_h4_ssse3()
515 res_reg_m1012 = _mm_adds_epi16(res_reg_m10, res_reg_12); in vpx_filter_block1d8_v4_ssse3()
516 res_reg_0123 = _mm_adds_epi16(res_reg_01, res_reg_23); in vpx_filter_block1d8_v4_ssse3()
vpx_subpixel_4t_intrin_sse2.c:202 res_reg_m1012_lo = _mm_adds_epi16(res_reg_m10_lo, res_reg_12_lo); in vpx_filter_block1d16_v4_sse2()
203 res_reg_0123_lo = _mm_adds_epi16(res_reg_01_lo, res_reg_23_lo); in vpx_filter_block1d16_v4_sse2()
224 res_reg_m1012_hi = _mm_adds_epi16(res_reg_m10_hi, res_reg_12_hi); in vpx_filter_block1d16_v4_sse2()
225 res_reg_0123_hi = _mm_adds_epi16(res_reg_01_hi, res_reg_23_hi); in vpx_filter_block1d16_v4_sse2()
402 res_reg_m1012_lo = _mm_adds_epi16(res_reg_m10_lo, res_reg_12_lo); in vpx_filter_block1d8_v4_sse2()
403 res_reg_0123_lo = _mm_adds_epi16(res_reg_01_lo, res_reg_23_lo); in vpx_filter_block1d8_v4_sse2()
580 res_reg_m1012_lo = _mm_adds_epi16(res_reg_m10_lo, res_reg_12_lo); in vpx_filter_block1d4_v4_sse2()
581 res_reg_0123_lo = _mm_adds_epi16(res_reg_01_lo, res_reg_23_lo); in vpx_filter_block1d4_v4_sse2()
/external/libaom/libaom/aom_dsp/x86/
aom_subpixel_8t_intrin_ssse3.c:98 srcRegFilt32b1_1 = _mm_adds_epi16(srcRegFilt32b1_1, addFilterReg32); in aom_filter_block1d4_h4_ssse3()
171 resReglo = _mm_adds_epi16(resReglo, addFilterReg32); in aom_filter_block1d4_v4_ssse3()
172 resReghi = _mm_adds_epi16(resReghi, addFilterReg32); in aom_filter_block1d4_v4_ssse3()
243 srcRegFilt1 = _mm_adds_epi16(srcRegFilt1, srcRegFilt4); in aom_filter_block1d4_h8_intrin_ssse3()
245 srcRegFilt1 = _mm_adds_epi16(srcRegFilt1, minReg); in aom_filter_block1d4_h8_intrin_ssse3()
246 srcRegFilt1 = _mm_adds_epi16(srcRegFilt1, srcRegFilt3); in aom_filter_block1d4_h8_intrin_ssse3()
247 srcRegFilt1 = _mm_adds_epi16(srcRegFilt1, addFilterReg64); in aom_filter_block1d4_h8_intrin_ssse3()
301 srcRegFilt32b1_1 = _mm_adds_epi16(srcRegFilt32b3, srcRegFilt32b2); in aom_filter_block1d8_h4_ssse3()
304 srcRegFilt32b1_1 = _mm_adds_epi16(srcRegFilt32b1_1, addFilterReg32); in aom_filter_block1d8_h4_ssse3()
373 resReg23_45 = _mm_adds_epi16(resReg23, resReg45); in aom_filter_block1d8_v4_ssse3()
[all …]
aom_subpixel_8t_intrin_sse2.c:91 srcRegFilt32b1_1 = _mm_adds_epi16(srcRegFilt32b1_1, addFilterReg32); in aom_filter_block1d16_h4_sse2()
92 srcRegFilt32b2_1 = _mm_adds_epi16(srcRegFilt32b2_1, addFilterReg32); in aom_filter_block1d16_h4_sse2()
192 resReg23_45_lo = _mm_adds_epi16(resReg23_lo, resReg45_lo); in aom_filter_block1d16_v4_sse2()
193 resReg34_56_lo = _mm_adds_epi16(resReg34_lo, resReg56_lo); in aom_filter_block1d16_v4_sse2()
218 resReg23_45_hi = _mm_adds_epi16(resReg23_hi, resReg45_hi); in aom_filter_block1d16_v4_sse2()
219 resReg34_56_hi = _mm_adds_epi16(resReg34_hi, resReg56_hi); in aom_filter_block1d16_v4_sse2()
222 resReg23_45_lo = _mm_adds_epi16(resReg23_45_lo, addFilterReg32); in aom_filter_block1d16_v4_sse2()
223 resReg34_56_lo = _mm_adds_epi16(resReg34_56_lo, addFilterReg32); in aom_filter_block1d16_v4_sse2()
224 resReg23_45_hi = _mm_adds_epi16(resReg23_45_hi, addFilterReg32); in aom_filter_block1d16_v4_sse2()
225 resReg34_56_hi = _mm_adds_epi16(resReg34_56_hi, addFilterReg32); in aom_filter_block1d16_v4_sse2()
[all …]
aom_subpixel_8t_intrin_avx2.c:146 _mm_adds_epi16(srcRegFilt1_1, _mm256_castsi256_si128(addFilterReg32)); in aom_filter_block1d4_h4_avx2()
247 srcRegFilt1_1 = _mm_adds_epi16(srcRegFilt1_1, srcRegFilt2); in aom_filter_block1d4_h8_avx2()
251 _mm_adds_epi16(srcRegFilt1_1, _mm256_castsi256_si128(addFilterReg32)); in aom_filter_block1d4_h8_avx2()
342 srcRegFilt1_1 = _mm_adds_epi16(srcRegFilt2, srcRegFilt3); in aom_filter_block1d8_h4_avx2()
346 _mm_adds_epi16(srcRegFilt1_1, _mm256_castsi256_si128(addFilterReg32)); in aom_filter_block1d8_h4_avx2()
458 srcRegFilt1_1 = _mm_adds_epi16(srcRegFilt1_1, srcRegFilt2); in aom_filter_block1d8_h8_avx2()
472 _mm_adds_epi16(srcRegFilt1_1, _mm_adds_epi16(srcRegFilt3, srcRegFilt2)); in aom_filter_block1d8_h8_avx2()
476 _mm_adds_epi16(srcRegFilt1_1, _mm256_castsi256_si128(addFilterReg32)); in aom_filter_block1d8_h8_avx2()
734 srcRegFilt1_1 = _mm_adds_epi16(srcRegFilt1_1, srcRegFilt2); in aom_filter_block1d16_h8_avx2()
748 _mm_adds_epi16(srcRegFilt1_1, _mm_adds_epi16(srcRegFilt3, srcRegFilt2)); in aom_filter_block1d16_h8_avx2()
[all …]
/external/libhevc/common/x86/
ihevc_weighted_pred_ssse3_intr.c:1324 lvl_shift1_8x16b = _mm_adds_epi16(lvl_shift1_8x16b, lvl_shift2_8x16b); in ihevc_weighted_pred_bi_default_ssse3()
1325 lvl_shift1_8x16b = _mm_adds_epi16(lvl_shift1_8x16b, const_temp_8x16b); in ihevc_weighted_pred_bi_default_ssse3()
1362 src_temp1_8x16b = _mm_adds_epi16(src_temp1_8x16b, src_temp2_8x16b); in ihevc_weighted_pred_bi_default_ssse3()
1363 src_temp3_8x16b = _mm_adds_epi16(src_temp3_8x16b, src_temp4_8x16b); in ihevc_weighted_pred_bi_default_ssse3()
1364 src_temp5_8x16b = _mm_adds_epi16(src_temp5_8x16b, src_temp6_8x16b); in ihevc_weighted_pred_bi_default_ssse3()
1365 src_temp7_8x16b = _mm_adds_epi16(src_temp7_8x16b, src_temp8_8x16b); in ihevc_weighted_pred_bi_default_ssse3()
1373 src_temp1_8x16b = _mm_adds_epi16(src_temp1_8x16b, lvl_shift1_8x16b); in ihevc_weighted_pred_bi_default_ssse3()
1374 src_temp3_8x16b = _mm_adds_epi16(src_temp3_8x16b, lvl_shift1_8x16b); in ihevc_weighted_pred_bi_default_ssse3()
1375 src_temp5_8x16b = _mm_adds_epi16(src_temp5_8x16b, lvl_shift1_8x16b); in ihevc_weighted_pred_bi_default_ssse3()
1376 src_temp7_8x16b = _mm_adds_epi16(src_temp7_8x16b, lvl_shift1_8x16b); in ihevc_weighted_pred_bi_default_ssse3()
[all …]
ihevc_weighted_pred_sse42_intr.c:1810 lvl_shift1_8x16b = _mm_adds_epi16(lvl_shift1_8x16b, lvl_shift2_8x16b); in ihevc_weighted_pred_bi_default_sse42()
1811 lvl_shift1_8x16b = _mm_adds_epi16(lvl_shift1_8x16b, const_temp_8x16b); in ihevc_weighted_pred_bi_default_sse42()
1848 src_temp1_8x16b = _mm_adds_epi16(src_temp1_8x16b, src_temp2_8x16b); in ihevc_weighted_pred_bi_default_sse42()
1849 src_temp3_8x16b = _mm_adds_epi16(src_temp3_8x16b, src_temp4_8x16b); in ihevc_weighted_pred_bi_default_sse42()
1850 src_temp5_8x16b = _mm_adds_epi16(src_temp5_8x16b, src_temp6_8x16b); in ihevc_weighted_pred_bi_default_sse42()
1851 src_temp7_8x16b = _mm_adds_epi16(src_temp7_8x16b, src_temp8_8x16b); in ihevc_weighted_pred_bi_default_sse42()
1859 src_temp1_8x16b = _mm_adds_epi16(src_temp1_8x16b, lvl_shift1_8x16b); in ihevc_weighted_pred_bi_default_sse42()
1860 src_temp3_8x16b = _mm_adds_epi16(src_temp3_8x16b, lvl_shift1_8x16b); in ihevc_weighted_pred_bi_default_sse42()
1861 src_temp5_8x16b = _mm_adds_epi16(src_temp5_8x16b, lvl_shift1_8x16b); in ihevc_weighted_pred_bi_default_sse42()
1862 src_temp7_8x16b = _mm_adds_epi16(src_temp7_8x16b, lvl_shift1_8x16b); in ihevc_weighted_pred_bi_default_sse42()
[all …]
/external/libgav1/libgav1/src/dsp/x86/
inverse_transform_sse4.cc:191 y = _mm_adds_epi16(*b, *a); in HadamardRotation()
194 x = _mm_adds_epi16(*a, *b); in HadamardRotation()
1652 const __m128i b = _mm_adds_epi16(a, v_src); in Identity4_SSE4_1()
1703 const __m128i v_dst_i = _mm_adds_epi16(v_src_mult, v_src); in Identity4ColumnStoreToFrame()
1704 const __m128i a = _mm_adds_epi16(v_dst_i, v_eight); in Identity4ColumnStoreToFrame()
1707 const __m128i d = _mm_adds_epi16(c, b); in Identity4ColumnStoreToFrame()
1721 const __m128i v_dst_i = _mm_adds_epi16(v_src_mult, v_src); in Identity4ColumnStoreToFrame()
1722 const __m128i a = _mm_adds_epi16(v_dst_i, v_eight); in Identity4ColumnStoreToFrame()
1725 const __m128i d = _mm_adds_epi16(c, b); in Identity4ColumnStoreToFrame()
1752 const __m128i v_dst_row = _mm_adds_epi16(v_src_mult, v_src); in Identity4RowColumnStoreToFrame()
[all …]
/external/fec/
viterbi615_sse2.c:150 m0 = _mm_adds_epi16(vp->old_metrics->v[i],metric); in update_viterbi615_blk_sse2()
151 m3 = _mm_adds_epi16(vp->old_metrics->v[1024+i],metric); in update_viterbi615_blk_sse2()
152 m1 = _mm_adds_epi16(vp->old_metrics->v[1024+i],m_metric); in update_viterbi615_blk_sse2()
153 m2 = _mm_adds_epi16(vp->old_metrics->v[i],m_metric); in update_viterbi615_blk_sse2()
viterbi39_sse2.c:147 m0 = _mm_adds_epi16(vp->old_metrics->v[i],metric); in update_viterbi39_blk_sse2()
148 m3 = _mm_adds_epi16(vp->old_metrics->v[16+i],metric); in update_viterbi39_blk_sse2()
149 m1 = _mm_adds_epi16(vp->old_metrics->v[16+i],m_metric); in update_viterbi39_blk_sse2()
150 m2 = _mm_adds_epi16(vp->old_metrics->v[i],m_metric); in update_viterbi39_blk_sse2()
/external/libvpx/libvpx/vp9/encoder/x86/
vp9_quantize_sse2.c:69 qcoeff0 = _mm_adds_epi16(qcoeff0, round); in vp9_quantize_fp_sse2()
71 qcoeff1 = _mm_adds_epi16(qcoeff1, round); in vp9_quantize_fp_sse2()
140 qcoeff0 = _mm_adds_epi16(qcoeff0, round); in vp9_quantize_fp_sse2()
141 qcoeff1 = _mm_adds_epi16(qcoeff1, round); in vp9_quantize_fp_sse2()
/external/libaom/libaom/av1/common/x86/
av1_inv_txfm_ssse3.h:41 in0 = _mm_adds_epi16(_in0, _in1); \
50 in0 = _mm_adds_epi16(_in0, _in1); \
57 out0 = _mm_adds_epi16(_in0, _in1); \
/external/XNNPACK/src/requantization/
fp32-sse2.c:73 const __m128i xy_packed = _mm_adds_epi16(_mm_packs_epi32(x_rounded, y_rounded), vzero_point); in xnn_requantize_fp32__sse2()
74 const __m128i zw_packed = _mm_adds_epi16(_mm_packs_epi32(z_rounded, w_rounded), vzero_point); in xnn_requantize_fp32__sse2()
gemmlowp-ssse3.c:62 const __m128i xy_packed = _mm_adds_epi16(_mm_packs_epi32(x_scaled, y_scaled), vzero_point); in xnn_requantize_gemmlowp__ssse3()
63 const __m128i zw_packed = _mm_adds_epi16(_mm_packs_epi32(z_scaled, w_scaled), vzero_point); in xnn_requantize_gemmlowp__ssse3()
gemmlowp-sse4.c:62 const __m128i xy_packed = _mm_adds_epi16(_mm_packs_epi32(x_scaled, y_scaled), vzero_point); in xnn_requantize_gemmlowp__sse4()
63 const __m128i zw_packed = _mm_adds_epi16(_mm_packs_epi32(z_scaled, w_scaled), vzero_point); in xnn_requantize_gemmlowp__sse4()
gemmlowp-sse2.c:62 const __m128i xy_packed = _mm_adds_epi16(_mm_packs_epi32(x_scaled, y_scaled), vzero_point); in xnn_requantize_gemmlowp__sse2()
63 const __m128i zw_packed = _mm_adds_epi16(_mm_packs_epi32(z_scaled, w_scaled), vzero_point); in xnn_requantize_gemmlowp__sse2()
precise-sse4.c:92 const __m128i xy_packed = _mm_adds_epi16(_mm_packs_epi32(x_scaled, y_scaled), vzero_point); in xnn_requantize_precise__sse4()
93 const __m128i zw_packed = _mm_adds_epi16(_mm_packs_epi32(z_scaled, w_scaled), vzero_point); in xnn_requantize_precise__sse4()
q31-sse4.c:108 const __m128i xy_packed = _mm_adds_epi16(_mm_packs_epi32(x_scaled, y_scaled), vzero_point); in xnn_requantize_q31__sse4()
109 const __m128i zw_packed = _mm_adds_epi16(_mm_packs_epi32(z_scaled, w_scaled), vzero_point); in xnn_requantize_q31__sse4()
/external/mesa3d/src/intel/compiler/
brw_fs_bank_conflicts.cpp:121 _mm_adds_epi16(v.v[0], w.v[0]), in adds()
122 _mm_adds_epi16(v.v[1], w.v[1]) in adds()
159 const __m128i v8 = _mm_adds_epi16(v.v[0], v.v[1]); in sums()
160 const __m128i v4 = _mm_adds_epi16(v8, _mm_shuffle_epi32(v8, 0x4e)); in sums()
161 const __m128i v2 = _mm_adds_epi16(v4, _mm_shuffle_epi32(v4, 0xb1)); in sums()
162 const __m128i v1 = _mm_adds_epi16(v2, _mm_shufflelo_epi16(v2, 0xb1)); in sums()

Pages: 1 2 3