Searched refs: res_odd (Results 1 – 18 of 18) sorted by relevance

/external/libaom/libaom/av1/common/x86/
highbd_wiener_convolve_ssse3.c
99 __m128i res_odd = _mm_add_epi32(_mm_add_epi32(res_1, res_5), in av1_highbd_wiener_convolve_add_src_ssse3() local
101 res_odd = _mm_srai_epi32(_mm_add_epi32(res_odd, round_const), in av1_highbd_wiener_convolve_add_src_ssse3()
107 __m128i res = _mm_packs_epi32(res_even, res_odd); in av1_highbd_wiener_convolve_add_src_ssse3()
181 const __m128i res_odd = _mm_add_epi32(_mm_add_epi32(res_1, res_3), in av1_highbd_wiener_convolve_add_src_ssse3() local
185 const __m128i res_lo = _mm_unpacklo_epi32(res_even, res_odd); in av1_highbd_wiener_convolve_add_src_ssse3()
186 const __m128i res_hi = _mm_unpackhi_epi32(res_even, res_odd); in av1_highbd_wiener_convolve_add_src_ssse3()
wiener_convolve_sse2.c
97 __m128i res_odd = _mm_add_epi32(_mm_add_epi32(res_1, res_5), in av1_wiener_convolve_add_src_sse2() local
99 res_odd = _mm_srai_epi32(_mm_add_epi32(res_odd, round_const), in av1_wiener_convolve_add_src_sse2()
103 __m128i res = _mm_packs_epi32(res_even, res_odd); in av1_wiener_convolve_add_src_sse2()
179 const __m128i res_odd = _mm_add_epi32(_mm_add_epi32(res_1, res_3), in av1_wiener_convolve_add_src_sse2() local
183 const __m128i res_lo = _mm_unpacklo_epi32(res_even, res_odd); in av1_wiener_convolve_add_src_sse2()
184 const __m128i res_hi = _mm_unpackhi_epi32(res_even, res_odd); in av1_wiener_convolve_add_src_sse2()
jnt_convolve_ssse3.c
111 __m128i res_odd = _mm_add_epi32(_mm_add_epi32(res_1, res_5), in av1_dist_wtd_convolve_2d_ssse3() local
113 res_odd = in av1_dist_wtd_convolve_2d_ssse3()
114 _mm_sra_epi32(_mm_add_epi32(res_odd, round_const), round_shift); in av1_dist_wtd_convolve_2d_ssse3()
117 __m128i res = _mm_packs_epi32(res_even, res_odd); in av1_dist_wtd_convolve_2d_ssse3()
192 const __m128i res_odd = _mm_add_epi32(_mm_add_epi32(res_1, res_3), in av1_dist_wtd_convolve_2d_ssse3() local
196 const __m128i res_lo = _mm_unpacklo_epi32(res_even, res_odd); in av1_dist_wtd_convolve_2d_ssse3()
197 const __m128i res_hi = _mm_unpackhi_epi32(res_even, res_odd); in av1_dist_wtd_convolve_2d_ssse3()
highbd_wiener_convolve_avx2.c
123 const __m256i res_odd = _mm256_srai_epi32( in av1_highbd_wiener_convolve_add_src_avx2() local
130 const __m256i res = _mm256_packs_epi32(res_even, res_odd); in av1_highbd_wiener_convolve_add_src_avx2()
215 const __m256i res_odd = _mm256_add_epi32( in av1_highbd_wiener_convolve_add_src_avx2() local
225 const __m256i res_lo = _mm256_unpacklo_epi32(res_even, res_odd); in av1_highbd_wiener_convolve_add_src_avx2()
226 const __m256i res_hi = _mm256_unpackhi_epi32(res_even, res_odd); in av1_highbd_wiener_convolve_add_src_avx2()
wiener_convolve_avx2.c
131 const __m256i res_odd = _mm256_srai_epi32( in av1_wiener_convolve_add_src_avx2() local
138 const __m256i res = _mm256_packs_epi32(res_even, res_odd); in av1_wiener_convolve_add_src_avx2()
221 const __m256i res_odd = _mm256_add_epi32( in av1_wiener_convolve_add_src_avx2() local
231 const __m256i res_lo = _mm256_unpacklo_epi32(res_even, res_odd); in av1_wiener_convolve_add_src_avx2()
232 const __m256i res_hi = _mm256_unpackhi_epi32(res_even, res_odd); in av1_wiener_convolve_add_src_avx2()
highbd_convolve_2d_ssse3.c
85 __m128i res_odd = convolve(s, coeffs_x); in av1_highbd_convolve_2d_sr_ssse3() local
86 res_odd = in av1_highbd_convolve_2d_sr_ssse3()
87 _mm_sra_epi32(_mm_add_epi32(res_odd, round_const_x), round_shift_x); in av1_highbd_convolve_2d_sr_ssse3()
90 __m128i res_odd1 = _mm_packs_epi32(res_odd, res_odd); in av1_highbd_convolve_2d_sr_ssse3()
highbd_convolve_2d_sse4.c
263 __m128i res_odd = _mm_add_epi32(_mm_add_epi32(res_1, res_5), in av1_highbd_dist_wtd_convolve_2d_sse4_1() local
265 res_odd = in av1_highbd_dist_wtd_convolve_2d_sse4_1()
266 _mm_sra_epi32(_mm_add_epi32(res_odd, round_const), round_shift); in av1_highbd_dist_wtd_convolve_2d_sse4_1()
269 __m128i res = _mm_packs_epi32(res_even, res_odd); in av1_highbd_dist_wtd_convolve_2d_sse4_1()
344 const __m128i res_odd = _mm_add_epi32(_mm_add_epi32(res_1, res_3), in av1_highbd_dist_wtd_convolve_2d_sse4_1() local
348 const __m128i res_lo = _mm_unpacklo_epi32(res_even, res_odd); in av1_highbd_dist_wtd_convolve_2d_sse4_1()
349 const __m128i res_hi = _mm_unpackhi_epi32(res_even, res_odd); in av1_highbd_dist_wtd_convolve_2d_sse4_1()
highbd_jnt_convolve_sse4.c
328 __m128i res_odd = convolve(s, coeffs_x); in av1_highbd_dist_wtd_convolve_x_sse4_1() local
329 res_odd = in av1_highbd_dist_wtd_convolve_x_sse4_1()
330 _mm_sra_epi32(_mm_add_epi32(res_odd, round_const_x), round_shift_x); in av1_highbd_dist_wtd_convolve_x_sse4_1()
333 res_odd = _mm_sll_epi32(res_odd, round_shift_bits); in av1_highbd_dist_wtd_convolve_x_sse4_1()
335 __m128i res1 = _mm_unpacklo_epi32(res_even, res_odd); in av1_highbd_dist_wtd_convolve_x_sse4_1()
356 __m128i res2 = _mm_unpackhi_epi32(res_even, res_odd); in av1_highbd_dist_wtd_convolve_x_sse4_1()
jnt_convolve_sse2.c
116 const __m128i res_odd = convolve_lo_x(s, coeffs); in av1_dist_wtd_convolve_x_sse2() local
119 const __m128i res_lo = _mm_unpacklo_epi32(res_even, res_odd); in av1_dist_wtd_convolve_x_sse2()
120 const __m128i res_hi = _mm_unpackhi_epi32(res_even, res_odd); in av1_dist_wtd_convolve_x_sse2()
495 __m128i res_odd = _mm_add_epi32(_mm_add_epi32(res_1, res_5), in av1_dist_wtd_convolve_2d_sse2() local
497 res_odd = in av1_dist_wtd_convolve_2d_sse2()
498 _mm_sra_epi32(_mm_add_epi32(res_odd, round_const), round_shift); in av1_dist_wtd_convolve_2d_sse2()
501 __m128i res = _mm_packs_epi32(res_even, res_odd); in av1_dist_wtd_convolve_2d_sse2()
576 const __m128i res_odd = _mm_add_epi32(_mm_add_epi32(res_1, res_3), in av1_dist_wtd_convolve_2d_sse2() local
580 const __m128i res_lo = _mm_unpacklo_epi32(res_even, res_odd); in av1_dist_wtd_convolve_2d_sse2()
581 const __m128i res_hi = _mm_unpackhi_epi32(res_even, res_odd); in av1_dist_wtd_convolve_2d_sse2()
convolve_2d_sse2.c
99 __m128i res_odd = _mm_add_epi32(_mm_add_epi32(res_1, res_5), in av1_convolve_2d_sr_sse2() local
101 res_odd = in av1_convolve_2d_sr_sse2()
102 _mm_sra_epi32(_mm_add_epi32(res_odd, round_const), round_shift); in av1_convolve_2d_sr_sse2()
105 __m128i res = _mm_packs_epi32(res_even, res_odd); in av1_convolve_2d_sr_sse2()
184 const __m128i res_odd = _mm_add_epi32(_mm_add_epi32(res_1, res_3), in av1_convolve_2d_sr_sse2() local
188 const __m128i res_lo = _mm_unpacklo_epi32(res_even, res_odd); in av1_convolve_2d_sr_sse2()
189 const __m128i res_hi = _mm_unpackhi_epi32(res_even, res_odd); in av1_convolve_2d_sr_sse2()
highbd_jnt_convolve_avx2.c
313 __m256i res_odd = convolve(s, coeffs_x); in av1_highbd_dist_wtd_convolve_2d_avx2() local
314 res_odd = _mm256_sra_epi32(_mm256_add_epi32(res_odd, round_const_x), in av1_highbd_dist_wtd_convolve_2d_avx2()
318 __m256i res_odd1 = _mm256_packs_epi32(res_odd, res_odd); in av1_highbd_dist_wtd_convolve_2d_avx2()
536 __m256i res_odd = convolve(s, coeffs_x); in av1_highbd_dist_wtd_convolve_x_avx2() local
537 res_odd = _mm256_sra_epi32(_mm256_add_epi32(res_odd, round_const_x), in av1_highbd_dist_wtd_convolve_x_avx2()
541 res_odd = _mm256_sll_epi32(res_odd, round_shift_bits); in av1_highbd_dist_wtd_convolve_x_avx2()
543 __m256i res1 = _mm256_unpacklo_epi32(res_even, res_odd); in av1_highbd_dist_wtd_convolve_x_avx2()
585 __m256i res2 = _mm256_unpackhi_epi32(res_even, res_odd); in av1_highbd_dist_wtd_convolve_x_avx2()
highbd_convolve_2d_avx2.c
94 __m256i res_odd = convolve(s, coeffs_x); in av1_highbd_convolve_2d_sr_avx2() local
95 res_odd = _mm256_sra_epi32(_mm256_add_epi32(res_odd, round_const_x), in av1_highbd_convolve_2d_sr_avx2()
99 __m256i res_odd1 = _mm256_packs_epi32(res_odd, res_odd); in av1_highbd_convolve_2d_sr_avx2()
highbd_warp_plane_sse4.c
138 __m128i res_odd = in highbd_filter_src_pixels() local
140 res_odd = _mm_sra_epi32(_mm_add_epi32(res_odd, round_const), in highbd_filter_src_pixels()
146 tmp[k + 7] = _mm_packs_epi32(res_even, res_odd); in highbd_filter_src_pixels()
521 const __m128i res_odd = _mm_add_epi32(_mm_add_epi32(res_1, res_3), in av1_highbd_warp_affine_sse4_1() local
525 __m128i res_lo = _mm_unpacklo_epi32(res_even, res_odd); in av1_highbd_warp_affine_sse4_1()
526 __m128i res_hi = _mm_unpackhi_epi32(res_even, res_odd); in av1_highbd_warp_affine_sse4_1()
convolve_sse2.c
316 const __m128i res_odd = convolve_lo_x(s, coeffs); in av1_convolve_x_sr_sse2() local
319 const __m128i res_lo = _mm_unpacklo_epi32(res_even, res_odd); in av1_convolve_x_sr_sse2()
320 const __m128i res_hi = _mm_unpackhi_epi32(res_even, res_odd); in av1_convolve_x_sr_sse2()
warp_plane_sse4.c
264 const __m128i res_odd = _mm_add_epi16(res_13, res_57); in filter_src_pixels() local
266 _mm_add_epi16(_mm_add_epi16(res_even, res_odd), round_const); in filter_src_pixels()
552 const __m128i res_odd = in filter_src_pixels_vertical() local
556 *res_lo = _mm_unpacklo_epi32(res_even, res_odd); in filter_src_pixels_vertical()
557 *res_hi = _mm_unpackhi_epi32(res_even, res_odd); in filter_src_pixels_vertical()
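
The hits in this directory all share one idiom: 32-bit convolution sums for even- and odd-numbered outputs are accumulated in separate registers (res_even, res_odd), rounded, and then either re-interleaved with _mm_unpacklo_epi32/_mm_unpackhi_epi32 or merged with _mm_packs_epi32. A minimal standalone sketch of that idiom, not taken from libaom (the input values and the 3-bit rounding shift are invented for illustration):

#include <emmintrin.h> /* SSE2 */
#include <stdint.h>
#include <stdio.h>

int main(void) {
  /* Hypothetical 32-bit accumulators: res_even holds the sums for output
     pixels 0,2,4,6 and res_odd the sums for pixels 1,3,5,7. */
  const __m128i res_even = _mm_setr_epi32(100, 102, 104, 106);
  const __m128i res_odd = _mm_setr_epi32(101, 103, 105, 107);

  /* Re-interleave so the lanes come back in pixel order 0..7, as the
     unpacklo/unpackhi pairs in the hits above do. */
  const __m128i res_lo = _mm_unpacklo_epi32(res_even, res_odd); /* 100 101 102 103 */
  const __m128i res_hi = _mm_unpackhi_epi32(res_even, res_odd); /* 104 105 106 107 */

  /* Round (add 2^(bits-1), arithmetic shift right) and saturate-pack to
     16 bits, mirroring the round_const / round_shift / _mm_packs_epi32
     steps in the listing. The 3-bit shift is an arbitrary example. */
  const __m128i round_const = _mm_set1_epi32(1 << 2);
  const __m128i lo = _mm_srai_epi32(_mm_add_epi32(res_lo, round_const), 3);
  const __m128i hi = _mm_srai_epi32(_mm_add_epi32(res_hi, round_const), 3);
  const __m128i res = _mm_packs_epi32(lo, hi);

  int16_t out[8];
  _mm_storeu_si128((__m128i *)out, res);
  for (int i = 0; i < 8; ++i) printf("%d ", out[i]); /* prints 13 eight times */
  printf("\n");
  return 0;
}
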
/external/libaom/libaom/aom_dsp/x86/
highbd_convolve_ssse3.c
225 __m128i res_odd = convolve(s, coeffs_x); in av1_highbd_convolve_x_sr_ssse3() local
226 res_odd = in av1_highbd_convolve_x_sr_ssse3()
227 _mm_sra_epi32(_mm_add_epi32(res_odd, round_const_x), round_shift_x); in av1_highbd_convolve_x_sr_ssse3()
231 res_odd = _mm_sra_epi32(_mm_add_epi32(res_odd, round_const_bits), in av1_highbd_convolve_x_sr_ssse3()
235 __m128i res_odd1 = _mm_packs_epi32(res_odd, res_odd); in av1_highbd_convolve_x_sr_ssse3()
highbd_convolve_avx2.c
325 __m256i res_odd = convolve(s, coeffs_x); in av1_highbd_convolve_x_sr_avx2() local
326 res_odd = _mm256_sra_epi32(_mm256_add_epi32(res_odd, round_const_x), in av1_highbd_convolve_x_sr_avx2()
331 res_odd = _mm256_sra_epi32(_mm256_add_epi32(res_odd, round_const_bits), in av1_highbd_convolve_x_sr_avx2()
335 __m256i res_odd1 = _mm256_packs_epi32(res_odd, res_odd); in av1_highbd_convolve_x_sr_avx2()
683 __m256i res_odd = convolve_4tap(s, ff); in aom_highbd_filter_block1d8_h4_avx2() local
684 res_odd = _mm256_srai_epi32(_mm256_add_epi32(res_odd, rounding), in aom_highbd_filter_block1d8_h4_avx2()
687 __m256i res = _mm256_packs_epi32(res_even, res_odd); in aom_highbd_filter_block1d8_h4_avx2()
/external/libaom/libaom/av1/common/arm/
warp_plane_neon.c
315 int32x4_t res_even, res_odd, im_res_0, im_res_1; in vertical_filter_neon() local
462 res_odd = vaddq_s32(im_res_0, im_res_1); in vertical_filter_neon()
465 c0 = vtrnq_s32(res_even, res_odd); in vertical_filter_neon()
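
The single NEON hit (warp_plane_neon.c) keeps the same even/odd bookkeeping but interleaves with the vtrn/vzip family instead of unpacklo/unpackhi. Below is a rough analogue of that interleave-and-narrow step, again not taken from libaom; it uses vzipq_s32 for a plain pixel-order interleave, while the warp code itself uses vtrnq_s32, whose lane order differs.

#include <arm_neon.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
  /* Hypothetical 32-bit sums for even and odd output positions. */
  const int32_t even_vals[4] = {100, 102, 104, 106};
  const int32_t odd_vals[4] = {101, 103, 105, 107};
  const int32x4_t res_even = vld1q_s32(even_vals);
  const int32x4_t res_odd = vld1q_s32(odd_vals);

  /* Interleave back into pixel order: val[0] = 100..103, val[1] = 104..107. */
  const int32x4x2_t zipped = vzipq_s32(res_even, res_odd);

  /* Rounding shift right by 3, then saturating narrow to 16 bits: the NEON
     counterpart of the add-round-const / srai / packs sequence above. */
  const int16x4_t lo = vqmovn_s32(vrshrq_n_s32(zipped.val[0], 3));
  const int16x4_t hi = vqmovn_s32(vrshrq_n_s32(zipped.val[1], 3));
  const int16x8_t res = vcombine_s16(lo, hi);

  int16_t out[8];
  vst1q_s16(out, res);
  for (int i = 0; i < 8; ++i) printf("%d ", out[i]); /* prints 13 eight times */
  printf("\n");
  return 0;
}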