Searched refs: _mm256_setzero_si256 (Results 1 – 25 of 70), sorted by relevance

/external/libvpx/libvpx/vpx_dsp/x86/
sad4d_avx2.c
35 sums[0] = _mm256_setzero_si256(); in vpx_sad32x32x4d_avx2()
36 sums[1] = _mm256_setzero_si256(); in vpx_sad32x32x4d_avx2()
37 sums[2] = _mm256_setzero_si256(); in vpx_sad32x32x4d_avx2()
38 sums[3] = _mm256_setzero_si256(); in vpx_sad32x32x4d_avx2()
78 sums[0] = _mm256_setzero_si256(); in vpx_sad32x32x8_avx2()
79 sums[1] = _mm256_setzero_si256(); in vpx_sad32x32x8_avx2()
80 sums[2] = _mm256_setzero_si256(); in vpx_sad32x32x8_avx2()
81 sums[3] = _mm256_setzero_si256(); in vpx_sad32x32x8_avx2()
82 sums[4] = _mm256_setzero_si256(); in vpx_sad32x32x8_avx2()
83 sums[5] = _mm256_setzero_si256(); in vpx_sad32x32x8_avx2()
[all …]
variance_avx2.c
127 *vsum = _mm256_setzero_si256(); in variance16_avx2()
128 *vsse = _mm256_setzero_si256(); in variance16_avx2()
142 *vsum = _mm256_setzero_si256(); in variance32_avx2()
143 *vsse = _mm256_setzero_si256(); in variance32_avx2()
157 *vsum = _mm256_setzero_si256(); in variance64_avx2()
230 const __m256i zero_reg = _mm256_setzero_si256(); in spv32_x0_y0()
259 const __m256i zero_reg = _mm256_setzero_si256(); in spv32_half_zero()
306 const __m256i zero_reg = _mm256_setzero_si256(); in spv32_x4_y4()
345 const __m256i zero_reg = _mm256_setzero_si256(); in spv32_bilin_zero()
396 const __m256i zero_reg = _mm256_setzero_si256(); in spv32_x4_yb()
[all …]
sad_avx2.c
19 __m256i sum_sad = _mm256_setzero_si256(); \
47 __m256i sum_sad = _mm256_setzero_si256(); \
97 __m256i sum_sad = _mm256_setzero_si256(); \
131 __m256i sum_sad = _mm256_setzero_si256(); \
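
A common idiom ties all of the libvpx kernels above together: clear an accumulator register with _mm256_setzero_si256() (a single vpxor, no memory access), add per-row SADs into it, and reduce horizontally once at the end. A minimal sketch of that idiom, assuming AVX2 and a hypothetical helper name (not the libvpx code itself):

#include <immintrin.h>
#include <stdint.h>

/* Sketch: zero the accumulator, accumulate per-row SADs with
 * _mm256_sad_epu8 (vpsadbw), reduce once at the end. Hypothetical
 * helper, not the actual libvpx kernel. Compile with -mavx2. */
static uint32_t sad32xh_sketch(const uint8_t *src, int src_stride,
                               const uint8_t *ref, int ref_stride, int h) {
  __m256i sum_sad = _mm256_setzero_si256(); /* the pattern seen above */
  for (int i = 0; i < h; ++i) {
    const __m256i s = _mm256_loadu_si256((const __m256i *)src);
    const __m256i r = _mm256_loadu_si256((const __m256i *)ref);
    /* vpsadbw yields four 64-bit partial sums of |s - r| over bytes */
    sum_sad = _mm256_add_epi64(sum_sad, _mm256_sad_epu8(s, r));
    src += src_stride;
    ref += ref_stride;
  }
  /* reduce the four 64-bit partial sums to one scalar */
  const __m128i lo = _mm256_castsi256_si128(sum_sad);
  const __m128i hi = _mm256_extracti128_si256(sum_sad, 1);
  const __m128i s2 = _mm_add_epi64(lo, hi);
  return (uint32_t)(_mm_cvtsi128_si64(s2) + _mm_extract_epi64(s2, 1));
}

The variance kernels in variance_avx2.c follow the same zero-then-accumulate shape, clearing both vsum and vsse before their inner loops.
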
/external/libaom/libaom/aom_dsp/x86/
blk_sse_sum_avx2.c
52 regx_sum = _mm256_setzero_si256(); in sse_sum_wd4_avx2()
54 sum_buffer = _mm256_setzero_si256(); in sse_sum_wd4_avx2()
76 temp_buffer1 = _mm256_unpacklo_epi32(sse_buffer, _mm256_setzero_si256()); in sse_sum_wd4_avx2()
77 temp_buffer2 = _mm256_unpackhi_epi32(sse_buffer, _mm256_setzero_si256()); in sse_sum_wd4_avx2()
92 regx_sum = _mm256_setzero_si256(); in sse_sum_wd8_avx2()
94 sum_buffer = _mm256_setzero_si256(); in sse_sum_wd8_avx2()
111 temp_buffer1 = _mm256_unpacklo_epi32(sse_buffer, _mm256_setzero_si256()); in sse_sum_wd8_avx2()
112 temp_buffer2 = _mm256_unpackhi_epi32(sse_buffer, _mm256_setzero_si256()); in sse_sum_wd8_avx2()
127 regx_sum = _mm256_setzero_si256(); in sse_sum_wd16_avx2()
129 sum_buffer = _mm256_setzero_si256(); in sse_sum_wd16_avx2()
[all …]
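
blk_sse_sum_avx2.c also shows a second role for the zero register: interleaving 32-bit lanes with zero via _mm256_unpacklo_epi32/_mm256_unpackhi_epi32 zero-extends them to 64-bit lanes before accumulation. The same widening trick appears below in sse_avx2.c and, on 16-bit lanes, in cfl_avx2.c's _mm256_addl_epi16(). A sketch with a hypothetical helper name:

#include <immintrin.h>

/* Interleave each unsigned 32-bit lane with zero so it becomes a
 * 64-bit lane, then accumulate in 64 bits. Mirrors the unpacklo/hi
 * pattern above; hypothetical helper, requires AVX2. */
static __m256i add_u32_as_u64(__m256i acc, __m256i v32) {
  const __m256i zero = _mm256_setzero_si256();
  const __m256i lo = _mm256_unpacklo_epi32(v32, zero); /* zero-extend */
  const __m256i hi = _mm256_unpackhi_epi32(v32, zero);
  return _mm256_add_epi64(acc, _mm256_add_epi64(lo, hi));
}
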
sad4d_avx2.c
29 sum_ref0 = _mm256_setzero_si256(); in aom_sadMxNx4d_avx2()
30 sum_ref2 = _mm256_setzero_si256(); in aom_sadMxNx4d_avx2()
31 sum_ref1 = _mm256_setzero_si256(); in aom_sadMxNx4d_avx2()
32 sum_ref3 = _mm256_setzero_si256(); in aom_sadMxNx4d_avx2()
sad_highbd_avx2.c
42 const __m256i zero = _mm256_setzero_si256(); in highbd_sad16x4_core_avx2()
95 __m256i sad = _mm256_setzero_si256(); in aom_highbd_sad16xN_avx2()
145 __m256i sad = _mm256_setzero_si256(); in aom_highbd_sad32xN_avx2()
196 __m256i sad = _mm256_setzero_si256(); in aom_highbd_sad64xN_avx2()
242 __m256i sad = _mm256_setzero_si256(); in aom_highbd_sad128xN_avx2()
284 __m256i sad = _mm256_setzero_si256(); in aom_highbd_sad16x4_avg_avx2()
296 __m256i sad = _mm256_setzero_si256(); in aom_highbd_sad16x8_avg_avx2()
356 __m256i sad = _mm256_setzero_si256(); in aom_highbd_sad32x8_avg_avx2()
376 __m256i sad = _mm256_setzero_si256(); in aom_highbd_sad32x16_avg_avx2()
424 __m256i sad = _mm256_setzero_si256(); in aom_highbd_sad64x16_avg_avx2()
[all …]
sse_avx2.c
24 const __m256i zero = _mm256_setzero_si256(); in sse_w32_avx2()
37 __m256i zero = _mm256_setzero_si256(); in summary_all_avx2()
103 __m256i sum = _mm256_setzero_si256(); in aom_sse_avx2()
104 __m256i zero = _mm256_setzero_si256(); in aom_sse_avx2()
258 __m256i sum = _mm256_setzero_si256(); in aom_highbd_sse_avx2()
290 __m256i sum32 = _mm256_setzero_si256(); in aom_highbd_sse_avx2()
306 __m256i sum32 = _mm256_setzero_si256(); in aom_highbd_sse_avx2()
324 __m256i sum32 = _mm256_setzero_si256(); in aom_highbd_sse_avx2()
347 __m256i sum32 = _mm256_setzero_si256(); in aom_highbd_sse_avx2()
364 __m256i sum32 = _mm256_setzero_si256(); in aom_highbd_sse_avx2()
highbd_variance_avx2.c
24 __m256i v_sum_d = _mm256_setzero_si256(); in aom_highbd_calc8x8var_avx2()
25 __m256i v_sse_d = _mm256_setzero_si256(); in aom_highbd_calc8x8var_avx2()
59 __m256i v_sum_d = _mm256_setzero_si256(); in aom_highbd_calc16x16var_avx2()
60 __m256i v_sse_d = _mm256_setzero_si256(); in aom_highbd_calc16x16var_avx2()
sad_avx2.c
22 __m256i sum_sad = _mm256_setzero_si256(); \
51 __m256i sum_sad = _mm256_setzero_si256(); \
104 __m256i sum_sad = _mm256_setzero_si256(); \
139 __m256i sum_sad = _mm256_setzero_si256(); \
obmc_sad_avx2.c
34 __m256i v_sad_d = _mm256_setzero_si256(); in obmc_sad_w4_avx2()
75 __m256i v_sad_d = _mm256_setzero_si256(); in obmc_sad_w8n_avx2()
157 __m256i v_sad_d = _mm256_setzero_si256(); in hbd_obmc_sad_w4_avx2()
200 __m256i v_sad_d = _mm256_setzero_si256(); in hbd_obmc_sad_w8n_avx2()
variance_avx2.c
125 *vsum = _mm256_setzero_si256(); in variance16_avx2()
138 *vsum = _mm256_setzero_si256(); in variance32_avx2()
151 *vsum = _mm256_setzero_si256(); in variance64_avx2()
165 *vsum = _mm256_setzero_si256(); in variance128_avx2()
181 __m256i vsse = _mm256_setzero_si256(); \
206 __m256i vsse = _mm256_setzero_si256(); \
207 __m256i vsum = _mm256_setzero_si256(); \
457 const __m256i zero = _mm256_setzero_si256(); in aom_highbd_comp_mask_pred_avx2()
sum_squares_avx2.c
23 __m256i v_acc_q = _mm256_setzero_si256(); in aom_sum_squares_2d_i16_nxn_avx2()
26 __m256i v_acc_d = _mm256_setzero_si256(); in aom_sum_squares_2d_i16_nxn_avx2()
111 __m256i vzero = _mm256_setzero_si256(); in aom_var_2d_u8_avx2()
185 __m256i vzero = _mm256_setzero_si256(); in aom_var_2d_u16_avx2()
highbd_quantize_intrin_avx2.c
99 const __m256i zero = _mm256_setzero_si256(); in quantize()
106 const __m256i zero = _mm256_setzero_si256(); in quantize()
127 __m256i eob = _mm256_setzero_si256(); in aom_highbd_quantize_b_avx2()
/external/libaom/libaom/av1/encoder/x86/
rdopt_avx2.c
78 __m256i xy_sum_32 = _mm256_setzero_si256(); in av1_get_horver_correlation_full_avx2()
79 __m256i xz_sum_32 = _mm256_setzero_si256(); in av1_get_horver_correlation_full_avx2()
80 __m256i x_sum_32 = _mm256_setzero_si256(); in av1_get_horver_correlation_full_avx2()
81 __m256i x2_sum_32 = _mm256_setzero_si256(); in av1_get_horver_correlation_full_avx2()
101 xy_sum_32 = _mm256_setzero_si256(); in av1_get_horver_correlation_full_avx2()
102 xz_sum_32 = _mm256_setzero_si256(); in av1_get_horver_correlation_full_avx2()
103 x_sum_32 = _mm256_setzero_si256(); in av1_get_horver_correlation_full_avx2()
104 x2_sum_32 = _mm256_setzero_si256(); in av1_get_horver_correlation_full_avx2()
error_intrin_avx2.c
34 const __m256i zero = _mm256_setzero_si256(); in av1_block_error_lp_avx2()
93 const __m256i zero_reg = _mm256_setzero_si256(); in av1_block_error_avx2()
96 sse_reg = _mm256_setzero_si256(); in av1_block_error_avx2()
97 ssz_reg = _mm256_setzero_si256(); in av1_block_error_avx2()
av1_quantize_avx2.c
31 const __m256i zero = _mm256_setzero_si256(); in write_zero()
122 const __m256i zero = _mm256_setzero_si256(); in quantize()
138 const __m256i zero256 = _mm256_setzero_si256(); in scan_eob_256()
158 const __m256i zero = _mm256_setzero_si256(); in store_zero_tran_low()
269 __m256i eob = _mm256_setzero_si256(); in av1_quantize_fp_avx2()
313 const __m256i zero = _mm256_setzero_si256(); in quantize_32x32()
344 __m256i eob = _mm256_setzero_si256(); in av1_quantize_fp_32x32_avx2()
392 const __m256i zero = _mm256_setzero_si256(); in quantize_64x64()
423 __m256i eob = _mm256_setzero_si256(); in av1_quantize_fp_64x64_avx2()
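
In these quantizers the zero register does double duty: store_zero_tran_low()/write_zero() write it straight to memory to clear skipped coefficient blocks, and the eob accumulator starts from it. A sketch of the store side, assuming 32-bit coefficients and a hypothetical helper name:

#include <immintrin.h>
#include <stdint.h>

/* Clear a coefficient block by storing a zeroed ymm register,
 * 8 x 32-bit values per store. Hypothetical helper (the real code
 * stores tran_low_t); requires AVX2. n must be a multiple of 8. */
static void clear_coeffs_sketch(int32_t *coeffs, int n) {
  const __m256i zero = _mm256_setzero_si256();
  for (int i = 0; i < n; i += 8)
    _mm256_storeu_si256((__m256i *)(coeffs + i), zero);
}
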
/external/clang/lib/Headers/
avx512vlcdintrin.h
104 (__v4di) _mm256_setzero_si256 (), in _mm256_maskz_conflict_epi64()
153 _mm256_setzero_si256 (), in _mm256_maskz_conflict_epi32()
188 _mm256_setzero_si256 (), in _mm256_lzcnt_epi32()
205 _mm256_setzero_si256 (), in _mm256_maskz_lzcnt_epi32()
240 _mm256_setzero_si256 (), in _mm256_lzcnt_epi64()
257 _mm256_setzero_si256 (), in _mm256_maskz_lzcnt_epi64()
avx512vlbwintrin.h
630 _mm256_setzero_si256 (), in _mm256_maskz_add_epi8()
647 _mm256_setzero_si256 (), in _mm256_maskz_add_epi16()
664 _mm256_setzero_si256 (), in _mm256_maskz_sub_epi8()
681 _mm256_setzero_si256 (), in _mm256_maskz_sub_epi16()
765 _mm256_setzero_si256 (), in _mm256_maskz_mullo_epi16()
846 (__v32qi) _mm256_setzero_si256 (), in _mm256_maskz_abs_epi8()
878 (__v16hi) _mm256_setzero_si256 (), in _mm256_maskz_abs_epi16()
904 (__v16hi) _mm256_setzero_si256 (), in _mm256_maskz_packs_epi32()
941 (__v32qi) _mm256_setzero_si256 (), in _mm256_maskz_packs_epi16()
978 (__v16hi) _mm256_setzero_si256 (), in _mm256_maskz_packus_epi32()
[all …]
avx512vldqintrin.h
52 _mm256_setzero_si256 (), in _mm256_maskz_mullo_epi64()
375 (__v4di) _mm256_setzero_si256(), in _mm256_cvtpd_epi64()
389 (__v4di) _mm256_setzero_si256(), in _mm256_maskz_cvtpd_epi64()
417 (__v4di) _mm256_setzero_si256(), in _mm256_cvtpd_epu64()
431 (__v4di) _mm256_setzero_si256(), in _mm256_maskz_cvtpd_epu64()
459 (__v4di) _mm256_setzero_si256(), in _mm256_cvtps_epi64()
473 (__v4di) _mm256_setzero_si256(), in _mm256_maskz_cvtps_epi64()
501 (__v4di) _mm256_setzero_si256(), in _mm256_cvtps_epu64()
515 (__v4di) _mm256_setzero_si256(), in _mm256_maskz_cvtps_epu64()
627 (__v4di) _mm256_setzero_si256(), in _mm256_cvttpd_epi64()
[all …]
avx512vlintrin.h
634 _mm256_setzero_si256 (), in _mm256_maskz_add_epi32()
654 _mm256_setzero_si256 (), in _mm256_maskz_add_epi64()
674 _mm256_setzero_si256 (), in _mm256_maskz_sub_epi32()
694 _mm256_setzero_si256 (), in _mm256_maskz_sub_epi64()
793 _mm256_setzero_si256 (), in _mm256_maskz_mul_epi32()
831 _mm256_setzero_si256 (), in _mm256_maskz_mul_epu32()
860 _mm256_setzero_si256 (), in _mm256_maskz_mullo_epi32()
903 return (__m256i)_mm256_mask_and_epi32(_mm256_setzero_si256(), __U, __A, __B); in _mm256_maskz_and_epi32()
931 return (__m256i)_mm256_mask_andnot_epi32(_mm256_setzero_si256(), in _mm256_maskz_andnot_epi32()
960 return (__m256i)_mm256_mask_or_epi32(_mm256_setzero_si256(), __U, __A, __B); in _mm256_maskz_or_epi32()
[all …]
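
The clang AVX-512VL headers use the intrinsic differently: _mm256_setzero_si256() supplies the pass-through operand for the maskz_ (zero-masking) intrinsics, so lanes whose mask bit is clear come out as zero rather than being taken from a source register. The equivalence, as a sketch (requires AVX-512F and AVX-512VL):

#include <immintrin.h>

/* Zero-masking is merge-masking with a zeroed pass-through: this
 * helper behaves like the header's _mm256_maskz_add_epi32(). */
static __m256i maskz_add_epi32_demo(__mmask8 k, __m256i a, __m256i b) {
  return _mm256_mask_add_epi32(_mm256_setzero_si256(), k, a, b);
}
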
/external/libaom/libaom/aom_dsp/simd/
v256_intrinsics_x86.h
79 SIMD_INLINE v256 v256_zero(void) { return _mm256_setzero_si256(); } in v256_zero()
264 _mm256_setzero_si256()); in v256_unpacklo_u8_s16()
270 _mm256_setzero_si256()); in v256_unpackhi_u8_s16()
322 _mm256_setzero_si256()); in v256_unpacklo_u16_s32()
335 _mm256_setzero_si256()); in v256_unpackhi_u16_s32()
434 v256 t = _mm256_sad_epu8(a, _mm256_setzero_si256()); in v256_hadd_u8()
444 return _mm256_setzero_si256(); in v256_sad_u8_init()
462 return _mm256_setzero_si256(); in v256_ssd_u8_init()
468 v256 l = _mm256_sub_epi16(_mm256_unpacklo_epi8(a, _mm256_setzero_si256()), in v256_ssd_u8()
469 _mm256_unpacklo_epi8(b, _mm256_setzero_si256())); in v256_ssd_u8()
[all …]
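
libaom's portable SIMD layer is the thinnest wrapper of all: on x86, v256 is simply __m256i, and the zero/init helpers reduce directly to the intrinsic, as the v256_intrinsics_x86.h lines above show. Simplified (without the SIMD_INLINE macro):

#include <immintrin.h>

/* On x86 the portable v256 type is __m256i, and the zero and
 * accumulator-init helpers are aliases for _mm256_setzero_si256(). */
typedef __m256i v256;

static inline v256 v256_zero(void) { return _mm256_setzero_si256(); }
static inline v256 v256_sad_u8_init(void) { return v256_zero(); }
static inline v256 v256_ssd_u8_init(void) { return v256_zero(); }
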
/external/libaom/libaom/av1/common/x86/
cfl_avx2.c
131 const __m256i zeros = _mm256_setzero_si256(); in cfl_luma_subsampling_444_lbd_avx2()
332 highbd_clamp_epi16(res, _mm256_setzero_si256(), max)); in cfl_predict_hbd_avx2()
338 highbd_clamp_epi16(res_1, _mm256_setzero_si256(), max)); in cfl_predict_hbd_avx2()
398 return _mm256_add_epi32(_mm256_unpacklo_epi16(a, _mm256_setzero_si256()), in _mm256_addl_epi16()
399 _mm256_unpackhi_epi16(a, _mm256_setzero_si256())); in _mm256_addl_epi16()
415 __m256i sum = _mm256_setzero_si256(); in subtract_average_avx2()
419 if (width == 32) sum2 = _mm256_setzero_si256(); in subtract_average_avx2()
/external/flac/libFLAC/
stream_encoder_intrin_avx2.c
65 __m256i sum256 = _mm256_setzero_si256(); in FLAC__precompute_partition_info_sums_intrin_avx2()
97 __m256i sum256 = _mm256_setzero_si256(); in FLAC__precompute_partition_info_sums_intrin_avx2()
/external/libvpx/libvpx/vp9/encoder/x86/
vp9_quantize_avx2.c
22 const __m256i zero = _mm256_setzero_si256(); in store_zero_tran_low()
34 const __m256i zero256 = _mm256_setzero_si256(); in scan_eob_256()
vp9_error_avx2.c
26 const __m256i zero = _mm256_setzero_si256(); in vp9_block_error_avx2()
112 const __m256i zero = _mm256_setzero_si256(); in vp9_block_error_fp_avx2()
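
One more role the zero register can play in block-error kernels such as these is the classic AVX2 sign-extension trick: compare against zero to build a per-lane sign mask, then unpack value/mask pairs to widen signed 32-bit lanes to 64-bit for accumulation. A sketch of that trick (an assumption about how these kernels use the register, not a copy of them):

#include <immintrin.h>

/* Sign-extend eight signed 32-bit lanes to 2 x 4 64-bit lanes.
 * _mm256_cmpgt_epi32(zero, v) is all-ones exactly in the negative
 * lanes, so interleaving v with it performs the extension. Requires
 * AVX2; lane order is permuted within 128-bit halves, which is fine
 * when the results are only summed. Hypothetical helper. */
static void sext_i32_to_i64(__m256i v, __m256i *lo, __m256i *hi) {
  const __m256i zero = _mm256_setzero_si256();
  const __m256i sign = _mm256_cmpgt_epi32(zero, v);
  *lo = _mm256_unpacklo_epi32(v, sign);
  *hi = _mm256_unpackhi_epi32(v, sign);
}
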
