
Searched refs: _mm_cvtsi32_si128 (Results 1 – 25 of 90) sorted by relevance
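
Background for the hits below (not part of the search output): _mm_cvtsi32_si128 is the SSE2 MOVD intrinsic; it copies a 32-bit integer into element 0 of an __m128i and zeroes the upper three elements, which is why comments in the FLAC hits read "0 0 0 le0". A minimal sketch, with a helper name of our own choosing:

#include <emmintrin.h>  /* SSE2 */

/* Hypothetical helper: element 0 = x, elements 1..3 = 0. */
static __m128i low_lane_from_int(int x) {
    return _mm_cvtsi32_si128(x);
}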


/external/flac/libFLAC/
lpc_intrin_sse41.c
59  __m128i cnt = _mm_cvtsi32_si128(lp_quantization); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
138 xmm5 = _mm_cvtsi32_si128(qlp_coeff[10]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
149 xmm7 = _mm_cvtsi32_si128(data[i-11]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
253 xmm4 = _mm_cvtsi32_si128(qlp_coeff[8]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
263 xmm7 = _mm_cvtsi32_si128(data[i-9]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
352 xmm3 = _mm_cvtsi32_si128(qlp_coeff[6]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
361 xmm7 = _mm_cvtsi32_si128(data[i-7]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
431 xmm2 = _mm_cvtsi32_si128(qlp_coeff[4]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
439 xmm7 = _mm_cvtsi32_si128(data[i-5]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
494 xmm1 = _mm_cvtsi32_si128(qlp_coeff[2]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
[all …]
lpc_intrin_sse2.c
61  __m128i cnt = _mm_cvtsi32_si128(lp_quantization); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
71 … q0 = _mm_cvtsi32_si128(0xffff & qlp_coeff[0]); q0 = _mm_shuffle_epi32(q0, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
72 … q1 = _mm_cvtsi32_si128(0xffff & qlp_coeff[1]); q1 = _mm_shuffle_epi32(q1, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
73 … q2 = _mm_cvtsi32_si128(0xffff & qlp_coeff[2]); q2 = _mm_shuffle_epi32(q2, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
74 … q3 = _mm_cvtsi32_si128(0xffff & qlp_coeff[3]); q3 = _mm_shuffle_epi32(q3, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
75 … q4 = _mm_cvtsi32_si128(0xffff & qlp_coeff[4]); q4 = _mm_shuffle_epi32(q4, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
76 … q5 = _mm_cvtsi32_si128(0xffff & qlp_coeff[5]); q5 = _mm_shuffle_epi32(q5, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
77 … q6 = _mm_cvtsi32_si128(0xffff & qlp_coeff[6]); q6 = _mm_shuffle_epi32(q6, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
78 … q7 = _mm_cvtsi32_si128(0xffff & qlp_coeff[7]); q7 = _mm_shuffle_epi32(q7, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
79 … q8 = _mm_cvtsi32_si128(0xffff & qlp_coeff[8]); q8 = _mm_shuffle_epi32(q8, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
[all …]
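
The lpc_intrin_sse2.c hits above illustrate the broadcast idiom: move a scalar into element 0, then splat it to all four lanes with _mm_shuffle_epi32. A minimal sketch of that pattern, assuming nothing beyond SSE2 (the helper name is ours, and FLAC additionally masks the coefficient with 0xffff first):

#include <emmintrin.h>  /* SSE2 */

/* Broadcast a 32-bit value to all four lanes of an __m128i. */
static __m128i broadcast_epi32(int c) {
    __m128i q = _mm_cvtsi32_si128(c);                      /* element 0 = c, rest 0 */
    return _mm_shuffle_epi32(q, _MM_SHUFFLE(0, 0, 0, 0));  /* c c c c */
}
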
fixed_intrin_ssse3.c
69  last_error = _mm_cvtsi32_si128(data[-1]); // 0 0 0 le0 in FLAC__fixed_compute_best_predictor_intrin_ssse3()
72 last_error = _mm_sub_epi32(last_error, _mm_cvtsi32_si128(itmp)); // 0 0 le0 le1 in FLAC__fixed_compute_best_predictor_intrin_ssse3()
75 last_error = _mm_sub_epi32(last_error, _mm_cvtsi32_si128(itmp)); // 0 le0 le1 le2 in FLAC__fixed_compute_best_predictor_intrin_ssse3()
78 last_error = _mm_sub_epi32(last_error, _mm_cvtsi32_si128(itmp)); // le0 le1 le2 le3 in FLAC__fixed_compute_best_predictor_intrin_ssse3()
83 err0 = _mm_cvtsi32_si128(data[i]); // 0 0 0 e0 in FLAC__fixed_compute_best_predictor_intrin_ssse3()
160 last_error = _mm_cvtsi32_si128(data[-1]); // 0 0 0 le0 in FLAC__fixed_compute_best_predictor_wide_intrin_ssse3()
163 last_error = _mm_sub_epi32(last_error, _mm_cvtsi32_si128(itmp)); // 0 0 le0 le1 in FLAC__fixed_compute_best_predictor_wide_intrin_ssse3()
166 last_error = _mm_sub_epi32(last_error, _mm_cvtsi32_si128(itmp)); // 0 le0 le1 le2 in FLAC__fixed_compute_best_predictor_wide_intrin_ssse3()
169 last_error = _mm_sub_epi32(last_error, _mm_cvtsi32_si128(itmp)); // le0 le1 le2 le3 in FLAC__fixed_compute_best_predictor_wide_intrin_ssse3()
174 err0 = _mm_cvtsi32_si128(data[i]); // 0 0 0 e0 in FLAC__fixed_compute_best_predictor_wide_intrin_ssse3()
fixed_intrin_sse2.c
69  last_error = _mm_cvtsi32_si128(data[-1]); // 0 0 0 le0 in FLAC__fixed_compute_best_predictor_intrin_sse2()
72 last_error = _mm_sub_epi32(last_error, _mm_cvtsi32_si128(itmp)); // 0 0 le0 le1 in FLAC__fixed_compute_best_predictor_intrin_sse2()
75 last_error = _mm_sub_epi32(last_error, _mm_cvtsi32_si128(itmp)); // 0 le0 le1 le2 in FLAC__fixed_compute_best_predictor_intrin_sse2()
78 last_error = _mm_sub_epi32(last_error, _mm_cvtsi32_si128(itmp)); // le0 le1 le2 le3 in FLAC__fixed_compute_best_predictor_intrin_sse2()
83 err0 = _mm_cvtsi32_si128(data[i]); // 0 0 0 e0 in FLAC__fixed_compute_best_predictor_intrin_sse2()
166 last_error = _mm_cvtsi32_si128(data[-1]); // 0 0 0 le0 in FLAC__fixed_compute_best_predictor_wide_intrin_sse2()
169 last_error = _mm_sub_epi32(last_error, _mm_cvtsi32_si128(itmp)); // 0 0 le0 le1 in FLAC__fixed_compute_best_predictor_wide_intrin_sse2()
172 last_error = _mm_sub_epi32(last_error, _mm_cvtsi32_si128(itmp)); // 0 le0 le1 le2 in FLAC__fixed_compute_best_predictor_wide_intrin_sse2()
175 last_error = _mm_sub_epi32(last_error, _mm_cvtsi32_si128(itmp)); // le0 le1 le2 le3 in FLAC__fixed_compute_best_predictor_wide_intrin_sse2()
180 err0 = _mm_cvtsi32_si128(data[i]); // 0 0 0 e0 in FLAC__fixed_compute_best_predictor_wide_intrin_sse2()
stream_encoder_intrin_ssse3.c
75  __m128i mm_res = _mm_abs_epi32(_mm_cvtsi32_si128(residual[residual_sample])); in FLAC__precompute_partition_info_sums_intrin_ssse3()
85 __m128i mm_res = _mm_abs_epi32(_mm_cvtsi32_si128(residual[residual_sample])); in FLAC__precompute_partition_info_sums_intrin_ssse3()
108 …__m128i mm_res = _mm_abs_epi32(_mm_cvtsi32_si128(residual[residual_sample])); /* 0 0 0 |r0| … in FLAC__precompute_partition_info_sums_intrin_ssse3()
119 __m128i mm_res = _mm_abs_epi32(_mm_cvtsi32_si128(residual[residual_sample])); in FLAC__precompute_partition_info_sums_intrin_ssse3()
stream_encoder_intrin_sse2.c
86  __m128i mm_res = local_abs_epi32(_mm_cvtsi32_si128(residual[residual_sample])); in FLAC__precompute_partition_info_sums_intrin_sse2()
96 __m128i mm_res = local_abs_epi32(_mm_cvtsi32_si128(residual[residual_sample])); in FLAC__precompute_partition_info_sums_intrin_sse2()
119 …__m128i mm_res = local_abs_epi32(_mm_cvtsi32_si128(residual[residual_sample])); /* 0 0 0 |r0… in FLAC__precompute_partition_info_sums_intrin_sse2()
130 __m128i mm_res = local_abs_epi32(_mm_cvtsi32_si128(residual[residual_sample])); in FLAC__precompute_partition_info_sums_intrin_sse2()
stream_encoder_intrin_avx2.c
82  __m128i res128 = _mm_abs_epi32(_mm_cvtsi32_si128(residual[residual_sample])); in FLAC__precompute_partition_info_sums_intrin_avx2()
116 __m128i res128 = _mm_abs_epi32(_mm_cvtsi32_si128(residual[residual_sample])); in FLAC__precompute_partition_info_sums_intrin_avx2()
/external/libaom/libaom/av1/common/x86/
convolve_sse2.c
87  const __m128i round_shift = _mm_cvtsi32_si128(FILTER_BITS); in av1_convolve_y_sr_sse2()
103 src6 = _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 6 * src_stride)); in av1_convolve_y_sr_sse2()
105 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 0 * src_stride)), in av1_convolve_y_sr_sse2()
106 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 1 * src_stride))); in av1_convolve_y_sr_sse2()
108 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 1 * src_stride)), in av1_convolve_y_sr_sse2()
109 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 2 * src_stride))); in av1_convolve_y_sr_sse2()
111 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 2 * src_stride)), in av1_convolve_y_sr_sse2()
112 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 3 * src_stride))); in av1_convolve_y_sr_sse2()
114 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 3 * src_stride)), in av1_convolve_y_sr_sse2()
115 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 4 * src_stride))); in av1_convolve_y_sr_sse2()
[all …]
jnt_convolve_sse2.c
31  const __m128i left_shift = _mm_cvtsi32_si128(bits); in av1_dist_wtd_convolve_x_sse2()
33 const __m128i round_shift = _mm_cvtsi32_si128(conv_params->round_0); in av1_dist_wtd_convolve_x_sse2()
165 const __m128i left_shift = _mm_cvtsi32_si128(bits); in av1_dist_wtd_convolve_y_sse2()
179 const __m128i round_shift = _mm_cvtsi32_si128(conv_params->round_1); in av1_dist_wtd_convolve_y_sse2()
189 src6 = _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 6 * src_stride)); in av1_dist_wtd_convolve_y_sse2()
191 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 0 * src_stride)), in av1_dist_wtd_convolve_y_sse2()
192 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 1 * src_stride))); in av1_dist_wtd_convolve_y_sse2()
194 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 1 * src_stride)), in av1_dist_wtd_convolve_y_sse2()
195 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 2 * src_stride))); in av1_dist_wtd_convolve_y_sse2()
197 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 2 * src_stride)), in av1_dist_wtd_convolve_y_sse2()
[all …]
reconinter_sse4.c
36  const __m128i s0A = _mm_cvtsi32_si128(*(uint32_t *)src0); in av1_build_compound_diffwtd_mask_sse4_1()
37 const __m128i s0B = _mm_cvtsi32_si128(*(uint32_t *)(src0 + stride0)); in av1_build_compound_diffwtd_mask_sse4_1()
41 const __m128i s1A = _mm_cvtsi32_si128(*(uint32_t *)src1); in av1_build_compound_diffwtd_mask_sse4_1()
42 const __m128i s1B = _mm_cvtsi32_si128(*(uint32_t *)(src1 + stride1)); in av1_build_compound_diffwtd_mask_sse4_1()
av1_convolve_scale_sse4.c
33  const __m128i round_shift = _mm_cvtsi32_si128(round); in hfilter8()
114 const __m128i round_shift = _mm_cvtsi32_si128(conv_params->round_1); in vfilter8()
124 const __m128i bits_shift = _mm_cvtsi32_si128(bits); in vfilter8()
272 const __m128i round_shift = _mm_cvtsi32_si128(round); in highbd_hfilter8()
341 const __m128i round_shift = _mm_cvtsi32_si128(conv_params->round_1); in highbd_vfilter8()
353 const __m128i bits_shift = _mm_cvtsi32_si128(bits); in highbd_vfilter8()
360 __m128i round_bits_shift = _mm_cvtsi32_si128(round_bits); in highbd_vfilter8()
highbd_convolve_2d_ssse3.c
42  const __m128i round_shift_x = _mm_cvtsi32_si128(conv_params->round_0); in av1_highbd_convolve_2d_sr_ssse3()
47 const __m128i round_shift_y = _mm_cvtsi32_si128(conv_params->round_1); in av1_highbd_convolve_2d_sr_ssse3()
51 const __m128i round_shift_bits = _mm_cvtsi32_si128(bits); in av1_highbd_convolve_2d_sr_ssse3()
/external/libhevc/common/x86/
ihevc_itrans_recon_32x32_ssse3_intr.c
1841  m_rdng_factor = _mm_cvtsi32_si128((1 << (i4_shift - 1))); in ihevc_itrans_recon_32x32_ssse3()
1842 m_count = _mm_cvtsi32_si128(i4_shift); in ihevc_itrans_recon_32x32_ssse3()
1871 m_rdng_factor = _mm_cvtsi32_si128((1 << (i4_shift - 1))); in ihevc_itrans_recon_32x32_ssse3()
1872 m_count = _mm_cvtsi32_si128(i4_shift); in ihevc_itrans_recon_32x32_ssse3()
1901 m_rdng_factor = _mm_cvtsi32_si128((1 << (i4_shift - 1))); in ihevc_itrans_recon_32x32_ssse3()
1902 m_count = _mm_cvtsi32_si128(i4_shift); in ihevc_itrans_recon_32x32_ssse3()
1930 m_rdng_factor = _mm_cvtsi32_si128((1 << (i4_shift - 1))); in ihevc_itrans_recon_32x32_ssse3()
1931 m_count = _mm_cvtsi32_si128(i4_shift); in ihevc_itrans_recon_32x32_ssse3()
1959 m_rdng_factor = _mm_cvtsi32_si128((1 << (i4_shift - 1))); in ihevc_itrans_recon_32x32_ssse3()
1960 m_count = _mm_cvtsi32_si128(i4_shift); in ihevc_itrans_recon_32x32_ssse3()
[all …]
ihevc_32x32_itrans_recon_sse42_intr.c
1830  m_rdng_factor = _mm_cvtsi32_si128((1 << (i4_shift - 1))); in ihevc_itrans_recon_32x32_sse42()
1831 m_count = _mm_cvtsi32_si128(i4_shift); in ihevc_itrans_recon_32x32_sse42()
1860 m_rdng_factor = _mm_cvtsi32_si128((1 << (i4_shift - 1))); in ihevc_itrans_recon_32x32_sse42()
1861 m_count = _mm_cvtsi32_si128(i4_shift); in ihevc_itrans_recon_32x32_sse42()
1890 m_rdng_factor = _mm_cvtsi32_si128((1 << (i4_shift - 1))); in ihevc_itrans_recon_32x32_sse42()
1891 m_count = _mm_cvtsi32_si128(i4_shift); in ihevc_itrans_recon_32x32_sse42()
1919 m_rdng_factor = _mm_cvtsi32_si128((1 << (i4_shift - 1))); in ihevc_itrans_recon_32x32_sse42()
1920 m_count = _mm_cvtsi32_si128(i4_shift); in ihevc_itrans_recon_32x32_sse42()
1948 m_rdng_factor = _mm_cvtsi32_si128((1 << (i4_shift - 1))); in ihevc_itrans_recon_32x32_sse42()
1949 m_count = _mm_cvtsi32_si128(i4_shift); in ihevc_itrans_recon_32x32_sse42()
[all …]
/external/libaom/libaom/aom_dsp/x86/
highbd_intrapred_sse2.c
268  const __m128i two = _mm_cvtsi32_si128(2); in aom_highbd_dc_left_predictor_4x4_sse2()
279 const __m128i two = _mm_cvtsi32_si128(2); in aom_highbd_dc_top_predictor_4x4_sse2()
290 const __m128i dc = _mm_cvtsi32_si128(1 << (bd - 1)); in aom_highbd_dc_128_predictor_4x4_sse2()
323 const __m128i four = _mm_cvtsi32_si128(4); in aom_highbd_dc_left_predictor_4x8_sse2()
333 const __m128i two = _mm_cvtsi32_si128(2); in aom_highbd_dc_top_predictor_4x8_sse2()
344 const __m128i dc = _mm_cvtsi32_si128(1 << (bd - 1)); in aom_highbd_dc_128_predictor_4x8_sse2()
369 const __m128i four = _mm_cvtsi32_si128(4); in dc_top_predictor_8xh()
405 const __m128i two = _mm_cvtsi32_si128(2); in aom_highbd_dc_left_predictor_8x4_sse2()
416 const __m128i four = _mm_cvtsi32_si128(4); in aom_highbd_dc_left_predictor_8x8_sse2()
434 const __m128i eight = _mm_cvtsi32_si128(8); in aom_highbd_dc_left_predictor_8x16_sse2()
[all …]
masked_sad_intrin_ssse3.c
198  _mm_cvtsi32_si128(*(uint32_t *)src_ptr), in aom_masked_sad4xh_ssse3()
199 _mm_cvtsi32_si128(*(uint32_t *)&src_ptr[src_stride])); in aom_masked_sad4xh_ssse3()
201 _mm_unpacklo_epi32(_mm_cvtsi32_si128(*(uint32_t *)a_ptr), in aom_masked_sad4xh_ssse3()
202 _mm_cvtsi32_si128(*(uint32_t *)&a_ptr[a_stride])); in aom_masked_sad4xh_ssse3()
204 _mm_unpacklo_epi32(_mm_cvtsi32_si128(*(uint32_t *)b_ptr), in aom_masked_sad4xh_ssse3()
205 _mm_cvtsi32_si128(*(uint32_t *)&b_ptr[b_stride])); in aom_masked_sad4xh_ssse3()
207 _mm_unpacklo_epi32(_mm_cvtsi32_si128(*(uint32_t *)m_ptr), in aom_masked_sad4xh_ssse3()
208 _mm_cvtsi32_si128(*(uint32_t *)&m_ptr[m_stride])); in aom_masked_sad4xh_ssse3()
372 _mm_cvtsi32_si128(*(const uint32_t *)m_ptr), in aom_highbd_masked_sad4xh_ssse3()
373 _mm_cvtsi32_si128(*(const uint32_t *)&m_ptr[m_stride])), in aom_highbd_masked_sad4xh_ssse3()
intrapred_ssse3.c
89  const __m128i t = _mm_cvtsi32_si128(((const uint32_t *)above)[0]); in aom_paeth_predictor_4x16_ssse3()
202 __m128i l = _mm_cvtsi32_si128(((const uint32_t *)left)[0]); in aom_paeth_predictor_16x4_ssse3()
589 __m128i d = _mm_cvtsi32_si128(((const uint32_t *)above)[0]); in load_pixel_w4()
591 pixels[1] = _mm_cvtsi32_si128(((const uint32_t *)left)[0]); in load_pixel_w4()
614 const __m128i t = _mm_cvtsi32_si128(((const uint32_t *)weight_array)[1]); in load_weight_w4()
721 pixels[2] = _mm_cvtsi32_si128(((const uint32_t *)left)[0]); in load_pixel_w8()
892 const __m128i bottom_left = _mm_cvtsi32_si128((uint32_t)left[bh - 1]); in smooth_predictor_wxh()
895 _mm_shuffle_epi8(_mm_cvtsi32_si128((uint32_t)above[bw - 1]), dup16); in smooth_predictor_wxh()
900 const __m128i weights_y = _mm_cvtsi32_si128((uint32_t)sm_weights_h[y]); in smooth_predictor_wxh()
901 const __m128i left_y = _mm_cvtsi32_si128((uint32_t)left[y]); in smooth_predictor_wxh()
[all …]
/external/webp/src/dsp/
lossless_sse2.c
31  const __m128i C0 = _mm_unpacklo_epi8(_mm_cvtsi32_si128(c0), zero); in ClampedAddSubtractFull_SSE2()
32 const __m128i C1 = _mm_unpacklo_epi8(_mm_cvtsi32_si128(c1), zero); in ClampedAddSubtractFull_SSE2()
33 const __m128i C2 = _mm_unpacklo_epi8(_mm_cvtsi32_si128(c2), zero); in ClampedAddSubtractFull_SSE2()
45 const __m128i C0 = _mm_unpacklo_epi8(_mm_cvtsi32_si128(c0), zero); in ClampedAddSubtractHalf_SSE2()
46 const __m128i C1 = _mm_unpacklo_epi8(_mm_cvtsi32_si128(c1), zero); in ClampedAddSubtractHalf_SSE2()
47 const __m128i B0 = _mm_unpacklo_epi8(_mm_cvtsi32_si128(c2), zero); in ClampedAddSubtractHalf_SSE2()
63 const __m128i A0 = _mm_cvtsi32_si128(a); in Select_SSE2()
64 const __m128i B0 = _mm_cvtsi32_si128(b); in Select_SSE2()
65 const __m128i C0 = _mm_cvtsi32_si128(c); in Select_SSE2()
98 const __m128i A0 = _mm_cvtsi32_si128(a0); in Average2_uint32_SSE2()
[all …]
/external/skia/src/opts/
SkBitmapProcState_opts.h
67  __m128i interlaced_A = _mm_unpacklo_epi8(_mm_cvtsi32_si128(A0), _mm_cvtsi32_si128(A1)), in interpolate_in_x()
68 interlaced_B = _mm_unpacklo_epi8(_mm_cvtsi32_si128(B0), _mm_cvtsi32_si128(B1)); in interpolate_in_x()
212 const __m128i a00 = _mm_cvtsi32_si128(row0[x0]), in S32_alpha_D32_filter_DX()
213 a01 = _mm_cvtsi32_si128(row0[x1]), in S32_alpha_D32_filter_DX()
214 a10 = _mm_cvtsi32_si128(row1[x0]), in S32_alpha_D32_filter_DX()
215 a11 = _mm_cvtsi32_si128(row1[x1]); in S32_alpha_D32_filter_DX()
/external/skqp/src/opts/
SkBitmapProcState_opts.h
67  __m128i interlaced_A = _mm_unpacklo_epi8(_mm_cvtsi32_si128(A0), _mm_cvtsi32_si128(A1)), in interpolate_in_x()
68 interlaced_B = _mm_unpacklo_epi8(_mm_cvtsi32_si128(B0), _mm_cvtsi32_si128(B1)); in interpolate_in_x()
212 const __m128i a00 = _mm_cvtsi32_si128(row0[x0]), in S32_alpha_D32_filter_DX()
213 a01 = _mm_cvtsi32_si128(row0[x1]), in S32_alpha_D32_filter_DX()
214 a10 = _mm_cvtsi32_si128(row1[x0]), in S32_alpha_D32_filter_DX()
215 a11 = _mm_cvtsi32_si128(row1[x1]); in S32_alpha_D32_filter_DX()
/external/libvpx/libvpx/vpx_dsp/x86/
mem_sse2.h
32  return _mm_cvtsi32_si128(val); in load_unaligned_u32()
50 d[0] = _mm_cvtsi32_si128(*(const int *)(s + 0 * stride)); in load_8bit_4x4()
51 d[1] = _mm_cvtsi32_si128(*(const int *)(s + 1 * stride)); in load_8bit_4x4()
52 d[2] = _mm_cvtsi32_si128(*(const int *)(s + 2 * stride)); in load_8bit_4x4()
53 d[3] = _mm_cvtsi32_si128(*(const int *)(s + 3 * stride)); in load_8bit_4x4()
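
The mem_sse2.h hits above show the 4-byte load idiom: read four bytes as a uint32_t and drop them into element 0 with _mm_cvtsi32_si128. The sketch below is not libvpx's code; it uses memcpy rather than the *(const int *) cast seen above to sidestep alignment and strict-aliasing questions:

#include <emmintrin.h>  /* SSE2 */
#include <stdint.h>
#include <string.h>

/* Load 4 bytes from p into element 0 of an __m128i; upper elements are zero. */
static __m128i load_u32_to_xmm(const uint8_t *p) {
    uint32_t v;
    memcpy(&v, p, sizeof(v));
    return _mm_cvtsi32_si128((int)v);
}
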
highbd_intrapred_intrin_sse2.c
183  const __m128i two = _mm_cvtsi32_si128(2); in vpx_highbd_dc_left_predictor_4x4_sse2()
194 const __m128i two = _mm_cvtsi32_si128(2); in vpx_highbd_dc_top_predictor_4x4_sse2()
205 const __m128i dc = _mm_cvtsi32_si128(1 << (bd - 1)); in vpx_highbd_dc_128_predictor_4x4_sse2()
237 const __m128i four = _mm_cvtsi32_si128(4); in vpx_highbd_dc_left_predictor_8x8_sse2()
248 const __m128i four = _mm_cvtsi32_si128(4); in vpx_highbd_dc_top_predictor_8x8_sse2()
259 const __m128i dc = _mm_cvtsi32_si128(1 << (bd - 1)); in vpx_highbd_dc_128_predictor_8x8_sse2()
289 const __m128i eight = _mm_cvtsi32_si128(8); in vpx_highbd_dc_left_predictor_16x16_sse2()
300 const __m128i eight = _mm_cvtsi32_si128(8); in vpx_highbd_dc_top_predictor_16x16_sse2()
311 const __m128i dc = _mm_cvtsi32_si128(1 << (bd - 1)); in vpx_highbd_dc_128_predictor_16x16_sse2()
346 const __m128i sixteen = _mm_cvtsi32_si128(16); in vpx_highbd_dc_left_predictor_32x32_sse2()
[all …]
/external/libaom/libaom/aom_dsp/simd/
v256_intrinsics_x86.h
476  v128 c = _mm_cvtsi32_si128(32); in v256_ssd_u8()
602 _mm256_sll_epi16(a, _mm_cvtsi32_si128(c))); in v256_shl_8()
607 _mm256_srl_epi16(a, _mm_cvtsi32_si128(c))); in v256_shr_u8()
611 __m128i x = _mm_cvtsi32_si128(c + 8); in v256_shr_s8()
617 return _mm256_sll_epi16(a, _mm_cvtsi32_si128(c)); in v256_shl_16()
621 return _mm256_srl_epi16(a, _mm_cvtsi32_si128(c)); in v256_shr_u16()
625 return _mm256_sra_epi16(a, _mm_cvtsi32_si128(c)); in v256_shr_s16()
629 return _mm256_sll_epi32(a, _mm_cvtsi32_si128(c)); in v256_shl_32()
633 return _mm256_srl_epi32(a, _mm_cvtsi32_si128(c)); in v256_shr_u32()
637 return _mm256_sra_epi32(a, _mm_cvtsi32_si128(c)); in v256_shr_s32()
[all …]
v64_intrinsics_x86.h
437  _mm_sll_epi16(a, _mm_cvtsi32_si128(c))); in v64_shl_8()
442 _mm_srl_epi16(a, _mm_cvtsi32_si128(c))); in v64_shr_u8()
447 _mm_sra_epi16(_mm_unpacklo_epi8(a, a), _mm_cvtsi32_si128(c + 8)), a); in v64_shr_s8()
451 return _mm_sll_epi16(a, _mm_cvtsi32_si128(c)); in v64_shl_16()
455 return _mm_srl_epi16(a, _mm_cvtsi32_si128(c)); in v64_shr_u16()
459 return _mm_sra_epi16(a, _mm_cvtsi32_si128(c)); in v64_shr_s16()
463 return _mm_sll_epi32(a, _mm_cvtsi32_si128(c)); in v64_shl_32()
467 return _mm_srl_epi32(a, _mm_cvtsi32_si128(c)); in v64_shr_u32()
471 return _mm_sra_epi32(a, _mm_cvtsi32_si128(c)); in v64_shr_s32()
v128_intrinsics_x86.h
537  _mm_sll_epi16(a, _mm_cvtsi32_si128(c))); in v128_shl_8()
542 _mm_srl_epi16(a, _mm_cvtsi32_si128(c))); in v128_shr_u8()
546 __m128i x = _mm_cvtsi32_si128(c + 8); in v128_shr_s8()
552 return _mm_sll_epi16(a, _mm_cvtsi32_si128(c)); in v128_shl_16()
556 return _mm_srl_epi16(a, _mm_cvtsi32_si128(c)); in v128_shr_u16()
560 return _mm_sra_epi16(a, _mm_cvtsi32_si128(c)); in v128_shr_s16()
564 return _mm_sll_epi32(a, _mm_cvtsi32_si128(c)); in v128_shl_32()
568 return _mm_srl_epi32(a, _mm_cvtsi32_si128(c)); in v128_shr_u32()
572 return _mm_sra_epi32(a, _mm_cvtsi32_si128(c)); in v128_shr_s32()
576 return _mm_sll_epi64(a, _mm_cvtsi32_si128(c)); in v128_shl_64()
[all …]
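
The aom_dsp/simd wrappers above rely on one more idiom: the per-register shift intrinsics (_mm_sll_epi16, _mm_srl_epi32, _mm_sra_epi32, ...) take their count from the low 64 bits of an XMM register, so a runtime shift amount is first moved there with _mm_cvtsi32_si128. A minimal sketch with a wrapper name of our own (the count is assumed to be in range for the element width):

#include <emmintrin.h>  /* SSE2 */

/* Shift every 16-bit lane of a left by a runtime count c (0..15). */
static __m128i shl_epi16_by(__m128i a, int c) {
    return _mm_sll_epi16(a, _mm_cvtsi32_si128(c));
}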
