Home
last modified time | relevance | path

Searched refs: _mm_load_si128 (Results 1 – 25 of 49) sorted by relevance

12

/external/libhevc/common/x86/
Dihevc_itrans_recon_32x32_ssse3_intr.c260 m_temp_reg_70 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_32x32_ssse3()
262 m_temp_reg_71 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_32x32_ssse3()
264 m_temp_reg_72 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_32x32_ssse3()
266 m_temp_reg_73 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_32x32_ssse3()
268 m_temp_reg_74 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_32x32_ssse3()
270 m_temp_reg_75 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_32x32_ssse3()
272 m_temp_reg_76 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_32x32_ssse3()
274 m_temp_reg_77 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_32x32_ssse3()
277 m_temp_reg_80 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_32x32_ssse3()
279 m_temp_reg_81 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_32x32_ssse3()
[all …]
Dihevc_itrans_recon_16x16_ssse3_intr.c210 m_temp_reg_70 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_16x16_ssse3()
212 m_temp_reg_71 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_16x16_ssse3()
214 m_temp_reg_72 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_16x16_ssse3()
216 m_temp_reg_73 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_16x16_ssse3()
218 m_temp_reg_74 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_16x16_ssse3()
220 m_temp_reg_75 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_16x16_ssse3()
222 m_temp_reg_76 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_16x16_ssse3()
224 m_temp_reg_77 = _mm_load_si128((__m128i *)pi2_tmp_src); in ihevc_itrans_recon_16x16_ssse3()
249 m_coeff1 = _mm_load_si128((__m128i *)&g_ai2_ihevc_trans_16_even[2][0]); //89 75 in ihevc_itrans_recon_16x16_ssse3()
295 m_coeff3 = _mm_load_si128((__m128i *)&g_ai2_ihevc_trans_16_even[3][0]); //75 -18 in ihevc_itrans_recon_16x16_ssse3()
[all …]
Dihevc_itrans_recon_ssse3_intr.c1035 m_temp_reg_70 = _mm_load_si128((__m128i *)pi2_src); in ihevc_itrans_recon_8x8_ssse3()
1037 m_temp_reg_71 = _mm_load_si128((__m128i *)pi2_src); in ihevc_itrans_recon_8x8_ssse3()
1039 m_temp_reg_72 = _mm_load_si128((__m128i *)pi2_src); in ihevc_itrans_recon_8x8_ssse3()
1041 m_temp_reg_73 = _mm_load_si128((__m128i *)pi2_src); in ihevc_itrans_recon_8x8_ssse3()
1044 m_temp_reg_74 = _mm_load_si128((__m128i *)pi2_src); in ihevc_itrans_recon_8x8_ssse3()
1046 m_temp_reg_75 = _mm_load_si128((__m128i *)pi2_src); in ihevc_itrans_recon_8x8_ssse3()
1048 m_temp_reg_76 = _mm_load_si128((__m128i *)pi2_src); in ihevc_itrans_recon_8x8_ssse3()
1050 m_temp_reg_77 = _mm_load_si128((__m128i *)pi2_src); in ihevc_itrans_recon_8x8_ssse3()
1061 m_coeff2 = _mm_load_si128((__m128i *)&g_ai2_ihevc_trans_intr_even_8[3][0]); in ihevc_itrans_recon_8x8_ssse3()
1062 m_coeff1 = _mm_load_si128((__m128i *)&g_ai2_ihevc_trans_intr_even_8[0][0]); in ihevc_itrans_recon_8x8_ssse3()
[all …]
Dihevc_deblk_ssse3_intr.c147 coef_8x16b = _mm_load_si128((__m128i *)(coef_d)); in ihevc_deblk_luma_vert_ssse3()
148 mask_16x8b = _mm_load_si128((__m128i *)(shuffle_d)); in ihevc_deblk_luma_vert_ssse3()
409 coefdelta_0_8x16b = _mm_load_si128((__m128i *)coef_de1); in ihevc_deblk_luma_vert_ssse3()
431 coefdelta_0_8x16b = _mm_load_si128((__m128i *)coef_dep1); in ihevc_deblk_luma_vert_ssse3()
474 tmp3_const_8x16b = _mm_load_si128((__m128i *)(shuffle1)); in ihevc_deblk_luma_vert_ssse3()
508 tmp0_const_8x16b = _mm_load_si128((__m128i *)shuffle2); in ihevc_deblk_luma_vert_ssse3()
509 tmp1_const_8x16b = _mm_load_si128((__m128i *)shuffle3); in ihevc_deblk_luma_vert_ssse3()
606 coef_8x16b = _mm_load_si128((__m128i *)(coef_d)); in ihevc_deblk_luma_horz_ssse3()
607 mask_16x8b = _mm_load_si128((__m128i *)(shuffle_d)); in ihevc_deblk_luma_horz_ssse3()
843 coefdelta_0_8x16b = _mm_load_si128((__m128i *)coef_de1); in ihevc_deblk_luma_horz_ssse3()
[all …]
/external/libvpx/libvpx/vpx_dsp/x86/
Dfwd_txfm_sse2.c48 __m128i in0 = _mm_load_si128((const __m128i *)(input + 0 * stride)); in vpx_fdct8x8_1_sse2()
49 __m128i in1 = _mm_load_si128((const __m128i *)(input + 1 * stride)); in vpx_fdct8x8_1_sse2()
50 __m128i in2 = _mm_load_si128((const __m128i *)(input + 2 * stride)); in vpx_fdct8x8_1_sse2()
51 __m128i in3 = _mm_load_si128((const __m128i *)(input + 3 * stride)); in vpx_fdct8x8_1_sse2()
57 in0 = _mm_load_si128((const __m128i *)(input + 4 * stride)); in vpx_fdct8x8_1_sse2()
58 in1 = _mm_load_si128((const __m128i *)(input + 5 * stride)); in vpx_fdct8x8_1_sse2()
59 in2 = _mm_load_si128((const __m128i *)(input + 6 * stride)); in vpx_fdct8x8_1_sse2()
60 in3 = _mm_load_si128((const __m128i *)(input + 7 * stride)); in vpx_fdct8x8_1_sse2()
95 in0 = _mm_load_si128((const __m128i *)(input + 0 * stride + 0)); in vpx_fdct16x16_1_sse2()
96 in1 = _mm_load_si128((const __m128i *)(input + 0 * stride + 8)); in vpx_fdct16x16_1_sse2()
[all …]
Dhighbd_loopfilter_sse2.c74 blimit = _mm_unpacklo_epi8(_mm_load_si128((const __m128i *)_blimit), zero); in vpx_highbd_lpf_horizontal_16_sse2()
75 limit = _mm_unpacklo_epi8(_mm_load_si128((const __m128i *)_limit), zero); in vpx_highbd_lpf_horizontal_16_sse2()
76 thresh = _mm_unpacklo_epi8(_mm_load_si128((const __m128i *)_thresh), zero); in vpx_highbd_lpf_horizontal_16_sse2()
79 _mm_unpacklo_epi8(_mm_load_si128((const __m128i *)_blimit), zero), 2); in vpx_highbd_lpf_horizontal_16_sse2()
81 _mm_unpacklo_epi8(_mm_load_si128((const __m128i *)_limit), zero), 2); in vpx_highbd_lpf_horizontal_16_sse2()
83 _mm_unpacklo_epi8(_mm_load_si128((const __m128i *)_thresh), zero), 2); in vpx_highbd_lpf_horizontal_16_sse2()
86 _mm_unpacklo_epi8(_mm_load_si128((const __m128i *)_blimit), zero), 4); in vpx_highbd_lpf_horizontal_16_sse2()
88 _mm_unpacklo_epi8(_mm_load_si128((const __m128i *)_limit), zero), 4); in vpx_highbd_lpf_horizontal_16_sse2()
90 _mm_unpacklo_epi8(_mm_load_si128((const __m128i *)_thresh), zero), 4); in vpx_highbd_lpf_horizontal_16_sse2()
93 q4 = _mm_load_si128((__m128i *)(s + 4 * p)); in vpx_highbd_lpf_horizontal_16_sse2()
[all …]
Dsum_squares_sse2.c62 _mm_load_si128((const __m128i *)(b + 0 * stride)); in vpx_sum_squares_2d_i16_nxn_sse2()
64 _mm_load_si128((const __m128i *)(b + 1 * stride)); in vpx_sum_squares_2d_i16_nxn_sse2()
66 _mm_load_si128((const __m128i *)(b + 2 * stride)); in vpx_sum_squares_2d_i16_nxn_sse2()
68 _mm_load_si128((const __m128i *)(b + 3 * stride)); in vpx_sum_squares_2d_i16_nxn_sse2()
70 _mm_load_si128((const __m128i *)(b + 4 * stride)); in vpx_sum_squares_2d_i16_nxn_sse2()
72 _mm_load_si128((const __m128i *)(b + 5 * stride)); in vpx_sum_squares_2d_i16_nxn_sse2()
74 _mm_load_si128((const __m128i *)(b + 6 * stride)); in vpx_sum_squares_2d_i16_nxn_sse2()
76 _mm_load_si128((const __m128i *)(b + 7 * stride)); in vpx_sum_squares_2d_i16_nxn_sse2()
Dfwd_txfm_impl_sse2.h277 __m128i in0 = _mm_load_si128((const __m128i *)(input + 0 * stride)); in FDCT8x8_2D()
278 __m128i in1 = _mm_load_si128((const __m128i *)(input + 1 * stride)); in FDCT8x8_2D()
279 __m128i in2 = _mm_load_si128((const __m128i *)(input + 2 * stride)); in FDCT8x8_2D()
280 __m128i in3 = _mm_load_si128((const __m128i *)(input + 3 * stride)); in FDCT8x8_2D()
281 __m128i in4 = _mm_load_si128((const __m128i *)(input + 4 * stride)); in FDCT8x8_2D()
282 __m128i in5 = _mm_load_si128((const __m128i *)(input + 5 * stride)); in FDCT8x8_2D()
283 __m128i in6 = _mm_load_si128((const __m128i *)(input + 6 * stride)); in FDCT8x8_2D()
284 __m128i in7 = _mm_load_si128((const __m128i *)(input + 7 * stride)); in FDCT8x8_2D()
624 in00 = _mm_load_si128((const __m128i *)(in + 0 * stride)); in FDCT16x16_2D()
625 in01 = _mm_load_si128((const __m128i *)(in + 1 * stride)); in FDCT16x16_2D()
[all …]
Dquantize_sse2.c44 zbin = _mm_load_si128((const __m128i *)zbin_ptr); in vpx_quantize_b_sse2()
45 round = _mm_load_si128((const __m128i *)round_ptr); in vpx_quantize_b_sse2()
46 quant = _mm_load_si128((const __m128i *)quant_ptr); in vpx_quantize_b_sse2()
49 dequant = _mm_load_si128((const __m128i *)dequant_ptr); in vpx_quantize_b_sse2()
50 shift = _mm_load_si128((const __m128i *)quant_shift_ptr); in vpx_quantize_b_sse2()
116 iscan0 = _mm_load_si128((const __m128i *)(iscan_ptr + n_coeffs)); in vpx_quantize_b_sse2()
117 iscan1 = _mm_load_si128((const __m128i *)(iscan_ptr + n_coeffs) + 1); in vpx_quantize_b_sse2()
189 iscan0 = _mm_load_si128((const __m128i *)(iscan_ptr + n_coeffs)); in vpx_quantize_b_sse2()
190 iscan1 = _mm_load_si128((const __m128i *)(iscan_ptr + n_coeffs) + 1); in vpx_quantize_b_sse2()
Davg_intrin_sse2.c220 src[0] = _mm_load_si128((const __m128i *)src_diff); in vpx_hadamard_8x8_sse2()
221 src[1] = _mm_load_si128((const __m128i *)(src_diff += src_stride)); in vpx_hadamard_8x8_sse2()
222 src[2] = _mm_load_si128((const __m128i *)(src_diff += src_stride)); in vpx_hadamard_8x8_sse2()
223 src[3] = _mm_load_si128((const __m128i *)(src_diff += src_stride)); in vpx_hadamard_8x8_sse2()
224 src[4] = _mm_load_si128((const __m128i *)(src_diff += src_stride)); in vpx_hadamard_8x8_sse2()
225 src[5] = _mm_load_si128((const __m128i *)(src_diff += src_stride)); in vpx_hadamard_8x8_sse2()
226 src[6] = _mm_load_si128((const __m128i *)(src_diff += src_stride)); in vpx_hadamard_8x8_sse2()
227 src[7] = _mm_load_si128((const __m128i *)(src_diff += src_stride)); in vpx_hadamard_8x8_sse2()
365 __m128i src_line = _mm_load_si128((const __m128i *)ref); in vpx_int_pro_col_sse2()
372 src_line = _mm_load_si128((const __m128i *)ref); in vpx_int_pro_col_sse2()
[all …]
Dbitdepth_conversion_sse2.h23 const __m128i a_low = _mm_load_si128((const __m128i *)a); in load_tran_low()
26 return _mm_load_si128((const __m128i *)a); in load_tran_low()
Davg_pred_sse2.c26 const __m128i p = _mm_load_si128((const __m128i *)(pred + x)); in vpx_comp_avg_pred_sse2()
41 const __m128i p = _mm_load_si128((const __m128i *)pred); in vpx_comp_avg_pred_sse2()
Dhighbd_quantize_intrin_sse2.c49 coeffs = _mm_load_si128((const __m128i *)(coeff_ptr + i * 4)); in vpx_highbd_quantize_b_sse2()
67 coeffs = _mm_load_si128((const __m128i *)(coeff_ptr + i * 4)); in vpx_highbd_quantize_b_sse2()
124 coeffs = _mm_load_si128((const __m128i *)(coeff_ptr + i * 4)); in vpx_highbd_quantize_b_32x32_sse2()
Dvpx_subpixel_8t_intrin_ssse3.c78 shuffle1 = _mm_load_si128((__m128i const *)filt1_4_h8); in vpx_filter_block1d4_h8_intrin_ssse3()
79 shuffle2 = _mm_load_si128((__m128i const *)filt2_4_h8); in vpx_filter_block1d4_h8_intrin_ssse3()
148 filt1Reg = _mm_load_si128((__m128i const *)filt1_global); in vpx_filter_block1d8_h8_intrin_ssse3()
149 filt2Reg = _mm_load_si128((__m128i const *)filt2_global); in vpx_filter_block1d8_h8_intrin_ssse3()
150 filt3Reg = _mm_load_si128((__m128i const *)filt3_global); in vpx_filter_block1d8_h8_intrin_ssse3()
151 filt4Reg = _mm_load_si128((__m128i const *)filt4_global); in vpx_filter_block1d8_h8_intrin_ssse3()
364 const __m128i f_values = _mm_load_si128((const __m128i *)x_filter); in filter_horiz_w8_ssse3()
485 const __m128i f_values = _mm_load_si128((const __m128i *)filter); in filter_horiz_w4_ssse3()
603 const __m128i f_values = _mm_load_si128((const __m128i *)filter); in filter_vert_w4_ssse3()
665 const __m128i f_values = _mm_load_si128((const __m128i *)filter); in filter_vert_w8_ssse3()
[all …]
/external/libvpx/libvpx/vp8/encoder/x86/
Dvp8_quantize_sse2.c41 __m128i quant_shift0 = _mm_load_si128((__m128i *)(b->quant_shift)); in vp8_regular_quantize_b_sse2()
42 __m128i quant_shift1 = _mm_load_si128((__m128i *)(b->quant_shift + 8)); in vp8_regular_quantize_b_sse2()
43 __m128i z0 = _mm_load_si128((__m128i *)(b->coeff)); in vp8_regular_quantize_b_sse2()
44 __m128i z1 = _mm_load_si128((__m128i *)(b->coeff + 8)); in vp8_regular_quantize_b_sse2()
46 __m128i zbin0 = _mm_load_si128((__m128i *)(b->zbin)); in vp8_regular_quantize_b_sse2()
47 __m128i zbin1 = _mm_load_si128((__m128i *)(b->zbin + 8)); in vp8_regular_quantize_b_sse2()
48 __m128i round0 = _mm_load_si128((__m128i *)(b->round)); in vp8_regular_quantize_b_sse2()
49 __m128i round1 = _mm_load_si128((__m128i *)(b->round + 8)); in vp8_regular_quantize_b_sse2()
50 __m128i quant0 = _mm_load_si128((__m128i *)(b->quant)); in vp8_regular_quantize_b_sse2()
51 __m128i quant1 = _mm_load_si128((__m128i *)(b->quant + 8)); in vp8_regular_quantize_b_sse2()
[all …]
Dquantize_sse4.c36 __m128i quant_shift0 = _mm_load_si128((__m128i *)(b->quant_shift)); in vp8_regular_quantize_b_sse4_1()
37 __m128i quant_shift1 = _mm_load_si128((__m128i *)(b->quant_shift + 8)); in vp8_regular_quantize_b_sse4_1()
38 __m128i z0 = _mm_load_si128((__m128i *)(b->coeff)); in vp8_regular_quantize_b_sse4_1()
39 __m128i z1 = _mm_load_si128((__m128i *)(b->coeff + 8)); in vp8_regular_quantize_b_sse4_1()
41 __m128i zbin0 = _mm_load_si128((__m128i *)(b->zbin)); in vp8_regular_quantize_b_sse4_1()
42 __m128i zbin1 = _mm_load_si128((__m128i *)(b->zbin + 8)); in vp8_regular_quantize_b_sse4_1()
43 __m128i round0 = _mm_load_si128((__m128i *)(b->round)); in vp8_regular_quantize_b_sse4_1()
44 __m128i round1 = _mm_load_si128((__m128i *)(b->round + 8)); in vp8_regular_quantize_b_sse4_1()
45 __m128i quant0 = _mm_load_si128((__m128i *)(b->quant)); in vp8_regular_quantize_b_sse4_1()
46 __m128i quant1 = _mm_load_si128((__m128i *)(b->quant + 8)); in vp8_regular_quantize_b_sse4_1()
[all …]
Dquantize_ssse3.c43 __m128i z0 = _mm_load_si128((__m128i *)(b->coeff)); in vp8_fast_quantize_b_ssse3()
44 __m128i z1 = _mm_load_si128((__m128i *)(b->coeff + 8)); in vp8_fast_quantize_b_ssse3()
45 __m128i round0 = _mm_load_si128((__m128i *)(b->round)); in vp8_fast_quantize_b_ssse3()
46 __m128i round1 = _mm_load_si128((__m128i *)(b->round + 8)); in vp8_fast_quantize_b_ssse3()
47 __m128i quant_fast0 = _mm_load_si128((__m128i *)(b->quant_fast)); in vp8_fast_quantize_b_ssse3()
48 __m128i quant_fast1 = _mm_load_si128((__m128i *)(b->quant_fast + 8)); in vp8_fast_quantize_b_ssse3()
49 __m128i dequant0 = _mm_load_si128((__m128i *)(d->dequant)); in vp8_fast_quantize_b_ssse3()
50 __m128i dequant1 = _mm_load_si128((__m128i *)(d->dequant + 8)); in vp8_fast_quantize_b_ssse3()
57 __m128i zig_zag = _mm_load_si128((const __m128i *)pshufb_zig_zag_mask); in vp8_fast_quantize_b_ssse3()
/external/jemalloc/test/include/test/
DSFMT-sse2.h67 x = _mm_load_si128(a); in mm_recursion()
89 r1 = _mm_load_si128(&ctx->sfmt[N - 2].si); in gen_rand_all()
90 r2 = _mm_load_si128(&ctx->sfmt[N - 1].si); in gen_rand_all()
119 r1 = _mm_load_si128(&ctx->sfmt[N - 2].si); in gen_rand_array()
120 r2 = _mm_load_si128(&ctx->sfmt[N - 1].si); in gen_rand_array()
144 r = _mm_load_si128(&array[j + size - N].si); in gen_rand_array()
/external/libvpx/libvpx/vp9/encoder/x86/
Dvp9_dct_ssse3.c44 __m128i in0 = _mm_load_si128((const __m128i *)(input + 0 * stride)); in vp9_fdct8x8_quant_ssse3()
45 __m128i in1 = _mm_load_si128((const __m128i *)(input + 1 * stride)); in vp9_fdct8x8_quant_ssse3()
46 __m128i in2 = _mm_load_si128((const __m128i *)(input + 2 * stride)); in vp9_fdct8x8_quant_ssse3()
47 __m128i in3 = _mm_load_si128((const __m128i *)(input + 3 * stride)); in vp9_fdct8x8_quant_ssse3()
48 __m128i in4 = _mm_load_si128((const __m128i *)(input + 4 * stride)); in vp9_fdct8x8_quant_ssse3()
49 __m128i in5 = _mm_load_si128((const __m128i *)(input + 5 * stride)); in vp9_fdct8x8_quant_ssse3()
50 __m128i in6 = _mm_load_si128((const __m128i *)(input + 6 * stride)); in vp9_fdct8x8_quant_ssse3()
51 __m128i in7 = _mm_load_si128((const __m128i *)(input + 7 * stride)); in vp9_fdct8x8_quant_ssse3()
298 round = _mm_load_si128((const __m128i *)round_ptr); in vp9_fdct8x8_quant_ssse3()
299 quant = _mm_load_si128((const __m128i *)quant_ptr); in vp9_fdct8x8_quant_ssse3()
[all …]
Dvp9_quantize_sse2.c46 round = _mm_load_si128((const __m128i *)round_ptr); in vp9_quantize_fp_sse2()
47 quant = _mm_load_si128((const __m128i *)quant_ptr); in vp9_quantize_fp_sse2()
48 dequant = _mm_load_si128((const __m128i *)dequant_ptr); in vp9_quantize_fp_sse2()
101 iscan0 = _mm_load_si128((const __m128i *)(iscan_ptr + n_coeffs)); in vp9_quantize_fp_sse2()
102 iscan1 = _mm_load_si128((const __m128i *)(iscan_ptr + n_coeffs) + 1); in vp9_quantize_fp_sse2()
176 iscan0 = _mm_load_si128((const __m128i *)(iscan_ptr + n_coeffs)); in vp9_quantize_fp_sse2()
177 iscan1 = _mm_load_si128((const __m128i *)(iscan_ptr + n_coeffs) + 1); in vp9_quantize_fp_sse2()
Dvp9_highbd_block_error_intrin_sse2.c28 __m128i mm_coeff = _mm_load_si128((__m128i *)(coeff + i)); in vp9_highbd_block_error_sse2()
29 __m128i mm_coeff2 = _mm_load_si128((__m128i *)(coeff + i + 4)); in vp9_highbd_block_error_sse2()
30 __m128i mm_dqcoeff = _mm_load_si128((__m128i *)(dqcoeff + i)); in vp9_highbd_block_error_sse2()
31 __m128i mm_dqcoeff2 = _mm_load_si128((__m128i *)(dqcoeff + i + 4)); in vp9_highbd_block_error_sse2()
/external/swiftshader/src/Renderer/
DSurface.cpp3822 __m128i c0 = _mm_load_si128((__m128i*)(source0 + 4 * x)); in resolve()
3823 __m128i c1 = _mm_load_si128((__m128i*)(source1 + 4 * x)); in resolve()
3840 __m128i c0 = _mm_load_si128((__m128i*)(source0 + 4 * x)); in resolve()
3841 __m128i c1 = _mm_load_si128((__m128i*)(source1 + 4 * x)); in resolve()
3842 __m128i c2 = _mm_load_si128((__m128i*)(source2 + 4 * x)); in resolve()
3843 __m128i c3 = _mm_load_si128((__m128i*)(source3 + 4 * x)); in resolve()
3864 __m128i c0 = _mm_load_si128((__m128i*)(source0 + 4 * x)); in resolve()
3865 __m128i c1 = _mm_load_si128((__m128i*)(source1 + 4 * x)); in resolve()
3866 __m128i c2 = _mm_load_si128((__m128i*)(source2 + 4 * x)); in resolve()
3867 __m128i c3 = _mm_load_si128((__m128i*)(source3 + 4 * x)); in resolve()
[all …]
/external/neven/Embedded/common/src/b_BasicEm/
DMathSSE2.c165 m_XMM0 = _mm_load_si128( (__m128i *)&0[vec1L] ); in bbs_dotProduct_128SSE2()
168 m_XMM2 = _mm_load_si128( (__m128i *)&0[vec2L] ); in bbs_dotProduct_128SSE2()
170 m_XMM6 = _mm_load_si128( (__m128i *)&8[vec1L] ); in bbs_dotProduct_128SSE2()
176 m_XMM3 = _mm_load_si128( (__m128i *)&8[vec2L] ); in bbs_dotProduct_128SSE2()
188 m_XMM0 = _mm_load_si128( (__m128i *)&m_XMM5 ); in bbs_dotProduct_128SSE2()
/external/pdfium/third_party/libopenjpeg20/
Dmct.c90 __m128i r = _mm_load_si128((const __m128i *)&(c0[i])); in opj_mct_encode()
91 __m128i g = _mm_load_si128((const __m128i *)&(c1[i])); in opj_mct_encode()
92 __m128i b = _mm_load_si128((const __m128i *)&(c2[i])); in opj_mct_encode()
155 __m128i y = _mm_load_si128((const __m128i *)&(c0[i])); in opj_mct_decode()
156 __m128i u = _mm_load_si128((const __m128i *)&(c1[i])); in opj_mct_decode()
157 __m128i v = _mm_load_si128((const __m128i *)&(c2[i])); in opj_mct_decode()
234 __m128i r = _mm_load_si128((const __m128i *)&(c0[i])); in opj_mct_encode_real()
235 __m128i g = _mm_load_si128((const __m128i *)&(c1[i])); in opj_mct_encode_real()
236 __m128i b = _mm_load_si128((const __m128i *)&(c2[i])); in opj_mct_encode_real()
/external/skia/src/opts/
DSkBlitRow_opts_SSE2.cpp45 __m128i dst_pixel = _mm_load_si128(d); in S32_Blend_BlitRow32_SSE2()
86 __m128i dst_pixel = _mm_load_si128(d); in S32A_Blend_BlitRow32_SSE2()
336 __m128i dst_sse = _mm_load_si128(d); in SkBlitLCD16Row_SSE2()
404 __m128i dst_sse = _mm_load_si128(d); in SkBlitLCD16OpaqueRow_SSE2()

12