/external/libaom/libaom/av1/common/x86/ |
D | highbd_convolve_2d_sse2.c |
      28  _mm_store_si128((__m128i *)(dst + 0 * 8), s[0]);   in copy_64()
      29  _mm_store_si128((__m128i *)(dst + 1 * 8), s[1]);   in copy_64()
      30  _mm_store_si128((__m128i *)(dst + 2 * 8), s[2]);   in copy_64()
      31  _mm_store_si128((__m128i *)(dst + 3 * 8), s[3]);   in copy_64()
      32  _mm_store_si128((__m128i *)(dst + 4 * 8), s[4]);   in copy_64()
      33  _mm_store_si128((__m128i *)(dst + 5 * 8), s[5]);   in copy_64()
      34  _mm_store_si128((__m128i *)(dst + 6 * 8), s[6]);   in copy_64()
      35  _mm_store_si128((__m128i *)(dst + 7 * 8), s[7]);   in copy_64()
      56  _mm_store_si128((__m128i *)(dst + 0 * 8), s[0]);   in copy_128()
      57  _mm_store_si128((__m128i *)(dst + 1 * 8), s[1]);   in copy_128()
      [all …]
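The copy_64()/copy_128() hits above are one pattern repeated: load a run of 128-bit lanes, then commit them with aligned stores. A minimal sketch of that pattern for 16-bit pixels (hypothetical helper name; assumes src and dst are 16-byte aligned, which _mm_load_si128/_mm_store_si128 require):

    #include <emmintrin.h> /* SSE2 */
    #include <stdint.h>

    /* Copy 64 16-bit pixels (128 bytes) with eight aligned 128-bit stores,
       in the style of copy_64(). Faults if either pointer is unaligned. */
    static void copy_64_sketch(const uint16_t *src, uint16_t *dst) {
      __m128i s[8];
      for (int i = 0; i < 8; ++i)
        s[i] = _mm_load_si128((const __m128i *)(src + i * 8));
      for (int i = 0; i < 8; ++i)
        _mm_store_si128((__m128i *)(dst + i * 8), s[i]);
    }

The real kernels unroll both loops by hand, as the hit list shows.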
|
D | convolve_2d_sse2.c |
      229  _mm_store_si128((__m128i *)(dst + 0 * 16), s[0]);   in copy_128()
      230  _mm_store_si128((__m128i *)(dst + 1 * 16), s[1]);   in copy_128()
      231  _mm_store_si128((__m128i *)(dst + 2 * 16), s[2]);   in copy_128()
      232  _mm_store_si128((__m128i *)(dst + 3 * 16), s[3]);   in copy_128()
      233  _mm_store_si128((__m128i *)(dst + 4 * 16), s[4]);   in copy_128()
      234  _mm_store_si128((__m128i *)(dst + 5 * 16), s[5]);   in copy_128()
      235  _mm_store_si128((__m128i *)(dst + 6 * 16), s[6]);   in copy_128()
      236  _mm_store_si128((__m128i *)(dst + 7 * 16), s[7]);   in copy_128()
      296  _mm_store_si128((__m128i *)dst, s[0]);   in av1_convolve_2d_copy_sr_sse2()
      298  _mm_store_si128((__m128i *)dst, s[1]);   in av1_convolve_2d_copy_sr_sse2()
      [all …]
|
D | jnt_convolve_avx2.c |
      124  _mm_store_si128((__m128i *)(&dst[i * dst_stride + j]), res_0);   in av1_dist_wtd_convolve_x_avx2()
      127  _mm_store_si128((__m128i *)(&dst[i * dst_stride + j + dst_stride]),   in av1_dist_wtd_convolve_x_avx2()
      179  _mm_store_si128((__m128i *)(&dst[i * dst_stride + j]), res_0);   in av1_dist_wtd_convolve_x_avx2()
      182  _mm_store_si128((__m128i *)(&dst[i * dst_stride + j + dst_stride]),   in av1_dist_wtd_convolve_x_avx2()
      323  _mm_store_si128((__m128i *)(&dst[i * dst_stride + j]), res_0);   in av1_dist_wtd_convolve_y_avx2()
      326  _mm_store_si128((__m128i *)(&dst[i * dst_stride + j + dst_stride]),   in av1_dist_wtd_convolve_y_avx2()
      380  _mm_store_si128((__m128i *)(&dst0[i * dst_stride0 + j]), res_0);   in av1_dist_wtd_convolve_y_avx2()
      381  _mm_store_si128(   in av1_dist_wtd_convolve_y_avx2()
      386  _mm_store_si128((__m128i *)(&dst[i * dst_stride + j]), res_lo_0);   in av1_dist_wtd_convolve_y_avx2()
      390  _mm_store_si128((__m128i *)(&dst[i * dst_stride + j + dst_stride]),   in av1_dist_wtd_convolve_y_avx2()
      [all …]
|
/external/libaom/libaom/aom_dsp/x86/ |
D | highbd_intrapred_sse2.c |
      57  _mm_store_si128((__m128i *)dst, _mm_unpacklo_epi64(row0, row0));   in aom_highbd_h_predictor_8x4_sse2()
      59  _mm_store_si128((__m128i *)dst, _mm_unpacklo_epi64(row1, row1));   in aom_highbd_h_predictor_8x4_sse2()
      61  _mm_store_si128((__m128i *)dst, _mm_unpacklo_epi64(row2, row2));   in aom_highbd_h_predictor_8x4_sse2()
      63  _mm_store_si128((__m128i *)dst, _mm_unpacklo_epi64(row3, row3));   in aom_highbd_h_predictor_8x4_sse2()
      80  _mm_store_si128((__m128i *)dst, _mm_unpacklo_epi64(row0, row0));   in aom_highbd_h_predictor_8x8_sse2()
      82  _mm_store_si128((__m128i *)dst, _mm_unpacklo_epi64(row1, row1));   in aom_highbd_h_predictor_8x8_sse2()
      84  _mm_store_si128((__m128i *)dst, _mm_unpacklo_epi64(row2, row2));   in aom_highbd_h_predictor_8x8_sse2()
      86  _mm_store_si128((__m128i *)dst, _mm_unpacklo_epi64(row3, row3));   in aom_highbd_h_predictor_8x8_sse2()
      88  _mm_store_si128((__m128i *)dst, _mm_unpackhi_epi64(row4, row4));   in aom_highbd_h_predictor_8x8_sse2()
      90  _mm_store_si128((__m128i *)dst, _mm_unpackhi_epi64(row5, row5));   in aom_highbd_h_predictor_8x8_sse2()
      [all …]
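These h-predictor hits splat one left-column pixel across an eight-lane row: _mm_shufflelo_epi16 copies a 16-bit lane into the low four lanes, and _mm_unpacklo_epi64 (or _mm_unpackhi_epi64 for the upper rows) widens that to all eight before the aligned store. A hedged sketch of one such row store (hypothetical helper; dst assumed 16-byte aligned):

    #include <emmintrin.h> /* SSE2 */
    #include <stdint.h>

    /* Broadcast lane 0 of 'left' across a row of eight 16-bit pixels. */
    static void h_store_row_sketch(uint16_t *dst, __m128i left) {
      const __m128i row = _mm_shufflelo_epi16(left, 0x0);              /* lane 0 -> lanes 0..3 */
      _mm_store_si128((__m128i *)dst, _mm_unpacklo_epi64(row, row));   /* -> lanes 0..7 */
    }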
|
D | adaptive_quantize_sse2.c |
      78  _mm_store_si128((__m128i *)(qcoeff_ptr), zero);   in aom_quantize_b_adaptive_sse2()
      79  _mm_store_si128((__m128i *)(qcoeff_ptr + 4), zero);   in aom_quantize_b_adaptive_sse2()
      80  _mm_store_si128((__m128i *)(qcoeff_ptr + 8), zero);   in aom_quantize_b_adaptive_sse2()
      81  _mm_store_si128((__m128i *)(qcoeff_ptr + 12), zero);   in aom_quantize_b_adaptive_sse2()
      82  _mm_store_si128((__m128i *)(dqcoeff_ptr), zero);   in aom_quantize_b_adaptive_sse2()
      83  _mm_store_si128((__m128i *)(dqcoeff_ptr + 4), zero);   in aom_quantize_b_adaptive_sse2()
      84  _mm_store_si128((__m128i *)(dqcoeff_ptr + 8), zero);   in aom_quantize_b_adaptive_sse2()
      85  _mm_store_si128((__m128i *)(dqcoeff_ptr + 12), zero);   in aom_quantize_b_adaptive_sse2()
      138  _mm_store_si128((__m128i *)(qcoeff_ptr + index), zero);   in aom_quantize_b_adaptive_sse2()
      139  _mm_store_si128((__m128i *)(qcoeff_ptr + index + 4), zero);   in aom_quantize_b_adaptive_sse2()
      [all …]
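These quantizer hits sit on the skip path: when a block's coefficients all quantize to zero, qcoeff/dqcoeff are cleared four 32-bit values per store. A minimal sketch of that clearing step (hypothetical name; both buffers assumed 16-byte aligned):

    #include <emmintrin.h> /* SSE2 */
    #include <stdint.h>

    /* Zero 16 32-bit coefficients in each buffer with aligned stores,
       as the skip path of aom_quantize_b_adaptive_sse2() does. */
    static void zero_coeffs_sketch(int32_t *qcoeff, int32_t *dqcoeff) {
      const __m128i zero = _mm_setzero_si128();
      for (int i = 0; i < 16; i += 4) {
        _mm_store_si128((__m128i *)(qcoeff + i), zero);
        _mm_store_si128((__m128i *)(dqcoeff + i), zero);
      }
    }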
|
D | quantize_ssse3.c |
      58  _mm_store_si128((__m128i *)(dqcoeff), dqcoeff32_0);   in calculate_dqcoeff_and_store_64x64()
      59  _mm_store_si128((__m128i *)(dqcoeff + 4), dqcoeff32_1);   in calculate_dqcoeff_and_store_64x64()
      108  _mm_store_si128((__m128i *)(qcoeff_ptr), zero);   in aom_quantize_b_64x64_ssse3()
      109  _mm_store_si128((__m128i *)(qcoeff_ptr + 4), zero);   in aom_quantize_b_64x64_ssse3()
      110  _mm_store_si128((__m128i *)(qcoeff_ptr + 8), zero);   in aom_quantize_b_64x64_ssse3()
      111  _mm_store_si128((__m128i *)(qcoeff_ptr + 12), zero);   in aom_quantize_b_64x64_ssse3()
      112  _mm_store_si128((__m128i *)(dqcoeff_ptr), zero);   in aom_quantize_b_64x64_ssse3()
      113  _mm_store_si128((__m128i *)(dqcoeff_ptr + 4), zero);   in aom_quantize_b_64x64_ssse3()
      114  _mm_store_si128((__m128i *)(dqcoeff_ptr + 8), zero);   in aom_quantize_b_64x64_ssse3()
      115  _mm_store_si128((__m128i *)(dqcoeff_ptr + 12), zero);   in aom_quantize_b_64x64_ssse3()
      [all …]
|
D | highbd_adaptive_quantize_sse2.c |
      170  _mm_store_si128((__m128i *)(qcoeff_ptr), zero);   in aom_highbd_quantize_b_adaptive_sse2()
      171  _mm_store_si128((__m128i *)(qcoeff_ptr + 4), zero);   in aom_highbd_quantize_b_adaptive_sse2()
      172  _mm_store_si128((__m128i *)(dqcoeff_ptr), zero);   in aom_highbd_quantize_b_adaptive_sse2()
      173  _mm_store_si128((__m128i *)(dqcoeff_ptr + 4), zero);   in aom_highbd_quantize_b_adaptive_sse2()
      195  _mm_store_si128((__m128i *)(qcoeff_ptr), qcoeff0);   in aom_highbd_quantize_b_adaptive_sse2()
      196  _mm_store_si128((__m128i *)(qcoeff_ptr + 4), qcoeff1);   in aom_highbd_quantize_b_adaptive_sse2()
      201  _mm_store_si128((__m128i *)(dqcoeff_ptr), coeff0);   in aom_highbd_quantize_b_adaptive_sse2()
      202  _mm_store_si128((__m128i *)(dqcoeff_ptr + 4), coeff1);   in aom_highbd_quantize_b_adaptive_sse2()
      225  _mm_store_si128((__m128i *)(qcoeff_ptr + index), zero);   in aom_highbd_quantize_b_adaptive_sse2()
      226  _mm_store_si128((__m128i *)(qcoeff_ptr + index + 4), zero);   in aom_highbd_quantize_b_adaptive_sse2()
      [all …]
|
D | avg_intrin_sse2.c |
      252  _mm_store_si128((__m128i *)coeff16, src[0]);   in hadamard_8x8_sse2()
      254  _mm_store_si128((__m128i *)coeff16, src[1]);   in hadamard_8x8_sse2()
      256  _mm_store_si128((__m128i *)coeff16, src[2]);   in hadamard_8x8_sse2()
      258  _mm_store_si128((__m128i *)coeff16, src[3]);   in hadamard_8x8_sse2()
      260  _mm_store_si128((__m128i *)coeff16, src[4]);   in hadamard_8x8_sse2()
      262  _mm_store_si128((__m128i *)coeff16, src[5]);   in hadamard_8x8_sse2()
      264  _mm_store_si128((__m128i *)coeff16, src[6]);   in hadamard_8x8_sse2()
      266  _mm_store_si128((__m128i *)coeff16, src[7]);   in hadamard_8x8_sse2()
      290  _mm_store_si128((__m128i *)coeff, src[0]);   in aom_hadamard_lp_8x8_sse2()
      292  _mm_store_si128((__m128i *)coeff, src[1]);   in aom_hadamard_lp_8x8_sse2()
      [all …]
|
D | intrapred_sse2.c |
      39  _mm_store_si128((__m128i *)dst, *row);   in dc_store_16xh()
      48  _mm_store_si128((__m128i *)dst, *row);   in dc_store_32xh()
      49  _mm_store_si128((__m128i *)(dst + 16), *row);   in dc_store_32xh()
      57  _mm_store_si128((__m128i *)dst, *row);   in dc_store_64xh()
      58  _mm_store_si128((__m128i *)(dst + 16), *row);   in dc_store_64xh()
      59  _mm_store_si128((__m128i *)(dst + 32), *row);   in dc_store_64xh()
      60  _mm_store_si128((__m128i *)(dst + 48), *row);   in dc_store_64xh()
      924  _mm_store_si128((__m128i *)dst, row0);   in v_predictor_32xh()
      925  _mm_store_si128((__m128i *)(dst + 16), row1);   in v_predictor_32xh()
      955  _mm_store_si128((__m128i *)dst, row0);   in v_predictor_64xh()
      [all …]
|
/external/libvpx/libvpx/vpx_dsp/x86/ |
D | highbd_intrapred_intrin_ssse3.c |
      60  _mm_store_si128((__m128i *)*dst, *row);   in d45_store_8()
      75  _mm_store_si128((__m128i *)dst, avg3);   in vpx_highbd_d45_predictor_8x8_ssse3()
      91  _mm_store_si128((__m128i *)*dst, *row_0);   in d45_store_16()
      92  _mm_store_si128((__m128i *)(*dst + 8), *row_1);   in d45_store_16()
      111  _mm_store_si128((__m128i *)dst, avg3_0);   in vpx_highbd_d45_predictor_16x16_ssse3()
      112  _mm_store_si128((__m128i *)(dst + 8), avg3_1);   in vpx_highbd_d45_predictor_16x16_ssse3()
      155  _mm_store_si128((__m128i *)dst, avg3_0);   in vpx_highbd_d45_predictor_32x32_ssse3()
      156  _mm_store_si128((__m128i *)(dst + 8), avg3_1);   in vpx_highbd_d45_predictor_32x32_ssse3()
      157  _mm_store_si128((__m128i *)(dst + 16), avg3_2);   in vpx_highbd_d45_predictor_32x32_ssse3()
      158  _mm_store_si128((__m128i *)(dst + 24), avg3_3);   in vpx_highbd_d45_predictor_32x32_ssse3()
      [all …]
|
D | bitdepth_conversion_sse2.h |
      39  _mm_store_si128((__m128i *)(b), a_1);   in store_tran_low()
      40  _mm_store_si128((__m128i *)(b + 4), a_2);   in store_tran_low()
      42  _mm_store_si128((__m128i *)(b), a);   in store_tran_low()
      50  _mm_store_si128((__m128i *)(a), zero);   in store_zero_tran_low()
      51  _mm_store_si128((__m128i *)(a + 4), zero);   in store_zero_tran_low()
      53  _mm_store_si128((__m128i *)(a), zero);   in store_zero_tran_low()
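store_tran_low() stores the 16-bit vector as-is when tran_low_t is 16-bit, but in high-bit-depth builds it must widen eight int16 lanes to int32 first, hence the paired stores at b and b + 4. A hedged sketch of the widening store, using unpack-and-shift sign extension, which may differ in detail from the header's own instruction sequence:

    #include <emmintrin.h> /* SSE2 */
    #include <stdint.h>

    /* Sign-extend eight int16 lanes to int32 and store them aligned. */
    static void store_widened_sketch(__m128i a, int32_t *b) {
      const __m128i lo = _mm_srai_epi32(_mm_unpacklo_epi16(a, a), 16);
      const __m128i hi = _mm_srai_epi32(_mm_unpackhi_epi16(a, a), 16);
      _mm_store_si128((__m128i *)b, lo);       /* lanes 0..3 */
      _mm_store_si128((__m128i *)(b + 4), hi); /* lanes 4..7 */
    }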
|
D | quantize_ssse3.c |
      172  _mm_store_si128((__m128i *)(qcoeff_ptr), zero);   in vpx_quantize_b_32x32_ssse3()
      173  _mm_store_si128((__m128i *)(qcoeff_ptr + 8), zero);   in vpx_quantize_b_32x32_ssse3()
      174  _mm_store_si128((__m128i *)(dqcoeff_ptr), zero);   in vpx_quantize_b_32x32_ssse3()
      175  _mm_store_si128((__m128i *)(dqcoeff_ptr + 8), zero);   in vpx_quantize_b_32x32_ssse3()
      177  _mm_store_si128((__m128i *)(qcoeff_ptr + 4), zero);   in vpx_quantize_b_32x32_ssse3()
      178  _mm_store_si128((__m128i *)(qcoeff_ptr + 12), zero);   in vpx_quantize_b_32x32_ssse3()
      179  _mm_store_si128((__m128i *)(dqcoeff_ptr + 4), zero);   in vpx_quantize_b_32x32_ssse3()
      180  _mm_store_si128((__m128i *)(dqcoeff_ptr + 12), zero);   in vpx_quantize_b_32x32_ssse3()
      226  _mm_store_si128((__m128i *)(qcoeff_ptr + index), zero);   in vpx_quantize_b_32x32_ssse3()
      227  _mm_store_si128((__m128i *)(qcoeff_ptr + index + 8), zero);   in vpx_quantize_b_32x32_ssse3()
      [all …]
|
D | highbd_intrapred_intrin_sse2.c |
      52  _mm_store_si128((__m128i *)dst, _mm_unpacklo_epi64(row0, row0));   in vpx_highbd_h_predictor_8x8_sse2()
      54  _mm_store_si128((__m128i *)dst, _mm_unpacklo_epi64(row1, row1));   in vpx_highbd_h_predictor_8x8_sse2()
      56  _mm_store_si128((__m128i *)dst, _mm_unpacklo_epi64(row2, row2));   in vpx_highbd_h_predictor_8x8_sse2()
      58  _mm_store_si128((__m128i *)dst, _mm_unpacklo_epi64(row3, row3));   in vpx_highbd_h_predictor_8x8_sse2()
      60  _mm_store_si128((__m128i *)dst, _mm_unpackhi_epi64(row4, row4));   in vpx_highbd_h_predictor_8x8_sse2()
      62  _mm_store_si128((__m128i *)dst, _mm_unpackhi_epi64(row5, row5));   in vpx_highbd_h_predictor_8x8_sse2()
      64  _mm_store_si128((__m128i *)dst, _mm_unpackhi_epi64(row6, row6));   in vpx_highbd_h_predictor_8x8_sse2()
      66  _mm_store_si128((__m128i *)dst, _mm_unpackhi_epi64(row7, row7));   in vpx_highbd_h_predictor_8x8_sse2()
      72  _mm_store_si128((__m128i *)*dst, val);   in h_store_16_unpacklo()
      73  _mm_store_si128((__m128i *)(*dst + 8), val);   in h_store_16_unpacklo()
      [all …]
|
D | quantize_ssse3.h |
      43  _mm_store_si128((__m128i *)(dqcoeff), dqcoeff32_0);   in calculate_dqcoeff_and_store_32x32()
      44  _mm_store_si128((__m128i *)(dqcoeff + 4), dqcoeff32_1);   in calculate_dqcoeff_and_store_32x32()
      46  _mm_store_si128((__m128i *)(dqcoeff),   in calculate_dqcoeff_and_store_32x32()
|
/external/libhevc/common/x86/ |
D | ihevc_itrans_recon_16x16_ssse3_intr.c |
      271  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_34);   in ihevc_itrans_recon_16x16_ssse3()
      273  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_35);   in ihevc_itrans_recon_16x16_ssse3()
      290  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_34);   in ihevc_itrans_recon_16x16_ssse3()
      292  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_35);   in ihevc_itrans_recon_16x16_ssse3()
      309  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_34);   in ihevc_itrans_recon_16x16_ssse3()
      311  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_35);   in ihevc_itrans_recon_16x16_ssse3()
      326  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_34);   in ihevc_itrans_recon_16x16_ssse3()
      328  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_35);   in ihevc_itrans_recon_16x16_ssse3()
      344  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_34);   in ihevc_itrans_recon_16x16_ssse3()
      346  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_35);   in ihevc_itrans_recon_16x16_ssse3()
      [all …]
|
D | ihevc_itrans_recon_32x32_ssse3_intr.c |
      376  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_34);   in ihevc_itrans_recon_32x32_ssse3()
      378  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_35);   in ihevc_itrans_recon_32x32_ssse3()
      390  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_34);   in ihevc_itrans_recon_32x32_ssse3()
      392  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_35);   in ihevc_itrans_recon_32x32_ssse3()
      403  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_34);   in ihevc_itrans_recon_32x32_ssse3()
      405  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_35);   in ihevc_itrans_recon_32x32_ssse3()
      417  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_34);   in ihevc_itrans_recon_32x32_ssse3()
      419  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_35);   in ihevc_itrans_recon_32x32_ssse3()
      431  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_34);   in ihevc_itrans_recon_32x32_ssse3()
      433  _mm_store_si128((__m128i *)pi2_scratch, m_temp_reg_35);   in ihevc_itrans_recon_32x32_ssse3()
      [all …]
|
/external/libaom/libaom/av1/encoder/x86/ |
D | av1_quantize_sse2.c |
      43  _mm_store_si128((__m128i *)addr, y0);   in write_qcoeff()
      44  _mm_store_si128((__m128i *)addr + 1, y1);   in write_qcoeff()
      49  _mm_store_si128((__m128i *)addr + 2, y0);   in write_qcoeff()
      50  _mm_store_si128((__m128i *)addr + 3, y1);   in write_qcoeff()
      52  _mm_store_si128((__m128i *)addr, *qc0);   in write_qcoeff()
      53  _mm_store_si128((__m128i *)addr + 1, *qc1);   in write_qcoeff()
      61  _mm_store_si128((__m128i *)addr, zero);   in write_zero()
      62  _mm_store_si128((__m128i *)addr + 1, zero);   in write_zero()
      63  _mm_store_si128((__m128i *)addr + 2, zero);   in write_zero()
      64  _mm_store_si128((__m128i *)addr + 3, zero);   in write_zero()
      [all …]
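Note the addressing idiom in write_qcoeff()/write_zero(): `(__m128i *)addr + 1` advances by one whole 16-byte vector, not one scalar element, so four stores cover 64 bytes. A minimal sketch of write_zero() under that reading (hypothetical name; addr assumed 16-byte aligned):

    #include <emmintrin.h> /* SSE2 */
    #include <stdint.h>

    /* Clear 64 bytes; each +k on an __m128i pointer steps k * 16 bytes. */
    static void write_zero_sketch(int32_t *addr) {
      const __m128i zero = _mm_setzero_si128();
      _mm_store_si128((__m128i *)addr + 0, zero);
      _mm_store_si128((__m128i *)addr + 1, zero);
      _mm_store_si128((__m128i *)addr + 2, zero);
      _mm_store_si128((__m128i *)addr + 3, zero);
    }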
|
D | encodetxb_sse2.c |
      94  _mm_store_si128((__m128i *)cc, count);   in get_4_nz_map_contexts_2d()
      128  _mm_store_si128((__m128i *)coeff_contexts, count);   in get_4_nz_map_contexts_hor()
      160  _mm_store_si128((__m128i *)coeff_contexts, count);   in get_4_nz_map_contexts_ver()
      203  _mm_store_si128((__m128i *)cc, count);   in get_8_coeff_contexts_2d()
      238  _mm_store_si128((__m128i *)coeff_contexts, count);   in get_8_coeff_contexts_hor()
      270  _mm_store_si128((__m128i *)coeff_contexts, count);   in get_8_coeff_contexts_ver()
      332  _mm_store_si128((__m128i *)cc, count);   in get_16n_coeff_contexts_2d()
      387  _mm_store_si128((__m128i *)coeff_contexts, count);   in get_16n_coeff_contexts_hor()
      421  _mm_store_si128((__m128i *)coeff_contexts, count);   in get_16n_coeff_contexts_ver()
|
/external/jemalloc_new/test/include/test/ |
D | SFMT-sse2.h |
      94  _mm_store_si128(&ctx->sfmt[i].si, r);   in gen_rand_all()
      101  _mm_store_si128(&ctx->sfmt[i].si, r);   in gen_rand_all()
      124  _mm_store_si128(&array[i].si, r);   in gen_rand_array()
      131  _mm_store_si128(&array[i].si, r);   in gen_rand_array()
      139  _mm_store_si128(&array[i].si, r);   in gen_rand_array()
      145  _mm_store_si128(&ctx->sfmt[j].si, r);   in gen_rand_array()
      150  _mm_store_si128(&array[i].si, r);   in gen_rand_array()
      151  _mm_store_si128(&ctx->sfmt[j++].si, r);   in gen_rand_array()
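SFMT's SSE2 path writes each freshly generated 128-bit word back into the generator state (gen_rand_all) or into the caller's output array (gen_rand_array) with aligned stores. A stripped-down sketch of that produce-and-store loop; the recursion body below is a placeholder XOR/shift, not SFMT's actual recurrence, and the state size is hypothetical:

    #include <emmintrin.h> /* SSE2 */

    #define N128 156 /* hypothetical number of 128-bit state words */

    /* Placeholder for SFMT's recursion; the real one mixes more terms. */
    static __m128i recurse_sketch(__m128i a, __m128i b) {
      return _mm_xor_si128(a, _mm_srli_epi32(b, 11));
    }

    /* Regenerate the whole state in place, one aligned store per word. */
    static void gen_rand_all_sketch(__m128i state[N128]) {
      __m128i r = state[N128 - 1];
      for (int i = 0; i < N128; ++i) {
        r = recurse_sketch(state[i], r);
        _mm_store_si128(&state[i], r);
      }
    }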
|
/external/libvpx/libvpx/vp8/encoder/x86/ |
D | vp8_quantize_sse2.c |
      81  _mm_store_si128((__m128i *)(x), x_minus_zbin0);   in vp8_regular_quantize_b_sse2()
      82  _mm_store_si128((__m128i *)(x + 8), x_minus_zbin1);   in vp8_regular_quantize_b_sse2()
      106  _mm_store_si128((__m128i *)(y), y0);   in vp8_regular_quantize_b_sse2()
      107  _mm_store_si128((__m128i *)(y + 8), y1);   in vp8_regular_quantize_b_sse2()
      136  _mm_store_si128((__m128i *)(d->dqcoeff), y0);   in vp8_regular_quantize_b_sse2()
      137  _mm_store_si128((__m128i *)(d->dqcoeff + 8), y1);   in vp8_regular_quantize_b_sse2()
      183  _mm_store_si128((__m128i *)(d->qcoeff), x0);   in vp8_fast_quantize_b_sse2()
      184  _mm_store_si128((__m128i *)(d->qcoeff + 8), x1);   in vp8_fast_quantize_b_sse2()
      191  _mm_store_si128((__m128i *)(d->dqcoeff), xdq0);   in vp8_fast_quantize_b_sse2()
      192  _mm_store_si128((__m128i *)(d->dqcoeff + 8), xdq1);   in vp8_fast_quantize_b_sse2()
|
D | vp8_quantize_ssse3.c |
      89  _mm_store_si128((__m128i *)(d->qcoeff), x0);   in vp8_fast_quantize_b_ssse3()
      90  _mm_store_si128((__m128i *)(d->qcoeff + 8), x1);   in vp8_fast_quantize_b_ssse3()
      97  _mm_store_si128((__m128i *)(d->dqcoeff), x0);   in vp8_fast_quantize_b_ssse3()
      98  _mm_store_si128((__m128i *)(d->dqcoeff + 8), x1);   in vp8_fast_quantize_b_ssse3()
|
D | quantize_sse4.c |
      109  _mm_store_si128((__m128i *)(d->qcoeff), qcoeff0);   in vp8_regular_quantize_b_sse4_1()
      110  _mm_store_si128((__m128i *)(d->qcoeff + 8), qcoeff1);   in vp8_regular_quantize_b_sse4_1()
      115  _mm_store_si128((__m128i *)(d->dqcoeff), dqcoeff0);   in vp8_regular_quantize_b_sse4_1()
      116  _mm_store_si128((__m128i *)(d->dqcoeff + 8), dqcoeff1);   in vp8_regular_quantize_b_sse4_1()
|
/external/mesa3d/src/mesa/main/ |
D | streaming-load-memcpy.c |
      74  _mm_store_si128(dst_cacheline + 0, temp1);   in _mesa_streaming_load_memcpy()
      75  _mm_store_si128(dst_cacheline + 1, temp2);   in _mesa_streaming_load_memcpy()
      76  _mm_store_si128(dst_cacheline + 2, temp3);   in _mesa_streaming_load_memcpy()
      77  _mm_store_si128(dst_cacheline + 3, temp4);   in _mesa_streaming_load_memcpy()
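_mesa_streaming_load_memcpy() pairs these plain aligned stores with non-temporal loads: movntdqa (_mm_stream_load_si128, SSE4.1) pulls a whole 64-byte cache line out of write-combined memory into four temporaries, which _mm_store_si128 then commits to the cacheable destination. A hedged sketch of the inner loop (alignment and tail handling simplified; both pointers and len assumed multiples of 64):

    #include <smmintrin.h> /* SSE4.1: _mm_stream_load_si128 */
    #include <stddef.h>

    /* Copy len bytes from write-combined memory, one cache line per
       iteration. src is non-const because movntdqa takes __m128i *. */
    static void streaming_memcpy_sketch(void *dst, void *src, size_t len) {
      __m128i *d = (__m128i *)dst;
      __m128i *s = (__m128i *)src;
      for (size_t i = 0; i < len / 64; ++i, d += 4, s += 4) {
        const __m128i t0 = _mm_stream_load_si128(s + 0);
        const __m128i t1 = _mm_stream_load_si128(s + 1);
        const __m128i t2 = _mm_stream_load_si128(s + 2);
        const __m128i t3 = _mm_stream_load_si128(s + 3);
        _mm_store_si128(d + 0, t0);
        _mm_store_si128(d + 1, t1);
        _mm_store_si128(d + 2, t2);
        _mm_store_si128(d + 3, t3);
      }
    }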
|
/external/libvpx/libvpx/vp8/common/x86/ |
D | bilinear_filter_sse2.c |
      30  _mm_store_si128((__m128i *)dst, a_lo);   in horizontal_16x16()
      31  _mm_store_si128((__m128i *)(dst + 8), a_hi);   in horizontal_16x16()
      67  _mm_store_si128((__m128i *)dst, shifted_lo);   in horizontal_16x16()
      68  _mm_store_si128((__m128i *)(dst + 8), shifted_hi);   in horizontal_16x16()
      84  _mm_store_si128((__m128i *)dst, packed);   in vertical_16x16()
      122  _mm_store_si128((__m128i *)dst, packed);   in vertical_16x16()
      152  _mm_store_si128((__m128i *)dst, a_u16);   in horizontal_8xN()
      177  _mm_store_si128((__m128i *)dst, shifted);   in horizontal_8xN()
|
/external/XNNPACK/src/qs8-gavgpool/gen/ |
D | 7p7x-minmax-sse41-c24-acc2.c |
      103  _mm_store_si128((__m128i*) b, vacc0123);   in xnn_qs8_gavgpool_minmax_ukernel_7p7x__sse41_c24_acc2()
      104  _mm_store_si128((__m128i*) (b + 4), vacc4567);   in xnn_qs8_gavgpool_minmax_ukernel_7p7x__sse41_c24_acc2()
      105  _mm_store_si128((__m128i*) (b + 8), vacc89AB);   in xnn_qs8_gavgpool_minmax_ukernel_7p7x__sse41_c24_acc2()
      106  _mm_store_si128((__m128i*) (b + 12), vaccCDEF);   in xnn_qs8_gavgpool_minmax_ukernel_7p7x__sse41_c24_acc2()
      107  _mm_store_si128((__m128i*) (b + 16), vaccGHIJ);   in xnn_qs8_gavgpool_minmax_ukernel_7p7x__sse41_c24_acc2()
      108  _mm_store_si128((__m128i*) (b + 20), vaccKLMN);   in xnn_qs8_gavgpool_minmax_ukernel_7p7x__sse41_c24_acc2()
      142  _mm_store_si128((__m128i*) b, vacc0123);   in xnn_qs8_gavgpool_minmax_ukernel_7p7x__sse41_c24_acc2()
      143  _mm_store_si128((__m128i*) (b + 4), vacc4567);   in xnn_qs8_gavgpool_minmax_ukernel_7p7x__sse41_c24_acc2()
      221  _mm_store_si128((__m128i*) b, vacc0123);   in xnn_qs8_gavgpool_minmax_ukernel_7p7x__sse41_c24_acc2()
      222  _mm_store_si128((__m128i*) (b + 4), vacc4567);   in xnn_qs8_gavgpool_minmax_ukernel_7p7x__sse41_c24_acc2()
      [all …]
|