/external/libvpx/libvpx/vpx_dsp/mips/
vpx_convolve_avg_msa.c
     23  LD_UB4(src, src_stride, src0, src1, src2, src3);  in avg_width4_msa()
     26  LD_UB4(dst, dst_stride, dst0, dst1, dst2, dst3);  in avg_width4_msa()
     65  LD_UB4(src, src_stride, src0, src1, src2, src3);  in avg_width8_msa()
     67  LD_UB4(dst, dst_stride, dst0, dst1, dst2, dst3);  in avg_width8_msa()
    111  LD_UB4(src, src_stride, src0, src2, src4, src6);  in avg_width32_msa()
    112  LD_UB4(src + 16, src_stride, src1, src3, src5, src7);  in avg_width32_msa()
    114  LD_UB4(dst_dup, dst_stride, dst0, dst2, dst4, dst6);  in avg_width32_msa()
    115  LD_UB4(dst_dup + 16, dst_stride, dst1, dst3, dst5, dst7);  in avg_width32_msa()
    117  LD_UB4(src, src_stride, src8, src10, src12, src14);  in avg_width32_msa()
    118  LD_UB4(src + 16, src_stride, src9, src11, src13, src15);  in avg_width32_msa()
    [all …]
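Every match above is the same load/average/store pattern: LD_UB4 pulls four rows of source and four rows of destination, the rows are averaged per byte lane, and the result is stored back. A minimal one-row-per-iteration sketch of that pattern with raw MSA intrinsics (the function name and loop shape are illustrative; the real functions unroll four rows at a time through LD_UB4 and the AVER/ST helper macros):

```c
#include <stdint.h>
#include <msa.h>

/* Average a 16-pixel-wide column of src into dst, one row per iteration.
 * The avg_width16-style loops above do the same work four rows at a time. */
static void avg_width16_sketch(const uint8_t *src, int32_t src_stride,
                               uint8_t *dst, int32_t dst_stride,
                               int32_t height) {
  int32_t row;
  for (row = 0; row < height; ++row) {
    v16u8 s = (v16u8)__msa_ld_b((const v16i8 *)src, 0);
    v16u8 d = (v16u8)__msa_ld_b((const v16i8 *)dst, 0);
    d = __msa_aver_u_b(s, d); /* per-byte (s + d + 1) >> 1 */
    __msa_st_b((v16i8)d, dst, 0);
    src += src_stride;
    dst += dst_stride;
  }
}
```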
vpx_convolve_copy_msa.c
     40  LD_UB4(src, src_stride, src0, src1, src2, src3);  in copy_width8_msa()
     71  LD_UB4(src, src_stride, src0, src1, src2, src3);  in copy_width8_msa()
    135  LD_UB4(src, src_stride, src0, src1, src2, src3);  in copy_width16_msa()
    144  LD_UB4(src, src_stride, src0, src1, src2, src3);  in copy_width16_msa()
    160  LD_UB4(src, src_stride, src0, src1, src2, src3);  in copy_width32_msa()
    161  LD_UB4(src + 16, src_stride, src4, src5, src6, src7);  in copy_width32_msa()
    167  LD_UB4(src, src_stride, src0, src1, src2, src3);  in copy_width32_msa()
    168  LD_UB4(src + 16, src_stride, src4, src5, src6, src7);  in copy_width32_msa()
    174  LD_UB4(src, src_stride, src0, src1, src2, src3);  in copy_width32_msa()
    175  LD_UB4(src + 16, src_stride, src4, src5, src6, src7);  in copy_width32_msa()
    [all …]
sad_msa.c
     57  LD_UB4(src, src_stride, src0, src1, src2, src3);  in sad_8width_msa()
     59  LD_UB4(ref, ref_stride, ref0, ref1, ref2, ref3);  in sad_8width_msa()
    141  LD_UB4(src, 16, src0, src1, src2, src3);  in sad_64width_msa()
    143  LD_UB4(ref, 16, ref0, ref1, ref2, ref3);  in sad_64width_msa()
    148  LD_UB4(src, 16, src0, src1, src2, src3);  in sad_64width_msa()
    150  LD_UB4(ref, 16, ref0, ref1, ref2, ref3);  in sad_64width_msa()
    179  LD_UB4(ref_ptr, ref_stride, ref0, ref1, ref2, ref3);  in sad_4width_x3_msa()
    214  LD_UB4(src, src_stride, src0, src1, src2, src3);  in sad_8width_x3_msa()
    216  LD_UB4(ref, ref_stride, ref00, ref11, ref22, ref33);  in sad_8width_x3_msa()
    307  LD_UB4(ref_ptr, ref_stride, ref0, ref1, ref2, ref3);  in sad_4width_x8_msa()
    [all …]
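The SAD kernels share one inner loop: load src and ref blocks with LD_UB4, take the per-byte absolute difference, and widen-accumulate into unsigned halfword lanes, with a horizontal reduction at the end. A hedged single-row sketch (names and the exact reduction sequence are illustrative, not the library's SAD_UB2_UH-based code):

```c
#include <stdint.h>
#include <msa.h>

/* 16-wide sum of absolute differences over `height` rows; the
 * sad_16width-style loops above do four rows per pass via LD_UB4. */
static uint32_t sad_16xN_sketch(const uint8_t *src, int32_t src_stride,
                                const uint8_t *ref, int32_t ref_stride,
                                int32_t height) {
  v8u16 sad = { 0 };
  v4u32 sad_w;
  v2u64 sad_d;
  int32_t row;
  for (row = 0; row < height; ++row) {
    v16u8 s = (v16u8)__msa_ld_b((const v16i8 *)src, 0);
    v16u8 r = (v16u8)__msa_ld_b((const v16i8 *)ref, 0);
    v16u8 diff = __msa_asub_u_b(s, r); /* |s - r| per byte */
    sad += __msa_hadd_u_h(diff, diff); /* widen byte pairs into u16 lanes */
    src += src_stride;
    ref += ref_stride;
  }
  /* horizontal reduction: u16 lanes -> u32 -> u64 -> scalar */
  sad_w = __msa_hadd_u_w(sad, sad);
  sad_d = __msa_hadd_u_d(sad_w, sad_w);
  return (uint32_t)(__msa_copy_u_d((v2i64)sad_d, 0) +
                    __msa_copy_u_d((v2i64)sad_d, 1));
}
```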
variance_msa.c
     79  LD_UB4(src_ptr, src_stride, src0, src1, src2, src3);  in sse_diff_8width_msa()
     81  LD_UB4(ref_ptr, ref_stride, ref0, ref1, ref2, ref3);  in sse_diff_8width_msa()
    237  LD_UB4(src_ptr, 16, src0, src1, src2, src3);  in sse_diff_64x32_msa()
    239  LD_UB4(ref_ptr, 16, ref0, ref1, ref2, ref3);  in sse_diff_64x32_msa()
    246  LD_UB4(src_ptr, 16, src0, src1, src2, src3);  in sse_diff_64x32_msa()
    248  LD_UB4(ref_ptr, 16, ref0, ref1, ref2, ref3);  in sse_diff_64x32_msa()
    276  LD_UB4(src_ptr, 16, src0, src1, src2, src3);  in sse_diff_64x64_msa()
    278  LD_UB4(ref_ptr, 16, ref0, ref1, ref2, ref3);  in sse_diff_64x64_msa()
    285  LD_UB4(src_ptr, 16, src0, src1, src2, src3);  in sse_diff_64x64_msa()
    287  LD_UB4(ref_ptr, 16, ref0, ref1, ref2, ref3);  in sse_diff_64x64_msa()
    [all …]
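The sse_diff functions accumulate two running quantities per block, the signed sum of pixel differences and the dot product of the differences with themselves; variance falls out at the end as sse - sum*sum/(w*h). A sketch of the per-row step using raw intrinsics; this is one standard MSA way to compute it, not necessarily line-for-line what variance_msa.c does:

```c
#include <msa.h>

/* One 16-byte row of the sse/diff accumulation: *sum collects
 * (src - ref), *sse collects (src - ref)^2. */
static void sse_diff_row_sketch(v16u8 src, v16u8 ref,
                                v8i16 *sum, v4i32 *sse) {
  /* interleave so each byte pair is (ref, src) */
  v16u8 lo = (v16u8)__msa_ilvr_b((v16i8)src, (v16i8)ref);
  v16u8 hi = (v16u8)__msa_ilvl_b((v16i8)src, (v16i8)ref);
  /* per pair: odd - even = src - ref, widened to signed halfwords */
  v8i16 d0 = __msa_hsub_u_h(lo, lo);
  v8i16 d1 = __msa_hsub_u_h(hi, hi);
  *sse = __msa_dpadd_s_w(*sse, d0, d0); /* accumulate d*d */
  *sse = __msa_dpadd_s_w(*sse, d1, d1);
  *sum += d0 + d1;                      /* running signed difference sum */
}
```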
sub_pixel_variance_msa.c
     89  LD_UB4(src_ptr, src_stride, src0, src1, src2, src3);  in avg_sse_diff_8width_msa()
     91  LD_UB4(ref_ptr, ref_stride, ref0, ref1, ref2, ref3);  in avg_sse_diff_8width_msa()
    295  LD_UB4(sec_pred, 16, pred0, pred1, pred2, pred3);  in avg_sse_diff_64x32_msa()
    297  LD_UB4(src_ptr, 16, src0, src1, src2, src3);  in avg_sse_diff_64x32_msa()
    299  LD_UB4(ref_ptr, 16, ref0, ref1, ref2, ref3);  in avg_sse_diff_64x32_msa()
    308  LD_UB4(sec_pred, 16, pred0, pred1, pred2, pred3);  in avg_sse_diff_64x32_msa()
    310  LD_UB4(src_ptr, 16, src0, src1, src2, src3);  in avg_sse_diff_64x32_msa()
    312  LD_UB4(ref_ptr, 16, ref0, ref1, ref2, ref3);  in avg_sse_diff_64x32_msa()
    346  LD_UB4(sec_pred, 16, pred0, pred1, pred2, pred3);  in avg_sse_diff_64x64_msa()
    348  LD_UB4(src_ptr, 16, src0, src1, src2, src3);  in avg_sse_diff_64x64_msa()
    [all …]
vpx_convolve8_avg_vert_msa.c
    169  LD_UB4(dst_tmp, dst_stride, dst0, dst1, dst2, dst3);  in common_vt_8t_and_aver_dst_16w_mult_msa()
    432  LD_UB4(src, src_stride, src1, src2, src3, src4);  in common_vt_2t_and_aver_dst_16w_msa()
    435  LD_UB4(dst, dst_stride, dst0, dst1, dst2, dst3);  in common_vt_2t_and_aver_dst_16w_msa()
    482  LD_UB4(src, src_stride, src1, src2, src3, src4);  in common_vt_2t_and_aver_dst_32w_msa()
    483  LD_UB4(dst, dst_stride, dst0, dst1, dst2, dst3);  in common_vt_2t_and_aver_dst_32w_msa()
    487  LD_UB4(src + 16, src_stride, src6, src7, src8, src9);  in common_vt_2t_and_aver_dst_32w_msa()
    488  LD_UB4(dst + 16, dst_stride, dst4, dst5, dst6, dst7);  in common_vt_2t_and_aver_dst_32w_msa()
    551  LD_UB4(src, 16, src0, src3, src6, src9);  in common_vt_2t_and_aver_dst_64w_msa()
vpx_convolve8_vert_msa.c
    459  LD_UB4(src, src_stride, src1, src2, src3, src4);  in common_vt_2t_16w_msa()
    508  LD_UB4(src, src_stride, src1, src2, src3, src4);  in common_vt_2t_32w_msa()
    512  LD_UB4(src + 16, src_stride, src6, src7, src8, src9);  in common_vt_2t_32w_msa()
    571  LD_UB4(src, 16, src0, src3, src6, src9);  in common_vt_2t_64w_msa()
vpx_convolve8_avg_horiz_msa.c
    519  LD_UB4(dst, dst_stride, dst0, dst1, dst2, dst3);  in common_hz_2t_and_aver_dst_16w_msa()
    544  LD_UB4(dst, dst_stride, dst0, dst1, dst2, dst3);  in common_hz_2t_and_aver_dst_16w_msa()
    637  LD_UB4(dst, 16, dst0, dst1, dst2, dst3);  in common_hz_2t_and_aver_dst_64w_msa()
loopfilter_16_msa.c
    100  LD_UB4(filter48, 16, p2, p1, p0, q0);  in hz_lpf_t16_16w()
    487  LD_UB4((src - 8 * pitch), pitch, p7, p6, p5, p4);  in mb_lpf_horizontal_edge()
    488  LD_UB4(src + (4 * pitch), pitch, q4, q5, q6, q7);  in mb_lpf_horizontal_edge()
    845  LD_UB4(filter48, 16, p2, p1, p0, q0);  in vt_lpf_t16_8w()
   1168  LD_UB4(filter48, 16, p2, p1, p0, q0);  in vt_lpf_t16_16w()
inv_txfm_msa.h
    101  LD_UB4(dst_m, dst_stride, dst0_m, dst1_m, dst2_m, dst3_m); \
idct16x16_msa.c
    276  LD_UB4(dst, dst_stride, dst0, dst1, dst2, dst3);  in vpx_idct16x16_1_add_msa()
vpx_convolve8_avg_msa.c
    463  LD_UB4(dst, dst_stride, dst0, dst1, dst2, dst3);  in common_hv_2ht_2vt_and_aver_dst_16w_msa()
avg_msa.c
    549  LD_UB4(ref, 16, ref0, ref1, ref2, ref3);  in vpx_int_pro_col_msa()
vpx_convolve8_msa.c
    818  LD_UB4(src, 16, in0, in1, in2, in3);  in transpose8x8_to_dst()
macros_msa.h
    257  #define LD_UB4(...) LD_V4(v16u8, __VA_ARGS__)  (macro definition)
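This definition pins the vector type: LD_UB4 forwards to the generic LD_V4 with RTYPE fixed to v16u8 (sixteen unsigned bytes). LD_V4 in turn is two LD_V2 pairs, each a plain 16-byte vector load at psrc and psrc + stride. A reconstructed sketch of that chain; the exact bodies in macros_msa.h may differ in detail:

```c
/* Reconstructed expansion of LD_UB4 as defined above; illustrative only. */
#define LD_V(RTYPE, psrc) (*(const RTYPE *)(psrc))

#define LD_V2(RTYPE, psrc, stride, out0, out1) \
  {                                            \
    out0 = LD_V(RTYPE, (psrc));                \
    out1 = LD_V(RTYPE, (psrc) + stride);       \
  }

#define LD_V4(RTYPE, psrc, stride, out0, out1, out2, out3)  \
  {                                                         \
    LD_V2(RTYPE, (psrc), stride, out0, out1);               \
    LD_V2(RTYPE, (psrc) + 2 * stride, stride, out2, out3);  \
  }

/* So LD_UB4(src, stride, a, b, c, d) loads four v16u8 rows:
 *   a = row 0, b = row 1, c = row 2, d = row 3. */
```

Note that `stride` need not be a row pitch: several callers above pass 16, which simply loads 64 contiguous bytes into four registers.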
/external/libpng/mips/
filter_msa_intrinsics.c
    263  #define LD_UB4(...) LD_B4(v16u8, __VA_ARGS__)  (macro definition)
    377  LD_UB4(rp, 16, src0, src1, src2, src3);  in png_read_filter_row_up_msa()
    378  LD_UB4(pp, 16, src4, src5, src6, src7);  in png_read_filter_row_up_msa()
    398  LD_UB4(rp, 16, src0, src1, src2, src3);  in png_read_filter_row_up_msa()
    399  LD_UB4(pp, 16, src4, src5, src6, src7);  in png_read_filter_row_up_msa()
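png_read_filter_row_up_msa undoes PNG's Up filter, row[i] = row[i] + prior[i] (mod 256); the paired LD_UB4 calls above fetch 64 bytes of the current row (rp) and 64 bytes of the prior row (pp) per iteration. A reduced sketch of the same idea, one 16-byte vector at a time (function name and tail loop are illustrative):

```c
#include <stddef.h>
#include <stdint.h>
#include <msa.h>

/* PNG "Up" defilter: add the prior row to the current row, wrapping
 * per byte. The MSA code above does 64 bytes per loop via LD_UB4. */
static void up_filter_sketch(uint8_t *row, const uint8_t *prior, size_t n) {
  size_t i;
  for (i = 0; i + 16 <= n; i += 16) {
    v16u8 r = (v16u8)__msa_ld_b((const v16i8 *)(row + i), 0);
    v16u8 p = (v16u8)__msa_ld_b((const v16i8 *)(prior + i), 0);
    r += p; /* wrapping byte-wise add */
    __msa_st_b((v16i8)r, row + i, 0);
  }
  for (; i < n; ++i) row[i] = (uint8_t)(row[i] + prior[i]); /* scalar tail */
}
```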
/external/libvpx/libvpx/vp8/common/mips/msa/
idct_msa.c
    199  LD_UB4(dest, dest_stride, dest0, dest1, dest2, dest3);  in dequant_idct4x4_addblk_msa()
    243  LD_UB4(dest, dest_stride, dest0, dest1, dest2, dest3);  in dequant_idct4x4_addblk_2x_msa()
    284  LD_UB4(dest, dest_stride, dest0, dest1, dest2, dest3);  in dequant_idct_addconst_2x_msa()
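In all three functions the LD_UB4 load fetches the destination rows that the dequantized IDCT output is added to: pixels are widened to halfwords, summed with the residual, clamped to [0, 255], and repacked to bytes. A hedged sketch of that add-and-clip step for one 8-pixel row (the real 4x4 code stores only four bytes per row):

```c
#include <string.h>
#include <stdint.h>
#include <msa.h>

/* Add an 8-wide signed residual row to 8 destination pixels with
 * clamping to [0, 255]; the idct add functions above do this for
 * each of the four rows loaded by LD_UB4. */
static void add_residual8_sketch(v8i16 res, uint8_t *dest) {
  const v16i8 zero = { 0 };
  uint64_t out;
  v16u8 d = (v16u8)__msa_ld_b((const v16i8 *)dest, 0);
  v8i16 pix = (v8i16)__msa_ilvr_b(zero, (v16i8)d); /* u8 -> i16 */
  pix += res;
  pix = __msa_maxi_s_h(pix, 0);              /* clamp below at 0 */
  pix = (v8i16)__msa_sat_u_h((v8u16)pix, 7); /* clamp above at 255 */
  d = (v16u8)__msa_pckev_b((v16i8)pix, (v16i8)pix); /* repack to bytes */
  out = __msa_copy_u_d((v2i64)d, 0);
  memcpy(dest, &out, sizeof(out)); /* store the 8 result bytes */
}
```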
loopfilter_filters_msa.c
    501  LD_UB4(src - (pitch << 1), pitch, p1, p0, q0, q1);  in vp8_loop_filter_simple_horizontal_edge_msa()
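This match shows the loop-filter load idiom: starting two rows above a horizontal edge, a single LD_UB4 with the row pitch as stride drops p1, p0 (the rows above) and q0, q1 (the rows below) into four registers. A sketch of that load plus the simple-filter decision from the VP8 spec, |p0 - q0| * 2 + |p1 - q1| / 2 <= combined threshold (function and parameter names are illustrative):

```c
#include <stdint.h>
#include <msa.h>

/* Per-lane decision mask for VP8's simple loop filter across a
 * horizontal edge at `src`; all-ones lanes should be filtered. */
static v16u8 simple_hz_mask_sketch(const uint8_t *src, int32_t pitch,
                                   uint8_t thresh) {
  /* LD_UB4(src - (pitch << 1), pitch, p1, p0, q0, q1) expands to: */
  v16u8 p1 = (v16u8)__msa_ld_b((const v16i8 *)(src - 2 * pitch), 0);
  v16u8 p0 = (v16u8)__msa_ld_b((const v16i8 *)(src - pitch), 0);
  v16u8 q0 = (v16u8)__msa_ld_b((const v16i8 *)(src), 0);
  v16u8 q1 = (v16u8)__msa_ld_b((const v16i8 *)(src + pitch), 0);

  v16u8 d0 = __msa_asub_u_b(p0, q0);  /* |p0 - q0| */
  v16u8 d1 = __msa_asub_u_b(p1, q1);  /* |p1 - q1| */
  v16u8 sum = __msa_adds_u_b(d0, d0); /* saturating 2 * |p0 - q0| */
  sum = __msa_adds_u_b(sum, (v16u8)__msa_srli_b((v16i8)d1, 1));
  return (v16u8)__msa_cle_u_b(sum, (v16u8)__msa_fill_b(thresh));
}
```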
bilinear_filter_msa.c
    400  LD_UB4(src, src_stride, src1, src2, src3, src4);  in common_vt_2t_16w_msa()
vp8_macros_msa.h
    289  #define LD_UB4(...) LD_B4(v16u8, __VA_ARGS__)  (macro definition)
/external/libyuv/files/include/libyuv/
macros_msa.h
    168  #define LD_UB4(...) LD_B4(v16u8, __VA_ARGS__)  (macro definition)
/external/libyuv/files/source/
row_msa.cc
    296  LD_UB4(src, 16, src3, src2, src1, src0);  in MirrorRow_MSA()
    313  LD_UB4(src, 16, src3, src2, src1, src0);  in ARGBMirrorRow_MSA()
    646  LD_UB4(src_yuy2, 16, src0, src1, src2, src3);  in YUY2ToYRow_MSA()
    666  LD_UB4(src_yuy2, 16, src0, src1, src2, src3);  in YUY2ToUVRow_MSA()
    667  LD_UB4(src_yuy2_next, 16, src4, src5, src6, src7);  in YUY2ToUVRow_MSA()
    693  LD_UB4(src_yuy2, 16, src0, src1, src2, src3);  in YUY2ToUV422Row_MSA()
    711  LD_UB4(src_uyvy, 16, src0, src1, src2, src3);  in UYVYToYRow_MSA()
    731  LD_UB4(src_uyvy, 16, src0, src1, src2, src3);  in UYVYToUVRow_MSA()
    732  LD_UB4(src_uyvy_next, 16, src4, src5, src6, src7);  in UYVYToUVRow_MSA()
    758  LD_UB4(src_uyvy, 16, src0, src1, src2, src3);  in UYVYToUV422Row_MSA()
    [all …]
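MirrorRow passes its four output registers in descending order as part of reversing the row, while the YUY2/UYVY rows deinterleave packed 4:2:2 data: in YUY2 (Y0 U0 Y1 V0 ...) the luma sits in the even bytes, so one pckev_b over a pair of loaded vectors yields 16 Y samples. A reduced sketch of YUY2ToYRow's core (assumes width is a multiple of 16; the real loop handles 64 pixels per pass):

```c
#include <stdint.h>
#include <msa.h>

/* Extract Y from YUY2 (Y0 U0 Y1 V0 ...): keep the even bytes of each
 * 16-byte vector, so 32 input bytes become 16 luma bytes. */
static void yuy2_to_y_sketch(const uint8_t *src_yuy2, uint8_t *dst_y,
                             int width) {
  int x;
  for (x = 0; x < width; x += 16) {
    v16u8 s0 = (v16u8)__msa_ld_b((const v16i8 *)src_yuy2, 0);
    v16u8 s1 = (v16u8)__msa_ld_b((const v16i8 *)src_yuy2, 16);
    v16u8 y = (v16u8)__msa_pckev_b((v16i8)s1, (v16i8)s0); /* even bytes */
    __msa_st_b((v16i8)y, dst_y, 0);
    src_yuy2 += 32;
    dst_y += 16;
  }
}
```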
/external/webp/src/dsp/
lossless_msa.c
     26  LD_UB4(psrc, 16, src0, src1, src2, src3); \
dec_msa.c
    144  LD_UB4(dst, BPS, dest0, dest1, dest2, dest3);  in TransformAC3()
    629  LD_UB4(src - 2 * stride, stride, p1, p0, q0, q1);  in SimpleVFilter16()
msa_macro.h
    238  #define LD_UB4(...) LD_B4(v16u8, __VA_ARGS__)  (macro definition)