/external/libvpx/vpx_dsp/mips/ |
D | sum_squares_msa.c |
    35   v8i16 src0, src1, src2, src3, src4, src5, src6, src7;   in vpx_sum_squares_2d_i16_msa() local
    37   LD_SH8(src, src_stride, src0, src1, src2, src3, src4, src5, src6, src7);   in vpx_sum_squares_2d_i16_msa()
    40   DPADD_SH2_SW(src4, src5, src4, src5, mul0, mul1);   in vpx_sum_squares_2d_i16_msa()
    47   v8i16 src0, src1, src2, src3, src4, src5, src6, src7;   in vpx_sum_squares_2d_i16_msa() local
    49   LD_SH8(src, src_stride, src0, src1, src2, src3, src4, src5, src6, src7);   in vpx_sum_squares_2d_i16_msa()
    52   DPADD_SH2_SW(src4, src5, src4, src5, mul0, mul1);   in vpx_sum_squares_2d_i16_msa()
    54   LD_SH8(src + 8, src_stride, src0, src1, src2, src3, src4, src5, src6, src7);   in vpx_sum_squares_2d_i16_msa()
    58   DPADD_SH2_SW(src4, src5, src4, src5, mul0, mul1);   in vpx_sum_squares_2d_i16_msa()
    60   LD_SH8(src, src_stride, src0, src1, src2, src3, src4, src5, src6, src7);   in vpx_sum_squares_2d_i16_msa()
    63   DPADD_SH2_SW(src4, src5, src4, src5, mul0, mul1);   in vpx_sum_squares_2d_i16_msa()
    [all …]
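sum_squares_msa.c vectorizes vpx_sum_squares_2d_i16: LD_SH8 loads eight rows of eight 16-bit residuals and DPADD_SH2_SW squares-and-accumulates them into 32-bit lanes. A minimal scalar sketch of the same computation, assuming the signature of the generic C version in vpx_dsp:

```c
#include <stdint.h>

/* Scalar sketch: sum of squares of a size x size block of 16-bit residuals.
 * The MSA version computes the same value eight lanes at a time with
 * LD_SH8 (load 8 rows) and DPADD_SH2_SW (multiply-accumulate into i32). */
static uint64_t sum_squares_2d_i16(const int16_t *src, int stride, int size) {
  uint64_t ss = 0;
  for (int r = 0; r < size; ++r) {
    for (int c = 0; c < size; ++c) {
      const int v = src[c];             /* |v| <= 32767, v*v fits in int */
      ss += (uint64_t)(v * v);
    }
    src += stride;
  }
  return ss;
}
```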
|
D | vpx_convolve_copy_msa.c |
    19   v16u8 src0, src1, src2, src3, src4, src5, src6, src7;   in copy_width8_msa() local
    23   LD_UB8(src, src_stride, src0, src1, src2, src3, src4, src5, src6, src7);   in copy_width8_msa()
    31   out5 = __msa_copy_u_d((v2i64)src5, 0);   in copy_width8_msa()
    52   LD_UB8(src, src_stride, src0, src1, src2, src3, src4, src5, src6, src7);   in copy_width8_msa()
    60   out5 = __msa_copy_u_d((v2i64)src5, 0);   in copy_width8_msa()
    102  v16u8 src0, src1, src2, src3, src4, src5, src6, src7;   in copy_16multx8mult_msa() local
    109  LD_UB8(src_tmp, src_stride, src0, src1, src2, src3, src4, src5, src6,   in copy_16multx8mult_msa()
    113  ST_UB8(src0, src1, src2, src3, src4, src5, src6, src7, dst_tmp,   in copy_16multx8mult_msa()
    126  v16u8 src0, src1, src2, src3, src4, src5, src6, src7;   in copy_width16_msa() local
    130  LD_UB8(src, src_stride, src0, src1, src2, src3, src4, src5, src6, src7);   in copy_width16_msa()
    [all …]
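vpx_convolve_copy_msa.c is the "copy" convolution: the filter arguments are ignored and the block is simply moved, 8 or 16 bytes per vector (LD_UB8/ST_UB8, with __msa_copy_u_d extracting 8-byte stores). A hedged scalar equivalent — the real vpx_convolve_copy also carries unused filter parameters, omitted here:

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Row-by-row block copy; copy_width8_msa / copy_width16_msa above do the
 * same thing with 8- and 16-byte vector loads and stores. */
static void convolve_copy(const uint8_t *src, ptrdiff_t src_stride,
                          uint8_t *dst, ptrdiff_t dst_stride, int w, int h) {
  for (int y = 0; y < h; ++y) {
    memcpy(dst, src, (size_t)w);
    src += src_stride;
    dst += dst_stride;
  }
}
```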
|
D | avg_msa.c |
    18   v16u8 src0, src1, src2, src3, src4, src5, src6, src7;   in vpx_avg_8x8_msa() local
    22   LD_UB8(src, src_stride, src0, src1, src2, src3, src4, src5, src6, src7);   in vpx_avg_8x8_msa()
    24   HADD_UB4_UH(src4, src5, src6, src7, sum4, sum5, sum6, sum7);   in vpx_avg_8x8_msa()
    63   v8i16 src0, src1, src2, src3, src4, src5, src6, src7;   in vpx_hadamard_8x8_msa() local
    66   LD_SH8(src, src_stride, src0, src1, src2, src3, src4, src5, src6, src7);   in vpx_hadamard_8x8_msa()
    67   BUTTERFLY_8(src0, src2, src4, src6, src7, src5, src3, src1, tmp0, tmp2, tmp4,   in vpx_hadamard_8x8_msa()
    70   src5, src7, src6, src3, src2);   in vpx_hadamard_8x8_msa()
    71   BUTTERFLY_8(src0, src1, src2, src3, src7, src6, src5, src4, tmp0, tmp7, tmp3,   in vpx_hadamard_8x8_msa()
    74   src2, src3, src4, src5, src6, src7);   in vpx_hadamard_8x8_msa()
    75   BUTTERFLY_8(src0, src2, src4, src6, src7, src5, src3, src1, tmp0, tmp2, tmp4,   in vpx_hadamard_8x8_msa()
    [all …]
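avg_msa.c holds two helpers: vpx_avg_8x8 (rounded mean of an 8x8 pixel block, reduced with HADD_UB4_UH horizontal adds) and vpx_hadamard_8x8 (see the Hadamard sketch under avg_lsx.c further down). A scalar sketch of the 8x8 average, assuming it follows the generic C version's (sum + 32) >> 6 rounding:

```c
#include <stdint.h>

/* Rounded mean of an 8x8 block: add all 64 pixels, then divide by 64 with
 * rounding.  HADD_UB4_UH in the MSA code produces the partial row sums. */
static unsigned int avg_8x8(const uint8_t *s, int stride) {
  int sum = 0;
  for (int r = 0; r < 8; ++r) {
    for (int c = 0; c < 8; ++c) sum += s[c];
    s += stride;
  }
  return (unsigned int)((sum + 32) >> 6);
}
```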
|
D | vpx_convolve8_vert_msa.c |
    19   v16i8 src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_vt_8t_4w_msa() local
    31   LD_SB7(src, src_stride, src0, src1, src2, src3, src4, src5, src6);   in common_vt_8t_4w_msa()
    34   ILVR_B4_SB(src1, src0, src3, src2, src5, src4, src2, src1, src10_r, src32_r,   in common_vt_8t_4w_msa()
    36   ILVR_B2_SB(src4, src3, src6, src5, src43_r, src65_r);   in common_vt_8t_4w_msa()
    70   v16i8 src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_vt_8t_8w_msa() local
    81   LD_SB7(src, src_stride, src0, src1, src2, src3, src4, src5, src6);   in common_vt_8t_8w_msa()
    82   XORI_B7_128_SB(src0, src1, src2, src3, src4, src5, src6);   in common_vt_8t_8w_msa()
    84   ILVR_B4_SB(src1, src0, src3, src2, src5, src4, src2, src1, src10_r, src32_r,   in common_vt_8t_8w_msa()
    86   ILVR_B2_SB(src4, src3, src6, src5, src43_r, src65_r);   in common_vt_8t_8w_msa()
    124  v16i8 src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_vt_8t_16w_msa() local
    [all …]
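The common_vt_8t_* routines are the vertical 8-tap sub-pixel filters: seven rows are pre-loaded (LD_SB7), adjacent rows are interleaved byte-wise (ILVR_B*), and pixels are biased by XOR 128 so signed dot products can be used. A scalar sketch of the per-pixel computation, assuming libvpx's usual 7-bit filter convention (taps sum to 128, round and shift by FILTER_BITS = 7):

```c
#include <stddef.h>
#include <stdint.h>

#define FILTER_BITS 7  /* assumed: libvpx sub-pixel taps are Q7 fixed point */

static uint8_t clip_pixel(int v) {
  return (uint8_t)(v < 0 ? 0 : (v > 255 ? 255 : v));
}

/* Vertical 8-tap filtering: every output pixel is a weighted sum of the 8
 * pixels above/below it in the same column, rounded and clamped to 8 bits.
 * The MSA code evaluates 4, 8 or 16 such columns per iteration. */
static void convolve8_vert(const uint8_t *src, ptrdiff_t src_stride,
                           uint8_t *dst, ptrdiff_t dst_stride,
                           const int16_t filter[8], int w, int h) {
  src -= 3 * src_stride;  /* first tap sits 3 rows above the output row */
  for (int y = 0; y < h; ++y) {
    for (int x = 0; x < w; ++x) {
      int sum = 0;
      for (int k = 0; k < 8; ++k) sum += src[x + k * src_stride] * filter[k];
      dst[x] = clip_pixel((sum + (1 << (FILTER_BITS - 1))) >> FILTER_BITS);
    }
    src += src_stride;
    dst += dst_stride;
  }
}
```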
|
D | vpx_convolve8_avg_vert_msa.c |
    21   v16i8 src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_vt_8t_and_aver_dst_4w_msa() local
    33   LD_SB7(src, src_stride, src0, src1, src2, src3, src4, src5, src6);   in common_vt_8t_and_aver_dst_4w_msa()
    36   ILVR_B4_SB(src1, src0, src3, src2, src5, src4, src2, src1, src10_r, src32_r,   in common_vt_8t_and_aver_dst_4w_msa()
    38   ILVR_B2_SB(src4, src3, src6, src5, src43_r, src65_r);   in common_vt_8t_and_aver_dst_4w_msa()
    78   v16i8 src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_vt_8t_and_aver_dst_8w_msa() local
    89   LD_SB7(src, src_stride, src0, src1, src2, src3, src4, src5, src6);   in common_vt_8t_and_aver_dst_8w_msa()
    92   XORI_B7_128_SB(src0, src1, src2, src3, src4, src5, src6);   in common_vt_8t_and_aver_dst_8w_msa()
    93   ILVR_B4_SB(src1, src0, src3, src2, src5, src4, src2, src1, src10_r, src32_r,   in common_vt_8t_and_aver_dst_8w_msa()
    95   ILVR_B2_SB(src4, src3, src6, src5, src43_r, src65_r);   in common_vt_8t_and_aver_dst_8w_msa()
    137  v16i8 src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_vt_8t_and_aver_dst_16w_mult_msa() local
    [all …]
|
D | subtract_msa.c |
    62   v16i8 src0, src1, src2, src3, src4, src5, src6, src7;   in sub_blk_16x16_msa() local
    68   LD_SB8(src, src_stride, src0, src1, src2, src3, src4, src5, src6, src7);   in sub_blk_16x16_msa()
    100  ILVRL_B2_UB(src5, pred5, src_l0, src_l1);   in sub_blk_16x16_msa()
    121  v16i8 src0, src1, src2, src3, src4, src5, src6, src7;   in sub_blk_32x32_msa() local
    131  LD_SB2(src, 16, src4, src5);   in sub_blk_32x32_msa()
    164  ILVRL_B2_UB(src5, pred5, src_l0, src_l1);   in sub_blk_32x32_msa()
    183  v16i8 src0, src1, src2, src3, src4, src5, src6, src7;   in sub_blk_64x64_msa() local
    191  LD_SB4(src, 16, src4, src5, src6, src7);   in sub_blk_64x64_msa()
    216  ILVRL_B2_UB(src5, pred5, src_l0, src_l1);   in sub_blk_64x64_msa()
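subtract_msa.c builds the residual block: diff = src - pred, widened to 16 bits. ILVRL_B2_UB interleaves source and prediction bytes so a single horizontal subtract yields eight differences at once. A scalar sketch, assuming the same layout as the generic vpx_subtract_block C reference:

```c
#include <stddef.h>
#include <stdint.h>

/* Residual computation: 16-bit difference of two 8-bit blocks.
 * sub_blk_16x16/32x32/64x64_msa above do this 16 bytes per vector. */
static void subtract_block(int rows, int cols,
                           int16_t *diff, ptrdiff_t diff_stride,
                           const uint8_t *src, ptrdiff_t src_stride,
                           const uint8_t *pred, ptrdiff_t pred_stride) {
  for (int r = 0; r < rows; ++r) {
    for (int c = 0; c < cols; ++c) diff[c] = (int16_t)(src[c] - pred[c]);
    diff += diff_stride;
    src += src_stride;
    pred += pred_stride;
  }
}
```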
|
D | vpx_convolve_avg_msa.c |
    84   v16u8 src0, src1, src2, src3, src4, src5, src6, src7;   in avg_width16_msa() local
    88   LD_UB8(src, src_stride, src0, src1, src2, src3, src4, src5, src6, src7);   in avg_width16_msa()
    94   AVER_UB4_UB(src4, dst4, src5, dst5, src6, dst6, src7, dst7, dst4, dst5,   in avg_width16_msa()
    105  v16u8 src0, src1, src2, src3, src4, src5, src6, src7;   in avg_width32_msa() local
    112  LD_UB4(src + 16, src_stride, src1, src3, src5, src7);   in avg_width32_msa()
    126  AVER_UB4_UB(src4, dst4, src5, dst5, src6, dst6, src7, dst7, dst4, dst5,   in avg_width32_msa()
    146  v16u8 src0, src1, src2, src3, src4, src5, src6, src7;   in avg_width64_msa() local
    154  LD_UB4(src, 16, src4, src5, src6, src7);   in avg_width64_msa()
    172  AVER_UB4_UB(src4, dst4, src5, dst5, src6, dst6, src7, dst7, dst4, dst5,   in avg_width64_msa()
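vpx_convolve_avg_msa.c blends the source block into the destination with a rounding average; AVER_UB4_UB is the per-byte (a + b + 1) >> 1 instruction. A minimal scalar sketch of the same operation:

```c
#include <stddef.h>
#include <stdint.h>

/* dst[x] = rounded average of dst[x] and src[x], i.e. (dst + src + 1) >> 1.
 * avg_width16/32/64_msa above apply AVER_UB4_UB to 16-byte vectors. */
static void convolve_avg(const uint8_t *src, ptrdiff_t src_stride,
                         uint8_t *dst, ptrdiff_t dst_stride, int w, int h) {
  for (int y = 0; y < h; ++y) {
    for (int x = 0; x < w; ++x)
      dst[x] = (uint8_t)((dst[x] + src[x] + 1) >> 1);
    src += src_stride;
    dst += dst_stride;
  }
}
```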
|
D | vpx_convolve8_avg_msa.c |
    20   v16i8 src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_hv_8ht_8vt_and_aver_dst_4w_msa() local
    38   LD_SB7(src, src_stride, src0, src1, src2, src3, src4, src5, src6);   in common_hv_8ht_8vt_and_aver_dst_4w_msa()
    39   XORI_B7_128_SB(src0, src1, src2, src3, src4, src5, src6);   in common_hv_8ht_8vt_and_aver_dst_4w_msa()
    46   hz_out4 = HORIZ_8TAP_FILT(src4, src5, mask0, mask1, mask2, mask3, filt_hz0,   in common_hv_8ht_8vt_and_aver_dst_4w_msa()
    48   hz_out5 = HORIZ_8TAP_FILT(src5, src6, mask0, mask1, mask2, mask3, filt_hz0,   in common_hv_8ht_8vt_and_aver_dst_4w_msa()
    98   v16i8 src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_hv_8ht_8vt_and_aver_dst_8w_msa() local
    117  LD_SB7(src, src_stride, src0, src1, src2, src3, src4, src5, src6);   in common_hv_8ht_8vt_and_aver_dst_8w_msa()
    120  XORI_B7_128_SB(src0, src1, src2, src3, src4, src5, src6);   in common_hv_8ht_8vt_and_aver_dst_8w_msa()
    131  hz_out5 = HORIZ_8TAP_FILT(src5, src5, mask0, mask1, mask2, mask3, filt_hz0,   in common_hv_8ht_8vt_and_aver_dst_8w_msa()
    268  v16i8 src0, src1, src2, src3, src4, src5, src6, src7, src8, mask;   in common_hv_2ht_2vt_and_aver_dst_4x8_msa() local
    [all …]
|
/external/libvpx/vpx_dsp/loongarch/ |
D | avg_lsx.c |
    18   __m128i src0, src1, src2, src3, src4, src5, src6, src7;   in vpx_hadamard_8x8_lsx() local
    31   DUP2_ARG2(__lsx_vldx, src_tmp, src_stride2, src_tmp, src_stride4, src5, src6);   in vpx_hadamard_8x8_lsx()
    34   LSX_BUTTERFLY_8_H(src0, src2, src4, src6, src7, src5, src3, src1, tmp0, tmp2,   in vpx_hadamard_8x8_lsx()
    37   src4, src5, src7, src6, src3, src2);   in vpx_hadamard_8x8_lsx()
    38   LSX_BUTTERFLY_8_H(src0, src1, src2, src3, src7, src6, src5, src4, tmp0, tmp7,   in vpx_hadamard_8x8_lsx()
    41   src2, src3, src4, src5, src6, src7);   in vpx_hadamard_8x8_lsx()
    42   LSX_BUTTERFLY_8_H(src0, src2, src4, src6, src7, src5, src3, src1, tmp0, tmp2,   in vpx_hadamard_8x8_lsx()
    45   src4, src5, src7, src6, src3, src2);   in vpx_hadamard_8x8_lsx()
    46   LSX_BUTTERFLY_8_H(src0, src1, src2, src3, src7, src6, src5, src4, tmp0, tmp7,   in vpx_hadamard_8x8_lsx()
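avg_lsx.c is the LoongArch port of vpx_hadamard_8x8: the LSX_BUTTERFLY_8_H passes implement the add/subtract stages of an 8-point Walsh–Hadamard transform applied to rows and then columns. Below is a scalar sketch of one unnormalized 8-point butterfly chain plus the 8x8 wrapper; it is only illustrative — the real vpx_hadamard_8x8 emits its coefficients in a specific interleaved order that this sketch does not reproduce:

```c
#include <stddef.h>
#include <stdint.h>

/* Generic unnormalized 8-point Walsh-Hadamard butterfly: three stages of
 * pairwise adds/subtracts, the scalar counterpart of the BUTTERFLY_8 /
 * LSX_BUTTERFLY_8_H macros.  Output ordering here is plain, not the
 * interleaved order the libvpx kernels produce. */
static void wht8(const int16_t in[8], int16_t out[8]) {
  int16_t a[8], b[8];
  for (int i = 0; i < 4; ++i) {            /* stage 1: distance-1 pairs */
    a[2 * i + 0] = (int16_t)(in[2 * i] + in[2 * i + 1]);
    a[2 * i + 1] = (int16_t)(in[2 * i] - in[2 * i + 1]);
  }
  for (int i = 0; i < 2; ++i) {            /* stage 2: distance-2 pairs */
    for (int j = 0; j < 2; ++j) {
      b[4 * i + j + 0] = (int16_t)(a[4 * i + j] + a[4 * i + j + 2]);
      b[4 * i + j + 2] = (int16_t)(a[4 * i + j] - a[4 * i + j + 2]);
    }
  }
  for (int j = 0; j < 4; ++j) {            /* stage 3: distance-4 pairs */
    out[j + 0] = (int16_t)(b[j] + b[j + 4]);
    out[j + 4] = (int16_t)(b[j] - b[j + 4]);
  }
}

/* 8x8 transform: butterfly every column, then every row of the result. */
static void hadamard_8x8(const int16_t *src, ptrdiff_t stride,
                         int16_t coeff[64]) {
  int16_t tmp[64], col[8], out[8];
  for (int c = 0; c < 8; ++c) {
    for (int r = 0; r < 8; ++r) col[r] = src[r * stride + c];
    wht8(col, out);
    for (int r = 0; r < 8; ++r) tmp[r * 8 + c] = out[r];
  }
  for (int r = 0; r < 8; ++r) wht8(&tmp[r * 8], &coeff[r * 8]);
}
```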
|
D | vpx_convolve8_vert_lsx.c |
    22   __m128i src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_vt_8t_4w_lsx() local
    36   DUP2_ARG2(__lsx_vldx, _src, src_stride, _src, src_stride2, src5, src6);   in common_vt_8t_4w_lsx()
    38   DUP4_ARG2(__lsx_vilvl_b, src1, src0, src3, src2, src5, src4, src2, src1, tmp0,   in common_vt_8t_4w_lsx()
    40   DUP2_ARG2(__lsx_vilvl_b, src4, src3, src6, src5, tmp4, tmp5);   in common_vt_8t_4w_lsx()
    81   __m128i src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_vt_8t_8w_lsx() local
    99   DUP2_ARG2(__lsx_vldx, src, src_stride, src, src_stride2, src5, src6);   in common_vt_8t_8w_lsx()
    104  DUP2_ARG2(__lsx_vxori_b, src4, 128, src5, 128, src4, src5);   in common_vt_8t_8w_lsx()
    106  DUP4_ARG2(__lsx_vilvl_b, src1, src0, src3, src2, src5, src4, src2, src1, reg0,   in common_vt_8t_8w_lsx()
    108  DUP2_ARG2(__lsx_vilvl_b, src4, src3, src6, src5, reg4, reg5);   in common_vt_8t_8w_lsx()
    152  __m128i src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_vt_8t_16w_lsx() local
    [all …]
|
D | subtract_lsx.c |
    49   __m128i src0, src1, src2, src3, src4, src5, src6, src7;   in sub_blk_8x8_lsx() local
    72   src_ptr + src_stride2, 0, src_ptr + src_stride3, 0, src4, src5,   in sub_blk_8x8_lsx()
    80   DUP4_ARG2(__lsx_vilvl_b, src4, pred4, src5, pred5, src6, pred6, src7, pred7,   in sub_blk_8x8_lsx()
    85   src4, src5, src6, src7);   in sub_blk_8x8_lsx()
    92   __lsx_vstx(src5, diff_ptr, dst_stride);   in sub_blk_8x8_lsx()
    100  __m128i src0, src1, src2, src3, src4, src5, src6, src7;   in sub_blk_16x16_lsx() local
    123  pred, pred_stride, src5, src6, src7, pred5);   in sub_blk_16x16_lsx()
    131  DUP4_ARG2(__lsx_vilvl_b, src4, pred4, src5, pred5, src6, pred6, src7, pred7,   in sub_blk_16x16_lsx()
    133  DUP4_ARG2(__lsx_vilvh_b, src4, pred4, src5, pred5, src6, pred6, src7, pred7,   in sub_blk_16x16_lsx()
    138  src4, src5, src6, src7);   in sub_blk_16x16_lsx()
    [all …]
|
D | vpx_convolve_copy_lsx.c |
    18   __m128i src0, src1, src2, src3, src4, src5, src6, src7;   in copy_width8_lsx() local
    29   DUP2_ARG2(__lsx_vldx, src, src_stride, src, src_stride2, src5, src6);   in copy_width8_lsx()
    45   __lsx_vstelm_d(src5, dst, 0, 0);   in copy_width8_lsx()
    72   DUP2_ARG2(__lsx_vldx, src, src_stride, src, src_stride2, src5, src6);   in copy_width8_lsx()
    88   __lsx_vstelm_d(src5, dst, 0, 0);   in copy_width8_lsx()
    131  __m128i src0, src1, src2, src3, src4, src5, src6, src7;   in copy_16multx8mult_lsx() local
    145  DUP2_ARG2(__lsx_vldx, src_tmp, src_stride, src_tmp, src_stride2, src5,   in copy_16multx8mult_lsx()
    161  __lsx_vst(src5, dst_tmp, 0);   in copy_16multx8mult_lsx()
    176  __m128i src0, src1, src2, src3, src4, src5, src6, src7;   in copy_width16_lsx() local
    187  DUP2_ARG2(__lsx_vldx, src, src_stride, src, src_stride2, src5, src6);   in copy_width16_lsx()
    [all …]
|
D | vpx_convolve8_avg_vert_lsx.c |
    22   __m128i src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_vt_8t_and_aver_dst_4w_lsx() local
    40   DUP2_ARG2(__lsx_vldx, src_tmp0, src_stride, src_tmp0, src_stride2, src5,   in common_vt_8t_and_aver_dst_4w_lsx()
    43   DUP4_ARG2(__lsx_vilvl_b, src1, src0, src3, src2, src5, src4, src2, src1, tmp0,   in common_vt_8t_and_aver_dst_4w_lsx()
    45   DUP2_ARG2(__lsx_vilvl_b, src4, src3, src6, src5, tmp4, tmp5);   in common_vt_8t_and_aver_dst_4w_lsx()
    100  __m128i src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_vt_8t_and_aver_dst_8w_lsx() local
    119  DUP2_ARG2(__lsx_vldx, src_tmp0, src_stride, src_tmp0, src_stride2, src5,   in common_vt_8t_and_aver_dst_8w_lsx()
    124  DUP2_ARG2(__lsx_vxori_b, src4, 128, src5, 128, src4, src5);   in common_vt_8t_and_aver_dst_8w_lsx()
    126  DUP4_ARG2(__lsx_vilvl_b, src1, src0, src3, src2, src5, src4, src2, src1, reg0,   in common_vt_8t_and_aver_dst_8w_lsx()
    128  DUP2_ARG2(__lsx_vilvl_b, src4, src3, src6, src5, reg4, reg5);   in common_vt_8t_and_aver_dst_8w_lsx()
    183  __m128i src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_vt_8t_and_aver_dst_16w_mult_lsx() local
    [all …]
|
D | vpx_convolve8_avg_lsx.c |
    29   __m128i src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_hv_8ht_8vt_and_aver_dst_4w_lsx() local
    52   DUP2_ARG2(__lsx_vldx, _src, src_stride, _src, src_stride2, src5, src6);   in common_hv_8ht_8vt_and_aver_dst_4w_lsx()
    57   DUP2_ARG2(__lsx_vxori_b, src4, 128, src5, 128, src4, src5);   in common_hv_8ht_8vt_and_aver_dst_4w_lsx()
    64   tmp4 = horiz_8tap_filt(src4, src5, mask0, mask1, mask2, mask3, filt_hz0,   in common_hv_8ht_8vt_and_aver_dst_4w_lsx()
    66   tmp5 = horiz_8tap_filt(src5, src6, mask0, mask1, mask2, mask3, filt_hz0,   in common_hv_8ht_8vt_and_aver_dst_4w_lsx()
    84   src5 = __lsx_vldrepl_w(dst_tmp, 0);   in common_hv_8ht_8vt_and_aver_dst_4w_lsx()
    86   DUP2_ARG2(__lsx_vilvl_w, src3, src2, src5, src4, src2, src3);   in common_hv_8ht_8vt_and_aver_dst_4w_lsx()
    126  __m128i src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_hv_8ht_8vt_and_aver_dst_8w_lsx() local
    148  DUP2_ARG2(__lsx_vldx, _src, src_stride, _src, src_stride2, src5, src6);   in common_hv_8ht_8vt_and_aver_dst_8w_lsx()
    152  DUP2_ARG2(__lsx_vxori_b, src4, 128, src5, 128, src4, src5);   in common_hv_8ht_8vt_and_aver_dst_8w_lsx()
    [all …]
|
D | vpx_convolve8_lsx.c |
    29   __m128i src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_hv_8ht_8vt_4w_lsx() local
    48   src5 = __lsx_vld(src, 0);   in common_hv_8ht_8vt_4w_lsx()
    54   DUP2_ARG2(__lsx_vxori_b, src4, 128, src5, 128, src4, src5);   in common_hv_8ht_8vt_4w_lsx()
    61   tmp4 = horiz_8tap_filt(src4, src5, mask0, mask1, mask2, mask3, filt_hz0,   in common_hv_8ht_8vt_4w_lsx()
    63   tmp5 = horiz_8tap_filt(src5, src6, mask0, mask1, mask2, mask3, filt_hz0,   in common_hv_8ht_8vt_4w_lsx()
    111  __m128i src0, src1, src2, src3, src4, src5, src6, src7, src8, src9, src10;   in common_hv_8ht_8vt_8w_lsx() local
    129  src5 = __lsx_vld(src, 0);   in common_hv_8ht_8vt_8w_lsx()
    135  DUP2_ARG2(__lsx_vxori_b, src4, 128, src5, 128, src4, src5);   in common_hv_8ht_8vt_8w_lsx()
    148  src5 = horiz_8tap_filt(src5, src5, mask0, mask1, mask2, mask3, filt_hz0,   in common_hv_8ht_8vt_8w_lsx()
    155  DUP4_ARG2(__lsx_vpackev_b, src1, src0, src3, src2, src5, src4, src2, src1,   in common_hv_8ht_8vt_8w_lsx()
    [all …]
|
D | vpx_convolve_avg_lsx.c |
    80   __m128i src0, src1, src2, src3, src4, src5, src6, src7;   in avg_width16_lsx() local
    97   DUP2_ARG2(__lsx_vldx, src, src_stride, src, src_stride2, src5, src6);   in avg_width16_lsx()
    112  DUP4_ARG2(__lsx_vavgr_bu, src4, dst4, src5, dst5, src6, dst6, src7, dst7,   in avg_width16_lsx()
    131  __m128i src0, src1, src2, src3, src4, src5, src6, src7;   in avg_width32_lsx() local
    152  src_stride2, src_tmp, src_stride2, src2, src3, src4, src5);   in avg_width32_lsx()
    178  DUP4_ARG2(__lsx_vavgr_bu, src4, dst4, src5, dst5, src6, dst6, src7, dst7,   in avg_width32_lsx()
    213  __m128i src0, src1, src2, src3, src4, src5, src6, src7;   in avg_width64_lsx() local
    222  DUP4_ARG2(__lsx_vld, src, 0, src, 16, src, 32, src, 48, src4, src5, src6,   in avg_width64_lsx()
    247  DUP4_ARG2(__lsx_vavgr_bu, src4, dst4, src5, dst5, src6, dst6, src7, dst7,   in avg_width64_lsx()
|
/external/libyuv/source/ |
D | scale_lsx.cc |
    224  __m128i src0, src1, src2, src3, src4, src5, src6, src7;   in ScaleRowDown2Box_LSX() local
    232  src4, src5, src6, src7);   in ScaleRowDown2Box_LSX()
    233  DUP4_ARG2(__lsx_vaddwev_h_bu, src0, src4, src1, src5, src2, src6, src3,   in ScaleRowDown2Box_LSX()
    235  DUP4_ARG2(__lsx_vaddwod_h_bu, src0, src4, src1, src5, src2, src6, src3,   in ScaleRowDown2Box_LSX()
    277  __m128i src0, src1, src2, src3, src4, src5, src6, src7;   in ScaleRowDown4Box_LSX() local
    284  DUP4_ARG2(__lsx_vld, ptr1, 0, ptr1, 16, ptr1, 32, ptr1, 48, src4, src5,   in ScaleRowDown4Box_LSX()
    286  DUP4_ARG2(__lsx_vaddwev_h_bu, src0, src4, src1, src5, src2, src6, src3,   in ScaleRowDown4Box_LSX()
    288  DUP4_ARG2(__lsx_vaddwod_h_bu, src0, src4, src1, src5, src2, src6, src3,   in ScaleRowDown4Box_LSX()
    294  DUP4_ARG2(__lsx_vld, ptr3, 0, ptr3, 16, ptr3, 32, ptr3, 48, src4, src5,   in ScaleRowDown4Box_LSX()
    296  DUP4_ARG2(__lsx_vaddwev_h_bu, src0, src4, src1, src5, src2, src6, src3,   in ScaleRowDown4Box_LSX()
    [all …]
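ScaleRowDown2Box halves a plane in both directions with a 2x2 box filter; the LSX code forms the horizontal pair sums with even/odd widening adds (vaddwev/vaddwod) and combines two source rows before rounding. A scalar sketch assuming an even destination width (the real libyuv routines also handle the odd-width tail):

```c
#include <stddef.h>
#include <stdint.h>

/* 2x box downscale of one output row: each destination pixel is the
 * rounded mean of a 2x2 block taken from two consecutive source rows. */
static void scale_row_down2_box(const uint8_t *src, ptrdiff_t src_stride,
                                uint8_t *dst, int dst_width) {
  const uint8_t *s = src;               /* top source row    */
  const uint8_t *t = src + src_stride;  /* bottom source row */
  for (int x = 0; x < dst_width; ++x) {
    dst[x] = (uint8_t)((s[0] + s[1] + t[0] + t[1] + 2) >> 2);
    s += 2;
    t += 2;
  }
}
```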
|
D | scale_msa.cc |
    238  v16u8 src0, src1, src2, src3, src4, src5, src6, src7, dst0, dst1;   in ScaleRowDown2Box_MSA() local
    247  src5 = (v16u8)__msa_ld_b((v16i8*)t, 16);   in ScaleRowDown2Box_MSA()
    255  vec1 += __msa_hadd_u_h(src5, src5);   in ScaleRowDown2Box_MSA()
    302  v16u8 src0, src1, src2, src3, src4, src5, src6, src7, dst0;   in ScaleRowDown4Box_MSA() local
    312  src5 = (v16u8)__msa_ld_b((v16i8*)t0, 16);   in ScaleRowDown4Box_MSA()
    320  vec1 += __msa_hadd_u_h(src5, src5);   in ScaleRowDown4Box_MSA()
    328  src5 = (v16u8)__msa_ld_b((v16i8*)t2, 16);   in ScaleRowDown4Box_MSA()
    336  vec1 += __msa_hadd_u_h(src5, src5);   in ScaleRowDown4Box_MSA()
    465  v16u8 src0, src1, src2, src3, src4, src5, out;   in ScaleRowDown38_3_Box_MSA() local
    483  src5 = (v16u8)__msa_ld_b((v16i8*)t1, 16);   in ScaleRowDown38_3_Box_MSA()
    [all …]
|
/external/libpng/mips/ |
D | filter_msa_intrinsics.c |
    374  v16u8 src0, src1, src2, src3, src4, src5, src6, src7;   in png_read_filter_row_up_msa() local
    379  LD_UB4(pp, 16, src4, src5, src6, src7);   in png_read_filter_row_up_msa()
    382  ADD4(src0, src4, src1, src5, src2, src6, src3, src7,   in png_read_filter_row_up_msa()
    400  LD_UB4(pp, 16, src4, src5, src6, src7);   in png_read_filter_row_up_msa()
    402  ADD4(src0, src4, src1, src5, src2, src6, src3, src7,   in png_read_filter_row_up_msa()
    411  LD_UB2(pp, 16, src4, src5);   in png_read_filter_row_up_msa()
    416  ADD3(src0, src4, src1, src5, src2, src6, src0, src1, src2);   in png_read_filter_row_up_msa()
    426  LD_UB2(pp, 16, src4, src5);   in png_read_filter_row_up_msa()
    428  ADD2(src0, src4, src1, src5, src0, src1);   in png_read_filter_row_up_msa()
    437  LD_UB2(pp, 16, src4, src5);   in png_read_filter_row_up_msa()
    [all …]
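png_read_filter_row_up_msa undoes the PNG "Up" filter: every byte in the current row gets the byte directly above it added back, modulo 256. The MSA code does this 32 or 64 bytes per iteration with LD_UB2/LD_UB4 and ADD2/ADD3/ADD4. Scalar sketch (rowbytes excludes the leading filter-type byte):

```c
#include <stddef.h>
#include <stdint.h>

/* PNG "Up" defilter: recon[i] = filt[i] + prior[i] (mod 256). */
static void png_defilter_up(uint8_t *row, const uint8_t *prev_row,
                            size_t rowbytes) {
  for (size_t i = 0; i < rowbytes; ++i)
    row[i] = (uint8_t)(row[i] + prev_row[i]);  /* uint8_t wrap == mod 256 */
}
```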
|
/external/libvpx/vp8/common/mips/msa/ |
D | sixtap_filter_msa.c |
    286  v16i8 src0, src1, src2, src3, src4, src5, src6, src7, filt0, filt1, filt2;   in common_hz_6t_16w_msa() local
    301  LD_SB4(src + 8, src_stride, src1, src3, src5, src7);   in common_hz_6t_16w_msa()
    302  XORI_B8_128_SB(src0, src1, src2, src3, src4, src5, src6, src7);   in common_hz_6t_16w_msa()
    307  HORIZ_6TAP_8WID_4VECS_FILT(src4, src5, src6, src7, mask0, mask1, mask2,   in common_hz_6t_16w_msa()
    332  v16i8 src0, src1, src2, src3, src4, src5, src6, src7, src8;   in common_vt_6t_4w_msa() local
    352  LD_SB4(src, src_stride, src5, src6, src7, src8);   in common_vt_6t_4w_msa()
    355  ILVR_B4_SB(src5, src4, src6, src5, src7, src6, src8, src7, src54_r, src65_r,   in common_vt_6t_4w_msa()
    425  v16i8 src0, src1, src2, src3, src4, src5, src6, src7, src8;   in common_vt_6t_16w_msa() local
    447  LD_SB4(src, src_stride, src5, src6, src7, src8);   in common_vt_6t_16w_msa()
    450  XORI_B4_128_SB(src5, src6, src7, src8);   in common_vt_6t_16w_msa()
    [all …]
|
D | bilinear_filter_msa.c |
    54   v16i8 src0, src1, src2, src3, src4, src5, src6, src7, mask;   in common_hz_2t_4x8_msa() local
    63   LD_SB8(src, src_stride, src0, src1, src2, src3, src4, src5, src6, src7);   in common_hz_2t_4x8_msa()
    65   VSHF_B2_UB(src4, src5, src6, src7, mask, mask, vec2, vec3);   in common_hz_2t_4x8_msa()
    184  v16i8 src0, src1, src2, src3, src4, src5, src6, src7, mask;   in common_hz_2t_16w_msa() local
    196  LD_SB4(src + 8, src_stride, src1, src3, src5, src7);   in common_hz_2t_16w_msa()
    201  VSHF_B2_UB(src4, src4, src5, src5, mask, mask, vec4, vec5);   in common_hz_2t_16w_msa()
    220  LD_SB4(src + 8, src_stride, src1, src3, src5, src7);   in common_hz_2t_16w_msa()
    225  VSHF_B2_UB(src4, src4, src5, src5, mask, mask, vec4, vec5);   in common_hz_2t_16w_msa()
    271  v16i8 src0, src1, src2, src3, src4, src5, src6, src7, src8;   in common_vt_2t_4x8_msa() local
    281  LD_SB8(src, src_stride, src0, src1, src2, src3, src4, src5, src6, src7);   in common_vt_2t_4x8_msa()
    [all …]
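bilinear_filter_msa.c implements VP8's 2-tap sub-pixel filters: VSHF_B2_UB pairs each pixel with its right (or lower) neighbour before the multiply-add. A scalar sketch of one horizontal row, assuming the taps follow VP8's Q7 convention in which the two weights sum to 128 so the result needs no clamping:

```c
#include <stdint.h>

#define BIL_SHIFT 7               /* assumed: VP8 bilinear taps are Q7 */
#define BIL_ROUND (1 << (BIL_SHIFT - 1))

/* Horizontal 2-tap bilinear filter: blend src[x] and src[x + 1] with
 * weights f0 + f1 == 128, e.g. {112, 16} for a 1/8-pel offset. */
static void bilinear_hz_row(const uint8_t *src, uint8_t *dst, int w,
                            int f0, int f1) {
  for (int x = 0; x < w; ++x)
    dst[x] = (uint8_t)((src[x] * f0 + src[x + 1] * f1 + BIL_ROUND) >> BIL_SHIFT);
}
```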
|
/external/libvpx/vp8/common/loongarch/ |
D | sixtap_filter_lsx.c |
    336  __m128i src0, src1, src2, src3, src4, src5, src6, src7, filt0, filt1, filt2;   in common_hz_6t_16w_lsx() local
    353  src_stride_x3, src1, src3, src5, src7);   in common_hz_6t_16w_lsx()
    356  DUP4_ARG2(__lsx_vxori_b, src4, 128, src5, 128, src6, 128, src7, 128, src4,   in common_hz_6t_16w_lsx()
    357  src5, src6, src7);   in common_hz_6t_16w_lsx()
    362  HORIZ_6TAP_8WID_4VECS_FILT(src4, src5, src6, src7, mask0, mask1, mask2,   in common_hz_6t_16w_lsx()
    394  __m128i src0, src1, src2, src3, src4, src5, src6, src7, src8;   in common_vt_6t_4w_lsx() local
    417  src5 = __lsx_vld(src, 0);   in common_vt_6t_4w_lsx()
    422  DUP4_ARG2(__lsx_vilvl_b, src5, src4, src6, src5, src7, src6, src8, src7,   in common_vt_6t_4w_lsx()
    521  __m128i src0, src1, src2, src3, src4, src5, src6, src7, src8;   in common_vt_6t_16w_lsx() local
    549  src_stride_x3, src5, src6, src7, src8);   in common_vt_6t_16w_lsx()
    [all …]
|
/external/swiftshader/third_party/llvm-16.0/llvm/lib/Target/X86/ |
D | X86InstrAMX.td |
    113  TILE:$src5, TILE:$src6),
    116  GR16:$src3, TILE:$src4, TILE:$src5, TILE:$src6))]>;
    119  TILE:$src5, TILE:$src6),
    122  GR16:$src3, TILE:$src4, TILE:$src5, TILE:$src6))]>;
    125  TILE:$src5, TILE:$src6),
    128  GR16:$src3, TILE:$src4, TILE:$src5, TILE:$src6))]>;
    131  TILE:$src5, TILE:$src6),
    134  GR16:$src3, TILE:$src4, TILE:$src5, TILE:$src6))]>;
    172  TILE:$src5, TILE:$src6),
    176  TILE:$src5, TILE:$src6))]>;
    [all …]
|
/external/swiftshader/third_party/llvm-10.0/llvm/lib/Target/Hexagon/ |
D | HexagonMapAsm2IntrinV65.gen.td |
    70   …egs:$src3, HvxVR:$src4, HvxVR:$src5), (V6_vscattermwq HvxQR:$src1, IntRegs:$src2, ModRegs:$src3, H…
    71   …egs:$src3, HvxVR:$src4, HvxVR:$src5), (V6_vscattermhq HvxQR:$src1, IntRegs:$src2, ModRegs:$src3, H…
    74   …egs:$src3, HvxWR:$src4, HvxVR:$src5), (V6_vscattermhwq HvxQR:$src1, IntRegs:$src2, ModRegs:$src3, …
    79   …egs:$src3, HvxVR:$src4, HvxVR:$src5), (V6_vscattermwq HvxQR:$src1, IntRegs:$src2, ModRegs:$src3, H…
    80   …egs:$src3, HvxVR:$src4, HvxVR:$src5), (V6_vscattermhq HvxQR:$src1, IntRegs:$src2, ModRegs:$src3, H…
    83   …egs:$src3, HvxWR:$src4, HvxVR:$src5), (V6_vscattermhwq HvxQR:$src1, IntRegs:$src2, ModRegs:$src3, …
|
/external/libvpx/third_party/libyuv/source/ |
D | scale_msa.cc |
    238  v16u8 src0, src1, src2, src3, src4, src5, src6, src7, dst0, dst1;   in ScaleRowDown2Box_MSA() local
    247  src5 = (v16u8)__msa_ld_b((v16i8*)t, 16);   in ScaleRowDown2Box_MSA()
    255  vec1 += __msa_hadd_u_h(src5, src5);   in ScaleRowDown2Box_MSA()
    302  v16u8 src0, src1, src2, src3, src4, src5, src6, src7, dst0;   in ScaleRowDown4Box_MSA() local
    312  src5 = (v16u8)__msa_ld_b((v16i8*)t0, 16);   in ScaleRowDown4Box_MSA()
    320  vec1 += __msa_hadd_u_h(src5, src5);   in ScaleRowDown4Box_MSA()
    328  src5 = (v16u8)__msa_ld_b((v16i8*)t2, 16);   in ScaleRowDown4Box_MSA()
    336  vec1 += __msa_hadd_u_h(src5, src5);   in ScaleRowDown4Box_MSA()
    465  v16u8 src0, src1, src2, src3, src4, src5, out;   in ScaleRowDown38_3_Box_MSA() local
    483  src5 = (v16u8)__msa_ld_b((v16i8*)t1, 16);   in ScaleRowDown38_3_Box_MSA()
    [all …]
|