/external/libvpx/libvpx/vpx_dsp/mips/
  avg_msa.c (all matches in vpx_hadamard_16x16_msa()):
     87  v8i16 src11, src12, src13, src14, src15, tmp0, tmp1, tmp2, tmp3, tmp4, tmp5;   (local declaration)
    103  LD_SH2(src, 8, src6, src14);
    110  BUTTERFLY_8(src8, src10, src12, src14, src15, src13, src11, src9, tmp8, tmp10,
    130  src12, src13, src15, src14, src11, src10);
    131  BUTTERFLY_8(src8, src9, src10, src11, src15, src14, src13, src12, tmp8, tmp15,
    134  src9, src10, src11, src12, src13, src14, src15);
    135  BUTTERFLY_8(src8, src10, src12, src14, src15, src13, src11, src9, tmp8, tmp10,
    138  src12, src13, src15, src14, src11, src10);
    139  BUTTERFLY_8(src8, src9, src10, src11, src15, src14, src13, src12, tmp8, tmp15,
    159  LD_SH2(src, 8, src6, src14);
    [all …]
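The repeated BUTTERFLY_8 calls above are the sum/difference stages of the Hadamard transform computed in vpx_hadamard_16x16_msa(). As a minimal scalar sketch, assuming the macro pairs mirrored inputs in the conventional way (an illustration, not the libvpx macro definition):

    #include <stdint.h>

    /* Scalar sketch of one 8-point butterfly stage: sums of mirrored pairs in
     * the first half of the output, differences in the second half. Assumed
     * behaviour of BUTTERFLY_8; in and out must not alias. */
    static void butterfly8(const int16_t in[8], int16_t out[8]) {
      for (int i = 0; i < 4; ++i) {
        out[i] = in[i] + in[7 - i];
        out[7 - i] = in[i] - in[7 - i];
      }
    }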
  vpx_convolve_avg_msa.c:
    avg_width32_msa():
      106  v16u8 src8, src9, src10, src11, src12, src13, src14, src15;   (local declaration)
      117  LD_UB4(src, src_stride, src8, src10, src12, src14);
      130  AVER_UB4_UB(src12, dst12, src13, dst13, src14, dst14, src15, dst15, dst12,
    avg_width64_msa():
      147  v16u8 src8, src9, src10, src11, src12, src13, src14, src15;
      158  LD_UB4(src, 16, src12, src13, src14, src15);
      176  AVER_UB4_UB(src12, dst12, src13, dst13, src14, dst14, src15, dst15, dst12,
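AVER_UB4_UB averages four source/destination vector pairs at a time with rounding; the file implements the MSA version of vpx_convolve_avg, which blends a block into the destination. A scalar sketch of that byte-wise rounded average (function and parameter names here are illustrative, not libvpx's API):

    #include <stdint.h>

    /* dst = (src + dst + 1) >> 1 for every byte of a w x h block, i.e. the
     * rounded average that the AVER_UB4_UB hits apply to whole vectors. */
    static void avg_block(const uint8_t *src, int src_stride,
                          uint8_t *dst, int dst_stride, int w, int h) {
      for (int y = 0; y < h; ++y) {
        for (int x = 0; x < w; ++x) {
          dst[x] = (uint8_t)((src[x] + dst[x] + 1) >> 1);
        }
        src += src_stride;
        dst += dst_stride;
      }
    }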
/external/libvpx/libvpx/vp8/common/mips/msa/
  copymem_msa.c (all matches in copy_16x16_msa()):
     38  v16u8 src8, src9, src10, src11, src12, src13, src14, src15;   (local declaration)
     42  LD_UB8(src, src_stride, src8, src9, src10, src11, src12, src13, src14, src15);
     46  ST_UB8(src8, src9, src10, src11, src12, src13, src14, src15, dst, dst_stride);
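The three matches are the second half of a plain 16x16 block copy: eight rows are loaded into v16u8 registers and stored back out with LD_UB8/ST_UB8. A hedged reconstruction of that shape, assuming a matching first half for src0..src7 and an include path for the MSA macros (only the src8..src15 calls actually appear above):

    #include <stdint.h>
    #include "vp8_macros_msa.h"  /* assumed header providing v16u8, LD_UB8, ST_UB8 */

    /* Sketch, not the verbatim libvpx function: copy a 16x16 byte block in two
     * eight-row batches. LD_UB8 loads eight 16-byte vectors with a stride and
     * ST_UB8 stores eight, as shown in the matches. Needs a MIPS MSA toolchain. */
    static void copy_16x16_sketch(uint8_t *src, int32_t src_stride,
                                  uint8_t *dst, int32_t dst_stride) {
      v16u8 src0, src1, src2, src3, src4, src5, src6, src7;
      v16u8 src8, src9, src10, src11, src12, src13, src14, src15;

      LD_UB8(src, src_stride, src0, src1, src2, src3, src4, src5, src6, src7);
      src += 8 * src_stride;
      LD_UB8(src, src_stride, src8, src9, src10, src11, src12, src13, src14, src15);

      ST_UB8(src0, src1, src2, src3, src4, src5, src6, src7, dst, dst_stride);
      dst += 8 * dst_stride;
      ST_UB8(src8, src9, src10, src11, src12, src13, src14, src15, dst, dst_stride);
    }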
/external/libvpx/libvpx/vpx_dsp/ppc/
  inv_txfm_vsx.c (all matches in vpx_idct16x16_256_add_vsx()):
    527  src11, src12, src13, src14, src15, src16, src17;   (local declaration)
    545  src11, src02, src12, src03, src13, src04, src14, src05, src15,
    549  TRANSPOSE8x8(src10, src11, src12, src13, src14, src15, src16, src17, tmp10,
    553  src04, src05, src06, src07, src10, src11, src12, src13, src14, src15,
    557  TRANSPOSE8x8(src10, src11, src12, src13, src14, src15, src16, src17, tmp10,
    587  src14, src15, src16, src17, src30, src31, src32, src33, src34, src35,
    599  PIXEL_ADD_STORE16(src04, src14, dest4, 4 * stride);
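Here the src1x registers hold rows of the 16x16 inverse transform as they are transposed between passes (the TRANSPOSE8x8 calls), and PIXEL_ADD_STORE16 on line 599 adds the reconstructed residual to the existing destination pixels and clips to 8 bits. A scalar sketch of that final step (name and signature are illustrative, not the VSX macro):

    #include <stdint.h>

    /* Add a row of residuals to the prediction already in dest and saturate to
     * the 8-bit pixel range; the PIXEL_ADD_STORE16 hits do this sixteen pixels
     * at a time. Illustrative sketch only. */
    static void pixel_add_store_row(const int16_t *residual, uint8_t *dest, int n) {
      for (int i = 0; i < n; ++i) {
        int v = dest[i] + residual[i];
        dest[i] = (uint8_t)(v < 0 ? 0 : (v > 255 ? 255 : v));
      }
    }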
/external/libvpx/libvpx/vp8/encoder/mips/msa/
  denoising_msa.c (all matches in vp8_denoiser_filter_msa()):
     30  v16u8 src8, src9, src10, src11, src12, src13, src14, src15;   (local declaration)
    295  LD_UB8(sig_start, sig_stride, src8, src9, src10, src11, src12, src13, src14,
    301  ST_UB8(src8, src9, src10, src11, src12, src13, src14, src15,
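The LD_UB8/ST_UB8 pair here moves eight 16-byte rows of the signal block at a time, the same pattern sketched under copymem_msa.c above; in plain C the same data movement is just a row-by-row copy (names below are illustrative):

    #include <stdint.h>
    #include <string.h>

    /* Plain-C equivalent of the eight-row vector load/store in the matches:
     * copy a 16x16 byte block one row (one v16u8 register) at a time. */
    static void move_block_16x16(const uint8_t *src, int src_stride,
                                 uint8_t *dst, int dst_stride) {
      for (int r = 0; r < 16; ++r) {
        memcpy(dst, src, 16);
        src += src_stride;
        dst += dst_stride;
      }
    }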
/external/valgrind/VEX/priv/
  guest_s390_toIR.c (all matches in s390_irgen_PFPO()):
    7245  IRTemp src14 = newTemp(Ity_D64);   (local declaration)
    7400  assign(src14, get_dpr_dw0(4)); /* get source from FPR 4,6 */
    7401  assign(dst14, binop(Iop_D64toF64, irrm, mkexpr(src14)));
    7404  s390_cc_thunk_putFZ(S390_CC_OP_PFPO_64, src14, gr0);
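Unlike the libvpx hits, src14 here is a VEX IR temporary: the PFPO handler reads a 64-bit decimal-FP value from floating-point register pair 4/6, converts it to binary double precision, and records it for the lazily evaluated condition code. The same calls as in the matches, with comments added (surrounding declarations such as dst14, irrm and gr0 are not shown in the hits, so this is a fragment, not standalone code):

    IRTemp src14 = newTemp(Ity_D64);            /* temp holding the 64-bit DFP source    */

    assign(src14, get_dpr_dw0(4));              /* get source from FPR 4,6               */
    assign(dst14, binop(Iop_D64toF64, irrm,     /* convert decimal64 to binary64 using   */
                        mkexpr(src14)));        /*   the rounding mode in irrm           */

    s390_cc_thunk_putFZ(S390_CC_OP_PFPO_64,     /* remember operands for the lazily      */
                        src14, gr0);            /*   computed condition code             */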