/external/libvpx/libvpx/vpx_dsp/mips/
D  fwd_dct32x32_msa.c
     61  v8i16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;  in fdct8x32_1d_column_even_store() local
     72  vec4, vec5, vec6, vec7, in8, in9, in10, in11);  in fdct8x32_1d_column_even_store()
     75  ADD4(vec0, vec7, vec1, vec6, vec2, vec5, vec3, vec4, in0, in1, in2, in3);  in fdct8x32_1d_column_even_store()
     87  SUB4(vec0, vec7, vec1, vec6, vec2, vec5, vec3, vec4, vec7, vec6, vec5, vec4);  in fdct8x32_1d_column_even_store()
     88  DOTP_CONST_PAIR(vec6, vec5, cospi_16_64, cospi_16_64, vec5, vec6);  in fdct8x32_1d_column_even_store()
     89  ADD2(vec4, vec5, vec7, vec6, vec0, vec1);  in fdct8x32_1d_column_even_store()
     95  SUB2(vec4, vec5, vec7, vec6, vec4, vec7);  in fdct8x32_1d_column_even_store()
    101  DOTP_CONST_PAIR(in13, in10, cospi_16_64, cospi_16_64, vec2, vec5);  in fdct8x32_1d_column_even_store()
    103  ADD4(in8, vec3, in9, vec2, in14, vec5, in15, vec4, in0, vec1, vec6, in2);  in fdct8x32_1d_column_even_store()
    117  SUB2(in9, vec2, in14, vec5, vec2, vec5);  in fdct8x32_1d_column_even_store()
    [all …]
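
The ADD4/SUB4 butterflies and the DOTP_CONST_PAIR hits above are the vectorized form of the rounded cosine rotation used throughout vpx's forward transforms (the same rotation sits behind the DOT_SHIFT_RIGHT_PCK_H hits in fwd_txfm_msa.c below). A rough scalar sketch of one such rotation, assuming the usual vpx_dsp constants (DCT_CONST_BITS is 14; taps such as cospi_16_64 == 11585 are cosines scaled by 2^14); the helper name and the exact sign convention are illustrative, not the macro's literal expansion:

    #include <stdint.h>

    #define DCT_CONST_BITS 14 /* as in vpx_dsp/txfm_common.h */
    #define ROUND_POWER_OF_TWO(value, n) (((value) + (1 << ((n)-1))) >> (n))

    /* Rotate the pair (a, b) by the angle encoded in (c0, c1), where c0 and
     * c1 are cosine/sine values scaled by 2^14, then round back to 16 bits.
     * DOTP_CONST_PAIR performs this per lane across whole v8i16 registers. */
    void dct_const_rotate(int16_t a, int16_t b, int16_t c0, int16_t c1,
                          int16_t *out0, int16_t *out1) {
      int32_t t0 = (int32_t)a * c0 + (int32_t)b * c1;
      int32_t t1 = (int32_t)b * c0 - (int32_t)a * c1;
      *out0 = (int16_t)ROUND_POWER_OF_TWO(t0, DCT_CONST_BITS);
      *out1 = (int16_t)ROUND_POWER_OF_TWO(t1, DCT_CONST_BITS);
    }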

D  vpx_convolve8_avg_horiz_msa.c
    155  v8i16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;  in common_hz_8t_and_aver_dst_16w_msa() local
    176  VSHF_B4_SH(src1, src1, mask0, mask1, mask2, mask3, vec1, vec5, vec9, vec13);  in common_hz_8t_and_aver_dst_16w_msa()
    185  DPADD_SB4_SH(vec4, vec5, vec6, vec7, filt1, filt1, filt1, filt1, vec0, vec1,  in common_hz_8t_and_aver_dst_16w_msa()
    211  v8i16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;  in common_hz_8t_and_aver_dst_32w_msa() local
    234  VSHF_B4_SH(src1, src1, mask0, mask1, mask2, mask3, vec1, vec5, vec9, vec13);  in common_hz_8t_and_aver_dst_32w_msa()
    243  DPADD_SB4_SH(vec4, vec5, vec6, vec7, filt1, filt1, filt1, filt1, vec0, vec1,  in common_hz_8t_and_aver_dst_32w_msa()
    268  v8i16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;  in common_hz_8t_and_aver_dst_64w_msa() local
    292  VSHF_B4_SH(src1, src1, mask0, mask1, mask2, mask3, vec1, vec5, vec9,  in common_hz_8t_and_aver_dst_64w_msa()
    302  DPADD_SB4_SH(vec4, vec5, vec6, vec7, filt1, filt1, filt1, filt1, vec0,  in common_hz_8t_and_aver_dst_64w_msa()
    354  v8u16 vec4, vec5, vec6, vec7, filt;  in common_hz_2t_and_aver_dst_4x8_msa() local
    [all …]

D  vpx_convolve8_horiz_msa.c
    344  v8u16 vec4, vec5, vec6, vec7, filt;  in common_hz_2t_4x8_msa() local
    355  DOTP_UB4_UH(vec0, vec1, vec2, vec3, filt0, filt0, filt0, filt0, vec4, vec5,  in common_hz_2t_4x8_msa()
    357  SRARI_H4_UH(vec4, vec5, vec6, vec7, FILTER_BITS);  in common_hz_2t_4x8_msa()
    358  PCKEV_B4_SB(vec4, vec4, vec5, vec5, vec6, vec6, vec7, vec7, res0, res1,  in common_hz_2t_4x8_msa()
    476  v16u8 filt0, vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;  in common_hz_2t_16w_msa() local
    493  VSHF_B2_UB(src4, src4, src5, src5, mask, mask, vec4, vec5);  in common_hz_2t_16w_msa()
    497  DOTP_UB4_UH(vec4, vec5, vec6, vec7, filt0, filt0, filt0, filt0, out4, out5,  in common_hz_2t_16w_msa()
    517  VSHF_B2_UB(src4, src4, src5, src5, mask, mask, vec4, vec5);  in common_hz_2t_16w_msa()
    521  DOTP_UB4_UH(vec4, vec5, vec6, vec7, filt0, filt0, filt0, filt0, out4, out5,  in common_hz_2t_16w_msa()
    541  v16u8 filt0, vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;  in common_hz_2t_32w_msa() local
    [all …]

D  vpx_convolve8_vert_msa.c
    394  v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7, filt0;  in common_vt_2t_8x8mult_msa() local
    412  ILVR_B4_UB(src5, src4, src6, src5, src7, src6, src8, src7, vec4, vec5,  in common_vt_2t_8x8mult_msa()
    421  DOTP_UB4_UH(vec4, vec5, vec6, vec7, filt0, filt0, filt0, filt0, tmp0, tmp1,  in common_vt_2t_8x8mult_msa()
    447  v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7, filt0;  in common_vt_2t_16w_msa() local
    470  ILVL_B2_UB(src3, src2, src4, src3, vec5, vec7);  in common_vt_2t_16w_msa()
    476  DOTP_UB2_UH(vec4, vec5, filt0, filt0, tmp0, tmp1);  in common_vt_2t_16w_msa()
    495  v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7, filt0;  in common_vt_2t_32w_msa() local
    523  ILVL_B2_UB(src3, src2, src4, src3, vec5, vec7);  in common_vt_2t_32w_msa()
    524  DOTP_UB2_UH(vec4, vec5, filt0, filt0, tmp0, tmp1);  in common_vt_2t_32w_msa()
    543  ILVL_B2_UB(src8, src7, src9, src8, vec5, vec7);  in common_vt_2t_32w_msa()
    [all …]

D  vpx_convolve8_avg_vert_msa.c
    380  v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7, filt0;  in common_vt_2t_and_aver_dst_8x8mult_msa() local
    398  ILVR_B4_UB(src5, src4, src6, src5, src7, src6, src8, src7, vec4, vec5,  in common_vt_2t_and_aver_dst_8x8mult_msa()
    407  DOTP_UB4_UH(vec4, vec5, vec6, vec7, filt0, filt0, filt0, filt0, tmp0, tmp1,  in common_vt_2t_and_aver_dst_8x8mult_msa()
    440  v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;  in common_vt_2t_and_aver_dst_16w_msa() local
    463  ILVL_B2_UB(src3, src2, src4, src3, vec5, vec7);  in common_vt_2t_and_aver_dst_16w_msa()
    469  DOTP_UB2_UH(vec4, vec5, filt0, filt0, tmp0, tmp1);  in common_vt_2t_and_aver_dst_16w_msa()
    492  v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7, filt0;  in common_vt_2t_and_aver_dst_32w_msa() local
    521  ILVL_B2_UB(src3, src2, src4, src3, vec5, vec7);  in common_vt_2t_and_aver_dst_32w_msa()
    522  DOTP_UB2_UH(vec4, vec5, filt0, filt0, tmp0, tmp1);  in common_vt_2t_and_aver_dst_32w_msa()
    541  ILVL_B2_UB(src8, src7, src9, src8, vec5, vec7);  in common_vt_2t_and_aver_dst_32w_msa()
    [all …]

D  fwd_txfm_msa.c
     20  v8i16 vec0, vec1, vec2, vec3, vec4, vec5, cnst0, cnst1, cnst4, cnst5;  in fdct8x16_1d_column() local
     47  ILVR_H2_SH(in10, in13, in11, in12, vec3, vec5);  in fdct8x16_1d_column()
     55  stp24 = DOT_SHIFT_RIGHT_PCK_H(vec4, vec5, cnst4);  in fdct8x16_1d_column()
     56  stp23 = DOT_SHIFT_RIGHT_PCK_H(vec4, vec5, cnst5);  in fdct8x16_1d_column()
     62  ILVR_H2_SH(stp36, stp31, stp35, stp32, vec3, vec5);  in fdct8x16_1d_column()
     99  stp25 = DOT_SHIFT_RIGHT_PCK_H(vec4, vec5, cnst1);  in fdct8x16_1d_column()
    103  stp22 = DOT_SHIFT_RIGHT_PCK_H(vec4, vec5, cnst1);  in fdct8x16_1d_column()

D  loopfilter_8_msa.c
    257  v8i16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;  in vpx_lpf_vertical_8_dual_msa() local
    294  ILVRL_H2_SH(vec1, vec0, vec4, vec5);  in vpx_lpf_vertical_8_dual_msa()
    299  ST4x8_UB(vec4, vec5, src, pitch);  in vpx_lpf_vertical_8_dual_msa()
    333  ILVRL_B2_SH(q2, q1, vec2, vec5);  in vpx_lpf_vertical_8_dual_msa()
    343  ST2x4_UB(vec5, 0, src + 4, pitch);  in vpx_lpf_vertical_8_dual_msa()
    346  ST2x4_UB(vec5, 4, src + 4, pitch);  in vpx_lpf_vertical_8_dual_msa()

D  vpx_convolve8_msa.c
    272  v8u16 hz_out7, hz_out8, vec4, vec5, vec6, vec7, filt;  in common_hv_2ht_2vt_4x8_msa() local
    299  vec4, vec5, vec6, vec7);  in common_hv_2ht_2vt_4x8_msa()
    300  SRARI_H4_UH(vec4, vec5, vec6, vec7, FILTER_BITS);  in common_hv_2ht_2vt_4x8_msa()
    301  PCKEV_B4_SB(vec4, vec4, vec5, vec5, vec6, vec6, vec7, vec7, res0, res1,  in common_hv_2ht_2vt_4x8_msa()

D  sub_pixel_variance_msa.c
     496  v8u16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;  in sub_pixel_sse_diff_16width_h_msa() local
     513  VSHF_B2_UH(src4, src4, src5, src5, mask, mask, vec4, vec5);  in sub_pixel_sse_diff_16width_h_msa()
     517  DOTP_UB4_UH(vec4, vec5, vec6, vec7, filt0, filt0, filt0, filt0,  in sub_pixel_sse_diff_16width_h_msa()
     686  v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;  in sub_pixel_sse_diff_16width_v_msa() local
     711  ILVL_B2_UB(src3, src2, src4, src3, vec5, vec7);  in sub_pixel_sse_diff_16width_v_msa()
     716  DOTP_UB2_UH(vec4, vec5, filt0, filt0, tmp0, tmp1);  in sub_pixel_sse_diff_16width_v_msa()
    1139  v8u16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;  in subpel_avg_ssediff_16w_h_msa() local
    1158  VSHF_B2_UH(src4, src4, src5, src5, mask, mask, vec4, vec5);  in subpel_avg_ssediff_16w_h_msa()
    1162  DOTP_UB4_UH(vec4, vec5, vec6, vec7, filt0, filt0, filt0, filt0,  in subpel_avg_ssediff_16w_h_msa()
    1363  v8u16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;  in subpel_avg_ssediff_16w_v_msa() local
    [all …]

D  loopfilter_16_msa.c
    1072  v8i16 vec0, vec1, vec2, vec3, vec4, vec5;  in vpx_vt_lpf_t4_and_t8_16w() local
    1093  ILVRL_H2_SH(vec1, vec0, vec4, vec5);  in vpx_vt_lpf_t4_and_t8_16w()
    1098  ST4x8_UB(vec4, vec5, src_org, pitch);  in vpx_vt_lpf_t4_and_t8_16w()
    1157  v8i16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;  in vpx_vt_lpf_t16_16w() local
    1166  ILVRL_B2_SH(q2, q1, vec2, vec5);  in vpx_vt_lpf_t16_16w()
    1176  ST2x4_UB(vec5, 0, (src_org + 4), pitch);  in vpx_vt_lpf_t16_16w()
    1179  ST2x4_UB(vec5, 4, (src_org + 4), pitch);  in vpx_vt_lpf_t16_16w()

/external/libvpx/libvpx/vp8/common/mips/msa/
D  bilinear_filter_msa.c
     65  v8u16 vec4, vec5, vec6, vec7, filt;  in common_hz_2t_4x8_msa() local
     76  vec4, vec5, vec6, vec7);  in common_hz_2t_4x8_msa()
     77  SRARI_H4_UH(vec4, vec5, vec6, vec7, VP8_FILTER_SHIFT);  in common_hz_2t_4x8_msa()
     78  PCKEV_B4_SB(vec4, vec4, vec5, vec5, vec6, vec6, vec7, vec7,  in common_hz_2t_4x8_msa()
    206  v16u8 filt0, vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7;  in common_hz_2t_16w_msa() local
    222  VSHF_B2_UB(src4, src4, src5, src5, mask, mask, vec4, vec5);  in common_hz_2t_16w_msa()
    226  DOTP_UB4_UH(vec4, vec5, vec6, vec7, filt0, filt0, filt0, filt0,  in common_hz_2t_16w_msa()
    247  VSHF_B2_UB(src4, src4, src5, src5, mask, mask, vec4, vec5);  in common_hz_2t_16w_msa()
    251  DOTP_UB4_UH(vec4, vec5, vec6, vec7, filt0, filt0, filt0, filt0,  in common_hz_2t_16w_msa()
    367  v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7, filt0;  in common_vt_2t_8x8mult_msa() local
    [all …]

/external/llvm/test/Transforms/ConstProp/
D  InsertElement.ll
     29  %vec5 = insertelement <4 x i64> %vec3, i64 -5, i32 4
     31  ret <4 x i64> %vec5

/external/llvm/test/CodeGen/X86/
D  avx512bwvl-intrinsics.ll
     88  %vec5 = insertelement <8 x i32> %vec4, i32 %res5, i32 5
     91  %vec6 = insertelement <8 x i32> %vec5, i32 %res6, i32 6
    117  %vec5 = insertelement <8 x i32> %vec4, i32 %res5, i32 5
    120  %vec6 = insertelement <8 x i32> %vec5, i32 %res6, i32 6
    148  %vec5 = insertelement <8 x i32> %vec4, i32 %res5, i32 5
    151  %vec6 = insertelement <8 x i32> %vec5, i32 %res6, i32 6
    177  %vec5 = insertelement <8 x i32> %vec4, i32 %res5, i32 5
    180  %vec6 = insertelement <8 x i32> %vec5, i32 %res6, i32 6
    208  %vec5 = insertelement <8 x i16> %vec4, i16 %res5, i32 5
    211  %vec6 = insertelement <8 x i16> %vec5, i16 %res6, i32 6
    [all …]

D  avx512vl-intrinsics.ll
     88  %vec5 = insertelement <8 x i8> %vec4, i8 %res5, i32 5
     91  %vec6 = insertelement <8 x i8> %vec5, i8 %res6, i32 6
    117  %vec5 = insertelement <8 x i8> %vec4, i8 %res5, i32 5
    120  %vec6 = insertelement <8 x i8> %vec5, i8 %res6, i32 6
    148  %vec5 = insertelement <8 x i8> %vec4, i8 %res5, i32 5
    151  %vec6 = insertelement <8 x i8> %vec5, i8 %res6, i32 6
    177  %vec5 = insertelement <8 x i8> %vec4, i8 %res5, i32 5
    180  %vec6 = insertelement <8 x i8> %vec5, i8 %res6, i32 6
    208  %vec5 = insertelement <8 x i8> %vec4, i8 %res5, i32 5
    211  %vec6 = insertelement <8 x i8> %vec5, i8 %res6, i32 6
    [all …]

D  avx512-intrinsics.ll
    1119  %vec5 = insertelement <8 x i16> %vec4, i16 %res5, i32 5
    1121  %vec6 = insertelement <8 x i16> %vec5, i16 %res6, i32 6
    1167  %vec5 = insertelement <8 x i16> %vec4, i16 %res5, i32 5
    1169  %vec6 = insertelement <8 x i16> %vec5, i16 %res6, i32 6
    1216  %vec5 = insertelement <8 x i16> %vec4, i16 %res5, i32 5
    1218  %vec6 = insertelement <8 x i16> %vec5, i16 %res6, i32 6
    1264  %vec5 = insertelement <8 x i16> %vec4, i16 %res5, i32 5
    1266  %vec6 = insertelement <8 x i16> %vec5, i16 %res6, i32 6
    1321  %vec5 = insertelement <8 x i8> %vec4, i8 %res5, i32 5
    1323  %vec6 = insertelement <8 x i8> %vec5, i8 %res6, i32 6
    [all …]
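
Across the three avx512*-intrinsics.ll files above, the %vec5/%vec6 lines are all one testing idiom: each masked variant of an intrinsic runs once and its scalar result is inserted into its own lane, so a single returned vector checks every variant at once. A C analogue of that idiom using GCC/Clang vector extensions (the type and function names are ours):

    #include <stdint.h>

    typedef int16_t v8i16_t __attribute__((vector_size(16)));

    /* One lane per variant result, mirroring the insertelement chains. */
    v8i16_t collect_results(int16_t res5, int16_t res6) {
      v8i16_t vec = { 0 };
      vec[5] = res5; /* %vec5 = insertelement <8 x i16> %vec4, i16 %res5, i32 5 */
      vec[6] = res6; /* %vec6 = insertelement <8 x i16> %vec5, i16 %res6, i32 6 */
      return vec;
    }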

/external/hyphenation-patterns/en-GB/
D  hyph-en-gb.pat.txt
    479  .vec5
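
For context: this file is a TeX-style (Liang) hyphenation pattern list, not code. In such files a leading dot anchors the pattern to the start of a word and an odd digit between letters licenses a hyphen at that position, so .vec5 most likely permits a break after an initial "vec" (as in vec-tor); this entry matches vec5 purely as text.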