
Searched refs:vec7 (Results 1 – 13 of 13) sorted by relevance

/external/libvpx/libvpx/vpx_dsp/mips/
fwd_dct32x32_msa.c
61 v8i16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; in fdct8x32_1d_column_even_store() local
72 vec4, vec5, vec6, vec7, in8, in9, in10, in11); in fdct8x32_1d_column_even_store()
75 ADD4(vec0, vec7, vec1, vec6, vec2, vec5, vec3, vec4, in0, in1, in2, in3); in fdct8x32_1d_column_even_store()
87 SUB4(vec0, vec7, vec1, vec6, vec2, vec5, vec3, vec4, vec7, vec6, vec5, vec4); in fdct8x32_1d_column_even_store()
89 ADD2(vec4, vec5, vec7, vec6, vec0, vec1); in fdct8x32_1d_column_even_store()
95 SUB2(vec4, vec5, vec7, vec6, vec4, vec7); in fdct8x32_1d_column_even_store()
96 DOTP_CONST_PAIR(vec7, vec4, cospi_12_64, cospi_20_64, temp1, temp0); in fdct8x32_1d_column_even_store()
105 ADD2(in0, in1, in2, in3, vec0, vec7); in fdct8x32_1d_column_even_store()
106 DOTP_CONST_PAIR(vec7, vec0, cospi_30_64, cospi_2_64, temp1, temp0); in fdct8x32_1d_column_even_store()
292 v8i16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; in fdct8x32_1d_row_even_4x() local
[all …]
vpx_convolve8_avg_horiz_msa.c
155 v8i16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; in common_hz_8t_and_aver_dst_16w_msa() local
179 VSHF_B4_SH(src3, src3, mask0, mask1, mask2, mask3, vec3, vec7, vec11, in common_hz_8t_and_aver_dst_16w_msa()
185 DPADD_SB4_SH(vec4, vec5, vec6, vec7, filt1, filt1, filt1, filt1, vec0, vec1, in common_hz_8t_and_aver_dst_16w_msa()
211 v8i16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; in common_hz_8t_and_aver_dst_32w_msa() local
237 VSHF_B4_SH(src3, src3, mask0, mask1, mask2, mask3, vec3, vec7, vec11, in common_hz_8t_and_aver_dst_32w_msa()
243 DPADD_SB4_SH(vec4, vec5, vec6, vec7, filt1, filt1, filt1, filt1, vec0, vec1, in common_hz_8t_and_aver_dst_32w_msa()
268 v8i16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; in common_hz_8t_and_aver_dst_64w_msa() local
296 VSHF_B4_SH(src3, src3, mask0, mask1, mask2, mask3, vec3, vec7, vec11, in common_hz_8t_and_aver_dst_64w_msa()
302 DPADD_SB4_SH(vec4, vec5, vec6, vec7, filt1, filt1, filt1, filt1, vec0, in common_hz_8t_and_aver_dst_64w_msa()
354 v8u16 vec4, vec5, vec6, vec7, filt; in common_hz_2t_and_aver_dst_4x8_msa() local
[all …]
vpx_convolve8_horiz_msa.c
344 v8u16 vec4, vec5, vec6, vec7, filt; in common_hz_2t_4x8_msa() local
356 vec6, vec7); in common_hz_2t_4x8_msa()
357 SRARI_H4_UH(vec4, vec5, vec6, vec7, FILTER_BITS); in common_hz_2t_4x8_msa()
358 PCKEV_B4_SB(vec4, vec4, vec5, vec5, vec6, vec6, vec7, vec7, res0, res1, in common_hz_2t_4x8_msa()
476 v16u8 filt0, vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; in common_hz_2t_16w_msa() local
494 VSHF_B2_UB(src6, src6, src7, src7, mask, mask, vec6, vec7); in common_hz_2t_16w_msa()
497 DOTP_UB4_UH(vec4, vec5, vec6, vec7, filt0, filt0, filt0, filt0, out4, out5, in common_hz_2t_16w_msa()
518 VSHF_B2_UB(src6, src6, src7, src7, mask, mask, vec6, vec7); in common_hz_2t_16w_msa()
521 DOTP_UB4_UH(vec4, vec5, vec6, vec7, filt0, filt0, filt0, filt0, out4, out5, in common_hz_2t_16w_msa()
541 v16u8 filt0, vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; in common_hz_2t_32w_msa() local
[all …]
vpx_convolve8_vert_msa.c
394 v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7, filt0; in common_vt_2t_8x8mult_msa() local
413 vec6, vec7); in common_vt_2t_8x8mult_msa()
421 DOTP_UB4_UH(vec4, vec5, vec6, vec7, filt0, filt0, filt0, filt0, tmp0, tmp1, in common_vt_2t_8x8mult_msa()
447 v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7, filt0; in common_vt_2t_16w_msa() local
470 ILVL_B2_UB(src3, src2, src4, src3, vec5, vec7); in common_vt_2t_16w_msa()
481 DOTP_UB2_UH(vec6, vec7, filt0, filt0, tmp2, tmp3); in common_vt_2t_16w_msa()
495 v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7, filt0; in common_vt_2t_32w_msa() local
523 ILVL_B2_UB(src3, src2, src4, src3, vec5, vec7); in common_vt_2t_32w_msa()
528 DOTP_UB2_UH(vec6, vec7, filt0, filt0, tmp2, tmp3); in common_vt_2t_32w_msa()
543 ILVL_B2_UB(src8, src7, src9, src8, vec5, vec7); in common_vt_2t_32w_msa()
[all …]
vpx_convolve8_avg_vert_msa.c
380 v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7, filt0; in common_vt_2t_and_aver_dst_8x8mult_msa() local
399 vec6, vec7); in common_vt_2t_and_aver_dst_8x8mult_msa()
407 DOTP_UB4_UH(vec4, vec5, vec6, vec7, filt0, filt0, filt0, filt0, tmp0, tmp1, in common_vt_2t_and_aver_dst_8x8mult_msa()
440 v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; in common_vt_2t_and_aver_dst_16w_msa() local
463 ILVL_B2_UB(src3, src2, src4, src3, vec5, vec7); in common_vt_2t_and_aver_dst_16w_msa()
474 DOTP_UB2_UH(vec6, vec7, filt0, filt0, tmp2, tmp3); in common_vt_2t_and_aver_dst_16w_msa()
492 v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7, filt0; in common_vt_2t_and_aver_dst_32w_msa() local
521 ILVL_B2_UB(src3, src2, src4, src3, vec5, vec7); in common_vt_2t_and_aver_dst_32w_msa()
526 DOTP_UB2_UH(vec6, vec7, filt0, filt0, tmp2, tmp3); in common_vt_2t_and_aver_dst_32w_msa()
541 ILVL_B2_UB(src8, src7, src9, src8, vec5, vec7); in common_vt_2t_and_aver_dst_32w_msa()
[all …]
loopfilter_8_msa.c
257 v8i16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; in vpx_lpf_vertical_8_dual_msa() local
332 ILVRL_H2_SH(vec1, vec0, vec6, vec7); in vpx_lpf_vertical_8_dual_msa()
345 ST4x4_UB(vec7, vec7, 0, 1, 2, 3, src, pitch); in vpx_lpf_vertical_8_dual_msa()
vpx_convolve8_msa.c
272 v8u16 hz_out7, hz_out8, vec4, vec5, vec6, vec7, filt; in common_hv_2ht_2vt_4x8_msa() local
299 vec4, vec5, vec6, vec7); in common_hv_2ht_2vt_4x8_msa()
300 SRARI_H4_UH(vec4, vec5, vec6, vec7, FILTER_BITS); in common_hv_2ht_2vt_4x8_msa()
301 PCKEV_B4_SB(vec4, vec4, vec5, vec5, vec6, vec6, vec7, vec7, res0, res1, in common_hv_2ht_2vt_4x8_msa()
sub_pixel_variance_msa.c
496 v8u16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; in sub_pixel_sse_diff_16width_h_msa() local
514 VSHF_B2_UH(src6, src6, src7, src7, mask, mask, vec6, vec7); in sub_pixel_sse_diff_16width_h_msa()
517 DOTP_UB4_UH(vec4, vec5, vec6, vec7, filt0, filt0, filt0, filt0, in sub_pixel_sse_diff_16width_h_msa()
686 v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; in sub_pixel_sse_diff_16width_v_msa() local
711 ILVL_B2_UB(src3, src2, src4, src3, vec5, vec7); in sub_pixel_sse_diff_16width_v_msa()
719 DOTP_UB2_UH(vec6, vec7, filt0, filt0, tmp2, tmp3); in sub_pixel_sse_diff_16width_v_msa()
1139 v8u16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; in subpel_avg_ssediff_16w_h_msa() local
1159 VSHF_B2_UH(src6, src6, src7, src7, mask, mask, vec6, vec7); in subpel_avg_ssediff_16w_h_msa()
1162 DOTP_UB4_UH(vec4, vec5, vec6, vec7, filt0, filt0, filt0, filt0, in subpel_avg_ssediff_16w_h_msa()
1363 v8u16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; in subpel_avg_ssediff_16w_v_msa() local
[all …]
loopfilter_16_msa.c
1157 v8i16 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; in vpx_vt_lpf_t16_16w() local
1165 ILVRL_H2_SH(vec1, vec0, vec6, vec7); in vpx_vt_lpf_t16_16w()
1178 ST4x4_UB(vec7, vec7, 0, 1, 2, 3, src_org, pitch); in vpx_vt_lpf_t16_16w()
/external/libvpx/libvpx/vp8/common/mips/msa/
bilinear_filter_msa.c
65 v8u16 vec4, vec5, vec6, vec7, filt; in common_hz_2t_4x8_msa() local
76 vec4, vec5, vec6, vec7); in common_hz_2t_4x8_msa()
77 SRARI_H4_UH(vec4, vec5, vec6, vec7, VP8_FILTER_SHIFT); in common_hz_2t_4x8_msa()
78 PCKEV_B4_SB(vec4, vec4, vec5, vec5, vec6, vec6, vec7, vec7, in common_hz_2t_4x8_msa()
206 v16u8 filt0, vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7; in common_hz_2t_16w_msa() local
223 VSHF_B2_UB(src6, src6, src7, src7, mask, mask, vec6, vec7); in common_hz_2t_16w_msa()
226 DOTP_UB4_UH(vec4, vec5, vec6, vec7, filt0, filt0, filt0, filt0, in common_hz_2t_16w_msa()
248 VSHF_B2_UB(src6, src6, src7, src7, mask, mask, vec6, vec7); in common_hz_2t_16w_msa()
251 DOTP_UB4_UH(vec4, vec5, vec6, vec7, filt0, filt0, filt0, filt0, in common_hz_2t_16w_msa()
367 v16u8 vec0, vec1, vec2, vec3, vec4, vec5, vec6, vec7, filt0; in common_vt_2t_8x8mult_msa() local
[all …]
/external/llvm/test/CodeGen/X86/
avx512bwvl-intrinsics.ll
94 %vec7 = insertelement <8 x i32> %vec6, i32 %res7, i32 7
95 ret <8 x i32> %vec7
123 %vec7 = insertelement <8 x i32> %vec6, i32 %res7, i32 7
124 ret <8 x i32> %vec7
154 %vec7 = insertelement <8 x i32> %vec6, i32 %res7, i32 7
155 ret <8 x i32> %vec7
183 %vec7 = insertelement <8 x i32> %vec6, i32 %res7, i32 7
184 ret <8 x i32> %vec7
214 %vec7 = insertelement <8 x i16> %vec6, i16 %res7, i32 7
215 ret <8 x i16> %vec7
[all …]
avx512vl-intrinsics.ll
94 %vec7 = insertelement <8 x i8> %vec6, i8 %res7, i32 7
95 ret <8 x i8> %vec7
123 %vec7 = insertelement <8 x i8> %vec6, i8 %res7, i32 7
124 ret <8 x i8> %vec7
154 %vec7 = insertelement <8 x i8> %vec6, i8 %res7, i32 7
155 ret <8 x i8> %vec7
183 %vec7 = insertelement <8 x i8> %vec6, i8 %res7, i32 7
184 ret <8 x i8> %vec7
214 %vec7 = insertelement <8 x i8> %vec6, i8 %res7, i32 7
215 ret <8 x i8> %vec7
[all …]
avx512-intrinsics.ll
1123 %vec7 = insertelement <8 x i16> %vec6, i16 %res7, i32 7
1124 ret <8 x i16> %vec7
1171 %vec7 = insertelement <8 x i16> %vec6, i16 %res7, i32 7
1172 ret <8 x i16> %vec7
1220 %vec7 = insertelement <8 x i16> %vec6, i16 %res7, i32 7
1221 ret <8 x i16> %vec7
1268 %vec7 = insertelement <8 x i16> %vec6, i16 %res7, i32 7
1269 ret <8 x i16> %vec7
1325 %vec7 = insertelement <8 x i8> %vec6, i8 %res7, i32 7
1326 ret <8 x i8> %vec7
[all …]