Searched refs:tmp1_l (Results 1 – 5 of 5) sorted by relevance
/third_party/ffmpeg/libavcodec/loongarch/
D  vp9_lpf_lsx.c
    736  v8u16 tmp0_l, tmp1_l, tmp0_h, tmp1_h;  in vp9_hz_lpf_t16_16w() local
    785  tmp1_l = p6_l_in + p5_l_in;  in vp9_hz_lpf_t16_16w()
    786  tmp1_l += p4_l_in;  in vp9_hz_lpf_t16_16w()
    787  tmp1_l += p3_l_in;  in vp9_hz_lpf_t16_16w()
    788  tmp1_l += p2_l_in;  in vp9_hz_lpf_t16_16w()
    789  tmp1_l += p1_l_in;  in vp9_hz_lpf_t16_16w()
    790  tmp1_l += p0_l_in;  in vp9_hz_lpf_t16_16w()
    791  tmp1_l += tmp0_l;  in vp9_hz_lpf_t16_16w()
    793  out_l = __lsx_vsrari_h((__m128i)tmp1_l, 4);  in vp9_hz_lpf_t16_16w()
    830  tmp1_l += tmp0_l;  in vp9_hz_lpf_t16_16w()
    [all …]
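The running sum at lines 785–791 followed by the rounding shift at line 793 is the wide (flat16) branch of the VP9 loop filter: each output pixel is a 15-tap weighted average over the p7..q7 window. A minimal scalar sketch of the first output, assuming tmp0_l already holds p7*7 + p6 + q0 from earlier in the function (the helper below is illustrative only, not the ffmpeg code itself):

    /* Scalar sketch of the accumulation that tmp0_l/tmp1_l appear to
     * vectorize for the first wide-filter output (one lane shown). */
    static unsigned wide_filter_p6(unsigned p7, unsigned p6, unsigned p5,
                                   unsigned p4, unsigned p3, unsigned p2,
                                   unsigned p1, unsigned p0, unsigned q0)
    {
        unsigned tmp0 = (p7 << 3) - p7 + p6 + q0;          /* p7*7 + p6 + q0 */
        unsigned tmp1 = p6 + p5 + p4 + p3 + p2 + p1 + p0;  /* lines 785–790  */
        tmp1 += tmp0;                                      /* line 791       */
        return (tmp1 + 8) >> 4;   /* round to nearest, like __lsx_vsrari_h(., 4) */
    }

Line 830 repeats the `tmp1_l += tmp0_l` step after tmp0_l has been updated, i.e. the window is slid one tap for the next output.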
D  hevc_idct_lsx.c
    282  tmp1_l = sum0_l; \
    288  tmp1_l = __lsx_vsub_w(tmp1_l, tmp0_l); \
    290  __lsx_vst(tmp1_l, (input + store_idx * 8), 16); \
    548  __m128i sum0_r, sum0_l, sum1_r, sum1_l, tmp0_r, tmp0_l, tmp1_r, tmp1_l;  in hevc_idct_8x32_column_lsx() local
    608  src3_r, filter1, src3_l, filter1, sum0_r, sum0_l, tmp1_r, tmp1_l);  in hevc_idct_8x32_column_lsx()
    610  sum1_l = __lsx_vsub_w(sum0_l, tmp1_l);  in hevc_idct_8x32_column_lsx()
    612  sum0_l = __lsx_vadd_w(sum0_l, tmp1_l);  in hevc_idct_8x32_column_lsx()
    621  src3_r, filter1, src3_l, filter1, sum0_r, sum0_l, tmp1_r, tmp1_l);  in hevc_idct_8x32_column_lsx()
    623  sum1_l = __lsx_vsub_w(sum0_l, tmp1_l);  in hevc_idct_8x32_column_lsx()
    625  sum0_l = __lsx_vadd_w(sum0_l, tmp1_l);  in hevc_idct_8x32_column_lsx()
    [all …]
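The pair at lines 610/612 (repeated at 623/625) is the usual add/sub butterfly of the inverse transform: one accumulator carries one partial sum, the other its counterpart, and the two outputs are their sum and difference in each 32-bit lane. A scalar sketch of the same step (illustrative only; the vector code does this across all lanes of the __m128i registers at once):

    /* One lane of the butterfly done by __lsx_vadd_w / __lsx_vsub_w. */
    static void idct_butterfly_lane(int sum0, int tmp1,
                                    int *out_add, int *out_sub)
    {
        *out_sub = sum0 - tmp1;   /* line 610: sum1_l = __lsx_vsub_w(sum0_l, tmp1_l) */
        *out_add = sum0 + tmp1;   /* line 612: sum0_l = __lsx_vadd_w(sum0_l, tmp1_l) */
    }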
/third_party/ffmpeg/libavcodec/mips/
D  vp9_lpf_msa.c
    639  v8u16 tmp0_r, tmp1_r, tmp0_l, tmp1_l;  in vp9_hz_lpf_t16_16w() local
    689  tmp1_l = p6_l_in + p5_l_in;  in vp9_hz_lpf_t16_16w()
    690  tmp1_l += p4_l_in;  in vp9_hz_lpf_t16_16w()
    691  tmp1_l += p3_l_in;  in vp9_hz_lpf_t16_16w()
    692  tmp1_l += p2_l_in;  in vp9_hz_lpf_t16_16w()
    693  tmp1_l += p1_l_in;  in vp9_hz_lpf_t16_16w()
    694  tmp1_l += p0_l_in;  in vp9_hz_lpf_t16_16w()
    695  tmp1_l += tmp0_l;  in vp9_hz_lpf_t16_16w()
    696  l_out = __msa_srari_h((v8i16) tmp1_l, 4);  in vp9_hz_lpf_t16_16w()
    715  tmp1_l += tmp0_l;  in vp9_hz_lpf_t16_16w()
    [all …]
D  h264idct_msa.c
    118  v4i32 tmp0_l, tmp1_l, tmp2_l, tmp3_l, tmp4_l, tmp5_l, tmp6_l, tmp7_l;  in avc_idct8_addblk_msa() local
    161  UNPCK_SH_SW(res1, tmp1_r, tmp1_l);  in avc_idct8_addblk_msa()
    189  vec1_l = tmp1_l - vec1_l + tmp7_l - tmp3_l;  in avc_idct8_addblk_msa()
    193  vec2_l = vec2_l - tmp1_l + tmp7_l + tmp5_l;  in avc_idct8_addblk_msa()
    195  vec3_l = tmp1_l >> 1;  in avc_idct8_addblk_msa()
    197  vec3_l = vec3_l + tmp3_l + tmp5_l + tmp1_l;  in avc_idct8_addblk_msa()
    199  tmp1_l = vec3_l >> 2;  in avc_idct8_addblk_msa()
    201  tmp1_l += vec0_l;  in avc_idct8_addblk_msa()
    218  BUTTERFLY_4(tmp6_r, tmp6_l, tmp1_l, tmp1_r, res3_r, res3_l, res4_l, res4_r);  in avc_idct8_addblk_msa()
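The >>1 and >>2 shifts at lines 195–201 are the characteristic scalings of the odd part of the H.264 8x8 inverse transform. A scalar sketch of that odd part, assuming tmp1/tmp3/tmp5/tmp7 hold the unpacked coefficients d1/d3/d5/d7 (the variable mapping is inferred from the snippet, not verified against the full source):

    /* Scalar sketch of the 8x8 IDCT odd part that the vec*_l / tmp1_l lines
     * above appear to vectorize (one 32-bit lane shown). */
    static void idct8_odd(int d1, int d3, int d5, int d7,
                          int *b1, int *b3, int *b5, int *b7)
    {
        int a1 = -d3 + d5 - d7 - (d7 >> 1);
        int a3 =  d1 + d7 - d3 - (d3 >> 1);   /* cf. line 189 */
        int a5 = -d1 + d7 + d5 + (d5 >> 1);   /* cf. line 193 */
        int a7 =  d3 + d5 + d1 + (d1 >> 1);   /* cf. lines 195, 197 */

        *b1 = (a7 >> 2) + a1;                 /* cf. lines 199, 201 */
        *b3 =  a3 + (a5 >> 2);
        *b5 = (a3 >> 2) - a5;
        *b7 =  a7 - (a1 >> 2);
    }

The results then feed the final even/odd recombination, visible here as the BUTTERFLY_4 call at line 218.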
D  hevc_idct_msa.c
    268  tmp1_l = sum0_l; \
    273  tmp1_l -= tmp0_l; \
    274  ST_SW2(tmp1_r, tmp1_l, (input + store_idx * 8), 4); \
    447  v4i32 sum0_r, sum0_l, sum1_r, sum1_l, tmp0_r, tmp0_l, tmp1_r, tmp1_l;  in hevc_idct_8x32_column_msa() local
    500  filter1, sum0_r, sum0_l, tmp1_r, tmp1_l);  in hevc_idct_8x32_column_msa()
    503  sum1_l = sum0_l - tmp1_l;  in hevc_idct_8x32_column_msa()
    505  sum0_l = sum0_l + tmp1_l;  in hevc_idct_8x32_column_msa()
    532  tmp1_l = tmp0_l;  in hevc_idct_8x32_column_msa()
    537  tmp1_l -= sum0_l;  in hevc_idct_8x32_column_msa()
    538  ST_SW2(tmp1_r, tmp1_l, (tmp_buf_ptr + (15 - i) * 8), 4);  in hevc_idct_8x32_column_msa()
    [all …]