
Searched refs:dst4 (Results 1 – 25 of 56) sorted by relevance


/third_party/skia/src/core/
SkBlitRow_D32.cpp
62 auto dst4 = ( __m128i*)dst; in blit_row_s32_blend() local
65 _mm_storeu_si128(dst4, SkPMLerp_SSE2(_mm_loadu_si128(src4), in blit_row_s32_blend()
66 _mm_loadu_si128(dst4), in blit_row_s32_blend()
69 dst4++; in blit_row_s32_blend()
74 dst = ( SkPMColor*)dst4; in blit_row_s32_blend()
127 auto dst4 = ( __m128i*)dst; in blit_row_s32a_blend() local
130 _mm_storeu_si128(dst4, SkBlendARGB32_SSE2(_mm_loadu_si128(src4), in blit_row_s32a_blend()
131 _mm_loadu_si128(dst4), in blit_row_s32a_blend()
134 dst4++; in blit_row_s32a_blend()
139 dst = ( SkPMColor*)dst4; in blit_row_s32a_blend()
Sk4px.h
117 dst4 = fn(Load4(src+4)); in MapSrc() local
119 dst4.store4(dst+4); in MapSrc()
147 dst4 = fn(Load4(dst+4), Load4(src+4)); in MapDstSrc() local
149 dst4.store4(dst+4); in MapDstSrc()
177 dst4 = fn(Load4(dst+4), Load4Alphas(a+4)); in MapDstAlpha() local
179 dst4.store4(dst+4); in MapDstAlpha()
209 dst4 = fn(Load4(dst+4), Load4(src+4), Load4Alphas(a+4)); in MapDstSrcAlpha() local
211 dst4.store4(dst+4); in MapDstSrcAlpha()
/third_party/ffmpeg/libavcodec/mips/
hevc_mc_uniw_msa.c
138 v8i16 dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7, offset_vec; in hevc_uniwgt_copy_6w_msa() local
158 ILVRL_B2_SH(zero, src2, dst4, dst5); in hevc_uniwgt_copy_6w_msa()
162 SLLI_4V(dst4, dst5, dst6, dst7, 6); in hevc_uniwgt_copy_6w_msa()
167 HEVC_UNIW_RND_CLIP4_MAX_SATU_H(dst4, dst5, dst6, dst7, weight_vec, in hevc_uniwgt_copy_6w_msa()
168 offset_vec, rnd_vec, dst4, dst5, dst6, in hevc_uniwgt_copy_6w_msa()
171 PCKEV_B2_UB(dst5, dst4, dst7, dst6, out2, out3); in hevc_uniwgt_copy_6w_msa()
200 v8i16 dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7, offset_vec; in hevc_uniwgt_copy_8w_msa() local
238 ILVRL_B2_SH(zero, src2, dst4, dst5); in hevc_uniwgt_copy_8w_msa()
240 SLLI_2V(dst4, dst5, 6); in hevc_uniwgt_copy_8w_msa()
244 HEVC_UNIW_RND_CLIP2_MAX_SATU_H(dst4, dst5, weight_vec, offset_vec, in hevc_uniwgt_copy_8w_msa()
[all …]
hevc_mc_bi_msa.c
143 v8i16 dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7; in hevc_bi_copy_6w_msa() local
158 ILVRL_B2_SH(zero, src2, dst4, dst5); in hevc_bi_copy_6w_msa()
161 SLLI_4V(dst4, dst5, dst6, dst7, 6); in hevc_bi_copy_6w_msa()
164 HEVC_BI_RND_CLIP4_MAX_SATU(in4, in5, in6, in7, dst4, dst5, dst6, dst7, in hevc_bi_copy_6w_msa()
165 7, dst4, dst5, dst6, dst7); in hevc_bi_copy_6w_msa()
167 PCKEV_B2_UB(dst5, dst4, dst7, dst6, out2, out3); in hevc_bi_copy_6w_msa()
194 v8i16 dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7; in hevc_bi_copy_8w_msa() local
226 ILVRL_B2_SH(zero, src2, dst4, dst5); in hevc_bi_copy_8w_msa()
229 SLLI_2V(dst4, dst5, 6); in hevc_bi_copy_8w_msa()
232 HEVC_BI_RND_CLIP2_MAX_SATU(in4, in5, dst4, dst5, 7, dst4, dst5); in hevc_bi_copy_8w_msa()
[all …]
hevcdsp_msa.c
567 v8i16 filt0, filt1, filt2, filt3, dst0, dst1, dst2, dst3, dst4, dst5; in hevc_hz_8t_12w_msa() local
596 dst4 = const_vec; in hevc_hz_8t_12w_msa()
603 DPADD_SB2_SH(vec4, vec5, filt0, filt0, dst4, dst5); in hevc_hz_8t_12w_msa()
609 DPADD_SB2_SH(vec4, vec5, filt1, filt1, dst4, dst5); in hevc_hz_8t_12w_msa()
615 DPADD_SB2_SH(vec4, vec5, filt2, filt2, dst4, dst5); in hevc_hz_8t_12w_msa()
621 DPADD_SB2_SH(vec4, vec5, filt3, filt3, dst4, dst5); in hevc_hz_8t_12w_msa()
623 res0 = __msa_copy_s_d((v2i64) dst4, 0); in hevc_hz_8t_12w_msa()
624 res1 = __msa_copy_s_d((v2i64) dst4, 1); in hevc_hz_8t_12w_msa()
699 v8i16 dst0, dst1, dst2, dst3, dst4, dst5; in hevc_hz_8t_24w_msa() local
729 dst4 = const_vec; in hevc_hz_8t_24w_msa()
[all …]
hevc_mc_biw_msa.c
244 v8i16 dst0, dst1, dst2, dst3, dst4, dst5; in hevc_biwgt_copy_8w_msa() local
277 ILVRL_B2_SH(zero, src2, dst4, dst5); in hevc_biwgt_copy_8w_msa()
280 SLLI_2V(dst4, dst5, 6); in hevc_biwgt_copy_8w_msa()
284 HEVC_BIW_RND_CLIP2_MAX_SATU(dst4, dst5, in4, in5, weight_vec, rnd_vec, in hevc_biwgt_copy_8w_msa()
285 offset_vec, dst4, dst5); in hevc_biwgt_copy_8w_msa()
286 PCKEV_B3_UB(dst1, dst0, dst3, dst2, dst5, dst4, out0, out1, out2); in hevc_biwgt_copy_8w_msa()
332 v8i16 dst0, dst1, dst2, dst3, dst4, dst5; in hevc_biwgt_copy_12w_msa() local
356 ILVR_B2_SH(zero, src0, zero, src1, dst4, dst5); in hevc_biwgt_copy_12w_msa()
358 dst4 <<= 6; in hevc_biwgt_copy_12w_msa()
363 HEVC_BIW_RND_CLIP2_MAX_SATU(dst4, dst5, in4, in5, weight_vec, rnd_vec, in hevc_biwgt_copy_12w_msa()
[all …]
vc1dsp_msa.c
145 v4i32 dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7; in ff_vc1_inv_trans_4x8_msa() local
204 LD_SW8(dest, linesize, dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7); in ff_vc1_inv_trans_4x8_msa()
206 zero_m, dst4, zero_m, dst5, zero_m, dst6, zero_m, dst7, in ff_vc1_inv_trans_4x8_msa()
207 dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7); in ff_vc1_inv_trans_4x8_msa()
210 ILVR_H4_SW(zero_m, dst4, zero_m, dst5, zero_m, dst6, zero_m, dst7, in ff_vc1_inv_trans_4x8_msa()
211 dst4, dst5, dst6, dst7); in ff_vc1_inv_trans_4x8_msa()
222 ADD4(in_r4, dst4, in_r5, dst5, in_r6, dst6, in_r7, dst7, in ff_vc1_inv_trans_4x8_msa()
235 v4i32 dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7; in ff_vc1_inv_trans_8x4_msa() local
283 dst4, dst5, dst6, dst7); in ff_vc1_inv_trans_8x4_msa()
306 ADD4(in4, dst4, in5, dst5, in6, dst6, in7, dst7, in4, in5, in6, in7); in ff_vc1_inv_trans_8x4_msa()
hevc_lpf_sao_msa.c
43 v16u8 dst0, dst1, dst2, dst3, dst4, dst5; in hevc_loopfilter_luma_hor_msa() local
175 dst4 = (v16u8) (temp2 + (v8i16) q1_src); in hevc_loopfilter_luma_hor_msa()
184 dst4 = __msa_bmz_v(dst4, (v16u8) q1_src, (v16u8) q_is_pcm_vec); in hevc_loopfilter_luma_hor_msa()
189 dst2 = (v16u8) __msa_pckev_b((v16i8) dst5, (v16i8) dst4); in hevc_loopfilter_luma_hor_msa()
192 PCKEV_B2_UB(p1_src, p2_src, q0_src, p0_src, dst3, dst4); in hevc_loopfilter_luma_hor_msa()
196 dst1 = __msa_bmz_v(dst1, dst4, (v16u8) cmp3); in hevc_loopfilter_luma_hor_msa()
277 dst4 = (v16u8) __msa_bmz_v((v16u8) delta2, (v16u8) q1_src, in hevc_loopfilter_luma_hor_msa()
280 PCKEV_B2_UB(dst2, dst1, dst4, dst3, dst0, dst1); in hevc_loopfilter_luma_hor_msa()
333 dst4 = (v16u8) (temp2 + (v8i16) q1_src); in hevc_loopfilter_luma_hor_msa()
342 dst4 = __msa_bmz_v(dst4, (v16u8) q1_src, (v16u8) q_is_pcm_vec); in hevc_loopfilter_luma_hor_msa()
[all …]
hevc_idct_msa.c
773 v16u8 dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7; in hevc_addblk_16x16_msa() local
778 LD_UB4(temp_dst, stride, dst4, dst5, dst6, dst7); in hevc_addblk_16x16_msa()
785 UNPCK_UB_SH(dst4, dst_r0, dst_l0); in hevc_addblk_16x16_msa()
800 LD_UB4(temp_dst, stride, dst4, dst5, dst6, dst7); in hevc_addblk_16x16_msa()
815 UNPCK_UB_SH(dst4, dst_r0, dst_l0); in hevc_addblk_16x16_msa()
840 v16u8 dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7; in hevc_addblk_32x32_msa() local
845 LD_UB2(temp_dst, 16, dst4, dst5); in hevc_addblk_32x32_msa()
854 UNPCK_UB_SH(dst4, dst_r0, dst_l0); in hevc_addblk_32x32_msa()
869 LD_UB2(temp_dst, 16, dst4, dst5); in hevc_addblk_32x32_msa()
887 UNPCK_UB_SH(dst4, dst_r0, dst_l0); in hevc_addblk_32x32_msa()
[all …]
h264idct_msa.c
122 v16i8 dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7; in avc_idct8_addblk_msa() local
227 LD_SB8(dst, dst_stride, dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7); in avc_idct8_addblk_msa()
230 ILVR_B4_SH(zeros, dst4, zeros, dst5, zeros, dst6, zeros, dst7, in avc_idct8_addblk_msa()
246 v16i8 dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7; in avc_idct8_dc_addblk_msa() local
256 LD_SB8(dst, dst_stride, dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7); in avc_idct8_dc_addblk_msa()
259 ILVR_B4_SH(zeros, dst4, zeros, dst5, zeros, dst6, zeros, dst7, in avc_idct8_dc_addblk_msa()
/third_party/ffmpeg/libavcodec/loongarch/
hevc_mc_uniw_lsx.c
54 __m128i dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7, dst8; in hevc_hv_8t_8x2_lsx() local
113 DUP2_ARG2(__lsx_vdp2_h_bu_b, vec0, filt0, vec4, filt0, dst4, dst5); in hevc_hv_8t_8x2_lsx()
115 DUP4_ARG3(__lsx_vdp2add_h_bu_b, dst4, vec1, filt1, dst5, vec5, filt1, in hevc_hv_8t_8x2_lsx()
116 dst6, vec9, filt1, dst4, vec2, filt2, dst4, dst5, dst6, dst4); in hevc_hv_8t_8x2_lsx()
118 dst4, vec3, filt3, dst5, vec7, filt3, dst5, dst6, dst4, dst5); in hevc_hv_8t_8x2_lsx()
121 DUP4_ARG2(__lsx_vilvl_h, dst1, dst0, dst3, dst2, dst5, dst4, dst2, in hevc_hv_8t_8x2_lsx()
123 DUP2_ARG2(__lsx_vilvl_h, dst4, dst3, dst6, dst5, dst43_r, dst65_r); in hevc_hv_8t_8x2_lsx()
124 DUP4_ARG2(__lsx_vilvh_h, dst1, dst0, dst3, dst2, dst5, dst4, dst2, in hevc_hv_8t_8x2_lsx()
126 DUP2_ARG2(__lsx_vilvh_h, dst4, dst3, dst6, dst5, dst43_l, dst65_l); in hevc_hv_8t_8x2_lsx()
hevc_mc_uni_lsx.c
358 __m128i dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7, dst8; in hevc_hv_8t_8x2_lsx() local
416 DUP2_ARG2(__lsx_vdp2_h_bu_b, vec0, filt0, vec4, filt0, dst4, dst5); in hevc_hv_8t_8x2_lsx()
418 DUP4_ARG3(__lsx_vdp2add_h_bu_b, dst4, vec1, filt1, dst5, vec5, filt1, in hevc_hv_8t_8x2_lsx()
419 dst6, vec9, filt1, dst4, vec2, filt2, dst4, dst5, dst6, dst4); in hevc_hv_8t_8x2_lsx()
421 dst4, vec3, filt3, dst5, vec7, filt3, dst5, dst6, dst4, dst5); in hevc_hv_8t_8x2_lsx()
423 DUP4_ARG2(__lsx_vilvl_h, dst1, dst0, dst3, dst2, dst5, dst4, dst2, in hevc_hv_8t_8x2_lsx()
425 DUP4_ARG2(__lsx_vilvh_h, dst1, dst0, dst3, dst2, dst5, dst4, dst2, in hevc_hv_8t_8x2_lsx()
427 DUP2_ARG2(__lsx_vilvl_h, dst4, dst3, dst6, dst5, dst43_r, dst65_r); in hevc_hv_8t_8x2_lsx()
428 DUP2_ARG2(__lsx_vilvh_h, dst4, dst3, dst6, dst5, dst43_l, dst65_l); in hevc_hv_8t_8x2_lsx()
762 __m128i dst0, dst1, dst2, dst3, dst4; in hevc_hv_4t_8x2_lsx() local
[all …]
hevcdsp_lsx.c
761 __m128i filt0, filt1, filt2, filt3, dst0, dst1, dst2, dst3, dst4, dst5; in hevc_hz_8t_12w_lsx() local
794 DUP2_ARG2(__lsx_vdp2_h_bu_b, vec4, filt0, vec5, filt0, dst4, dst5); in hevc_hz_8t_12w_lsx()
803 DUP2_ARG3(__lsx_vdp2add_h_bu_b, dst4, vec4, filt1, dst5, vec5, filt1, in hevc_hz_8t_12w_lsx()
804 dst4, dst5); in hevc_hz_8t_12w_lsx()
813 DUP2_ARG3(__lsx_vdp2add_h_bu_b, dst4, vec4, filt2, dst5, vec5, filt2, in hevc_hz_8t_12w_lsx()
814 dst4, dst5); in hevc_hz_8t_12w_lsx()
823 DUP2_ARG3(__lsx_vdp2add_h_bu_b, dst4, vec4, filt3, dst5, vec5, filt3, in hevc_hz_8t_12w_lsx()
824 dst4, dst5); in hevc_hz_8t_12w_lsx()
827 __lsx_vstelm_d(dst4, dst, 16, 0); in hevc_hz_8t_12w_lsx()
830 __lsx_vstelm_d(dst4, dst, 16, 1); in hevc_hz_8t_12w_lsx()
[all …]
hevc_mc_bi_lsx.c
149 __m128i dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7; in hevc_bi_copy_6w_lsx() local
176 dst0, dst2, dst4, dst6); in hevc_bi_copy_6w_lsx()
183 out2 = hevc_bi_rnd_clip(in4, dst4, in5, dst5); in hevc_bi_copy_6w_lsx()
245 __m128i dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7; in hevc_bi_copy_8w_lsx() local
262 dst0, dst2, dst4, dst6); in hevc_bi_copy_8w_lsx()
279 out2 = hevc_bi_rnd_clip(in4, dst4, in5, dst5); in hevc_bi_copy_8w_lsx()
329 __m128i dst0, dst1, dst2, dst3, dst4, dst5; in hevc_bi_copy_12w_lsx() local
352 DUP2_ARG2(__lsx_vsllwil_hu_bu, src0, 6, src1, 6, dst4, dst5) in hevc_bi_copy_12w_lsx()
355 out2 = hevc_bi_rnd_clip(in4, dst4, in5, dst5); in hevc_bi_copy_12w_lsx()
882 __m128i dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7; in hevc_hv_8t_8multx1mult_lsx() local
[all …]
h264idct_lasx.c
87 __m256i dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7; in ff_h264_idct8_addblk_lasx() local
208 dst, dst_stride_3x, dst4, dst5, dst6, dst7); in ff_h264_idct8_addblk_lasx()
212 DUP4_ARG2(__lasx_xvilvl_b, zero, dst4, zero, dst5, zero, dst6, zero, dst7, in ff_h264_idct8_addblk_lasx()
213 dst4, dst5, dst6, dst7); in ff_h264_idct8_addblk_lasx()
215 dst4, 0x20, dst7, dst6, 0x20, dst0, dst1, dst2, dst3); in ff_h264_idct8_addblk_lasx()
266 __m256i dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7; in ff_h264_idct8_dc_addblk_lasx() local
278 dst, dst_stride_3x, dst4, dst5, dst6, dst7); in ff_h264_idct8_dc_addblk_lasx()
282 DUP4_ARG1(__lasx_vext2xv_hu_bu, dst4, dst5, dst6, dst7, in ff_h264_idct8_dc_addblk_lasx()
283 dst4, dst5, dst6, dst7); in ff_h264_idct8_dc_addblk_lasx()
285 dst4, 0x20, dst7, dst6, 0x20, dst0, dst1, dst2, dst3); in ff_h264_idct8_dc_addblk_lasx()
hevc_lpf_sao_lsx.c
47 __m128i dst0, dst1, dst2, dst3, dst4, dst5; in ff_hevc_loop_filter_luma_h_8_lsx() local
170 dst4 = __lsx_vadd_h(temp2, q1_src); in ff_hevc_loop_filter_luma_h_8_lsx()
181 DUP2_ARG3(__lsx_vbitsel_v, dst3, q0_src, q_is_pcm_vec, dst4, in ff_hevc_loop_filter_luma_h_8_lsx()
182 q1_src, q_is_pcm_vec, dst3, dst4); in ff_hevc_loop_filter_luma_h_8_lsx()
187 dst2 = __lsx_vpickev_b(dst5, dst4); in ff_hevc_loop_filter_luma_h_8_lsx()
191 dst3, dst4); in ff_hevc_loop_filter_luma_h_8_lsx()
195 DUP2_ARG3(__lsx_vbitsel_v, dst0, dst3, cmp3, dst1, dst4, cmp3, in ff_hevc_loop_filter_luma_h_8_lsx()
266 q1_src, abs_delta0, dst1, dst2, dst3, dst4); in ff_hevc_loop_filter_luma_h_8_lsx()
268 DUP2_ARG2(__lsx_vpickev_b, dst2, dst1, dst4, dst3, dst0, dst1); in ff_hevc_loop_filter_luma_h_8_lsx()
331 dst4 = __lsx_vadd_h(temp2, q1_src); in ff_hevc_loop_filter_luma_h_8_lsx()
[all …]
/third_party/openh264/codec/common/loongarch/
mc_horver_lsx.c
398 __m128i dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7; in McHorVer22WidthEq8_lsx() local
434 dst4 = __lsx_vbsrl_v(dst3, 2); in McHorVer22WidthEq8_lsx()
438 LSX_TRANSPOSE8x8_H(dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7, in McHorVer22WidthEq8_lsx()
439 dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7); in McHorVer22WidthEq8_lsx()
448 dst4, 0, in McHorVer22WidthEq8_lsx()
460 dst4, in McHorVer22WidthEq8_lsx()
486 __m128i dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7; in McHorVer22WidthEq4_lsx() local
521 LSX_TRANSPOSE8x8_H(dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7, in McHorVer22WidthEq4_lsx()
522 dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7); in McHorVer22WidthEq4_lsx()
531 dst4, 0, in McHorVer22WidthEq4_lsx()
[all …]
/third_party/mesa3d/src/gallium/drivers/softpipe/
sp_quad_blend.c
157 uint *dst4 = (uint *) dst; in logicop_quad() local
184 res4[j] = ~(src4[j] | dst4[j]); in logicop_quad()
188 res4[j] = ~src4[j] & dst4[j]; in logicop_quad()
196 res4[j] = src4[j] & ~dst4[j]; in logicop_quad()
200 res4[j] = ~dst4[j]; in logicop_quad()
204 res4[j] = dst4[j] ^ src4[j]; in logicop_quad()
208 res4[j] = ~(src4[j] & dst4[j]); in logicop_quad()
212 res4[j] = src4[j] & dst4[j]; in logicop_quad()
216 res4[j] = ~(src4[j] ^ dst4[j]); in logicop_quad()
220 res4[j] = dst4[j]; in logicop_quad()
[all …]
/third_party/skia/third_party/externals/dng_sdk/source/
dng_image.cpp
382 dng_rect dst4 (dng_rect (splitV, in GetRepeat() local
387 if (dst4.NotEmpty ()) in GetRepeat()
392 temp.fArea = dst4 + (srcArea.TL () - in GetRepeat()
396 temp.fData = buffer.DirtyPixel (dst4.t, in GetRepeat()
397 dst4.l, in GetRepeat()
/third_party/ffmpeg/libavcodec/aarch64/
vp9mc_16bpp_neon.S
123 .macro extmlal dst1, dst2, dst3, dst4, dst5, dst6, dst7, dst8, src1, src2, src3, src4, src5, src6, …
139 smlal2 \dst4\().4s, v21.8h, v0.h[\offset]
411 .macro convolve8 dst1, dst2, dst3, dst4, src1, src2, src3, src4, src5, src6, src7, src8, src9
415 smull2 \dst4\().4s, \src2\().8h, v0.h[0]
419 smlal2 \dst4\().4s, \src3\().8h, v0.h[1]
423 smlal2 \dst4\().4s, \src4\().8h, v0.h[2]
427 smlal2 \dst4\().4s, \src5\().8h, v0.h[3]
431 smlal2 \dst4\().4s, \src6\().8h, v0.h[4]
435 smlal2 \dst4\().4s, \src7\().8h, v0.h[5]
439 smlal2 \dst4\().4s, \src8\().8h, v0.h[6]
[all …]
vp9mc_neon.S
165 .macro extmla dst1, dst2, dst3, dst4, src1, src2, src3, src4, src5, src6, offset, size
174 mla \dst4\().8h, v23.8h, v0.h[\offset]
185 .macro extmulqadd dst1, dst2, dst3, dst4, src1, src2, src3, src4, src5, src6, offset, size
210 sqadd \dst4\().8h, \dst4\().8h, v23.8h
465 .macro loadl dst1, dst2, dst3, dst4 argument
469 .ifnb \dst4
475 .ifnb \dst4
476 uxtl \dst4\().8h, v4.8b
/third_party/skia/gm/
poly2poly.cpp
113 const int dst4[] = { 0, 0, 96, 0, 64, 96, 0, 64 }; in onDraw() local
114 doDraw(canvas, font, &paint, src4, dst4, 4); in onDraw()
/third_party/openh264/codec/encoder/core/loongarch/
dct_lasx.c
127 __m256i tmp0, tmp1, tmp2, tmp3, dst0, dst1, dst2, dst3, dst4, in WelsDctFourT4_lasx() local
193 dst4, dst5, dst6, dst7); in WelsDctFourT4_lasx()
194 LASX_TRANSPOSE8x8_H(dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7, in WelsDctFourT4_lasx()
195 dst0, dst1, dst2, dst3, dst4, dst5, dst6, dst7); in WelsDctFourT4_lasx()
197 dst4, dst0, 0x20, in WelsDctFourT4_lasx()
/third_party/skia/samplecode/
SamplePolyToPoly.cpp
142 const int dst4[] = { 0, 0, 96, 0, 64, 96, 0, 64 }; in onDrawContent() local
143 doDraw(canvas, &paint, font, src4, dst4, 4); in onDrawContent()
/third_party/ffmpeg/libavcodec/arm/
vp9mc_16bpp_neon.S
165 @ into dst1 and dst3 (or dst1-dst2 and dst3-dst4 for size >= 8)
166 .macro extmlal dst1, dst2, dst3, dst4, src1, src2, src3, src4, offset, size
173 vmlal_lane \dst4, d31, \offset
