Lines matching refs:v16i8
41 src0 = (v16u8)__msa_ld_b((v16i8*)src_argb, 0); in ScaleARGBRowDown2_MSA()
42 src1 = (v16u8)__msa_ld_b((v16i8*)src_argb, 16); in ScaleARGBRowDown2_MSA()
59 src0 = (v16u8)__msa_ld_b((v16i8*)src_argb, 0); in ScaleARGBRowDown2Linear_MSA()
60 src1 = (v16u8)__msa_ld_b((v16i8*)src_argb, 16); in ScaleARGBRowDown2Linear_MSA()
79 v16i8 shuffler = {0, 4, 1, 5, 2, 6, 3, 7, 8, 12, 9, 13, 10, 14, 11, 15}; in ScaleARGBRowDown2Box_MSA()
82 src0 = (v16u8)__msa_ld_b((v16i8*)s, 0); in ScaleARGBRowDown2Box_MSA()
83 src1 = (v16u8)__msa_ld_b((v16i8*)s, 16); in ScaleARGBRowDown2Box_MSA()
84 src2 = (v16u8)__msa_ld_b((v16i8*)t, 0); in ScaleARGBRowDown2Box_MSA()
85 src3 = (v16u8)__msa_ld_b((v16i8*)t, 16); in ScaleARGBRowDown2Box_MSA()
86 vec0 = (v16u8)__msa_vshf_b(shuffler, (v16i8)src0, (v16i8)src0); in ScaleARGBRowDown2Box_MSA()
87 vec1 = (v16u8)__msa_vshf_b(shuffler, (v16i8)src1, (v16i8)src1); in ScaleARGBRowDown2Box_MSA()
88 vec2 = (v16u8)__msa_vshf_b(shuffler, (v16i8)src2, (v16i8)src2); in ScaleARGBRowDown2Box_MSA()
89 vec3 = (v16u8)__msa_vshf_b(shuffler, (v16i8)src3, (v16i8)src3); in ScaleARGBRowDown2Box_MSA()
98 dst0 = (v16u8)__msa_pckev_b((v16i8)reg1, (v16i8)reg0); in ScaleARGBRowDown2Box_MSA()
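
Note: in ScaleARGBRowDown2Box_MSA the shuffler gathers the same channel from two horizontally adjacent ARGB pixels, so the 16-bit accumulate and rounding steps that fall outside these matches can form a per-channel 2x2 box average. A minimal scalar sketch of that result; the function name and the (sum + 2) >> 2 rounding are assumptions for illustration:

    #include <stdint.h>

    /* Illustrative sketch, not libyuv code: average each 2x2 ARGB block per channel. */
    static void ARGBDown2BoxSketch(const uint8_t* top, const uint8_t* bot,
                                   uint8_t* dst, int dst_width) {
      int x, c;
      for (x = 0; x < dst_width; ++x) {
        for (c = 0; c < 4; ++c) {  /* B, G, R, A */
          dst[4 * x + c] = (uint8_t)((top[8 * x + c] + top[8 * x + c + 4] +
                                      bot[8 * x + c] + bot[8 * x + c + 4] + 2) >> 2);
        }
      }
    }
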
161 vec0 = (v16u8)__msa_ilvr_b((v16i8)src2, (v16i8)src0); in ScaleARGBRowDownEvenBox_MSA()
162 vec1 = (v16u8)__msa_ilvr_b((v16i8)src3, (v16i8)src1); in ScaleARGBRowDownEvenBox_MSA()
163 vec2 = (v16u8)__msa_ilvl_b((v16i8)src2, (v16i8)src0); in ScaleARGBRowDownEvenBox_MSA()
164 vec3 = (v16u8)__msa_ilvl_b((v16i8)src3, (v16i8)src1); in ScaleARGBRowDownEvenBox_MSA()
177 dst0 = (v16u8)__msa_pckev_b((v16i8)reg5, (v16i8)reg4); in ScaleARGBRowDownEvenBox_MSA()
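
Note: ScaleARGBRowDownEvenBox_MSA interleaves pixels from two rows and reduces the same kind of 2x2 ARGB block, but at column positions advanced by a step. A short scalar sketch; the name, the src_stepx handling and the rounding are assumptions:

    #include <stdint.h>

    /* Illustrative sketch: 2x2 ARGB box average sampled every src_stepx source pixels. */
    static void ARGBDownEvenBoxSketch(const uint8_t* top, const uint8_t* bot,
                                      int src_stepx, uint8_t* dst, int dst_width) {
      int x, c;
      for (x = 0; x < dst_width; ++x) {
        for (c = 0; c < 4; ++c) {
          dst[4 * x + c] =
              (uint8_t)((top[c] + top[c + 4] + bot[c] + bot[c + 4] + 2) >> 2);
        }
        top += src_stepx * 4;  /* advance to the next sampled source position */
        bot += src_stepx * 4;
      }
    }
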
194 src0 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 0); in ScaleRowDown2_MSA()
195 src1 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 16); in ScaleRowDown2_MSA()
196 src2 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 32); in ScaleRowDown2_MSA()
197 src3 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 48); in ScaleRowDown2_MSA()
198 dst0 = (v16u8)__msa_pckod_b((v16i8)src1, (v16i8)src0); in ScaleRowDown2_MSA()
199 dst1 = (v16u8)__msa_pckod_b((v16i8)src3, (v16i8)src2); in ScaleRowDown2_MSA()
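
Note: __msa_pckod_b keeps the odd-indexed byte of each pair, so the ScaleRowDown2_MSA lines above amount to taking every second source pixel. A minimal scalar sketch (the function name is illustrative):

    #include <stdint.h>

    /* Illustrative sketch: keep every second pixel (the odd one of each pair). */
    static void Down2Sketch(const uint8_t* src, uint8_t* dst, int dst_width) {
      int x;
      for (x = 0; x < dst_width; ++x) {
        dst[x] = src[2 * x + 1];
      }
    }
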
215 src0 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 0); in ScaleRowDown2Linear_MSA()
216 src1 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 16); in ScaleRowDown2Linear_MSA()
217 src2 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 32); in ScaleRowDown2Linear_MSA()
218 src3 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 48); in ScaleRowDown2Linear_MSA()
219 vec0 = (v16u8)__msa_pckev_b((v16i8)src1, (v16i8)src0); in ScaleRowDown2Linear_MSA()
220 vec2 = (v16u8)__msa_pckev_b((v16i8)src3, (v16i8)src2); in ScaleRowDown2Linear_MSA()
221 vec1 = (v16u8)__msa_pckod_b((v16i8)src1, (v16i8)src0); in ScaleRowDown2Linear_MSA()
222 vec3 = (v16u8)__msa_pckod_b((v16i8)src3, (v16i8)src2); in ScaleRowDown2Linear_MSA()
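
Note: here __msa_pckev_b/__msa_pckod_b split each pixel pair into its even and odd member so the two can be averaged in the lines not shown here. Scalar sketch, assuming the usual (a + b + 1) >> 1 rounded average:

    #include <stdint.h>

    /* Illustrative sketch: average each horizontal pair of pixels. */
    static void Down2LinearSketch(const uint8_t* src, uint8_t* dst, int dst_width) {
      int x;
      for (x = 0; x < dst_width; ++x) {
        dst[x] = (uint8_t)((src[2 * x] + src[2 * x + 1] + 1) >> 1);
      }
    }
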
242 src0 = (v16u8)__msa_ld_b((v16i8*)s, 0); in ScaleRowDown2Box_MSA()
243 src1 = (v16u8)__msa_ld_b((v16i8*)s, 16); in ScaleRowDown2Box_MSA()
244 src2 = (v16u8)__msa_ld_b((v16i8*)s, 32); in ScaleRowDown2Box_MSA()
245 src3 = (v16u8)__msa_ld_b((v16i8*)s, 48); in ScaleRowDown2Box_MSA()
246 src4 = (v16u8)__msa_ld_b((v16i8*)t, 0); in ScaleRowDown2Box_MSA()
247 src5 = (v16u8)__msa_ld_b((v16i8*)t, 16); in ScaleRowDown2Box_MSA()
248 src6 = (v16u8)__msa_ld_b((v16i8*)t, 32); in ScaleRowDown2Box_MSA()
249 src7 = (v16u8)__msa_ld_b((v16i8*)t, 48); in ScaleRowDown2Box_MSA()
262 dst0 = (v16u8)__msa_pckev_b((v16i8)vec1, (v16i8)vec0); in ScaleRowDown2Box_MSA()
263 dst1 = (v16u8)__msa_pckev_b((v16i8)vec3, (v16i8)vec2); in ScaleRowDown2Box_MSA()
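
Note: ScaleRowDown2Box_MSA loads two rows (s and t) and reduces each 2x2 block to one pixel. Scalar sketch; the name and the (sum + 2) >> 2 rounding are assumptions:

    #include <stdint.h>

    /* Illustrative sketch: average each 2x2 block across the two source rows. */
    static void Down2BoxSketch(const uint8_t* s, const uint8_t* t,
                               uint8_t* dst, int dst_width) {
      int x;
      for (x = 0; x < dst_width; ++x) {
        dst[x] = (uint8_t)((s[2 * x] + s[2 * x + 1] +
                            t[2 * x] + t[2 * x + 1] + 2) >> 2);
      }
    }
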
280 src0 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 0); in ScaleRowDown4_MSA()
281 src1 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 16); in ScaleRowDown4_MSA()
282 src2 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 32); in ScaleRowDown4_MSA()
283 src3 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 48); in ScaleRowDown4_MSA()
284 vec0 = (v16u8)__msa_pckev_b((v16i8)src1, (v16i8)src0); in ScaleRowDown4_MSA()
285 vec1 = (v16u8)__msa_pckev_b((v16i8)src3, (v16i8)src2); in ScaleRowDown4_MSA()
286 dst0 = (v16u8)__msa_pckod_b((v16i8)vec1, (v16i8)vec0); in ScaleRowDown4_MSA()
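
Note: the pckev/pckod pair above selects source bytes 2, 6, 10, ..., so ScaleRowDown4_MSA is equivalent to keeping pixel index 2 of every group of four. Scalar sketch (illustrative name):

    #include <stdint.h>

    /* Illustrative sketch: keep one pixel (index 2) out of every four. */
    static void Down4Sketch(const uint8_t* src, uint8_t* dst, int dst_width) {
      int x;
      for (x = 0; x < dst_width; ++x) {
        dst[x] = src[4 * x + 2];
      }
    }
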
307 src0 = (v16u8)__msa_ld_b((v16i8*)s, 0); in ScaleRowDown4Box_MSA()
308 src1 = (v16u8)__msa_ld_b((v16i8*)s, 16); in ScaleRowDown4Box_MSA()
309 src2 = (v16u8)__msa_ld_b((v16i8*)s, 32); in ScaleRowDown4Box_MSA()
310 src3 = (v16u8)__msa_ld_b((v16i8*)s, 48); in ScaleRowDown4Box_MSA()
311 src4 = (v16u8)__msa_ld_b((v16i8*)t0, 0); in ScaleRowDown4Box_MSA()
312 src5 = (v16u8)__msa_ld_b((v16i8*)t0, 16); in ScaleRowDown4Box_MSA()
313 src6 = (v16u8)__msa_ld_b((v16i8*)t0, 32); in ScaleRowDown4Box_MSA()
314 src7 = (v16u8)__msa_ld_b((v16i8*)t0, 48); in ScaleRowDown4Box_MSA()
323 src0 = (v16u8)__msa_ld_b((v16i8*)t1, 0); in ScaleRowDown4Box_MSA()
324 src1 = (v16u8)__msa_ld_b((v16i8*)t1, 16); in ScaleRowDown4Box_MSA()
325 src2 = (v16u8)__msa_ld_b((v16i8*)t1, 32); in ScaleRowDown4Box_MSA()
326 src3 = (v16u8)__msa_ld_b((v16i8*)t1, 48); in ScaleRowDown4Box_MSA()
327 src4 = (v16u8)__msa_ld_b((v16i8*)t2, 0); in ScaleRowDown4Box_MSA()
328 src5 = (v16u8)__msa_ld_b((v16i8*)t2, 16); in ScaleRowDown4Box_MSA()
329 src6 = (v16u8)__msa_ld_b((v16i8*)t2, 32); in ScaleRowDown4Box_MSA()
330 src7 = (v16u8)__msa_ld_b((v16i8*)t2, 48); in ScaleRowDown4Box_MSA()
349 dst0 = (v16u8)__msa_pckev_b((v16i8)vec1, (v16i8)vec0); in ScaleRowDown4Box_MSA()
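
Note: ScaleRowDown4Box_MSA accumulates four rows (s, t0, t1, t2) and reduces each 4x4 block to one pixel. Scalar sketch; the name and the (sum + 8) >> 4 rounding are assumptions:

    #include <stdint.h>

    /* Illustrative sketch: average each 4x4 block across the four source rows. */
    static void Down4BoxSketch(const uint8_t* s, const uint8_t* t0,
                               const uint8_t* t1, const uint8_t* t2,
                               uint8_t* dst, int dst_width) {
      int x, i, sum;
      for (x = 0; x < dst_width; ++x) {
        sum = 0;
        for (i = 0; i < 4; ++i) {
          sum += s[4 * x + i] + t0[4 * x + i] + t1[4 * x + i] + t2[4 * x + i];
        }
        dst[x] = (uint8_t)((sum + 8) >> 4);
      }
    }
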
367 v16i8 mask = {0, 3, 6, 8, 11, 14, 16, 19, 22, 24, 27, 30, 0, 0, 0, 0}; in ScaleRowDown38_MSA()
374 src0 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 0); in ScaleRowDown38_MSA()
375 src1 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 16); in ScaleRowDown38_MSA()
376 vec0 = (v16u8)__msa_vshf_b(mask, (v16i8)src1, (v16i8)src0); in ScaleRowDown38_MSA()
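
Note: the mask in ScaleRowDown38_MSA picks bytes 0, 3 and 6 of each group of eight, so eight source pixels become three (the point-sampled 3/8 path). Scalar sketch (illustrative name):

    #include <stdint.h>

    /* Illustrative sketch: 3/8 point sampling, 8 source pixels -> 3 outputs. */
    static void Down38Sketch(const uint8_t* src, uint8_t* dst, int dst_width) {
      int x;
      for (x = 0; x < dst_width; x += 3) {
        dst[x + 0] = src[0];
        dst[x + 1] = src[3];
        dst[x + 2] = src[6];
        src += 8;
      }
    }
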
400 v16i8 dst_mask = {0, 2, 16, 4, 6, 18, 8, 10, 20, 12, 14, 22, 0, 0, 0, 0}; in ScaleRowDown38_2_Box_MSA()
408 src0 = (v16u8)__msa_ld_b((v16i8*)s, 0); in ScaleRowDown38_2_Box_MSA()
409 src1 = (v16u8)__msa_ld_b((v16i8*)s, 16); in ScaleRowDown38_2_Box_MSA()
410 src2 = (v16u8)__msa_ld_b((v16i8*)t, 0); in ScaleRowDown38_2_Box_MSA()
411 src3 = (v16u8)__msa_ld_b((v16i8*)t, 16); in ScaleRowDown38_2_Box_MSA()
412 vec0 = (v8u16)__msa_ilvr_b((v16i8)src2, (v16i8)src0); in ScaleRowDown38_2_Box_MSA()
413 vec1 = (v8u16)__msa_ilvl_b((v16i8)src2, (v16i8)src0); in ScaleRowDown38_2_Box_MSA()
414 vec2 = (v8u16)__msa_ilvr_b((v16i8)src3, (v16i8)src1); in ScaleRowDown38_2_Box_MSA()
415 vec3 = (v8u16)__msa_ilvl_b((v16i8)src3, (v16i8)src1); in ScaleRowDown38_2_Box_MSA()
444 out = (v16u8)__msa_vshf_b(dst_mask, (v16i8)vec1, (v16i8)vec0); in ScaleRowDown38_2_Box_MSA()
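
Note: ScaleRowDown38_2_Box_MSA interleaves the two rows so vertically adjacent pixels can be summed as 16-bit values, and dst_mask reassembles the packed outputs. A scalar sketch that approximates the result as plain rounded block averages; the grouping (two 3x2 blocks, then one 2x2 block per eight columns) follows the shown layout, but the exact fixed-point rounding is not reproduced:

    #include <stdint.h>

    /* Illustrative sketch: approximate 3/8 box scaling over two rows. */
    static void Down38TwoRowBoxSketch(const uint8_t* s, const uint8_t* t,
                                      uint8_t* dst, int dst_width) {
      int x;
      for (x = 0; x < dst_width; x += 3) {
        dst[x + 0] = (uint8_t)((s[0] + s[1] + s[2] + t[0] + t[1] + t[2] + 3) / 6);
        dst[x + 1] = (uint8_t)((s[3] + s[4] + s[5] + t[3] + t[4] + t[5] + 3) / 6);
        dst[x + 2] = (uint8_t)((s[6] + s[7] + t[6] + t[7] + 2) / 4);
        s += 8;
        t += 8;
      }
    }
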
470 v16i8 dst_mask = {0, 2, 16, 4, 6, 18, 8, 10, 20, 12, 14, 22, 0, 0, 0, 0}; in ScaleRowDown38_3_Box_MSA()
478 src0 = (v16u8)__msa_ld_b((v16i8*)s, 0); in ScaleRowDown38_3_Box_MSA()
479 src1 = (v16u8)__msa_ld_b((v16i8*)s, 16); in ScaleRowDown38_3_Box_MSA()
480 src2 = (v16u8)__msa_ld_b((v16i8*)t0, 0); in ScaleRowDown38_3_Box_MSA()
481 src3 = (v16u8)__msa_ld_b((v16i8*)t0, 16); in ScaleRowDown38_3_Box_MSA()
482 src4 = (v16u8)__msa_ld_b((v16i8*)t1, 0); in ScaleRowDown38_3_Box_MSA()
483 src5 = (v16u8)__msa_ld_b((v16i8*)t1, 16); in ScaleRowDown38_3_Box_MSA()
484 vec0 = (v8u16)__msa_ilvr_b((v16i8)src2, (v16i8)src0); in ScaleRowDown38_3_Box_MSA()
485 vec1 = (v8u16)__msa_ilvl_b((v16i8)src2, (v16i8)src0); in ScaleRowDown38_3_Box_MSA()
486 vec2 = (v8u16)__msa_ilvr_b((v16i8)src3, (v16i8)src1); in ScaleRowDown38_3_Box_MSA()
487 vec3 = (v8u16)__msa_ilvl_b((v16i8)src3, (v16i8)src1); in ScaleRowDown38_3_Box_MSA()
488 vec4 = (v8u16)__msa_ilvr_b((v16i8)zero, (v16i8)src4); in ScaleRowDown38_3_Box_MSA()
489 vec5 = (v8u16)__msa_ilvl_b((v16i8)zero, (v16i8)src4); in ScaleRowDown38_3_Box_MSA()
490 vec6 = (v8u16)__msa_ilvr_b((v16i8)zero, (v16i8)src5); in ScaleRowDown38_3_Box_MSA()
491 vec7 = (v8u16)__msa_ilvl_b((v16i8)zero, (v16i8)src5); in ScaleRowDown38_3_Box_MSA()
524 out = (v16u8)__msa_vshf_b(dst_mask, (v16i8)vec1, (v16i8)vec0); in ScaleRowDown38_3_Box_MSA()
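
Note: the three-row variant follows the same layout with rows s, t0 and t1 (the zero interleaves widen the third row to 16 bits). A heavily hedged scalar sketch: per eight source columns, the first two outputs approximate the mean of a 3x3 block and the third a 2x3 block; the exact fixed-point rounding is not reproduced:

    #include <stdint.h>

    /* Illustrative sketch: approximate 3/8 box scaling over three rows. */
    static void Down38ThreeRowBoxSketch(const uint8_t* s, const uint8_t* t0,
                                        const uint8_t* t1, uint8_t* dst,
                                        int dst_width) {
      int x;
      for (x = 0; x < dst_width; x += 3) {
        dst[x + 0] = (uint8_t)((s[0] + s[1] + s[2] + t0[0] + t0[1] + t0[2] +
                                t1[0] + t1[1] + t1[2] + 4) / 9);
        dst[x + 1] = (uint8_t)((s[3] + s[4] + s[5] + t0[3] + t0[4] + t0[5] +
                                t1[3] + t1[4] + t1[5] + 4) / 9);
        dst[x + 2] = (uint8_t)((s[6] + s[7] + t0[6] + t0[7] +
                                t1[6] + t1[7] + 3) / 6);
        s += 8;
        t0 += 8;
        t1 += 8;
      }
    }
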
540 v16i8 zero = {0}; in ScaleAddRow_MSA()
548 dst0 += (v8u16)__msa_ilvr_b(zero, (v16i8)src0); in ScaleAddRow_MSA()
549 dst1 += (v8u16)__msa_ilvl_b(zero, (v16i8)src0); in ScaleAddRow_MSA()
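
Note: interleaving a byte vector with the zero vector (ilvr_b/ilvl_b against zero) zero-extends each u8 to a u16 lane, so ScaleAddRow_MSA accumulates a row of bytes into a 16-bit sum row. Scalar sketch (illustrative name):

    #include <stdint.h>

    /* Illustrative sketch: accumulate an 8-bit source row into a 16-bit running sum. */
    static void AddRowSketch(const uint8_t* src, uint16_t* dst, int src_width) {
      int x;
      for (x = 0; x < src_width; ++x) {
        dst[x] = (uint16_t)(dst[x] + src[x]);
      }
    }
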
627 dst0 = (v16u8)__msa_pckev_b((v16i8)reg1, (v16i8)reg0); in ScaleFilterCols_MSA()
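
Note: ScaleFilterCols_MSA does horizontal bilinear filtering in 16.16 fixed point; the pckev_b above packs the interpolated 16-bit results back down to bytes. A scalar sketch of the blend (the exact rounding used by libyuv may differ); ScaleARGBFilterCols_MSA below applies the same blend per ARGB channel:

    #include <stdint.h>

    /* Illustrative sketch: 16.16 fixed-point horizontal bilinear scaling.     */
    /* Assumes src has one extra readable pixel past the last sampled index.   */
    static void FilterColsSketch(uint8_t* dst, const uint8_t* src,
                                 int dst_width, int x, int dx) {
      int j;
      for (j = 0; j < dst_width; ++j) {
        int xi = x >> 16;
        int frac = x & 0xffff;
        dst[j] = (uint8_t)((src[xi] * (65536 - frac) + src[xi + 1] * frac) >> 16);
        x += dx;
      }
    }
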
691 reg0 = (v16u8)__msa_shf_b((v16i8)reg0, 0); in ScaleARGBFilterCols_MSA()
692 reg1 = (v16u8)__msa_shf_b((v16i8)reg1, 0); in ScaleARGBFilterCols_MSA()
695 mult0 = (v16u8)__msa_ilvr_b((v16i8)reg0, (v16i8)reg2); in ScaleARGBFilterCols_MSA()
696 mult1 = (v16u8)__msa_ilvl_b((v16i8)reg0, (v16i8)reg2); in ScaleARGBFilterCols_MSA()
697 mult2 = (v16u8)__msa_ilvr_b((v16i8)reg1, (v16i8)reg3); in ScaleARGBFilterCols_MSA()
698 mult3 = (v16u8)__msa_ilvl_b((v16i8)reg1, (v16i8)reg3); in ScaleARGBFilterCols_MSA()
705 reg4 = (v16u8)__msa_ilvr_b((v16i8)src2, (v16i8)src0); in ScaleARGBFilterCols_MSA()
706 reg5 = (v16u8)__msa_ilvl_b((v16i8)src2, (v16i8)src0); in ScaleARGBFilterCols_MSA()
707 reg6 = (v16u8)__msa_ilvr_b((v16i8)src3, (v16i8)src1); in ScaleARGBFilterCols_MSA()
708 reg7 = (v16u8)__msa_ilvl_b((v16i8)src3, (v16i8)src1); in ScaleARGBFilterCols_MSA()
717 dst0 = (v16u8)__msa_pckev_b((v16i8)tmp1, (v16i8)tmp0); in ScaleARGBFilterCols_MSA()
718 dst1 = (v16u8)__msa_pckev_b((v16i8)tmp3, (v16i8)tmp2); in ScaleARGBFilterCols_MSA()
733 v16i8 mask0 = {0, 1, 3, 4, 5, 7, 8, 9, 11, 12, 13, 15, 16, 17, 19, 20}; in ScaleRowDown34_MSA()
734 v16i8 mask1 = {5, 7, 8, 9, 11, 12, 13, 15, 16, 17, 19, 20, 21, 23, 24, 25}; in ScaleRowDown34_MSA()
735 v16i8 mask2 = {11, 12, 13, 15, 16, 17, 19, 20, in ScaleRowDown34_MSA()
741 src0 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 0); in ScaleRowDown34_MSA()
742 src1 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 16); in ScaleRowDown34_MSA()
743 src2 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 32); in ScaleRowDown34_MSA()
744 src3 = (v16u8)__msa_ld_b((v16i8*)src_ptr, 48); in ScaleRowDown34_MSA()
745 vec0 = (v16u8)__msa_vshf_b(mask0, (v16i8)src1, (v16i8)src0); in ScaleRowDown34_MSA()
746 vec1 = (v16u8)__msa_vshf_b(mask1, (v16i8)src2, (v16i8)src1); in ScaleRowDown34_MSA()
747 vec2 = (v16u8)__msa_vshf_b(mask2, (v16i8)src3, (v16i8)src2); in ScaleRowDown34_MSA()
748 __msa_st_b((v16i8)vec0, dst, 0); in ScaleRowDown34_MSA()
749 __msa_st_b((v16i8)vec1, dst, 16); in ScaleRowDown34_MSA()
750 __msa_st_b((v16i8)vec2, dst, 32); in ScaleRowDown34_MSA()
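
Note: the three masks in ScaleRowDown34_MSA drop byte index 2 of every group of four, turning 64 source pixels into 48 outputs per iteration. Scalar sketch of this point-sampled 3/4 path (illustrative name):

    #include <stdint.h>

    /* Illustrative sketch: 3/4 point sampling, keep pixels 0, 1 and 3 of each four. */
    static void Down34Sketch(const uint8_t* src, uint8_t* dst, int dst_width) {
      int x;
      for (x = 0; x < dst_width; x += 3) {
        dst[x + 0] = src[0];
        dst[x + 1] = src[1];
        dst[x + 2] = src[3];
        src += 4;
      }
    }
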
771 v16i8 mask0 = {0, 1, 1, 2, 2, 3, 4, 5, 5, 6, 6, 7, 8, 9, 9, 10}; in ScaleRowDown34_0_Box_MSA()
772 v16i8 mask1 = {10, 11, 12, 13, 13, 14, 14, 15, in ScaleRowDown34_0_Box_MSA()
774 v16i8 mask2 = {5, 6, 6, 7, 8, 9, 9, 10, 10, 11, 12, 13, 13, 14, 14, 15}; in ScaleRowDown34_0_Box_MSA()
782 src0 = (v16u8)__msa_ld_b((v16i8*)s, 0); in ScaleRowDown34_0_Box_MSA()
783 src1 = (v16u8)__msa_ld_b((v16i8*)s, 16); in ScaleRowDown34_0_Box_MSA()
784 src2 = (v16u8)__msa_ld_b((v16i8*)s, 32); in ScaleRowDown34_0_Box_MSA()
785 src3 = (v16u8)__msa_ld_b((v16i8*)s, 48); in ScaleRowDown34_0_Box_MSA()
786 src4 = (v16u8)__msa_ld_b((v16i8*)t, 0); in ScaleRowDown34_0_Box_MSA()
787 src5 = (v16u8)__msa_ld_b((v16i8*)t, 16); in ScaleRowDown34_0_Box_MSA()
788 src6 = (v16u8)__msa_ld_b((v16i8*)t, 32); in ScaleRowDown34_0_Box_MSA()
789 src7 = (v16u8)__msa_ld_b((v16i8*)t, 48); in ScaleRowDown34_0_Box_MSA()
790 vec0 = (v16u8)__msa_vshf_b(mask0, (v16i8)src0, (v16i8)src0); in ScaleRowDown34_0_Box_MSA()
791 vec1 = (v16u8)__msa_vshf_b(mask1, (v16i8)src1, (v16i8)src0); in ScaleRowDown34_0_Box_MSA()
792 vec2 = (v16u8)__msa_vshf_b(mask2, (v16i8)src1, (v16i8)src1); in ScaleRowDown34_0_Box_MSA()
793 vec3 = (v16u8)__msa_vshf_b(mask0, (v16i8)src2, (v16i8)src2); in ScaleRowDown34_0_Box_MSA()
794 vec4 = (v16u8)__msa_vshf_b(mask1, (v16i8)src3, (v16i8)src2); in ScaleRowDown34_0_Box_MSA()
795 vec5 = (v16u8)__msa_vshf_b(mask2, (v16i8)src3, (v16i8)src3); in ScaleRowDown34_0_Box_MSA()
796 vec6 = (v16u8)__msa_vshf_b(mask0, (v16i8)src4, (v16i8)src4); in ScaleRowDown34_0_Box_MSA()
797 vec7 = (v16u8)__msa_vshf_b(mask1, (v16i8)src5, (v16i8)src4); in ScaleRowDown34_0_Box_MSA()
798 vec8 = (v16u8)__msa_vshf_b(mask2, (v16i8)src5, (v16i8)src5); in ScaleRowDown34_0_Box_MSA()
799 vec9 = (v16u8)__msa_vshf_b(mask0, (v16i8)src6, (v16i8)src6); in ScaleRowDown34_0_Box_MSA()
800 vec10 = (v16u8)__msa_vshf_b(mask1, (v16i8)src7, (v16i8)src6); in ScaleRowDown34_0_Box_MSA()
801 vec11 = (v16u8)__msa_vshf_b(mask2, (v16i8)src7, (v16i8)src7); in ScaleRowDown34_0_Box_MSA()
838 dst0 = (v16u8)__msa_pckev_b((v16i8)reg1, (v16i8)reg0); in ScaleRowDown34_0_Box_MSA()
839 dst1 = (v16u8)__msa_pckev_b((v16i8)reg3, (v16i8)reg2); in ScaleRowDown34_0_Box_MSA()
840 dst2 = (v16u8)__msa_pckev_b((v16i8)reg5, (v16i8)reg4); in ScaleRowDown34_0_Box_MSA()
841 __msa_st_b((v16i8)dst0, d, 0); in ScaleRowDown34_0_Box_MSA()
842 __msa_st_b((v16i8)dst1, d, 16); in ScaleRowDown34_0_Box_MSA()
843 __msa_st_b((v16i8)dst2, d, 32); in ScaleRowDown34_0_Box_MSA()
865 v16i8 mask0 = {0, 1, 1, 2, 2, 3, 4, 5, 5, 6, 6, 7, 8, 9, 9, 10}; in ScaleRowDown34_1_Box_MSA()
866 v16i8 mask1 = {10, 11, 12, 13, 13, 14, 14, 15, in ScaleRowDown34_1_Box_MSA()
868 v16i8 mask2 = {5, 6, 6, 7, 8, 9, 9, 10, 10, 11, 12, 13, 13, 14, 14, 15}; in ScaleRowDown34_1_Box_MSA()
876 src0 = (v16u8)__msa_ld_b((v16i8*)s, 0); in ScaleRowDown34_1_Box_MSA()
877 src1 = (v16u8)__msa_ld_b((v16i8*)s, 16); in ScaleRowDown34_1_Box_MSA()
878 src2 = (v16u8)__msa_ld_b((v16i8*)s, 32); in ScaleRowDown34_1_Box_MSA()
879 src3 = (v16u8)__msa_ld_b((v16i8*)s, 48); in ScaleRowDown34_1_Box_MSA()
880 src4 = (v16u8)__msa_ld_b((v16i8*)t, 0); in ScaleRowDown34_1_Box_MSA()
881 src5 = (v16u8)__msa_ld_b((v16i8*)t, 16); in ScaleRowDown34_1_Box_MSA()
882 src6 = (v16u8)__msa_ld_b((v16i8*)t, 32); in ScaleRowDown34_1_Box_MSA()
883 src7 = (v16u8)__msa_ld_b((v16i8*)t, 48); in ScaleRowDown34_1_Box_MSA()
884 vec0 = (v16u8)__msa_vshf_b(mask0, (v16i8)src0, (v16i8)src0); in ScaleRowDown34_1_Box_MSA()
885 vec1 = (v16u8)__msa_vshf_b(mask1, (v16i8)src1, (v16i8)src0); in ScaleRowDown34_1_Box_MSA()
886 vec2 = (v16u8)__msa_vshf_b(mask2, (v16i8)src1, (v16i8)src1); in ScaleRowDown34_1_Box_MSA()
887 vec3 = (v16u8)__msa_vshf_b(mask0, (v16i8)src2, (v16i8)src2); in ScaleRowDown34_1_Box_MSA()
888 vec4 = (v16u8)__msa_vshf_b(mask1, (v16i8)src3, (v16i8)src2); in ScaleRowDown34_1_Box_MSA()
889 vec5 = (v16u8)__msa_vshf_b(mask2, (v16i8)src3, (v16i8)src3); in ScaleRowDown34_1_Box_MSA()
890 vec6 = (v16u8)__msa_vshf_b(mask0, (v16i8)src4, (v16i8)src4); in ScaleRowDown34_1_Box_MSA()
891 vec7 = (v16u8)__msa_vshf_b(mask1, (v16i8)src5, (v16i8)src4); in ScaleRowDown34_1_Box_MSA()
892 vec8 = (v16u8)__msa_vshf_b(mask2, (v16i8)src5, (v16i8)src5); in ScaleRowDown34_1_Box_MSA()
893 vec9 = (v16u8)__msa_vshf_b(mask0, (v16i8)src6, (v16i8)src6); in ScaleRowDown34_1_Box_MSA()
894 vec10 = (v16u8)__msa_vshf_b(mask1, (v16i8)src7, (v16i8)src6); in ScaleRowDown34_1_Box_MSA()
895 vec11 = (v16u8)__msa_vshf_b(mask2, (v16i8)src7, (v16i8)src7); in ScaleRowDown34_1_Box_MSA()
932 dst0 = (v16u8)__msa_pckev_b((v16i8)reg1, (v16i8)reg0); in ScaleRowDown34_1_Box_MSA()
933 dst1 = (v16u8)__msa_pckev_b((v16i8)reg3, (v16i8)reg2); in ScaleRowDown34_1_Box_MSA()
934 dst2 = (v16u8)__msa_pckev_b((v16i8)reg5, (v16i8)reg4); in ScaleRowDown34_1_Box_MSA()
935 __msa_st_b((v16i8)dst0, d, 0); in ScaleRowDown34_1_Box_MSA()
936 __msa_st_b((v16i8)dst1, d, 16); in ScaleRowDown34_1_Box_MSA()
937 __msa_st_b((v16i8)dst2, d, 32); in ScaleRowDown34_1_Box_MSA()