
Searched refs:vmlal_u8 (Results 1 – 25 of 47) sorted by relevance
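For orientation: `vmlal_u8` is the NEON unsigned widening multiply-accumulate-long intrinsic, `uint16x8_t vmlal_u8(uint16x8_t a, uint8x8_t b, uint8x8_t c)` (VMLAL.U8 on AArch32, UMLAL on AArch64). A minimal sketch of the operation every hit below performs; the helper name is illustrative, not from any listed file:

```c
#include <arm_neon.h>

uint16x8_t acc_step(uint16x8_t acc, uint8x8_t src, uint8x8_t coeff)
{
    /* Per lane: acc[i] += (uint16_t)src[i] * coeff[i], for i = 0..7.
       The u8*u8 product is widened to u16 before the add, so no
       intermediate overflow occurs at 8 bits. */
    return vmlal_u8(acc, src, coeff);
}
```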

/external/libvpx/libvpx/vp8/common/arm/neon/
bilinearpredict_neon.c
88  d0 = vmlal_u8(d0, vreinterpret_u8_u32(c2.val[0]), filter1); in vp8_bilinear_predict4x4_neon()
89 d1 = vmlal_u8(d1, vreinterpret_u8_u32(c3.val[0]), filter1); in vp8_bilinear_predict4x4_neon()
90 d2 = vmlal_u8(d2, b4, filter1); in vp8_bilinear_predict4x4_neon()
111 b0 = vmlal_u8(b0, a0, filter1); in vp8_bilinear_predict4x4_neon()
112 b1 = vmlal_u8(b1, a1, filter1); in vp8_bilinear_predict4x4_neon()
167 q6u16 = vmlal_u8(q6u16, d3u8, d1u8); in vp8_bilinear_predict8x4_neon()
168 q7u16 = vmlal_u8(q7u16, d5u8, d1u8); in vp8_bilinear_predict8x4_neon()
169 q8u16 = vmlal_u8(q8u16, d7u8, d1u8); in vp8_bilinear_predict8x4_neon()
170 q9u16 = vmlal_u8(q9u16, d9u8, d1u8); in vp8_bilinear_predict8x4_neon()
171 q10u16 = vmlal_u8(q10u16, d11u8, d1u8); in vp8_bilinear_predict8x4_neon()
[all …]
sixtappredict_neon.c
54  *c = vmlal_u8(*c, vreinterpret_u8_u32(a_shuf.val[0]), filter); in filter_add_accumulate()
55 *d = vmlal_u8(*d, vreinterpret_u8_u32(b_shuf.val[0]), filter); in filter_add_accumulate()
142 c0 = vmlal_u8(c0, b2, filter2); in yonly4x4()
143 c1 = vmlal_u8(c1, b4, filter2); in yonly4x4()
144 c2 = vmlal_u8(c2, b3, filter3); in yonly4x4()
145 c3 = vmlal_u8(c3, b5, filter3); in yonly4x4()
311 d89 = vmlal_u8(d89, vget_low_u8(s4), filter0); in vp8_sixtap_predict4x4_neon()
329 d89 = vmlal_u8(d89, s4_f2, filter2); in vp8_sixtap_predict4x4_neon()
376 c0 = vmlal_u8(c0, b2, filter2); in vp8_sixtap_predict4x4_neon()
377 c1 = vmlal_u8(c1, b4, filter2); in vp8_sixtap_predict4x4_neon()
[all …]
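These libvpx hits follow the standard NEON FIR pattern: start the accumulator with `vmull_u8` for the first tap, chain `vmlal_u8` for the remaining taps, then round-narrow back to 8 bits. A hedged two-tap sketch, assuming VP8's 7-bit filter precision (coefficients summing to 128); the function and argument names are illustrative, not the libvpx ones:

```c
#include <arm_neon.h>

uint8x8_t bilinear_tap2(uint8x8_t a0, uint8x8_t a1,
                        uint8x8_t filter0, uint8x8_t filter1)
{
    uint16x8_t acc = vmull_u8(a0, filter0);  /* tap 0: u8*u8 -> u16 */
    acc = vmlal_u8(acc, a1, filter1);        /* tap 1: accumulate */
    return vrshrn_n_u16(acc, 7);             /* rounding narrow: (acc + 64) >> 7 */
}
```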
/external/libhevc/common/arm/
ihevc_inter_pred_filters_luma_vert_w16inp.s
155 vmlal.s16 q4,d0,d22 @mul_res1 = vmlal_u8(mul_res1, src_tmp1, coeffabs_0)@
157 vmlal.s16 q4,d2,d24 @mul_res1 = vmlal_u8(mul_res1, src_tmp3, coeffabs_2)@
159 vmlal.s16 q4,d3,d25 @mul_res1 = vmlal_u8(mul_res1, src_tmp4, coeffabs_3)@
161 vmlal.s16 q4,d4,d26 @mul_res1 = vmlal_u8(mul_res1, src_tmp1, coeffabs_4)@
163 vmlal.s16 q4,d5,d27 @mul_res1 = vmlal_u8(mul_res1, src_tmp2, coeffabs_5)@
164 vmlal.s16 q4,d6,d28 @mul_res1 = vmlal_u8(mul_res1, src_tmp3, coeffabs_6)@
165 vmlal.s16 q4,d7,d29 @mul_res1 = vmlal_u8(mul_res1, src_tmp4, coeffabs_7)@
171 vmlal.s16 q5,d1,d22 @mul_res2 = vmlal_u8(mul_res2, src_tmp2, coeffabs_0)@
173 vmlal.s16 q5,d3,d24 @mul_res2 = vmlal_u8(mul_res2, src_tmp4, coeffabs_2)@
175 vmlal.s16 q5,d4,d25 @mul_res2 = vmlal_u8(mul_res2, src_tmp1, coeffabs_3)@
[all …]
ihevc_inter_pred_luma_vert_w16inp_w16out.s
165 vmlal.s16 q4,d0,d22 @mul_res1 = vmlal_u8(mul_res1, src_tmp1, coeffabs_0)@
167 vmlal.s16 q4,d2,d24 @mul_res1 = vmlal_u8(mul_res1, src_tmp3, coeffabs_2)@
169 vmlal.s16 q4,d3,d25 @mul_res1 = vmlal_u8(mul_res1, src_tmp4, coeffabs_3)@
171 vmlal.s16 q4,d4,d26 @mul_res1 = vmlal_u8(mul_res1, src_tmp1, coeffabs_4)@
173 vmlal.s16 q4,d5,d27 @mul_res1 = vmlal_u8(mul_res1, src_tmp2, coeffabs_5)@
174 vmlal.s16 q4,d6,d28 @mul_res1 = vmlal_u8(mul_res1, src_tmp3, coeffabs_6)@
175 vmlal.s16 q4,d7,d29 @mul_res1 = vmlal_u8(mul_res1, src_tmp4, coeffabs_7)@
181 vmlal.s16 q5,d1,d22 @mul_res2 = vmlal_u8(mul_res2, src_tmp2, coeffabs_0)@
183 vmlal.s16 q5,d3,d24 @mul_res2 = vmlal_u8(mul_res2, src_tmp4, coeffabs_2)@
185 vmlal.s16 q5,d4,d25 @mul_res2 = vmlal_u8(mul_res2, src_tmp1, coeffabs_3)@
[all …]
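Note that in these two w16inp ("16-bit input") files the instruction is actually `vmlal.s16`, the signed 16-bit widening multiply-accumulate; the `vmlal_u8` text survives only in the carried-over comments, which is why the lines match this search. The intrinsic counterpart of that instruction, as a sketch with an illustrative name:

```c
#include <arm_neon.h>

int32x4_t acc_step_s16(int32x4_t acc, int16x4_t src, int16x4_t coeff)
{
    /* Per lane: acc[i] += (int32_t)src[i] * coeff[i], for i = 0..3. */
    return vmlal_s16(acc, src, coeff);
}
```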
ihevc_inter_pred_filters_luma_vert.s
170 vmlal.u8 q4,d3,d25 @mul_res1 = vmlal_u8(mul_res1, src_tmp4, coeffabs_3)@
172 vmlal.u8 q4,d4,d26 @mul_res1 = vmlal_u8(mul_res1, src_tmp1, coeffabs_4)@
176 vmlal.u8 q4,d6,d28 @mul_res1 = vmlal_u8(mul_res1, src_tmp3, coeffabs_6)@
191 vmlal.u8 q5,d4,d25 @mul_res2 = vmlal_u8(mul_res2, src_tmp1, coeffabs_3)@
193 vmlal.u8 q5,d5,d26 @mul_res2 = vmlal_u8(mul_res2, src_tmp2, coeffabs_4)@
198 vmlal.u8 q5,d7,d28 @mul_res2 = vmlal_u8(mul_res2, src_tmp4, coeffabs_6)@
256 vmlal.u8 q4,d3,d25 @mul_res1 = vmlal_u8(mul_res1, src_tmp4, coeffabs_3)@
259 vmlal.u8 q4,d4,d26 @mul_res1 = vmlal_u8(mul_res1, src_tmp1, coeffabs_4)@
264 vmlal.u8 q4,d6,d28 @mul_res1 = vmlal_u8(mul_res1, src_tmp3, coeffabs_6)@
281 vmlal.u8 q5,d4,d25 @mul_res2 = vmlal_u8(mul_res2, src_tmp1, coeffabs_3)@
[all …]
/external/libjpeg-turbo/simd/arm/
jdsample-neon.c
88  vmlal_u8(vmovl_u8(vget_low_u8(s1)), vget_low_u8(s0), three_u8); in jsimd_h2v1_fancy_upsample_neon()
90 vmlal_u8(vmovl_u8(vget_high_u8(s1)), vget_high_u8(s0), three_u8); in jsimd_h2v1_fancy_upsample_neon()
92 vmlal_u8(vmovl_u8(vget_low_u8(s0)), vget_low_u8(s1), three_u8); in jsimd_h2v1_fancy_upsample_neon()
94 vmlal_u8(vmovl_u8(vget_high_u8(s0)), vget_high_u8(s1), three_u8); in jsimd_h2v1_fancy_upsample_neon()
127 vmlal_u8(vmovl_u8(vget_low_u8(s1)), vget_low_u8(s0), three_u8); in jsimd_h2v1_fancy_upsample_neon()
129 vmlal_u8(vmovl_u8(vget_high_u8(s1)), vget_high_u8(s0), three_u8); in jsimd_h2v1_fancy_upsample_neon()
131 vmlal_u8(vmovl_u8(vget_low_u8(s0)), vget_low_u8(s1), three_u8); in jsimd_h2v1_fancy_upsample_neon()
133 vmlal_u8(vmovl_u8(vget_high_u8(s0)), vget_high_u8(s1), three_u8); in jsimd_h2v1_fancy_upsample_neon()
247 uint16x8_t s0colsum0_l = vmlal_u8(vmovl_u8(vget_low_u8(s0A)), in jsimd_h2v2_fancy_upsample_neon()
249 uint16x8_t s0colsum0_h = vmlal_u8(vmovl_u8(vget_high_u8(s0A)), in jsimd_h2v2_fancy_upsample_neon()
[all …]
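The libjpeg-turbo upsampler uses `vmlal_u8` for the classic 3:1 triangle weighting: widen the farther sample with `vmovl_u8`, then fold in the nearer sample times 3 with `vmlal_u8`. A minimal sketch of that per-lane computation (wrapper name is illustrative, not the libjpeg-turbo function):

```c
#include <arm_neon.h>

/* Per lane: 3 * nearer[i] + farther[i], widened to u16. */
uint16x8_t weight_3_1(uint8x8_t nearer, uint8x8_t farther)
{
    const uint8x8_t three_u8 = vdup_n_u8(3);
    return vmlal_u8(vmovl_u8(farther), nearer, three_u8);
}
```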
/external/libaom/libaom/av1/common/arm/
blend_a64_hmask_neon.c
53  vmlal_u8(res_low, vget_low_u8(max_minus_m_q), vget_low_u8(tmp1_q)); in aom_blend_a64_hmask_neon()
55 res_high = vmlal_u8(res_high, vget_high_u8(max_minus_m_q), in aom_blend_a64_hmask_neon()
79 res = vmlal_u8(res, max_minus_m, tmp1); in aom_blend_a64_hmask_neon()
98 res = vmlal_u8(res, max_minus_m, tmp1); in aom_blend_a64_hmask_neon()
122 res = vmlal_u8(res, max_minus_m, tmp1); in aom_blend_a64_hmask_neon()
blend_a64_vmask_neon.c
50  res_low = vmlal_u8(res_low, max_minus_m, vget_low_u8(tmp1_q)); in aom_blend_a64_vmask_neon()
52 res_high = vmlal_u8(res_high, max_minus_m, vget_high_u8(tmp1_q)); in aom_blend_a64_vmask_neon()
73 res = vmlal_u8(res, max_minus_m, tmp1); in aom_blend_a64_vmask_neon()
97 res = vmlal_u8(res, max_minus_m, tmp1); in aom_blend_a64_vmask_neon()
129 res = vmlal_u8(res, max_minus_m, tmp1); in aom_blend_a64_vmask_neon()
/external/libvpx/config/arm-neon/vpx_dsp/arm/
vpx_convolve8_avg_vert_filter_type2_neon.asm.S
98  vmlal.u8 q4, d3, d25 @mul_res1 = vmlal_u8(mul_res1,
101 vmlal.u8 q4, d4, d26 @mul_res1 = vmlal_u8(mul_res1,
107 vmlal.u8 q4, d6, d28 @mul_res1 = vmlal_u8(mul_res1,
123 vmlal.u8 q5, d4, d25 @mul_res2 = vmlal_u8(mul_res2,
129 vmlal.u8 q5, d5, d26 @mul_res2 = vmlal_u8(mul_res2,
135 vmlal.u8 q5, d7, d28 @mul_res2 = vmlal_u8(mul_res2,
196 vmlal.u8 q4, d3, d25 @mul_res1 = vmlal_u8(mul_res1,
202 vmlal.u8 q4, d4, d26 @mul_res1 = vmlal_u8(mul_res1,
207 vmlal.u8 q4, d6, d28 @mul_res1 = vmlal_u8(mul_res1,
224 vmlal.u8 q5, d4, d25 @mul_res2 = vmlal_u8(mul_res2,
[all …]
vpx_convolve8_avg_vert_filter_type1_neon.asm.S
97  vmlal.u8 q4, d3, d25 @mul_res1 = vmlal_u8(mul_res1,
100 vmlal.u8 q4, d4, d26 @mul_res1 = vmlal_u8(mul_res1,
106 vmlsl.u8 q4, d6, d28 @mul_res1 = vmlal_u8(mul_res1,
122 vmlal.u8 q5, d4, d25 @mul_res2 = vmlal_u8(mul_res2,
128 vmlal.u8 q5, d5, d26 @mul_res2 = vmlal_u8(mul_res2,
134 vmlsl.u8 q5, d7, d28 @mul_res2 = vmlal_u8(mul_res2,
195 vmlal.u8 q4, d3, d25 @mul_res1 = vmlal_u8(mul_res1,
201 vmlal.u8 q4, d4, d26 @mul_res1 = vmlal_u8(mul_res1,
206 vmlsl.u8 q4, d6, d28 @mul_res1 = vmlal_u8(mul_res1,
223 vmlal.u8 q5, d4, d25 @mul_res2 = vmlal_u8(mul_res2,
[all …]
vpx_convolve8_vert_filter_type1_neon.asm.S
98  vmlal.u8 q4, d3, d25 @mul_res1 = vmlal_u8(mul_res1,
101 vmlal.u8 q4, d4, d26 @mul_res1 = vmlal_u8(mul_res1,
107 vmlsl.u8 q4, d6, d28 @mul_res1 = vmlal_u8(mul_res1,
123 vmlal.u8 q5, d4, d25 @mul_res2 = vmlal_u8(mul_res2,
128 vmlal.u8 q5, d5, d26 @mul_res2 = vmlal_u8(mul_res2,
134 vmlsl.u8 q5, d7, d28 @mul_res2 = vmlal_u8(mul_res2,
191 vmlal.u8 q4, d3, d25 @mul_res1 = vmlal_u8(mul_res1,
196 vmlal.u8 q4, d4, d26 @mul_res1 = vmlal_u8(mul_res1,
201 vmlsl.u8 q4, d6, d28 @mul_res1 = vmlal_u8(mul_res1,
216 vmlal.u8 q5, d4, d25 @mul_res2 = vmlal_u8(mul_res2,
[all …]
vpx_convolve8_vert_filter_type2_neon.asm.S
98  vmlal.u8 q4, d3, d25 @mul_res1 = vmlal_u8(mul_res1,
101 vmlal.u8 q4, d4, d26 @mul_res1 = vmlal_u8(mul_res1,
107 vmlal.u8 q4, d6, d28 @mul_res1 = vmlal_u8(mul_res1,
123 vmlal.u8 q5, d4, d25 @mul_res2 = vmlal_u8(mul_res2,
128 vmlal.u8 q5, d5, d26 @mul_res2 = vmlal_u8(mul_res2,
134 vmlal.u8 q5, d7, d28 @mul_res2 = vmlal_u8(mul_res2,
192 vmlal.u8 q4, d3, d25 @mul_res1 = vmlal_u8(mul_res1,
197 vmlal.u8 q4, d4, d26 @mul_res1 = vmlal_u8(mul_res1,
202 vmlal.u8 q4, d6, d28 @mul_res1 = vmlal_u8(mul_res1,
217 vmlal.u8 q5, d4, d25 @mul_res2 = vmlal_u8(mul_res2,
[all …]
vpx_convolve8_horiz_filter_type2_neon.asm.S
113 vmlal.u8 q4, d1, d25 @mul_res = vmlal_u8(src[0_1],
125 vmlal.u8 q4, d4, d28 @mul_res = vmlal_u8(src[0_4],
131 vmlal.u8 q4, d6, d30 @mul_res = vmlal_u8(src[0_6],
145 vmlal.u8 q5, d16, d28 @mul_res = vmlal_u8(src[0_4],
150 vmlal.u8 q5, d18, d30 @mul_res = vmlal_u8(src[0_6],
158 vmlal.u8 q5, d13, d25 @mul_res = vmlal_u8(src[0_1],
202 vmlal.u8 q4, d2, d25 @mul_res = vmlal_u8(src[0_1],
211 vmlal.u8 q4, d12, d28 @mul_res = vmlal_u8(src[0_4],
216 vmlal.u8 q4, d16, d30 @mul_res = vmlal_u8(src[0_6],
246 vmlal.u8 q5, d2, d25 @mul_res = vmlal_u8(src[0_1],
[all …]
vpx_convolve8_avg_horiz_filter_type1_neon.asm.S
113 vmlsl.u8 q4, d1, d25 @mul_res = vmlal_u8(src[0_1],
125 vmlal.u8 q4, d4, d28 @mul_res = vmlal_u8(src[0_4],
131 vmlsl.u8 q4, d6, d30 @mul_res = vmlal_u8(src[0_6],
145 vmlal.u8 q5, d16, d28 @mul_res = vmlal_u8(src[0_4],
153 vmlsl.u8 q5, d18, d30 @mul_res = vmlal_u8(src[0_6],
161 vmlsl.u8 q5, d13, d25 @mul_res = vmlal_u8(src[0_1],
206 vmlsl.u8 q4, d2, d25 @mul_res = vmlal_u8(src[0_1],
215 vmlal.u8 q4, d12, d28 @mul_res = vmlal_u8(src[0_4],
220 vmlsl.u8 q4, d16, d30 @mul_res = vmlal_u8(src[0_6],
251 vmlsl.u8 q5, d2, d25 @mul_res = vmlal_u8(src[0_1],
[all …]
vpx_convolve8_horiz_filter_type1_neon.asm.S
113 vmlsl.u8 q4, d1, d25 @mul_res = vmlal_u8(src[0_1],
125 vmlal.u8 q4, d4, d28 @mul_res = vmlal_u8(src[0_4],
131 vmlsl.u8 q4, d6, d30 @mul_res = vmlal_u8(src[0_6],
145 vmlal.u8 q5, d16, d28 @mul_res = vmlal_u8(src[0_4],
150 vmlsl.u8 q5, d18, d30 @mul_res = vmlal_u8(src[0_6],
158 vmlsl.u8 q5, d13, d25 @mul_res = vmlal_u8(src[0_1],
202 vmlsl.u8 q4, d2, d25 @mul_res = vmlal_u8(src[0_1],
211 vmlal.u8 q4, d12, d28 @mul_res = vmlal_u8(src[0_4],
216 vmlsl.u8 q4, d16, d30 @mul_res = vmlal_u8(src[0_6],
246 vmlsl.u8 q5, d2, d25 @mul_res = vmlal_u8(src[0_1],
[all …]
vpx_convolve8_avg_horiz_filter_type2_neon.asm.S
114 vmlal.u8 q4, d1, d25 @mul_res = vmlal_u8(src[0_1],
126 vmlal.u8 q4, d4, d28 @mul_res = vmlal_u8(src[0_4],
132 vmlal.u8 q4, d6, d30 @mul_res = vmlal_u8(src[0_6],
146 vmlal.u8 q5, d16, d28 @mul_res = vmlal_u8(src[0_4],
154 vmlal.u8 q5, d18, d30 @mul_res = vmlal_u8(src[0_6],
162 vmlal.u8 q5, d13, d25 @mul_res = vmlal_u8(src[0_1],
207 vmlal.u8 q4, d2, d25 @mul_res = vmlal_u8(src[0_1],
216 vmlal.u8 q4, d12, d28 @mul_res = vmlal_u8(src[0_4],
221 vmlal.u8 q4, d16, d30 @mul_res = vmlal_u8(src[0_6],
252 vmlal.u8 q5, d2, d25 @mul_res = vmlal_u8(src[0_1],
[all …]
/external/libvpx/libvpx/vpx_dsp/arm/
vpx_convolve8_avg_vert_filter_type1_neon.asm
91  vmlal.u8 q4, d3, d25 ;mul_res1 = vmlal_u8(mul_res1,
94 vmlal.u8 q4, d4, d26 ;mul_res1 = vmlal_u8(mul_res1,
100 vmlsl.u8 q4, d6, d28 ;mul_res1 = vmlal_u8(mul_res1,
116 vmlal.u8 q5, d4, d25 ;mul_res2 = vmlal_u8(mul_res2,
122 vmlal.u8 q5, d5, d26 ;mul_res2 = vmlal_u8(mul_res2,
128 vmlsl.u8 q5, d7, d28 ;mul_res2 = vmlal_u8(mul_res2,
189 vmlal.u8 q4, d3, d25 ;mul_res1 = vmlal_u8(mul_res1,
195 vmlal.u8 q4, d4, d26 ;mul_res1 = vmlal_u8(mul_res1,
200 vmlsl.u8 q4, d6, d28 ;mul_res1 = vmlal_u8(mul_res1,
217 vmlal.u8 q5, d4, d25 ;mul_res2 = vmlal_u8(mul_res2,
[all …]
vpx_convolve8_avg_vert_filter_type2_neon.asm
92  vmlal.u8 q4, d3, d25 ;mul_res1 = vmlal_u8(mul_res1,
95 vmlal.u8 q4, d4, d26 ;mul_res1 = vmlal_u8(mul_res1,
101 vmlal.u8 q4, d6, d28 ;mul_res1 = vmlal_u8(mul_res1,
117 vmlal.u8 q5, d4, d25 ;mul_res2 = vmlal_u8(mul_res2,
123 vmlal.u8 q5, d5, d26 ;mul_res2 = vmlal_u8(mul_res2,
129 vmlal.u8 q5, d7, d28 ;mul_res2 = vmlal_u8(mul_res2,
190 vmlal.u8 q4, d3, d25 ;mul_res1 = vmlal_u8(mul_res1,
196 vmlal.u8 q4, d4, d26 ;mul_res1 = vmlal_u8(mul_res1,
201 vmlal.u8 q4, d6, d28 ;mul_res1 = vmlal_u8(mul_res1,
218 vmlal.u8 q5, d4, d25 ;mul_res2 = vmlal_u8(mul_res2,
[all …]
vpx_convolve8_vert_filter_type1_neon.asm
92  vmlal.u8 q4, d3, d25 ;mul_res1 = vmlal_u8(mul_res1,
95 vmlal.u8 q4, d4, d26 ;mul_res1 = vmlal_u8(mul_res1,
101 vmlsl.u8 q4, d6, d28 ;mul_res1 = vmlal_u8(mul_res1,
117 vmlal.u8 q5, d4, d25 ;mul_res2 = vmlal_u8(mul_res2,
122 vmlal.u8 q5, d5, d26 ;mul_res2 = vmlal_u8(mul_res2,
128 vmlsl.u8 q5, d7, d28 ;mul_res2 = vmlal_u8(mul_res2,
185 vmlal.u8 q4, d3, d25 ;mul_res1 = vmlal_u8(mul_res1,
190 vmlal.u8 q4, d4, d26 ;mul_res1 = vmlal_u8(mul_res1,
195 vmlsl.u8 q4, d6, d28 ;mul_res1 = vmlal_u8(mul_res1,
210 vmlal.u8 q5, d4, d25 ;mul_res2 = vmlal_u8(mul_res2,
[all …]
vpx_convolve8_vert_filter_type2_neon.asm
92  vmlal.u8 q4, d3, d25 ;mul_res1 = vmlal_u8(mul_res1,
95 vmlal.u8 q4, d4, d26 ;mul_res1 = vmlal_u8(mul_res1,
101 vmlal.u8 q4, d6, d28 ;mul_res1 = vmlal_u8(mul_res1,
117 vmlal.u8 q5, d4, d25 ;mul_res2 = vmlal_u8(mul_res2,
122 vmlal.u8 q5, d5, d26 ;mul_res2 = vmlal_u8(mul_res2,
128 vmlal.u8 q5, d7, d28 ;mul_res2 = vmlal_u8(mul_res2,
186 vmlal.u8 q4, d3, d25 ;mul_res1 = vmlal_u8(mul_res1,
191 vmlal.u8 q4, d4, d26 ;mul_res1 = vmlal_u8(mul_res1,
196 vmlal.u8 q4, d6, d28 ;mul_res1 = vmlal_u8(mul_res1,
211 vmlal.u8 q5, d4, d25 ;mul_res2 = vmlal_u8(mul_res2,
[all …]
vpx_convolve8_horiz_filter_type2_neon.asm
107 vmlal.u8 q4, d1, d25 ;mul_res = vmlal_u8(src[0_1],
119 vmlal.u8 q4, d4, d28 ;mul_res = vmlal_u8(src[0_4],
125 vmlal.u8 q4, d6, d30 ;mul_res = vmlal_u8(src[0_6],
139 vmlal.u8 q5, d16, d28 ;mul_res = vmlal_u8(src[0_4],
144 vmlal.u8 q5, d18, d30 ;mul_res = vmlal_u8(src[0_6],
152 vmlal.u8 q5, d13, d25 ;mul_res = vmlal_u8(src[0_1],
196 vmlal.u8 q4, d2, d25 ;mul_res = vmlal_u8(src[0_1],
205 vmlal.u8 q4, d12, d28 ;mul_res = vmlal_u8(src[0_4],
210 vmlal.u8 q4, d16, d30 ;mul_res = vmlal_u8(src[0_6],
240 vmlal.u8 q5, d2, d25 ;mul_res = vmlal_u8(src[0_1],
[all …]
vpx_convolve8_horiz_filter_type1_neon.asm
107 vmlsl.u8 q4, d1, d25 ;mul_res = vmlal_u8(src[0_1],
119 vmlal.u8 q4, d4, d28 ;mul_res = vmlal_u8(src[0_4],
125 vmlsl.u8 q4, d6, d30 ;mul_res = vmlal_u8(src[0_6],
139 vmlal.u8 q5, d16, d28 ;mul_res = vmlal_u8(src[0_4],
144 vmlsl.u8 q5, d18, d30 ;mul_res = vmlal_u8(src[0_6],
152 vmlsl.u8 q5, d13, d25 ;mul_res = vmlal_u8(src[0_1],
196 vmlsl.u8 q4, d2, d25 ;mul_res = vmlal_u8(src[0_1],
205 vmlal.u8 q4, d12, d28 ;mul_res = vmlal_u8(src[0_4],
210 vmlsl.u8 q4, d16, d30 ;mul_res = vmlal_u8(src[0_6],
240 vmlsl.u8 q5, d2, d25 ;mul_res = vmlal_u8(src[0_1],
[all …]
vpx_convolve8_avg_horiz_filter_type2_neon.asm
108 vmlal.u8 q4, d1, d25 ;mul_res = vmlal_u8(src[0_1],
120 vmlal.u8 q4, d4, d28 ;mul_res = vmlal_u8(src[0_4],
126 vmlal.u8 q4, d6, d30 ;mul_res = vmlal_u8(src[0_6],
140 vmlal.u8 q5, d16, d28 ;mul_res = vmlal_u8(src[0_4],
148 vmlal.u8 q5, d18, d30 ;mul_res = vmlal_u8(src[0_6],
156 vmlal.u8 q5, d13, d25 ;mul_res = vmlal_u8(src[0_1],
201 vmlal.u8 q4, d2, d25 ;mul_res = vmlal_u8(src[0_1],
210 vmlal.u8 q4, d12, d28 ;mul_res = vmlal_u8(src[0_4],
215 vmlal.u8 q4, d16, d30 ;mul_res = vmlal_u8(src[0_6],
246 vmlal.u8 q5, d2, d25 ;mul_res = vmlal_u8(src[0_1],
[all …]
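Across these vpx_convolve8 listings, the filter_type1 variants implement negative filter taps with `vmlsl.u8` (widening multiply-subtract-long) while the copied comments still read `vmlal_u8`, which is why those lines appear among the hits. A hedged C sketch of the mixed add/subtract pattern, with illustrative names and coefficients stored as magnitudes:

```c
#include <arm_neon.h>

uint16x8_t taps_mixed(uint16x8_t acc,
                      uint8x8_t s_neg, uint8x8_t coeff_neg_abs,
                      uint8x8_t s_pos, uint8x8_t coeff_pos_abs)
{
    acc = vmlsl_u8(acc, s_neg, coeff_neg_abs); /* negative tap: acc -= s * |c| */
    acc = vmlal_u8(acc, s_pos, coeff_pos_abs); /* positive tap: acc += s * c   */
    return acc;
}
```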
/external/rust/crates/libz-sys/src/zlib-ng/arch/arm/
adler32_neon.c
37  sum2 = vmlal_u8(sum2, vget_high_u8(t0), vget_high_u8(d0)); in NEON_accum32()
38 sum2 = vmlal_u8(sum2, vget_low_u8(t1), vget_low_u8(d1)); in NEON_accum32()
39 sum2 = vmlal_u8(sum2, vget_high_u8(t1), vget_high_u8(d1)); in NEON_accum32()
52 sum2 = vmlal_u8(sum2, vget_high_u8(t1), vget_high_u8(d0)); in NEON_accum32()
/external/libgav1/libgav1/src/dsp/arm/
obmc_neon.cc
46  vrshrn_n_u16(vmlal_u8(weighted_pred, obmc_pred_mask, obmc_pred_val), 6); in WriteObmcLine4()
83 vrshrn_n_u16(vmlal_u8(weighted_pred, obmc_pred_mask, obmc_pred_val), 6); in OverlapBlend2xH_NEON()
132 vrshrn_n_u16(vmlal_u8(weighted_pred, obmc_pred_mask, obmc_pred_val), 6); in OverlapBlendFromLeft8xH_NEON()
179 vrshrn_n_u16(vmlal_u8(weighted_pred_lo, vget_low_u8(obmc_pred_mask), in OverlapBlendFromLeft_NEON()
185 vrshrn_n_u16(vmlal_u8(weighted_pred_hi, vget_high_u8(obmc_pred_mask), in OverlapBlendFromLeft_NEON()
302 vrshrn_n_u16(vmlal_u8(weighted_pred, obmc_pred_mask, obmc_pred_val), 6); in OverlapBlendFromTop8xH_NEON()
351 vrshrn_n_u16(vmlal_u8(weighted_pred_lo, obmc_pred_mask, in OverlapBlendFromTop_NEON()
357 vrshrn_n_u16(vmlal_u8(weighted_pred_hi, obmc_pred_mask, in OverlapBlendFromTop_NEON()
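The obmc_neon.cc and blend_a64 hits share one pattern: a two-source weighted blend whose weights sum to 64, followed by `vrshrn_n_u16(…, 6)` for a rounding narrow back to 8 bits. A minimal sketch under that assumption (function and argument names are illustrative, not the libgav1 or libaom ones):

```c
#include <arm_neon.h>

uint8x8_t blend64(uint8x8_t pred, uint8x8_t obmc_pred, uint8x8_t mask)
{
    const uint8x8_t inv_mask = vsub_u8(vdup_n_u8(64), mask);
    uint16x8_t sum = vmull_u8(pred, mask);        /* pred * mask            */
    sum = vmlal_u8(sum, obmc_pred, inv_mask);     /* + obmc * (64 - mask)   */
    return vrshrn_n_u16(sum, 6);                  /* (sum + 32) >> 6, to u8 */
}
```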
