/external/libvpx/libvpx/vp8/common/arm/neon/ |
D | sixtappredict_neon.c |
     65  *c = vmlsl_u8(*c, vreinterpret_u8_u32(a_shuf.val[0]), filter);  in filter_sub_accumulate()
     66  *d = vmlsl_u8(*d, vreinterpret_u8_u32(b_shuf.val[0]), filter);  in filter_sub_accumulate()
    132  c0 = vmlsl_u8(c0, b4, filter4);  in yonly4x4()
    133  c1 = vmlsl_u8(c1, b6, filter4);  in yonly4x4()
    134  c2 = vmlsl_u8(c2, b1, filter1);  in yonly4x4()
    135  c3 = vmlsl_u8(c3, b3, filter1);  in yonly4x4()
    317  d89 = vmlsl_u8(d89, s4_f1, filter1);  in vp8_sixtap_predict4x4_neon()
    323  d89 = vmlsl_u8(d89, s4_f4, filter4);  in vp8_sixtap_predict4x4_neon()
    371  c0 = vmlsl_u8(c0, b4, filter4);  in vp8_sixtap_predict4x4_neon()
    372  c1 = vmlsl_u8(c1, b6, filter4);  in vp8_sixtap_predict4x4_neon()
    [all …]
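These matches all follow one idiom: `vmlsl_u8(acc, a, b)` is the widening multiply-subtract-long intrinsic, computing `acc - (uint16_t)a * b` per lane, so a negative filter tap can be applied while both the pixels and the tap's absolute value stay unsigned bytes. A minimal sketch of the pattern, not the libvpx code itself (the function name and tap layout are illustrative; in the matches above the negative taps are filter1 and filter4):

```c
#include <arm_neon.h>

/* Sketch only: one 6-tap filter step over 8 pixels, mirroring the
 * accumulate pattern in the matches above.  The filter's absolute
 * tap values live in uint8 vectors; the negative taps (taps 1 and 4,
 * i.e. filter1/filter4 above) go through the widening multiply-
 * subtract vmlsl_u8, the positive ones through vmull_u8/vmlal_u8. */
static uint16x8_t sixtap_step(const uint8x8_t src[6],
                              const uint8x8_t abs_tap[6]) {
  uint16x8_t acc = vmull_u8(src[0], abs_tap[0]);  /* +tap0 */
  acc = vmlsl_u8(acc, src[1], abs_tap[1]);        /* -tap1 */
  acc = vmlal_u8(acc, src[2], abs_tap[2]);        /* +tap2 */
  acc = vmlal_u8(acc, src[3], abs_tap[3]);        /* +tap3 */
  acc = vmlsl_u8(acc, src[4], abs_tap[4]);        /* -tap4 */
  acc = vmlal_u8(acc, src[5], abs_tap[5]);        /* +tap5 */
  /* Unsigned wraparound on the subtractions is intentional: kernels
   * like these typically reinterpret the result as signed and narrow
   * with rounding, e.g. vqrshrun_n_s16(vreinterpretq_s16_u16(acc), 7). */
  return acc;
}
```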
|
/external/libhevc/common/arm/ |
D | ihevc_inter_pred_chroma_horz.s |
    177  vmlsl.u8 q15,d0,d24   @mul_res = vmlsl_u8(src[0_2], coeffabs_2)@
    179  vmlal.u8 q15,d4,d26   @mul_res = vmlsl_u8(src[0_0], coeffabs_0)@
    217  vmlsl.u8 q11,d8,d24   @mul_res = vmlsl_u8(src[0_2], coeffabs_2)@
    228  vmlal.u8 q11,d12,d26  @mul_res = vmlsl_u8(src[0_0], coeffabs_0)@
    240  vmlsl.u8 q10,d9,d24   @mul_res = vmlsl_u8(src[0_2], coeffabs_2)@
    243  vmlal.u8 q10,d13,d26  @mul_res = vmlsl_u8(src[0_0], coeffabs_0)@
    253  vmlsl.u8 q15,d0,d24   @mul_res = vmlsl_u8(src[0_2], coeffabs_2)@
    256  vmlal.u8 q15,d4,d26   @mul_res = vmlsl_u8(src[0_0], coeffabs_0)@
    302  vmlsl.u8 q11,d8,d24   @mul_res = vmlsl_u8(src[0_2], coeffabs_2)@
    304  vmlal.u8 q11,d12,d26  @mul_res = vmlsl_u8(src[0_0], coeffabs_0)@
    [all …]
|
D | ihevc_inter_pred_filters_luma_vert.s |
    166  vmlsl.u8 q4,d0,d22   @mul_res1 = vmlsl_u8(mul_res1, src_tmp1, coeffabs_0)@
    168  vmlsl.u8 q4,d2,d24   @mul_res1 = vmlsl_u8(mul_res1, src_tmp3, coeffabs_2)@
    174  vmlsl.u8 q4,d5,d27   @mul_res1 = vmlsl_u8(mul_res1, src_tmp2, coeffabs_5)@
    178  vmlsl.u8 q4,d7,d29   @mul_res1 = vmlsl_u8(mul_res1, src_tmp4, coeffabs_7)@
    185  vmlsl.u8 q5,d1,d22   @mul_res2 = vmlsl_u8(mul_res2, src_tmp2, coeffabs_0)@
    188  vmlsl.u8 q5,d3,d24   @mul_res2 = vmlsl_u8(mul_res2, src_tmp4, coeffabs_2)@
    195  vmlsl.u8 q5,d6,d27   @mul_res2 = vmlsl_u8(mul_res2, src_tmp3, coeffabs_5)@
    201  vmlsl.u8 q5,d16,d29  @mul_res2 = vmlsl_u8(mul_res2, src_tmp1, coeffabs_7)@
    250  vmlsl.u8 q4,d0,d22   @mul_res1 = vmlsl_u8(mul_res1, src_tmp1, coeffabs_0)@
    253  vmlsl.u8 q4,d2,d24   @mul_res1 = vmlsl_u8(mul_res1, src_tmp3, coeffabs_2)@
    [all …]
|
D | ihevc_inter_pred_chroma_horz_w16out.s |
    196  vmlsl.u8 q15,d0,d24   @mul_res = vmlsl_u8(src[0_2], coeffabs_2)@
    198  vmlal.u8 q15,d4,d26   @mul_res = vmlsl_u8(src[0_0], coeffabs_0)@
    236  vmlsl.u8 q11,d8,d24   @mul_res = vmlsl_u8(src[0_2], coeffabs_2)@
    243  vmlal.u8 q11,d12,d26  @mul_res = vmlsl_u8(src[0_0], coeffabs_0)@
    256  vmlsl.u8 q10,d9,d24   @mul_res = vmlsl_u8(src[0_2], coeffabs_2)@
    259  vmlal.u8 q10,d13,d26  @mul_res = vmlsl_u8(src[0_0], coeffabs_0)@
    269  vmlsl.u8 q15,d0,d24   @mul_res = vmlsl_u8(src[0_2], coeffabs_2)@
    272  vmlal.u8 q15,d4,d26   @mul_res = vmlsl_u8(src[0_0], coeffabs_0)@
    311  vmlsl.u8 q11,d8,d24   @mul_res = vmlsl_u8(src[0_2], coeffabs_2)@
    313  vmlal.u8 q11,d12,d26  @mul_res = vmlsl_u8(src[0_0], coeffabs_0)@
    [all …]
|
D | ihevc_inter_pred_filters_luma_horz.s |
    225  vmlsl.u8 q4,d0,d24   @mul_res = vmlsl_u8(src[0_0], coeffabs_0)@
    227  vmlsl.u8 q4,d2,d26   @mul_res = vmlsl_u8(src[0_2], coeffabs_2)@
    231  vmlsl.u8 q4,d5,d29   @mul_res = vmlsl_u8(src[0_5], coeffabs_5)@
    235  vmlsl.u8 q4,d7,d31   @mul_res = vmlsl_u8(src[0_7], coeffabs_7)@
    240  vmlsl.u8 q5,d14,d26  @mul_res = vmlsl_u8(src[0_2], coeffabs_2)@
    244  vmlsl.u8 q5,d17,d29  @mul_res = vmlsl_u8(src[0_5], coeffabs_5)@
    247  vmlsl.u8 q5,d19,d31  @mul_res = vmlsl_u8(src[0_7], coeffabs_7)@
    249  vmlsl.u8 q5,d12,d24  @mul_res = vmlsl_u8(src[0_0], coeffabs_0)@
    307  vmlsl.u8 q4,d0,d24   @mul_res = vmlsl_u8(src[0_0], coeffabs_0)@
    309  vmlsl.u8 q4,d4,d26   @mul_res = vmlsl_u8(src[0_2], coeffabs_2)@
    [all …]
|
D | ihevc_inter_pred_luma_horz_w16out.s |
    389  vmlsl.u8 q4,d0,d24   @mul_res = vmlsl_u8(src[0_0], coeffabs_0)@
    391  vmlsl.u8 q4,d2,d26   @mul_res = vmlsl_u8(src[0_2], coeffabs_2)@
    395  vmlsl.u8 q4,d5,d29   @mul_res = vmlsl_u8(src[0_5], coeffabs_5)@
    399  vmlsl.u8 q4,d7,d31   @mul_res = vmlsl_u8(src[0_7], coeffabs_7)@
    404  vmlsl.u8 q5,d14,d26  @mul_res = vmlsl_u8(src[0_2], coeffabs_2)@
    408  vmlsl.u8 q5,d17,d29  @mul_res = vmlsl_u8(src[0_5], coeffabs_5)@
    411  vmlsl.u8 q5,d19,d31  @mul_res = vmlsl_u8(src[0_7], coeffabs_7)@
    413  vmlsl.u8 q5,d12,d24  @mul_res = vmlsl_u8(src[0_0], coeffabs_0)@
    470  vmlsl.u8 q4,d0,d24   @mul_res = vmlsl_u8(src[0_0], coeffabs_0)@
    472  vmlsl.u8 q4,d4,d26   @mul_res = vmlsl_u8(src[0_2], coeffabs_2)@
    [all …]
|
D | ihevc_inter_pred_chroma_vert_w16out.s |
    152  …vmlsl.u8 q3,d5,d0  @vmlsl_u8(mul_res1, vreinterpret_u8_u32(src_tmp1), coeffab…
|
D | ihevc_inter_pred_chroma_vert.s |
    151  …vmlsl.u8 q3,d5,d0  @vmlsl_u8(mul_res1, vreinterpret_u8_u32(src_tmp1), coeffab…
|
D | ihevc_intra_pred_filters_neon_intr.c |
    562  prod_val_1 = vmlsl_u8(prod_two_nt_src_0_val_t, const_col_i_val, pu1_src_0_val_t);  in ihevc_intra_pred_ref_filtering_neonintr()
    566  … prod_val_3 = vmlsl_u8(prod_two_nt_src_2_nt_val_t, const_col_i_val, pu1_src_2_nt_val_t);  in ihevc_intra_pred_ref_filtering_neonintr()
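The `@ … @` trailers in the libhevc .s files above are intrinsic-style pseudocode comments: an instruction like `vmlsl.u8 q4,d0,d22` documents itself as `mul_res1 = vmlsl_u8(mul_res1, src_tmp1, coeffabs_0)`, with the coefficient registers holding absolute tap values. A sketch of that single step in C intrinsics, reusing the names from the comments (the wrapper function itself is hypothetical):

```c
#include <arm_neon.h>

/* Sketch of the step that the comment on line 166 of
 * ihevc_inter_pred_filters_luma_vert.s names.  Variable names come
 * from the @-comments; only the wrapper is invented here. */
static uint16x8_t luma_vert_tap0(uint16x8_t mul_res1, uint8x8_t src_tmp1,
                                 uint8x8_t coeffabs_0) {
  /* vmlsl.u8 q4,d0,d22  <=>
   * mul_res1 -= (uint16_t)src_tmp1 * coeffabs_0, lane-wise */
  return vmlsl_u8(mul_res1, src_tmp1, coeffabs_0);
}
```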
|
/external/libvpx/config/arm-neon/vpx_dsp/arm/ |
D | vpx_convolve8_vert_filter_type1_neon.asm.S |
     93  vmlsl.u8 q4, d0, d22   @mul_res1 = vmlsl_u8(mul_res1,
     96  vmlal.u8 q4, d2, d24   @mul_res1 = vmlsl_u8(mul_res1,
    105  vmlal.u8 q4, d5, d27   @mul_res1 = vmlsl_u8(mul_res1,
    111  vmlsl.u8 q4, d7, d29   @mul_res1 = vmlsl_u8(mul_res1,
    118  vmlsl.u8 q5, d1, d22   @mul_res2 = vmlsl_u8(mul_res2,
    121  vmlal.u8 q5, d3, d24   @mul_res2 = vmlsl_u8(mul_res2,
    132  vmlal.u8 q5, d6, d27   @mul_res2 = vmlsl_u8(mul_res2,
    138  vmlsl.u8 q5, d16, d29  @mul_res2 = vmlsl_u8(mul_res2,
    186  vmlsl.u8 q4, d0, d22   @mul_res1 = vmlsl_u8(mul_res1,
    189  vmlal.u8 q4, d2, d24   @mul_res1 = vmlsl_u8(mul_res1,
    [all …]
|
D | vpx_convolve8_avg_vert_filter_type2_neon.asm.S |
     93  vmlsl.u8 q4, d0, d22   @mul_res1 = vmlsl_u8(mul_res1,
     96  vmlsl.u8 q4, d2, d24   @mul_res1 = vmlsl_u8(mul_res1,
    105  vmlsl.u8 q4, d5, d27   @mul_res1 = vmlsl_u8(mul_res1,
    111  vmlsl.u8 q4, d7, d29   @mul_res1 = vmlsl_u8(mul_res1,
    118  vmlsl.u8 q5, d1, d22   @mul_res2 = vmlsl_u8(mul_res2,
    121  vmlsl.u8 q5, d3, d24   @mul_res2 = vmlsl_u8(mul_res2,
    133  vmlsl.u8 q5, d6, d27   @mul_res2 = vmlsl_u8(mul_res2,
    139  vmlsl.u8 q5, d16, d29  @mul_res2 = vmlsl_u8(mul_res2,
    190  vmlsl.u8 q4, d0, d22   @mul_res1 = vmlsl_u8(mul_res1,
    194  vmlsl.u8 q4, d2, d24   @mul_res1 = vmlsl_u8(mul_res1,
    [all …]
|
D | vpx_convolve8_vert_filter_type2_neon.asm.S |
     93  vmlsl.u8 q4, d0, d22   @mul_res1 = vmlsl_u8(mul_res1,
     96  vmlsl.u8 q4, d2, d24   @mul_res1 = vmlsl_u8(mul_res1,
    105  vmlsl.u8 q4, d5, d27   @mul_res1 = vmlsl_u8(mul_res1,
    111  vmlsl.u8 q4, d7, d29   @mul_res1 = vmlsl_u8(mul_res1,
    118  vmlsl.u8 q5, d1, d22   @mul_res2 = vmlsl_u8(mul_res2,
    121  vmlsl.u8 q5, d3, d24   @mul_res2 = vmlsl_u8(mul_res2,
    132  vmlsl.u8 q5, d6, d27   @mul_res2 = vmlsl_u8(mul_res2,
    138  vmlsl.u8 q5, d16, d29  @mul_res2 = vmlsl_u8(mul_res2,
    187  vmlsl.u8 q4, d0, d22   @mul_res1 = vmlsl_u8(mul_res1,
    190  vmlsl.u8 q4, d2, d24   @mul_res1 = vmlsl_u8(mul_res1,
    [all …]
|
D | vpx_convolve8_avg_vert_filter_type1_neon.asm.S |
     92  vmlsl.u8 q4, d0, d22   @mul_res1 = vmlsl_u8(mul_res1,
     95  vmlal.u8 q4, d2, d24   @mul_res1 = vmlsl_u8(mul_res1,
    104  vmlal.u8 q4, d5, d27   @mul_res1 = vmlsl_u8(mul_res1,
    110  vmlsl.u8 q4, d7, d29   @mul_res1 = vmlsl_u8(mul_res1,
    117  vmlsl.u8 q5, d1, d22   @mul_res2 = vmlsl_u8(mul_res2,
    120  vmlal.u8 q5, d3, d24   @mul_res2 = vmlsl_u8(mul_res2,
    132  vmlal.u8 q5, d6, d27   @mul_res2 = vmlsl_u8(mul_res2,
    138  vmlsl.u8 q5, d16, d29  @mul_res2 = vmlsl_u8(mul_res2,
    189  vmlsl.u8 q4, d0, d22   @mul_res1 = vmlsl_u8(mul_res1,
    193  vmlal.u8 q4, d2, d24   @mul_res1 = vmlsl_u8(mul_res1,
    [all …]
|
D | vpx_convolve8_avg_horiz_filter_type2_neon.asm.S |
    121  vmlsl.u8 q4, d0, d24   @mul_res = vmlsl_u8(src[0_0],
    124  vmlsl.u8 q4, d2, d26   @mul_res = vmlsl_u8(src[0_2],
    130  vmlsl.u8 q4, d5, d29   @mul_res = vmlsl_u8(src[0_5],
    136  vmlsl.u8 q4, d7, d31   @mul_res = vmlsl_u8(src[0_7],
    143  vmlsl.u8 q5, d14, d26  @mul_res = vmlsl_u8(src[0_2],
    150  vmlsl.u8 q5, d17, d29  @mul_res = vmlsl_u8(src[0_5],
    157  vmlsl.u8 q5, d19, d31  @mul_res = vmlsl_u8(src[0_7],
    161  vmlsl.u8 q5, d12, d24  @mul_res = vmlsl_u8(src[0_0],
    205  vmlsl.u8 q4, d0, d24   @mul_res = vmlsl_u8(src[0_0],
    211  vmlsl.u8 q4, d4, d26   @mul_res = vmlsl_u8(src[0_2],
    [all …]
|
D | vpx_convolve8_horiz_filter_type2_neon.asm.S |
    120  vmlsl.u8 q4, d0, d24   @mul_res = vmlsl_u8(src[0_0],
    123  vmlsl.u8 q4, d2, d26   @mul_res = vmlsl_u8(src[0_2],
    129  vmlsl.u8 q4, d5, d29   @mul_res = vmlsl_u8(src[0_5],
    135  vmlsl.u8 q4, d7, d31   @mul_res = vmlsl_u8(src[0_7],
    142  vmlsl.u8 q5, d14, d26  @mul_res = vmlsl_u8(src[0_2],
    149  vmlsl.u8 q5, d17, d29  @mul_res = vmlsl_u8(src[0_5],
    153  vmlsl.u8 q5, d19, d31  @mul_res = vmlsl_u8(src[0_7],
    157  vmlsl.u8 q5, d12, d24  @mul_res = vmlsl_u8(src[0_0],
    200  vmlsl.u8 q4, d0, d24   @mul_res = vmlsl_u8(src[0_0],
    206  vmlsl.u8 q4, d4, d26   @mul_res = vmlsl_u8(src[0_2],
    [all …]
|
D | vpx_convolve8_horiz_filter_type1_neon.asm.S |
    120  vmlsl.u8 q4, d0, d24   @mul_res = vmlsl_u8(src[0_0],
    123  vmlal.u8 q4, d2, d26   @mul_res = vmlsl_u8(src[0_2],
    129  vmlal.u8 q4, d5, d29   @mul_res = vmlsl_u8(src[0_5],
    135  vmlsl.u8 q4, d7, d31   @mul_res = vmlsl_u8(src[0_7],
    142  vmlal.u8 q5, d14, d26  @mul_res = vmlsl_u8(src[0_2],
    149  vmlal.u8 q5, d17, d29  @mul_res = vmlsl_u8(src[0_5],
    153  vmlsl.u8 q5, d19, d31  @mul_res = vmlsl_u8(src[0_7],
    157  vmlsl.u8 q5, d12, d24  @mul_res = vmlsl_u8(src[0_0],
    200  vmlsl.u8 q4, d0, d24   @mul_res = vmlsl_u8(src[0_0],
    206  vmlal.u8 q4, d4, d26   @mul_res = vmlsl_u8(src[0_2],
    [all …]
|
D | vpx_convolve8_avg_horiz_filter_type1_neon.asm.S |
    120  vmlsl.u8 q4, d0, d24   @mul_res = vmlsl_u8(src[0_0],
    123  vmlal.u8 q4, d2, d26   @mul_res = vmlsl_u8(src[0_2],
    129  vmlal.u8 q4, d5, d29   @mul_res = vmlsl_u8(src[0_5],
    135  vmlsl.u8 q4, d7, d31   @mul_res = vmlsl_u8(src[0_7],
    142  vmlal.u8 q5, d14, d26  @mul_res = vmlsl_u8(src[0_2],
    149  vmlal.u8 q5, d17, d29  @mul_res = vmlsl_u8(src[0_5],
    156  vmlsl.u8 q5, d19, d31  @mul_res = vmlsl_u8(src[0_7],
    160  vmlsl.u8 q5, d12, d24  @mul_res = vmlsl_u8(src[0_0],
    204  vmlsl.u8 q4, d0, d24   @mul_res = vmlsl_u8(src[0_0],
    210  vmlal.u8 q4, d4, d26   @mul_res = vmlsl_u8(src[0_2],
    [all …]
|
/external/libvpx/libvpx/vpx_dsp/arm/ |
D | vpx_convolve8_vert_filter_type1_neon.asm |
     86  vmlsl.u8 q4, d0, d22   ;mul_res1 = vmlsl_u8(mul_res1,
     89  vmlal.u8 q4, d2, d24   ;mul_res1 = vmlsl_u8(mul_res1,
     98  vmlal.u8 q4, d5, d27   ;mul_res1 = vmlsl_u8(mul_res1,
    104  vmlsl.u8 q4, d7, d29   ;mul_res1 = vmlsl_u8(mul_res1,
    111  vmlsl.u8 q5, d1, d22   ;mul_res2 = vmlsl_u8(mul_res2,
    114  vmlal.u8 q5, d3, d24   ;mul_res2 = vmlsl_u8(mul_res2,
    125  vmlal.u8 q5, d6, d27   ;mul_res2 = vmlsl_u8(mul_res2,
    131  vmlsl.u8 q5, d16, d29  ;mul_res2 = vmlsl_u8(mul_res2,
    179  vmlsl.u8 q4, d0, d22   ;mul_res1 = vmlsl_u8(mul_res1,
    182  vmlal.u8 q4, d2, d24   ;mul_res1 = vmlsl_u8(mul_res1,
    [all …]
|
D | vpx_convolve8_avg_vert_filter_type2_neon.asm |
     86  vmlsl.u8 q4, d0, d22   ;mul_res1 = vmlsl_u8(mul_res1,
     89  vmlsl.u8 q4, d2, d24   ;mul_res1 = vmlsl_u8(mul_res1,
     98  vmlsl.u8 q4, d5, d27   ;mul_res1 = vmlsl_u8(mul_res1,
    104  vmlsl.u8 q4, d7, d29   ;mul_res1 = vmlsl_u8(mul_res1,
    111  vmlsl.u8 q5, d1, d22   ;mul_res2 = vmlsl_u8(mul_res2,
    114  vmlsl.u8 q5, d3, d24   ;mul_res2 = vmlsl_u8(mul_res2,
    126  vmlsl.u8 q5, d6, d27   ;mul_res2 = vmlsl_u8(mul_res2,
    132  vmlsl.u8 q5, d16, d29  ;mul_res2 = vmlsl_u8(mul_res2,
    183  vmlsl.u8 q4, d0, d22   ;mul_res1 = vmlsl_u8(mul_res1,
    187  vmlsl.u8 q4, d2, d24   ;mul_res1 = vmlsl_u8(mul_res1,
    [all …]
|
D | vpx_convolve8_vert_filter_type2_neon.asm |
     86  vmlsl.u8 q4, d0, d22   ;mul_res1 = vmlsl_u8(mul_res1,
     89  vmlsl.u8 q4, d2, d24   ;mul_res1 = vmlsl_u8(mul_res1,
     98  vmlsl.u8 q4, d5, d27   ;mul_res1 = vmlsl_u8(mul_res1,
    104  vmlsl.u8 q4, d7, d29   ;mul_res1 = vmlsl_u8(mul_res1,
    111  vmlsl.u8 q5, d1, d22   ;mul_res2 = vmlsl_u8(mul_res2,
    114  vmlsl.u8 q5, d3, d24   ;mul_res2 = vmlsl_u8(mul_res2,
    125  vmlsl.u8 q5, d6, d27   ;mul_res2 = vmlsl_u8(mul_res2,
    131  vmlsl.u8 q5, d16, d29  ;mul_res2 = vmlsl_u8(mul_res2,
    180  vmlsl.u8 q4, d0, d22   ;mul_res1 = vmlsl_u8(mul_res1,
    183  vmlsl.u8 q4, d2, d24   ;mul_res1 = vmlsl_u8(mul_res1,
    [all …]
|
D | vpx_convolve8_avg_vert_filter_type1_neon.asm |
     85  vmlsl.u8 q4, d0, d22   ;mul_res1 = vmlsl_u8(mul_res1,
     88  vmlal.u8 q4, d2, d24   ;mul_res1 = vmlsl_u8(mul_res1,
     97  vmlal.u8 q4, d5, d27   ;mul_res1 = vmlsl_u8(mul_res1,
    103  vmlsl.u8 q4, d7, d29   ;mul_res1 = vmlsl_u8(mul_res1,
    110  vmlsl.u8 q5, d1, d22   ;mul_res2 = vmlsl_u8(mul_res2,
    113  vmlal.u8 q5, d3, d24   ;mul_res2 = vmlsl_u8(mul_res2,
    125  vmlal.u8 q5, d6, d27   ;mul_res2 = vmlsl_u8(mul_res2,
    131  vmlsl.u8 q5, d16, d29  ;mul_res2 = vmlsl_u8(mul_res2,
    182  vmlsl.u8 q4, d0, d22   ;mul_res1 = vmlsl_u8(mul_res1,
    186  vmlal.u8 q4, d2, d24   ;mul_res1 = vmlsl_u8(mul_res1,
    [all …]
|
D | vpx_convolve8_horiz_filter_type2_neon.asm |
    113  vmlsl.u8 q4, d0, d24   ;mul_res = vmlsl_u8(src[0_0],
    116  vmlsl.u8 q4, d2, d26   ;mul_res = vmlsl_u8(src[0_2],
    122  vmlsl.u8 q4, d5, d29   ;mul_res = vmlsl_u8(src[0_5],
    128  vmlsl.u8 q4, d7, d31   ;mul_res = vmlsl_u8(src[0_7],
    135  vmlsl.u8 q5, d14, d26  ;mul_res = vmlsl_u8(src[0_2],
    142  vmlsl.u8 q5, d17, d29  ;mul_res = vmlsl_u8(src[0_5],
    146  vmlsl.u8 q5, d19, d31  ;mul_res = vmlsl_u8(src[0_7],
    150  vmlsl.u8 q5, d12, d24  ;mul_res = vmlsl_u8(src[0_0],
    193  vmlsl.u8 q4, d0, d24   ;mul_res = vmlsl_u8(src[0_0],
    199  vmlsl.u8 q4, d4, d26   ;mul_res = vmlsl_u8(src[0_2],
    [all …]
|
D | vpx_convolve8_horiz_filter_type1_neon.asm |
    113  vmlsl.u8 q4, d0, d24   ;mul_res = vmlsl_u8(src[0_0],
    116  vmlal.u8 q4, d2, d26   ;mul_res = vmlsl_u8(src[0_2],
    122  vmlal.u8 q4, d5, d29   ;mul_res = vmlsl_u8(src[0_5],
    128  vmlsl.u8 q4, d7, d31   ;mul_res = vmlsl_u8(src[0_7],
    135  vmlal.u8 q5, d14, d26  ;mul_res = vmlsl_u8(src[0_2],
    142  vmlal.u8 q5, d17, d29  ;mul_res = vmlsl_u8(src[0_5],
    146  vmlsl.u8 q5, d19, d31  ;mul_res = vmlsl_u8(src[0_7],
    150  vmlsl.u8 q5, d12, d24  ;mul_res = vmlsl_u8(src[0_0],
    193  vmlsl.u8 q4, d0, d24   ;mul_res = vmlsl_u8(src[0_0],
    199  vmlal.u8 q4, d4, d26   ;mul_res = vmlsl_u8(src[0_2],
    [all …]
|
D | vpx_convolve8_avg_horiz_filter_type2_neon.asm |
    114  vmlsl.u8 q4, d0, d24   ;mul_res = vmlsl_u8(src[0_0],
    117  vmlsl.u8 q4, d2, d26   ;mul_res = vmlsl_u8(src[0_2],
    123  vmlsl.u8 q4, d5, d29   ;mul_res = vmlsl_u8(src[0_5],
    129  vmlsl.u8 q4, d7, d31   ;mul_res = vmlsl_u8(src[0_7],
    136  vmlsl.u8 q5, d14, d26  ;mul_res = vmlsl_u8(src[0_2],
    143  vmlsl.u8 q5, d17, d29  ;mul_res = vmlsl_u8(src[0_5],
    150  vmlsl.u8 q5, d19, d31  ;mul_res = vmlsl_u8(src[0_7],
    154  vmlsl.u8 q5, d12, d24  ;mul_res = vmlsl_u8(src[0_0],
    198  vmlsl.u8 q4, d0, d24   ;mul_res = vmlsl_u8(src[0_0],
    204  vmlsl.u8 q4, d4, d26   ;mul_res = vmlsl_u8(src[0_2],
    [all …]
|
D | vpx_convolve8_avg_horiz_filter_type1_neon.asm |
    113  vmlsl.u8 q4, d0, d24   ;mul_res = vmlsl_u8(src[0_0],
    116  vmlal.u8 q4, d2, d26   ;mul_res = vmlsl_u8(src[0_2],
    122  vmlal.u8 q4, d5, d29   ;mul_res = vmlsl_u8(src[0_5],
    128  vmlsl.u8 q4, d7, d31   ;mul_res = vmlsl_u8(src[0_7],
    135  vmlal.u8 q5, d14, d26  ;mul_res = vmlsl_u8(src[0_2],
    142  vmlal.u8 q5, d17, d29  ;mul_res = vmlsl_u8(src[0_5],
    149  vmlsl.u8 q5, d19, d31  ;mul_res = vmlsl_u8(src[0_7],
    153  vmlsl.u8 q5, d12, d24  ;mul_res = vmlsl_u8(src[0_0],
    197  vmlsl.u8 q4, d0, d24   ;mul_res = vmlsl_u8(src[0_0],
    203  vmlal.u8 q4, d4, d26   ;mul_res = vmlsl_u8(src[0_2],
    [all …]
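One pattern is visible across all the vpx_convolve8 kernels above: the filter_type2 variants apply every matched tap (coefficient registers d22/d24/d27/d29 vertically, d24/d26/d29/d31 horizontally) with `vmlsl.u8`, while the filter_type1 variants switch the second and third of those taps to `vmlal.u8`. A sketch of the two accumulation shapes, assuming the same row/coefficient register mapping as the ihevc luma kernels earlier in this listing (the matched registers correspond to taps 0, 2, 5 and 7; all names here are illustrative):

```c
#include <arm_neon.h>

/* Only the four taps matched by this search are shown; the remaining
 * taps (1, 3, 4, 6) are accumulated with vmlal_u8 in both variants. */
static uint16x8_t accum_type1(uint16x8_t acc, const uint8x8_t row[8],
                              const uint8x8_t abs_coeff[8]) {
  acc = vmlsl_u8(acc, row[0], abs_coeff[0]);  /* d0,  d22: subtract */
  acc = vmlal_u8(acc, row[2], abs_coeff[2]);  /* d2,  d24: add      */
  acc = vmlal_u8(acc, row[5], abs_coeff[5]);  /* d5,  d27: add      */
  acc = vmlsl_u8(acc, row[7], abs_coeff[7]);  /* d7,  d29: subtract */
  return acc;
}

static uint16x8_t accum_type2(uint16x8_t acc, const uint8x8_t row[8],
                              const uint8x8_t abs_coeff[8]) {
  acc = vmlsl_u8(acc, row[0], abs_coeff[0]);  /* all four matched    */
  acc = vmlsl_u8(acc, row[2], abs_coeff[2]);  /* taps are subtracted */
  acc = vmlsl_u8(acc, row[5], abs_coeff[5]);
  acc = vmlsl_u8(acc, row[7], abs_coeff[7]);
  return acc;
}
```

The split matches two different coefficient sign patterns: a filter whose taps 2 and 5 are positive gets the type1 kernel, one where they are negative gets type2, so the absolute coefficient values can stay in unsigned registers either way.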
|