Search results for refs:vsum2x45 (results 1 – 25 of 88, sorted by relevance)

/external/XNNPACK/src/qs8-gemm/gen/

3x8c8-minmax-rndnu-neon-mull.c  (matches in xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c8__neon_mull)
  160  const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);   [local]
  168  int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
  209  const int32x2_t vsum2x45 = vpadd_s32(vpsum2x4, vpsum2x5);   [local]
  211  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

3x8c16-minmax-rndnu-neon-mlal.c  (matches in xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c16__neon_mlal)
  184  const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);   [local]
  191  int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
  232  const int32x2_t vsum2x45 = vpadd_s32(vpsum2x4, vpsum2x5);   [local]
  234  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

4x8c8-minmax-rndnu-neon-mull.c  (matches in xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c8__neon_mull)
  191  const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);   [local]
  203  int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
  246  const int32x2_t vsum2x45 = vpadd_s32(vpsum2x4, vpsum2x5);   [local]
  248  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

3x8c8-minmax-rndnu-neon-mlal.c  (matches in xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c8__neon_mlal)
  261  const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);   [local]
  269  int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
  310  const int32x2_t vsum2x45 = vpadd_s32(vpsum2x4, vpsum2x5);   [local]
  312  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

4x8c16-minmax-rndnu-neon-mlal.c  (matches in xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c16__neon_mlal)
  223  const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);   [local]
  234  int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
  277  const int32x2_t vsum2x45 = vpadd_s32(vpsum2x4, vpsum2x5);   [local]
  279  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

3x8c4s2-minmax-rndnu-neon-mull.c  (matches in xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c4s2__neon_mull)
  162  const int32x2_t vsum2x45 = vpadd_s32(vget_low_s32(vacc2x45), vget_high_s32(vacc2x45));   [local]
  164  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

4x8c8-minmax-rndnu-neon-mlal.c  (matches in xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c8__neon_mlal)
  318  const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);   [local]
  330  int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
  373  const int32x2_t vsum2x45 = vpadd_s32(vpsum2x4, vpsum2x5);   [local]
  375  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

3x16c8-minmax-rndnu-neon-mull.c  (matches in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mull)
  248  const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);   [local]
  264  int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
  335  const int32x2_t vsum2x45 = vpadd_s32(vpsum2x4, vpsum2x5);   [local]
  337  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

3x16c16-minmax-rndnu-neon-mlal.c  (matches in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c16__neon_mlal)
  296  const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);   [local]
  311  int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
  382  const int32x2_t vsum2x45 = vpadd_s32(vpsum2x4, vpsum2x5);   [local]
  384  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

3x8c4-minmax-rndnu-neon-mull-ld1r.c  (matches in xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c4__neon_mull_ld1r)
  211  const int32x2_t vsum2x45 = vpadd_s32(vget_low_s32(vacc2x45), vget_high_s32(vacc2x45));   [local]
  213  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

3x8c4-minmax-rndnu-neon-mull-dup.c  (matches in xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c4__neon_mull_dup)
  208  const int32x2_t vsum2x45 = vpadd_s32(vget_low_s32(vacc2x45), vget_high_s32(vacc2x45));   [local]
  210  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

3x8c4-minmax-rndnu-neon-mull-ld2r.c  (matches in xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c4__neon_mull_ld2r)
  208  const int32x2_t vsum2x45 = vpadd_s32(vget_low_s32(vacc2x45), vget_high_s32(vacc2x45));   [local]
  210  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

4x8c4s2-minmax-rndnu-neon-mull.c  (matches in xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c4s2__neon_mull)
  192  const int32x2_t vsum2x45 = vpadd_s32(vget_low_s32(vacc2x45), vget_high_s32(vacc2x45));   [local]
  194  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);
/external/XNNPACK/src/qs8-igemm/gen/

3x8c8-minmax-rndnu-neon-mull.c  (matches in xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c8__neon_mull)
  178  const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);   [local]
  186  int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
  227  const int32x2_t vsum2x45 = vpadd_s32(vpsum2x4, vpsum2x5);   [local]
  229  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

3x8c16-minmax-rndnu-neon-mlal.c  (matches in xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c16__neon_mlal)
  202  const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);   [local]
  209  int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
  250  const int32x2_t vsum2x45 = vpadd_s32(vpsum2x4, vpsum2x5);   [local]
  252  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

4x8c8-minmax-rndnu-neon-mull.c  (matches in xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c8__neon_mull)
  211  const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);   [local]
  223  int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
  266  const int32x2_t vsum2x45 = vpadd_s32(vpsum2x4, vpsum2x5);   [local]
  268  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

4x8c16-minmax-rndnu-neon-mlal.c  (matches in xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c16__neon_mlal)
  243  const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);   [local]
  254  int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
  297  const int32x2_t vsum2x45 = vpadd_s32(vpsum2x4, vpsum2x5);   [local]
  299  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

3x8c8-minmax-rndnu-neon-mlal.c  (matches in xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c8__neon_mlal)
  279  const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);   [local]
  287  int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
  328  const int32x2_t vsum2x45 = vpadd_s32(vpsum2x4, vpsum2x5);   [local]
  330  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

4x8c8-minmax-rndnu-neon-mlal.c  (matches in xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c8__neon_mlal)
  338  const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);   [local]
  350  int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
  393  const int32x2_t vsum2x45 = vpadd_s32(vpsum2x4, vpsum2x5);   [local]
  395  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

3x16c8-minmax-rndnu-neon-mull.c  (matches in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mull)
  266  const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);   [local]
  282  int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
  353  const int32x2_t vsum2x45 = vpadd_s32(vpsum2x4, vpsum2x5);   [local]
  355  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

3x8c4s2-minmax-rndnu-neon-mull.c  (matches in xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c4s2__neon_mull)
  180  const int32x2_t vsum2x45 = vpadd_s32(vget_low_s32(vacc2x45), vget_high_s32(vacc2x45));   [local]
  182  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

3x16c16-minmax-rndnu-neon-mlal.c  (matches in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c16__neon_mlal)
  314  const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);   [local]
  329  int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
  400  const int32x2_t vsum2x45 = vpadd_s32(vpsum2x4, vpsum2x5);   [local]
  402  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

4x8c4s2-minmax-rndnu-neon-mull.c  (matches in xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c4s2__neon_mull)
  212  const int32x2_t vsum2x45 = vpadd_s32(vget_low_s32(vacc2x45), vget_high_s32(vacc2x45));   [local]
  214  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

3x8c4-minmax-rndnu-neon-mull-ld1r.c  (matches in xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c4__neon_mull_ld1r)
  228  const int32x2_t vsum2x45 = vpadd_s32(vget_low_s32(vacc2x45), vget_high_s32(vacc2x45));   [local]
  230  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);

3x8c4-minmax-rndnu-neon-mull-dup.c  (matches in xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c4__neon_mull_dup)
  225  const int32x2_t vsum2x45 = vpadd_s32(vget_low_s32(vacc2x45), vget_high_s32(vacc2x45));   [local]
  227  int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);
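
Note on the matched code: all of the hits above come from the same accumulator-reduction step in XNNPACK's quantized (qs8) GEMM/IGEMM NEON micro-kernels. Each output column of row 2 has its own int32 accumulator vector, and pairwise additions collapse columns 4..7 into a single int32x4_t named vacc2x4567; in the c4/c4s2 variants the starting point is vacc2x45, which already interleaves two columns, hence the vget_low_s32/vget_high_s32 form. The vpaddq_s32 lines appear to be the AArch64 path and the vpadd_s32/vcombine_s32 lines the 32-bit ARM fallback, since vpaddq_s32 is an A64-only intrinsic. The program below is a minimal, self-contained sketch of that reduction pattern, not XNNPACK's actual kernel code: the variable names mirror the snippets, while the test values and the main() wrapper are invented for illustration. It needs a NEON-capable ARM toolchain to build.

#include <arm_neon.h>
#include <stdio.h>

int main(void) {
  /* Hypothetical partial sums: each vacc2xN holds four int32 partial products
     for output row 2, column N (N = 4..7). */
  const int32_t c4[4] = {1, 2, 3, 4};
  const int32_t c5[4] = {10, 20, 30, 40};
  const int32_t c6[4] = {100, 200, 300, 400};
  const int32_t c7[4] = {1000, 2000, 3000, 4000};
  const int32x4_t vacc2x4 = vld1q_s32(c4);
  const int32x4_t vacc2x5 = vld1q_s32(c5);
  const int32x4_t vacc2x6 = vld1q_s32(c6);
  const int32x4_t vacc2x7 = vld1q_s32(c7);

#if defined(__aarch64__)
  /* AArch64: pairwise-add full 128-bit vectors.  After the second pairwise
     add, lane k of vacc2x4567 holds the complete sum for column 4+k. */
  const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);
  const int32x4_t vsum2x67 = vpaddq_s32(vacc2x6, vacc2x7);
  const int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
#else
  /* 32-bit ARM: vpaddq_s32 is unavailable, so add each accumulator's 64-bit
     halves, pairwise-add those results, and recombine into one vector. */
  const int32x2_t vpsum2x4 = vadd_s32(vget_low_s32(vacc2x4), vget_high_s32(vacc2x4));
  const int32x2_t vpsum2x5 = vadd_s32(vget_low_s32(vacc2x5), vget_high_s32(vacc2x5));
  const int32x2_t vpsum2x6 = vadd_s32(vget_low_s32(vacc2x6), vget_high_s32(vacc2x6));
  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));
  const int32x2_t vsum2x45 = vpadd_s32(vpsum2x4, vpsum2x5);
  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);
  const int32x4_t vacc2x4567 = vcombine_s32(vsum2x45, vsum2x67);
#endif

  /* Either path should print: 10 100 1000 10000 */
  int32_t out[4];
  vst1q_s32(out, vacc2x4567);
  printf("%d %d %d %d\n", out[0], out[1], out[2], out[3]);
  return 0;
}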
