
Searched refs:vsum1x89 (Results 1 – 25 of 66) sorted by relevance

/external/XNNPACK/src/qs8-igemm/gen/
2x16c8-minmax-rndnu-neon-mull.c
    203  const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c8__neon_mull() local
    214  int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c8__neon_mull()
    263  const int32x2_t vsum1x89 = vpadd_s32(vpsum1x8, vpsum1x9);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c8__neon_mull() local
    265  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB );  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c8__neon_mull()
2x16c16-minmax-rndnu-neon-mlal.c
    235  const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c16__neon_mlal() local
    245  int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c16__neon_mlal()
    294  const int32x2_t vsum1x89 = vpadd_s32(vpsum1x8, vpsum1x9);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c16__neon_mlal() local
    296  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB );  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c16__neon_mlal()
2x16c8-minmax-rndnu-neon-mlal.c
    342  const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c8__neon_mlal() local
    353  int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c8__neon_mlal()
    402  const int32x2_t vsum1x89 = vpadd_s32(vpsum1x8, vpsum1x9);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c8__neon_mlal() local
    404  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB );  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c8__neon_mlal()
3x16c8-minmax-rndnu-neon-mull.c
    260  const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mull() local
    279  int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mull()
    332  const int32x2_t vsum1x89 = vpadd_s32(vpsum1x8, vpsum1x9);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mull() local
    334  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB );  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mull()
3x16c16-minmax-rndnu-neon-mlal.c
    308  const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c16__neon_mlal() local
    326  int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c16__neon_mlal()
    379  const int32x2_t vsum1x89 = vpadd_s32(vpsum1x8, vpsum1x9);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c16__neon_mlal() local
    381  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB );  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c16__neon_mlal()
2x16c4s2-minmax-rndnu-neon-mull.c
    203  const int32x2_t vsum1x89 = vpadd_s32(vget_low_s32(vacc1x89), vget_high_s32(vacc1x89));  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c4s2__neon_mull() local
    205  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c4s2__neon_mull()
4x16c8-minmax-rndnu-neon-mull.c
    317  const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c8__neon_mull() local
    344  int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c8__neon_mull()
    401  const int32x2_t vsum1x89 = vpadd_s32(vpsum1x8, vpsum1x9);  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c8__neon_mull() local
    403  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB );  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c8__neon_mull()
3x16c8-minmax-rndnu-neon-mlal.c
    449  const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mlal() local
    468  int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mlal()
    521  const int32x2_t vsum1x89 = vpadd_s32(vpsum1x8, vpsum1x9);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mlal() local
    523  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB );  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mlal()
2x16c4-minmax-rndnu-neon-mull-ld2r.c
    257  const int32x2_t vsum1x89 = vpadd_s32(vget_low_s32(vacc1x89), vget_high_s32(vacc1x89));  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c4__neon_mull_ld2r() local
    259  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c4__neon_mull_ld2r()
4x16c16-minmax-rndnu-neon-mlal.c
    381  const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c16__neon_mlal() local
    407  int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c16__neon_mlal()
    464  const int32x2_t vsum1x89 = vpadd_s32(vpsum1x8, vpsum1x9);  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c16__neon_mlal() local
    466  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB );  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c16__neon_mlal()
2x16c4-minmax-rndnu-neon-mull-dup.c
    257  const int32x2_t vsum1x89 = vpadd_s32(vget_low_s32(vacc1x89), vget_high_s32(vacc1x89));  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c4__neon_mull_dup() local
    259  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c4__neon_mull_dup()
2x16c4-minmax-rndnu-neon-mull-ld1r.c
    259  const int32x2_t vsum1x89 = vpadd_s32(vget_low_s32(vacc1x89), vget_high_s32(vacc1x89));  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c4__neon_mull_ld1r() local
    261  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c4__neon_mull_ld1r()
/external/XNNPACK/src/qs8-gemm/gen/
2x16c8-minmax-rndnu-neon-mull.c
    187  const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c8__neon_mull() local
    198  int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c8__neon_mull()
    247  const int32x2_t vsum1x89 = vpadd_s32(vpsum1x8, vpsum1x9);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c8__neon_mull() local
    249  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB );  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c8__neon_mull()
2x16c16-minmax-rndnu-neon-mlal.c
    219  const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c16__neon_mlal() local
    229  int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c16__neon_mlal()
    278  const int32x2_t vsum1x89 = vpadd_s32(vpsum1x8, vpsum1x9);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c16__neon_mlal() local
    280  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB );  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c16__neon_mlal()
3x16c8-minmax-rndnu-neon-mull.c
    242  const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mull() local
    261  int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mull()
    314  const int32x2_t vsum1x89 = vpadd_s32(vpsum1x8, vpsum1x9);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mull() local
    316  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB );  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mull()
2x16c8-minmax-rndnu-neon-mlal.c
    326  const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c8__neon_mlal() local
    337  int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c8__neon_mlal()
    386  const int32x2_t vsum1x89 = vpadd_s32(vpsum1x8, vpsum1x9);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c8__neon_mlal() local
    388  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB );  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c8__neon_mlal()
3x16c16-minmax-rndnu-neon-mlal.c
    290  const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c16__neon_mlal() local
    308  int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c16__neon_mlal()
    361  const int32x2_t vsum1x89 = vpadd_s32(vpsum1x8, vpsum1x9);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c16__neon_mlal() local
    363  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB );  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c16__neon_mlal()
2x16c4s2-minmax-rndnu-neon-mull.c
    187  const int32x2_t vsum1x89 = vpadd_s32(vget_low_s32(vacc1x89), vget_high_s32(vacc1x89));  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c4s2__neon_mull() local
    189  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c4s2__neon_mull()
4x16c8-minmax-rndnu-neon-mull.c
    297  const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c8__neon_mull() local
    324  int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c8__neon_mull()
    381  const int32x2_t vsum1x89 = vpadd_s32(vpsum1x8, vpsum1x9);  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c8__neon_mull() local
    383  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB );  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c8__neon_mull()
3x16c8-minmax-rndnu-neon-mlal.c
    431  const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mlal() local
    450  int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mlal()
    503  const int32x2_t vsum1x89 = vpadd_s32(vpsum1x8, vpsum1x9);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mlal() local
    505  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB );  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mlal()
2x16c4-minmax-rndnu-neon-mull-dup.c
    242  const int32x2_t vsum1x89 = vpadd_s32(vget_low_s32(vacc1x89), vget_high_s32(vacc1x89));  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c4__neon_mull_dup() local
    244  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c4__neon_mull_dup()
2x16c4-minmax-rndnu-neon-mull-ld1r.c
    244  const int32x2_t vsum1x89 = vpadd_s32(vget_low_s32(vacc1x89), vget_high_s32(vacc1x89));  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c4__neon_mull_ld1r() local
    246  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c4__neon_mull_ld1r()
2x16c4-minmax-rndnu-neon-mull-ld2r.c
    242  const int32x2_t vsum1x89 = vpadd_s32(vget_low_s32(vacc1x89), vget_high_s32(vacc1x89));  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c4__neon_mull_ld2r() local
    244  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c4__neon_mull_ld2r()
4x16c16-minmax-rndnu-neon-mlal.c
    361  const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c16__neon_mlal() local
    387  int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c16__neon_mlal()
    444  const int32x2_t vsum1x89 = vpadd_s32(vpsum1x8, vpsum1x9);  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c16__neon_mlal() local
    446  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB );  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c16__neon_mlal()
3x16c4s2-minmax-rndnu-neon-mull.c
    239  const int32x2_t vsum1x89 = vpadd_s32(vget_low_s32(vacc1x89), vget_high_s32(vacc1x89));  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c4s2__neon_mull() local
    241  int32x4_t vacc1x89AB = vcombine_s32(vsum1x89, vsum1xAB);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c4s2__neon_mull()
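
The hits above all show the same accumulator-reduction idiom in the generated QS8 GEMM/IGEMM kernels: by the kernels' naming convention, each vacc1xN register holds partial dot products for output column N of row 1, and the vsum1x89/vsum1xAB intermediates fold pairs of those accumulators so that vacc1x89AB ends up with one finished sum per lane for columns 8, 9, A and B. Below is a minimal standalone sketch of that idiom, not the XNNPACK code itself; the helper name reduce_columns_89AB, its arguments, and the exact lane bookkeeping are made up for illustration.

#include <arm_neon.h>

// Hypothetical helper, for illustration only: given four per-column int32
// accumulators for output columns 8, 9, A (10) and B (11), return an
// int32x4_t with one fully reduced sum per lane, in column order.
static int32x4_t reduce_columns_89AB(int32x4_t vacc_c8, int32x4_t vacc_c9,
                                     int32x4_t vacc_cA, int32x4_t vacc_cB)
{
#if defined(__aarch64__)
  // AArch64 form, mirroring the vpaddq_s32 hits: pairwise-add lanes of
  // (c8, c9) and (cA, cB), then pairwise-add the results, so lane i of the
  // output is the total of column 8+i.
  const int32x4_t vsum_89 = vpaddq_s32(vacc_c8, vacc_c9);
  const int32x4_t vsum_AB = vpaddq_s32(vacc_cA, vacc_cB);
  return vpaddq_s32(vsum_89, vsum_AB);
#else
  // ARMv7 form, mirroring the vpadd_s32/vcombine_s32 hits: collapse each
  // accumulator to a 2-lane partial sum, pair those up with vpadd_s32,
  // then glue the two halves back into one 128-bit register.
  const int32x2_t vpsum_8 = vadd_s32(vget_low_s32(vacc_c8), vget_high_s32(vacc_c8));
  const int32x2_t vpsum_9 = vadd_s32(vget_low_s32(vacc_c9), vget_high_s32(vacc_c9));
  const int32x2_t vpsum_A = vadd_s32(vget_low_s32(vacc_cA), vget_high_s32(vacc_cA));
  const int32x2_t vpsum_B = vadd_s32(vget_low_s32(vacc_cB), vget_high_s32(vacc_cB));
  const int32x2_t vsum_89 = vpadd_s32(vpsum_8, vpsum_9);   // { sum(c8), sum(c9) }
  const int32x2_t vsum_AB = vpadd_s32(vpsum_A, vpsum_B);   // { sum(cA), sum(cB) }
  return vcombine_s32(vsum_89, vsum_AB);                   // { c8, c9, cA, cB }
#endif
}

The two-hit files (the c4 and c4s2 variants) compute vsum1x89 from a single vacc1x89 register instead, via vget_low_s32/vget_high_s32 plus vpadd_s32, before the same vcombine_s32 step.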
