/external/XNNPACK/src/qs8-gemm/gen/

3x8c8-minmax-rndnu-neon-mull.c
  208  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c8__neon_mull() local
  210  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c8__neon_mull()

3x8c16-minmax-rndnu-neon-mlal.c
  231  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c16__neon_mlal() local
  233  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c16__neon_mlal()

4x8c8-minmax-rndnu-neon-mull.c
  245  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c8__neon_mull() local
  247  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c8__neon_mull()

3x8c8-minmax-rndnu-neon-mlal.c
  309  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c8__neon_mlal() local
  311  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c8__neon_mlal()

4x8c16-minmax-rndnu-neon-mlal.c
  276  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c16__neon_mlal() local
  278  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c16__neon_mlal()

4x8c8-minmax-rndnu-neon-mlal.c
  372  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c8__neon_mlal() local
  374  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c8__neon_mlal()

3x16c8-minmax-rndnu-neon-mull.c
  334  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mull() local
  336  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mull()

3x16c16-minmax-rndnu-neon-mlal.c
  381  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c16__neon_mlal() local
  383  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c16__neon_mlal()

4x16c8-minmax-rndnu-neon-mull.c
  401  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c8__neon_mull() local
  403  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c8__neon_mull()

3x16c8-minmax-rndnu-neon-mlal.c
  523  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mlal() local
  525  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mlal()

4x16c16-minmax-rndnu-neon-mlal.c
  464  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c16__neon_mlal() local
  466  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c16__neon_mlal()

4x16c8-minmax-rndnu-neon-mlal.c
  640  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c8__neon_mlal() local
  642  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c8__neon_mlal()

/external/XNNPACK/src/qs8-igemm/gen/

3x8c8-minmax-rndnu-neon-mull.c
  226  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c8__neon_mull() local
  228  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c8__neon_mull()

3x8c16-minmax-rndnu-neon-mlal.c
  249  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c16__neon_mlal() local
  251  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c16__neon_mlal()

4x8c8-minmax-rndnu-neon-mull.c
  265  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c8__neon_mull() local
  267  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c8__neon_mull()

4x8c16-minmax-rndnu-neon-mlal.c
  296  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c16__neon_mlal() local
  298  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c16__neon_mlal()

3x8c8-minmax-rndnu-neon-mlal.c
  327  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c8__neon_mlal() local
  329  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c8__neon_mlal()

4x8c8-minmax-rndnu-neon-mlal.c
  392  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c8__neon_mlal() local
  394  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c8__neon_mlal()

3x16c8-minmax-rndnu-neon-mull.c
  352  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mull() local
  354  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mull()

3x16c16-minmax-rndnu-neon-mlal.c
  399  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c16__neon_mlal() local
  401  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c16__neon_mlal()

4x16c8-minmax-rndnu-neon-mull.c
  421  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c8__neon_mull() local
  423  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c8__neon_mull()

3x16c8-minmax-rndnu-neon-mlal.c
  541  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mlal() local
  543  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mlal()

4x16c16-minmax-rndnu-neon-mlal.c
  484  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c16__neon_mlal() local
  486  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c16__neon_mlal()

4x16c8-minmax-rndnu-neon-mlal.c
  660  const int32x2_t vpsum2x7 = vadd_s32(vget_low_s32(vacc2x7), vget_high_s32(vacc2x7));  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c8__neon_mlal() local
  662  const int32x2_t vsum2x67 = vpadd_s32(vpsum2x6, vpsum2x7);  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c8__neon_mlal()
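Every hit above is the same NEON horizontal-reduction idiom used by these c8/c16 GEMM and IGEMM kernels: each int32x4_t accumulator (vacc2x7 for row 2, output channel 7) holds four partial sums, vadd_s32 folds its high half onto its low half, and vpadd_s32 then packs the totals of two neighbouring channels (6 and 7 here) into one int32x2_t (vsum2x67), which later feeds the rndnu requantization and min/max clamp. The "local" tag just marks vpsum2x7 as a block-local temporary. The standalone sketch below only illustrates that reduction step; the helper name reduce_channel_pair and the main driver are illustrative and not part of XNNPACK, and it must be built for an ARM target with NEON.

  #include <arm_neon.h>
  #include <stdio.h>

  // Fold two per-channel int32x4_t accumulators (playing the role of vacc2x6
  // and vacc2x7) into one int32x2_t holding the two channel totals (vsum2x67).
  // Hypothetical helper; the generated kernels emit this sequence inline.
  static int32x2_t reduce_channel_pair(int32x4_t vacc_c6, int32x4_t vacc_c7) {
    // 4 lanes -> 2 lanes: add the high half of each accumulator onto its low half.
    const int32x2_t vpsum_c6 = vadd_s32(vget_low_s32(vacc_c6), vget_high_s32(vacc_c6));
    const int32x2_t vpsum_c7 = vadd_s32(vget_low_s32(vacc_c7), vget_high_s32(vacc_c7));
    // Pairwise add packs the remaining two lanes of each vector into one lane,
    // producing { total(vacc_c6), total(vacc_c7) }.
    return vpadd_s32(vpsum_c6, vpsum_c7);
  }

  int main(void) {
    const int32_t a[4] = {1, 2, 3, 4};  // totals 10
    const int32_t b[4] = {5, 6, 7, 8};  // totals 26
    const int32x2_t vsum = reduce_channel_pair(vld1q_s32(a), vld1q_s32(b));
    printf("%d %d\n", vget_lane_s32(vsum, 0), vget_lane_s32(vsum, 1));  // prints: 10 26
    return 0;
  }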