/external/XNNPACK/src/qs8-gemm/gen/
D | 1x16c8-minmax-rndnu-neon-mull.c
    156  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c8__neon_mull() local
    159  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c8__neon_mull()
D | 1x16c16-minmax-rndnu-neon-mlal.c
    171  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c16__neon_mlal() local
    174  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c16__neon_mlal()
D | 1x16c8-minmax-rndnu-neon-mlal.c
    245  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c8__neon_mlal() local
    248  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c8__neon_mlal()
D | 2x16c8-minmax-rndnu-neon-mull.c
    223  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c8__neon_mull() local
    226  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c8__neon_mull()
D | 2x16c16-minmax-rndnu-neon-mlal.c
    254  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c16__neon_mlal() local
    257  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c16__neon_mlal()
D | 3x16c8-minmax-rndnu-neon-mull.c
    290  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mull() local
    293  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mull()
D | 2x16c8-minmax-rndnu-neon-mlal.c
    362  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c8__neon_mlal() local
    365  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c8__neon_mlal()
D | 3x16c16-minmax-rndnu-neon-mlal.c
    337  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c16__neon_mlal() local
    340  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c16__neon_mlal()
D | 4x16c8-minmax-rndnu-neon-mull.c
    357  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c8__neon_mull() local
    360  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c8__neon_mull()
D | 3x16c8-minmax-rndnu-neon-mlal.c
    479  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mlal() local
    482  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c8__neon_mlal()
D | 4x16c16-minmax-rndnu-neon-mlal.c
    420  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c16__neon_mlal() local
    423  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c16__neon_mlal()
D | 4x16c8-minmax-rndnu-neon-mlal.c
    596  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c8__neon_mlal() local
    599  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c8__neon_mlal()
/external/XNNPACK/src/qs8-igemm/gen/
D | 1x16c8-minmax-rndnu-neon-mull.c
    170  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c8__neon_mull() local
    173  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c8__neon_mull()
D | 1x16c16-minmax-rndnu-neon-mlal.c
    185  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c16__neon_mlal() local
    188  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c16__neon_mlal()
D | 1x16c8-minmax-rndnu-neon-mlal.c
    259  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c8__neon_mlal() local
    262  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c8__neon_mlal()
D | 2x16c8-minmax-rndnu-neon-mull.c
    239  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c8__neon_mull() local
    242  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c8__neon_mull()
D | 2x16c16-minmax-rndnu-neon-mlal.c
    270  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c16__neon_mlal() local
    273  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c16__neon_mlal()
D | 2x16c8-minmax-rndnu-neon-mlal.c
    378  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c8__neon_mlal() local
    381  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c8__neon_mlal()
D | 3x16c8-minmax-rndnu-neon-mull.c
    308  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mull() local
    311  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mull()
D | 3x16c16-minmax-rndnu-neon-mlal.c
    355  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c16__neon_mlal() local
    358  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c16__neon_mlal()
D | 4x16c8-minmax-rndnu-neon-mull.c
    377  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c8__neon_mull() local
    380  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c8__neon_mull()
D | 3x16c8-minmax-rndnu-neon-mlal.c
    497  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mlal() local
    500  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c8__neon_mlal()
D | 4x16c16-minmax-rndnu-neon-mlal.c
    440  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c16__neon_mlal() local
    443  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c16__neon_mlal()
D | 4x16c8-minmax-rndnu-neon-mlal.c
    616  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c8__neon_mlal() local
    619  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);  in xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c8__neon_mlal()
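Every entry above points at the same NEON horizontal-reduction idiom in the generated QS8 GEMM/IGEMM kernels: each per-column int32x4_t accumulator (vacc0x12 .. vacc0x15 for columns 0xC..0xF) is folded to two lanes with vadd_s32, neighbouring columns are pairwise-summed with vpadd_s32 (vsum0xCD holds the totals for columns 0xC and 0xD), and the pair sums are combined into one vector of four column sums. The following is a minimal standalone sketch of that step only, not XNNPACK source; the helper name reduce_columns_CDEF and the test values are made up for illustration, and it assumes a NEON-capable ARM target (AArch64, or 32-bit ARM compiled with -mfpu=neon).

/* Hypothetical sketch of the reduction shown in the listing above; not XNNPACK code. */
#include <arm_neon.h>
#include <stdio.h>

/* Reduce four per-column int32x4_t accumulators (columns 0xC..0xF) to one
 * int32x4_t holding a single 32-bit sum per column, using the same
 * vadd_s32 + vpadd_s32 pattern as the generated qs8-gemm/qs8-igemm kernels. */
static int32x4_t reduce_columns_CDEF(int32x4_t vacc0x12, int32x4_t vacc0x13,
                                     int32x4_t vacc0x14, int32x4_t vacc0x15) {
  /* Fold each 4-lane accumulator to 2 lanes: {x0 + x2, x1 + x3}. */
  const int32x2_t vpsum0xC = vadd_s32(vget_low_s32(vacc0x12), vget_high_s32(vacc0x12));
  const int32x2_t vpsum0xD = vadd_s32(vget_low_s32(vacc0x13), vget_high_s32(vacc0x13));
  const int32x2_t vpsum0xE = vadd_s32(vget_low_s32(vacc0x14), vget_high_s32(vacc0x14));
  const int32x2_t vpsum0xF = vadd_s32(vget_low_s32(vacc0x15), vget_high_s32(vacc0x15));
  /* Pairwise add: {sum(col C), sum(col D)} and {sum(col E), sum(col F)}. */
  const int32x2_t vsum0xCD = vpadd_s32(vpsum0xC, vpsum0xD);
  const int32x2_t vsum0xEF = vpadd_s32(vpsum0xE, vpsum0xF);
  /* One vector with the four column sums. */
  return vcombine_s32(vsum0xCD, vsum0xEF);
}

int main(void) {
  const int32_t c[4] = {1, 2, 3, 4};      /* column 0xC partials, sum = 10  */
  const int32_t d[4] = {10, 20, 30, 40};  /* column 0xD partials, sum = 100 */
  const int32_t e[4] = {5, 5, 5, 5};      /* column 0xE partials, sum = 20  */
  const int32_t f[4] = {7, 0, -7, 1};     /* column 0xF partials, sum = 1   */
  const int32x4_t vacc0xCDEF =
      reduce_columns_CDEF(vld1q_s32(c), vld1q_s32(d), vld1q_s32(e), vld1q_s32(f));
  printf("%d %d %d %d\n",
         vgetq_lane_s32(vacc0xCDEF, 0), vgetq_lane_s32(vacc0xCDEF, 1),
         vgetq_lane_s32(vacc0xCDEF, 2), vgetq_lane_s32(vacc0xCDEF, 3));  /* prints 10 100 20 1 */
  return 0;
}

Note that vpadd_s32(a, b) returns {a0 + a1, b0 + b1}, which is why the two-lane partial sums for each column collapse to one lane apiece in the second step.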