/external/XNNPACK/src/qs8-gemm/gen/

D | 1x8c8-minmax-neon-mull-padal.c
    103  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_gemm_minmax_ukernel_1x8c8__neon_mull_padal() local
    106  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_1x8c8__neon_mull_padal()

D | 1x8c16-minmax-neon-mlal-padal.c
    111  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_gemm_minmax_ukernel_1x8c16__neon_mlal_padal() local
    114  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_1x8c16__neon_mlal_padal()

D | 1x8c8-minmax-neon-mlal-padal.c
    152  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_gemm_minmax_ukernel_1x8c8__neon_mlal_padal() local
    155  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_1x8c8__neon_mlal_padal()

D | 2x8c8-minmax-neon-mull-padal.c
    140  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_gemm_minmax_ukernel_2x8c8__neon_mull_padal() local
    143  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_2x8c8__neon_mull_padal()

D | 1x16c8-minmax-neon-mull-padal.c
    141  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_gemm_minmax_ukernel_1x16c8__neon_mull_padal() local
    144  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_1x16c8__neon_mull_padal()

D | 2x8c16-minmax-neon-mlal-padal.c
    156  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_gemm_minmax_ukernel_2x8c16__neon_mlal_padal() local
    159  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_2x8c16__neon_mlal_padal()

D | 1x16c16-minmax-neon-mlal-padal.c
    157  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_gemm_minmax_ukernel_1x16c16__neon_mlal_padal() local
    160  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_1x16c16__neon_mlal_padal()

D | 2x8c8-minmax-neon-mlal-padal.c
    215  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_gemm_minmax_ukernel_2x8c8__neon_mlal_padal() local
    218  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_2x8c8__neon_mlal_padal()

D | 3x8c8-minmax-neon-mull-padal.c
    177  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_gemm_minmax_ukernel_3x8c8__neon_mull_padal() local
    180  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_3x8c8__neon_mull_padal()

D | 3x8c16-minmax-neon-mlal-padal.c
    201  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_gemm_minmax_ukernel_3x8c16__neon_mlal_padal() local
    204  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_3x8c16__neon_mlal_padal()

D | 1x16c8-minmax-neon-mlal-padal.c
    230  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_gemm_minmax_ukernel_1x16c8__neon_mlal_padal() local
    233  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_1x16c8__neon_mlal_padal()

D | 4x8c8-minmax-neon-mull-padal.c
    214  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_gemm_minmax_ukernel_4x8c8__neon_mull_padal() local
    217  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_4x8c8__neon_mull_padal()
/external/XNNPACK/src/qs8-igemm/gen/

D | 1x8c8-minmax-neon-mull-padal.c
    117  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_igemm_minmax_ukernel_1x8c8__neon_mull_padal() local
    120  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_1x8c8__neon_mull_padal()

D | 1x8c16-minmax-neon-mlal-padal.c
    125  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_igemm_minmax_ukernel_1x8c16__neon_mlal_padal() local
    128  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_1x8c16__neon_mlal_padal()

D | 1x8c8-minmax-neon-mlal-padal.c
    166  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_igemm_minmax_ukernel_1x8c8__neon_mlal_padal() local
    169  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_1x8c8__neon_mlal_padal()

D | 2x8c8-minmax-neon-mull-padal.c
    156  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_igemm_minmax_ukernel_2x8c8__neon_mull_padal() local
    159  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_2x8c8__neon_mull_padal()

D | 1x16c8-minmax-neon-mull-padal.c
    155  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_igemm_minmax_ukernel_1x16c8__neon_mull_padal() local
    158  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_1x16c8__neon_mull_padal()

D | 2x8c16-minmax-neon-mlal-padal.c
    172  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_igemm_minmax_ukernel_2x8c16__neon_mlal_padal() local
    175  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_2x8c16__neon_mlal_padal()

D | 1x16c16-minmax-neon-mlal-padal.c
    171  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_igemm_minmax_ukernel_1x16c16__neon_mlal_padal() local
    174  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_1x16c16__neon_mlal_padal()

D | 2x8c8-minmax-neon-mlal-padal.c
    231  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_igemm_minmax_ukernel_2x8c8__neon_mlal_padal() local
    234  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_2x8c8__neon_mlal_padal()

D | 3x8c8-minmax-neon-mull-padal.c
    195  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_igemm_minmax_ukernel_3x8c8__neon_mull_padal() local
    198  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_3x8c8__neon_mull_padal()

D | 1x16c8-minmax-neon-mlal-padal.c
    244  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_igemm_minmax_ukernel_1x16c8__neon_mlal_padal() local
    247  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_1x16c8__neon_mlal_padal()

D | 3x8c16-minmax-neon-mlal-padal.c
    219  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_igemm_minmax_ukernel_3x8c16__neon_mlal_padal() local
    222  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_3x8c16__neon_mlal_padal()

D | 4x8c8-minmax-neon-mull-padal.c
    234  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_igemm_minmax_ukernel_4x8c8__neon_mull_padal() local
    237  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_4x8c8__neon_mull_padal()

D | 2x16c8-minmax-neon-mull-padal.c
    224  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));  in xnn_qs8_igemm_minmax_ukernel_2x16c8__neon_mull_padal() local
    227  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_2x16c8__neon_mull_padal()
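Every entry above is the same generated reduction step: each int32x4_t accumulator (vacc0x4, vacc0x5, ...) is folded to a single lane sum by adding its low and high halves (vadd_s32 over vget_low_s32/vget_high_s32) and then pairwise-adding neighbouring partial sums (vpadd_s32). Below is a minimal standalone sketch of that pattern, not XNNPACK source; the function name reduce4 and the test values are hypothetical, and only the intrinsics mirror the listed snippets.

// Hypothetical illustration (assumption: not taken from XNNPACK). Reduces four
// int32x4_t accumulators to one int32x4_t holding their four lane sums, using
// the vget_low/vget_high + vadd_s32 + vpadd_s32 pattern seen in the listing.
#include <arm_neon.h>
#include <stdio.h>

static int32x4_t reduce4(int32x4_t acc4, int32x4_t acc5, int32x4_t acc6, int32x4_t acc7) {
  // Fold each 4-lane accumulator to 2 lanes: low half + high half.
  const int32x2_t vpsum4 = vadd_s32(vget_low_s32(acc4), vget_high_s32(acc4));
  const int32x2_t vpsum5 = vadd_s32(vget_low_s32(acc5), vget_high_s32(acc5));
  const int32x2_t vpsum6 = vadd_s32(vget_low_s32(acc6), vget_high_s32(acc6));
  const int32x2_t vpsum7 = vadd_s32(vget_low_s32(acc7), vget_high_s32(acc7));
  // Pairwise-add adjacent lanes so each output lane holds one full accumulator sum.
  const int32x2_t vsum45 = vpadd_s32(vpsum4, vpsum5);  // { sum(acc4), sum(acc5) }
  const int32x2_t vsum67 = vpadd_s32(vpsum6, vpsum7);  // { sum(acc6), sum(acc7) }
  return vcombine_s32(vsum45, vsum67);                 // { sum4, sum5, sum6, sum7 }
}

int main(void) {
  const int32_t a4[4] = {1, 2, 3, 4};      // sums to 10
  const int32_t a5[4] = {5, 6, 7, 8};      // sums to 26
  const int32_t a6[4] = {1, 1, 1, 1};      // sums to 4
  const int32_t a7[4] = {10, 20, 30, 40};  // sums to 100
  int32_t out[4];
  vst1q_s32(out, reduce4(vld1q_s32(a4), vld1q_s32(a5), vld1q_s32(a6), vld1q_s32(a7)));
  printf("%d %d %d %d\n", out[0], out[1], out[2], out[3]);  // expected: 10 26 4 100
  return 0;
}

Note that vpadd_s32 is available on both ARMv7 NEON and AArch64; on AArch64 alone, a full-vector vaddvq_s32 could fold each accumulator in a single instruction.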