/external/XNNPACK/src/qs8-gemm/gen/
1x8c8-minmax-neon-mull-padal.c
     90  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_gemm_minmax_ukernel_1x8c8__neon_mull_padal() local
     93  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_gemm_minmax_ukernel_1x8c8__neon_mull_padal()
    106  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_1x8c8__neon_mull_padal() local
    108  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_gemm_minmax_ukernel_1x8c8__neon_mull_padal()
1x8c16-minmax-neon-mlal-padal.c
     98  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_gemm_minmax_ukernel_1x8c16__neon_mlal_padal() local
    101  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_gemm_minmax_ukernel_1x8c16__neon_mlal_padal()
    114  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_1x8c16__neon_mlal_padal() local
    116  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_gemm_minmax_ukernel_1x8c16__neon_mlal_padal()
1x8c8-minmax-neon-mlal-padal.c
    139  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_gemm_minmax_ukernel_1x8c8__neon_mlal_padal() local
    142  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_gemm_minmax_ukernel_1x8c8__neon_mlal_padal()
    155  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_1x8c8__neon_mlal_padal() local
    157  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_gemm_minmax_ukernel_1x8c8__neon_mlal_padal()
2x8c8-minmax-neon-mull-padal.c
    121  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_gemm_minmax_ukernel_2x8c8__neon_mull_padal() local
    128  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_gemm_minmax_ukernel_2x8c8__neon_mull_padal()
    143  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_2x8c8__neon_mull_padal() local
    145  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_gemm_minmax_ukernel_2x8c8__neon_mull_padal()
1x16c8-minmax-neon-mull-padal.c
    122  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_gemm_minmax_ukernel_1x16c8__neon_mull_padal() local
    129  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_gemm_minmax_ukernel_1x16c8__neon_mull_padal()
    144  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_1x16c8__neon_mull_padal() local
    146  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_gemm_minmax_ukernel_1x16c8__neon_mull_padal()
2x8c16-minmax-neon-mlal-padal.c
    137  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_gemm_minmax_ukernel_2x8c16__neon_mlal_padal() local
    144  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_gemm_minmax_ukernel_2x8c16__neon_mlal_padal()
    159  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_2x8c16__neon_mlal_padal() local
    161  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_gemm_minmax_ukernel_2x8c16__neon_mlal_padal()
1x16c16-minmax-neon-mlal-padal.c
    138  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_gemm_minmax_ukernel_1x16c16__neon_mlal_padal() local
    145  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_gemm_minmax_ukernel_1x16c16__neon_mlal_padal()
    160  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_1x16c16__neon_mlal_padal() local
    162  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_gemm_minmax_ukernel_1x16c16__neon_mlal_padal()
2x8c8-minmax-neon-mlal-padal.c
    196  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_gemm_minmax_ukernel_2x8c8__neon_mlal_padal() local
    203  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_gemm_minmax_ukernel_2x8c8__neon_mlal_padal()
    218  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_2x8c8__neon_mlal_padal() local
    220  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_gemm_minmax_ukernel_2x8c8__neon_mlal_padal()
3x8c8-minmax-neon-mull-padal.c
    152  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_gemm_minmax_ukernel_3x8c8__neon_mull_padal() local
    163  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_gemm_minmax_ukernel_3x8c8__neon_mull_padal()
    180  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_3x8c8__neon_mull_padal() local
    182  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_gemm_minmax_ukernel_3x8c8__neon_mull_padal()
3x8c16-minmax-neon-mlal-padal.c
    176  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_gemm_minmax_ukernel_3x8c16__neon_mlal_padal() local
    187  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_gemm_minmax_ukernel_3x8c16__neon_mlal_padal()
    204  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_3x8c16__neon_mlal_padal() local
    206  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_gemm_minmax_ukernel_3x8c16__neon_mlal_padal()
1x16c8-minmax-neon-mlal-padal.c
    211  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_gemm_minmax_ukernel_1x16c8__neon_mlal_padal() local
    218  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_gemm_minmax_ukernel_1x16c8__neon_mlal_padal()
    233  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_1x16c8__neon_mlal_padal() local
    235  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_gemm_minmax_ukernel_1x16c8__neon_mlal_padal()
4x8c8-minmax-neon-mull-padal.c
    183  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_gemm_minmax_ukernel_4x8c8__neon_mull_padal() local
    198  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_gemm_minmax_ukernel_4x8c8__neon_mull_padal()
    217  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_gemm_minmax_ukernel_4x8c8__neon_mull_padal() local
    219  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_gemm_minmax_ukernel_4x8c8__neon_mull_padal()
/external/XNNPACK/src/qs8-igemm/gen/
1x8c8-minmax-neon-mull-padal.c
    104  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_igemm_minmax_ukernel_1x8c8__neon_mull_padal() local
    107  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_igemm_minmax_ukernel_1x8c8__neon_mull_padal()
    120  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_1x8c8__neon_mull_padal() local
    122  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_igemm_minmax_ukernel_1x8c8__neon_mull_padal()
1x8c16-minmax-neon-mlal-padal.c
    112  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_igemm_minmax_ukernel_1x8c16__neon_mlal_padal() local
    115  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_igemm_minmax_ukernel_1x8c16__neon_mlal_padal()
    128  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_1x8c16__neon_mlal_padal() local
    130  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_igemm_minmax_ukernel_1x8c16__neon_mlal_padal()
1x8c8-minmax-neon-mlal-padal.c
    153  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_igemm_minmax_ukernel_1x8c8__neon_mlal_padal() local
    156  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_igemm_minmax_ukernel_1x8c8__neon_mlal_padal()
    169  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_1x8c8__neon_mlal_padal() local
    171  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_igemm_minmax_ukernel_1x8c8__neon_mlal_padal()
2x8c8-minmax-neon-mull-padal.c
    137  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_igemm_minmax_ukernel_2x8c8__neon_mull_padal() local
    144  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_igemm_minmax_ukernel_2x8c8__neon_mull_padal()
    159  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_2x8c8__neon_mull_padal() local
    161  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_igemm_minmax_ukernel_2x8c8__neon_mull_padal()
1x16c8-minmax-neon-mull-padal.c
    136  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_igemm_minmax_ukernel_1x16c8__neon_mull_padal() local
    143  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_igemm_minmax_ukernel_1x16c8__neon_mull_padal()
    158  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_1x16c8__neon_mull_padal() local
    160  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_igemm_minmax_ukernel_1x16c8__neon_mull_padal()
2x8c16-minmax-neon-mlal-padal.c
    153  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_igemm_minmax_ukernel_2x8c16__neon_mlal_padal() local
    160  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_igemm_minmax_ukernel_2x8c16__neon_mlal_padal()
    175  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_2x8c16__neon_mlal_padal() local
    177  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_igemm_minmax_ukernel_2x8c16__neon_mlal_padal()
1x16c16-minmax-neon-mlal-padal.c
    152  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_igemm_minmax_ukernel_1x16c16__neon_mlal_padal() local
    159  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_igemm_minmax_ukernel_1x16c16__neon_mlal_padal()
    174  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_1x16c16__neon_mlal_padal() local
    176  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_igemm_minmax_ukernel_1x16c16__neon_mlal_padal()
2x8c8-minmax-neon-mlal-padal.c
    212  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_igemm_minmax_ukernel_2x8c8__neon_mlal_padal() local
    219  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_igemm_minmax_ukernel_2x8c8__neon_mlal_padal()
    234  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_2x8c8__neon_mlal_padal() local
    236  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_igemm_minmax_ukernel_2x8c8__neon_mlal_padal()
3x8c8-minmax-neon-mull-padal.c
    170  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_igemm_minmax_ukernel_3x8c8__neon_mull_padal() local
    181  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_igemm_minmax_ukernel_3x8c8__neon_mull_padal()
    198  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_3x8c8__neon_mull_padal() local
    200  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_igemm_minmax_ukernel_3x8c8__neon_mull_padal()
1x16c8-minmax-neon-mlal-padal.c
    225  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_igemm_minmax_ukernel_1x16c8__neon_mlal_padal() local
    232  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_igemm_minmax_ukernel_1x16c8__neon_mlal_padal()
    247  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_1x16c8__neon_mlal_padal() local
    249  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_igemm_minmax_ukernel_1x16c8__neon_mlal_padal()
3x8c16-minmax-neon-mlal-padal.c
    194  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_igemm_minmax_ukernel_3x8c16__neon_mlal_padal() local
    205  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_igemm_minmax_ukernel_3x8c16__neon_mlal_padal()
    222  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_3x8c16__neon_mlal_padal() local
    224  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_igemm_minmax_ukernel_3x8c16__neon_mlal_padal()
4x8c8-minmax-neon-mull-padal.c
    203  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_igemm_minmax_ukernel_4x8c8__neon_mull_padal() local
    218  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_igemm_minmax_ukernel_4x8c8__neon_mull_padal()
    237  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_4x8c8__neon_mull_padal() local
    239  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_igemm_minmax_ukernel_4x8c8__neon_mull_padal()
2x16c8-minmax-neon-mull-padal.c
    193  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_igemm_minmax_ukernel_2x16c8__neon_mull_padal() local
    208  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_igemm_minmax_ukernel_2x16c8__neon_mull_padal()
    227  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  in xnn_qs8_igemm_minmax_ukernel_2x16c8__neon_mull_padal() local
    229  int32x4_t vacc0x4567 = vcombine_s32(vsum0x45, vsum0x67 );  in xnn_qs8_igemm_minmax_ukernel_2x16c8__neon_mull_padal()
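Every reference above is the same horizontal-reduction step: each output column keeps its own int32x4_t accumulator of partial sums, and the kernel folds pairs of accumulators with pairwise adds so that lane n of the combined vector ends up holding the full sum for column n. The two variants per file correspond to the AArch64 and ARMv7 builds of the kernel. Below is a minimal standalone sketch of that pattern; the reduce4() helper and the main() harness are illustrative only and not part of XNNPACK, while the intrinsic sequence mirrors the quoted lines (vpaddq_s32 on AArch64, vadd_s32/vpadd_s32/vcombine_s32 on ARMv7 NEON, where vpaddq_s32 is unavailable).

#include <arm_neon.h>
#include <stdio.h>

// Reduce four per-column accumulators (each holding 4 lanes of partial sums)
// into one int32x4_t whose lane n is the full horizontal sum of accumulator n.
// Mirrors the vsum0x45/vsum0x67 -> vacc0x4567 step in the listed kernels.
static int32x4_t reduce4(int32x4_t vacc0x4, int32x4_t vacc0x5,
                         int32x4_t vacc0x6, int32x4_t vacc0x7) {
#if defined(__aarch64__)
  // AArch64: vpaddq_s32 pairwise-adds across two full q-registers.
  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  // {s4a, s4b, s5a, s5b}
  const int32x4_t vsum0x67 = vpaddq_s32(vacc0x6, vacc0x7);  // {s6a, s6b, s7a, s7b}
  return vpaddq_s32(vsum0x45, vsum0x67);                    // {sum4, sum5, sum6, sum7}
#else
  // ARMv7 NEON: fold each accumulator to a d-register first, then pairwise-add.
  const int32x2_t vpsum0x4 = vadd_s32(vget_low_s32(vacc0x4), vget_high_s32(vacc0x4));
  const int32x2_t vpsum0x5 = vadd_s32(vget_low_s32(vacc0x5), vget_high_s32(vacc0x5));
  const int32x2_t vpsum0x6 = vadd_s32(vget_low_s32(vacc0x6), vget_high_s32(vacc0x6));
  const int32x2_t vpsum0x7 = vadd_s32(vget_low_s32(vacc0x7), vget_high_s32(vacc0x7));
  const int32x2_t vsum0x45 = vpadd_s32(vpsum0x4, vpsum0x5);  // {sum4, sum5}
  const int32x2_t vsum0x67 = vpadd_s32(vpsum0x6, vpsum0x7);  // {sum6, sum7}
  return vcombine_s32(vsum0x45, vsum0x67);                   // {sum4, sum5, sum6, sum7}
#endif
}

int main(void) {
  const int32_t a4[4] = {1, 2, 3, 4};      // sums to 10
  const int32_t a5[4] = {10, 20, 30, 40};  // sums to 100
  const int32_t a6[4] = {-1, -2, -3, -4};  // sums to -10
  const int32_t a7[4] = {5, 5, 5, 5};      // sums to 20
  const int32x4_t r = reduce4(vld1q_s32(a4), vld1q_s32(a5), vld1q_s32(a6), vld1q_s32(a7));
  int32_t out[4];
  vst1q_s32(out, r);
  printf("%d %d %d %d\n", out[0], out[1], out[2], out[3]);  // expect: 10 100 -10 20
  return 0;
}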