Lines Matching refs:vpaddq_s32
All 24 matches fall inside xnn_qs8_igemm_minmax_ukernel_4x8c16__neon_mlal_padal():

233    const int32x4_t vsum0x01 = vpaddq_s32(vacc0x0, vacc0x1);
234    const int32x4_t vsum0x23 = vpaddq_s32(vacc0x2, vacc0x3);
235    const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);
236    const int32x4_t vsum0x67 = vpaddq_s32(vacc0x6, vacc0x7);
237    const int32x4_t vsum1x01 = vpaddq_s32(vacc1x0, vacc1x1);
238    const int32x4_t vsum1x23 = vpaddq_s32(vacc1x2, vacc1x3);
239    const int32x4_t vsum1x45 = vpaddq_s32(vacc1x4, vacc1x5);
240    const int32x4_t vsum1x67 = vpaddq_s32(vacc1x6, vacc1x7);
241    const int32x4_t vsum2x01 = vpaddq_s32(vacc2x0, vacc2x1);
242    const int32x4_t vsum2x23 = vpaddq_s32(vacc2x2, vacc2x3);
243    const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);
244    const int32x4_t vsum2x67 = vpaddq_s32(vacc2x6, vacc2x7);
245    const int32x4_t vsum3x01 = vpaddq_s32(vacc3x0, vacc3x1);
246    const int32x4_t vsum3x23 = vpaddq_s32(vacc3x2, vacc3x3);
247    const int32x4_t vsum3x45 = vpaddq_s32(vacc3x4, vacc3x5);
248    const int32x4_t vsum3x67 = vpaddq_s32(vacc3x6, vacc3x7);

249    int32x4_t vacc0x0123 = vpaddq_s32(vsum0x01, vsum0x23);
250    int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);
251    int32x4_t vacc1x0123 = vpaddq_s32(vsum1x01, vsum1x23);
252    int32x4_t vacc1x4567 = vpaddq_s32(vsum1x45, vsum1x67);
253    int32x4_t vacc2x0123 = vpaddq_s32(vsum2x01, vsum2x23);
254    int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);
255    int32x4_t vacc3x0123 = vpaddq_s32(vsum3x01, vsum3x23);
256    int32x4_t vacc3x4567 = vpaddq_s32(vsum3x45, vsum3x67);
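
In this kernel each vaccRxN register accumulates partial products for one output channel N of row R, with the running total spread across the four int32 lanes. The two rounds of vpaddq_s32 above form a pairwise-reduction tree that collapses the eight per-channel accumulators of each row into two vectors, vaccRx0123 and vaccRx4567, in which every lane holds one finished per-channel sum. The standalone sketch below is my own illustration rather than XNNPACK code; it assumes an AArch64 toolchain (vpaddq_s32 is an A64-only intrinsic) and shows the same two-stage reduction on four sample accumulators.

/* Illustrative sketch, not part of XNNPACK. Build on an AArch64 host,
 * e.g. gcc -O2 vpaddq_demo.c -o vpaddq_demo (Advanced SIMD is mandatory
 * on AArch64, so no extra flags are needed). */
#include <arm_neon.h>
#include <stdio.h>

int main(void) {
  const int32_t a0[4] = {1, 2, 3, 4};      /* horizontal sum = 10  */
  const int32_t a1[4] = {10, 20, 30, 40};  /* horizontal sum = 100 */
  const int32_t a2[4] = {5, 5, 5, 5};      /* horizontal sum = 20  */
  const int32_t a3[4] = {-1, 1, -2, 2};    /* horizontal sum = 0   */

  const int32x4_t vacc0 = vld1q_s32(a0);
  const int32x4_t vacc1 = vld1q_s32(a1);
  const int32x4_t vacc2 = vld1q_s32(a2);
  const int32x4_t vacc3 = vld1q_s32(a3);

  /* Stage 1: pairwise add adjacent lanes of two accumulators.
   * vsum01 = {a0[0]+a0[1], a0[2]+a0[3], a1[0]+a1[1], a1[2]+a1[3]} */
  const int32x4_t vsum01 = vpaddq_s32(vacc0, vacc1);
  const int32x4_t vsum23 = vpaddq_s32(vacc2, vacc3);

  /* Stage 2: a second pairwise add finishes the reduction.
   * vacc0123 = {sum(a0), sum(a1), sum(a2), sum(a3)} */
  const int32x4_t vacc0123 = vpaddq_s32(vsum01, vsum23);

  int32_t out[4];
  vst1q_s32(out, vacc0123);
  printf("%d %d %d %d\n", out[0], out[1], out[2], out[3]);  /* expect: 10 100 20 0 */
  return 0;
}

The kernel applies exactly this pattern per row, just with eight accumulators feeding two result vectors instead of four feeding one.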