
Lines matching refs: vpaddq_s32

All 24 matches are in xnn_qs8_igemm_minmax_ukernel_2x16c8__neon_mull_padal():

191  const int32x4_t vsum0x01 = vpaddq_s32(vacc0x0, vacc0x1);
192  const int32x4_t vsum0x23 = vpaddq_s32(vacc0x2, vacc0x3);
193  const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);
194  const int32x4_t vsum0x67 = vpaddq_s32(vacc0x6, vacc0x7);
195  const int32x4_t vsum0x89 = vpaddq_s32(vacc0x8, vacc0x9);
196  const int32x4_t vsum0xAB = vpaddq_s32(vacc0x10, vacc0x11);
197  const int32x4_t vsum0xCD = vpaddq_s32(vacc0x12, vacc0x13);
198  const int32x4_t vsum0xEF = vpaddq_s32(vacc0x14, vacc0x15);
199  const int32x4_t vsum1x01 = vpaddq_s32(vacc1x0, vacc1x1);
200  const int32x4_t vsum1x23 = vpaddq_s32(vacc1x2, vacc1x3);
201  const int32x4_t vsum1x45 = vpaddq_s32(vacc1x4, vacc1x5);
202  const int32x4_t vsum1x67 = vpaddq_s32(vacc1x6, vacc1x7);
203  const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);
204  const int32x4_t vsum1xAB = vpaddq_s32(vacc1x10, vacc1x11);
205  const int32x4_t vsum1xCD = vpaddq_s32(vacc1x12, vacc1x13);
206  const int32x4_t vsum1xEF = vpaddq_s32(vacc1x14, vacc1x15);
207  int32x4_t vacc0x0123 = vpaddq_s32(vsum0x01, vsum0x23);
208  int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);
209  int32x4_t vacc0x89AB = vpaddq_s32(vsum0x89, vsum0xAB);
210  int32x4_t vacc0xCDEF = vpaddq_s32(vsum0xCD, vsum0xEF);
211  int32x4_t vacc1x0123 = vpaddq_s32(vsum1x01, vsum1x23);
212  int32x4_t vacc1x4567 = vpaddq_s32(vsum1x45, vsum1x67);
213  int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);
214  int32x4_t vacc1xCDEF = vpaddq_s32(vsum1xCD, vsum1xEF);
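For context, vpaddq_s32 is the AArch64 pairwise add (a single ADDP instruction): lanes 0-1 of the result are the adjacent-lane sums of the first operand and lanes 2-3 are those of the second. The kernel above applies it in two rounds to collapse sixteen per-column accumulators per row into four int32x4_t vectors, one column total per lane. The sketch below reproduces that reduction pattern on standalone data; it is not the XNNPACK source, and the variable names and values are illustrative assumptions.

/* Minimal sketch of the two-round vpaddq_s32 reduction (AArch64 only). */
#include <arm_neon.h>
#include <stdio.h>

int main(void) {
  /* Pretend these hold lane-wise partial sums for columns 0..3.
   * Each accumulator's four lanes sum to the column total (10, 20, 30, 40). */
  const int32_t c0[4] = { 1,  2, 3, 4};
  const int32_t c1[4] = { 5,  5, 5, 5};
  const int32_t c2[4] = {10, 10, 5, 5};
  const int32_t c3[4] = {40,  0, 0, 0};
  const int32x4_t vacc0 = vld1q_s32(c0);
  const int32x4_t vacc1 = vld1q_s32(c1);
  const int32x4_t vacc2 = vld1q_s32(c2);
  const int32x4_t vacc3 = vld1q_s32(c3);

  /* Round 1: adjacent-lane sums, interleaving the two operands:
   * vsum01 = { acc0[0]+acc0[1], acc0[2]+acc0[3],
   *            acc1[0]+acc1[1], acc1[2]+acc1[3] } */
  const int32x4_t vsum01 = vpaddq_s32(vacc0, vacc1);
  const int32x4_t vsum23 = vpaddq_s32(vacc2, vacc3);

  /* Round 2: collapse those halves, leaving one full sum per lane:
   * vacc0123 = { sum(acc0), sum(acc1), sum(acc2), sum(acc3) } */
  const int32x4_t vacc0123 = vpaddq_s32(vsum01, vsum23);

  int32_t out[4];
  vst1q_s32(out, vacc0123);
  printf("%d %d %d %d\n", out[0], out[1], out[2], out[3]);  /* prints: 10 20 30 40 */
  return 0;
}

Compiled with an AArch64 toolchain, the same pattern scaled to sixteen accumulators per row yields the vacc*x0123 ... vacc*xCDEF vectors seen at lines 207-214 above.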