• Home
  • Raw
  • Download

Lines Matching refs:vpaddq_s32

175     const int32x4_t vsum0x01 = vpaddq_s32(vacc0x0, vacc0x1);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
176     const int32x4_t vsum0x23 = vpaddq_s32(vacc0x2, vacc0x3);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
177     const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
178     const int32x4_t vsum0x67 = vpaddq_s32(vacc0x6, vacc0x7);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
179     const int32x4_t vsum0x89 = vpaddq_s32(vacc0x8, vacc0x9);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
180     const int32x4_t vsum0xAB = vpaddq_s32(vacc0x10, vacc0x11);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
181     const int32x4_t vsum0xCD = vpaddq_s32(vacc0x12, vacc0x13);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
182     const int32x4_t vsum0xEF = vpaddq_s32(vacc0x14, vacc0x15);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
183     const int32x4_t vsum1x01 = vpaddq_s32(vacc1x0, vacc1x1);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
184     const int32x4_t vsum1x23 = vpaddq_s32(vacc1x2, vacc1x3);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
185     const int32x4_t vsum1x45 = vpaddq_s32(vacc1x4, vacc1x5);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
186     const int32x4_t vsum1x67 = vpaddq_s32(vacc1x6, vacc1x7);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
187     const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
188     const int32x4_t vsum1xAB = vpaddq_s32(vacc1x10, vacc1x11);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
189     const int32x4_t vsum1xCD = vpaddq_s32(vacc1x12, vacc1x13);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
190     const int32x4_t vsum1xEF = vpaddq_s32(vacc1x14, vacc1x15);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
191     int32x4_t vacc0x0123 = vpaddq_s32(vsum0x01, vsum0x23);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
192     int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
193     int32x4_t vacc0x89AB = vpaddq_s32(vsum0x89, vsum0xAB);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
194     int32x4_t vacc0xCDEF = vpaddq_s32(vsum0xCD, vsum0xEF);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
195     int32x4_t vacc1x0123 = vpaddq_s32(vsum1x01, vsum1x23);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
196     int32x4_t vacc1x4567 = vpaddq_s32(vsum1x45, vsum1x67);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
197     int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()
198     int32x4_t vacc1xCDEF = vpaddq_s32(vsum1xCD, vsum1xEF);  in xnn_qs8_gemm_minmax_ukernel_2x16c8__neon_mull_padal()