Lines Matching refs:vpaddq_s32

544     const int32x4_t vsum0x01 = vpaddq_s32(vacc0x0, vacc0x1);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
545     const int32x4_t vsum0x23 = vpaddq_s32(vacc0x2, vacc0x3);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
546     const int32x4_t vsum0x45 = vpaddq_s32(vacc0x4, vacc0x5);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
547     const int32x4_t vsum0x67 = vpaddq_s32(vacc0x6, vacc0x7);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
548     const int32x4_t vsum0x89 = vpaddq_s32(vacc0x8, vacc0x9);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
549     const int32x4_t vsum0xAB = vpaddq_s32(vacc0x10, vacc0x11);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
550     const int32x4_t vsum0xCD = vpaddq_s32(vacc0x12, vacc0x13);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
551     const int32x4_t vsum0xEF = vpaddq_s32(vacc0x14, vacc0x15);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
552     const int32x4_t vsum1x01 = vpaddq_s32(vacc1x0, vacc1x1);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
553     const int32x4_t vsum1x23 = vpaddq_s32(vacc1x2, vacc1x3);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
554     const int32x4_t vsum1x45 = vpaddq_s32(vacc1x4, vacc1x5);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
555     const int32x4_t vsum1x67 = vpaddq_s32(vacc1x6, vacc1x7);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
556     const int32x4_t vsum1x89 = vpaddq_s32(vacc1x8, vacc1x9);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
557     const int32x4_t vsum1xAB = vpaddq_s32(vacc1x10, vacc1x11);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
558     const int32x4_t vsum1xCD = vpaddq_s32(vacc1x12, vacc1x13);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
559     const int32x4_t vsum1xEF = vpaddq_s32(vacc1x14, vacc1x15);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
560     const int32x4_t vsum2x01 = vpaddq_s32(vacc2x0, vacc2x1);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
561     const int32x4_t vsum2x23 = vpaddq_s32(vacc2x2, vacc2x3);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
562     const int32x4_t vsum2x45 = vpaddq_s32(vacc2x4, vacc2x5);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
563     const int32x4_t vsum2x67 = vpaddq_s32(vacc2x6, vacc2x7);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
564     const int32x4_t vsum2x89 = vpaddq_s32(vacc2x8, vacc2x9);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
565     const int32x4_t vsum2xAB = vpaddq_s32(vacc2x10, vacc2x11);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
566     const int32x4_t vsum2xCD = vpaddq_s32(vacc2x12, vacc2x13);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
567     const int32x4_t vsum2xEF = vpaddq_s32(vacc2x14, vacc2x15);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
568     const int32x4_t vsum3x01 = vpaddq_s32(vacc3x0, vacc3x1);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
569     const int32x4_t vsum3x23 = vpaddq_s32(vacc3x2, vacc3x3);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
570     const int32x4_t vsum3x45 = vpaddq_s32(vacc3x4, vacc3x5);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
571     const int32x4_t vsum3x67 = vpaddq_s32(vacc3x6, vacc3x7);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
572     const int32x4_t vsum3x89 = vpaddq_s32(vacc3x8, vacc3x9);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
573     const int32x4_t vsum3xAB = vpaddq_s32(vacc3x10, vacc3x11);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
574     const int32x4_t vsum3xCD = vpaddq_s32(vacc3x12, vacc3x13);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
575     const int32x4_t vsum3xEF = vpaddq_s32(vacc3x14, vacc3x15);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
576     int32x4_t vacc0x0123 = vpaddq_s32(vsum0x01, vsum0x23);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
577     int32x4_t vacc0x4567 = vpaddq_s32(vsum0x45, vsum0x67);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
578     int32x4_t vacc0x89AB = vpaddq_s32(vsum0x89, vsum0xAB);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
579     int32x4_t vacc0xCDEF = vpaddq_s32(vsum0xCD, vsum0xEF);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
580     int32x4_t vacc1x0123 = vpaddq_s32(vsum1x01, vsum1x23);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
581     int32x4_t vacc1x4567 = vpaddq_s32(vsum1x45, vsum1x67);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
582     int32x4_t vacc1x89AB = vpaddq_s32(vsum1x89, vsum1xAB);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
583     int32x4_t vacc1xCDEF = vpaddq_s32(vsum1xCD, vsum1xEF);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
584     int32x4_t vacc2x0123 = vpaddq_s32(vsum2x01, vsum2x23);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
585     int32x4_t vacc2x4567 = vpaddq_s32(vsum2x45, vsum2x67);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
586     int32x4_t vacc2x89AB = vpaddq_s32(vsum2x89, vsum2xAB);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
587     int32x4_t vacc2xCDEF = vpaddq_s32(vsum2xCD, vsum2xEF);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
588     int32x4_t vacc3x0123 = vpaddq_s32(vsum3x01, vsum3x23);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
589     int32x4_t vacc3x4567 = vpaddq_s32(vsum3x45, vsum3x67);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
590     int32x4_t vacc3x89AB = vpaddq_s32(vsum3x89, vsum3xAB);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
591     int32x4_t vacc3xCDEF = vpaddq_s32(vsum3xCD, vsum3xEF);  in xnn_qs8_igemm_minmax_ukernel_4x16c8__neon_mlal_padal()
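
All of the matches above form a two-stage pairwise reduction at the tail of the kernel: each vaccRxN vector holds the partial products for output column N of row R spread across its four lanes, and vpaddq_s32(a, b) returns { a0+a1, a2+a3, b0+b1, b2+b3 }, so two rounds of pairwise adds collapse four such accumulators into one int32x4_t whose lane i is the full horizontal sum of accumulator i. The snippet below is a minimal sketch of that pattern only; the helper name hsum4_s32 is hypothetical and not part of XNNPACK, and it assumes an AArch64 target where vpaddq_s32 is available.

    #include <arm_neon.h>

    /* Hypothetical helper (not in XNNPACK): horizontally sum four int32x4_t
     * accumulators into one vector, lane i = total of acc(i), using the same
     * two-level vpaddq_s32 tree seen in the matched lines above.
     * vpaddq_s32(a, b) = { a[0]+a[1], a[2]+a[3], b[0]+b[1], b[2]+b[3] } */
    static inline int32x4_t hsum4_s32(int32x4_t acc0, int32x4_t acc1,
                                      int32x4_t acc2, int32x4_t acc3) {
      const int32x4_t sum01 = vpaddq_s32(acc0, acc1);  /* pair sums of acc0 | acc1 */
      const int32x4_t sum23 = vpaddq_s32(acc2, acc3);  /* pair sums of acc2 | acc3 */
      return vpaddq_s32(sum01, sum23);                 /* full sums of acc0..acc3 */
    }

The kernel applies this pattern sixteen times (four groups of four accumulators per row, four rows), producing the vaccRx0123, vaccRx4567, vaccRx89AB, and vaccRxCDEF vectors that carry the 4x16 output tile into requantization.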