Lines Matching refs:vacc1x0123
51 int32x4_t vacc1x0123 = vacc0x0123; in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup() local
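Both rows of the 2x8 tile start from the same accumulator value, which is why row 1's declaration above is just a copy of row 0's. A minimal sketch of that initialization, assuming (as in XNNPACK's weight packing) that the packed-weights stream `w` begins with the per-channel int32 bias; the helper name is hypothetical:

    #include <arm_neon.h>
    #include <stdint.h>

    /* Sketch: load the int32 bias for output channels 0..3 once from the
     * packed-weights stream, then share it across both rows of the tile. */
    static void init_accumulators(const int32_t* w,
                                  int32x4_t* vacc0x0123, int32x4_t* vacc1x0123) {
      *vacc0x0123 = vld1q_s32(w);   /* bias for output channels 0..3 */
      *vacc1x0123 = *vacc0x0123;    /* row 1 reuses row 0's bias */
    }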
65 vacc1x0123 = vaddw_s16(vacc1x0123, vget_low_s16(vprod1x01234567c0)); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
73 vacc1x0123 = vaddw_s16(vacc1x0123, vget_low_s16(vprod1x01234567c1)); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
81 vacc1x0123 = vaddw_s16(vacc1x0123, vget_low_s16(vprod1x01234567c2)); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
89 vacc1x0123 = vaddw_s16(vacc1x0123, vget_low_s16(vprod1x01234567c3)); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
97 vacc1x0123 = vaddw_s16(vacc1x0123, vget_low_s16(vprod1x01234567c4)); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
105 vacc1x0123 = vaddw_s16(vacc1x0123, vget_low_s16(vprod1x01234567c5)); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
113 vacc1x0123 = vaddw_s16(vacc1x0123, vget_low_s16(vprod1x01234567c6)); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
121 vacc1x0123 = vaddw_s16(vacc1x0123, vget_low_s16(vprod1x01234567c7)); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
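Source lines 65-121 above are the unrolled main loop. Each `vprod1x01234567cN` is the widening s8 product of one broadcast element of row 1's A vector with an 8-wide B column; only its low half is accumulated into `vacc1x0123`, while the high half goes into `vacc1x4567`. A sketch of one such step, with names mirrored from the listing; the helper function itself is illustrative:

    #include <arm_neon.h>

    /* Sketch of one k-step of the mull_addw_dup pattern for row 1:
     * broadcast A element 0, widen-multiply against the 8 B values,
     * then widen-accumulate the s16 product into two s32 accumulators. */
    static void accumulate_c0(int8x8_t va1, int8x8_t vb01234567c0,
                              int32x4_t* vacc1x0123, int32x4_t* vacc1x4567) {
      const int16x8_t vprod1x01234567c0 =
          vmull_s8(vb01234567c0, vdup_lane_s8(va1, 0));
      *vacc1x0123 = vaddw_s16(*vacc1x0123, vget_low_s16(vprod1x01234567c0));
      *vacc1x4567 = vaddw_s16(*vacc1x4567, vget_high_s16(vprod1x01234567c0));
    }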
136 vacc1x0123 = vaddw_s16(vacc1x0123, vget_low_s16(vprod1x01234567c0)); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
146 vacc1x0123 = vaddw_s16(vacc1x0123, vget_low_s16(vprod1x01234567c1)); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
156 vacc1x0123 = vaddw_s16(vacc1x0123, vget_low_s16(vprod1x01234567c2)); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
166 vacc1x0123 = vaddw_s16(vacc1x0123, vget_low_s16(vprod1x01234567c3)); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
176 vacc1x0123 = vaddw_s16(vacc1x0123, vget_low_s16(vprod1x01234567c4)); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
186 vacc1x0123 = vaddw_s16(vacc1x0123, vget_low_s16(vprod1x01234567c5)); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
196 vacc1x0123 = vaddw_s16(vacc1x0123, vget_low_s16(vprod1x01234567c6)); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
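Source lines 136-196 repeat the same accumulation for only c0 through c6: this is the remainder path for the final k < 8 iterations, where each step past c0 runs behind a deeper guard, so c7 never appears in this block. A sketch of the first two steps; the guard shape and pointer handling are assumptions modeled on the main loop:

    #include <arm_neon.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Sketch of the remainder path: handle the last k (1..7) elements,
     * one guarded c-step per leftover element. */
    static void remainder(size_t k, int8x8_t va1, const int8_t* w,
                          int32x4_t* vacc1x0123, int32x4_t* vacc1x4567) {
      const int8x8_t vb01234567c0 = vld1_s8(w); w += 8;
      const int16x8_t vprod1x01234567c0 =
          vmull_s8(vb01234567c0, vdup_lane_s8(va1, 0));
      *vacc1x0123 = vaddw_s16(*vacc1x0123, vget_low_s16(vprod1x01234567c0));
      *vacc1x4567 = vaddw_s16(*vacc1x4567, vget_high_s16(vprod1x01234567c0));
      if (k >= 2 * sizeof(int8_t)) {
        const int8x8_t vb01234567c1 = vld1_s8(w); w += 8;
        const int16x8_t vprod1x01234567c1 =
            vmull_s8(vb01234567c1, vdup_lane_s8(va1, 1));
        *vacc1x0123 = vaddw_s16(*vacc1x0123, vget_low_s16(vprod1x01234567c1));
        *vacc1x4567 = vaddw_s16(*vacc1x4567, vget_high_s16(vprod1x01234567c1));
        /* ... and so on through c6, each behind a deeper k guard ... */
      }
    }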
208 vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
215 vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
220 vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
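Source lines 208-220 are the gemmlowp-style requantization: a saturating rounding doubling high multiply by the fixed-point multiplier, a sign fix-up so the subsequent rounding shift rounds halfway cases away from zero instead of toward positive infinity, then the rounding right shift itself. A self-contained sketch of the same three intrinsics; the `multiplier`/`shift` parameter handling is an assumption:

    #include <arm_neon.h>
    #include <stdint.h>

    /* Sketch of the requantization step. `shift` is assumed to be the
     * positive right-shift amount from the quantization parameters. */
    static int32x4_t requantize_q31(int32x4_t vacc,
                                    int32_t multiplier, int32_t shift) {
      const int32x4_t vmultiplier = vdupq_n_s32(multiplier);
      const int32x4_t vright_shift = vdupq_n_s32(-shift);  /* vrshlq shifts right for negative counts */
      /* All-ones lanes where the shift is zero, so the fix-up is skipped there. */
      const int32x4_t vzero_shift_mask =
          vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
      /* Saturating (vacc * multiplier * 2 + 2^31) >> 32. */
      vacc = vqrdmulhq_s32(vacc, vmultiplier);
      /* Add the sign bit (i.e. subtract 1 from negative lanes) before rounding. */
      vacc = vsraq_n_s32(vacc, vbicq_s32(vacc, vzero_shift_mask), 31);
      return vrshlq_s32(vacc, vright_shift);  /* rounding right shift */
    }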
226 const int16x8_t vacc1x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc1x0123), vacc1x4567), voutput_zero_point); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
231 const int16x8_t vacc1x01234567 = vqaddq_s16(vcombine_s16(vqmovn_s32(vacc1x0123), vqmovn_s32(vacc1x4567)), voutput_zero_point); in xnn_qs8_gemm_minmax_ukernel_2x8__neon_mull_addw_dup()
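Source lines 226 and 231 are the AArch64 (`vqmovn_high_s32`) and generic (`vcombine_s16` of two `vqmovn_s32`) arms of the same narrowing step. A sketch of the generic arm, continued through the final int8 narrowing and clamp; the `voutput_*` parameters stand in for the kernel's output parameters:

    #include <arm_neon.h>
    #include <stdint.h>

    /* Sketch: saturate-narrow two s32x4 accumulators to one s16x8 vector,
     * add the output zero point, then saturate-narrow again to s8 and clamp. */
    static int8x8_t narrow_row(int32x4_t vacc1x0123, int32x4_t vacc1x4567,
                               int16x8_t voutput_zero_point,
                               int8x8_t voutput_min, int8x8_t voutput_max) {
      const int16x8_t vacc1x01234567 = vqaddq_s16(
          vcombine_s16(vqmovn_s32(vacc1x0123), vqmovn_s32(vacc1x4567)),
          voutput_zero_point);
      int8x8_t vout = vqmovn_s16(vacc1x01234567);  /* saturate to int8 */
      vout = vmax_s8(vout, voutput_min);
      vout = vmin_s8(vout, voutput_max);
      return vout;
    }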