Lines Matching refs:va0x0

71 int8x8_t va0x0 = vld1_s8(a0); a0 += 8; in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal() local
85 int16x8_t vprod0x0123c0 = vmull_s8(vb0123c0x0, va0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
92 int16x8_t vprod0x4567c0 = vmull_s8(vb4567c0x0, va0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
99 va0x0 = vext_s8(va0x0, va0x0, 2); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
103 int16x8_t vprod0x0123c1 = vmull_s8(vb0123c1x0, va0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
110 int16x8_t vprod0x4567c1 = vmull_s8(vb4567c1x0, va0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
117 va0x0 = vext_s8(va0x0, va0x0, 2); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
121 int16x8_t vprod0x0123c2 = vmull_s8(vb0123c2x0, va0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
128 int16x8_t vprod0x4567c2 = vmull_s8(vb4567c2x0, va0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
135 va0x0 = vext_s8(va0x0, va0x0, 2); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
139 int16x8_t vprod0x0123c3 = vmull_s8(vb0123c3x0, va0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
146 int16x8_t vprod0x4567c3 = vmull_s8(vb4567c3x0, va0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
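The matches above (source lines 71–146) are the main-loop references: eight int8 activations are loaded once into va0x0 (line 71), multiplied against four pre-packed weight vectors (c0..c3) with the widening vmull_s8, and rotated by one 2-byte channel pair with vext_s8 between groups. A minimal standalone sketch of that rotation, with a hypothetical input array a0 that is not taken from the kernel, might look like this:

#include <arm_neon.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
  const int8_t a0[8] = {0, 1, 2, 3, 4, 5, 6, 7};
  int8x8_t va0x0 = vld1_s8(a0);  /* load 8 activations, as on line 71 */
  for (int c = 0; c < 4; c++) {
    int8_t lanes[8];
    vst1_s8(lanes, va0x0);
    printf("c%d:", c);
    for (int i = 0; i < 8; i++) printf(" %d", lanes[i]);
    printf("\n");
    /* rotate by one 2-element pair, as in `va0x0 = vext_s8(va0x0, va0x0, 2);` */
    va0x0 = vext_s8(va0x0, va0x0, 2);
  }
  return 0;
}

Built for AArch64 this prints 0 1 2 3 4 5 6 7, then 2 3 4 5 6 7 0 1, and so on: each c-group sees the activation pairs shifted by one position, which appears to be the "s4" shuffle referred to in the 2x8c2s4 kernel name.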
157 int8x8_t va0x0 = vld1_s8(a0); a0 += 8; in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal() local
169 int16x8_t vprod0x0123c0 = vmull_s8(vb0123c0x0, va0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
173 int16x8_t vprod0x4567c0 = vmull_s8(vb4567c0x0, va0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
177 va0x0 = vext_s8(va0x0, va0x0, 2); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
179 int16x8_t vprod0x0123c1 = vmull_s8(vb0123c1x0, va0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
183 int16x8_t vprod0x4567c1 = vmull_s8(vb4567c1x0, va0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
187 va0x0 = vext_s8(va0x0, va0x0, 2); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
189 int16x8_t vprod0x0123c2 = vmull_s8(vb0123c2x0, va0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
193 int16x8_t vprod0x4567c2 = vmull_s8(vb4567c2x0, va0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
197 va0x0 = vext_s8(va0x0, va0x0, 2); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
199 int16x8_t vprod0x0123c3 = vmull_s8(vb0123c3x0, va0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
203 int16x8_t vprod0x4567c3 = vmull_s8(vb4567c3x0, va0x0); in xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2s4__neonv8_mlal()
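The second group of matches (source lines 157–203), starting from a fresh va0x0 load, is presumably the trailing-K path that uses only vmull_s8, without the paired MLAL half. The accumulation step is not among the matched lines; a hedged sketch of how one c-group's products could be folded into 32-bit accumulators is shown below, assuming weights are packed as 4 output channels x 2 input channels per vb vector so that adjacent int16 lanes belong to the same output channel (an illustrative assumption, not copied from the kernel source):

#include <arm_neon.h>

/* Hedged sketch: fold one c-group's products into four int32 accumulators.
   The vmull_s8 call mirrors the matched lines; vpadalq_s16 and the assumed
   packing of vb0123c0x0 are illustrative, not taken from the kernel. */
static inline int32x4_t accumulate_c0_group(int32x4_t vacc0x0123,
                                            int8x8_t va0x0,
                                            int8x8_t vb0123c0x0) {
  int16x8_t vprod0x0123c0 = vmull_s8(vb0123c0x0, va0x0);  /* 8-bit x 8-bit -> 16-bit */
  return vpadalq_s16(vacc0x0123, vprod0x0123c0);          /* pairwise add-accumulate to int32 */
}

Under that packing assumption, each pairwise add collapses the two per-input-channel products for one output channel, so vacc0x0123 holds the running int32 sums for output channels 0..3.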