Lines Matching refs:int64x2_t
39 const int64x2_t vleft_shift = vld1q_dup_s64(&params->neon.left_shift); in xnn_q8_avgpool_ukernel_up9__neon()
108 const int64x2_t vproduct01 = vmull_s32(vget_low_s32(vacc_lo), vget_low_s32(vmultiplier)); in xnn_q8_avgpool_ukernel_up9__neon()
109 const int64x2_t vproduct23 = vmull_high_s32(vacc_lo, vmultiplier); in xnn_q8_avgpool_ukernel_up9__neon()
110 const int64x2_t vproduct45 = vmull_s32(vget_low_s32(vacc_hi), vget_low_s32(vmultiplier)); in xnn_q8_avgpool_ukernel_up9__neon()
111 const int64x2_t vproduct67 = vmull_high_s32(vacc_hi, vmultiplier); in xnn_q8_avgpool_ukernel_up9__neon()
113 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_q8_avgpool_ukernel_up9__neon()
114 const int64x2_t vadjusted_product23 = vaddw_high_s32(vproduct23, vneg_mask_lo); in xnn_q8_avgpool_ukernel_up9__neon()
115 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_q8_avgpool_ukernel_up9__neon()
116 const int64x2_t vadjusted_product67 = vaddw_high_s32(vproduct67, vneg_mask_hi); in xnn_q8_avgpool_ukernel_up9__neon()
118 const int64x2_t vproduct01 = vmull_s32(vget_low_s32(vacc_lo), vmultiplier); in xnn_q8_avgpool_ukernel_up9__neon()
119 const int64x2_t vproduct23 = vmull_s32(vget_high_s32(vacc_lo), vmultiplier); in xnn_q8_avgpool_ukernel_up9__neon()
120 const int64x2_t vproduct45 = vmull_s32(vget_low_s32(vacc_hi), vmultiplier); in xnn_q8_avgpool_ukernel_up9__neon()
121 const int64x2_t vproduct67 = vmull_s32(vget_high_s32(vacc_hi), vmultiplier); in xnn_q8_avgpool_ukernel_up9__neon()
123 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_q8_avgpool_ukernel_up9__neon()
124 const int64x2_t vadjusted_product23 = vaddw_s32(vproduct23, vget_high_s32(vneg_mask_lo)); in xnn_q8_avgpool_ukernel_up9__neon()
125 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_q8_avgpool_ukernel_up9__neon()
126 const int64x2_t vadjusted_product67 = vaddw_s32(vproduct67, vget_high_s32(vneg_mask_hi)); in xnn_q8_avgpool_ukernel_up9__neon()
129 const int64x2_t vscaled_acc01 = vrshlq_s64(vadjusted_product01, vleft_shift); in xnn_q8_avgpool_ukernel_up9__neon()
130 const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift); in xnn_q8_avgpool_ukernel_up9__neon()
131 const int64x2_t vscaled_acc45 = vrshlq_s64(vadjusted_product45, vleft_shift); in xnn_q8_avgpool_ukernel_up9__neon()
132 const int64x2_t vscaled_acc67 = vrshlq_s64(vadjusted_product67, vleft_shift); in xnn_q8_avgpool_ukernel_up9__neon()
181 const int64x2_t vproduct01 = vmull_s32(vget_low_s32(vacc_lo), vget_low_s32(vmultiplier)); in xnn_q8_avgpool_ukernel_up9__neon()
182 const int64x2_t vproduct23 = vmull_high_s32(vacc_lo, vmultiplier); in xnn_q8_avgpool_ukernel_up9__neon()
183 const int64x2_t vproduct45 = vmull_s32(vget_low_s32(vacc_hi), vget_low_s32(vmultiplier)); in xnn_q8_avgpool_ukernel_up9__neon()
184 const int64x2_t vproduct67 = vmull_high_s32(vacc_hi, vmultiplier); in xnn_q8_avgpool_ukernel_up9__neon()
186 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_q8_avgpool_ukernel_up9__neon()
187 const int64x2_t vadjusted_product23 = vaddw_high_s32(vproduct23, vneg_mask_lo); in xnn_q8_avgpool_ukernel_up9__neon()
188 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_q8_avgpool_ukernel_up9__neon()
189 const int64x2_t vadjusted_product67 = vaddw_high_s32(vproduct67, vneg_mask_hi); in xnn_q8_avgpool_ukernel_up9__neon()
191 const int64x2_t vproduct01 = vmull_s32(vget_low_s32(vacc_lo), vmultiplier); in xnn_q8_avgpool_ukernel_up9__neon()
192 const int64x2_t vproduct23 = vmull_s32(vget_high_s32(vacc_lo), vmultiplier); in xnn_q8_avgpool_ukernel_up9__neon()
193 const int64x2_t vproduct45 = vmull_s32(vget_low_s32(vacc_hi), vmultiplier); in xnn_q8_avgpool_ukernel_up9__neon()
194 const int64x2_t vproduct67 = vmull_s32(vget_high_s32(vacc_hi), vmultiplier); in xnn_q8_avgpool_ukernel_up9__neon()
196 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_q8_avgpool_ukernel_up9__neon()
197 const int64x2_t vadjusted_product23 = vaddw_s32(vproduct23, vget_high_s32(vneg_mask_lo)); in xnn_q8_avgpool_ukernel_up9__neon()
198 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_q8_avgpool_ukernel_up9__neon()
199 const int64x2_t vadjusted_product67 = vaddw_s32(vproduct67, vget_high_s32(vneg_mask_hi)); in xnn_q8_avgpool_ukernel_up9__neon()
202 const int64x2_t vscaled_acc01 = vrshlq_s64(vadjusted_product01, vleft_shift); in xnn_q8_avgpool_ukernel_up9__neon()
203 const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift); in xnn_q8_avgpool_ukernel_up9__neon()
204 const int64x2_t vscaled_acc45 = vrshlq_s64(vadjusted_product45, vleft_shift); in xnn_q8_avgpool_ukernel_up9__neon()
205 const int64x2_t vscaled_acc67 = vrshlq_s64(vadjusted_product67, vleft_shift); in xnn_q8_avgpool_ukernel_up9__neon()
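Taken together, these matches cover the 64-bit stage of the kernel's fixed-point requantization: each 32-bit accumulator lane is widened to a 64-bit product with the multiplier (vmull_s32, or vmull_high_s32 on AArch64), biased by a sign mask through a widening add (vaddw_s32 / vaddw_high_s32), and finally scaled with a rounding variable shift (vrshlq_s64, where a negative vleft_shift acts as a rounding right shift). The sketch below shows that pattern on a single 4-lane accumulator; the helper name requantize_q31, the sign-mask computation, and the vmovn_s64 narrowing are illustrative assumptions for this sketch, not lines from the kernel itself.

#include <arm_neon.h>

/* Minimal sketch of the 64-bit requantization step seen in the matches
   above, applied to one int32x4_t accumulator (the kernel repeats it
   for vacc_lo and vacc_hi, hence products 01 through 67). */
static inline int32x4_t requantize_q31(
    int32x4_t vacc,          /* four 32-bit accumulator lanes */
    int32x2_t vmultiplier,   /* fixed-point scale, same value in both lanes */
    int64x2_t vleft_shift)   /* negative lanes => rounding right shift */
{
  /* -1 in negative lanes, 0 elsewhere: biasing negative products by -1
     before vrshlq_s64 makes ties round away from zero instead of
     toward positive infinity. */
  const int32x4_t vneg_mask =
      vreinterpretq_s32_u32(vcltq_s32(vacc, vdupq_n_s32(0)));

  /* Widen 32x32 -> 64-bit products, two lanes at a time. */
  const int64x2_t vproduct01 = vmull_s32(vget_low_s32(vacc), vmultiplier);
  const int64x2_t vproduct23 = vmull_s32(vget_high_s32(vacc), vmultiplier);

  /* Widening add folds the sign bias into the 64-bit products. */
  const int64x2_t vadjusted01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask));
  const int64x2_t vadjusted23 = vaddw_s32(vproduct23, vget_high_s32(vneg_mask));

  /* Rounding variable shift; with vleft_shift < 0 this shifts right,
     rounding to nearest. */
  const int64x2_t vscaled01 = vrshlq_s64(vadjusted01, vleft_shift);
  const int64x2_t vscaled23 = vrshlq_s64(vadjusted23, vleft_shift);

  /* Narrow each 64-bit lane back to 32 bits (illustrative; the actual
     kernel recombines the scaled lanes differently). */
  return vcombine_s32(vmovn_s64(vscaled01), vmovn_s64(vscaled23));
}

On AArch64 the vmull_high_s32 / vaddw_high_s32 variants in the matches at lines 109, 111, 114, and 116 do the same work on the upper lane pairs without the explicit vget_high_s32 extractions; the vmull_s32 / vaddw_s32 path at lines 118 through 126 is the portable fallback. The identical block at lines 181 through 205 is the kernel's remainder path repeating the sequence.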