
Lines Matching refs:int64x2_t

62 const int64x2_t vleft_shift = vld1q_dup_s64(&params->neon.left_shift); in xnn_q8_gavgpool_ukernel_up7__neon()
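The lone match at line 62 hoists the requantization shift out of the per-element work: vld1q_dup_s64 loads one int64_t and broadcasts it into both lanes of an int64x2_t, so vrshlq_s64 can later use it as a per-lane shift amount. A minimal sketch of that load, assuming (as the vrshlq_s64 usage below suggests) that params->neon.left_shift holds a non-positive value:

    #include <arm_neon.h>
    #include <stdint.h>

    /* Sketch: broadcast the scalar shift into both 64-bit lanes.
     * That the stored value is non-positive is an assumption based on
     * vrshlq_s64 acting as a rounding right shift further down. */
    int64x2_t load_left_shift(const int64_t* left_shift) {
      return vld1q_dup_s64(left_shift);  /* lane 0 = lane 1 = *left_shift */
    }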
90 const int64x2_t vproduct01 = vmull_s32(vget_low_s32(vacc_lo), vget_low_s32(vmultiplier)); in xnn_q8_gavgpool_ukernel_up7__neon()
91 const int64x2_t vproduct23 = vmull_high_s32(vacc_lo, vmultiplier); in xnn_q8_gavgpool_ukernel_up7__neon()
92 const int64x2_t vproduct45 = vmull_s32(vget_low_s32(vacc_hi), vget_low_s32(vmultiplier)); in xnn_q8_gavgpool_ukernel_up7__neon()
93 const int64x2_t vproduct67 = vmull_high_s32(vacc_hi, vmultiplier); in xnn_q8_gavgpool_ukernel_up7__neon()
95 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_q8_gavgpool_ukernel_up7__neon()
96 const int64x2_t vadjusted_product23 = vaddw_high_s32(vproduct23, vneg_mask_lo); in xnn_q8_gavgpool_ukernel_up7__neon()
97 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_q8_gavgpool_ukernel_up7__neon()
98 const int64x2_t vadjusted_product67 = vaddw_high_s32(vproduct67, vneg_mask_hi); in xnn_q8_gavgpool_ukernel_up7__neon()
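Lines 90-98 are the AArch64 branch: vmull_high_s32 and vaddw_high_s32 read the high halves of the 128-bit registers directly, avoiding explicit vget_high_s32 extractions. The vmull* calls widen the 32-bit accumulator lanes into 64-bit products, and the vaddw* calls then add a sign mask (-1 in lanes where the accumulator is negative, 0 elsewhere) so that the rounding shift at lines 111-114 rounds ties away from zero rather than toward positive infinity. A sketch of one accumulator half; the sign-mask computation declares no int64x2_t, so it does not appear in this match list and is reconstructed here as an assumption:

    #include <arm_neon.h>

    /* Sketch of the AArch64 path (compiles only for AArch64 targets,
     * since vmull_high_s32/vaddw_high_s32 are A64 intrinsics). */
    void requant_widen_aarch64(int32x4_t vacc_lo, int32x4_t vmultiplier,
                               int64x2_t* out01, int64x2_t* out23) {
      /* Assumed: -1 in lanes where the accumulator is negative. */
      const int32x4_t vneg_mask_lo =
          vreinterpretq_s32_u32(vcltq_s32(vacc_lo, vmovq_n_s32(0)));

      /* Widening 32x32 -> 64 multiplies, low and high lane pairs. */
      const int64x2_t vproduct01 =
          vmull_s32(vget_low_s32(vacc_lo), vget_low_s32(vmultiplier));
      const int64x2_t vproduct23 = vmull_high_s32(vacc_lo, vmultiplier);

      /* Widening adds: each mask lane sign-extends to -1 or 0 in 64 bits. */
      *out01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo));
      *out23 = vaddw_high_s32(vproduct23, vneg_mask_lo);
    }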
100 const int64x2_t vproduct01 = vmull_s32(vget_low_s32(vacc_lo), vmultiplier); in xnn_q8_gavgpool_ukernel_up7__neon()
101 const int64x2_t vproduct23 = vmull_s32(vget_high_s32(vacc_lo), vmultiplier); in xnn_q8_gavgpool_ukernel_up7__neon()
102 const int64x2_t vproduct45 = vmull_s32(vget_low_s32(vacc_hi), vmultiplier); in xnn_q8_gavgpool_ukernel_up7__neon()
103 const int64x2_t vproduct67 = vmull_s32(vget_high_s32(vacc_hi), vmultiplier); in xnn_q8_gavgpool_ukernel_up7__neon()
105 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_q8_gavgpool_ukernel_up7__neon()
106 const int64x2_t vadjusted_product23 = vaddw_s32(vproduct23, vget_high_s32(vneg_mask_lo)); in xnn_q8_gavgpool_ukernel_up7__neon()
107 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_q8_gavgpool_ukernel_up7__neon()
108 const int64x2_t vadjusted_product67 = vaddw_s32(vproduct67, vget_high_s32(vneg_mask_hi)); in xnn_q8_gavgpool_ukernel_up7__neon()
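Lines 100-108 are the portable fallback for 32-bit ARM, where the *_high_* intrinsics above do not exist: the high halves are extracted explicitly with vget_high_s32, and the multiplier in this branch is presumably held as an int32x2_t rather than an int32x4_t (vmull_s32 takes it directly as its second operand). A matching sketch under that assumption:

    #include <arm_neon.h>

    /* Sketch of the non-AArch64 path; vmultiplier as int32x2_t is an
     * assumption inferred from the vmull_s32 call shapes above. */
    void requant_widen_armv7(int32x4_t vacc_lo, int32x2_t vmultiplier,
                             int64x2_t* out01, int64x2_t* out23) {
      const int32x4_t vneg_mask_lo =
          vreinterpretq_s32_u32(vcltq_s32(vacc_lo, vmovq_n_s32(0)));

      const int64x2_t vproduct01 = vmull_s32(vget_low_s32(vacc_lo), vmultiplier);
      const int64x2_t vproduct23 = vmull_s32(vget_high_s32(vacc_lo), vmultiplier);

      *out01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo));
      *out23 = vaddw_s32(vproduct23, vget_high_s32(vneg_mask_lo));
    }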
111 const int64x2_t vscaled_acc01 = vrshlq_s64(vadjusted_product01, vleft_shift); in xnn_q8_gavgpool_ukernel_up7__neon()
112 const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift); in xnn_q8_gavgpool_ukernel_up7__neon()
113 const int64x2_t vscaled_acc45 = vrshlq_s64(vadjusted_product45, vleft_shift); in xnn_q8_gavgpool_ukernel_up7__neon()
114 const int64x2_t vscaled_acc67 = vrshlq_s64(vadjusted_product67, vleft_shift); in xnn_q8_gavgpool_ukernel_up7__neon()
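Lines 111-114 finish the requantization arithmetic: vrshlq_s64 shifts each 64-bit lane by vleft_shift, and because that shift is (assumed) negative it acts as a rounding right shift, adding 2^(n-1) before shifting right by n. Combined with the -1 bias added to negative lanes above, the net effect is round-half-away-from-zero. A scalar model of one lane, assuming a shift magnitude of at least 1 and an arithmetic right shift on signed values:

    #include <stdint.h>

    /* Scalar model of vrshlq_s64 with a negative shift amount:
     * a rounding right shift by shift = -left_shift (assumed >= 1). */
    static int64_t rounding_shift_right(int64_t x, uint32_t shift) {
      return (x + (INT64_C(1) << (shift - 1))) >> shift;  /* round half up */
    }

    /* One-lane model of the whole sequence: the -1 bias on negative
     * accumulators turns round-half-up into round-half-away-from-zero. */
    static int32_t requantize_scalar(int32_t acc, int32_t multiplier,
                                     uint32_t shift) {
      const int64_t product = (int64_t) acc * (int64_t) multiplier;
      const int64_t adjusted = product + (acc < 0 ? -1 : 0);
      return (int32_t) rounding_shift_right(adjusted, shift);
    }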
160 const int64x2_t vproduct01 = vmull_s32(vget_low_s32(vacc_lo), vget_low_s32(vmultiplier)); in xnn_q8_gavgpool_ukernel_up7__neon()
161 const int64x2_t vproduct23 = vmull_high_s32(vacc_lo, vmultiplier); in xnn_q8_gavgpool_ukernel_up7__neon()
162 const int64x2_t vproduct45 = vmull_s32(vget_low_s32(vacc_hi), vget_low_s32(vmultiplier)); in xnn_q8_gavgpool_ukernel_up7__neon()
163 const int64x2_t vproduct67 = vmull_high_s32(vacc_hi, vmultiplier); in xnn_q8_gavgpool_ukernel_up7__neon()
165 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_q8_gavgpool_ukernel_up7__neon()
166 const int64x2_t vadjusted_product23 = vaddw_high_s32(vproduct23, vneg_mask_lo); in xnn_q8_gavgpool_ukernel_up7__neon()
167 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_q8_gavgpool_ukernel_up7__neon()
168 const int64x2_t vadjusted_product67 = vaddw_high_s32(vproduct67, vneg_mask_hi); in xnn_q8_gavgpool_ukernel_up7__neon()
170 const int64x2_t vproduct01 = vmull_s32(vget_low_s32(vacc_lo), vmultiplier); in xnn_q8_gavgpool_ukernel_up7__neon()
171 const int64x2_t vproduct23 = vmull_s32(vget_high_s32(vacc_lo), vmultiplier); in xnn_q8_gavgpool_ukernel_up7__neon()
172 const int64x2_t vproduct45 = vmull_s32(vget_low_s32(vacc_hi), vmultiplier); in xnn_q8_gavgpool_ukernel_up7__neon()
173 const int64x2_t vproduct67 = vmull_s32(vget_high_s32(vacc_hi), vmultiplier); in xnn_q8_gavgpool_ukernel_up7__neon()
175 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_q8_gavgpool_ukernel_up7__neon()
176 const int64x2_t vadjusted_product23 = vaddw_s32(vproduct23, vget_high_s32(vneg_mask_lo)); in xnn_q8_gavgpool_ukernel_up7__neon()
177 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_q8_gavgpool_ukernel_up7__neon()
178 const int64x2_t vadjusted_product67 = vaddw_s32(vproduct67, vget_high_s32(vneg_mask_hi)); in xnn_q8_gavgpool_ukernel_up7__neon()
181 const int64x2_t vscaled_acc01 = vrshlq_s64(vadjusted_product01, vleft_shift); in xnn_q8_gavgpool_ukernel_up7__neon()
182 const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift); in xnn_q8_gavgpool_ukernel_up7__neon()
183 const int64x2_t vscaled_acc45 = vrshlq_s64(vadjusted_product45, vleft_shift); in xnn_q8_gavgpool_ukernel_up7__neon()
184 const int64x2_t vscaled_acc67 = vrshlq_s64(vadjusted_product67, vleft_shift); in xnn_q8_gavgpool_ukernel_up7__neon()
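Lines 160-184 repeat the multiply/adjust/shift sequence verbatim at a second site; in a kernel of this shape that is most likely one copy for the full-width channel loop and one for the channel remainder. The step that follows each copy declares no int64x2_t and so is absent from these matches: the scaled 64-bit lanes are narrowed back to 32-bit accumulators before the output zero point and saturation are applied. A sketch of that narrowing, assuming the usual vmovn_s64/vcombine_s32 pairing:

    #include <arm_neon.h>

    /* Sketch: pack two pairs of scaled 64-bit lanes back into one
     * int32x4_t (truncating narrow; values are assumed in range). */
    int32x4_t narrow_scaled(int64x2_t vscaled_acc01, int64x2_t vscaled_acc23) {
      return vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23));
    }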