
Searched refs:vscaled_acc23 (Results 1 – 4 of 4) sorted by relevance

/external/XNNPACK/src/q8-avgpool/
up9-neon.c
    130  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift); in xnn_q8_avgpool_ukernel_up9__neon() local
    135  … vacc_lo = vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01), vreinterpretq_s32_s64(vscaled_acc23)); in xnn_q8_avgpool_ukernel_up9__neon()
    140  vacc_lo = vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23)); in xnn_q8_avgpool_ukernel_up9__neon()
    203  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift); in xnn_q8_avgpool_ukernel_up9__neon() local
    208  … vacc_lo = vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01), vreinterpretq_s32_s64(vscaled_acc23)); in xnn_q8_avgpool_ukernel_up9__neon()
    213  vacc_lo = vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23)); in xnn_q8_avgpool_ukernel_up9__neon()
mp9p8q-neon.c
    210  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift); in xnn_q8_avgpool_ukernel_mp9p8q__neon() local
    215  … vacc_lo = vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01), vreinterpretq_s32_s64(vscaled_acc23)); in xnn_q8_avgpool_ukernel_mp9p8q__neon()
    220  vacc_lo = vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23)); in xnn_q8_avgpool_ukernel_mp9p8q__neon()
    284  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift); in xnn_q8_avgpool_ukernel_mp9p8q__neon() local
    289  … vacc_lo = vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01), vreinterpretq_s32_s64(vscaled_acc23)); in xnn_q8_avgpool_ukernel_mp9p8q__neon()
    294  vacc_lo = vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23)); in xnn_q8_avgpool_ukernel_mp9p8q__neon()
/external/XNNPACK/src/q8-gavgpool/
up7-neon.c
    112  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift); in xnn_q8_gavgpool_ukernel_up7__neon() local
    117  … vacc_lo = vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01), vreinterpretq_s32_s64(vscaled_acc23)); in xnn_q8_gavgpool_ukernel_up7__neon()
    122  vacc_lo = vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23)); in xnn_q8_gavgpool_ukernel_up7__neon()
    182  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift); in xnn_q8_gavgpool_ukernel_up7__neon() local
    187  … vacc_lo = vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01), vreinterpretq_s32_s64(vscaled_acc23)); in xnn_q8_gavgpool_ukernel_up7__neon()
    192  vacc_lo = vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23)); in xnn_q8_gavgpool_ukernel_up7__neon()
mp7p7q-neon.c
    188  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift); in xnn_q8_gavgpool_ukernel_mp7p7q__neon() local
    193  … vacc_lo = vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01), vreinterpretq_s32_s64(vscaled_acc23)); in xnn_q8_gavgpool_ukernel_mp7p7q__neon()
    198  vacc_lo = vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23)); in xnn_q8_gavgpool_ukernel_mp7p7q__neon()
    260  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift); in xnn_q8_gavgpool_ukernel_mp7p7q__neon() local
    265  … vacc_lo = vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01), vreinterpretq_s32_s64(vscaled_acc23)); in xnn_q8_gavgpool_ukernel_mp7p7q__neon()
    270  vacc_lo = vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23)); in xnn_q8_gavgpool_ukernel_mp7p7q__neon()
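All four hits follow the same scale-and-narrow pattern: the 64-bit adjusted products are rounding-shifted by vleft_shift (its lanes are negative, so vrshlq_s64 performs a rounding right shift), and the low 32 bits of each lane are then gathered into a single int32x4_t, via vuzp1q_s32 on AArch64 or vmovn_s64 plus vcombine_s32 on AArch32. Below is a minimal sketch of that pattern, not the actual XNNPACK kernels; the helper name scale_and_narrow is hypothetical, while the variable and intrinsic names are taken from the snippets above.

#include <arm_neon.h>

// Sketch of the scale-and-narrow step shared by the Q8 average-pooling
// kernels listed above. Each 64-bit adjusted product is rounding-shifted
// by vleft_shift (negative lanes = rounding right shift), then the low
// 32 bits of the four 64-bit lanes are packed into one int32x4_t.
static inline int32x4_t scale_and_narrow(
    int64x2_t vadjusted_product01,
    int64x2_t vadjusted_product23,
    int64x2_t vleft_shift)
{
  const int64x2_t vscaled_acc01 = vrshlq_s64(vadjusted_product01, vleft_shift);
  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift);
#if defined(__aarch64__)
  // AArch64: unzip the even (low) 32-bit halves of both 64-bit vectors
  // in a single instruction.
  return vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01),
                    vreinterpretq_s32_s64(vscaled_acc23));
#else
  // AArch32: narrow each 64-bit lane to its low 32 bits, then combine.
  return vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23));
#endif
}

The two branches compute the same result; the vuzp1q_s32 form simply exploits an AArch64-only permute to avoid the separate narrow-and-combine sequence, which is why the kernels carry both code paths.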