Searched refs:vscaled_acc23 (Results 1 – 4 of 4) sorted by relevance
/external/XNNPACK/src/q8-avgpool/

up9-neon.c
  130  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift);  in xnn_q8_avgpool_ukernel_up9__neon() local
  135  … vacc_lo = vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01), vreinterpretq_s32_s64(vscaled_acc23));  in xnn_q8_avgpool_ukernel_up9__neon()
  140  vacc_lo = vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23));  in xnn_q8_avgpool_ukernel_up9__neon()
  203  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift);  in xnn_q8_avgpool_ukernel_up9__neon() local
  208  … vacc_lo = vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01), vreinterpretq_s32_s64(vscaled_acc23));  in xnn_q8_avgpool_ukernel_up9__neon()
  213  vacc_lo = vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23));  in xnn_q8_avgpool_ukernel_up9__neon()

mp9p8q-neon.c
  210  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift);  in xnn_q8_avgpool_ukernel_mp9p8q__neon() local
  215  … vacc_lo = vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01), vreinterpretq_s32_s64(vscaled_acc23));  in xnn_q8_avgpool_ukernel_mp9p8q__neon()
  220  vacc_lo = vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23));  in xnn_q8_avgpool_ukernel_mp9p8q__neon()
  284  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift);  in xnn_q8_avgpool_ukernel_mp9p8q__neon() local
  289  … vacc_lo = vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01), vreinterpretq_s32_s64(vscaled_acc23));  in xnn_q8_avgpool_ukernel_mp9p8q__neon()
  294  vacc_lo = vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23));  in xnn_q8_avgpool_ukernel_mp9p8q__neon()
/external/XNNPACK/src/q8-gavgpool/

up7-neon.c
  112  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift);  in xnn_q8_gavgpool_ukernel_up7__neon() local
  117  … vacc_lo = vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01), vreinterpretq_s32_s64(vscaled_acc23));  in xnn_q8_gavgpool_ukernel_up7__neon()
  122  vacc_lo = vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23));  in xnn_q8_gavgpool_ukernel_up7__neon()
  182  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift);  in xnn_q8_gavgpool_ukernel_up7__neon() local
  187  … vacc_lo = vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01), vreinterpretq_s32_s64(vscaled_acc23));  in xnn_q8_gavgpool_ukernel_up7__neon()
  192  vacc_lo = vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23));  in xnn_q8_gavgpool_ukernel_up7__neon()

mp7p7q-neon.c
  188  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift);  in xnn_q8_gavgpool_ukernel_mp7p7q__neon() local
  193  … vacc_lo = vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01), vreinterpretq_s32_s64(vscaled_acc23));  in xnn_q8_gavgpool_ukernel_mp7p7q__neon()
  198  vacc_lo = vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23));  in xnn_q8_gavgpool_ukernel_mp7p7q__neon()
  260  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift);  in xnn_q8_gavgpool_ukernel_mp7p7q__neon() local
  265  … vacc_lo = vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01), vreinterpretq_s32_s64(vscaled_acc23));  in xnn_q8_gavgpool_ukernel_mp7p7q__neon()
  270  vacc_lo = vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23));  in xnn_q8_gavgpool_ukernel_mp7p7q__neon()
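All four kernels hit the same requantization step: each pair of 64-bit adjusted products is scaled with a rounding shift (vrshlq_s64), and the low 32 bits of every 64-bit lane are then packed back into one int32x4_t, via vuzp1q_s32 on AArch64 or vmovn_s64/vcombine_s32 on AArch32. A minimal standalone sketch of that shared pattern follows; the helper name scale_and_pack and the vadjusted_product01 operand are illustrative only (not XNNPACK identifiers), and the comment about the shift assumes vleft_shift carries a negative value, which is what makes vrshlq_s64 act as a rounding right shift.

#include <arm_neon.h>

/* Hypothetical helper sketching the pattern in the matches above:
 * scale two int64x2_t accumulators with a rounding shift, then keep
 * the low 32 bits of each 64-bit lane and pack them into an int32x4_t. */
static inline int32x4_t scale_and_pack(
    int64x2_t vadjusted_product01,  /* illustrative operand, not from the hits */
    int64x2_t vadjusted_product23,
    int64x2_t vleft_shift)          /* assumed negative: rounding right shift */
{
  const int64x2_t vscaled_acc01 = vrshlq_s64(vadjusted_product01, vleft_shift);
  const int64x2_t vscaled_acc23 = vrshlq_s64(vadjusted_product23, vleft_shift);
#if defined(__aarch64__)
  /* AArch64: vuzp1q_s32 selects the even 32-bit lanes, i.e. the low half
   * of each 64-bit element on a little-endian target. */
  return vuzp1q_s32(vreinterpretq_s32_s64(vscaled_acc01),
                    vreinterpretq_s32_s64(vscaled_acc23));
#else
  /* AArch32: narrow each 64-bit lane to 32 bits, then combine the halves. */
  return vcombine_s32(vmovn_s64(vscaled_acc01), vmovn_s64(vscaled_acc23));
#endif
}

Both branches produce the same lane layout, which is why the kernels can keep a single vacc_lo consumer after this point regardless of the target architecture.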