Searched refs:vxi6 (Results 1 – 6 of 6) sorted by relevance
/external/XNNPACK/src/q8-gavgpool/
mp7p7q-sse2.c (all hits in xnn_q8_gavgpool_ukernel_mp7p7q__sse2()):
   58  const __m128i vxi6 = _mm_unpacklo_epi8(vi6, vzero);    [local declaration]
   64  const __m128i vsum016 = _mm_add_epi16(vsum01, vxi6);
  102  const __m128i vxi6 = _mm_unpacklo_epi8(vi6, vzero);    [local declaration]
  108  const __m128i vsum016 = _mm_add_epi16(vsum01, vxi6);
  170  const __m128i vxi6 = _mm_unpacklo_epi8(vi6, vzero);    [local declaration]
  176  const __m128i vsum016 = _mm_add_epi16(vsum01, vxi6);
  241  const __m128i vxi6 = _mm_unpacklo_epi8(vi6, vzero);    [local declaration]
  247  const __m128i vsum016 = _mm_add_epi16(vsum01, vxi6);
up7-sse2.c (all hits in xnn_q8_gavgpool_ukernel_up7__sse2()):
   76  const __m128i vxi6 = _mm_unpacklo_epi8(vi6, vzero);    [local declaration]
   82  const __m128i vsum016 = _mm_add_epi16(vsum01, vxi6);
  145  const __m128i vxi6 = _mm_unpacklo_epi8(vi6, vzero);    [local declaration]
  151  const __m128i vsum016 = _mm_add_epi16(vsum01, vxi6);
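The gavgpool hits above share one pattern: a row of eight uint8 pixels is zero-extended to 16-bit lanes and added into a running 16-bit partial sum. A minimal sketch of that step follows; only the vi6/vxi6/vsum names come from the hits, the helper function and its row pointer are illustrative assumptions.

/* Sketch only, not the XNNPACK source: zero-extend one row of 8 uint8
 * pixels to 16-bit lanes and add it into a running 16-bit sum. */
#include <emmintrin.h>
#include <stdint.h>

static __m128i accumulate_row_u8(__m128i vsum, const uint8_t* row) {
  const __m128i vzero = _mm_setzero_si128();
  /* Load 8 bytes (vi6 in the kernel). */
  const __m128i vi = _mm_loadl_epi64((const __m128i*) row);
  /* Interleave with zero bytes: each uint8 becomes a uint16 lane (vxi6). */
  const __m128i vxi = _mm_unpacklo_epi8(vi, vzero);
  /* 16-bit add into the partial sum (vsum016 = vsum01 + vxi6). */
  return _mm_add_epi16(vsum, vxi);
}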
/external/XNNPACK/src/q8-avgpool/ |
mp9p8q-sse2.c (all hits in xnn_q8_avgpool_ukernel_mp9p8q__sse2()):
   68  const __m128i vxi6 = _mm_unpacklo_epi8(vi6, vzero);    [local declaration]
   75  const __m128i vsum67 = _mm_add_epi16(vxi6, vxi7);
  120  const __m128i vxi6 = _mm_unpacklo_epi8(vi6, vzero);    [local declaration]
  126  const __m128i vsum67 = _mm_add_epi16(vxi6, vxi7);
  194  const __m128i vxi6 = _mm_unpacklo_epi8(vi6, vzero);    [local declaration]
  200  const __m128i vsum67 = _mm_add_epi16(vxi6, vxi7);
  269  const __m128i vxi6 = _mm_unpacklo_epi8(vi6, vzero);    [local declaration]
  275  const __m128i vsum67 = _mm_add_epi16(vxi6, vxi7);
up9-sse2.c (all hits in xnn_q8_avgpool_ukernel_up9__sse2()):
   92  const __m128i vxi6 = _mm_unpacklo_epi8(vi6, vzero);    [local declaration]
   99  const __m128i vsum67 = _mm_add_epi16(vxi6, vxi7);
  167  const __m128i vxi6 = _mm_unpacklo_epi8(vi6, vzero);    [local declaration]
  174  const __m128i vsum67 = _mm_add_epi16(vxi6, vxi7);
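The avgpool hits show the same unpack step feeding a pairwise 16-bit add tree (vsum67 = vxi6 + vxi7). A sketch of such a nine-row reduction follows; only the vsum67 line is taken from the hits, and the exact grouping used by the real up9 kernel is an assumption here.

/* Sketch only: pairwise 16-bit reduction over nine already zero-extended
 * rows; 9 * 255 still fits in a uint16 lane. */
#include <emmintrin.h>

static __m128i sum9_epi16(const __m128i vx[9]) {
  const __m128i vsum01 = _mm_add_epi16(vx[0], vx[1]);
  const __m128i vsum23 = _mm_add_epi16(vx[2], vx[3]);
  const __m128i vsum45 = _mm_add_epi16(vx[4], vx[5]);
  const __m128i vsum67 = _mm_add_epi16(vx[6], vx[7]);   /* matches the hit lines */
  const __m128i vsum0123 = _mm_add_epi16(vsum01, vsum23);
  const __m128i vsum4567 = _mm_add_epi16(vsum45, vsum67);
  return _mm_add_epi16(_mm_add_epi16(vsum0123, vsum4567), vx[8]);
}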
/external/XNNPACK/src/q8-dwconv/ |
up8x9-sse2.c (all hits in xnn_q8_dwconv_ukernel_up8x9__sse2()):
  101  const __m128i vxi6 = _mm_unpacklo_epi8(vi6, vzero);    [local declaration]
  104  const __m128i vprod6_odd = _mm_mullo_epi16(vxi6, vxk6);
  105  const __m128i vprod6_even = _mm_mulhi_epi16(vxi6, vxk6);
  253  const __m128i vxi6 = _mm_unpacklo_epi8(vi6, vzero);    [local declaration]
  256  const __m128i vprod6_odd = _mm_mullo_epi16(vxi6, vxk6);
  257  const __m128i vprod6_even = _mm_mulhi_epi16(vxi6, vxk6);
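In the depthwise convolution kernel the zero-extended inputs are multiplied against 16-bit filter values, and _mm_mullo_epi16 / _mm_mulhi_epi16 (the vprod6_odd / vprod6_even hits) supply the low and high halves of each product. A sketch of how such halves can be recombined into 32-bit accumulators follows; the interleave-and-add step and all names other than the two multiplies are assumptions, not the file's exact code.

/* Sketch only: rebuild full int32 products from 16-bit lanes on SSE2.
 * _mm_mullo_epi16 gives the low 16 bits of each product, _mm_mulhi_epi16
 * the (signed) high 16 bits; interleaving the two restores 32-bit values. */
#include <emmintrin.h>

static void mul_acc_epi16(__m128i vxi, __m128i vxk,
                          __m128i* vacc_lo, __m128i* vacc_hi) {
  const __m128i vprod_lo = _mm_mullo_epi16(vxi, vxk);  /* low halves  */
  const __m128i vprod_hi = _mm_mulhi_epi16(vxi, vxk);  /* high halves */
  /* Lanes 0-3 and 4-7 of the full 32-bit products, added into accumulators. */
  *vacc_lo = _mm_add_epi32(*vacc_lo, _mm_unpacklo_epi16(vprod_lo, vprod_hi));
  *vacc_hi = _mm_add_epi32(*vacc_hi, _mm_unpackhi_epi16(vprod_lo, vprod_hi));
}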
up8x9-neon.c (all hits in xnn_q8_dwconv_ukernel_up8x9__neon()):
  467  const int16x8_t vxi6 = vreinterpretq_s16_u16(vmovl_u8(vi6));               [local declaration]
  468  vaccX0_lo = vmlal_s16(vaccX0_lo, vget_low_s16(vxk6), vget_low_s16(vxi6));
  469  vaccX0_hi = vmlal_s16(vaccX0_hi, vget_high_s16(vxk6), vget_high_s16(vxi6));
  558  const int16x8_t vxi6 = vreinterpretq_s16_u16(vmovl_u8(vi6));               [local declaration]
  559  vaccX0_lo = vmlal_s16(vaccX0_lo, vget_low_s16(vxk6), vget_low_s16(vxi6));
  560  vaccX0_hi = vmlal_s16(vaccX0_hi, vget_high_s16(vxk6), vget_high_s16(vxi6));
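The NEON variant widens each uint8 row with vmovl_u8, reinterprets it as signed 16-bit, and multiply-accumulates into 32-bit lanes with vmlal_s16, as the vxi6 / vaccX0 hits show. A self-contained sketch under those assumptions follows; the row load and the accumulator names are illustrative, only the widening and vmlal_s16 pattern is taken from the hits.

/* Sketch only: widening uint8 -> int16 -> int32 multiply-accumulate on NEON. */
#include <arm_neon.h>
#include <stdint.h>

static void mul_acc_row(const uint8_t* row, int16x8_t vxk,
                        int32x4_t* vacc_lo, int32x4_t* vacc_hi) {
  const uint8x8_t vi = vld1_u8(row);                          /* vi6 in the kernel  */
  const int16x8_t vxi = vreinterpretq_s16_u16(vmovl_u8(vi));  /* vxi6: widen to s16 */
  /* Widening multiply-accumulate into two int32x4 accumulators. */
  *vacc_lo = vmlal_s16(*vacc_lo, vget_low_s16(vxk), vget_low_s16(vxi));
  *vacc_hi = vmlal_s16(*vacc_hi, vget_high_s16(vxk), vget_high_s16(vxi));
}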