Searched refs:vxi0 (Results 1 – 6 of 6) sorted by relevance
/external/XNNPACK/src/q8-gavgpool/
D | mp7p7q-sse2.c
     52  const __m128i vxi0 = _mm_unpacklo_epi8(vi0, vzero);  in xnn_q8_gavgpool_ukernel_mp7p7q__sse2() local
     60  const __m128i vsum01 = _mm_add_epi16(vxi0, vxi1);  in xnn_q8_gavgpool_ukernel_mp7p7q__sse2()
     96  const __m128i vxi0 = _mm_unpacklo_epi8(vi0, vzero);  in xnn_q8_gavgpool_ukernel_mp7p7q__sse2() local
    104  const __m128i vsum01 = _mm_add_epi16(vxi0, vxi1);  in xnn_q8_gavgpool_ukernel_mp7p7q__sse2()
    164  const __m128i vxi0 = _mm_unpacklo_epi8(vi0, vzero);  in xnn_q8_gavgpool_ukernel_mp7p7q__sse2() local
    172  const __m128i vsum01 = _mm_add_epi16(vxi0, vxi1);  in xnn_q8_gavgpool_ukernel_mp7p7q__sse2()
    235  const __m128i vxi0 = _mm_unpacklo_epi8(vi0, vzero);  in xnn_q8_gavgpool_ukernel_mp7p7q__sse2() local
    243  const __m128i vsum01 = _mm_add_epi16(vxi0, vxi1);  in xnn_q8_gavgpool_ukernel_mp7p7q__sse2()
D | up7-sse2.c
     70  const __m128i vxi0 = _mm_unpacklo_epi8(vi0, vzero);  in xnn_q8_gavgpool_ukernel_up7__sse2() local
     78  const __m128i vsum01 = _mm_add_epi16(vxi0, vxi1);  in xnn_q8_gavgpool_ukernel_up7__sse2()
    139  const __m128i vxi0 = _mm_unpacklo_epi8(vi0, vzero);  in xnn_q8_gavgpool_ukernel_up7__sse2() local
    147  const __m128i vsum01 = _mm_add_epi16(vxi0, vxi1);  in xnn_q8_gavgpool_ukernel_up7__sse2()
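All of the gavgpool hits above share one idiom: _mm_unpacklo_epi8(vi0, vzero) interleaves the low eight uint8 pixels with zero bytes, which on little-endian zero-extends them to int16 lanes, and _mm_add_epi16 then accumulates rows pairwise in 16 bits. Below is a minimal standalone sketch of just that step; the array names and values are illustrative, not taken from XNNPACK.

#include <emmintrin.h>  /* SSE2 */
#include <stdint.h>
#include <stdio.h>

int main(void) {
  /* Two illustrative rows of uint8 pixels (stand-ins for vi0/vi1). */
  const uint8_t row0[16] = {0, 1, 2, 3, 4, 5, 6, 7,
                            8, 9, 10, 11, 12, 13, 14, 15};
  const uint8_t row1[16] = {255, 254, 253, 252, 251, 250, 249, 248,
                            247, 246, 245, 244, 243, 242, 241, 240};

  const __m128i vzero = _mm_setzero_si128();
  const __m128i vi0 = _mm_loadu_si128((const __m128i*) row0);
  const __m128i vi1 = _mm_loadu_si128((const __m128i*) row1);

  /* Interleave with zero bytes: zero-extends the low 8 uint8 lanes
     to int16 -- the vxi0/vxi1 pattern in every hit above. */
  const __m128i vxi0 = _mm_unpacklo_epi8(vi0, vzero);
  const __m128i vxi1 = _mm_unpacklo_epi8(vi1, vzero);

  /* Pairwise 16-bit row sum, as in vsum01. */
  const __m128i vsum01 = _mm_add_epi16(vxi0, vxi1);

  int16_t out[8];
  _mm_storeu_si128((__m128i*) out, vsum01);
  for (int i = 0; i < 8; i++) printf("%d ", out[i]);  /* prints 255 x8 */
  printf("\n");
  return 0;
}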
/external/XNNPACK/src/q8-avgpool/
D | mp9p8q-sse2.c
     62  const __m128i vxi0 = _mm_unpacklo_epi8(vi0, vzero);  in xnn_q8_avgpool_ukernel_mp9p8q__sse2() local
     72  const __m128i vsum018 = _mm_add_epi16(_mm_add_epi16(vxi0, vxi1), vxi8);  in xnn_q8_avgpool_ukernel_mp9p8q__sse2()
    114  const __m128i vxi0 = _mm_unpacklo_epi8(vi0, vzero);  in xnn_q8_avgpool_ukernel_mp9p8q__sse2() local
    123  const __m128i vsum01 = _mm_add_epi16(vxi0, vxi1);  in xnn_q8_avgpool_ukernel_mp9p8q__sse2()
    188  const __m128i vxi0 = _mm_unpacklo_epi8(vi0, vzero);  in xnn_q8_avgpool_ukernel_mp9p8q__sse2() local
    197  const __m128i vsum01 = _mm_add_epi16(vxi0, vxi1);  in xnn_q8_avgpool_ukernel_mp9p8q__sse2()
    263  const __m128i vxi0 = _mm_unpacklo_epi8(vi0, vzero);  in xnn_q8_avgpool_ukernel_mp9p8q__sse2() local
    272  const __m128i vsum01 = _mm_add_epi16(vxi0, vxi1);  in xnn_q8_avgpool_ukernel_mp9p8q__sse2()
D | up9-sse2.c
     86  const __m128i vxi0 = _mm_unpacklo_epi8(vi0, vzero);  in xnn_q8_avgpool_ukernel_up9__sse2() local
     96  const __m128i vsum018 = _mm_add_epi16(_mm_add_epi16(vxi0, vxi1), vxi8);  in xnn_q8_avgpool_ukernel_up9__sse2()
    161  const __m128i vxi0 = _mm_unpacklo_epi8(vi0, vzero);  in xnn_q8_avgpool_ukernel_up9__sse2() local
    171  const __m128i vsum018 = _mm_add_epi16(_mm_add_epi16(vxi0, vxi1), vxi8);  in xnn_q8_avgpool_ukernel_up9__sse2()
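The avgpool hits use the same widening step, plus a three-way 16-bit sum (vsum018) that folds row 8 into rows 0 and 1. Accumulating in 16 bits is safe here because nine rows of uint8 sum to at most 9 * 255 = 2295, well inside the int16 range, so all nine widened rows can be added before converting to 32 bits. A compilable sketch of that shape; the helper names widen_lo_u8 and sum3_lo_u8 are mine, not XNNPACK's.

#include <emmintrin.h>  /* SSE2 */

/* Zero-extend the low 8 uint8 lanes to int16 (the vxi* pattern). */
static inline __m128i widen_lo_u8(const __m128i v) {
  return _mm_unpacklo_epi8(v, _mm_setzero_si128());
}

/* Three-row 16-bit partial sum, matching vsum018 in the hits above:
   max value is 3 * 255 = 765 (and at most 2295 for all nine rows),
   so no int16 overflow is possible. */
static inline __m128i sum3_lo_u8(const __m128i vi0, const __m128i vi1,
                                 const __m128i vi8) {
  const __m128i vsum01 = _mm_add_epi16(widen_lo_u8(vi0), widen_lo_u8(vi1));
  return _mm_add_epi16(vsum01, widen_lo_u8(vi8));
}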
/external/XNNPACK/src/q8-dwconv/
D | up8x9-sse2.c
     47  const __m128i vxi0 = _mm_unpacklo_epi8(vi0, vzero);  in xnn_q8_dwconv_ukernel_up8x9__sse2() local
     50  const __m128i vprod0_odd = _mm_mullo_epi16(vxi0, vxk0);  in xnn_q8_dwconv_ukernel_up8x9__sse2()
     51  const __m128i vprod0_even = _mm_mulhi_epi16(vxi0, vxk0);  in xnn_q8_dwconv_ukernel_up8x9__sse2()
    199  const __m128i vxi0 = _mm_unpacklo_epi8(vi0, vzero);  in xnn_q8_dwconv_ukernel_up8x9__sse2() local
    202  const __m128i vprod0_odd = _mm_mullo_epi16(vxi0, vxk0);  in xnn_q8_dwconv_ukernel_up8x9__sse2()
    203  const __m128i vprod0_even = _mm_mulhi_epi16(vxi0, vxk0);  in xnn_q8_dwconv_ukernel_up8x9__sse2()
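The dwconv SSE2 kernel widens inputs the same way but then needs full 16 x 16 -> 32-bit products of input and kernel tap. SSE2 has no single widening multiply, so the hits pair _mm_mullo_epi16 (low 16 bits of each product, named vprod0_odd here) with _mm_mulhi_epi16 (high 16 bits, vprod0_even). Recombining the two halves into 32-bit products via unpack is the standard idiom; that recombination step is not in the hits above, so treat the second half of this sketch as an assumption. Input values are arbitrary.

#include <emmintrin.h>  /* SSE2 */
#include <stdint.h>
#include <stdio.h>

int main(void) {
  /* Illustrative 16-bit stand-ins for vxi0 (widened input) and
     vxk0 (widened kernel tap). */
  const __m128i vxi0 = _mm_setr_epi16(1, -2, 300, 4, -500, 6, 7, 8);
  const __m128i vxk0 = _mm_set1_epi16(100);

  /* Low and high 16 bits of each signed 16x16 product, as in the hits. */
  const __m128i vprod_lo = _mm_mullo_epi16(vxi0, vxk0);
  const __m128i vprod_hi = _mm_mulhi_epi16(vxi0, vxk0);

  /* Assumed recombination: interleaving lo/hi halves yields the full
     32-bit products on little-endian lanes. */
  const __m128i vprod32_lo = _mm_unpacklo_epi16(vprod_lo, vprod_hi);
  const __m128i vprod32_hi = _mm_unpackhi_epi16(vprod_lo, vprod_hi);

  int32_t out[8];
  _mm_storeu_si128((__m128i*) out, vprod32_lo);
  _mm_storeu_si128((__m128i*) out + 1, vprod32_hi);
  /* Prints 100 -200 30000 400 -50000 600 700 800; note that -50000
     exceeds int16 range, showing why the widening matters. */
  for (int i = 0; i < 8; i++) printf("%d ", out[i]);
  printf("\n");
  return 0;
}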
D | up8x9-neon.c
    425  const int16x8_t vxi0 = vreinterpretq_s16_u16(vmovl_u8(vi0));  in xnn_q8_dwconv_ukernel_up8x9__neon() local
    426  int32x4_t vaccX0_lo = vmull_s16(vget_low_s16(vxk0), vget_low_s16(vxi0));  in xnn_q8_dwconv_ukernel_up8x9__neon()
    427  int32x4_t vaccX0_hi = vmull_s16(vget_high_s16(vxk0), vget_high_s16(vxi0));  in xnn_q8_dwconv_ukernel_up8x9__neon()
    516  const int16x8_t vxi0 = vreinterpretq_s16_u16(vmovl_u8(vi0));  in xnn_q8_dwconv_ukernel_up8x9__neon() local
    517  int32x4_t vaccX0_lo = vmull_s16(vget_low_s16(vxk0), vget_low_s16(vxi0));  in xnn_q8_dwconv_ukernel_up8x9__neon()
    518  int32x4_t vaccX0_hi = vmull_s16(vget_high_s16(vxk0), vget_high_s16(vxi0));  in xnn_q8_dwconv_ukernel_up8x9__neon()
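On NEON the same computation is more direct: vmovl_u8 widens uint8 to uint16 in one instruction (the reinterpret to int16 is safe since values stay at or below 255), and vmull_s16 produces full 32-bit products of the low/high halves, as in vaccX0_lo/vaccX0_hi above. A standalone sketch with illustrative inputs (requires NEON; the names i0, k0, and out are mine):

#include <arm_neon.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
  /* Illustrative stand-ins for one input row and one widened kernel tap. */
  const uint8_t i0[8] = {0, 1, 2, 3, 250, 251, 252, 253};
  const int16_t k0[8] = {3, 3, 3, 3, -3, -3, -3, -3};

  const uint8x8_t vi0 = vld1_u8(i0);
  const int16x8_t vxk0 = vld1q_s16(k0);

  /* Widen uint8 -> uint16, then reinterpret as signed 16-bit
     (the vxi0 pattern in the hits above). */
  const int16x8_t vxi0 = vreinterpretq_s16_u16(vmovl_u8(vi0));

  /* 16x16 -> 32-bit widening multiplies, low and high halves. */
  const int32x4_t vacc_lo = vmull_s16(vget_low_s16(vxk0), vget_low_s16(vxi0));
  const int32x4_t vacc_hi = vmull_s16(vget_high_s16(vxk0), vget_high_s16(vxi0));

  int32_t out[8];
  vst1q_s32(out, vacc_lo);
  vst1q_s32(out + 4, vacc_hi);
  for (int i = 0; i < 8; i++) printf("%d ", out[i]);  /* 0 3 6 9 -750 ... */
  printf("\n");
  return 0;
}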