
Lines Matching refs:_mm_shuffle_epi32

104 …vacc0x0123 = _mm_add_epi32(vacc0x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa0, _MM_SHUFFLE(0, 0, 0,…  in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
105 …vacc1x0123 = _mm_add_epi32(vacc1x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa1, _MM_SHUFFLE(0, 0, 0,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
106 …vacc2x0123 = _mm_add_epi32(vacc2x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa2, _MM_SHUFFLE(0, 0, 0,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
107 …vacc3x0123 = _mm_add_epi32(vacc3x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa3, _MM_SHUFFLE(0, 0, 0,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
111 …vacc0x0123 = _mm_add_epi32(vacc0x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa0, _MM_SHUFFLE(1, 1, 1,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
112 …vacc1x0123 = _mm_add_epi32(vacc1x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa1, _MM_SHUFFLE(1, 1, 1,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
113 …vacc2x0123 = _mm_add_epi32(vacc2x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa2, _MM_SHUFFLE(1, 1, 1,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
114 …vacc3x0123 = _mm_add_epi32(vacc3x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa3, _MM_SHUFFLE(1, 1, 1,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
118 …vacc0x0123 = _mm_add_epi32(vacc0x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa0, _MM_SHUFFLE(2, 2, 2,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
119 …vacc1x0123 = _mm_add_epi32(vacc1x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa1, _MM_SHUFFLE(2, 2, 2,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
120 …vacc2x0123 = _mm_add_epi32(vacc2x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa2, _MM_SHUFFLE(2, 2, 2,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
121 …vacc3x0123 = _mm_add_epi32(vacc3x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa3, _MM_SHUFFLE(2, 2, 2,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
125 …vacc0x0123 = _mm_add_epi32(vacc0x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa0, _MM_SHUFFLE(3, 3, 3,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
126 …vacc1x0123 = _mm_add_epi32(vacc1x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa1, _MM_SHUFFLE(3, 3, 3,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
127 …vacc2x0123 = _mm_add_epi32(vacc2x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa2, _MM_SHUFFLE(3, 3, 3,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
128 …vacc3x0123 = _mm_add_epi32(vacc3x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa3, _MM_SHUFFLE(3, 3, 3,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
148 …vacc0x0123 = _mm_add_epi32(vacc0x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa0, _MM_SHUFFLE(0, 0, 0,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
149 …vacc1x0123 = _mm_add_epi32(vacc1x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa1, _MM_SHUFFLE(0, 0, 0,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
150 …vacc2x0123 = _mm_add_epi32(vacc2x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa2, _MM_SHUFFLE(0, 0, 0,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
151 …vacc3x0123 = _mm_add_epi32(vacc3x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa3, _MM_SHUFFLE(0, 0, 0,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
158 …vacc0x0123 = _mm_add_epi32(vacc0x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa0, _MM_SHUFFLE(1, 1, 1,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
159 …vacc1x0123 = _mm_add_epi32(vacc1x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa1, _MM_SHUFFLE(1, 1, 1,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
160 …vacc2x0123 = _mm_add_epi32(vacc2x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa2, _MM_SHUFFLE(1, 1, 1,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
161 …vacc3x0123 = _mm_add_epi32(vacc3x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa3, _MM_SHUFFLE(1, 1, 1,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
168 …vacc0x0123 = _mm_add_epi32(vacc0x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa0, _MM_SHUFFLE(2, 2, 2,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
169 …vacc1x0123 = _mm_add_epi32(vacc1x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa1, _MM_SHUFFLE(2, 2, 2,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
170 …vacc2x0123 = _mm_add_epi32(vacc2x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa2, _MM_SHUFFLE(2, 2, 2,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
171 …vacc3x0123 = _mm_add_epi32(vacc3x0123, _mm_madd_epi16(_mm_shuffle_epi32(vxa3, _MM_SHUFFLE(2, 2, 2,… in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
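In the matches above, the `_MM_SHUFFLE(k, k, k, k)` control broadcasts the k-th 32-bit lane of the activation register (`vxa0`…`vxa3`, presumably pairs of 16-bit values) across all four lanes before `_mm_madd_epi16` accumulates it against a weight column; the `vxb*` operands are truncated out of the listing. A minimal standalone sketch of that broadcast-and-madd idiom with placeholder data, not the kernel's real operands:

#include <emmintrin.h>  /* SSE2 intrinsics */
#include <stdint.h>
#include <stdio.h>

int main(void) {
  /* Placeholder activations: four pairs of 16-bit values {1,2},{3,4},{5,6},{7,8}. */
  const __m128i vxa0 = _mm_set_epi32(0x00080007, 0x00060005, 0x00040003, 0x00020001);
  const __m128i vxb0 = _mm_set1_epi16(2);            /* placeholder weight column */
  __m128i vacc0x0123 = _mm_setzero_si128();

  /* _MM_SHUFFLE(0, 0, 0, 0) replicates lane 0, i.e. the pair {1, 2}, into every lane;
     _mm_madd_epi16 then computes 1*2 + 2*2 = 6 in each 32-bit lane. */
  const __m128i vbcast = _mm_shuffle_epi32(vxa0, _MM_SHUFFLE(0, 0, 0, 0));
  vacc0x0123 = _mm_add_epi32(vacc0x0123, _mm_madd_epi16(vbcast, vxb0));

  int32_t out[4];
  _mm_storeu_si128((__m128i*) out, vacc0x0123);
  printf("%d %d %d %d\n", out[0], out[1], out[2], out[3]);  /* prints: 6 6 6 6 */
  return 0;
}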
191 const __m128i vabsacc0x1032 = _mm_shuffle_epi32(vabsacc0x0123, _MM_SHUFFLE(2, 3, 0, 1)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
192 const __m128i vabsacc1x1032 = _mm_shuffle_epi32(vabsacc1x0123, _MM_SHUFFLE(2, 3, 0, 1)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
193 const __m128i vabsacc2x1032 = _mm_shuffle_epi32(vabsacc2x0123, _MM_SHUFFLE(2, 3, 0, 1)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
194 const __m128i vabsacc3x1032 = _mm_shuffle_epi32(vabsacc3x0123, _MM_SHUFFLE(2, 3, 0, 1)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
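The `_MM_SHUFFLE(2, 3, 0, 1)` pattern swaps the two 32-bit lanes inside each 64-bit half, turning {a0, a1, a2, a3} into {a1, a0, a3, a2}. In SSE2 requantization code this typically moves the odd lanes into even positions so `_mm_mul_epu32` (which only reads lanes 0 and 2) can reach them; the surrounding multiplies are truncated out of this listing, so the sketch below only demonstrates the shuffle itself:

#include <emmintrin.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
  /* Lanes 0..3 hold 10, 20, 30, 40; the shuffle swaps within each 64-bit pair. */
  const __m128i vabsacc0x0123 = _mm_set_epi32(40, 30, 20, 10);
  const __m128i vabsacc0x1032 = _mm_shuffle_epi32(vabsacc0x0123, _MM_SHUFFLE(2, 3, 0, 1));

  int32_t lanes[4];
  _mm_storeu_si128((__m128i*) lanes, vabsacc0x1032);
  printf("%d %d %d %d\n", lanes[0], lanes[1], lanes[2], lanes[3]);  /* prints: 20 10 40 30 */
  return 0;
}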
201 const __m128i vnmask0x02 = _mm_shuffle_epi32(vnmask0x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
202 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
203 const __m128i vnmask2x02 = _mm_shuffle_epi32(vnmask2x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
204 const __m128i vnmask3x02 = _mm_shuffle_epi32(vnmask3x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
221 const __m128i vnmask0x13 = _mm_shuffle_epi32(vnmask0x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
222 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
223 const __m128i vnmask2x13 = _mm_shuffle_epi32(vnmask2x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
224 const __m128i vnmask3x13 = _mm_shuffle_epi32(vnmask3x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
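The `(2, 2, 0, 0)` and `(3, 3, 1, 1)` controls widen a per-lane 32-bit mask into per-product 64-bit masks: the first duplicates lanes 0 and 2, the second duplicates lanes 1 and 3. This looks like the usual sign-correction step around `_mm_mul_epu32`; a sketch with made-up mask values:

#include <emmintrin.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
  /* Made-up sign masks per lane: m0 = -1, m1 = 0, m2 = 0, m3 = -1. */
  const __m128i vnmask0x0123 = _mm_set_epi32(-1, 0, 0, -1);
  /* {m0, m0, m2, m2} and {m1, m1, m3, m3}: each 32-bit mask now covers a 64-bit product. */
  const __m128i vnmask0x02 = _mm_shuffle_epi32(vnmask0x0123, _MM_SHUFFLE(2, 2, 0, 0));
  const __m128i vnmask0x13 = _mm_shuffle_epi32(vnmask0x0123, _MM_SHUFFLE(3, 3, 1, 1));

  int32_t even[4], odd[4];
  _mm_storeu_si128((__m128i*) even, vnmask0x02);
  _mm_storeu_si128((__m128i*) odd, vnmask0x13);
  printf("even-lane masks: %d %d %d %d\n", even[0], even[1], even[2], even[3]);  /* -1 -1  0  0 */
  printf("odd-lane  masks: %d %d %d %d\n", odd[0], odd[1], odd[2], odd[3]);      /*  0  0 -1 -1 */
  return 0;
}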
245 const __m128i vq31prod0x0123 = _mm_shuffle_epi32(vq31prod0x0213, _MM_SHUFFLE(3, 1, 2, 0)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
246 const __m128i vq31prod1x0123 = _mm_shuffle_epi32(vq31prod1x0213, _MM_SHUFFLE(3, 1, 2, 0)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
247 const __m128i vq31prod2x0123 = _mm_shuffle_epi32(vq31prod2x0213, _MM_SHUFFLE(3, 1, 2, 0)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
248 const __m128i vq31prod3x0123 = _mm_shuffle_epi32(vq31prod3x0213, _MM_SHUFFLE(3, 1, 2, 0)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
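Finally, `_MM_SHUFFLE(3, 1, 2, 0)` picks source lanes 0, 2, 1, 3, turning the interleaved {p0, p2, p1, p3} product order back into the natural {p0, p1, p2, p3}. Illustrative sketch with placeholder lane values:

#include <emmintrin.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
  /* Lanes hold p0 = 0, p2 = 2, p1 = 1, p3 = 3, i.e. the 0213 interleave. */
  const __m128i vq31prod0x0213 = _mm_set_epi32(3, 1, 2, 0);
  const __m128i vq31prod0x0123 = _mm_shuffle_epi32(vq31prod0x0213, _MM_SHUFFLE(3, 1, 2, 0));

  int32_t p[4];
  _mm_storeu_si128((__m128i*) p, vq31prod0x0123);
  printf("%d %d %d %d\n", p[0], p[1], p[2], p[3]);  /* prints: 0 1 2 3 */
  return 0;
}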