
Searched refs:vnmask1x0123 (Results 1 – 25 of 33) sorted by relevance

/external/XNNPACK/src/qs8-gemm/gen/
2x4c8-xw-minmax-sse2.c 102 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_gemm_xw_minmax_ukernel_2x4c8__sse2() local
105 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qs8_gemm_xw_minmax_ukernel_2x4c8__sse2()
114 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_gemm_xw_minmax_ukernel_2x4c8__sse2()
126 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_gemm_xw_minmax_ukernel_2x4c8__sse2()
2x4c8-minmax-sse2-ld128.c 104 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_gemm_minmax_ukernel_2x4c8__sse2_ld128() local
107 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qs8_gemm_minmax_ukernel_2x4c8__sse2_ld128()
116 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_gemm_minmax_ukernel_2x4c8__sse2_ld128()
128 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_gemm_minmax_ukernel_2x4c8__sse2_ld128()
2x4c8-minmax-sse2-ld64.c 106 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_gemm_minmax_ukernel_2x4c8__sse2_ld64() local
109 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qs8_gemm_minmax_ukernel_2x4c8__sse2_ld64()
118 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_gemm_minmax_ukernel_2x4c8__sse2_ld64()
130 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_gemm_minmax_ukernel_2x4c8__sse2_ld64()
3x4c8-minmax-sse2-ld64.c 126 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_gemm_minmax_ukernel_3x4c8__sse2_ld64() local
130 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qs8_gemm_minmax_ukernel_3x4c8__sse2_ld64()
142 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_gemm_minmax_ukernel_3x4c8__sse2_ld64()
158 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_gemm_minmax_ukernel_3x4c8__sse2_ld64()
3x4c8-minmax-sse2-ld128.c 124 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_gemm_minmax_ukernel_3x4c8__sse2_ld128() local
128 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qs8_gemm_minmax_ukernel_3x4c8__sse2_ld128()
140 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_gemm_minmax_ukernel_3x4c8__sse2_ld128()
156 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_gemm_minmax_ukernel_3x4c8__sse2_ld128()
3x4c8-xw-minmax-sse2.c 122 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_gemm_xw_minmax_ukernel_3x4c8__sse2() local
126 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qs8_gemm_xw_minmax_ukernel_3x4c8__sse2()
138 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_gemm_xw_minmax_ukernel_3x4c8__sse2()
154 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_gemm_xw_minmax_ukernel_3x4c8__sse2()
2x4c8-minmax-ssse3-ld128.c 104 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_gemm_minmax_ukernel_2x4c8__ssse3_ld128() local
116 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_gemm_minmax_ukernel_2x4c8__ssse3_ld128()
128 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_gemm_minmax_ukernel_2x4c8__ssse3_ld128()
2x4c8-minmax-ssse3-ld64.c 106 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_gemm_minmax_ukernel_2x4c8__ssse3_ld64() local
118 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_gemm_minmax_ukernel_2x4c8__ssse3_ld64()
130 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_gemm_minmax_ukernel_2x4c8__ssse3_ld64()
2x4c8-xw-minmax-ssse3.c 102 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_gemm_xw_minmax_ukernel_2x4c8__ssse3() local
114 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_gemm_xw_minmax_ukernel_2x4c8__ssse3()
126 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_gemm_xw_minmax_ukernel_2x4c8__ssse3()
4x4c2-xw-minmax-sse2.c 186 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_gemm_xw_minmax_ukernel_4x4c2__sse2() local
191 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qs8_gemm_xw_minmax_ukernel_4x4c2__sse2()
206 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_gemm_xw_minmax_ukernel_4x4c2__sse2()
226 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_gemm_xw_minmax_ukernel_4x4c2__sse2()
4x4c2-minmax-sse2-ld128.c 193 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_gemm_minmax_ukernel_4x4c2__sse2_ld128() local
198 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qs8_gemm_minmax_ukernel_4x4c2__sse2_ld128()
213 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_gemm_minmax_ukernel_4x4c2__sse2_ld128()
233 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_gemm_minmax_ukernel_4x4c2__sse2_ld128()
4x4c2-minmax-sse2-ld64.c 193 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_gemm_minmax_ukernel_4x4c2__sse2_ld64() local
198 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qs8_gemm_minmax_ukernel_4x4c2__sse2_ld64()
213 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_gemm_minmax_ukernel_4x4c2__sse2_ld64()
233 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_gemm_minmax_ukernel_4x4c2__sse2_ld64()
3x4c8-xw-minmax-ssse3.c 122 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_gemm_xw_minmax_ukernel_3x4c8__ssse3() local
138 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_gemm_xw_minmax_ukernel_3x4c8__ssse3()
154 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_gemm_xw_minmax_ukernel_3x4c8__ssse3()
3x4c8-minmax-ssse3-ld128.c 124 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_gemm_minmax_ukernel_3x4c8__ssse3_ld128() local
140 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_gemm_minmax_ukernel_3x4c8__ssse3_ld128()
156 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_gemm_minmax_ukernel_3x4c8__ssse3_ld128()
/external/XNNPACK/src/qu8-gemm/
2x4c8-minmax-sse2.c 115 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qu8_gemm_minmax_ukernel_2x4c8__sse2() local
118 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qu8_gemm_minmax_ukernel_2x4c8__sse2()
127 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qu8_gemm_minmax_ukernel_2x4c8__sse2()
139 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qu8_gemm_minmax_ukernel_2x4c8__sse2()
4x4c2-minmax-sse2.c 198 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qu8_gemm_minmax_ukernel_4x4c2__sse2() local
203 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qu8_gemm_minmax_ukernel_4x4c2__sse2()
218 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qu8_gemm_minmax_ukernel_4x4c2__sse2()
238 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qu8_gemm_minmax_ukernel_4x4c2__sse2()
/external/XNNPACK/src/qs8-igemm/gen/
2x4c8-minmax-sse2-ld128.c 119 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_igemm_minmax_ukernel_2x4c8__sse2_ld128() local
122 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qs8_igemm_minmax_ukernel_2x4c8__sse2_ld128()
131 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_igemm_minmax_ukernel_2x4c8__sse2_ld128()
143 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_igemm_minmax_ukernel_2x4c8__sse2_ld128()
2x4c8-minmax-sse2-ld64.c 121 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_igemm_minmax_ukernel_2x4c8__sse2_ld64() local
124 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qs8_igemm_minmax_ukernel_2x4c8__sse2_ld64()
133 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_igemm_minmax_ukernel_2x4c8__sse2_ld64()
145 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_igemm_minmax_ukernel_2x4c8__sse2_ld64()
3x4c8-minmax-sse2-ld128.c 141 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_igemm_minmax_ukernel_3x4c8__sse2_ld128() local
145 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qs8_igemm_minmax_ukernel_3x4c8__sse2_ld128()
157 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__sse2_ld128()
173 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__sse2_ld128()
3x4c8-minmax-sse2-ld64.c 143 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_igemm_minmax_ukernel_3x4c8__sse2_ld64() local
147 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qs8_igemm_minmax_ukernel_3x4c8__sse2_ld64()
159 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__sse2_ld64()
175 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_igemm_minmax_ukernel_3x4c8__sse2_ld64()
2x4c8-minmax-ssse3-ld128.c 119 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_igemm_minmax_ukernel_2x4c8__ssse3_ld128() local
131 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_igemm_minmax_ukernel_2x4c8__ssse3_ld128()
143 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_igemm_minmax_ukernel_2x4c8__ssse3_ld128()
4x4c2-minmax-sse2-ld128.c 212 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_igemm_minmax_ukernel_4x4c2__sse2_ld128() local
217 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qs8_igemm_minmax_ukernel_4x4c2__sse2_ld128()
232 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_igemm_minmax_ukernel_4x4c2__sse2_ld128()
252 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_igemm_minmax_ukernel_4x4c2__sse2_ld128()
2x4c8-minmax-ssse3-ld64.c 121 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_igemm_minmax_ukernel_2x4c8__ssse3_ld64() local
133 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_igemm_minmax_ukernel_2x4c8__ssse3_ld64()
145 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_igemm_minmax_ukernel_2x4c8__ssse3_ld64()
4x4c2-minmax-sse2-ld64.c 212 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qs8_igemm_minmax_ukernel_4x4c2__sse2_ld64() local
217 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qs8_igemm_minmax_ukernel_4x4c2__sse2_ld64()
232 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qs8_igemm_minmax_ukernel_4x4c2__sse2_ld64()
252 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qs8_igemm_minmax_ukernel_4x4c2__sse2_ld64()
/external/XNNPACK/src/qu8-igemm/
4x4c2-minmax-sse2.c 182 const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2() local
187 …const __m128i vabsacc1x0123 = _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
202 const __m128i vnmask1x02 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
222 const __m128i vnmask1x13 = _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1)); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
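
The recurring code in these hits is the SSE2 sign-mask absolute-value trick used in these requantization kernels: SSE2 has no 32-bit absolute-value instruction, so vnmask1x0123 marks the negative accumulator lanes and |x| is computed as (x ^ mask) - mask. The SSSE3 variants omit the vabsacc line because _mm_abs_epi32 does the same in one instruction. Below is a minimal standalone sketch of the pattern, not the XNNPACK source itself: the variable names follow the hits, while the multiplier value and the main() driver are illustrative assumptions. Compile with, e.g., gcc -msse2.

#include <stdint.h>
#include <stdio.h>
#include <emmintrin.h>  /* SSE2 intrinsics; the SSSE3 variants would also need tmmintrin.h */

int main(void) {
  const __m128i vacc1x0123 = _mm_setr_epi32(-70000, 30000, -1, 12345);
  const __m128i vmultiplier = _mm_set1_epi32(0x40000000);  /* illustrative value */

  /* All-ones mask in each 32-bit lane where the accumulator is negative. */
  const __m128i vnmask1x0123 = _mm_cmpgt_epi32(_mm_setzero_si128(), vacc1x0123);

  /* |x| = (x ^ mask) - mask: two's-complement negation for negative lanes,
     identity for non-negative lanes. This is the vabsacc line in the SSE2
     hits; the SSSE3 kernels use _mm_abs_epi32 instead. */
  const __m128i vabsacc1x0123 =
      _mm_sub_epi32(_mm_xor_si128(vacc1x0123, vnmask1x0123), vnmask1x0123);

  /* _mm_mul_epu32 multiplies only lanes 0 and 2 (u32 -> u64), so the even
     and odd lanes are handled as two separate products. */
  const __m128i vabsacc1x13 =
      _mm_shuffle_epi32(vabsacc1x0123, _MM_SHUFFLE(3, 3, 1, 1));
  const __m128i vabsprod1x02 = _mm_mul_epu32(vabsacc1x0123, vmultiplier);
  const __m128i vabsprod1x13 = _mm_mul_epu32(vabsacc1x13, vmultiplier);

  /* The shuffles in the hits duplicate the 32-bit sign masks of lanes
     (0,2) and (1,3) into full 64-bit masks, so the same xor/sub trick can
     restore the sign on the unsigned 64-bit products. */
  const __m128i vnmask1x02 =
      _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(2, 2, 0, 0));
  const __m128i vnmask1x13 =
      _mm_shuffle_epi32(vnmask1x0123, _MM_SHUFFLE(3, 3, 1, 1));
  const __m128i vprod1x02 =
      _mm_sub_epi64(_mm_xor_si128(vabsprod1x02, vnmask1x02), vnmask1x02);
  const __m128i vprod1x13 =
      _mm_sub_epi64(_mm_xor_si128(vabsprod1x13, vnmask1x13), vnmask1x13);

  int64_t out02[2], out13[2];
  _mm_storeu_si128((__m128i*) out02, vprod1x02);
  _mm_storeu_si128((__m128i*) out13, vprod1x13);
  printf("signed 64-bit products: %lld %lld %lld %lld\n",
         (long long) out02[0], (long long) out13[0],
         (long long) out02[1], (long long) out13[1]);
  return 0;
}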
