/external/XNNPACK/src/qs8-gemm/gen/ |
D | 1x4c8-minmax-sse41-ld64.c |
    92   const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);  in xnn_qs8_gemm_minmax_ukernel_1x4c8__sse41_ld64() local
    94   const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);  in xnn_qs8_gemm_minmax_ukernel_1x4c8__sse41_ld64()

D | 1x4c8-minmax-xop-ld128.c |
    95   const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);  in xnn_qs8_gemm_minmax_ukernel_1x4c8__xop_ld128() local
    97   const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);  in xnn_qs8_gemm_minmax_ukernel_1x4c8__xop_ld128()

D | 1x4c8-xw-minmax-xop.c |
    93   const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);  in xnn_qs8_gemm_xw_minmax_ukernel_1x4c8__xop() local
    95   const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);  in xnn_qs8_gemm_xw_minmax_ukernel_1x4c8__xop()

D | 1x4c8-minmax-sse41-ld128.c |
    90   const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);  in xnn_qs8_gemm_minmax_ukernel_1x4c8__sse41_ld128() local
    92   const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);  in xnn_qs8_gemm_minmax_ukernel_1x4c8__sse41_ld128()

D | 1x4c8-xw-minmax-sse41.c |
    88   const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);  in xnn_qs8_gemm_xw_minmax_ukernel_1x4c8__sse41() local
    90   const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);  in xnn_qs8_gemm_xw_minmax_ukernel_1x4c8__sse41()

D | 1x4c8-minmax-xop-ld64.c |
    97   const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);  in xnn_qs8_gemm_minmax_ukernel_1x4c8__xop_ld64() local
    99   const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);  in xnn_qs8_gemm_minmax_ukernel_1x4c8__xop_ld64()

D | 1x4c2-xw-minmax-xop.c |
    116  const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);  in xnn_qs8_gemm_xw_minmax_ukernel_1x4c2__xop() local
    118  const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);  in xnn_qs8_gemm_xw_minmax_ukernel_1x4c2__xop()

D | 1x4c2-xw-minmax-sse41.c |
    111  const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);  in xnn_qs8_gemm_xw_minmax_ukernel_1x4c2__sse41() local
    113  const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);  in xnn_qs8_gemm_xw_minmax_ukernel_1x4c2__sse41()

D | 1x4c8-xw-minmax-ssse3.c |
    101  const __m128i vq31prod0x13 = _mm_srli_epi64(_mm_add_epi64(vprod0x13, vrounding), 31);  in xnn_qs8_gemm_xw_minmax_ukernel_1x4c8__ssse3() local
    104  _mm_castsi128_ps(vq31prod0x02), _mm_castsi128_ps(vq31prod0x13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_gemm_xw_minmax_ukernel_1x4c8__ssse3()

D | 1x4c2-minmax-xop-ld64.c |
    123  const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);  in xnn_qs8_gemm_minmax_ukernel_1x4c2__xop_ld64() local
    125  const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);  in xnn_qs8_gemm_minmax_ukernel_1x4c2__xop_ld64()

D | 1x4c2-minmax-xop-ld128.c |
    123  const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);  in xnn_qs8_gemm_minmax_ukernel_1x4c2__xop_ld128() local
    125  const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);  in xnn_qs8_gemm_minmax_ukernel_1x4c2__xop_ld128()

D | 1x4c8-minmax-ssse3-ld128.c |
    103  const __m128i vq31prod0x13 = _mm_srli_epi64(_mm_add_epi64(vprod0x13, vrounding), 31);  in xnn_qs8_gemm_minmax_ukernel_1x4c8__ssse3_ld128() local
    106  _mm_castsi128_ps(vq31prod0x02), _mm_castsi128_ps(vq31prod0x13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_gemm_minmax_ukernel_1x4c8__ssse3_ld128()

D | 1x4c8-xw-minmax-sse2.c |
    101  const __m128i vq31prod0x13 = _mm_srli_epi64(_mm_add_epi64(vprod0x13, vrounding), 31);  in xnn_qs8_gemm_xw_minmax_ukernel_1x4c8__sse2() local
    104  _mm_castsi128_ps(vq31prod0x02), _mm_castsi128_ps(vq31prod0x13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_gemm_xw_minmax_ukernel_1x4c8__sse2()

D | 1x4c8-minmax-ssse3-ld64.c |
    105  const __m128i vq31prod0x13 = _mm_srli_epi64(_mm_add_epi64(vprod0x13, vrounding), 31);  in xnn_qs8_gemm_minmax_ukernel_1x4c8__ssse3_ld64() local
    108  _mm_castsi128_ps(vq31prod0x02), _mm_castsi128_ps(vq31prod0x13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_gemm_minmax_ukernel_1x4c8__ssse3_ld64()

D | 1x4c2-minmax-sse41-ld128.c |
    118  const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);  in xnn_qs8_gemm_minmax_ukernel_1x4c2__sse41_ld128() local
    120  const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);  in xnn_qs8_gemm_minmax_ukernel_1x4c2__sse41_ld128()

D | 1x4c2-minmax-sse41-ld64.c |
    118  const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);  in xnn_qs8_gemm_minmax_ukernel_1x4c2__sse41_ld64() local
    120  const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);  in xnn_qs8_gemm_minmax_ukernel_1x4c2__sse41_ld64()

D | 1x4c8-minmax-sse2-ld128.c |
    103  const __m128i vq31prod0x13 = _mm_srli_epi64(_mm_add_epi64(vprod0x13, vrounding), 31);  in xnn_qs8_gemm_minmax_ukernel_1x4c8__sse2_ld128() local
    106  _mm_castsi128_ps(vq31prod0x02), _mm_castsi128_ps(vq31prod0x13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_gemm_minmax_ukernel_1x4c8__sse2_ld128()

D | 1x4c8-minmax-sse2-ld64.c |
    105  const __m128i vq31prod0x13 = _mm_srli_epi64(_mm_add_epi64(vprod0x13, vrounding), 31);  in xnn_qs8_gemm_minmax_ukernel_1x4c8__sse2_ld64() local
    108  _mm_castsi128_ps(vq31prod0x02), _mm_castsi128_ps(vq31prod0x13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_gemm_minmax_ukernel_1x4c8__sse2_ld64()

D | 1x4c2-xw-minmax-sse2.c |
    124  const __m128i vq31prod0x13 = _mm_srli_epi64(_mm_add_epi64(vprod0x13, vrounding), 31);  in xnn_qs8_gemm_xw_minmax_ukernel_1x4c2__sse2() local
    127  _mm_castsi128_ps(vq31prod0x02), _mm_castsi128_ps(vq31prod0x13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_gemm_xw_minmax_ukernel_1x4c2__sse2()

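All of the sse41/xop rows above pair the same two statements: the odd-lane products are doubled so each Q31 result lands in the high 32 bits of its 64-bit slot, and _mm_blend_epi16 with mask 0xCC then interleaves them with the even-lane results (which were shifted right by 31). A minimal standalone sketch of that combine step follows; it is not the generated kernel itself, and the accumulator values, the 0.5 multiplier, the vacc0x1133/_mm_mul_epi32 setup, and the main()/printf scaffolding are illustrative assumptions.

#include <stdint.h>
#include <stdio.h>
#include <smmintrin.h>  /* SSE4.1: _mm_mul_epi32, _mm_blend_epi16; build with -msse4.1 */

int main(void) {
  /* Illustrative inputs (not from the kernels): four int32 accumulators,
     a Q31 multiplier of 0.5, and the usual 2^30 rounding term. */
  const __m128i vacc0x0123 = _mm_setr_epi32(123456, -654321, 42, -7);
  const __m128i vmultiplier = _mm_set1_epi32(0x40000000);
  const __m128i vrounding = _mm_set1_epi64x(INT64_C(0x40000000));

  /* 64-bit products of the even lanes (0,2) and odd lanes (1,3), rounding added. */
  const __m128i vacc0x1133 = _mm_shuffle_epi32(vacc0x0123, _MM_SHUFFLE(3, 3, 1, 1));
  const __m128i vprod0x02 = _mm_add_epi64(_mm_mul_epi32(vacc0x0123, vmultiplier), vrounding);
  const __m128i vprod0x13 = _mm_add_epi64(_mm_mul_epi32(vacc0x1133, vmultiplier), vrounding);

  /* Even lanes: shift right by 31 so each Q31 result sits in the low 32 bits of
     its 64-bit slot.  Odd lanes: doubling moves the result into the high 32 bits
     instead, so one 16-bit blend with mask 0xCC interleaves both vectors. */
  const __m128i vq31prod0x02 = _mm_srli_epi64(vprod0x02, 31);
  const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);
  const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);

  int32_t out[4];
  _mm_storeu_si128((__m128i*) out, vq31prod0x0123);
  printf("%d %d %d %d\n", out[0], out[1], out[2], out[3]);  /* 61728 -327160 21 -3 */
  return 0;
}
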
/external/XNNPACK/src/qs8-igemm/gen/ |
D | 1x4c8-minmax-xop-ld64.c |
    110  const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);  in xnn_qs8_igemm_minmax_ukernel_1x4c8__xop_ld64() local
    112  const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);  in xnn_qs8_igemm_minmax_ukernel_1x4c8__xop_ld64()

D | 1x4c8-minmax-sse41-ld64.c |
    105  const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);  in xnn_qs8_igemm_minmax_ukernel_1x4c8__sse41_ld64() local
    107  const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);  in xnn_qs8_igemm_minmax_ukernel_1x4c8__sse41_ld64()

D | 1x4c8-minmax-xop-ld128.c |
    108  const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);  in xnn_qs8_igemm_minmax_ukernel_1x4c8__xop_ld128() local
    110  const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);  in xnn_qs8_igemm_minmax_ukernel_1x4c8__xop_ld128()

D | 1x4c8-minmax-sse41-ld128.c |
    103  const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);  in xnn_qs8_igemm_minmax_ukernel_1x4c8__sse41_ld128() local
    105  const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);  in xnn_qs8_igemm_minmax_ukernel_1x4c8__sse41_ld128()

D | 1x4c8-minmax-ssse3-ld128.c |
    116  const __m128i vq31prod0x13 = _mm_srli_epi64(_mm_add_epi64(vprod0x13, vrounding), 31);  in xnn_qs8_igemm_minmax_ukernel_1x4c8__ssse3_ld128() local
    119  _mm_castsi128_ps(vq31prod0x02), _mm_castsi128_ps(vq31prod0x13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_igemm_minmax_ukernel_1x4c8__ssse3_ld128()

D | 1x4c2-minmax-sse41-ld64.c |
    131  const __m128i vq31prod0x13 = _mm_add_epi64(vprod0x13, vprod0x13);  in xnn_qs8_igemm_minmax_ukernel_1x4c2__sse41_ld64() local
    133  const __m128i vq31prod0x0123 = _mm_blend_epi16(vq31prod0x02, vq31prod0x13, 0xCC);  in xnn_qs8_igemm_minmax_ukernel_1x4c2__sse41_ld64()

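The ssse3/sse2 rows in both directories use a different combine: both product vectors are shifted right by 31 so every Q31 result sits in the even 32-bit lane of its 64-bit slot, and _mm_shuffle_ps with _MM_SHUFFLE(2, 0, 2, 0) gathers those lanes. A minimal sketch follows, with the same caveats as above: the inputs are illustrative, the scalar product setup stands in for SSE2's missing signed 32x32->64 multiply, and the final _mm_shuffle_epi32 reorder is part of the sketch rather than one of the lines listed here.

#include <stdint.h>
#include <stdio.h>
#include <emmintrin.h>  /* SSE2 only */

int main(void) {
  /* Illustrative inputs (not from the kernels): the 64-bit products, with the
     2^30 rounding term already added, are built with scalar arithmetic. */
  const int32_t acc[4] = {123456, -654321, 42, -7};
  const int64_t multiplier = INT64_C(0x40000000);  /* 0.5 in Q31 */
  const int64_t rounding = INT64_C(0x40000000);

  const __m128i vprod0x02 = _mm_set_epi64x((int64_t) acc[2] * multiplier + rounding,
                                           (int64_t) acc[0] * multiplier + rounding);
  const __m128i vprod0x13 = _mm_set_epi64x((int64_t) acc[3] * multiplier + rounding,
                                           (int64_t) acc[1] * multiplier + rounding);

  /* Both vectors are shifted right by 31, so every Q31 result lands in the
     even 32-bit lane of its 64-bit slot. */
  const __m128i vq31prod0x02 = _mm_srli_epi64(vprod0x02, 31);
  const __m128i vq31prod0x13 = _mm_srli_epi64(vprod0x13, 31);

  /* _mm_shuffle_ps with _MM_SHUFFLE(2, 0, 2, 0) gathers those even lanes,
     giving order (prod0, prod2, prod1, prod3)... */
  const __m128i vq31prod0x0213 = _mm_castps_si128(_mm_shuffle_ps(
      _mm_castsi128_ps(vq31prod0x02), _mm_castsi128_ps(vq31prod0x13), _MM_SHUFFLE(2, 0, 2, 0)));
  /* ...and a final 32-bit shuffle restores lane order 0,1,2,3. */
  const __m128i vq31prod0x0123 = _mm_shuffle_epi32(vq31prod0x0213, _MM_SHUFFLE(3, 1, 2, 0));

  int32_t out[4];
  _mm_storeu_si128((__m128i*) out, vq31prod0x0123);
  printf("%d %d %d %d\n", out[0], out[1], out[2], out[3]);  /* 61728 -327160 21 -3 */
  return 0;
}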