Searched refs:vacc0x67 (Results 1 – 9 of 9) sorted by relevance
/external/XNNPACK/src/qs8-gemm/gen/
2x8c8-xw-minmax-avx2.c  (in xnn_qs8_gemm_xw_minmax_ukernel_2x8c8__avx2):
     62  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  [local]
     66  __m256i vacc1x67 = vacc0x67;
     92  vacc0x67 = _mm256_add_epi32(vacc0x67, _mm256_madd_epi16(vxa0, vxb67));
    100  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);

2x8c8-minmax-avx2.c  (in xnn_qs8_gemm_minmax_ukernel_2x8c8__avx2):
     62  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  [local]
     66  __m256i vacc1x67 = vacc0x67;
     96  vacc0x67 = _mm256_add_epi32(vacc0x67, _mm256_madd_epi16(vxa0, vxb67));
    104  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);

1x8c8-xw-minmax-avx2.c  (in xnn_qs8_gemm_xw_minmax_ukernel_1x8c8__avx2):
     56  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  [local]
     76  vacc0x67 = _mm256_add_epi32(vacc0x67, _mm256_madd_epi16(vxa0, vxb67));
     83  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);

1x8c8-minmax-avx2.c  (in xnn_qs8_gemm_minmax_ukernel_1x8c8__avx2):
     56  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  [local]
     80  vacc0x67 = _mm256_add_epi32(vacc0x67, _mm256_madd_epi16(vxa0, vxb67));
     87  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);

3x8c8-minmax-avx2.c  (in xnn_qs8_gemm_minmax_ukernel_3x8c8__avx2):
     68  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  [local]
     72  __m256i vacc1x67 = vacc0x67;
     76  __m256i vacc2x67 = vacc0x67;
    112  vacc0x67 = _mm256_add_epi32(vacc0x67, _mm256_madd_epi16(vxa0, vxb67));
    121  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);

3x8c8-xw-minmax-avx2.c  (in xnn_qs8_gemm_xw_minmax_ukernel_3x8c8__avx2):
     68  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  [local]
     72  __m256i vacc1x67 = vacc0x67;
     76  __m256i vacc2x67 = vacc0x67;
    108  vacc0x67 = _mm256_add_epi32(vacc0x67, _mm256_madd_epi16(vxa0, vxb67));
    117  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);

/external/XNNPACK/src/qs8-igemm/gen/
3x8c8-minmax-avx2.c  (in xnn_qs8_igemm_minmax_ukernel_3x8c8__avx2):
     67  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  [local]
     71  __m256i vacc1x67 = vacc0x67;
     75  __m256i vacc2x67 = vacc0x67;
    127  vacc0x67 = _mm256_add_epi32(vacc0x67, _mm256_madd_epi16(vxa0, vxb67));
    138  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);

2x8c8-minmax-avx2.c  (in xnn_qs8_igemm_minmax_ukernel_2x8c8__avx2):
     63  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  [local]
     67  __m256i vacc1x67 = vacc0x67;
    109  vacc0x67 = _mm256_add_epi32(vacc0x67, _mm256_madd_epi16(vxa0, vxb67));
    119  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);

1x8c8-minmax-avx2.c  (in xnn_qs8_igemm_minmax_ukernel_1x8c8__avx2):
     59  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  [local]
     91  vacc0x67 = _mm256_add_epi32(vacc0x67, _mm256_madd_epi16(vxa0, vxb67));
    100  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);

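All nine hits use the same accumulator idiom: output columns 6 and 7 of the tile share one 256-bit register, which is seeded with the two int32 biases (one per 128-bit half), updated with _mm256_madd_epi16 partial dot products, and collapsed with _mm256_hadd_epi32 at the end. The listing below is a minimal, self-contained sketch of that idiom under assumed synthetic data (kc = 8, a single output row, made-up activations, weights, and biases); the variable names echo the kernels, but the operand loading is simplified for illustration and this is not the XNNPACK kernel itself.

/*
 * Sketch of the vacc0x67 accumulator idiom seen in the hits above.
 * Assumptions: synthetic data, kc = 8, one output row.
 * Build: cc -O2 -mavx2 vacc0x67_sketch.c
 */
#include <immintrin.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
  /* Synthetic data: one row of 8 signed 8-bit activations and the 8-element
   * weight vectors for output columns 6 and 7. */
  const int8_t a0[8] = {1, -2, 3, -4, 5, -6, 7, -8};
  const int8_t b6[8] = {2, 2, 2, 2, 2, 2, 2, 2};
  const int8_t b7[8] = {-1, 1, -1, 1, -1, 1, -1, 1};
  const int32_t bias6 = 100, bias7 = -100;

  /* Seed the accumulator like the searched declaration: bias6 in the low
   * dword of the lower 128-bit half, bias7 in the low dword of the upper
   * half, all other lanes zero. */
  const __m128i vbias0x6 = _mm_cvtsi32_si128(bias6);
  const __m128i vbias0x7 = _mm_cvtsi32_si128(bias7);
  __m256i vacc0x67 =
      _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);

  /* Duplicate the 8 activations into both 128-bit halves and sign-extend to
   * int16, so the same row multiplies column 6 (lower half) and column 7
   * (upper half). */
  const __m128i va0 = _mm_loadl_epi64((const __m128i*) a0);
  const __m256i vxa0 = _mm256_cvtepi8_epi16(_mm_unpacklo_epi64(va0, va0));

  /* Pack b6 | b7 into one 128-bit vector and sign-extend to int16. */
  const __m128i vb67 = _mm_unpacklo_epi64(
      _mm_loadl_epi64((const __m128i*) b6),
      _mm_loadl_epi64((const __m128i*) b7));
  const __m256i vxb67 = _mm256_cvtepi8_epi16(vb67);

  /* The accumulation step from the hits: 16 int16 products reduce to
   * 8 int32 partial sums (4 per column) and are added into vacc0x67. */
  vacc0x67 = _mm256_add_epi32(vacc0x67, _mm256_madd_epi16(vxa0, vxb67));

  /* The kernels reduce with _mm256_hadd_epi32 across the 01/23/45/67 pair
   * accumulators; with only the 6/7 pair here, two hadds of the register
   * with itself collapse each half to one per-column total. */
  __m256i vsum = _mm256_hadd_epi32(vacc0x67, vacc0x67);
  vsum = _mm256_hadd_epi32(vsum, vsum);
  const int32_t acc6 = _mm256_extract_epi32(vsum, 0);
  const int32_t acc7 = _mm256_extract_epi32(vsum, 4);

  /* Scalar reference for verification. */
  int32_t ref6 = bias6, ref7 = bias7;
  for (int k = 0; k < 8; k++) {
    ref6 += (int32_t) a0[k] * b6[k];
    ref7 += (int32_t) a0[k] * b7[k];
  }
  printf("col6: simd=%d ref=%d\n", acc6, ref6);
  printf("col7: simd=%d ref=%d\n", acc7, ref7);
  return 0;
}

The multi-row kernels differ only in that they copy the seeded vacc0x67 into vacc1x67 (and vacc2x67 for the 3x8c8 variants), as the hits at source lines 66/67, 71/72, and 75/76 show; each row then accumulates and reduces independently.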