
Searched refs:vacc0x45 (Results 1 – 9 of 9) sorted by relevance

/external/XNNPACK/src/qs8-gemm/gen/
2x8c8-xw-minmax-avx2.c  (all matches in xnn_qs8_gemm_xw_minmax_ukernel_2x8c8__avx2)
     59  __m256i vacc0x45 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x4), vbias0x5, 1);
     65  __m256i vacc1x45 = vacc0x45;
     88  vacc0x45 = _mm256_add_epi32(vacc0x45, _mm256_madd_epi16(vxa0, vxb45));
    100  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);
2x8c8-minmax-avx2.c  (all matches in xnn_qs8_gemm_minmax_ukernel_2x8c8__avx2)
     59  __m256i vacc0x45 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x4), vbias0x5, 1);
     65  __m256i vacc1x45 = vacc0x45;
     91  vacc0x45 = _mm256_add_epi32(vacc0x45, _mm256_madd_epi16(vxa0, vxb45));
    104  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);
1x8c8-xw-minmax-avx2.c  (all matches in xnn_qs8_gemm_xw_minmax_ukernel_1x8c8__avx2)
     53  __m256i vacc0x45 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x4), vbias0x5, 1);
     73  vacc0x45 = _mm256_add_epi32(vacc0x45, _mm256_madd_epi16(vxa0, vxb45));
     83  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);
1x8c8-minmax-avx2.c  (all matches in xnn_qs8_gemm_minmax_ukernel_1x8c8__avx2)
     53  __m256i vacc0x45 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x4), vbias0x5, 1);
     76  vacc0x45 = _mm256_add_epi32(vacc0x45, _mm256_madd_epi16(vxa0, vxb45));
     87  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);
3x8c8-minmax-avx2.c  (all matches in xnn_qs8_gemm_minmax_ukernel_3x8c8__avx2)
     65  __m256i vacc0x45 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x4), vbias0x5, 1);
     71  __m256i vacc1x45 = vacc0x45;
     75  __m256i vacc2x45 = vacc0x45;
    106  vacc0x45 = _mm256_add_epi32(vacc0x45, _mm256_madd_epi16(vxa0, vxb45));
    121  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);
3x8c8-xw-minmax-avx2.c  (all matches in xnn_qs8_gemm_xw_minmax_ukernel_3x8c8__avx2)
     65  __m256i vacc0x45 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x4), vbias0x5, 1);
     71  __m256i vacc1x45 = vacc0x45;
     75  __m256i vacc2x45 = vacc0x45;
    103  vacc0x45 = _mm256_add_epi32(vacc0x45, _mm256_madd_epi16(vxa0, vxb45));
    117  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);
/external/XNNPACK/src/qs8-igemm/gen/
3x8c8-minmax-avx2.c  (all matches in xnn_qs8_igemm_minmax_ukernel_3x8c8__avx2)
     64  __m256i vacc0x45 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x4), vbias0x5, 1);
     70  __m256i vacc1x45 = vacc0x45;
     74  __m256i vacc2x45 = vacc0x45;
    121  vacc0x45 = _mm256_add_epi32(vacc0x45, _mm256_madd_epi16(vxa0, vxb45));
    138  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);
2x8c8-minmax-avx2.c  (all matches in xnn_qs8_igemm_minmax_ukernel_2x8c8__avx2)
     60  __m256i vacc0x45 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x4), vbias0x5, 1);
     66  __m256i vacc1x45 = vacc0x45;
    104  vacc0x45 = _mm256_add_epi32(vacc0x45, _mm256_madd_epi16(vxa0, vxb45));
    119  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);
1x8c8-minmax-avx2.c  (all matches in xnn_qs8_igemm_minmax_ukernel_1x8c8__avx2)
     56  __m256i vacc0x45 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x4), vbias0x5, 1);
     87  vacc0x45 = _mm256_add_epi32(vacc0x45, _mm256_madd_epi16(vxa0, vxb45));
    100  const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);
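Every match above follows the same three-stage accumulator pattern: pack two 128-bit int32 bias vectors (columns 4 and 5) into one 256-bit accumulator, accumulate sign-extended 8-bit products with _mm256_madd_epi16, then pair the partial sums with _mm256_hadd_epi32. Below is a minimal standalone C sketch of that pattern, not XNNPACK code: the bias, activation, and weight values (vbias0x4, vbias0x5, vxa0, vxb45) are made-up constants, and the columns-6/7 accumulator is reused from columns 4/5 purely for brevity. Compile with e.g. gcc -mavx2 on an AVX2-capable host.

    // Illustrative sketch of the vacc0x45 accumulator pattern (AVX2).
    #include <immintrin.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void) {
      // Hypothetical per-column int32 biases for output columns 4 and 5.
      const __m128i vbias0x4 = _mm_set1_epi32(100);
      const __m128i vbias0x5 = _mm_set1_epi32(200);

      // Stage 1: pack the two 128-bit bias vectors into one 256-bit
      // accumulator; lane 0 carries column 4, lane 1 carries column 5.
      __m256i vacc0x45 = _mm256_inserti128_si256(
          _mm256_castsi128_si256(vbias0x4), vbias0x5, 1);

      // Stage 2: one accumulation step. madd_epi16 multiplies packed
      // 16-bit values and adds adjacent product pairs into 32-bit lanes.
      const __m256i vxa0  = _mm256_set1_epi16(3);   // hypothetical activations
      const __m256i vxb45 = _mm256_set1_epi16(2);   // hypothetical weights
      vacc0x45 = _mm256_add_epi32(vacc0x45, _mm256_madd_epi16(vxa0, vxb45));

      // Stage 3: horizontally pair the partial sums of columns 4/5 with
      // those of columns 6/7 (reused here for brevity), as in the hadd lines.
      const __m256i vacc0x67 = vacc0x45;            // stand-in for columns 6/7
      const __m256i vacc0x4657 = _mm256_hadd_epi32(vacc0x45, vacc0x67);

      int32_t out[8];
      _mm256_storeu_si256((__m256i *)out, vacc0x4657);
      for (int i = 0; i < 8; i++) printf("%d ", out[i]);
      printf("\n");
      return 0;
    }

The madd/hadd pairing is what makes the "45" layout work: _mm256_madd_epi16 already collapses adjacent 16-bit products into 32-bit partial sums, so a single _mm256_hadd_epi32 pass is enough to merge two columns' accumulators into the interleaved vacc0x4657 form seen at the end of each kernel.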