Lines Matching refs:vc6
36 const __m256 vc6 = _mm256_set1_ps(0x1.6b7338p-10f); in xnn_f32_velu_ukernel__avx2_rr1_p6_x80() local
109 __m256 vp0 = _mm256_fmadd_ps(vc6, vt0, vc5); in xnn_f32_velu_ukernel__avx2_rr1_p6_x80()
110 __m256 vp1 = _mm256_fmadd_ps(vc6, vt1, vc5); in xnn_f32_velu_ukernel__avx2_rr1_p6_x80()
111 __m256 vp2 = _mm256_fmadd_ps(vc6, vt2, vc5); in xnn_f32_velu_ukernel__avx2_rr1_p6_x80()
112 __m256 vp3 = _mm256_fmadd_ps(vc6, vt3, vc5); in xnn_f32_velu_ukernel__avx2_rr1_p6_x80()
113 __m256 vp4 = _mm256_fmadd_ps(vc6, vt4, vc5); in xnn_f32_velu_ukernel__avx2_rr1_p6_x80()
114 __m256 vp5 = _mm256_fmadd_ps(vc6, vt5, vc5); in xnn_f32_velu_ukernel__avx2_rr1_p6_x80()
115 __m256 vp6 = _mm256_fmadd_ps(vc6, vt6, vc5); in xnn_f32_velu_ukernel__avx2_rr1_p6_x80()
116 __m256 vp7 = _mm256_fmadd_ps(vc6, vt7, vc5); in xnn_f32_velu_ukernel__avx2_rr1_p6_x80()
117 __m256 vp8 = _mm256_fmadd_ps(vc6, vt8, vc5); in xnn_f32_velu_ukernel__avx2_rr1_p6_x80()
118 __m256 vp9 = _mm256_fmadd_ps(vc6, vt9, vc5); in xnn_f32_velu_ukernel__avx2_rr1_p6_x80()
251 __m256 vp = _mm256_fmadd_ps(vc6, vt, vc5); in xnn_f32_velu_ukernel__avx2_rr1_p6_x80()
283 __m256 vp = _mm256_fmadd_ps(vc6, vt, vc5); in xnn_f32_velu_ukernel__avx2_rr1_p6_x80()
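For context: vc6 holds the leading coefficient (0x1.6b7338p-10f, source line 36) of the degree-6 polynomial this ELU microkernel evaluates, and every match above is the first fused multiply-add of a Horner-style evaluation in vt. The ten copies vp0..vp9 (source lines 109-118) correspond to the x80 unrolling of the main loop (ten AVX2 vectors of eight floats), while the single-vp matches at lines 251 and 283 come from the tail loops that handle the remaining elements. Below is a minimal, self-contained sketch of that Horner pattern; it is an assumed shape rather than the kernel's actual code, only vc6's value is taken from this listing, and the lower-order coefficients (defined elsewhere in the kernel, not shown here) are passed in as parameters.

#include <immintrin.h>

/*
 * Sketch only (assumed shape, not the XNNPACK source): Horner's scheme with
 * FMA, starting from the highest-order coefficient exactly as in the matched
 * lines.  vc6 is the constant from source line 36; vc5..vc2 are whatever the
 * kernel defines, so they are parameters here.
 */
static inline __m256 horner_from_vc6(__m256 vt, __m256 vc5, __m256 vc4,
                                      __m256 vc3, __m256 vc2)
{
  const __m256 vc6 = _mm256_set1_ps(0x1.6b7338p-10f);  /* source line 36 */
  __m256 vp = _mm256_fmadd_ps(vc6, vt, vc5);           /* the matched step */
  vp = _mm256_fmadd_ps(vp, vt, vc4);
  vp = _mm256_fmadd_ps(vp, vt, vc3);
  vp = _mm256_fmadd_ps(vp, vt, vc2);
  /* vp ~= c6*t^4 + c5*t^3 + c4*t^2 + c3*t + c2; the kernel folds in the
   * remaining factors of t afterwards before reconstructing the ELU output. */
  return vp;
}

Compiled with -mavx2 -mfma, this is the chain that begins at each matched line; the main loop runs it on ten independent vectors per iteration, which exposes enough instruction-level parallelism to hide the FMA latency.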