Lines Matching refs:vt
52 __m256 vt = _mm256_fmadd_ps(vn, vminus_ln2, vz); in xnn_f32_velu_ukernel__avx2_rr1_p6_x8() local
54 __m256 vp = _mm256_fmadd_ps(vc6, vt, vc5); in xnn_f32_velu_ukernel__avx2_rr1_p6_x8()
55 vp = _mm256_fmadd_ps(vp, vt, vc4); in xnn_f32_velu_ukernel__avx2_rr1_p6_x8()
56 vp = _mm256_fmadd_ps(vp, vt, vc3); in xnn_f32_velu_ukernel__avx2_rr1_p6_x8()
57 vp = _mm256_fmadd_ps(vp, vt, vc2); in xnn_f32_velu_ukernel__avx2_rr1_p6_x8()
58 vp = _mm256_mul_ps(vp, vt); in xnn_f32_velu_ukernel__avx2_rr1_p6_x8()
60 vt = _mm256_mul_ps(vt, vs); in xnn_f32_velu_ukernel__avx2_rr1_p6_x8()
62 vp = _mm256_fmadd_ps(vp, vt, vt); in xnn_f32_velu_ukernel__avx2_rr1_p6_x8()
84 __m256 vt = _mm256_fmadd_ps(vn, vminus_ln2, vz); in xnn_f32_velu_ukernel__avx2_rr1_p6_x8() local
86 __m256 vp = _mm256_fmadd_ps(vc6, vt, vc5); in xnn_f32_velu_ukernel__avx2_rr1_p6_x8()
87 vp = _mm256_fmadd_ps(vp, vt, vc4); in xnn_f32_velu_ukernel__avx2_rr1_p6_x8()
88 vp = _mm256_fmadd_ps(vp, vt, vc3); in xnn_f32_velu_ukernel__avx2_rr1_p6_x8()
89 vp = _mm256_fmadd_ps(vp, vt, vc2); in xnn_f32_velu_ukernel__avx2_rr1_p6_x8()
90 vp = _mm256_mul_ps(vp, vt); in xnn_f32_velu_ukernel__avx2_rr1_p6_x8()
92 vt = _mm256_mul_ps(vt, vs); in xnn_f32_velu_ukernel__avx2_rr1_p6_x8()
94 vp = _mm256_fmadd_ps(vp, vt, vt); in xnn_f32_velu_ukernel__avx2_rr1_p6_x8()
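
All of the matched lines belong to the negative-input (expm1) path of the AVX2 ELU kernel: the first group (source lines 52-62) is in the main 8-element loop, and the second group (84-94) is the duplicated remainder/tail handling. In both, vt = fma(vn, vminus_ln2, vz) is the reduced argument t = z - n*ln2, the following FMA/multiply steps evaluate a degree-6 polynomial in t by Horner's rule, vt = vt*vs rescales t by s = 2^n, and the final vp = fma(vp, vt, vt) yields roughly s*(e^t - 1), which the surrounding (unmatched) lines combine with s - 1 and the alpha parameter to form alpha*expm1(z).

A minimal scalar sketch of that sequence, for illustration only: it assumes plain Taylor coefficients of e^t for c2..c6 (the real kernel loads tuned coefficients from its params struct) and uses rintf/ldexpf where the vectorized kernel derives n and s = 2^n with bit-level tricks not shown in the matched lines.

#include <math.h>
#include <stdio.h>

/* Scalar sketch of the expm1 core mirrored by the matched lines.
 * Coefficients are plain Taylor terms (1/2!, ..., 1/6!); the actual
 * kernel uses tuned coefficients, so treat these values as illustrative. */
static float expm1_p6(float z) {
  const float minus_ln2 = -0x1.62E430p-1f;  /* -ln(2) */
  const float log2e     =  0x1.715476p+0f;  /* log2(e) */
  const float c6 = 1.0f / 720.0f, c5 = 1.0f / 120.0f, c4 = 1.0f / 24.0f;
  const float c3 = 1.0f / 6.0f,   c2 = 0.5f;

  const float n = rintf(z * log2e);         /* vn: round(z / ln2) */
  const float s = ldexpf(1.0f, (int) n);    /* vs: 2^n */
  float t = fmaf(n, minus_ln2, z);          /* vt = fma(vn, vminus_ln2, vz) */

  float p = fmaf(c6, t, c5);                /* vp = fma(vc6, vt, vc5) */
  p = fmaf(p, t, c4);
  p = fmaf(p, t, c3);
  p = fmaf(p, t, c2);
  p = p * t;                                /* vp = c2*t + ... + c6*t^5 */

  t = t * s;                                /* vt = vt * vs */
  p = fmaf(p, t, t);                        /* ~ s*(e^t - 1) */

  return p + (s - 1.0f);                    /* ~ e^z - 1; the kernel folds this with alpha */
}

int main(void) {
  for (float z = -4.0f; z <= 0.0f; z += 1.0f) {
    printf("z=%+.1f  sketch=%+.6f  expm1f=%+.6f\n", z, expm1_p6(z), expm1f(z));
  }
  return 0;
}

For z = -2, for example, the sketch agrees with expm1f(-2) ~ -0.864665 to about single-precision accuracy, which is the level this polynomial structure targets; the kernel's other lines (not matched here) handle input saturation and the x > 0 branch.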