Lines Matching refs:vt
All matches are in xnn_f32_velu_ukernel__avx_rr2_p6_x8(); vt is a local variable, declared at lines 55 and 90.

First occurrence, lines 55-67:

  55  __m256 vt = _mm256_add_ps(_mm256_mul_ps(vn, vminus_ln2_hi), vz);  // t = z + n*(-ln2_hi)
  57  vt = _mm256_add_ps(_mm256_mul_ps(vn, vminus_ln2_lo), vt);         // t += n*(-ln2_lo): two-step ("rr2") reduction, so t = z - n*ln2
  59  __m256 vp = _mm256_add_ps(_mm256_mul_ps(vc6, vt), vc5);           // Horner evaluation of the degree-6 polynomial:
  60  vp = _mm256_add_ps(_mm256_mul_ps(vp, vt), vc4);                   //   p = (((c6*t + c5)*t + c4)*t + c3)*t + c2
  61  vp = _mm256_add_ps(_mm256_mul_ps(vp, vt), vc3);
  62  vp = _mm256_add_ps(_mm256_mul_ps(vp, vt), vc2);
  63  vp = _mm256_mul_ps(vp, vt);                                       // p *= t, so p = c6*t^5 + ... + c2*t
  65  vt = _mm256_mul_ps(vt, vs);                                       // t *= s (vs holds the 2^n scale factor computed earlier)
  67  vp = _mm256_add_ps(_mm256_mul_ps(vp, vt), vt);                    // p = p*(t*s) + t*s = (p + 1)*t*s
Second occurrence, lines 90-102 (the same sequence repeated later in the kernel, with vt again declared locally at line 90):

  90   __m256 vt = _mm256_add_ps(_mm256_mul_ps(vn, vminus_ln2_hi), vz);
  92   vt = _mm256_add_ps(_mm256_mul_ps(vn, vminus_ln2_lo), vt);
  94   __m256 vp = _mm256_add_ps(_mm256_mul_ps(vc6, vt), vc5);
  95   vp = _mm256_add_ps(_mm256_mul_ps(vp, vt), vc4);
  96   vp = _mm256_add_ps(_mm256_mul_ps(vp, vt), vc3);
  97   vp = _mm256_add_ps(_mm256_mul_ps(vp, vt), vc2);
  98   vp = _mm256_mul_ps(vp, vt);
  100  vt = _mm256_mul_ps(vt, vs);
  102  vp = _mm256_add_ps(_mm256_mul_ps(vp, vt), vt);
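For reference, below is a minimal scalar sketch of the same rr2_p6 scheme these intrinsics follow: two-step (hi/lo) range reduction of z against ln2, a degree-6 polynomial in the reduced argument t evaluated by Horner's rule, and reconstruction of expm1(z) as (p + 1)*t*s + (s - 1) with s = 2^n. The helper name, the ln2 split constants (a generic libm-style split), and the plain Taylor coefficients 1/k! are illustrative assumptions rather than XNNPACK's own minimax-fitted constants, and the ELU kernel's prescale, saturation, and alpha scaling are omitted.

  #include <math.h>
  #include <stdio.h>

  /* Generic Cody-Waite split of ln(2) and log2(e); illustrative values,
   * not XNNPACK's constants. */
  static const float kLog2E = 1.44269504f;
  static const float kLn2Hi = 6.93145751953125e-1f;
  static const float kLn2Lo = 1.4286067653e-6f;

  /* expm1(z) for z <= 0 in the same shape as the rr2_p6 kernel:
   * two-step range reduction, then a degree-6 polynomial in t. */
  static float expm1_rr2_p6(float z) {
    const float n = nearbyintf(z * kLog2E);   /* n = round(z / ln2) */
    const float s = ldexpf(1.0f, (int) n);    /* s = 2^n            */

    float t = z - n * kLn2Hi;                 /* t = z - n*ln2, in   */
    t = t - n * kLn2Lo;                       /* two steps ("rr2")   */

    /* Horner evaluation; plain Taylor coefficients 1/k! stand in for
     * the kernel's minimax-optimized c2..c6. */
    float p = 1.0f / 720.0f;
    p = p * t + 1.0f / 120.0f;
    p = p * t + 1.0f / 24.0f;
    p = p * t + 1.0f / 6.0f;
    p = p * t + 0.5f;
    p = p * t;                                /* p = c6*t^5 + ... + c2*t */

    t = t * s;
    p = p * t + t;                            /* (p + 1) * t * s         */
    return p + (s - 1.0f);                    /* e^z - 1                 */
  }

  int main(void) {
    for (float x = -5.0f; x <= 0.0f; x += 1.25f) {
      printf("x=%+.2f  approx=%+.7f  expm1f=%+.7f\n",
             x, expm1_rr2_p6(x), expm1f(x));
    }
    return 0;
  }

Built with the usual -lm, this agrees closely with expm1f over the sampled range; the ELU value for negative inputs would then be alpha * expm1(z).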