Lines Matching refs:vn
47 __m256 vn = _mm256_add_ps(_mm256_mul_ps(vz, vlog2e), vmagic_bias); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x8() local
49 …const __m128 vs_lo = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn)),… in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x8()
50 …const __m128 vs_hi = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn, 1)… in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x8()
53 vn = _mm256_sub_ps(vn, vmagic_bias); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x8()
55 __m256 vt = _mm256_add_ps(_mm256_mul_ps(vn, vminus_ln2_hi), vz); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x8()
56 vt = _mm256_add_ps(_mm256_mul_ps(vn, vminus_ln2_lo), vt); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x8()
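The matches at source lines 47-56 are the exponential range reduction in the main 8-element loop: vn = z*log2(e) + magic_bias rounds z*log2(e) to the nearest integer via the magic-bias trick, the two _mm_slli_epi32 lines (truncated by the search view) rebuild 2^n by shifting the integer bits of each 128-bit half of vn into the float exponent field, and the last two lines perform the two-step ("rr2") Cody-Waite reduction t = z - n*ln(2) with a hi/lo split of ln(2). Below is a minimal scalar sketch of the same trick, not the XNNPACK kernel itself; the constants are representative values I'm assuming, not copied from the kernel source.

#include <stdint.h>
#include <string.h>

/* Minimal scalar sketch of the magic-bias exp range reduction seen above.
 * Not the XNNPACK kernel; constant values are assumed representatives. */
static float exp_range_reduce_sketch(float z) {
  const float log2e        = 0x1.715476p+0f;   /* log2(e) */
  const float magic_bias   = 0x1.8000FEp23f;   /* assumed; forces round-to-nearest */
  const float minus_ln2_hi = -0x1.62E400p-1f;  /* assumed hi/lo split of ln(2) */
  const float minus_ln2_lo = -0x1.7F7D1Cp-20f;

  /* n = round(z * log2(e)): adding the large bias pushes the integer part
   * into the low mantissa bits, so the FPU's own rounding does the work. */
  float vn = z * log2e + magic_bias;

  /* Rebuild s = 2^n by shifting those integer bits into the exponent field.
   * The AVX kernel does this per 128-bit half (vs_lo / vs_hi) because AVX1
   * has no 256-bit integer shift. */
  uint32_t n_bits; memcpy(&n_bits, &vn, sizeof n_bits);
  uint32_t s_bits = n_bits << 23;               /* 23 = float mantissa width */
  float vs; memcpy(&vs, &s_bits, sizeof s_bits);

  vn -= magic_bias;                             /* recover n as a float */

  /* Two-step ("rr2") Cody-Waite reduction: t = z - n*ln(2), hi then lo. */
  float vt = vn * minus_ln2_hi + z;
  vt = vn * minus_ln2_lo + vt;

  (void) vs;  /* vs and vt feed the polynomial evaluation in the kernel */
  return vt;
}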
84 __m256 vn = _mm256_add_ps(_mm256_mul_ps(vz, vlog2e), vmagic_bias); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x8() local
85 …const __m128 vs_lo = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn)),… in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x8()
86 …const __m128 vs_hi = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn, 1)… in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x8()
89 vn = _mm256_sub_ps(vn, vmagic_bias); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x8()
91 __m256 vt = _mm256_add_ps(_mm256_mul_ps(vn, vminus_ln2_hi), vz); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x8()
92 vt = _mm256_add_ps(_mm256_mul_ps(vn, vminus_ln2_lo), vt); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x8()
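The second group of matches (source lines 84-92) is the same computation repeated in the tail path that handles a final batch of fewer than 8 elements. The kernel name suggests the reduced values then feed a degree-5 polynomial ("p5") and a division ("div") to form the sigmoid; a hedged sketch of that reconstruction step follows, assuming the usual RR2 P5 structure with z = -|x| and hypothetical placeholder coefficients c[0..4].

/* Hedged sketch of the reconstruction implied by the "p5_div" kernel name:
 * e ~= exp(z) from s = 2^n and the reduced argument t, then one division.
 * The coefficients c[0..4] are hypothetical placeholders, not the kernel's. */
static float sigmoid_from_reduction_sketch(float vs, float vt, const float c[5]) {
  /* Inner Horner evaluation; with the t*(...) factor below this yields a
   * degree-5 approximation: exp(t) ~= 1 + c1*t + c2*t^2 + ... + c5*t^5. */
  float vp = c[4];
  vp = vp * vt + c[3];
  vp = vp * vt + c[2];
  vp = vp * vt + c[1];
  vp = vp * vt + c[0];

  /* e ~= s * (1 + t*p) = exp(z); with z = -|x|, e lies in (0, 1]. */
  float ve = (vt * vs) * vp + vs;

  /* "div" variant: sigmoid(-|x|) = e / (1 + e); the result is flipped
   * to 1 - f for positive inputs. */
  return ve / (ve + 1.0f);
}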