Lines Matching refs:vn
48 __m256 vn = _mm256_add_ps(_mm256_mul_ps(vz, vlog2e), vmagic_bias); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x8() local
50 …const __m128 vs_lo = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn)),… in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x8()
51 …const __m128 vs_hi = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn, 1)… in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x8()
54 vn = _mm256_sub_ps(vn, vmagic_bias); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x8()
56 __m256 vt = _mm256_add_ps(_mm256_mul_ps(vn, vminus_ln2_hi), vz); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x8()
57 vt = _mm256_add_ps(_mm256_mul_ps(vn, vminus_ln2_lo), vt); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x8()
88 __m256 vn = _mm256_add_ps(_mm256_mul_ps(vz, vlog2e), vmagic_bias); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x8() local
89 …const __m128 vs_lo = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn)),… in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x8()
90 …const __m128 vs_hi = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn, 1)… in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x8()
93 vn = _mm256_sub_ps(vn, vmagic_bias); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x8()
95 __m256 vt = _mm256_add_ps(_mm256_mul_ps(vn, vminus_ln2_hi), vz); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x8()
96 vt = _mm256_add_ps(_mm256_mul_ps(vn, vminus_ln2_lo), vt); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x8()
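The matched lines all belong to the exp(z) setup in this kernel: vn holds round(z * log2(e)) obtained with the magic-bias trick, the two _mm_slli_epi32 lines rebuild s = 2**n by shifting the biased integer from the mantissa into the exponent field (done per 128-bit half because plain AVX has no 256-bit integer shift), and the two vt lines perform the two-step ("rr2") Cody-Waite range reduction t = z - n*ln2. The following is a minimal, self-contained sketch of that sequence, not a copy of the kernel; the constant values shown are the standard choices for this scheme and are assumptions here, the exact values live in the XNNPACK source.

#include <immintrin.h>

/* Sketch: compute s = 2**n and the reduced argument t = z - n*ln2, the way
 * the matched lines above do it. Constants are assumed standard values. */
static __m256 exp_setup_avx_rr2(const __m256 vz, __m256* vs_out) {
  const __m256 vlog2e        = _mm256_set1_ps(0x1.715476p+0f);  /* log2(e) */
  const __m256 vmagic_bias   = _mm256_set1_ps(0x1.8000FEp23f);  /* 1.5*2^23 + 127 */
  const __m256 vminus_ln2_hi = _mm256_set1_ps(-0x1.62E43p-1f);  /* high part of -ln(2) */
  const __m256 vminus_ln2_lo = _mm256_set1_ps(0x1.05C61p-29f);  /* low correction part */

  /* n = round(z * log2(e)): adding the magic bias rounds to an integer and
   * leaves the biased n in the low mantissa bits of vn. */
  __m256 vn = _mm256_add_ps(_mm256_mul_ps(vz, vlog2e), vmagic_bias);

  /* s = 2**n: shift the biased exponent from the mantissa into the exponent
   * field. AVX (pre-AVX2) lacks 256-bit integer shifts, so each 128-bit half
   * is shifted separately and the halves are reassembled. */
  const __m128 vs_lo = _mm_castsi128_ps(
      _mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn)), 23));
  const __m128 vs_hi = _mm_castsi128_ps(
      _mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn, 1)), 23));
  *vs_out = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo), vs_hi, 1);

  /* Undo the magic bias so vn holds n as an ordinary float again. */
  vn = _mm256_sub_ps(vn, vmagic_bias);

  /* t = z - n*ln2 with ln2 split into hi/lo parts (Cody-Waite two-step
   * reduction) so the cancellation stays accurate. */
  __m256 vt = _mm256_add_ps(_mm256_mul_ps(vn, vminus_ln2_hi), vz);
  vt = _mm256_add_ps(_mm256_mul_ps(vn, vminus_ln2_lo), vt);
  return vt;  /* feed vt into the degree-5 polynomial, then scale by *vs_out */
}

The same sequence appears twice in the hits (source lines 48-57 and 88-96) because the x8 kernel repeats it for its main loop and its remainder handling; in both places vt is subsequently fed into the p5 polynomial and multiplied by the reconstructed scale s.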