Lines Matching refs:vtwo
39 const __m256 vtwo = _mm256_set1_ps(2.0f); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72() local
212 vr0 = _mm256_mul_ps(vr0, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr0, vd0))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
213 vr0 = _mm256_mul_ps(vr0, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr0, vd0))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
214 vr1 = _mm256_mul_ps(vr1, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr1, vd1))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
215 vr1 = _mm256_mul_ps(vr1, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr1, vd1))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
216 vr2 = _mm256_mul_ps(vr2, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr2, vd2))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
217 vr2 = _mm256_mul_ps(vr2, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr2, vd2))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
218 vr3 = _mm256_mul_ps(vr3, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr3, vd3))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
219 vr3 = _mm256_mul_ps(vr3, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr3, vd3))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
220 vr4 = _mm256_mul_ps(vr4, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr4, vd4))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
221 vr4 = _mm256_mul_ps(vr4, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr4, vd4))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
222 vr5 = _mm256_mul_ps(vr5, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr5, vd5))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
223 vr5 = _mm256_mul_ps(vr5, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr5, vd5))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
224 vr6 = _mm256_mul_ps(vr6, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr6, vd6))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
225 vr6 = _mm256_mul_ps(vr6, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr6, vd6))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
226 vr7 = _mm256_mul_ps(vr7, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr7, vd7))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
227 vr7 = _mm256_mul_ps(vr7, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr7, vd7))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
228 vr8 = _mm256_mul_ps(vr8, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr8, vd8))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
229 vr8 = _mm256_mul_ps(vr8, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr8, vd8))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
299 vr = _mm256_mul_ps(vr, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr, vd))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
300 vr = _mm256_mul_ps(vr, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr, vd))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
338 vr = _mm256_mul_ps(vr, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr, vd))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
339 vr = _mm256_mul_ps(vr, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr, vd))); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
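All references to vtwo above come from the same pattern: the constant 2.0f is the "2" in the Newton-Raphson reciprocal iteration r ← r·(2 − r·d), which refines an approximate reciprocal of the sigmoid denominator d; the "nr2" in the kernel name reflects the two refinement steps applied to each vector. The sketch below shows that pattern in isolation. It is a minimal illustration, assuming the initial estimate comes from _mm256_rcp_ps and using a hypothetical helper name rcp_nr2; neither is taken verbatim from the matched kernel, which interleaves these steps across nine accumulators (vr0..vr8).

/* Minimal sketch of the two Newton-Raphson refinement steps matched above.
 * Assumptions: the initial reciprocal estimate via _mm256_rcp_ps and the
 * helper name rcp_nr2 are illustrative, not copied from the kernel. */
#include <immintrin.h>

static inline __m256 rcp_nr2(__m256 vd) {
  const __m256 vtwo = _mm256_set1_ps(2.0f);
  __m256 vr = _mm256_rcp_ps(vd);  /* coarse (~12-bit) estimate of 1/d */
  /* Each step roughly doubles the number of correct bits: r <- r * (2 - r*d). */
  vr = _mm256_mul_ps(vr, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr, vd)));
  vr = _mm256_mul_ps(vr, _mm256_sub_ps(vtwo, _mm256_mul_ps(vr, vd)));
  return vr;
}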