
Lines matching refs: __m256 (all in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3())

27 const __m256 vlog2e = _mm256_set1_ps(0x1.715476p+0f);
28 const __m256 vminus_ln2_hi = _mm256_set1_ps(-0x1.62E43p-1f);
29 const __m256 vminus_ln2_lo = _mm256_set1_ps(0x1.05C61p-29f);
33 const __m256 vmin_exponent = _mm256_set1_ps(-127.0f);
34 const __m256 vmagic_bias = _mm256_set1_ps(0x1.8000FEp23f);
35 const __m256 vminus_inf = _mm256_set1_ps(-INFINITY);
37 const __m256 vc0 = _mm256_set1_ps(1.0f);
38 const __m256 vc1 = _mm256_set1_ps(0x1.FFFFF6p-1f);
39 const __m256 vc2 = _mm256_set1_ps(0x1.FFFDC6p-2f);
40 const __m256 vc3 = _mm256_set1_ps(0x1.555A80p-3f);
41 const __m256 vc4 = _mm256_set1_ps(0x1.573A1Ap-5f);
42 const __m256 vc5 = _mm256_set1_ps(0x1.0F9F9Cp-7f);
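
These constants set up an "extended exponent" evaluation of exp(x): each partial result is carried as a pair (value, exponent) meaning value * 2^exponent, so a running sum of exponentials can neither overflow nor underflow the float range. A minimal scalar sketch of the decomposition the constants serve, with expf() standing in for the degree-5 polynomial (the extexp type and function names are illustrative, not from the kernel):

    #include <math.h>

    /* Pair (v, e) representing v * 2^e. */
    typedef struct { float v; float e; } extexp;

    static extexp extexp_of_exp(float x) {
      const float log2e        = 0x1.715476p+0f;  /* same constant as line 27 */
      const float minus_ln2_hi = -0x1.62E43p-1f;  /* line 28 */
      const float minus_ln2_lo = 0x1.05C61p-29f;  /* line 29 */
      const float n = roundf(x * log2e);          /* e^x = 2^n * e^t */
      float t = fmaf(n, minus_ln2_hi, x);         /* t = x - n*ln2, hi step */
      t = fmaf(n, minus_ln2_lo, t);               /* lo correction */
      extexp r = { expf(t) /* stand-in for the p5 polynomial */, n };
      return r;
    }
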
44 __m256 vaccv0 = _mm256_setzero_ps();
45 __m256 vaccv1 = _mm256_setzero_ps();
46 __m256 vaccv2 = _mm256_setzero_ps();
47 __m256 vacce0 = vminus_inf;
48 __m256 vacce1 = vminus_inf;
49 __m256 vacce2 = vminus_inf;
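
The accumulators start at the identity element of this representation, 0 * 2^(-INFINITY): when such a pair is combined with real data, the max picks the data's exponent, the empty accumulator's exponent delta clamps to -127, and its scale flushes to zero, so it contributes nothing. A hedged scalar sketch of the combine rule the (vaccvN, vacceN) pairs follow, with exp2f standing in for the bit-level scales built at lines 189-200:

    #include <math.h>

    typedef struct { float v; float e; } extexp;  /* v * 2^e, as above */

    static extexp extexp_add(extexp a, extexp b) {
      const float e  = fmaxf(a.e, b.e);          /* shared exponent */
      const float da = fmaxf(a.e - e, -127.0f);  /* clamp as at line 33 */
      const float db = fmaxf(b.e - e, -127.0f);
      extexp r = { a.v * exp2f(da) + b.v * exp2f(db), e };
      return r;
    }
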
52 const __m256 vx0 = _mm256_loadu_ps(x);
53 const __m256 vx1 = _mm256_loadu_ps(x + 8);
54 const __m256 vx2 = _mm256_loadu_ps(x + 16);
55 const __m256 vx3 = _mm256_loadu_ps(x + 24);
56 const __m256 vx4 = _mm256_loadu_ps(x + 32);
57 const __m256 vx5 = _mm256_loadu_ps(x + 40);
58 const __m256 vx6 = _mm256_loadu_ps(x + 48);
59 const __m256 vx7 = _mm256_loadu_ps(x + 56);
60 const __m256 vx8 = _mm256_loadu_ps(x + 64);
64 const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
65 const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
66 const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
67 const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
68 const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
69 const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
70 const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
71 const __m256 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
72 const __m256 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
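
Each vnN is the nearest integer to x * log2(e), so e^x factors as 2^n * e^t with |t| <= ln2/2 ~ 0.347, keeping the polynomial argument small; _MM_FROUND_NO_EXC additionally suppresses the inexact exception. A worked scalar instance (values in comments are approximate):

    #include <math.h>
    #include <stdio.h>

    int main(void) {
      const float x = 10.0f;
      const float n = roundf(x * 0x1.715476p+0f);  /* round(14.4269...) = 14 */
      const float t = fmaf(n, -0x1.62E43p-1f, x);  /* 10 - 14*ln2 ~ 0.296 */
      /* Both values agree: e^10 = 2^14 * e^t ~ 22026.47 */
      printf("%g %g\n", exp2f(n) * expf(t), expf(x));
      return 0;
    }
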
76 __m256 vt0 = _mm256_fmadd_ps(vn0, vminus_ln2_hi, vx0);
77 __m256 vt1 = _mm256_fmadd_ps(vn1, vminus_ln2_hi, vx1);
78 __m256 vt2 = _mm256_fmadd_ps(vn2, vminus_ln2_hi, vx2);
79 __m256 vt3 = _mm256_fmadd_ps(vn3, vminus_ln2_hi, vx3);
80 __m256 vt4 = _mm256_fmadd_ps(vn4, vminus_ln2_hi, vx4);
81 __m256 vt5 = _mm256_fmadd_ps(vn5, vminus_ln2_hi, vx5);
82 __m256 vt6 = _mm256_fmadd_ps(vn6, vminus_ln2_hi, vx6);
83 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7);
84 __m256 vt8 = _mm256_fmadd_ps(vn8, vminus_ln2_hi, vx8);
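
Only the hi step of the range reduction declares an __m256; the lo step reassigns vtN (presumably vt0 = _mm256_fmadd_ps(vn0, vminus_ln2_lo, vt0); and so on) and is therefore absent from this match list. The two-constant (Cody-Waite) split lets the large n*ln2_hi term cancel against x almost exactly, while the tiny lo term restores the bits of ln2 that the hi constant could not hold. A scalar sketch of both steps:

    #include <math.h>
    #include <stdio.h>

    int main(void) {
      const float x = 87.0f;
      const float n = roundf(x * 0x1.715476p+0f);
      float t = fmaf(n, -0x1.62E43p-1f, x);  /* hi step: lines 76-84 */
      t = fmaf(n, 0x1.05C61p-29f, t);        /* lo step: elided by the filter */
      printf("t = %a\n", t);
      return 0;
    }
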
97 __m256 vp0 = _mm256_fmadd_ps(vc5, vt0, vc4);
98 __m256 vp1 = _mm256_fmadd_ps(vc5, vt1, vc4);
99 __m256 vp2 = _mm256_fmadd_ps(vc5, vt2, vc4);
100 __m256 vp3 = _mm256_fmadd_ps(vc5, vt3, vc4);
101 __m256 vp4 = _mm256_fmadd_ps(vc5, vt4, vc4);
102 __m256 vp5 = _mm256_fmadd_ps(vc5, vt5, vc4);
103 __m256 vp6 = _mm256_fmadd_ps(vc5, vt6, vc4);
104 __m256 vp7 = _mm256_fmadd_ps(vc5, vt7, vc4);
105 __m256 vp8 = _mm256_fmadd_ps(vc5, vt8, vc4);
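
Lines 97-105 are only the first step of the degree-5 Horner evaluation of e^t; the follow-on steps reassign vpN without a fresh __m256 declaration, so they do not match. A hedged reconstruction of the full chain for one register, assuming the kernel follows the usual p5 pattern with the vc0..vc5 constants from lines 37-42:

    #include <immintrin.h>

    static __m256 poly_eval_p5(__m256 vt, __m256 vc0, __m256 vc1, __m256 vc2,
                               __m256 vc3, __m256 vc4, __m256 vc5) {
      __m256 vp = _mm256_fmadd_ps(vc5, vt, vc4);  /* the step at lines 97-105 */
      vp = _mm256_fmadd_ps(vp, vt, vc3);
      vp = _mm256_fmadd_ps(vp, vt, vc2);
      vp = _mm256_fmadd_ps(vp, vt, vc1);
      vp = _mm256_fmadd_ps(vp, vt, vc0);  /* vp ~ e^t for |t| <= ln2/2 */
      return vp;
    }
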
157 __m256 vmax_e0 = _mm256_max_ps(vacce0, vn0);
158 __m256 vmax_e1 = _mm256_max_ps(vacce1, vn1);
159 __m256 vmax_e2 = _mm256_max_ps(vacce2, vn2);
171 const __m256 vdelta_acce0 = _mm256_max_ps(_mm256_sub_ps(vacce0, vmax_e0), vmin_exponent);
172 const __m256 vdelta_acce1 = _mm256_max_ps(_mm256_sub_ps(vacce1, vmax_e1), vmin_exponent);
173 const __m256 vdelta_acce2 = _mm256_max_ps(_mm256_sub_ps(vacce2, vmax_e2), vmin_exponent);
174 const __m256 vdelta_e0 = _mm256_max_ps(_mm256_sub_ps(vn0, vmax_e0), vmin_exponent);
175 const __m256 vdelta_e1 = _mm256_max_ps(_mm256_sub_ps(vn1, vmax_e1), vmin_exponent);
176 const __m256 vdelta_e2 = _mm256_max_ps(_mm256_sub_ps(vn2, vmax_e2), vmin_exponent);
177 const __m256 vdelta_e3 = _mm256_max_ps(_mm256_sub_ps(vn3, vmax_e0), vmin_exponent);
178 const __m256 vdelta_e4 = _mm256_max_ps(_mm256_sub_ps(vn4, vmax_e1), vmin_exponent);
179 const __m256 vdelta_e5 = _mm256_max_ps(_mm256_sub_ps(vn5, vmax_e2), vmin_exponent);
180 const __m256 vdelta_e6 = _mm256_max_ps(_mm256_sub_ps(vn6, vmax_e0), vmin_exponent);
181 const __m256 vdelta_e7 = _mm256_max_ps(_mm256_sub_ps(vn7, vmax_e1), vmin_exponent);
182 const __m256 vdelta_e8 = _mm256_max_ps(_mm256_sub_ps(vn8, vmax_e2), vmin_exponent);
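
The pairing here reveals the round-robin assignment of the nine vectors to the three accumulators: vn0/vn3/vn6 feed accumulator 0 (hence vdelta_e3 and vdelta_e6 are computed against vmax_e0), vn1/vn4/vn7 feed accumulator 1, and vn2/vn5/vn8 feed accumulator 2. The max lines that fold vn3..vn8 into vmax_eK reassign existing variables and are elided from the listing, as is the rescale-and-add update itself; a hedged sketch of what that update presumably looks like for accumulator 0:

    #include <immintrin.h>

    /* vaccs0 = 2^(vacce0 - vmax_e0) and vsN = 2^(vnN - vmax_eK) are the
       scales built at lines 189-200; the update declares no __m256, so
       it is absent from the match list. */
    static void acc0_update(__m256* vaccv0, __m256* vacce0, __m256 vmax_e0,
                            __m256 vaccs0, __m256 vp0, __m256 vs0,
                            __m256 vp3, __m256 vs3, __m256 vp6, __m256 vs6) {
      __m256 v = _mm256_mul_ps(*vaccv0, vaccs0);  /* rescale the old sum */
      v = _mm256_fmadd_ps(vp0, vs0, v);           /* add the new terms */
      v = _mm256_fmadd_ps(vp3, vs3, v);
      v = _mm256_fmadd_ps(vp6, vs6, v);
      *vaccv0 = v;
      *vacce0 = vmax_e0;                          /* adopt the new exponent */
    }
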
189 const __m256 vaccs0 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_acce0, vmagic_bias)), 23));
190 const __m256 vaccs1 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_acce1, vmagic_bias)), 23));
191 const __m256 vaccs2 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_acce2, vmagic_bias)), 23));
192 const __m256 vs0 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_e0, vmagic_bias)), 23));
193 const __m256 vs1 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_e1, vmagic_bias)), 23));
194 const __m256 vs2 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_e2, vmagic_bias)), 23));
195 const __m256 vs3 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_e3, vmagic_bias)), 23));
196 const __m256 vs4 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_e4, vmagic_bias)), 23));
197 const __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_e5, vmagic_bias)), 23));
198 const __m256 vs6 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_e6, vmagic_bias)), 23));
199 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_e7, vmagic_bias)), 23));
200 const __m256 vs8 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_e8, vmagic_bias)), 23));
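
The vdelta values are integer-valued floats in [-127, 0], and these lines turn each one into the scale 2^delta without any exponential: adding vmagic_bias = 0x1.8000FEp23f plants the integer in the low mantissa bits, and shifting the raw bits left by 23 moves it straight into the exponent field (delta = -127 lands on +0.0f, which is exactly why the clamp at line 33 gives a clean flush to zero). A scalar demo of the trick:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* bits(d + 0x1.8000FEp23f) << 23 equals the bits of the float 2^d,
       for integer-valued float d in [-127, 0]. */
    static float scale_of_delta(float d) {
      float biased = d + 0x1.8000FEp23f;
      uint32_t bits;
      memcpy(&bits, &biased, sizeof bits);
      bits <<= 23;  /* low 9 bits become the sign+exponent of 2^d */
      float s;
      memcpy(&s, &bits, sizeof s);
      return s;
    }

    int main(void) {
      /* prints: 1 0.125 0 */
      printf("%g %g %g\n", scale_of_delta(0.0f), scale_of_delta(-3.0f),
             scale_of_delta(-127.0f));
      return 0;
    }
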
222 const __m256 vmax_acce01 = _mm256_max_ps(vacce0, vacce1);
223 const __m256 vmax_acce2 = vacce2;
224 const __m256 vmax_acce012 = _mm256_max_ps(vmax_acce01, vmax_acce2);
226 const __m256 vdelta_acce0 = _mm256_max_ps(_mm256_sub_ps(vacce0, vmax_acce012), vmin_exponent);
227 const __m256 vdelta_acce1 = _mm256_max_ps(_mm256_sub_ps(vacce1, vmax_acce012), vmin_exponent);
228 const __m256 vdelta_acce2 = _mm256_max_ps(_mm256_sub_ps(vacce2, vmax_acce012), vmin_exponent);
230 const __m256 vaccs0 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_acce0, vmagic_bias)), 23));
231 const __m256 vaccs1 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_acce1, vmagic_bias)), 23));
232 const __m256 vaccs2 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_acce2, vmagic_bias)), 23));
234 __m256 vaccv = _mm256_mul_ps(vaccv0, vaccs0);
237 __m256 vacce = vmax_acce012;
241 const __m256 vx = _mm256_loadu_ps(x);
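
Lines 222-237 fold the three running pairs onto the shared exponent vmax_acce012. Only the first product at line 234 declares an __m256; the two fmadd lines that fold in vaccv1 and vaccv2 reassign vaccv and so are missing from the match list. A hedged sketch of the complete merge:

    #include <immintrin.h>

    static __m256 merge3(__m256 vaccv0, __m256 vaccv1, __m256 vaccv2,
                         __m256 vaccs0, __m256 vaccs1, __m256 vaccs2) {
      __m256 vaccv = _mm256_mul_ps(vaccv0, vaccs0);    /* line 234 */
      vaccv = _mm256_fmadd_ps(vaccv1, vaccs1, vaccv);  /* elided */
      vaccv = _mm256_fmadd_ps(vaccv2, vaccs2, vaccv);  /* elided */
      return vaccv;
    }
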
245 const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
249 __m256 vt = _mm256_fmadd_ps(vn, vminus_ln2_hi, vx);
253 __m256 vp = _mm256_fmadd_ps(vc5, vt, vc4);
260 const __m256 vmax_e = _mm256_max_ps(vacce, vn);
263 const __m256 vdelta_acce = _mm256_max_ps(_mm256_sub_ps(vacce, vmax_e), vmin_exponent);
264 const __m256 vdelta_e = _mm256_max_ps(_mm256_sub_ps(vn, vmax_e), vmin_exponent);
267 const __m256 vaccs = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_acce, vmagic_bias)), 23));
268 const __m256 vs = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_e, vmagic_bias)), 23));
282 const __m256 vx = _mm256_maskload_ps(x, vmask);
285 __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
289 __m256 vt = _mm256_fmadd_ps(vn, vminus_ln2_hi, vx);
296 __m256 vp = _mm256_fmadd_ps(vc5, vt, vc4);
304 const __m256 vmax_e = _mm256_max_ps(vacce, vn);
307 const __m256 vdelta_e = _mm256_max_ps(_mm256_sub_ps(vn, vmax_e), vmin_exponent);
308 const __m256 vdelta_acce = _mm256_max_ps(_mm256_sub_ps(vacce, vmax_e), vmin_exponent);
311 const __m256 vs = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_e, vmagic_bias)), 23));
312 const __m256 vaccs = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_acce, vmagic_bias)), 23));
322 __m256 vmax_acce = _mm256_max_ps(vacce, _mm256_permute2f128_ps(vacce, vacce, 1));
325 const __m256 vdelta_acce = _mm256_max_ps(_mm256_sub_ps(vacce, vmax_acce), vmin_exponent);
326 const __m256 vaccs = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(vdelta_acce, vmagic_bias)), 23));
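
Line 322 folds the two 128-bit halves of vacce; the further shuffle maxes that complete the horizontal exponent max, and the adds that collapse vaccv to a scalar, all reassign existing variables and so fall outside this match list. A hedged scalar sketch of what remains once every lane has been rescaled to the common exponent: sum the lanes and emit the result as a (value, exponent) pair:

    #include <stddef.h>

    /* Final reduction, scalar model: the lanes of vaccv have already been
       multiplied by vaccs = 2^(vacce - max_acce), so the kernel's output
       is the extended number sum * 2^max_acce, stored as a pair. */
    static void finish(const float vaccv[8], float max_acce, float out[2]) {
      float sum = 0.0f;
      for (size_t i = 0; i < 8; i++) {
        sum += vaccv[i];
      }
      out[0] = sum;       /* mantissa-like value */
      out[1] = max_acce;  /* shared power-of-two exponent */
    }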