
Lines Matching refs:__m256

All matches are in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96(); the number before each line is its line number in the source file. Only lines containing the token __m256 are listed, which in this kernel means declarations of new vector variables; statements that merely reassign an existing variable fall into the gaps in the numbering.

29 const __m256 vlog2e = _mm256_set1_ps(0x1.715476p+0f);
30 const __m256 vminus_ln2_hi = _mm256_set1_ps(-0x1.62E43p-1f);
31 const __m256 vminus_ln2_lo = _mm256_set1_ps(0x1.05C61p-29f);
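The three constants above (lines 29-31) set up the classic range reduction for exp(): n = round(x * log2(e)), then t = x - n*ln(2), so that exp(x) = 2^n * exp(t) with |t| <= ln(2)/2. ln(2) is split into a high part and a low correction so the subtraction stays accurate even for large |n| (Cody-Waite reduction). A quick standalone check that the split reproduces -ln(2) well beyond single precision (a sketch, not part of the kernel):

    #include <math.h>
    #include <stdio.h>

    int main(void) {
      const float minus_ln2_hi = -0x1.62E43p-1f;   /* line 30 */
      const float minus_ln2_lo =  0x1.05C61p-29f;  /* line 31 */
      /* Summed in double, the two parts agree with -ln(2) to roughly 2^-50. */
      printf("split: %.17g\n", (double) minus_ln2_hi + (double) minus_ln2_lo);
      printf("exact: %.17g\n", -log(2.0));
      return 0;
    }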
35 const __m256 vmin_exponent = _mm256_set1_ps(-127.0f);
36 const __m256 vmagic_bias = _mm256_set1_ps(0x1.8000FEp23f);
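vmagic_bias (0x1.8000FEp23) is the magic constant for turning an integer-valued float exponent into an IEEE-754 scale factor: adding it pins the integer into the low mantissa bits, and shifting the whole bit pattern left by 23 moves it into the exponent field (this is what the vs lines near the bottom do). vmin_exponent = -127 clamps the exponent beforehand so the constructed value stays a normal float. A scalar sketch of the trick (illustrative names, not the kernel's):

    #include <stdint.h>
    #include <string.h>

    /* Build 2^e for an integer-valued float e in [-127, 127] (sketch). */
    static float scale_from_exponent(float e) {
      const float magic_bias = 0x1.8000FEp23f;
      float biased = e + magic_bias;   /* e + 127 now sits in the low mantissa bits */
      uint32_t bits;
      memcpy(&bits, &biased, sizeof bits);
      bits <<= 23;                     /* move the biased exponent into place */
      float s;
      memcpy(&s, &bits, sizeof s);
      return s;                        /* == 2^e */
    }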
38 const __m256 vc0 = _mm256_set1_ps(1.0f);
39 const __m256 vc1 = _mm256_set1_ps(0x1.FFFFF6p-1f);
40 const __m256 vc2 = _mm256_set1_ps(0x1.FFFDC6p-2f);
41 const __m256 vc3 = _mm256_set1_ps(0x1.555A80p-3f);
42 const __m256 vc4 = _mm256_set1_ps(0x1.573A1Ap-5f);
43 const __m256 vc5 = _mm256_set1_ps(0x1.0F9F9Cp-7f);
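vc0..vc5 are the coefficients of the degree-5 polynomial approximation to exp(t) on the reduced range |t| <= ln(2)/2 (the p5 in the kernel name); c0 = 1 and c1 ~ 1 match the Taylor series, with the higher terms tuned for low maximum error. A standalone way to gauge the approximation quality (a sketch; the error figure in the comment is an expectation, not a claim from this source):

    #include <math.h>
    #include <stdio.h>

    int main(void) {
      float max_rel = 0.0f;
      for (float t = -0.34657f; t <= 0.34657f; t += 1e-5f) {
        float p = 0x1.0F9F9Cp-7f;           /* c5 */
        p = fmaf(p, t, 0x1.573A1Ap-5f);     /* *t + c4 */
        p = fmaf(p, t, 0x1.555A80p-3f);     /* *t + c3 */
        p = fmaf(p, t, 0x1.FFFDC6p-2f);     /* *t + c2 */
        p = fmaf(p, t, 0x1.FFFFF6p-1f);     /* *t + c1 */
        p = fmaf(p, t, 1.0f);               /* *t + c0 */
        const float rel = fabsf(p - expf(t)) / expf(t);
        if (rel > max_rel) max_rel = rel;
      }
      printf("max relative error ~ %g\n", max_rel);  /* expect order 1e-7 */
      return 0;
    }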
45 const __m256 vscalev = _mm256_set1_ps(scale_value);
46 const __m256 vscalee = _mm256_set1_ps(scale_exp);
50 const __m256 vx0 = _mm256_loadu_ps(x);
51 const __m256 vx1 = _mm256_loadu_ps(x + 8);
52 const __m256 vx2 = _mm256_loadu_ps(x + 16);
53 const __m256 vx3 = _mm256_loadu_ps(x + 24);
54 const __m256 vx4 = _mm256_loadu_ps(x + 32);
55 const __m256 vx5 = _mm256_loadu_ps(x + 40);
56 const __m256 vx6 = _mm256_loadu_ps(x + 48);
57 const __m256 vx7 = _mm256_loadu_ps(x + 56);
58 const __m256 vx8 = _mm256_loadu_ps(x + 64);
59 const __m256 vx9 = _mm256_loadu_ps(x + 72);
60 const __m256 vx10 = _mm256_loadu_ps(x + 80);
61 const __m256 vx11 = _mm256_loadu_ps(x + 88);
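Twelve 8-lane unaligned loads cover 96 consecutive floats per main-loop iteration, which is where the _x96 suffix comes from. The batching this implies, as a sketch (process96 and the count variable are illustrative, not the kernel's names):

    #include <stddef.h>

    static void process96(const float* x) { (void) x; /* the 12 loads above + math */ }

    static void run(const float* x, size_t n) {
      /* main loop: 12 AVX2 vectors x 8 floats = 96 elements per pass */
      for (; n >= 96; n -= 96) {
        process96(x);
        x += 96;
      }
      /* remaining n < 96 elements: 8 at a time, then one masked pass */
    }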
65 const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
66 const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
67 const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
68 const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
69 const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
70 const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
71 const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
72 const __m256 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
73 const __m256 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
74 const __m256 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
75 const __m256 vn10 = _mm256_round_ps(_mm256_mul_ps(vx10, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
76 const __m256 vn11 = _mm256_round_ps(_mm256_mul_ps(vx11, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
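Each vn is x * log2(e) rounded to the nearest integer: _MM_FROUND_TO_NEAREST_INT selects round-to-nearest-even (keeping |t| <= ln(2)/2 after the subtraction below) and _MM_FROUND_NO_EXC suppresses floating-point exceptions. A scalar analogue (a sketch, assuming the default rounding mode):

    #include <math.h>

    /* n = round-to-nearest-even(x * log2(e)); nearbyintf() likewise
       raises no inexact exception. */
    static float round_to_exponent(float x) {
      return nearbyintf(x * 0x1.715476p+0f);
    }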
80 __m256 vt0 = _mm256_fmadd_ps(vn0, vminus_ln2_hi, vx0);
81 __m256 vt1 = _mm256_fmadd_ps(vn1, vminus_ln2_hi, vx1);
82 __m256 vt2 = _mm256_fmadd_ps(vn2, vminus_ln2_hi, vx2);
83 __m256 vt3 = _mm256_fmadd_ps(vn3, vminus_ln2_hi, vx3);
84 __m256 vt4 = _mm256_fmadd_ps(vn4, vminus_ln2_hi, vx4);
85 __m256 vt5 = _mm256_fmadd_ps(vn5, vminus_ln2_hi, vx5);
86 __m256 vt6 = _mm256_fmadd_ps(vn6, vminus_ln2_hi, vx6);
87 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7);
88 __m256 vt8 = _mm256_fmadd_ps(vn8, vminus_ln2_hi, vx8);
89 __m256 vt9 = _mm256_fmadd_ps(vn9, vminus_ln2_hi, vx9);
90 __m256 vt10 = _mm256_fmadd_ps(vn10, vminus_ln2_hi, vx10);
91 __m256 vt11 = _mm256_fmadd_ps(vn11, vminus_ln2_hi, vx11);
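This is the high half of the Cody-Waite subtraction: t = x - n*ln2_hi via FMA. The matching low-half updates presumably reassign vt0..vt11 rather than declare them, which is consistent with the gap between lines 91 and 107. A scalar sketch of both steps together:

    #include <math.h>

    /* t = x - n*ln(2) in two FMA steps; the low correction restores the
       bits the truncated high part leaves behind (sketch). */
    static float reduced_argument(float x, float n) {
      float t = fmaf(n, -0x1.62E43p-1f, x);  /* high part */
      return fmaf(n, 0x1.05C61p-29f, t);     /* low correction */
    }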
107 __m256 vp0 = _mm256_fmadd_ps(vc5, vt0, vc4);
108 __m256 vp1 = _mm256_fmadd_ps(vc5, vt1, vc4);
109 __m256 vp2 = _mm256_fmadd_ps(vc5, vt2, vc4);
110 __m256 vp3 = _mm256_fmadd_ps(vc5, vt3, vc4);
111 __m256 vp4 = _mm256_fmadd_ps(vc5, vt4, vc4);
112 __m256 vp5 = _mm256_fmadd_ps(vc5, vt5, vc4);
113 __m256 vp6 = _mm256_fmadd_ps(vc5, vt6, vc4);
114 __m256 vp7 = _mm256_fmadd_ps(vc5, vt7, vc4);
115 __m256 vp8 = _mm256_fmadd_ps(vc5, vt8, vc4);
116 __m256 vp9 = _mm256_fmadd_ps(vc5, vt9, vc4);
117 __m256 vp10 = _mm256_fmadd_ps(vc5, vt10, vc4);
118 __m256 vp11 = _mm256_fmadd_ps(vc5, vt11, vc4);
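Only the first Horner step declares vp0..vp11; the remaining steps reassign them and so sit in the gap before line 178. Given vc0..vc3 above, the elided continuation is presumably of the form (a sketch of the likely shape, not verbatim source):

    vp0 = _mm256_fmadd_ps(vp0, vt0, vc3);
    vp0 = _mm256_fmadd_ps(vp0, vt0, vc2);
    vp0 = _mm256_fmadd_ps(vp0, vt0, vc1);
    vp0 = _mm256_fmadd_ps(vp0, vt0, vc0);  /* p ~ exp(t) on |t| <= ln(2)/2 */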
178 __m256 vf0 = _mm256_mul_ps(vp0, vscalev);
179 __m256 vf1 = _mm256_mul_ps(vp1, vscalev);
180 __m256 vf2 = _mm256_mul_ps(vp2, vscalev);
181 __m256 vf3 = _mm256_mul_ps(vp3, vscalev);
182 __m256 vf4 = _mm256_mul_ps(vp4, vscalev);
183 __m256 vf5 = _mm256_mul_ps(vp5, vscalev);
184 __m256 vf6 = _mm256_mul_ps(vp6, vscalev);
185 __m256 vf7 = _mm256_mul_ps(vp7, vscalev);
186 __m256 vf8 = _mm256_mul_ps(vp8, vscalev);
187 __m256 vf9 = _mm256_mul_ps(vp9, vscalev);
188 __m256 vf10 = _mm256_mul_ps(vp10, vscalev);
189 __m256 vf11 = _mm256_mul_ps(vp11, vscalev);
191 __m256 ve0 = _mm256_add_ps(vn0, vscalee);
192 __m256 ve1 = _mm256_add_ps(vn1, vscalee);
193 __m256 ve2 = _mm256_add_ps(vn2, vscalee);
194 __m256 ve3 = _mm256_add_ps(vn3, vscalee);
195 __m256 ve4 = _mm256_add_ps(vn4, vscalee);
196 __m256 ve5 = _mm256_add_ps(vn5, vscalee);
197 __m256 ve6 = _mm256_add_ps(vn6, vscalee);
198 __m256 ve7 = _mm256_add_ps(vn7, vscalee);
199 __m256 ve8 = _mm256_add_ps(vn8, vscalee);
200 __m256 ve9 = _mm256_add_ps(vn9, vscalee);
201 __m256 ve10 = _mm256_add_ps(vn10, vscalee);
202 __m256 ve11 = _mm256_add_ps(vn11, vscalee);
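Here the kernel's extended-range representation becomes visible: each result is carried as a pair (f, e) meaning f * 2^e, so exp() of large inputs never overflows a float. The scale is supplied in the same form (scale_value, scale_exp): mantissa parts multiply, exponent parts add. A scalar sketch of the pairing (the struct and names are illustrative):

    /* Extended-range value: represents f * 2^e (sketch). */
    typedef struct { float f; float e; } extfloat_t;

    static extfloat_t scale_ext(float p, float n, float scale_value, float scale_exp) {
      extfloat_t r;
      r.f = p * scale_value;  /* mantissa parts multiply */
      r.e = n + scale_exp;    /* exponent parts add, kept as a float */
      return r;
    }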
224 const __m256 vs0 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve0, vmagic_bias)), 23));
225 const __m256 vs1 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve1, vmagic_bias)), 23));
226 const __m256 vs2 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve2, vmagic_bias)), 23));
227 const __m256 vs3 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve3, vmagic_bias)), 23));
228 const __m256 vs4 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve4, vmagic_bias)), 23));
229 const __m256 vs5 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve5, vmagic_bias)), 23));
230 const __m256 vs6 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve6, vmagic_bias)), 23));
231 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve7, vmagic_bias)), 23));
232 const __m256 vs8 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve8, vmagic_bias)), 23));
233 const __m256 vs9 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve9, vmagic_bias)), 23));
234 const __m256 vs10 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve10, vmagic_bias)), 23));
235 const __m256 vs11 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve11, vmagic_bias)), 23));
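The vs lines collapse the exponent part back into a float scale factor 2^e using the magic-bias trick from line 36. The neighboring statements in the numbering gaps reassign existing variables, so they do not appear above; given the declared constants they presumably clamp the exponent first and apply the scale afterwards (a sketch of the likely shape, not verbatim source):

    ve0 = _mm256_max_ps(ve0, vmin_exponent);  /* clamp e >= -127 before the bias add */
    /* ... the vs0..vs11 lines above ... */
    vf0 = _mm256_mul_ps(vf0, vs0);            /* fold 2^e into the mantissa part */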
269 const __m256 vx = _mm256_loadu_ps(x);
273 const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
277 __m256 vt = _mm256_fmadd_ps(vn, vminus_ln2_hi, vx);
281 __m256 vp = _mm256_fmadd_ps(vc5, vt, vc4);
288 __m256 vf = _mm256_mul_ps(vp, vscalev);
289 __m256 ve = _mm256_add_ps(vn, vscalee);
295 const __m256 vs = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve, vmagic_bias)), 23));
310 const __m256 vx = _mm256_maskload_ps(x, vmask);
313 const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
317 __m256 vt = _mm256_fmadd_ps(vn, vminus_ln2_hi, vx);
321 __m256 vp = _mm256_fmadd_ps(vc5, vt, vc4);
328 __m256 vf = _mm256_mul_ps(vp, vscalev);
329 __m256 ve = _mm256_add_ps(vn, vscalee);
335 const __m256 vs = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve, vmagic_bias)), 23));
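After the main loop, the block at lines 269-295 repeats the same pipeline for remaining full 8-float vectors, and this final block (lines 310-335) handles the last partial vector: _mm256_maskload_ps reads only the lanes enabled by vmask, whose construction sits in a numbering gap. A common way to build such a mask from the remaining element count (a sketch with an illustrative table name, not the kernel's code):

    #include <immintrin.h>
    #include <stddef.h>
    #include <stdint.h>

    /* mask_table[7 - k] .. mask_table[14 - k] contains k leading -1 words,
       enabling the first k of 8 lanes (sketch). */
    static const int32_t mask_table[14] = {
      -1, -1, -1, -1, -1, -1, -1, 0, 0, 0, 0, 0, 0, 0,
    };

    static __m256 load_partial(const float* x, size_t k) {  /* 1 <= k <= 7 */
      const __m256i vmask = _mm256_loadu_si256((const __m256i*) &mask_table[7 - k]);
      return _mm256_maskload_ps(x, vmask);
    }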