
Searched refs: _MM_FROUND_NO_EXC (Results 1 – 25 of 32), sorted by relevance


/external/XNNPACK/src/f32-vscaleextexp/gen/
avx2-p5-x96.c
65 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
66 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
67 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
68 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
69 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
70 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
71 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
72 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
73 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
74 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
[all …]
avx2-p5-x88.c
64 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
65 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
66 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
67 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
68 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
69 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
70 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
71 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
72 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
73 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
[all …]
avx2-p5-x80.c
63 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
64 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
65 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
66 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
67 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
68 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
69 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
70 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
71 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
72 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
[all …]
avx2-p5-x72.c
62 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
63 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
64 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
65 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
66 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
67 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
68 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
69 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
70 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
228 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
[all …]
avx2-p5-x56.c
60 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
61 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
62 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
63 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
64 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
65 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
66 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
198 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
238 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
avx2-p5-x64.c
61 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
62 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
63 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
64 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
65 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
66 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
67 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
68 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
213 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
253 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
avx2-p5-x48.c
59 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48()
60 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48()
61 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48()
62 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48()
63 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48()
64 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48()
183 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48()
223 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48()
avx2-p5-x40.c
58 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x40()
59 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x40()
60 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x40()
61 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x40()
62 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x40()
168 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x40()
208 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x40()
avx2-p5-x32.c
57 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x32()
58 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x32()
59 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x32()
60 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x32()
153 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x32()
193 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x32()
avx2-p5-x24.c
56 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x24()
57 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x24()
58 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x24()
138 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x24()
178 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x24()
avx2-p5-x16.c
55 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x16()
56 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x16()
123 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x16()
163 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x16()
avx2-p5-x8.c
54 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x8()
108 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x8()
148 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x8()
/external/XNNPACK/src/f32-raddextexp/gen/
avx2-p5-x96.c
63 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
64 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
65 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
66 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
67 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
68 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
69 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
70 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
71 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
72 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
[all …]
avx2-p5-x80.c
61 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
62 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
63 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
64 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
65 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
66 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
67 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
68 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
69 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
70 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
[all …]
avx2-p5-x80-acc2.c
63 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
64 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
65 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
66 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
67 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
68 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
69 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
70 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
71 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
72 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
[all …]
avx2-p5-x96-acc2.c
65 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
66 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
67 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
68 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
69 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
70 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
71 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
72 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
73 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
74 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
[all …]
avx2-p5-x96-acc3.c
67 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
68 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
69 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
70 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
71 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
72 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
73 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
74 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
75 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
76 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
[all …]
avx2-p5-x72.c
60 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
61 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
62 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
63 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
64 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
65 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
66 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
67 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
68 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
219 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
[all …]
avx2-p5-x64.c
59 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
60 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
61 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
62 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
63 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
64 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
65 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
66 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
206 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
246 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
avx2-p5-x72-acc3.c
64 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
65 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
66 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
67 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
68 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
69 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
70 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
71 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
72 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
245 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
[all …]
avx2-p5-x96-acc6.c
73 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
74 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
75 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
76 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
77 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
78 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
79 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
80 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
81 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
82 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
[all …]
avx2-p5-x64-acc2.c
61 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
62 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
63 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
64 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
65 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
66 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
67 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
68 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
221 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
261 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
avx2-p5-x80-acc5.c
69 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
70 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
71 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
72 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
73 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
74 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
75 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
76 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
77 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
78 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
[all …]
avx2-p5-x64-acc4.c
65 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
66 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
67 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
68 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
69 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
70 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
71 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
72 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
241 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
281 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
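Every XNNPACK match above is the same idiom: the AVX2 extexp kernels compute n = round(x · log2(e)) via _mm256_round_ps, passing _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC so the round does not signal the inexact (precision) exception. A minimal, self-contained sketch of that pattern, using an illustrative helper name and constant rather than code lifted from any one file above:

#include <immintrin.h>

/* Sketch of the rounding step used by the extexp kernels: scale the
 * input by log2(e) and round to the nearest integer. _MM_FROUND_NO_EXC
 * suppresses the precision (inexact) exception; requires AVX. */
static inline __m256 round_x_log2e(__m256 vx) {
  const __m256 vlog2e = _mm256_set1_ps(0x1.715476p+0f);  /* log2(e) */
  return _mm256_round_ps(_mm256_mul_ps(vx, vlog2e),
                         _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
}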
/external/mesa3d/src/util/
rounding.h
66 m = _mm_round_ss(m, m, _MM_FROUND_CUR_DIRECTION | _MM_FROUND_NO_EXC); in _mesa_roundevenf()
83 m = _mm_round_sd(m, m, _MM_FROUND_CUR_DIRECTION | _MM_FROUND_NO_EXC); in _mesa_roundeven()
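The mesa3d hits use the scalar SSE4.1 variants with _MM_FROUND_CUR_DIRECTION | _MM_FROUND_NO_EXC: round according to the current MXCSR rounding mode (round-to-nearest-even by default) without signaling the inexact exception. A hedged sketch of a round-to-even helper built the same way (illustrative only, not the exact mesa source):

#include <smmintrin.h>  /* SSE4.1: _mm_round_ss */

/* Illustrative roundeven for float: rounds using the current rounding
 * mode (round-to-nearest-even unless MXCSR was changed) and suppresses
 * the inexact exception via _MM_FROUND_NO_EXC. */
static inline float roundeven_f32(float x) {
  __m128 m = _mm_set_ss(x);
  m = _mm_round_ss(m, m, _MM_FROUND_CUR_DIRECTION | _MM_FROUND_NO_EXC);
  return _mm_cvtss_f32(m);
}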
