
Searched refs: _MM_FROUND_NO_EXC (results 1 – 25 of 63), sorted by relevance


/external/llvm-project/clang/test/CodeGen/X86/
avx512er-builtins.c
9 return _mm512_rsqrt28_round_pd(a, _MM_FROUND_NO_EXC); in test_mm512_rsqrt28_round_pd()
15 return _mm512_mask_rsqrt28_round_pd(s, m, a, _MM_FROUND_NO_EXC); in test_mm512_mask_rsqrt28_round_pd()
21 return _mm512_maskz_rsqrt28_round_pd(m, a, _MM_FROUND_NO_EXC); in test_mm512_maskz_rsqrt28_round_pd()
45 return _mm512_rsqrt28_round_ps(a, _MM_FROUND_NO_EXC); in test_mm512_rsqrt28_round_ps()
51 return _mm512_mask_rsqrt28_round_ps(s, m, a, _MM_FROUND_NO_EXC); in test_mm512_mask_rsqrt28_round_ps()
57 return _mm512_maskz_rsqrt28_round_ps(m, a, _MM_FROUND_NO_EXC); in test_mm512_maskz_rsqrt28_round_ps()
81 return _mm_rsqrt28_round_ss(a, b, _MM_FROUND_NO_EXC); in test_mm_rsqrt28_round_ss()
87 return _mm_mask_rsqrt28_round_ss(s, m, a, b, _MM_FROUND_NO_EXC); in test_mm_mask_rsqrt28_round_ss()
93 return _mm_maskz_rsqrt28_round_ss(m, a, b, _MM_FROUND_NO_EXC); in test_mm_maskz_rsqrt28_round_ss()
117 return _mm_rsqrt28_round_sd(a, b, _MM_FROUND_NO_EXC); in test_mm_rsqrt28_round_sd()
[all …]
avx512f-builtins.c
37 return _mm512_mask_sqrt_round_pd(__W,__U,__A,_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_mask_sqrt_round_pd()
46 return _mm512_maskz_sqrt_round_pd(__U,__A,_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_maskz_sqrt_round_pd()
53 return _mm512_sqrt_round_pd(__A,_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_sqrt_round_pd()
87 return _mm512_mask_sqrt_round_ps(__W,__U,__A,_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_mask_sqrt_round_ps()
96 return _mm512_maskz_sqrt_round_ps(__U,__A,_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_maskz_sqrt_round_ps()
103 return _mm512_sqrt_round_ps(__A,_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_sqrt_round_ps()
500 return _mm512_fmadd_round_pd(__A, __B, __C, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_fmadd_round_pd()
508 return _mm512_mask_fmadd_round_pd(__A, __U, __B, __C, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_mask_fmadd_round_pd()
515 return _mm512_mask3_fmadd_round_pd(__A, __B, __C, __U, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_mask3_fmadd_round_pd()
522 return _mm512_maskz_fmadd_round_pd(__U, __A, __B, __C, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_maskz_fmadd_round_pd()
[all …]
avx512f-builtins-constrained.c
77 return _mm512_cvt_roundps_ph(__A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_cvt_roundps_ph()
84 return _mm512_mask_cvt_roundps_ph(__W, __U, __A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_mask_cvt_roundps_ph()
91 return _mm512_maskz_cvt_roundps_ph(__U, __A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_maskz_cvt_roundps_ph()
98 return _mm512_cvt_roundph_ps(__A, _MM_FROUND_NO_EXC); in test_mm512_cvt_roundph_ps()
105 return _mm512_mask_cvt_roundph_ps(__W, __U, __A, _MM_FROUND_NO_EXC); in test_mm512_mask_cvt_roundph_ps()
112 return _mm512_maskz_cvt_roundph_ps(__U, __A, _MM_FROUND_NO_EXC); in test_mm512_maskz_cvt_roundph_ps()
avx512dq-builtins.c
471 return _mm512_cvt_roundpd_epi64(__A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in test_mm512_cvt_roundpd_epi64()
477 …return _mm512_mask_cvt_roundpd_epi64(__W, __U, __A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC)… in test_mm512_mask_cvt_roundpd_epi64()
483 return _mm512_maskz_cvt_roundpd_epi64(__U, __A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in test_mm512_maskz_cvt_roundpd_epi64()
507 return _mm512_cvt_roundpd_epu64(__A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in test_mm512_cvt_roundpd_epu64()
513 …return _mm512_mask_cvt_roundpd_epu64(__W, __U, __A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC)… in test_mm512_mask_cvt_roundpd_epu64()
519 return _mm512_maskz_cvt_roundpd_epu64(__U, __A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in test_mm512_maskz_cvt_roundpd_epu64()
543 return _mm512_cvt_roundps_epi64(__A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in test_mm512_cvt_roundps_epi64()
549 …return _mm512_mask_cvt_roundps_epi64(__W, __U, __A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC)… in test_mm512_mask_cvt_roundps_epi64()
555 return _mm512_maskz_cvt_roundps_epi64(__U, __A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in test_mm512_maskz_cvt_roundps_epi64()
579 return _mm512_cvt_roundps_epu64(__A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in test_mm512_cvt_roundps_epu64()
[all …]
avx512vl-builtins-constrained.c
54 return _mm_mask_cvtps_ph(__W, __U, __A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm_mask_cvtps_ph()
60 return _mm_maskz_cvtps_ph(__U, __A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm_maskz_cvtps_ph()
66 return _mm256_mask_cvtps_ph(__W, __U, __A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm256_mask_cvtps_ph()
72 return _mm256_maskz_cvtps_ph(__U, __A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm256_maskz_cvtps_ph()
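All of the clang CodeGen hits above exercise the explicit-rounding forms of the AVX-512 intrinsics, whose last argument combines a rounding-direction flag with _MM_FROUND_NO_EXC (suppress-all-exceptions, the SAE encoding). A minimal standalone sketch of that calling pattern, assuming an AVX-512F target; this is illustrative and not taken from the test files:

#include <immintrin.h>

/* Square root of 8 doubles, rounded toward zero for this one instruction,
   with floating-point exception reporting suppressed (SAE).
   The rounding-control argument must be a compile-time constant. */
__m512d sqrt_rz(__m512d a) {
  return _mm512_sqrt_round_pd(a, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
}
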
/external/XNNPACK/src/f32-vrnd/gen/
vrndd-sse41-x8.c
33 const __m128 vy0123 = _mm_round_ps(vx0123, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC); in xnn_f32_vrndd_ukernel__sse41_x8()
34 const __m128 vy4567 = _mm_round_ps(vx4567, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC); in xnn_f32_vrndd_ukernel__sse41_x8()
44 const __m128 vy = _mm_round_ps(vx, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC); in xnn_f32_vrndd_ukernel__sse41_x8()
51 __m128 vy = _mm_round_ps(vx, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC); in xnn_f32_vrndd_ukernel__sse41_x8()
vrndz-sse41-x8.c
33 const __m128 vy0123 = _mm_round_ps(vx0123, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__sse41_x8()
34 const __m128 vy4567 = _mm_round_ps(vx4567, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__sse41_x8()
44 const __m128 vy = _mm_round_ps(vx, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__sse41_x8()
51 __m128 vy = _mm_round_ps(vx, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__sse41_x8()
vrndne-sse41-x8.c
33 const __m128 vy0123 = _mm_round_ps(vx0123, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vrndne_ukernel__sse41_x8()
34 const __m128 vy4567 = _mm_round_ps(vx4567, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vrndne_ukernel__sse41_x8()
44 const __m128 vy = _mm_round_ps(vx, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vrndne_ukernel__sse41_x8()
51 __m128 vy = _mm_round_ps(vx, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vrndne_ukernel__sse41_x8()
vrndu-sse41-x8.c
33 const __m128 vy0123 = _mm_round_ps(vx0123, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC); in xnn_f32_vrndu_ukernel__sse41_x8()
34 const __m128 vy4567 = _mm_round_ps(vx4567, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC); in xnn_f32_vrndu_ukernel__sse41_x8()
44 const __m128 vy = _mm_round_ps(vx, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC); in xnn_f32_vrndu_ukernel__sse41_x8()
51 __m128 vy = _mm_round_ps(vx, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC); in xnn_f32_vrndu_ukernel__sse41_x8()
vrndz-avx-x16.c
35 const __m256 vy01234567 = _mm256_round_ps(vx01234567, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__avx_x16()
36 const __m256 vy89ABCDEF = _mm256_round_ps(vx89ABCDEF, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__avx_x16()
46 const __m256 vy = _mm256_round_ps(vx, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__avx_x16()
57 const __m256 vy = _mm256_round_ps(vx, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__avx_x16()
vrndd-avx-x16.c
35 … const __m256 vy01234567 = _mm256_round_ps(vx01234567, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC); in xnn_f32_vrndd_ukernel__avx_x16()
36 … const __m256 vy89ABCDEF = _mm256_round_ps(vx89ABCDEF, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC); in xnn_f32_vrndd_ukernel__avx_x16()
46 const __m256 vy = _mm256_round_ps(vx, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC); in xnn_f32_vrndd_ukernel__avx_x16()
57 const __m256 vy = _mm256_round_ps(vx, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC); in xnn_f32_vrndd_ukernel__avx_x16()
vrndne-avx-x16.c
35 …nst __m256 vy01234567 = _mm256_round_ps(vx01234567, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vrndne_ukernel__avx_x16()
36 …nst __m256 vy89ABCDEF = _mm256_round_ps(vx89ABCDEF, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vrndne_ukernel__avx_x16()
46 const __m256 vy = _mm256_round_ps(vx, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vrndne_ukernel__avx_x16()
57 const __m256 vy = _mm256_round_ps(vx, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vrndne_ukernel__avx_x16()
vrndu-avx-x16.c
35 … const __m256 vy01234567 = _mm256_round_ps(vx01234567, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC); in xnn_f32_vrndu_ukernel__avx_x16()
36 … const __m256 vy89ABCDEF = _mm256_round_ps(vx89ABCDEF, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC); in xnn_f32_vrndu_ukernel__avx_x16()
46 const __m256 vy = _mm256_round_ps(vx, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC); in xnn_f32_vrndu_ukernel__avx_x16()
57 const __m256 vy = _mm256_round_ps(vx, _MM_FROUND_TO_POS_INF | _MM_FROUND_NO_EXC); in xnn_f32_vrndu_ukernel__avx_x16()
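The four f32-vrnd kernel families above differ only in the rounding-direction flag passed to _mm_round_ps/_mm256_round_ps; OR-ing in _MM_FROUND_NO_EXC additionally keeps the instruction from raising the precision (inexact) exception. A minimal SSE4.1 sketch of that mapping, with hypothetical helper names chosen to mirror the kernel names; illustrative, not from the XNNPACK sources:

#include <smmintrin.h>

__m128 vrndd(__m128 x)  { return _mm_round_ps(x, _MM_FROUND_TO_NEG_INF     | _MM_FROUND_NO_EXC); }  /* floorf-like */
__m128 vrndu(__m128 x)  { return _mm_round_ps(x, _MM_FROUND_TO_POS_INF     | _MM_FROUND_NO_EXC); }  /* ceilf-like  */
__m128 vrndz(__m128 x)  { return _mm_round_ps(x, _MM_FROUND_TO_ZERO        | _MM_FROUND_NO_EXC); }  /* truncf-like */
__m128 vrndne(__m128 x) { return _mm_round_ps(x, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); }  /* round half to even */
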
/external/XNNPACK/src/f32-vscaleextexp/gen/
avx2-p5-x96.c
65 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
66 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
67 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
68 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
69 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
70 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
71 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
72 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
73 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
74 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
[all …]
avx2-p5-x88.c
64 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
65 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
66 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
67 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
68 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
69 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
70 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
71 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
72 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
73 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
[all …]
avx2-p5-x80.c
63 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
64 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
65 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
66 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
67 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
68 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
69 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
70 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
71 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
72 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
[all …]
avx2-p5-x72.c
62 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
63 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
64 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
65 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
66 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
67 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
68 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
69 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
70 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
228 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
[all …]
avx2-p5-x56.c
60 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
61 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
62 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
63 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
64 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
65 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
66 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
198 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
238 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
avx2-p5-x64.c
61 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
62 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
63 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
64 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
65 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
66 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
67 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
68 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
213 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
253 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
/external/XNNPACK/src/f32-raddextexp/gen/
avx2-p5-x96.c
63 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
64 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
65 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
66 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
67 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
68 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
69 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
70 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
71 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
72 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
[all …]
avx2-p5-x80-acc2.c
63 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
64 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
65 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
66 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
67 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
68 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
69 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
70 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
71 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
72 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
[all …]
avx2-p5-x80.c
61 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
62 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
63 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
64 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
65 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
66 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
67 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
68 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
69 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
70 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
[all …]
avx2-p5-x96-acc3.c
67 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
68 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
69 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
70 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
71 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
72 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
73 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
74 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
75 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
76 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
[all …]
avx2-p5-x96-acc2.c
65 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
66 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
67 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
68 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
69 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
70 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
71 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
72 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
73 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
74 …6 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
[all …]
avx2-p5-x72.c
60 …6 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
61 …6 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
62 …6 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
63 …6 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
64 …6 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
65 …6 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
66 …6 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
67 …6 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
68 …6 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
219 …256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
[all …]
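Every f32-vscaleextexp and f32-raddextexp hit above shows the same range-reduction step: n = round(x * log2(e)) to nearest-even with exceptions suppressed, so that exp(x) can later be reassembled as 2^n * exp(x - n * ln(2)). A single-vector AVX sketch of just that step, using a hypothetical helper name and not taken from the generated kernels:

#include <immintrin.h>

static inline __m256 extexp_reduce(__m256 vx) {
  const __m256 vlog2e = _mm256_set1_ps(0x1.715476p+0f);  /* log2(e) in single precision */
  /* Round to nearest-even without raising the inexact exception. */
  return _mm256_round_ps(_mm256_mul_ps(vx, vlog2e),
                         _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
}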
