/external/clang/test/CodeGen/ |
D | avx512er-builtins.c |
    11 return _mm512_rsqrt28_round_pd(a, _MM_FROUND_TO_NEAREST_INT); in test_mm512_rsqrt28_round_pd()
    17 return _mm512_mask_rsqrt28_round_pd(s, m, a, _MM_FROUND_TO_NEAREST_INT); in test_mm512_mask_rsqrt28_round_pd()
    23 return _mm512_maskz_rsqrt28_round_pd(m, a, _MM_FROUND_TO_NEAREST_INT); in test_mm512_maskz_rsqrt28_round_pd()
    47 return _mm512_rsqrt28_round_ps(a, _MM_FROUND_TO_NEAREST_INT); in test_mm512_rsqrt28_round_ps()
    53 return _mm512_mask_rsqrt28_round_ps(s, m, a, _MM_FROUND_TO_NEAREST_INT); in test_mm512_mask_rsqrt28_round_ps()
    59 return _mm512_maskz_rsqrt28_round_ps(m, a, _MM_FROUND_TO_NEAREST_INT); in test_mm512_maskz_rsqrt28_round_ps()
    83 return _mm_rsqrt28_round_ss(a, b, _MM_FROUND_TO_NEAREST_INT); in test_mm_rsqrt28_round_ss()
    89 return _mm_mask_rsqrt28_round_ss(s, m, a, b, _MM_FROUND_TO_NEAREST_INT); in test_mm_mask_rsqrt28_round_ss()
    95 return _mm_maskz_rsqrt28_round_ss(m, a, b, _MM_FROUND_TO_NEAREST_INT); in test_mm_maskz_rsqrt28_round_ss()
    119 return _mm_rsqrt28_round_sd(a, b, _MM_FROUND_TO_NEAREST_INT); in test_mm_rsqrt28_round_sd()
    [all …]
|
D | avx512dq-builtins.c |
    191 return _mm512_cvt_roundpd_epi64(__A, _MM_FROUND_TO_NEAREST_INT); in test_mm512_cvt_roundpd_epi64()
    197 return _mm512_mask_cvt_roundpd_epi64(__W, __U, __A, _MM_FROUND_TO_NEAREST_INT); in test_mm512_mask_cvt_roundpd_epi64()
    203 return _mm512_maskz_cvt_roundpd_epi64(__U, __A, _MM_FROUND_TO_NEAREST_INT); in test_mm512_maskz_cvt_roundpd_epi64()
    227 return _mm512_cvt_roundpd_epu64(__A, _MM_FROUND_TO_NEAREST_INT); in test_mm512_cvt_roundpd_epu64()
    233 return _mm512_mask_cvt_roundpd_epu64(__W, __U, __A, _MM_FROUND_TO_NEAREST_INT); in test_mm512_mask_cvt_roundpd_epu64()
    239 return _mm512_maskz_cvt_roundpd_epu64(__U, __A, _MM_FROUND_TO_NEAREST_INT); in test_mm512_maskz_cvt_roundpd_epu64()
    263 return _mm512_cvt_roundps_epi64(__A, _MM_FROUND_TO_NEAREST_INT); in test_mm512_cvt_roundps_epi64()
    269 return _mm512_mask_cvt_roundps_epi64(__W, __U, __A, _MM_FROUND_TO_NEAREST_INT); in test_mm512_mask_cvt_roundps_epi64()
    275 return _mm512_maskz_cvt_roundps_epi64(__U, __A, _MM_FROUND_TO_NEAREST_INT); in test_mm512_maskz_cvt_roundps_epi64()
    299 return _mm512_cvt_roundps_epu64(__A, _MM_FROUND_TO_NEAREST_INT); in test_mm512_cvt_roundps_epu64()
    [all …]
|
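Both test files exercise the explicit-rounding ("_round_") variants of AVX-512 intrinsics, which take the rounding mode as an immediate argument instead of reading it from MXCSR. Below is a minimal sketch of calling one of them directly; it is not taken from the tests, the function and buffer names are illustrative, and it assumes a toolchain targeting AVX512F plus AVX512DQ (-mavx512f -mavx512dq).

    #include <immintrin.h>

    /* Convert 8 doubles to 64-bit ints with an explicit rounding mode.
     * _MM_FROUND_TO_NEAREST_INT selects round-to-nearest-even;
     * _MM_FROUND_NO_EXC suppresses floating-point exceptions (SAE). */
    void cvt_nearest(const double *src, long long *dst) {
      __m512d v = _mm512_loadu_pd(src);
      __m512i r = _mm512_cvt_roundpd_epi64(
          v, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
      _mm512_storeu_si512((__m512i *)dst, r);
    }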
/external/XNNPACK/src/f32-vrnd/gen/ |
D | vrndne-avx512f-x32.c |
    34 …st __m512 vy0123456789ABCDEF = _mm512_roundscale_ps(vx0123456789ABCDEF, _MM_FROUND_TO_NEAREST_INT); in xnn_f32_vrndne_ukernel__avx512f_x32()
    35 …st __m512 vyGHIJKLMNOPQRSTUV = _mm512_roundscale_ps(vxGHIJKLMNOPQRSTUV, _MM_FROUND_TO_NEAREST_INT); in xnn_f32_vrndne_ukernel__avx512f_x32()
    45 const __m512 vy = _mm512_roundscale_ps(vx, _MM_FROUND_TO_NEAREST_INT); in xnn_f32_vrndne_ukernel__avx512f_x32()
    58 const __m512 vy = _mm512_maskz_roundscale_ps(vmask, vx, _MM_FROUND_TO_NEAREST_INT); in xnn_f32_vrndne_ukernel__avx512f_x32()
|
D | vrndne-sse41-x8.c |
    33 const __m128 vy0123 = _mm_round_ps(vx0123, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vrndne_ukernel__sse41_x8()
    34 const __m128 vy4567 = _mm_round_ps(vx4567, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vrndne_ukernel__sse41_x8()
    44 const __m128 vy = _mm_round_ps(vx, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vrndne_ukernel__sse41_x8()
    51 __m128 vy = _mm_round_ps(vx, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vrndne_ukernel__sse41_x8()
|
D | vrndne-avx-x16.c |
    35 …const __m256 vy01234567 = _mm256_round_ps(vx01234567, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EX… in xnn_f32_vrndne_ukernel__avx_x16()
    36 …const __m256 vy89ABCDEF = _mm256_round_ps(vx89ABCDEF, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EX… in xnn_f32_vrndne_ukernel__avx_x16()
    46 const __m256 vy = _mm256_round_ps(vx, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vrndne_ukernel__avx_x16()
    57 const __m256 vy = _mm256_round_ps(vx, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vrndne_ukernel__avx_x16()
|
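All three vrndne kernels implement the same operation at different vector widths: round each float to the nearest integer, ties to even, without raising the inexact exception. A minimal sketch of that operation (my code, not XNNPACK's; it assumes n is a multiple of 4 and an SSE4.1 target):

    #include <immintrin.h>
    #include <stddef.h>

    /* Round n floats to the nearest integer value, ties to even. */
    void vrndne_sse41(size_t n, const float *x, float *y) {
      for (size_t i = 0; i < n; i += 4) {
        const __m128 vx = _mm_loadu_ps(x + i);
        const __m128 vy =
            _mm_round_ps(vx, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
        _mm_storeu_ps(y + i, vy);
      }
    }

The AVX-512 kernel above reaches the same behavior through _mm512_roundscale_ps, whose immediate also carries a scale field; passing plain _MM_FROUND_TO_NEAREST_INT leaves the scale at zero, so it reduces to a round to the nearest integer.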
/external/XNNPACK/src/f32-vscaleextexp/gen/ |
D | avx2-p5-x96.c |
    65 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
    66 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
    67 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
    68 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
    69 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
    70 …const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
    71 …const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
    72 …const __m256 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
    73 …const __m256 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
    74 …const __m256 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
    [all …]
|
D | avx2-p5-x88.c |
    64 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
    65 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
    66 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
    67 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
    68 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
    69 …const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
    70 …const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
    71 …const __m256 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
    72 …const __m256 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
    73 …const __m256 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
    [all …]
|
D | avx2-p5-x80.c |
    63 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
    64 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
    65 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
    66 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
    67 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
    68 …const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
    69 …const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
    70 …const __m256 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
    71 …const __m256 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
    72 …const __m256 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
    [all …]
|
D | avx2-p5-x72.c |
    62 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
    63 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
    64 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
    65 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
    66 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
    67 …const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
    68 …const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
    69 …const __m256 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
    70 …const __m256 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
    228 …const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUN… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x72()
    [all …]
|
D | avx2-p5-x56.c |
    60 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
    61 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
    62 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
    63 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
    64 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
    65 …const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
    66 …const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
    198 …const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUN… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
    238 …const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUN… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x56()
|
D | avx2-p5-x64.c |
    61 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
    62 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
    63 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
    64 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
    65 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
    66 …const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
    67 …const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
    68 …const __m256 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
    213 …const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUN… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
    253 …const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUN… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x64()
|
D | avx2-p5-x48.c |
    59 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48()
    60 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48()
    61 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48()
    62 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48()
    63 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48()
    64 …const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48()
    183 …const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUN… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48()
    223 …const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUN… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48()
|
D | avx2-p5-x40.c |
    58 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x40()
    59 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x40()
    60 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x40()
    61 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x40()
    62 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x40()
    168 …const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUN… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x40()
    208 …const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUN… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x40()
|
D | avx2-p5-x32.c |
    57 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x32()
    58 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x32()
    59 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x32()
    60 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x32()
    153 …const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUN… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x32()
    193 …const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUN… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x32()
|
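Every vscaleextexp kernel opens with the same step: vn = round(x · log2(e)) to nearest-even. This is the classic range reduction exp(x) = 2^n · exp(r) with n = round(x · log2(e)) and r = x − n · ln(2); rounding to nearest keeps r in [−ln(2)/2, ln(2)/2], the interval where the degree-5 polynomial (the "p5" in the file names) approximates exp well. A one-vector sketch of that opening step (mine, with the standard single-precision constant; the generated kernels unroll it over many vectors):

    #include <immintrin.h>

    /* Compute n = round-to-nearest-even(x * log2(e)) for 8 floats. */
    __m256 extexp_n(__m256 vx) {
      const __m256 vlog2e = _mm256_set1_ps(0x1.715476p+0f);  /* log2(e) */
      /* Nearest-even rounding picks the n that minimizes |r|, keeping
       * the polynomial argument r = x - n*ln(2) small. */
      return _mm256_round_ps(_mm256_mul_ps(vx, vlog2e),
                             _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
    }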
/external/XNNPACK/src/f32-raddextexp/gen/ |
D | avx2-p5-x96.c |
    63 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
    64 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
    65 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
    66 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
    67 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
    68 …const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
    69 …const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
    70 …const __m256 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
    71 …const __m256 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
    72 …const __m256 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
    [all …]
|
D | avx2-p5-x80-acc2.c |
    63 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
    64 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
    65 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
    66 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
    67 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
    68 …const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
    69 …const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
    70 …const __m256 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
    71 …const __m256 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
    72 …const __m256 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
    [all …]
|
D | avx2-p5-x80.c |
    61 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
    62 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
    63 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
    64 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
    65 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
    66 …const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
    67 …const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
    68 …const __m256 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
    69 …const __m256 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
    70 …const __m256 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
    [all …]
|
D | avx2-p5-x96-acc3.c |
    67 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
    68 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
    69 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
    70 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
    71 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
    72 …const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
    73 …const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
    74 …const __m256 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
    75 …const __m256 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
    76 …const __m256 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
    [all …]
|
D | avx2-p5-x96-acc2.c |
    65 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
    66 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
    67 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
    68 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
    69 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
    70 …const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
    71 …const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
    72 …const __m256 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
    73 …const __m256 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
    74 …const __m256 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
    [all …]
|
D | avx2-p5-x72.c |
    60 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
    61 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
    62 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
    63 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
    64 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
    65 …const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
    66 …const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
    67 …const __m256 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
    68 …const __m256 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
    219 …const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUN… in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
    [all …]
|
D | avx2-p5-x64.c |
    59 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
    60 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
    61 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
    62 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
    63 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
    64 …const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
    65 …const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
    66 …const __m256 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
    206 …const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUN… in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
    246 …__m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_E… in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
|
D | avx2-p5-x72-acc3.c |
    64 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
    65 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
    66 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
    67 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
    68 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
    69 …const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
    70 …const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
    71 …const __m256 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
    72 …const __m256 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
    245 …const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUN… in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
    [all …]
|
D | avx2-p5-x96-acc6.c |
    73 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
    74 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
    75 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
    76 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
    77 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
    78 …const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
    79 …const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
    80 …const __m256 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
    81 …const __m256 vn8 = _mm256_round_ps(_mm256_mul_ps(vx8, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
    82 …const __m256 vn9 = _mm256_round_ps(_mm256_mul_ps(vx9, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
    [all …]
|
D | avx2-p5-x64-acc2.c |
    61 …const __m256 vn0 = _mm256_round_ps(_mm256_mul_ps(vx0, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
    62 …const __m256 vn1 = _mm256_round_ps(_mm256_mul_ps(vx1, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
    63 …const __m256 vn2 = _mm256_round_ps(_mm256_mul_ps(vx2, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
    64 …const __m256 vn3 = _mm256_round_ps(_mm256_mul_ps(vx3, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
    65 …const __m256 vn4 = _mm256_round_ps(_mm256_mul_ps(vx4, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
    66 …const __m256 vn5 = _mm256_round_ps(_mm256_mul_ps(vx5, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
    67 …const __m256 vn6 = _mm256_round_ps(_mm256_mul_ps(vx6, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
    68 …const __m256 vn7 = _mm256_round_ps(_mm256_mul_ps(vx7, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FRO… in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
    221 …const __m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUN… in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
    261 …__m256 vn = _mm256_round_ps(_mm256_mul_ps(vx, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_E… in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
|
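The raddextexp kernels use the same rounding step as vscaleextexp but reduce sum(exp(x[i])), carrying the running sum as an "extended exponent" pair (m, e) meaning m · 2^e so the accumulator cannot overflow a float; the acc2/acc3/acc6 suffixes are the number of such parallel accumulators. A scalar sketch of merging one term (vp, vn), meaning vp · 2^vn, into the accumulator (am, ae); this is my illustration of the idea, not the vectorized XNNPACK code:

    #include <math.h>

    void acc_extexp(float *am, float *ae, float vp, float vn) {
      const float emax = fmaxf(*ae, vn);  /* renormalize to the larger exponent */
      /* scalbnf(x, k) multiplies x by 2**k; a term whose exponent is far
       * below emax flushes toward zero, which is the intended behavior. */
      *am = scalbnf(*am, (int)(*ae - emax)) + scalbnf(vp, (int)(vn - emax));
      *ae = emax;
    }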
/external/llvm-project/clang/test/CodeGen/X86/ |
D | avx512dq-builtins.c |
    471 return _mm512_cvt_roundpd_epi64(__A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in test_mm512_cvt_roundpd_epi64()
    477 …return _mm512_mask_cvt_roundpd_epi64(__W, __U, __A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC)… in test_mm512_mask_cvt_roundpd_epi64()
    483 return _mm512_maskz_cvt_roundpd_epi64(__U, __A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in test_mm512_maskz_cvt_roundpd_epi64()
    507 return _mm512_cvt_roundpd_epu64(__A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in test_mm512_cvt_roundpd_epu64()
    513 …return _mm512_mask_cvt_roundpd_epu64(__W, __U, __A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC)… in test_mm512_mask_cvt_roundpd_epu64()
    519 return _mm512_maskz_cvt_roundpd_epu64(__U, __A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in test_mm512_maskz_cvt_roundpd_epu64()
    543 return _mm512_cvt_roundps_epi64(__A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in test_mm512_cvt_roundps_epi64()
    549 …return _mm512_mask_cvt_roundps_epi64(__W, __U, __A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC)… in test_mm512_mask_cvt_roundps_epi64()
    555 return _mm512_maskz_cvt_roundps_epi64(__U, __A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in test_mm512_maskz_cvt_roundps_epi64()
    579 return _mm512_cvt_roundps_epu64(__A, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in test_mm512_cvt_roundps_epu64()
    [all …]
|
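These are the same cvt_round tests as in external/clang above, regenerated with _MM_FROUND_NO_EXC OR'ed into the rounding argument; as far as I can tell, newer clang rejects an explicit rounding mode on the *_round_* intrinsics unless exceptions are also suppressed, which would explain the difference between the two snapshots. The flag values, as defined in <smmintrin.h>, make the combined immediate easy to check:

    #include <stdio.h>

    int main(void) {
      enum {
        TO_NEAREST = 0x00,  /* _MM_FROUND_TO_NEAREST_INT */
        NO_EXC     = 0x08   /* _MM_FROUND_NO_EXC (SAE)   */
      };
      printf("0x%02x\n", TO_NEAREST | NO_EXC);  /* prints 0x08 */
      return 0;
    }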