Searched refs:_MM_FROUND_TO_ZERO (Results 1 – 21 of 21) sorted by relevance

/external/llvm-project/clang/test/CodeGen/X86/
avx512vl-builtins-constrained.c  54 return _mm_mask_cvtps_ph(__W, __U, __A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm_mask_cvtps_ph()
60 return _mm_maskz_cvtps_ph(__U, __A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm_maskz_cvtps_ph()
66 return _mm256_mask_cvtps_ph(__W, __U, __A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm256_mask_cvtps_ph()
72 return _mm256_maskz_cvtps_ph(__U, __A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm256_maskz_cvtps_ph()
78 return _mm_mask_cvt_roundps_ph(__W, __U, __A, _MM_FROUND_TO_ZERO); in test_mm_mask_cvt_roundps_ph()
84 return _mm_maskz_cvt_roundps_ph(__U, __A, _MM_FROUND_TO_ZERO); in test_mm_maskz_cvt_roundps_ph()
90 return _mm256_mask_cvt_roundps_ph(__W, __U, __A, _MM_FROUND_TO_ZERO); in test_mm256_mask_cvt_roundps_ph()
96 return _mm256_maskz_cvt_roundps_ph(__U, __A, _MM_FROUND_TO_ZERO); in test_mm256_maskz_cvt_roundps_ph()
avx512f-builtins.c  37 return _mm512_mask_sqrt_round_pd(__W,__U,__A,_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_mask_sqrt_round_pd()
46 return _mm512_maskz_sqrt_round_pd(__U,__A,_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_maskz_sqrt_round_pd()
53 return _mm512_sqrt_round_pd(__A,_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_sqrt_round_pd()
87 return _mm512_mask_sqrt_round_ps(__W,__U,__A,_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_mask_sqrt_round_ps()
96 return _mm512_maskz_sqrt_round_ps(__U,__A,_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_maskz_sqrt_round_ps()
103 return _mm512_sqrt_round_ps(__A,_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_sqrt_round_ps()
500 return _mm512_fmadd_round_pd(__A, __B, __C, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_fmadd_round_pd()
508 return _mm512_mask_fmadd_round_pd(__A, __U, __B, __C, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_mask_fmadd_round_pd()
515 return _mm512_mask3_fmadd_round_pd(__A, __B, __C, __U, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_mask3_fmadd_round_pd()
522 return _mm512_maskz_fmadd_round_pd(__U, __A, __B, __C, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_maskz_fmadd_round_pd()
[all …]
avx512f-builtins-constrained.c  77 return _mm512_cvt_roundps_ph(__A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_cvt_roundps_ph()
84 return _mm512_mask_cvt_roundps_ph(__W, __U, __A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_mask_cvt_roundps_ph()
91 return _mm512_maskz_cvt_roundps_ph(__U, __A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm512_maskz_cvt_roundps_ph()
avx512vl-builtins.c  9672 return _mm_mask_cvtps_ph(__W, __U, __A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm_mask_cvtps_ph()
9678 return _mm_maskz_cvtps_ph(__U, __A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm_maskz_cvtps_ph()
9684 return _mm256_mask_cvtps_ph(__W, __U, __A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm256_mask_cvtps_ph()
9690 return _mm256_maskz_cvtps_ph(__U, __A, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in test_mm256_maskz_cvtps_ph()
9696 return _mm_mask_cvt_roundps_ph(__W, __U, __A, _MM_FROUND_TO_ZERO); in test_mm_mask_cvt_roundps_ph()
9702 return _mm_maskz_cvt_roundps_ph(__U, __A, _MM_FROUND_TO_ZERO); in test_mm_maskz_cvt_roundps_ph()
9708 return _mm256_mask_cvt_roundps_ph(__W, __U, __A, _MM_FROUND_TO_ZERO); in test_mm256_mask_cvt_roundps_ph()
9714 return _mm256_maskz_cvt_roundps_ph(__U, __A, _MM_FROUND_TO_ZERO); in test_mm256_maskz_cvt_roundps_ph()
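The tests above pass the truncation flag directly as the rounding argument of the *_round_* intrinsics. As a hedged, standalone sketch (not taken from the test files, but using the same AVX-512F intrinsic and flags), the per-instruction rounding override looks like this; the rounding argument must be a compile-time constant and MXCSR is left untouched:

/* Build with e.g. gcc -O2 -mavx512f; needs an AVX-512F CPU to run. */
#include <immintrin.h>
#include <stdio.h>

int main(void) {
    __m512d v = _mm512_set1_pd(2.0);
    /* Truncate toward zero and suppress exceptions for this one operation. */
    __m512d r = _mm512_sqrt_round_pd(v, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
    double out[8];
    _mm512_storeu_pd(out, r);
    printf("%.17g\n", out[0]);  /* sqrt(2) with round-toward-zero */
    return 0;
}
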
/external/XNNPACK/src/f32-vrnd/gen/
vrndz-avx512f-x32.c  34 const __m512 vy0123456789ABCDEF = _mm512_roundscale_ps(vx0123456789ABCDEF, _MM_FROUND_TO_ZERO); in xnn_f32_vrndz_ukernel__avx512f_x32()
35 const __m512 vyGHIJKLMNOPQRSTUV = _mm512_roundscale_ps(vxGHIJKLMNOPQRSTUV, _MM_FROUND_TO_ZERO); in xnn_f32_vrndz_ukernel__avx512f_x32()
45 const __m512 vy = _mm512_roundscale_ps(vx, _MM_FROUND_TO_ZERO); in xnn_f32_vrndz_ukernel__avx512f_x32()
58 const __m512 vy = _mm512_maskz_roundscale_ps(vmask, vx, _MM_FROUND_TO_ZERO); in xnn_f32_vrndz_ukernel__avx512f_x32()
vrndz-sse41-x8.c  33 const __m128 vy0123 = _mm_round_ps(vx0123, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__sse41_x8()
34 const __m128 vy4567 = _mm_round_ps(vx4567, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__sse41_x8()
44 const __m128 vy = _mm_round_ps(vx, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__sse41_x8()
51 __m128 vy = _mm_round_ps(vx, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__sse41_x8()
vrndz-avx-x16.c  35 const __m256 vy01234567 = _mm256_round_ps(vx01234567, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__avx_x16()
36 const __m256 vy89ABCDEF = _mm256_round_ps(vx89ABCDEF, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__avx_x16()
46 const __m256 vy = _mm256_round_ps(vx, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__avx_x16()
57 const __m256 vy = _mm256_round_ps(vx, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__avx_x16()
vrndz-sse41-x4.c  32 const __m128 vy0123 = _mm_round_ps(vx0123, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__sse41_x4()
39 __m128 vy = _mm_round_ps(vx, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__sse41_x4()
vrndz-avx512f-x16.c  33 const __m512 vy0123456789ABCDEF = _mm512_roundscale_ps(vx0123456789ABCDEF, _MM_FROUND_TO_ZERO); in xnn_f32_vrndz_ukernel__avx512f_x16()
46 const __m512 vy = _mm512_maskz_roundscale_ps(vmask, vx, _MM_FROUND_TO_ZERO); in xnn_f32_vrndz_ukernel__avx512f_x16()
vrndz-avx-x8.c  34 const __m256 vy01234567 = _mm256_round_ps(vx01234567, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__avx_x8()
45 const __m256 vy = _mm256_round_ps(vx, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_f32_vrndz_ukernel__avx_x8()
/external/XNNPACK/src/math/
roundz-sse41.c  25 const __m128 vy = _mm_round_ps(vx, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); in xnn_math_f32_roundz__sse41()
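All of these XNNPACK kernels use the same round-toward-zero call; a minimal standalone sketch of that pattern (not XNNPACK code) with the SSE4.1 intrinsic:

/* Build with e.g. gcc -O2 -msse4.1 */
#include <smmintrin.h>
#include <stdio.h>

int main(void) {
    const __m128 vx = _mm_setr_ps(1.9f, -1.9f, 2.5f, -0.5f);
    /* Truncate each lane toward zero; _MM_FROUND_NO_EXC suppresses the
     * inexact exception, matching the kernels above. */
    const __m128 vy = _mm_round_ps(vx, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
    float out[4];
    _mm_storeu_ps(out, vy);
    printf("%g %g %g %g\n", out[0], out[1], out[2], out[3]);  /* 1 -1 2 -0 */
    return 0;
}
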
/external/XNNPACK/src/f32-vrnd/
sse41.c.in  21 $  "RNDZ": "_MM_FROUND_TO_ZERO",
avx512f.c.in  22 $  "RNDZ": "_MM_FROUND_TO_ZERO",
avx.c.in  23 $  "RNDZ": "_MM_FROUND_TO_ZERO",
/external/llvm-project/clang/lib/Headers/
smmintrin.h  22 #define _MM_FROUND_TO_ZERO 0x03 macro
31 #define _MM_FROUND_TRUNC (_MM_FROUND_RAISE_EXC | _MM_FROUND_TO_ZERO)
avx512fintrin.h  44 #define _MM_FROUND_TO_ZERO 0x03 macro
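In these headers _MM_FROUND_RAISE_EXC is 0x00 and _MM_FROUND_NO_EXC is 0x08, so _MM_FROUND_TRUNC expands to the same value as _MM_FROUND_TO_ZERO alone, and the common _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC combination is 0x0b. A small sketch verifying that (values assumed to match the stock clang smmintrin.h):

#include <smmintrin.h>
#include <stdio.h>

int main(void) {
    printf("_MM_FROUND_TO_ZERO          = 0x%02x\n", _MM_FROUND_TO_ZERO);  /* 0x03 */
    printf("_MM_FROUND_TRUNC            = 0x%02x\n", _MM_FROUND_TRUNC);    /* 0x03, RAISE_EXC is 0 */
    printf("_MM_FROUND_TO_ZERO | NO_EXC = 0x%02x\n",
           _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);                        /* 0x0b */
    return 0;
}
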
/external/clang/lib/Headers/
smmintrin.h  36 #define _MM_FROUND_TO_ZERO 0x03 macro
45 #define _MM_FROUND_TRUNC (_MM_FROUND_RAISE_EXC | _MM_FROUND_TO_ZERO)
avx512fintrin.h  54 #define _MM_FROUND_TO_ZERO 0x03 macro
/external/mesa3d/src/gallium/drivers/swr/rasterizer/core/
depthstencil.h  107 result = _simd_round_ps(result, _MM_FROUND_TO_ZERO); in QuantizeDepth()
/external/mesa3d/src/gallium/drivers/swr/rasterizer/jitter/
blend_jit.cpp  238 src[swizComp] = VROUND(src[swizComp], C(_MM_FROUND_TO_ZERO)); in Quantize()
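Both SWR hits truncate toward zero while quantizing. A rough standalone sketch of that step using the plain AVX intrinsic (SWR's _simd_round_ps/VROUND wrappers and its actual scale factors are not reproduced here):

/* Build with e.g. gcc -O2 -mavx */
#include <immintrin.h>
#include <stdio.h>

int main(void) {
    /* Scale a normalized depth value into an integer range, then truncate. */
    const __m256 depth     = _mm256_set1_ps(0.73f);
    const __m256 scaled    = _mm256_mul_ps(depth, _mm256_set1_ps(65535.0f));
    const __m256 quantized = _mm256_round_ps(scaled, _MM_FROUND_TO_ZERO);
    float out[8];
    _mm256_storeu_ps(out, quantized);
    printf("%g\n", out[0]);  /* 47840 */
    return 0;
}
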
/external/pffft/
sse2neon.h  135 #define _MM_FROUND_TO_ZERO 0x03 macro
4846 case (_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC): in _mm_round_ps()
4864 case (_MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC): in _mm_round_ps()
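sse2neon defines the same 0x03 flag value and handles the _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC case inside its _mm_round_ps implementation, so SSE4.1-style truncation code can build unchanged on ARM. A hedged sketch (the include path and architecture guard are assumptions about the consuming project):

#if defined(__aarch64__) || defined(__arm__)
#include "sse2neon.h"   /* SSE emulation on NEON; path is an assumption */
#else
#include <smmintrin.h>  /* native SSE4.1 */
#endif
#include <stdio.h>

/* Truncate four floats toward zero with either backend. */
static __m128 truncate_toward_zero(__m128 x) {
    return _mm_round_ps(x, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC);
}

int main(void) {
    float out[4];
    _mm_storeu_ps(out, truncate_toward_zero(_mm_set1_ps(-2.7f)));
    printf("%g\n", out[0]);  /* -2 */
    return 0;
}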