/external/XNNPACK/src/f32-f16-vcvt/gen/ |
D | vcvt-avx-x8.c | 72 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__avx_x8() local
D | vcvt-avx-x8.c | 122 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__avx_x8() local
|
D | vcvt-sse2-x8.c | 74 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse2_x8() local
D | vcvt-sse2-x8.c | 124 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse2_x8() local
|
D | vcvt-wasmsimd-x8.c | 71 const v128_t vmantw_lo = wasm_v128_and(vf_lo, vmanth_mask); in xnn_f32_f16_vcvt_ukernel__wasmsimd_x8() local
D | vcvt-wasmsimd-x8.c | 121 const v128_t vmantw_lo = wasm_v128_and(vf_lo, vmanth_mask); in xnn_f32_f16_vcvt_ukernel__wasmsimd_x8() local
|
D | vcvt-sse41-x8.c | 72 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse41_x8() local
D | vcvt-sse41-x8.c | 122 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse41_x8() local
|
D | vcvt-sse41-x16.c | 165 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse41_x16() local
D | vcvt-sse41-x16.c | 215 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse41_x16() local
|
D | vcvt-sse2-x16.c | 168 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse2_x16() local
D | vcvt-sse2-x16.c | 218 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse2_x16() local
|
D | vcvt-avx-x16.c | 165 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__avx_x16() local
D | vcvt-avx-x16.c | 215 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__avx_x16() local
|
D | vcvt-wasmsimd-x16.c | 162 const v128_t vmantw_lo = wasm_v128_and(vf_lo, vmanth_mask); in xnn_f32_f16_vcvt_ukernel__wasmsimd_x16() local
D | vcvt-wasmsimd-x16.c | 212 const v128_t vmantw_lo = wasm_v128_and(vf_lo, vmanth_mask); in xnn_f32_f16_vcvt_ukernel__wasmsimd_x16() local
|
D | vcvt-sse2-x24.c | 203 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse2_x24() local
D | vcvt-sse2-x24.c | 253 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse2_x24() local
|
D | vcvt-wasmsimd-x24.c | 196 const v128_t vmantw_lo = wasm_v128_and(vf_lo, vmanth_mask); in xnn_f32_f16_vcvt_ukernel__wasmsimd_x24() local
D | vcvt-wasmsimd-x24.c | 246 const v128_t vmantw_lo = wasm_v128_and(vf_lo, vmanth_mask); in xnn_f32_f16_vcvt_ukernel__wasmsimd_x24() local
|
D | vcvt-sse41-x24.c | 199 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse41_x24() local
D | vcvt-sse41-x24.c | 249 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse41_x24() local
|
D | vcvt-avx-x24.c | 199 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__avx_x24() local
D | vcvt-avx-x24.c | 249 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__avx_x24() local
|
D | vcvt-sse2-x32.c | 238 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse2_x32() local
D | vcvt-sse2-x32.c | 288 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse2_x32() local
|
D | vcvt-sse41-x32.c | 233 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse41_x32() local
D | vcvt-sse41-x32.c | 283 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse41_x32() local
|
D | vcvt-wasmsimd-x32.c | 230 const v128_t vmantw_lo = wasm_v128_and(vf_lo, vmanth_mask); in xnn_f32_f16_vcvt_ukernel__wasmsimd_x32() local
D | vcvt-wasmsimd-x32.c | 280 const v128_t vmantw_lo = wasm_v128_and(vf_lo, vmanth_mask); in xnn_f32_f16_vcvt_ukernel__wasmsimd_x32() local
|
D | vcvt-avx-x32.c | 233 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__avx_x32() local
D | vcvt-avx-x32.c | 283 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__avx_x32() local
|
/external/XNNPACK/src/f32-f16-vcvt/ |
D | wasmsimd.c.in | 136 const v128_t vmantw_lo = wasm_v128_and(vf_lo, vmanth_mask); variable
D | wasmsimd.c.in | 186 const v128_t vmantw_lo = wasm_v128_and(vf_lo, vmanth_mask); variable
|
/external/XNNPACK/src/math/ |
D | cvt-f32-f16-sse2.c | 67 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_math_f32_f16_cvt__sse2() local
|
D | cvt-f32-f16-wasmsimd.c | 64 const v128_t vmantw_lo = wasm_v128_and(vf_lo, vmanth_mask); in xnn_math_f32_f16_cvt__wasmsimd() local
|
D | cvt-f32-f16-sse41.c | 65 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_math_f32_f16_cvt__sse41() local
|
/external/XNNPACK/src/amalgam/ |
D | sse41.c | 212 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse41_x8() local
D | sse41.c | 262 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse41_x8() local
|
D | avx.c | 1413 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__avx_x24() local
D | avx.c | 1463 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__avx_x24() local
|
D | sse2.c | 1038 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse2_x16() local
D | sse2.c | 1088 const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask); in xnn_f32_f16_vcvt_ukernel__sse2_x16() local
|
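Every hit above is the same step of XNNPACK's f32->f16 conversion: the low half of a batch of f32 lanes (vf_lo) is reinterpreted as 32-bit integers and ANDed with vmanth_mask to isolate the bits destined for the half-precision mantissa. The C program below is a minimal, self-contained sketch of just that masking step with SSE2 intrinsics. The input values and the 0x00000FFF mask are illustrative assumptions standing in for the kernels' actual constants; the real kernels additionally handle rounding, exponent re-biasing, and out-of-range values, none of which is shown here.

#include <emmintrin.h>   /* SSE2 intrinsics */
#include <inttypes.h>
#include <stdio.h>

int main(void) {
  /* Four arbitrary f32 values playing the role of vf_lo. */
  const __m128 vf_lo = _mm_set_ps(3.5f, 1.25f, -2.0f, 0.75f);

  /* Illustrative mask (assumption): keep the low 12 bits of each lane,
     standing in for the vmanth_mask constant of the real kernels. */
  const __m128i vmanth_mask = _mm_set1_epi32(0x00000FFF);

  /* The step every cross-reference entry points at: reinterpret the float
     lanes as integers and isolate the mantissa bits. */
  const __m128i vmantw_lo = _mm_and_si128(_mm_castps_si128(vf_lo), vmanth_mask);

  /* Store and print the masked lanes for inspection. */
  uint32_t lanes[4];
  _mm_storeu_si128((__m128i*) lanes, vmantw_lo);
  for (int i = 0; i < 4; i++) {
    printf("lane %d: 0x%03" PRIX32 "\n", i, lanes[i]);
  }
  return 0;
}

SSE2 is baseline on x86-64, so this builds with any stock x86-64 C compiler; the wasmsimd entries in the listing perform the same masking with wasm_v128_and instead of _mm_and_si128.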