Home
last modified time | relevance | path

Searched refs: vmax0 (Results 1 – 8 of 8) sorted by relevance

/external/XNNPACK/src/f32-rmax/
Dsse.c21 __m128 vmax0 = _mm_load_ss(x); in xnn_f32_rmax_ukernel__sse() local
22 vmax0 = _mm_shuffle_ps(vmax0, vmax0, _MM_SHUFFLE(0, 0, 0, 0)); in xnn_f32_rmax_ukernel__sse()
23 __m128 vmax1 = vmax0; in xnn_f32_rmax_ukernel__sse()
24 __m128 vmax2 = vmax0; in xnn_f32_rmax_ukernel__sse()
25 __m128 vmax3 = vmax0; in xnn_f32_rmax_ukernel__sse()
33 vmax0 = _mm_max_ps(vmax0, vx0); in xnn_f32_rmax_ukernel__sse()
38 __m128 vmax = _mm_max_ps(_mm_max_ps(vmax0, vmax1), _mm_max_ps(vmax2, vmax3)); in xnn_f32_rmax_ukernel__sse()
Dwasmsimd-x86.c22 v128_t vmax0 = wasm_v32x4_load_splat(x); in xnn_f32_rmax_ukernel__wasmsimd_x86() local
23 v128_t vmax1 = vmax0; in xnn_f32_rmax_ukernel__wasmsimd_x86()
24 v128_t vmax2 = vmax0; in xnn_f32_rmax_ukernel__wasmsimd_x86()
25 v128_t vmax3 = vmax0; in xnn_f32_rmax_ukernel__wasmsimd_x86()
33 const v128_t vlt0 = wasm_f32x4_lt(vx0, vmax0); in xnn_f32_rmax_ukernel__wasmsimd_x86()
38 vmax0 = wasm_v128_bitselect(vmax0, vx0, vlt0); in xnn_f32_rmax_ukernel__wasmsimd_x86()
43 const v128_t vlt01 = wasm_f32x4_lt(vmax0, vmax1); in xnn_f32_rmax_ukernel__wasmsimd_x86()
45 const v128_t vmax01 = wasm_v128_bitselect(vmax1, vmax0, vlt01); in xnn_f32_rmax_ukernel__wasmsimd_x86()
Dscalar.c20 float vmax0 = *x; in xnn_f32_rmax_ukernel__scalar() local
21 float vmax1 = vmax0; in xnn_f32_rmax_ukernel__scalar()
22 float vmax2 = vmax0; in xnn_f32_rmax_ukernel__scalar()
23 float vmax3 = vmax0; in xnn_f32_rmax_ukernel__scalar()
31 vmax0 = math_max_f32(vx0, vmax0); in xnn_f32_rmax_ukernel__scalar()
36 const float vmax01 = math_max_f32(vmax0, vmax1); in xnn_f32_rmax_ukernel__scalar()
Dwasmsimd-arm.c21 v128_t vmax0 = wasm_v32x4_load_splat(x); in xnn_f32_rmax_ukernel__wasmsimd_arm() local
22 v128_t vmax1 = vmax0; in xnn_f32_rmax_ukernel__wasmsimd_arm()
23 v128_t vmax2 = vmax0; in xnn_f32_rmax_ukernel__wasmsimd_arm()
24 v128_t vmax3 = vmax0; in xnn_f32_rmax_ukernel__wasmsimd_arm()
32 vmax0 = wasm_f32x4_max(vmax0, vx0); in xnn_f32_rmax_ukernel__wasmsimd_arm()
37 v128_t vmax0123 = wasm_f32x4_max(wasm_f32x4_max(vmax0, vmax1), wasm_f32x4_max(vmax2, vmax3)); in xnn_f32_rmax_ukernel__wasmsimd_arm()
Davx.c21 __m256 vmax0 = _mm256_broadcast_ss(x); in xnn_f32_rmax_ukernel__avx() local
22 __m256 vmax1 = vmax0; in xnn_f32_rmax_ukernel__avx()
23 __m256 vmax2 = vmax0; in xnn_f32_rmax_ukernel__avx()
24 __m256 vmax3 = vmax0; in xnn_f32_rmax_ukernel__avx()
32 vmax0 = _mm256_max_ps(vmax0, vx0); in xnn_f32_rmax_ukernel__avx()
37 __m256 vmax = _mm256_max_ps(_mm256_max_ps(vmax0, vmax1), _mm256_max_ps(vmax2, vmax3)); in xnn_f32_rmax_ukernel__avx()
Dneon.c22 float32x4_t vmax0 = vld1q_dup_f32(x); in xnn_f32_rmax_ukernel__neon() local
23 float32x4_t vmax1 = vmax0; in xnn_f32_rmax_ukernel__neon()
24 float32x4_t vmax2 = vmax0; in xnn_f32_rmax_ukernel__neon()
25 float32x4_t vmax3 = vmax0; in xnn_f32_rmax_ukernel__neon()
32 vmax0 = vmaxq_f32(vmax0, vx0); in xnn_f32_rmax_ukernel__neon()
37 float32x4_t vmax = vmaxq_f32(vmaxq_f32(vmax0, vmax1), vmaxq_f32(vmax2, vmax3)); in xnn_f32_rmax_ukernel__neon()
Davx512f.c22 __m512 vmax0 = _mm512_broadcastss_ps(_mm_load_ss(x)); in xnn_f32_rmax_ukernel__avx512f() local
23 __m512 vmax1 = vmax0; in xnn_f32_rmax_ukernel__avx512f()
24 __m512 vmax2 = vmax0; in xnn_f32_rmax_ukernel__avx512f()
25 __m512 vmax3 = vmax0; in xnn_f32_rmax_ukernel__avx512f()
33 vmax0 = _mm512_max_ps(vmax0, vx0); in xnn_f32_rmax_ukernel__avx512f()
38 __m512 vmax = _mm512_max_ps(_mm512_max_ps(vmax0, vmax1), _mm512_max_ps(vmax2, vmax3)); in xnn_f32_rmax_ukernel__avx512f()
/external/XNNPACK/src/u8-rmax/
Dscalar.c18 uint8_t vmax0 = 0; in xnn_u8_rmax_ukernel__scalar() local
25 vmax0 = vt0 > vmax0 ? vt0 : vmax0; in xnn_u8_rmax_ukernel__scalar()
28 uint8_t vmax = vmax0 > vmax1 ? vmax0 : vmax1; in xnn_u8_rmax_ukernel__scalar()