
Searched refs:vmax0 (Results 1 – 7 of 7) sorted by relevance

/external/XNNPACK/src/f32-rmax/
sse.c  (all matches in xnn_f32_rmax_ukernel__sse())
  21  __m128 vmax0 = _mm_load_ss(x);   (local declaration)
  22  vmax0 = _mm_shuffle_ps(vmax0, vmax0, _MM_SHUFFLE(0, 0, 0, 0));
  23  __m128 vmax1 = vmax0;
  24  __m128 vmax2 = vmax0;
  25  __m128 vmax3 = vmax0;
  33  vmax0 = _mm_max_ps(vmax0, vx0);
  38  __m128 vmax = _mm_max_ps(_mm_max_ps(vmax0, vmax1), _mm_max_ps(vmax2, vmax3));
scalar.c  (all matches in xnn_f32_rmax_ukernel__scalar())
  20  float vmax0 = *x;   (local declaration)
  21  float vmax1 = vmax0;
  22  float vmax2 = vmax0;
  23  float vmax3 = vmax0;
  31  vmax0 = math_max_f32(vx0, vmax0);
  36  const float vmax01 = math_max_f32(vmax0, vmax1);
psimd.c  (all matches in xnn_f32_rmax_ukernel__psimd())
  22  psimd_f32 vmax0 = psimd_load_splat_f32(x);   (local declaration)
  23  psimd_f32 vmax1 = vmax0;
  24  psimd_f32 vmax2 = vmax0;
  25  psimd_f32 vmax3 = vmax0;
  33  vmax0 = psimd_max_f32(vmax0, vx0);
  38  psimd_f32 vmax0123 = psimd_max_f32(psimd_max_f32(vmax0, vmax1), psimd_max_f32(vmax2, vmax3));
avx.c  (all matches in xnn_f32_rmax_ukernel__avx())
  21  __m256 vmax0 = _mm256_broadcast_ss(x);   (local declaration)
  22  __m256 vmax1 = vmax0;
  23  __m256 vmax2 = vmax0;
  24  __m256 vmax3 = vmax0;
  32  vmax0 = _mm256_max_ps(vmax0, vx0);
  37  __m256 vmax = _mm256_max_ps(_mm256_max_ps(vmax0, vmax1), _mm256_max_ps(vmax2, vmax3));
neon.c  (all matches in xnn_f32_rmax_ukernel__neon())
  22  float32x4_t vmax0 = vld1q_dup_f32(x);   (local declaration)
  23  float32x4_t vmax1 = vmax0;
  24  float32x4_t vmax2 = vmax0;
  25  float32x4_t vmax3 = vmax0;
  32  vmax0 = vmaxq_f32(vmax0, vx0);
  37  float32x4_t vmax = vmaxq_f32(vmaxq_f32(vmax0, vmax1), vmaxq_f32(vmax2, vmax3));
avx512f.c  (all matches in xnn_f32_rmax_ukernel__avx512f())
  22  __m512 vmax0 = _mm512_broadcastss_ps(_mm_load_ss(x));   (local declaration)
  23  __m512 vmax1 = vmax0;
  24  __m512 vmax2 = vmax0;
  25  __m512 vmax3 = vmax0;
  33  vmax0 = _mm512_max_ps(vmax0, vx0);
  38  __m512 vmax = _mm512_max_ps(_mm512_max_ps(vmax0, vmax1), _mm512_max_ps(vmax2, vmax3));
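
Every f32-rmax variant listed above follows the same shape: broadcast the first element into four accumulators, run four independent max chains over the bulk of the input, then fold the accumulators (and, for the vector variants, their lanes) into one maximum. Below is a minimal standalone C sketch of that pattern based on the scalar snippet; the function name, the plain element-count interface, the helper, and the remainder handling are illustrative assumptions, not the exact XNNPACK ukernel.

#include <assert.h>
#include <stddef.h>

/* Hypothetical stand-in for XNNPACK's math_max_f32 helper. */
static inline float max_f32(float a, float b) {
  return a > b ? a : b;
}

/* Sketch of the four-accumulator running-max reduction; n counts floats. */
float rmax_f32_scalar_sketch(size_t n, const float* x) {
  assert(n != 0);
  /* Seed all four accumulators with the first element so the result is
     correct even when every input is negative. */
  float vmax0 = *x;
  float vmax1 = vmax0;
  float vmax2 = vmax0;
  float vmax3 = vmax0;
  /* Main loop: four independent max chains hide the dependency latency. */
  for (; n >= 4; n -= 4) {
    vmax0 = max_f32(x[0], vmax0);
    vmax1 = max_f32(x[1], vmax1);
    vmax2 = max_f32(x[2], vmax2);
    vmax3 = max_f32(x[3], vmax3);
    x += 4;
  }
  /* Remainder loop for the last one to three elements. */
  for (; n != 0; n -= 1) {
    vmax0 = max_f32(*x++, vmax0);
  }
  /* Tree-reduce the four partial maxima, as on line 36 of scalar.c. */
  const float vmax01 = max_f32(vmax0, vmax1);
  const float vmax23 = max_f32(vmax2, vmax3);
  return max_f32(vmax01, vmax23);
}
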
/external/XNNPACK/src/u8-rmax/
scalar.c  (all matches in xnn_u8_rmax_ukernel__scalar())
  18  uint8_t vmax0 = 0;   (local declaration)
  25  vmax0 = vt0 > vmax0 ? vt0 : vmax0;
  28  uint8_t vmax = vmax0 > vmax1 ? vmax0 : vmax1;
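
The u8 variant can seed its accumulators with 0 instead of the first element, because 0 is the smallest possible uint8_t value. A minimal sketch of the two-accumulator form the snippet suggests; the function name, the two-way unrolling, and the tail handling are assumptions for illustration.

#include <stddef.h>
#include <stdint.h>

/* Sketch of a two-accumulator unsigned-byte max; n counts bytes. */
uint8_t rmax_u8_scalar_sketch(size_t n, const uint8_t* x) {
  /* Starting from 0 is safe: no uint8_t value is smaller. */
  uint8_t vmax0 = 0;
  uint8_t vmax1 = 0;
  /* Process two bytes per iteration with independent accumulators. */
  for (; n >= 2; n -= 2) {
    const uint8_t vt0 = x[0];
    const uint8_t vt1 = x[1];
    x += 2;
    vmax0 = vt0 > vmax0 ? vt0 : vmax0;
    vmax1 = vt1 > vmax1 ? vt1 : vmax1;
  }
  /* Handle an odd trailing byte. */
  if (n != 0) {
    const uint8_t vt0 = *x;
    vmax0 = vt0 > vmax0 ? vt0 : vmax0;
  }
  /* Combine the two partial maxima, as on line 28 of scalar.c. */
  return vmax0 > vmax1 ? vmax0 : vmax1;
}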