
Searched defs:vp4 (Results 1 – 25 of 262) sorted by relevance


/external/XNNPACK/src/f32-vscaleexpminusmax/gen/
avx512f-p5-scalef-x80.c:83  __m512 vp4 = _mm512_fmadd_ps(vc5, vt4, vc4); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x80() [local]
avx512f-p5-scalef-x96.c:88  __m512 vp4 = _mm512_fmadd_ps(vc5, vt4, vc4); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x96() [local]
avx2-p5-x40.c:102  __m256 vp4 = _mm256_fmadd_ps(vc5, vt4, vc4); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x40() [local]
avx512f-p5-scalef-x112.c:93  __m512 vp4 = _mm512_fmadd_ps(vc5, vt4, vc4); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x112() [local]
/external/XNNPACK/src/f32-vscaleextexp/gen/
avx512f-p5-scalef-x80.c:77  __m512 vp4 = _mm512_fmadd_ps(vc5, vt4, vc4); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x80() [local]
avx512f-p5-scalef-x96.c:81  __m512 vp4 = _mm512_fmadd_ps(vc5, vt4, vc4); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x96() [local]
avx2-p5-x40.c:83  __m256 vp4 = _mm256_fmadd_ps(vc5, vt4, vc4); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x40() [local]
avx512f-p5-scalef-x112.c:85  __m512 vp4 = _mm512_fmadd_ps(vc5, vt4, vc4); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x112() [local]
avx512f-p5-scalef-x128.c:89  __m512 vp4 = _mm512_fmadd_ps(vc5, vt4, vc4); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x128() [local]
/external/XNNPACK/src/f32-velu/gen/
velu-wasm-rr2-p6-x5.c:89  float vp4 = vc6 * vt4 + vc5; in xnn_f32_velu_ukernel__wasm_rr2_p6_x5() [local]
velu-scalar-rr2-p6-x5.c:109  float vp4 = vc6 * vt4 + vc5; in xnn_f32_velu_ukernel__scalar_rr2_p6_x5() [local]
velu-avx2-rr1-p6-x40.c:80  __m256 vp4 = _mm256_fmadd_ps(vc6, vt4, vc5); in xnn_f32_velu_ukernel__avx2_rr1_p6_x40() [local]
velu-wasm-rr2-p6-x6.c:96  float vp4 = vc6 * vt4 + vc5; in xnn_f32_velu_ukernel__wasm_rr2_p6_x6() [local]
velu-scalar-rr2-p6-x6.c:120  float vp4 = vc6 * vt4 + vc5; in xnn_f32_velu_ukernel__scalar_rr2_p6_x6() [local]
velu-avx512f-rr1-p6-x80.c:82  __m512 vp4 = _mm512_fmadd_ps(vc6, vt4, vc5); in xnn_f32_velu_ukernel__avx512f_rr1_p6_x80() [local]
/external/XNNPACK/src/f32-vsigmoid/gen/
vsigmoid-avx512f-rr1-p5-scalef-div-x80.c:73  __m512 vp4 = _mm512_fmadd_ps(vc5, vt4, vc4); in xnn_f32_vsigmoid_ukernel__avx512f_rr1_p5_scalef_div_x80() [local]
vsigmoid-avx512f-rr1-p5-scalef-nr1fma-x80.c:73  __m512 vp4 = _mm512_fmadd_ps(vc5, vt4, vc4); in xnn_f32_vsigmoid_ukernel__avx512f_rr1_p5_scalef_nr1fma_x80() [local]
/external/XNNPACK/src/f16-raddstoreexpminusmax/gen/
avx2-rr1-p2-x40.c:81  const __m256 vp4 = _mm256_fmadd_ps(vc2, vt4, vc1); in xnn_f16_raddstoreexpminusmax_ukernel__avx2_rr1_p2_x40() [local]
avx2-rr1-p2-x40-acc2.c:82  const __m256 vp4 = _mm256_fmadd_ps(vc2, vt4, vc1); in xnn_f16_raddstoreexpminusmax_ukernel__avx2_rr1_p2_x40_acc2() [local]
avx2-rr1-p2-x40-acc5.c:85  const __m256 vp4 = _mm256_fmadd_ps(vc2, vt4, vc1); in xnn_f16_raddstoreexpminusmax_ukernel__avx2_rr1_p2_x40_acc5() [local]
neonfp16arith-rr2-p2-x40.c:87  const float16x8_t vp4 = vfmaq_f16(vc1, vc2, vt4); in xnn_f16_raddstoreexpminusmax_ukernel__neonfp16arith_rr2_p2_x40() [local]
avx2-rr1-p2-x48-acc2.c:88  const __m256 vp4 = _mm256_fmadd_ps(vc2, vt4, vc1); in xnn_f16_raddstoreexpminusmax_ukernel__avx2_rr1_p2_x48_acc2() [local]
/external/XNNPACK/src/f32-raddexpminusmax/gen/
avx512f-p5-scalef-x128-acc2.c:98  __m512 vp4 = _mm512_fmadd_ps(vc5, vt4, vc4); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x128_acc2() [local]
/external/XNNPACK/src/f32-raddstoreexpminusmax/gen/
avx512f-rr1-p5-scalef-x128.c:81  __m512 vp4 = _mm512_fmadd_ps(vc5, vt4, vc4); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_rr1_p5_scalef_x128() [local]
/external/XNNPACK/src/f32-raddextexp/gen/
avx512f-p5-scalef-x128.c:89  __m512 vp4 = _mm512_fmadd_ps(vc5, vt4, vc4); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128() [local]
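
Note: every hit above comes from the same generated template. vp4 is the Horner-scheme accumulator for the fourth vector (or scalar lane) of an unrolled batch, seeded with one fused multiply-add of the two highest polynomial coefficients. As a rough scalar sketch of that pattern only (illustrative Taylor coefficients, no range reduction; not XNNPACK's fitted constants), the degree-5 exp approximation behind the p5 kernels evaluates like this:

    #include <math.h>
    #include <stdio.h>

    /* Horner evaluation of exp(t) ~= 1 + t*(c1 + t*(c2 + t*(c3 + t*(c4 + t*c5))))
     * on a small reduced argument t. The first statement mirrors the
     * "vp4 = _mm512_fmadd_ps(vc5, vt4, vc4)" step that the search matched;
     * the coefficients here are plain Taylor terms, used purely for illustration. */
    static float exp_poly5(float vt) {
      const float vc5 = 1.0f / 120.0f;
      const float vc4 = 1.0f / 24.0f;
      const float vc3 = 1.0f / 6.0f;
      const float vc2 = 1.0f / 2.0f;
      const float vc1 = 1.0f;
      float vp = vc5 * vt + vc4;   /* the "vp4" definition seen in the results */
      vp = vp * vt + vc3;
      vp = vp * vt + vc2;
      vp = vp * vt + vc1;
      return 1.0f + vt * vp;       /* exp(t) for small |t| */
    }

    int main(void) {
      const float t = 0.3f;
      printf("poly: %f  expf: %f\n", exp_poly5(t), expf(t));
      return 0;
    }

In the vectorized kernels the same chain runs on __m512, __m256, or float16x8_t registers, with vp0 through vpN covering the unrolled batch, which is why the index lists 262 near-identical definitions of vp4.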
