/external/XNNPACK/src/f32-vscaleexpminusmax/gen/
D | avx512f-p5-scalef-x80.c:116 | __m512 vf4 = _mm512_scalef_ps(vp4, vn4); | in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x80() (local)
D | avx512f-p5-scalef-x96.c:126 | __m512 vf4 = _mm512_scalef_ps(vp4, vn4); | in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x96() (local)
D | avx2-p5-x40.c:136 | __m256 vf4 = _mm256_fmadd_ps(vt4, vp4, vs4); | in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x40() (local)
D | avx512f-p5-scalef-x112.c:136 | __m512 vf4 = _mm512_scalef_ps(vp4, vn4); | in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x112() (local)
D | avx512f-p5-scalef-x128.c:146 | __m512 vf4 = _mm512_scalef_ps(vp4, vn4); | in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x128() (local)
D | avx2-p5-x48.c:148 | __m256 vf4 = _mm256_fmadd_ps(vt4, vp4, vs4); | in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x48() (local)
D | avx2-p5-x56.c:160 | __m256 vf4 = _mm256_fmadd_ps(vt4, vp4, vs4); | in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x56() (local)
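Each `vf4` above is the exponential-reconstruction step of a kernel that, judging by its name, computes y[i] = scale * exp(x[i] - max): the AVX-512 variants apply the 2^n factor with `_mm512_scalef_ps(vp, vn)`, while the AVX2 variants fold the precomputed scale s = 2^n into a final FMA. A minimal scalar sketch of the overall computation (the function name and signature below are illustrative, not part of XNNPACK):

    #include <math.h>
    #include <stddef.h>

    /* Illustrative scalar reference, assuming the kernel name means
     * "vector scale * exp(x - max)".  The SIMD kernels listed above
     * evaluate a degree-5 polynomial for the reduced argument and then
     * rebuild the exponential via scalef (AVX-512) or an FMA against
     * the 2^n scale (AVX2). */
    static void scale_exp_minus_max_ref(const float* x, float* y, size_t n,
                                        float scale, float x_max) {
      for (size_t i = 0; i < n; i++) {
        y[i] = scale * expf(x[i] - x_max);
      }
    }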
/external/XNNPACK/src/f32-vscaleextexp/gen/
D | avx512f-p5-scalef-x80.c:113 | __m512 vf4 = _mm512_mul_ps(vp4, vscalev); | in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x80() (local)
D | avx512f-p5-scalef-x96.c:122 | __m512 vf4 = _mm512_mul_ps(vp4, vscalev); | in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x96() (local)
D | avx2-p5-x40.c:119 | __m256 vf4 = _mm256_mul_ps(vp4, vscalev); | in xnn_f32_vscaleextexp_ukernel__avx2_p5_x40() (local)
D | avx512f-p5-scalef-x112.c:131 | __m512 vf4 = _mm512_mul_ps(vp4, vscalev); | in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x112() (local)
D | avx512f-p5-scalef-x128.c:140 | __m512 vf4 = _mm512_mul_ps(vp4, vscalev); | in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x128() (local)
D | avx2-p5-x48.c:128 | __m256 vf4 = _mm256_mul_ps(vp4, vscalev); | in xnn_f32_vscaleextexp_ukernel__avx2_p5_x48() (local)
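In this group `vf4` applies the scale factor to the polynomial part only; the `extexp` in the kernel names suggests an extended-exponent result, where the power-of-two factor is carried separately so that very large exponentials do not overflow a single float. A hedged sketch of that representation (the struct and helper below are illustrative, not XNNPACK's actual types):

    #include <math.h>

    /* Illustrative only: an "extended exponent" value keeps the 2^e factor
     * out of the float itself, so the pair (m, e) stands for m * 2^e and
     * only risks overflow when collapsed back into a plain float. */
    typedef struct {
      float m; /* scaled polynomial/mantissa part (what vf holds above) */
      float e; /* power-of-two exponent, tracked separately */
    } extexp_value;

    static float extexp_collapse(extexp_value v) {
      return ldexpf(v.m, (int) v.e); /* m * 2^e; may over/underflow here */
    }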
/external/llvm-project/clang/test/CXX/temp/temp.param/
D | p7.cpp:68 | VF4 vf4; | member
/external/XNNPACK/src/f32-sigmoid/gen/
D | avx512f-rr1-p5-scalef-div-x80.c:115 | __m512 vf4 = _mm512_div_ps(ve4, vd4); | in xnn_f32_sigmoid_ukernel__avx512f_rr1_p5_scalef_div_x80() (local)
D | avx512f-rr1-lut16-p3-perm-scalef-div-x80.c:118 | __m512 vf4 = _mm512_div_ps(ve4, vd4); | in xnn_f32_sigmoid_ukernel__avx512f_rr1_lut16_p3_perm_scalef_div_x80() (local)
D | avx512f-rr2-lut32-p2-perm2-scalef-div-x80.c:124 | __m512 vf4 = _mm512_div_ps(ve4, vd4); | in xnn_f32_sigmoid_ukernel__avx512f_rr2_lut32_p2_perm2_scalef_div_x80() (local)
D | avx512f-rr1-p5-scalef-nr1fma-x80.c:127 | __m512 vf4 = _mm512_mul_ps(ve4, vr4); | in xnn_f32_sigmoid_ukernel__avx512f_rr1_p5_scalef_nr1fma_x80() (local)
D | avx512f-rr1-p5-scalef-div-x96.c:127 | __m512 vf4 = _mm512_div_ps(ve4, vd4); | in xnn_f32_sigmoid_ukernel__avx512f_rr1_p5_scalef_div_x96() (local)
D | avx2-rr1-p5-div-x40.c:124 | __m256 vf4 = _mm256_div_ps(ve4, vd4); | in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x40() (local)
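In the sigmoid kernels, `vf4` is the final quotient e / (e + 1): the `div` variants divide directly, while the `nr1fma` variant appears to approximate the reciprocal of the denominator (one Newton-Raphson step done with an FMA) and multiply by it instead. A scalar sketch of the likely computation (the helper name below is illustrative):

    #include <math.h>

    /* Illustrative scalar reference for the listed sigmoid kernels:
     * work with z = -|x| so exp(z) never overflows, form f = e / (e + 1),
     * then fold the sign back in.  The div variants above compute the
     * quotient directly; the nr1fma variant multiplies by an approximate
     * reciprocal of the denominator. */
    static float sigmoid_ref(float x) {
      const float e = expf(-fabsf(x)); /* exp of the non-positive argument */
      const float d = e + 1.0f;        /* denominator, vd in the kernels */
      const float f = e / d;           /* sigmoid(-|x|) */
      return x >= 0.0f ? 1.0f - f : f; /* restore the original sign */
    }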
/external/XNNPACK/src/f32-raddexpminusmax/gen/
D | avx512f-p5-scalef-x128.c:145 | const __m512 vf4 = _mm512_scalef_ps(vp4, vn4); | in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x128() (local)
D | avx512f-p5-scalef-x128-acc2.c:146 | const __m512 vf4 = _mm512_scalef_ps(vp4, vn4); | in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x128_acc2() (local)
D | avx512f-p5-scalef-x128-acc4.c:148 | const __m512 vf4 = _mm512_scalef_ps(vp4, vn4); | in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x128_acc4() (local)
/external/XNNPACK/src/f32-raddstoreexpminusmax/gen/
D | avx512f-p5-scalef-x128.c:146 | const __m512 vf4 = _mm512_scalef_ps(vp4, vn4); | in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x128() (local)
D | avx512f-p5-scalef-x128-acc2.c:147 | const __m512 vf4 = _mm512_scalef_ps(vp4, vn4); | in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x128_acc2() (local)
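The two `radd*` families above build the same exponentials (again via `_mm512_scalef_ps`) but reduce them to a sum, which is how a softmax denominator is typically formed; the `acc2`/`acc4` suffixes appear to indicate the number of parallel accumulators, and the `store` family also writes the exponentials back out. A hedged scalar sketch covering both (function name and signature are illustrative):

    #include <math.h>
    #include <stddef.h>

    /* Illustrative scalar reference for both families listed above:
     * accumulate sum += exp(x[i] - max); pass y != NULL to model the
     * "store" variant that also keeps each exponential. */
    static float radd_exp_minus_max_ref(const float* x, float* y,
                                        size_t n, float x_max) {
      float sum = 0.0f;
      for (size_t i = 0; i < n; i++) {
        const float f = expf(x[i] - x_max);
        if (y != NULL) {
          y[i] = f; /* raddstoreexpminusmax also stores the exponentials */
        }
        sum += f;
      }
      return sum;
    }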