/external/XNNPACK/src/cs16-vsquareabs/gen/
D | scalar-x4.c | 34 const int32_t vr3 = (int32_t) input[6]; in xnn_cs16_vsquareabs_ukernel__scalar_x4() local
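In the cs16 vsquareabs kernels the vr*/vi* locals hold the real and imaginary parts of interleaved complex int16 samples, so vr3 above is the real part of sample 3 (input[6]). A minimal scalar sketch of the per-sample squared-magnitude computation these locals feed into follows; the function name and signature are illustrative, not the XNNPACK ukernel API.

    #include <stddef.h>
    #include <stdint.h>

    // Illustrative sketch (not the XNNPACK ukernel signature): squared absolute
    // value of interleaved complex int16 samples.  For sample k, input[2*k] is
    // the real part and input[2*k + 1] the imaginary part, so vr3 above reads
    // input[6], the real part of sample 3.
    static void squareabs_cs16_scalar(size_t n, const int16_t* input, uint32_t* output) {
      for (size_t k = 0; k < n; k++) {
        const int32_t vr = (int32_t) input[2 * k];
        const int32_t vi = (int32_t) input[2 * k + 1];
        // Each square fits in 31 bits; summing in unsigned 32-bit avoids signed overflow.
        output[k] = (uint32_t) (vr * vr) + (uint32_t) (vi * vi);
      }
    }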
/external/XNNPACK/src/f32-vsigmoid/gen/ |
D | vsigmoid-avx512f-rr1-p5-scalef-nr1fma-x64.c | 102 __m512 vr3 = _mm512_rcp14_ps(vd3); in xnn_f32_vsigmoid_ukernel__avx512f_rr1_p5_scalef_nr1fma_x64() local
D | vsigmoid-avx512f-rr1-lut16-p3-perm-scalef-nr1fma-x64.c | 101 __m512 vr3 = _mm512_rcp14_ps(vd3); in xnn_f32_vsigmoid_ukernel__avx512f_rr1_lut16_p3_perm_scalef_nr1fma_x64() local
D | vsigmoid-avx2-rr1-p5-nr2fma-x32.c | 108 __m256 vr3 = _mm256_rcp_ps(vd3); in xnn_f32_vsigmoid_ukernel__avx2_rr1_p5_nr2fma_x32() local
D | vsigmoid-avx512f-rr1-p5-scalef-nr1fma-x80.c | 114 __m512 vr3 = _mm512_rcp14_ps(vd3); in xnn_f32_vsigmoid_ukernel__avx512f_rr1_p5_scalef_nr1fma_x80() local
D | vsigmoid-avx512f-rr2-lut32-p2-perm2-scalef-nr1fma-x64.c | 103 __m512 vr3 = _mm512_rcp14_ps(vd3); in xnn_f32_vsigmoid_ukernel__avx512f_rr2_lut32_p2_perm2_scalef_nr1fma_x64() local
D | vsigmoid-avx2-rr1-p5-nr1fma-x32.c | 108 __m256 vr3 = _mm256_rcp_ps(vd3); in xnn_f32_vsigmoid_ukernel__avx2_rr1_p5_nr1fma_x32() local
D | vsigmoid-avx512f-rr2-lut32-p2-perm2-scalef-nr1fma-x80.c | 115 __m512 vr3 = _mm512_rcp14_ps(vd3); in xnn_f32_vsigmoid_ukernel__avx512f_rr2_lut32_p2_perm2_scalef_nr1fma_x80() local
D | vsigmoid-avx512f-rr1-p5-scalef-nr1fma-x96.c | 126 __m512 vr3 = _mm512_rcp14_ps(vd3); in xnn_f32_vsigmoid_ukernel__avx512f_rr1_p5_scalef_nr1fma_x96() local
D | vsigmoid-avx512f-rr1-lut16-p3-perm-scalef-nr1fma-x80.c | 113 __m512 vr3 = _mm512_rcp14_ps(vd3); in xnn_f32_vsigmoid_ukernel__avx512f_rr1_lut16_p3_perm_scalef_nr1fma_x80() local
D | vsigmoid-avx2-rr1-p5-nr1fma-x40.c | 121 __m256 vr3 = _mm256_rcp_ps(vd3); in xnn_f32_vsigmoid_ukernel__avx2_rr1_p5_nr1fma_x40() local
D | vsigmoid-avx512f-rr1-lut16-p3-perm-scalef-nr1fma-x96.c | 125 __m512 vr3 = _mm512_rcp14_ps(vd3); in xnn_f32_vsigmoid_ukernel__avx512f_rr1_lut16_p3_perm_scalef_nr1fma_x96() local
D | vsigmoid-avx-rr2-p5-nr2-x32.c | 123 __m256 vr3 = _mm256_rcp_ps(vd3); in xnn_f32_vsigmoid_ukernel__avx_rr2_p5_nr2_x32() local
D | vsigmoid-avx2-rr1-p5-nr2fma-x40.c | 121 __m256 vr3 = _mm256_rcp_ps(vd3); in xnn_f32_vsigmoid_ukernel__avx2_rr1_p5_nr2fma_x40() local
D | vsigmoid-avx512f-rr1-p5-scalef-nr1fma-x112.c | 138 __m512 vr3 = _mm512_rcp14_ps(vd3); in xnn_f32_vsigmoid_ukernel__avx512f_rr1_p5_scalef_nr1fma_x112() local
D | vsigmoid-avx2-rr1-p5-nr1fma-x48.c | 134 __m256 vr3 = _mm256_rcp_ps(vd3); in xnn_f32_vsigmoid_ukernel__avx2_rr1_p5_nr1fma_x48() local
D | vsigmoid-avx2-rr1-p5-nr2fma-x48.c | 134 __m256 vr3 = _mm256_rcp_ps(vd3); in xnn_f32_vsigmoid_ukernel__avx2_rr1_p5_nr2fma_x48() local
D | vsigmoid-avx512f-rr2-lut32-p2-perm2-scalef-nr1fma-x96.c | 127 __m512 vr3 = _mm512_rcp14_ps(vd3); in xnn_f32_vsigmoid_ukernel__avx512f_rr2_lut32_p2_perm2_scalef_nr1fma_x96() local
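In every f32 sigmoid hit above, vr3 is the initial reciprocal estimate for the fourth unrolled denominator register vd3, i.e. the e + 1 term in the identity sigmoid(t) = exp(t) / (exp(t) + 1): `_mm512_rcp14_ps` (relative error at most 2^-14) on AVX-512, `_mm256_rcp_ps` (about 12-bit) on AVX/AVX2. The nr1fma/nr2fma/nr2 suffixes name how many Newton-Raphson refinement steps follow that estimate. A hedged sketch of the reciprocal-plus-one-refinement tail, using an illustrative helper name rather than the exact kernel body:

    #include <immintrin.h>

    // Illustrative sketch (not the exact XNNPACK kernel body): given ve = exp(t),
    // return exp(t) / (exp(t) + 1) = sigmoid(t), refining the rcp14 estimate of
    // 1/(ve + 1) with one Newton-Raphson FMA step ("nr1fma"); "nr2fma" repeats
    // the refinement once more.
    static inline __m512 sigmoid_from_exp_avx512(__m512 ve) {
      const __m512 vone = _mm512_set1_ps(1.0f);
      const __m512 vd = _mm512_add_ps(ve, vone);            // d = exp(t) + 1
      __m512 vr = _mm512_rcp14_ps(vd);                      // r ~= 1/d, like vr3 above
      const __m512 verr = _mm512_fnmadd_ps(vr, vd, vone);   // err = 1 - d*r
      vr = _mm512_fmadd_ps(verr, vr, vr);                   // r' = r + r*err = r*(2 - d*r)
      return _mm512_mul_ps(ve, vr);                         // exp(t) / (exp(t) + 1)
    }

The AVX2 variants follow the same shape with `_mm256_rcp_ps` for the estimate and the analogous `_mm256_fnmadd_ps`/`_mm256_fmadd_ps` pair for each refinement step.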
/external/XNNPACK/src/f16-vsigmoid/gen/ |
D | vsigmoid-avx2-rr1-p2-rcp-x32.c | 93 const __m256 vr3 = _mm256_rcp_ps(vd3); in xnn_f16_vsigmoid_ukernel__avx2_rr1_p2_rcp_x32() local
D | vsigmoid-avx2-rr1-p2-rcp-x40.c | 103 const __m256 vr3 = _mm256_rcp_ps(vd3); in xnn_f16_vsigmoid_ukernel__avx2_rr1_p2_rcp_x40() local
D | vsigmoid-neonfp16arith-rr2-p2-nr1fma-x32.c | 96 float16x8_t vr3 = vrecpeq_f16(vd3); in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x32() local
D | vsigmoid-neonfp16arith-rr2-p2-nr1recps-x32.c | 96 float16x8_t vr3 = vrecpeq_f16(vd3); in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x32() local
D | vsigmoid-neonfp16arith-rr2-p2-nr1recps-x40.c | 107 float16x8_t vr3 = vrecpeq_f16(vd3); in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1recps_x40() local
D | vsigmoid-neonfp16arith-rr2-p2-nr1fma-x40.c | 107 float16x8_t vr3 = vrecpeq_f16(vd3); in xnn_f16_vsigmoid_ukernel__neonfp16arith_rr2_p2_nr1fma_x40() local
D | vsigmoid-avx2-rr1-p2-rcp-x48.c | 113 const __m256 vr3 = _mm256_rcp_ps(vd3); in xnn_f16_vsigmoid_ukernel__avx2_rr1_p2_rcp_x48() local
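The half-precision kernels show the same pattern at lower precision. The AVX2 rcp variants keep the raw `_mm256_rcp_ps` estimate with no refinement, presumably because its roughly 12-bit accuracy already exceeds the 11-bit fp16 significand, while the NEON FP16 variants refine a `vrecpeq_f16` estimate with one step, either FMA-based (nr1fma) or VRECPS-based (nr1recps). A hedged sketch of the nr1recps flavor; the helper name is illustrative and the code assumes ARMv8.2-A FP16 support:

    #include <arm_neon.h>  // requires ARMv8.2-A with the FP16 extension

    // Illustrative sketch (not the exact XNNPACK kernel body): refine the coarse
    // vrecpeq_f16 estimate of 1/d with one Newton-Raphson step.  vrecpsq_f16(r, d)
    // computes 2 - r*d, so the product below is r' = r * (2 - r*d).
    static inline float16x8_t reciprocal_f16_nr1recps(float16x8_t vd) {
      float16x8_t vr = vrecpeq_f16(vd);          // initial estimate, like vr3 above
      vr = vmulq_f16(vr, vrecpsq_f16(vr, vd));   // one "nr1recps" refinement
      return vr;
    }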