
Searched refs:ve9 (Results 1 – 15 of 15) sorted by relevance

/external/XNNPACK/src/f32-vscaleextexp/gen/
avx2-p5-x80.c
180 __m256 ve9 = _mm256_add_ps(vn9, vscalee); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80() local
195 ve9 = _mm256_max_ps(ve9, vmin_exponent); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
209 …const __m256 vs9 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve9, vm… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x80()
avx2-p5-x88.c
190 __m256 ve9 = _mm256_add_ps(vn9, vscalee); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88() local
206 ve9 = _mm256_max_ps(ve9, vmin_exponent); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
221 …const __m256 vs9 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve9, vm… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x88()
avx2-p5-x96.c
200 __m256 ve9 = _mm256_add_ps(vn9, vscalee); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96() local
217 ve9 = _mm256_max_ps(ve9, vmin_exponent); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
233 …const __m256 vs9 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(_mm256_add_ps(ve9, vm… in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
avx512f-p5-scalef-x160.c
174 const __m512 ve9 = _mm512_add_ps(vn9, vscalee); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160() local
186 vf9 = _mm512_scalef_ps(vf9, ve9); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160()
avx512f-p5-scalef-x176.c
184 const __m512 ve9 = _mm512_add_ps(vn9, vscalee); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x176() local
197 vf9 = _mm512_scalef_ps(vf9, ve9); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x176()
avx512f-p5-scalef-x192.c
194 const __m512 ve9 = _mm512_add_ps(vn9, vscalee); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x192() local
208 vf9 = _mm512_scalef_ps(vf9, ve9); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x192()
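Taken together, these hits show the two ways the vscaleextexp kernels apply the accumulated extended exponent ve9 = vn9 + vscalee: the AVX2 variants clamp it and rebuild the scale factor 2^ve9 through integer bit manipulation, while the AVX-512 variants hand it straight to the scalef instruction. The sketch below illustrates both steps under that reading; the helper names are hypothetical, and the AVX2 path uses a plain convert-and-shift rather than the exact magic-bias sequence in the generated files.

#include <immintrin.h>

/* Hypothetical helpers sketching the two exponent-application paths.
 * AVX2: clamp the exponent, then write (e + 127) into the IEEE-754
 * exponent field to form 2^e (valid for integer e in the normal range). */
static inline __m256 scale_from_exponent_avx2(__m256 ve, __m256 vmin_exponent)
{
  ve = _mm256_max_ps(ve, vmin_exponent);  /* as in: ve9 = _mm256_max_ps(ve9, vmin_exponent) */
  const __m256i vbiased = _mm256_add_epi32(_mm256_cvtps_epi32(ve), _mm256_set1_epi32(127));
  return _mm256_castsi256_ps(_mm256_slli_epi32(vbiased, 23));  /* bit pattern of 2^ve */
}

/* AVX-512F: no bit twiddling needed; _mm512_scalef_ps computes
 * vf * 2^floor(ve), and floor is a no-op for the integer-valued ve here. */
static inline __m512 apply_exponent_avx512(__m512 vf, __m512 vn, __m512 vscalee)
{
  const __m512 ve = _mm512_add_ps(vn, vscalee);  /* ve9 = vn9 + vscalee */
  return _mm512_scalef_ps(vf, ve);               /* vf9 = vf9 * 2^ve9 */
}

Both helpers compile in one translation unit with AVX2 and AVX-512F enabled (e.g. -mavx2 -mavx512f).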
/external/XNNPACK/src/f32-vsigmoid/gen/
vsigmoid-avx2-rr1-p5-div-x80.c
170 const __m256 ve9 = _mm256_fmadd_ps(vt9, vp9, vs9); in xnn_f32_vsigmoid_ukernel__avx2_rr1_p5_div_x80() local
181 const __m256 vd9 = _mm256_add_ps(ve9, vone); in xnn_f32_vsigmoid_ukernel__avx2_rr1_p5_div_x80()
192 __m256 vf9 = _mm256_div_ps(ve9, vd9); in xnn_f32_vsigmoid_ukernel__avx2_rr1_p5_div_x80()
vsigmoid-avx2-rr1-p5-nr2fma-x80.c
170 const __m256 ve9 = _mm256_fmadd_ps(vt9, vp9, vs9); in xnn_f32_vsigmoid_ukernel__avx2_rr1_p5_nr2fma_x80() local
181 const __m256 vd9 = _mm256_add_ps(ve9, vone); in xnn_f32_vsigmoid_ukernel__avx2_rr1_p5_nr2fma_x80()
225 __m256 vf9 = _mm256_mul_ps(ve9, vr9); in xnn_f32_vsigmoid_ukernel__avx2_rr1_p5_nr2fma_x80()
vsigmoid-avx2-rr1-p5-nr1fma-x80.c
170 const __m256 ve9 = _mm256_fmadd_ps(vt9, vp9, vs9); in xnn_f32_vsigmoid_ukernel__avx2_rr1_p5_nr1fma_x80() local
181 const __m256 vd9 = _mm256_add_ps(ve9, vone); in xnn_f32_vsigmoid_ukernel__avx2_rr1_p5_nr1fma_x80()
215 __m256 vf9 = _mm256_mul_ps(ve9, vr9); in xnn_f32_vsigmoid_ukernel__avx2_rr1_p5_nr1fma_x80()
vsigmoid-avx-rr2-p5-div-x80.c
202 const __m256 ve9 = _mm256_add_ps(_mm256_mul_ps(vt9, vp9), vs9); in xnn_f32_vsigmoid_ukernel__avx_rr2_p5_div_x80() local
213 const __m256 vd9 = _mm256_add_ps(ve9, vone); in xnn_f32_vsigmoid_ukernel__avx_rr2_p5_div_x80()
224 __m256 vf9 = _mm256_div_ps(ve9, vd9); in xnn_f32_vsigmoid_ukernel__avx_rr2_p5_div_x80()
vsigmoid-avx-rr2-p5-nr2-x80.c
203 const __m256 ve9 = _mm256_add_ps(_mm256_mul_ps(vt9, vp9), vs9); in xnn_f32_vsigmoid_ukernel__avx_rr2_p5_nr2_x80() local
214 const __m256 vd9 = _mm256_add_ps(ve9, vone); in xnn_f32_vsigmoid_ukernel__avx_rr2_p5_nr2_x80()
257 __m256 vf9 = _mm256_mul_ps(ve9, vr9); in xnn_f32_vsigmoid_ukernel__avx_rr2_p5_nr2_x80()
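Across the sigmoid hits, ve9 holds the approximated exp(x) term and the variants differ only in how e / (e + 1) is evaluated: the div kernels divide directly (vf9 = ve9 / vd9), while the nr1fma/nr2fma/nr2 kernels multiply by a Newton-Raphson-refined reciprocal of the denominator. A hedged sketch of the reciprocal path follows; the helper name is hypothetical and this is an illustration of the technique, not code lifted from the generated kernels.

#include <immintrin.h>

/* Given ve ~ exp(x), reconstruct sigmoid(x) = e / (e + 1) without a
 * division: start from the hardware reciprocal estimate of d = e + 1
 * and refine it with Newton-Raphson steps r <- r + r*(1 - d*r). */
static inline __m256 sigmoid_from_exp_nr2fma(__m256 ve)
{
  const __m256 vone = _mm256_set1_ps(1.0f);
  const __m256 vd = _mm256_add_ps(ve, vone);   /* vd9 = ve9 + 1 */

  __m256 vr = _mm256_rcp_ps(vd);               /* ~12-bit estimate of 1/vd */
  vr = _mm256_fmadd_ps(_mm256_fnmadd_ps(vd, vr, vone), vr, vr);  /* 1st refinement */
  vr = _mm256_fmadd_ps(_mm256_fnmadd_ps(vd, vr, vone), vr, vr);  /* 2nd refinement */

  return _mm256_mul_ps(ve, vr);                /* vf9 = ve9 * vr9 */
}

The div variants in the hits above replace the reciprocal steps with a single _mm256_div_ps(ve, vd).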
/external/XNNPACK/src/f32-velu/gen/
velu-avx2-rr1-p6-x80.c
210 const __m256 ve9 = _mm256_fmadd_ps(vp9, valpha, vs9); in xnn_f32_velu_ukernel__avx2_rr1_p6_x80() local
222 const __m256 vy9 = _mm256_blendv_ps(vx9, ve9, vx9); in xnn_f32_velu_ukernel__avx2_rr1_p6_x80()
velu-avx2-rr1-lut16-p3-gather-x80.c
208 const __m256 ve9 = _mm256_fmadd_ps(vp9, valpha, vs9); in xnn_f32_velu_ukernel__avx2_rr1_lut16_p3_gather_x80() local
220 const __m256 vy9 = _mm256_blendv_ps(vx9, ve9, vx9); in xnn_f32_velu_ukernel__avx2_rr1_lut16_p3_gather_x80()
velu-avx2-rr1-lut4-p4-perm-x80.c
207 const __m256 ve9 = _mm256_fmadd_ps(vp9, valpha, vs9); in xnn_f32_velu_ukernel__avx2_rr1_lut4_p4_perm_x80() local
219 const __m256 vy9 = _mm256_blendv_ps(vx9, ve9, vx9); in xnn_f32_velu_ukernel__avx2_rr1_lut4_p4_perm_x80()
velu-avx2-rr1-lut8-p4-perm-x80.c
207 const __m256 ve9 = _mm256_fmadd_ps(vp9, valpha, vs9); in xnn_f32_velu_ukernel__avx2_rr1_lut8_p4_perm_x80() local
219 const __m256 vy9 = _mm256_blendv_ps(vx9, ve9, vx9); in xnn_f32_velu_ukernel__avx2_rr1_lut8_p4_perm_x80()
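In the ELU kernels, ve9 is the negative-branch value (roughly alpha * (exp(x) - 1), assembled from the polynomial term vp9, the exp-derived scale vs9, and valpha), and the blend keys off the sign bit of vx9 so that value is selected exactly for negative inputs. A minimal sketch of that selection, with the polynomial and scale construction elided and the helper name hypothetical:

#include <immintrin.h>

/* Combine the negative-branch terms and select per lane by sign.
 * vp and vs stand in for the kernels' polynomial and exp-scale values,
 * whose construction is not visible in the hits above. */
static inline __m256 elu_select(__m256 vx, __m256 vp, __m256 vs, __m256 valpha)
{
  /* ve9 = vp9 * valpha + vs9 : negative-branch result
   * (alpha*(exp(x) - 1) in the real kernels). */
  const __m256 ve = _mm256_fmadd_ps(vp, valpha, vs);

  /* blendv takes its second source where the mask's sign bit is set, so
   * using vx itself as the mask picks ve for x < 0 and keeps x otherwise. */
  return _mm256_blendv_ps(vx, ve, vx);
}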