Searched refs:vaccv4 (Results 1 – 4 of 4) sorted by relevance
/external/XNNPACK/src/f32-raddextexp/gen/
avx512f-p5-scalef-x160-acc5.c  (hits in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc5(); line 44 is the local definition)
     44  __m512 vaccv4 = _mm512_setzero_ps();
    198  vaccv4 = _mm512_scalef_ps(vaccv4, vdelta_acce4);
    203  vaccv4 = _mm512_add_ps(vaccv4, _mm512_scalef_ps(vp4, vdelta_e4));
    208  vaccv4 = _mm512_add_ps(vaccv4, _mm512_scalef_ps(vp9, vdelta_e9));
    234  vaccv = _mm512_add_ps(vaccv, _mm512_scalef_ps(vaccv4, vdelta_acce4));
avx512f-p5-scalef-x192-acc6.c  (hits in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6(); line 44 is the local definition)
     44  __m512 vaccv4 = _mm512_setzero_ps();
    223  vaccv4 = _mm512_scalef_ps(vaccv4, vdelta_acce4);
    229  vaccv4 = _mm512_add_ps(vaccv4, _mm512_scalef_ps(vp4, vdelta_e4));
    235  vaccv4 = _mm512_add_ps(vaccv4, _mm512_scalef_ps(vp10, vdelta_e10));
    264  vaccv = _mm512_add_ps(vaccv, _mm512_scalef_ps(vaccv4, vdelta_acce4));
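Note: in both AVX-512 kernels above, vaccv4 is the value half of the fifth extended-exponent accumulator: the running partial sum is kept as vaccv4 * 2^vacce4, and each new term vp * 2^ve is folded in after both sides are rescaled to a common exponent with _mm512_scalef_ps (x * 2^e). A minimal sketch of that accumulation step, with the helper name and the un-suffixed variable names chosen for illustration rather than taken from the generated kernels:

    #include <immintrin.h>

    // Add one term vp * 2^ve to an extended-exponent accumulator (vaccv, vacce),
    // which represents the running sum vaccv * 2^vacce.  Requires AVX512F.
    static inline void accumulate_extexp(__m512* vaccv, __m512* vacce,
                                         __m512 vp, __m512 ve) {
      // Work at the larger of the two exponents so neither term overflows.
      const __m512 vmax_e = _mm512_max_ps(*vacce, ve);
      const __m512 vdelta_acce = _mm512_sub_ps(*vacce, vmax_e);  // <= 0
      const __m512 vdelta_e    = _mm512_sub_ps(ve, vmax_e);      // <= 0
      // Rescale both terms to the common exponent and add them.
      *vaccv = _mm512_add_ps(_mm512_scalef_ps(*vaccv, vdelta_acce),
                             _mm512_scalef_ps(vp, vdelta_e));
      *vacce = vmax_e;
    }

The last hit in each file (lines 234 and 264) is the epilogue that merges the partial accumulator vaccv4 into vaccv the same way before the final reduction to a scalar.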
avx2-p5-x80-acc5.c  (hits in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5(); line 48 is the local definition)
     48  __m256 vaccv4 = _mm256_setzero_ps();
    227  vaccv4 = _mm256_mul_ps(vaccv4, vaccs4);
    232  vaccv4 = _mm256_fmadd_ps(vp4, vs4, vaccv4);
    237  vaccv4 = _mm256_fmadd_ps(vp9, vs9, vaccv4);
    269  vaccv = _mm256_fmadd_ps(vaccv4, vaccs4, vaccv);
avx2-p5-x96-acc6.c  (hits in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6(); line 48 is the local definition)
     48  __m256 vaccv4 = _mm256_setzero_ps();
    255  vaccv4 = _mm256_mul_ps(vaccv4, vaccs4);
    261  vaccv4 = _mm256_fmadd_ps(vp4, vs4, vaccv4);
    267  vaccv4 = _mm256_fmadd_ps(vp10, vs10, vaccv4);
    303  vaccv = _mm256_fmadd_ps(vaccv4, vaccs4, vaccv);
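Note: the AVX2 kernels follow the same multi-accumulator scheme, but AVX2 has no scalef instruction, so the (non-positive) exponent deltas are first turned into ordinary scale factors (vaccs4, vs4) and applied with a multiply and an FMA. A rough sketch of the idea; the exp2 helper below is an illustration only, and the generated kernels construct the scale factors with their own constants and clamping:

    #include <immintrin.h>

    // Build 2^delta for small non-positive integer-valued floats by placing
    // (delta + 127) into the exponent field; deltas of -127 or below produce 0.
    static inline __m256 exp2_nonpos_int(__m256 vdelta) {
      vdelta = _mm256_max_ps(vdelta, _mm256_set1_ps(-127.0f));
      const __m256i vbits = _mm256_slli_epi32(
          _mm256_add_epi32(_mm256_cvtps_epi32(vdelta), _mm256_set1_epi32(127)), 23);
      return _mm256_castsi256_ps(vbits);
    }

    // Same accumulation step as the AVX-512 sketch, expressed with mul/FMA.
    // Requires AVX2 and FMA.
    static inline void accumulate_extexp_avx2(__m256* vaccv, __m256* vacce,
                                              __m256 vp, __m256 ve) {
      const __m256 vmax_e = _mm256_max_ps(*vacce, ve);
      const __m256 vaccs = exp2_nonpos_int(_mm256_sub_ps(*vacce, vmax_e));
      const __m256 vs    = exp2_nonpos_int(_mm256_sub_ps(ve, vmax_e));
      *vaccv = _mm256_mul_ps(*vaccv, vaccs);     // cf. vaccv4 = _mm256_mul_ps(vaccv4, vaccs4)
      *vaccv = _mm256_fmadd_ps(vp, vs, *vaccv);  // cf. vaccv4 = _mm256_fmadd_ps(vp4, vs4, vaccv4)
      *vacce = vmax_e;
    }

As in the AVX-512 case, the last hit in each file (lines 269 and 303) folds vaccv4 back into the single accumulator vaccv before the final horizontal reduction.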