
Searched refs:vs7 (Results 1 – 25 of 57) sorted by relevance


/external/XNNPACK/src/f32-raddexpminusmax/gen/
avx2-p5-x64.c
84 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64() local
164 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64()
173 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64()
avx2-p5-x64-acc2.c
85 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64_acc2() local
165 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64_acc2()
174 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64_acc2()
avx2-p5-x64-acc4.c
87 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64_acc4() local
167 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64_acc4()
176 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64_acc4()
avx2-p5-x72.c
87 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x72() local
175 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x72()
185 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x72()
avx2-p5-x72-acc3.c
89 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x72_acc3() local
177 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x72_acc3()
187 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x72_acc3()
avx2-p5-x80.c
90 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80() local
186 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80()
197 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80()
avx2-p5-x80-acc2.c
91 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80_acc2() local
187 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80_acc2()
198 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80_acc2()
avx2-p5-x80-acc5.c
94 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80_acc5() local
190 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80_acc5()
201 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80_acc5()
avx2-p5-x96-acc3.c
98 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96_acc3() local
210 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96_acc3()
223 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96_acc3()
/external/XNNPACK/src/f32-vscaleexpminusmax/gen/
avx2-p5-x64.c
86 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x64() local
166 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x64()
175 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x64()
avx2-p5-x72.c
89 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x72() local
177 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x72()
187 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x72()
avx2-p5-x80.c
92 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x80() local
188 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x80()
199 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x80()
avx2-p5-x88.c
95 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x88() local
199 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x88()
211 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x88()
/external/XNNPACK/src/f32-raddstoreexpminusmax/gen/
avx2-p5-x64-acc2.c
86 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64_acc2() local
166 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64_acc2()
175 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64_acc2()
avx2-p5-x64.c
85 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64() local
165 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64()
174 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64()
avx2-p5-x64-acc4.c
88 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64_acc4() local
168 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64_acc4()
177 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64_acc4()
avx2-p5-x72.c
88 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x72() local
176 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x72()
186 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x72()
avx2-p5-x72-acc3.c
90 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x72_acc3() local
178 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x72_acc3()
188 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x72_acc3()
avx2-p5-x80-acc5.c
95 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80_acc5() local
191 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80_acc5()
202 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80_acc5()
avx2-p5-x80.c
91 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80() local
187 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80()
198 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80()
avx2-p5-x80-acc2.c
92 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80_acc2() local
188 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80_acc2()
199 __m256 vf7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80_acc2()
/external/XNNPACK/src/f32-sigmoid/gen/
avx2-rr1-p5-div-x64.c
94 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x64() local
164 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x64()
173 const __m256 ve7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x64()
avx2-rr1-p5-div-x80.c
100 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x80() local
184 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x80()
195 const __m256 ve7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x80()
avx2-rr1-p5-div-x72.c
97 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x72() local
174 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x72()
184 const __m256 ve7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x72()
avx2-rr1-p5-nr1fma-x64.c
94 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_nr1fma_x64() local
164 vt7 = _mm256_mul_ps(vt7, vs7); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_nr1fma_x64()
173 const __m256 ve7 = _mm256_fmadd_ps(vt7, vp7, vs7); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_nr1fma_x64()
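The same three lines recur in every hit: they rebuild the scale factor s = 2^n from the rounded exponent vn7 by shifting its bits into the float exponent field, fold s into the reduced argument, and finish exp of the reduced input as s * (1 + t*p(t)) with one FMA. Below is a minimal scalar sketch of that pattern, assuming the kernels' magic-bias step has left the biased exponent n + 127 in the low bits of vn7; the helper name scale_2_pow_n and the Taylor-style polynomial coefficients are illustrative stand-ins, not the XNNPACK source (which uses minimax-fitted coefficients).

/*
 * Scalar sketch of the pattern in the hits above (illustrative, not XNNPACK code).
 * Assumes n stays within the normal-exponent range (about -126..127), which the
 * kernels guarantee through their range reduction.
 */
#include <math.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Scalar analogue of
 *   vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23));
 * Shifting the biased exponent n + 127 into the exponent field yields 2^n
 * without calling exp(). */
static float scale_2_pow_n(int n) {
    uint32_t bits = (uint32_t)(n + 127) << 23;  /* biased exponent, zero mantissa */
    float s;
    memcpy(&s, &bits, sizeof s);                /* reinterpret the bits as a float */
    return s;
}

int main(void) {
    const float log2e = 1.44269504f;   /* log2(e) */
    const float ln2   = 0.693147181f;  /* ln(2)   */

    /* Range reduction: x = n*ln2 + t, so exp(x) = 2^n * exp(t). */
    const float x = -1.25f;
    const int   n = (int)lrintf(x * log2e);
    const float t = x - (float)n * ln2;

    /* Stand-in for the kernels' polynomial: p(t) ~= (exp(t) - 1) / t,
     * so t*p(t) carries terms up to t^5 (the "p5" in the kernel names). */
    const float p = 1.0f + t * (0.5f + t * (1.0f / 6.0f + t * (1.0f / 24.0f + t / 120.0f)));

    /* The remaining two hit lines:
     *   vt7 = _mm256_mul_ps(vt7, vs7);          t -> t*s
     *   vf7 = _mm256_fmadd_ps(vt7, vp7, vs7);   f = (t*s)*p + s = s*(1 + t*p(t)) */
    const float s  = scale_2_pow_n(n);
    const float ts = t * s;
    const float f  = fmaf(ts, p, s);

    printf("approx %.6f  reference expf %.6f\n", f, expf(x));
    return 0;
}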
