Searched refs:vn7 (Results 1 – 25 of 101) sorted by relevance

/external/XNNPACK/src/f32-raddexpminusmax/gen/
avx2-p5-x64.c
73 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64() local
84 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64()
94 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64()
105 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64()
114 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64()
avx2-p5-x64-acc2.c
74 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64_acc2() local
85 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64_acc2()
95 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64_acc2()
106 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64_acc2()
115 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64_acc2()
avx2-p5-x64-acc4.c
76 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64_acc4() local
87 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64_acc4()
97 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64_acc4()
108 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64_acc4()
117 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x64_acc4()
avx2-p5-x72.c
75 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x72() local
87 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x72()
98 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x72()
110 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x72()
120 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x72()
avx2-p5-x72-acc3.c
77 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x72_acc3() local
89 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x72_acc3()
100 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x72_acc3()
112 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x72_acc3()
122 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x72_acc3()
avx2-p5-x80.c
77 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80() local
90 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80()
102 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80()
115 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80()
126 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80()
avx2-p5-x80-acc2.c
78 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80_acc2() local
91 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80_acc2()
103 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80_acc2()
116 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80_acc2()
127 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80_acc2()
avx2-p5-x80-acc5.c
81 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80_acc5() local
94 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80_acc5()
106 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80_acc5()
119 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80_acc5()
130 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x80_acc5()
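Every AVX2 match above is the same five-statement sequence, repeated once per 8-float register group (vn7/vs7/vt7 belong to the eighth group). A minimal sketch assembling those statements into one routine; the constant values are assumptions (the standard log2(e), a magic rounding bias, and a hi/lo Cody-Waite split of -ln(2)), and the generated kernels load them from parameter structs rather than defining them inline:

#include <immintrin.h>

/* Range reduction for exp(): given x, produce s = 2^round(x*log2e) and the
   reduced argument t = x - round(x*log2e)*ln2. Constants are assumptions. */
static inline __m256 exp_reduce_avx2(__m256 vx7, __m256* vs7_out) {
  const __m256 vlog2e        = _mm256_set1_ps(0x1.715476p+0f);
  const __m256 vmagic_bias   = _mm256_set1_ps(0x1.8000FEp23f);
  const __m256 vminus_ln2_hi = _mm256_set1_ps(-0x1.62E43p-1f);
  const __m256 vminus_ln2_lo = _mm256_set1_ps(0x1.05C61p-29f);

  /* n = round(x * log2e): adding the magic bias leaves round(x*log2e)
     sitting in the low mantissa bits of vn7. */
  __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias);

  /* s = 2^n: shift those bits straight into the float exponent field. */
  *vs7_out = _mm256_castsi256_ps(
      _mm256_slli_epi32(_mm256_castps_si256(vn7), 23));

  /* Recover n as an ordinary float. */
  vn7 = _mm256_sub_ps(vn7, vmagic_bias);

  /* t = x - n*ln2, with ln2 split into hi and lo parts for precision. */
  __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7);
  return _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7);
}

The caller then evaluates the degree-5 polynomial (the "p5" in the file names) in t and multiplies by s to finish exp(x).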
/external/XNNPACK/src/f32-vscaleexpminusmax/gen/
avx2-p5-x64.c
75 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x64() local
86 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x64()
96 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x64()
107 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x64()
116 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x64()
avx2-p5-x72.c
77 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x72() local
89 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x72()
100 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x72()
112 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x72()
122 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x72()
avx2-p5-x80.c
79 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x80() local
92 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x80()
104 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x80()
117 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x80()
128 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x80()
avx2-p5-x88.c
81 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x88() local
95 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x88()
108 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x88()
122 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x88()
134 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x88()
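The vscaleexpminusmax kernels apply the identical reduction before scaling each exp(x - max) term. For readers without the intrinsics in their head, here is a scalar walk-through of the same steps; the constants mirror the sketch above and the polynomial coefficients are illustrative stand-ins, not the ones in the generated files:

#include <math.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

float expf_p5(float x) {
  const float log2e        = 0x1.715476p+0f;
  const float magic_bias   = 0x1.8000FEp23f;
  const float minus_ln2_hi = -0x1.62E43p-1f;
  const float minus_ln2_lo = 0x1.05C61p-29f;

  float n = x * log2e + magic_bias;  /* n now encodes round(x*log2e) */

  uint32_t bits;                     /* s = 2^round(x*log2e) */
  memcpy(&bits, &n, sizeof bits);
  bits <<= 23;                       /* low bits -> float exponent field */
  float s;
  memcpy(&s, &bits, sizeof s);

  n -= magic_bias;                   /* back to an ordinary float */
  float t = n * minus_ln2_hi + x;    /* t = x - n*ln2 (hi part) */
  t = n * minus_ln2_lo + t;          /* ...plus the lo correction */

  float p = 0x1.0e4020p-7f;          /* Horner, degree 5 down to degree 1 */
  p = p * t + 0x1.573e2ep-5f;
  p = p * t + 0x1.555e66p-3f;
  p = p * t + 0x1.fffdb6p-2f;
  p = p * t + 1.0f;

  return s + (t * s) * p;            /* exp(x) = s * (1 + t*p) */
}

int main(void) {
  printf("%g vs %g\n", expf_p5(-1.5f), exp(-1.5));  /* both ~0.22313 */
  return 0;
}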
/external/XNNPACK/src/f32-raddstoreexpminusmax/gen/
avx2-p5-x64-acc2.c
75 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64_acc2() local
86 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64_acc2()
96 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64_acc2()
107 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64_acc2()
116 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64_acc2()
avx2-p5-x64.c
74 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64() local
85 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64()
95 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64()
106 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64()
115 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64()
avx2-p5-x64-acc4.c
77 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64_acc4() local
88 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64_acc4()
98 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64_acc4()
109 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64_acc4()
118 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x64_acc4()
avx2-p5-x72.c
76 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x72() local
88 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x72()
99 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x72()
111 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x72()
121 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x72()
avx2-p5-x72-acc3.c
78 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x72_acc3() local
90 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x72_acc3()
101 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x72_acc3()
113 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x72_acc3()
123 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x72_acc3()
avx2-p5-x80-acc5.c
82 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80_acc5() local
95 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80_acc5()
107 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80_acc5()
120 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80_acc5()
131 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80_acc5()
avx2-p5-x80.c
78 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80() local
91 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80()
103 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80()
116 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80()
127 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80()
avx2-p5-x80-acc2.c
79 __m256 vn7 = _mm256_fmadd_ps(vx7, vlog2e, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80_acc2() local
92 const __m256 vs7 = _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_castps_si256(vn7), 23)); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80_acc2()
104 vn7 = _mm256_sub_ps(vn7, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80_acc2()
117 __m256 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80_acc2()
128 vt7 = _mm256_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x80_acc2()
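The _acc2/_acc3/_acc4/_acc5 suffixes across these radd kernels indicate how many independent partial sums the reduction carries, so consecutive accumulations do not serialize on a single register's add latency. A minimal sketch of the idea under assumed names; the real kernels accumulate the exp(x - max) terms computed above rather than raw loads:

#include <immintrin.h>
#include <stddef.h>

/* Sum n floats (n a multiple of 16) using two independent accumulators,
   then combine and reduce horizontally to a scalar. */
float sum_with_two_accumulators(const float* x, size_t n) {
  __m256 vacc0 = _mm256_setzero_ps();
  __m256 vacc1 = _mm256_setzero_ps();
  for (size_t i = 0; i < n; i += 16) {
    vacc0 = _mm256_add_ps(vacc0, _mm256_loadu_ps(x + i));
    vacc1 = _mm256_add_ps(vacc1, _mm256_loadu_ps(x + i + 8));
  }
  __m256 vacc = _mm256_add_ps(vacc0, vacc1);  /* combine partial sums */

  /* Horizontal reduction: 8 lanes -> 4 -> 2 -> 1. */
  __m128 v = _mm_add_ps(_mm256_castps256_ps128(vacc),
                        _mm256_extractf128_ps(vacc, 1));
  v = _mm_add_ps(v, _mm_movehl_ps(v, v));
  v = _mm_add_ss(v, _mm_movehdup_ps(v));
  return _mm_cvtss_f32(v);
}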
/external/XNNPACK/src/f32-raddextexp/gen/
avx512f-p5-scalef-x128.c
62 const __m512 vn7 = _mm512_roundscale_ps(_mm512_mul_ps(vx7, vlog2e), 0); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128() local
73 __m512 vt7 = _mm512_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128()
82 vt7 = _mm512_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128()
147 vmax_e0 = _mm512_max_ps(vmax_e0, vn7); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128()
157 const __m512 vdelta_e7 = _mm512_sub_ps(vn7, vmax_e0); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128()
avx512f-p5-scalef-x128-acc2.c
64 const __m512 vn7 = _mm512_roundscale_ps(_mm512_mul_ps(vx7, vlog2e), 0); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc2() local
75 __m512 vt7 = _mm512_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc2()
84 vt7 = _mm512_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc2()
149 vmax_e1 = _mm512_max_ps(vmax_e1, vn7); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc2()
160 const __m512 vdelta_e7 = _mm512_sub_ps(vn7, vmax_e1); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc2()
avx512f-p5-scalef-x144.c
63 const __m512 vn7 = _mm512_roundscale_ps(_mm512_mul_ps(vx7, vlog2e), 0); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144() local
75 __m512 vt7 = _mm512_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144()
85 vt7 = _mm512_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144()
156 vmax_e0 = _mm512_max_ps(vmax_e0, vn7); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144()
167 const __m512 vdelta_e7 = _mm512_sub_ps(vn7, vmax_e0); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144()
avx512f-p5-scalef-x144-acc3.c
67 const __m512 vn7 = _mm512_roundscale_ps(_mm512_mul_ps(vx7, vlog2e), 0); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144_acc3() local
79 __m512 vt7 = _mm512_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144_acc3()
89 vt7 = _mm512_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144_acc3()
160 vmax_e1 = _mm512_max_ps(vmax_e1, vn7); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144_acc3()
173 const __m512 vdelta_e7 = _mm512_sub_ps(vn7, vmax_e1); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144_acc3()
avx512f-p5-scalef-x160-acc2.c
66 const __m512 vn7 = _mm512_roundscale_ps(_mm512_mul_ps(vx7, vlog2e), 0); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc2() local
79 __m512 vt7 = _mm512_fmadd_ps(vn7, vminus_ln2_hi, vx7); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc2()
90 vt7 = _mm512_fmadd_ps(vn7, vminus_ln2_lo, vt7); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc2()
167 vmax_e1 = _mm512_max_ps(vmax_e1, vn7); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc2()
180 const __m512 vdelta_e7 = _mm512_sub_ps(vn7, vmax_e1); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc2()
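Unlike the AVX2 kernels, the raddextexp kernels never subtract a precomputed maximum: each vn7 is kept as a separate exponent, the running maximum exponent is tracked (the vmax_e lines above), and terms are rescaled by their distance from it (vdelta_e7) via scalef before being added. A minimal sketch of one such step under assumed names, with a crude 1 + t stand-in for the elided degree-5 polynomial:

#include <immintrin.h>

/* Fold the 16 floats in vx7 into the (vacc, vmax_e) pair, where the running
   sum is represented as vacc * 2^vmax_e per lane. Constants are assumptions. */
static inline void raddextexp_step(__m512 vx7, __m512* vacc, __m512* vmax_e) {
  const __m512 vlog2e        = _mm512_set1_ps(0x1.715476p+0f);
  const __m512 vminus_ln2_hi = _mm512_set1_ps(-0x1.62E43p-1f);
  const __m512 vminus_ln2_lo = _mm512_set1_ps(0x1.05C61p-29f);

  /* n = round(x * log2e); imm 0 keeps zero fraction bits and rounds to
     nearest-even, as in the matched lines. */
  const __m512 vn7 = _mm512_roundscale_ps(_mm512_mul_ps(vx7, vlog2e), 0);

  /* t = x - n*ln2, the same hi/lo reduction as the AVX2 kernels. */
  __m512 vt7 = _mm512_fmadd_ps(vn7, vminus_ln2_hi, vx7);
  vt7 = _mm512_fmadd_ps(vn7, vminus_ln2_lo, vt7);

  /* Crude stand-in for the degree-5 polynomial evaluated in vt7. */
  const __m512 vp7 = _mm512_add_ps(vt7, _mm512_set1_ps(1.0f));

  /* Raise the running maximum exponent, then rescale both the accumulator
     and the new term by their (integer) distance from it before adding;
     scalef(a, b) computes a * 2^b for integral b. */
  const __m512 vmax = _mm512_max_ps(*vmax_e, vn7);
  const __m512 vdelta_acc = _mm512_sub_ps(*vmax_e, vmax);
  const __m512 vdelta_e7  = _mm512_sub_ps(vn7, vmax);
  *vacc = _mm512_add_ps(_mm512_scalef_ps(*vacc, vdelta_acc),
                        _mm512_scalef_ps(vp7, vdelta_e7));
  *vmax_e = vmax;
}

Because the exponent is carried separately, this style of reduction stays in range even when the inputs would overflow a plain exp(), which is why these kernels can skip the max-subtraction pass entirely.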
