Home
last modified time | relevance | path

Searched refs:vx11 (Results 1 – 25 of 28) sorted by relevance

12

/external/XNNPACK/src/f32-raddexpminusmax/gen/
Davx2-p5-x96.c71 const __m256 vx11 = _mm256_sub_ps(vi11, vi_max); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96() local
85 __m256 vn11 = _mm256_fmadd_ps(vx11, vlog2e, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96()
129 __m256 vt11 = _mm256_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96()
240 vf11 = _mm256_andnot_ps(_mm256_cmp_ps(vx11, vdenorm_cutoff, _CMP_LT_OS), vf11); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96()
Davx2-p5-x96-acc2.c72 const __m256 vx11 = _mm256_sub_ps(vi11, vi_max); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96_acc2() local
86 __m256 vn11 = _mm256_fmadd_ps(vx11, vlog2e, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96_acc2()
130 __m256 vt11 = _mm256_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96_acc2()
241 vf11 = _mm256_andnot_ps(_mm256_cmp_ps(vx11, vdenorm_cutoff, _CMP_LT_OS), vf11); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96_acc2()
Davx2-p5-x96-acc3.c73 const __m256 vx11 = _mm256_sub_ps(vi11, vi_max); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96_acc3() local
87 __m256 vn11 = _mm256_fmadd_ps(vx11, vlog2e, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96_acc3()
131 __m256 vt11 = _mm256_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96_acc3()
242 vf11 = _mm256_andnot_ps(_mm256_cmp_ps(vx11, vdenorm_cutoff, _CMP_LT_OS), vf11); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96_acc3()
Davx2-p5-x96-acc6.c76 const __m256 vx11 = _mm256_sub_ps(vi11, vi_max); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96_acc6() local
90 __m256 vn11 = _mm256_fmadd_ps(vx11, vlog2e, vmagic_bias); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96_acc6()
134 __m256 vt11 = _mm256_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96_acc6()
245 vf11 = _mm256_andnot_ps(_mm256_cmp_ps(vx11, vdenorm_cutoff, _CMP_LT_OS), vf11); in xnn_f32_raddexpminusmax_ukernel__avx2_p5_x96_acc6()
Davx512f-p5-scalef-x192-acc2.c69 const __m512 vx11 = _mm512_sub_ps(vi11, vi_max); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2() local
83 const __m512 vn11 = _mm512_roundscale_ps(_mm512_mul_ps(vx11, vlog2e), 0); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
98 __m512 vt11 = _mm512_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
Davx512f-p5-scalef-x192.c68 const __m512 vx11 = _mm512_sub_ps(vi11, vi_max); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192() local
82 const __m512 vn11 = _mm512_roundscale_ps(_mm512_mul_ps(vx11, vlog2e), 0); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192()
97 __m512 vt11 = _mm512_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192()
Davx512f-p5-scalef-x192-acc3.c70 const __m512 vx11 = _mm512_sub_ps(vi11, vi_max); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3() local
84 const __m512 vn11 = _mm512_roundscale_ps(_mm512_mul_ps(vx11, vlog2e), 0); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
99 __m512 vt11 = _mm512_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
Davx512f-p5-scalef-x192-acc6.c73 const __m512 vx11 = _mm512_sub_ps(vi11, vi_max); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc6() local
87 const __m512 vn11 = _mm512_roundscale_ps(_mm512_mul_ps(vx11, vlog2e), 0); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc6()
102 __m512 vt11 = _mm512_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc6()
/external/XNNPACK/src/f32-raddstoreexpminusmax/gen/
Davx2-p5-x96-acc6.c77 const __m256 vx11 = _mm256_sub_ps(vi11, vi_max); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x96_acc6() local
91 __m256 vn11 = _mm256_fmadd_ps(vx11, vlog2e, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x96_acc6()
135 __m256 vt11 = _mm256_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x96_acc6()
246 vf11 = _mm256_andnot_ps(_mm256_cmp_ps(vx11, vdenorm_cutoff, _CMP_LT_OS), vf11); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x96_acc6()
Davx2-p5-x96-acc3.c74 const __m256 vx11 = _mm256_sub_ps(vi11, vi_max); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x96_acc3() local
88 __m256 vn11 = _mm256_fmadd_ps(vx11, vlog2e, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x96_acc3()
132 __m256 vt11 = _mm256_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x96_acc3()
243 vf11 = _mm256_andnot_ps(_mm256_cmp_ps(vx11, vdenorm_cutoff, _CMP_LT_OS), vf11); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x96_acc3()
Davx2-p5-x96-acc2.c73 const __m256 vx11 = _mm256_sub_ps(vi11, vi_max); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x96_acc2() local
87 __m256 vn11 = _mm256_fmadd_ps(vx11, vlog2e, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x96_acc2()
131 __m256 vt11 = _mm256_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x96_acc2()
242 vf11 = _mm256_andnot_ps(_mm256_cmp_ps(vx11, vdenorm_cutoff, _CMP_LT_OS), vf11); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x96_acc2()
Davx2-p5-x96.c72 const __m256 vx11 = _mm256_sub_ps(vi11, vi_max); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x96() local
86 __m256 vn11 = _mm256_fmadd_ps(vx11, vlog2e, vmagic_bias); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x96()
130 __m256 vt11 = _mm256_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x96()
241 vf11 = _mm256_andnot_ps(_mm256_cmp_ps(vx11, vdenorm_cutoff, _CMP_LT_OS), vf11); in xnn_f32_raddstoreexpminusmax_ukernel__avx2_p5_x96()
Davx512f-p5-scalef-x192-acc2.c70 const __m512 vx11 = _mm512_sub_ps(vi11, vi_max); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2() local
84 const __m512 vn11 = _mm512_roundscale_ps(_mm512_mul_ps(vx11, vlog2e), 0); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
99 __m512 vt11 = _mm512_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
Davx512f-p5-scalef-x192-acc3.c71 const __m512 vx11 = _mm512_sub_ps(vi11, vi_max); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3() local
85 const __m512 vn11 = _mm512_roundscale_ps(_mm512_mul_ps(vx11, vlog2e), 0); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
100 __m512 vt11 = _mm512_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
Davx512f-p5-scalef-x192.c69 const __m512 vx11 = _mm512_sub_ps(vi11, vi_max); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192() local
83 const __m512 vn11 = _mm512_roundscale_ps(_mm512_mul_ps(vx11, vlog2e), 0); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192()
98 __m512 vt11 = _mm512_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192()
Davx512f-p5-scalef-x192-acc6.c74 const __m512 vx11 = _mm512_sub_ps(vi11, vi_max); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc6() local
88 const __m512 vn11 = _mm512_roundscale_ps(_mm512_mul_ps(vx11, vlog2e), 0); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc6()
103 __m512 vt11 = _mm512_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc6()
/external/XNNPACK/src/f32-vscaleexpminusmax/gen/
Davx2-p5-x96.c73 const __m256 vx11 = _mm256_sub_ps(vi11, vi_max); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x96() local
87 __m256 vn11 = _mm256_fmadd_ps(vx11, vlog2e, vmagic_bias); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x96()
131 __m256 vt11 = _mm256_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x96()
242 vf11 = _mm256_andnot_ps(_mm256_cmp_ps(vx11, vdenorm_cutoff, _CMP_LT_OS), vf11); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x96()
Davx512f-p5-scalef-x192.c69 const __m512 vx11 = _mm512_sub_ps(vi11, vi_max); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x192() local
83 __m512 vn11 = _mm512_roundscale_ps(_mm512_mul_ps(vx11, vlog2e), 0); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x192()
98 __m512 vt11 = _mm512_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x192()
/external/XNNPACK/src/f32-vscaleextexp/gen/
Davx512f-p5-scalef-x192.c55 const __m512 vx11 = _mm512_loadu_ps(x + 176); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x192() local
70 const __m512 vn11 = _mm512_roundscale_ps(_mm512_mul_ps(vx11, vlog2e), 0); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x192()
85 __m512 vt11 = _mm512_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x192()
Davx2-p5-x96.c61 const __m256 vx11 = _mm256_loadu_ps(x + 88); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96() local
76 const __m256 vn11 = _mm256_round_ps(_mm256_mul_ps(vx11, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
91 __m256 vt11 = _mm256_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_vscaleextexp_ukernel__avx2_p5_x96()
/external/XNNPACK/src/f32-raddextexp/gen/
Davx512f-p5-scalef-x192.c55 const __m512 vx11 = _mm512_loadu_ps(x + 176); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192() local
70 const __m512 vn11 = _mm512_roundscale_ps(_mm512_mul_ps(vx11, vlog2e), 0); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192()
85 __m512 vt11 = _mm512_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192()
Davx512f-p5-scalef-x192-acc2.c57 const __m512 vx11 = _mm512_loadu_ps(x + 176); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc2() local
72 const __m512 vn11 = _mm512_roundscale_ps(_mm512_mul_ps(vx11, vlog2e), 0); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc2()
87 __m512 vt11 = _mm512_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc2()
Davx512f-p5-scalef-x192-acc3.c59 const __m512 vx11 = _mm512_loadu_ps(x + 176); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc3() local
74 const __m512 vn11 = _mm512_roundscale_ps(_mm512_mul_ps(vx11, vlog2e), 0); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc3()
89 __m512 vt11 = _mm512_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc3()
Davx512f-p5-scalef-x192-acc6.c65 const __m512 vx11 = _mm512_loadu_ps(x + 176); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6() local
80 const __m512 vn11 = _mm512_roundscale_ps(_mm512_mul_ps(vx11, vlog2e), 0); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6()
95 __m512 vt11 = _mm512_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6()
Davx2-p5-x96.c59 const __m256 vx11 = _mm256_loadu_ps(x + 88); in xnn_f32_raddextexp_ukernel__avx2_p5_x96() local
74 const __m256 vn11 = _mm256_round_ps(_mm256_mul_ps(vx11, vlog2e), _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
89 __m256 vt11 = _mm256_fmadd_ps(vn11, vminus_ln2_hi, vx11); in xnn_f32_raddextexp_ukernel__avx2_p5_x96()

12