/external/XNNPACK/src/f32-raddextexp/gen/ |
D | avx512f-p5-scalef-x192-acc6.c |
    38  const __m512 vminus_inf = _mm512_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6() local
    46  __m512 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6()
    47  __m512 vacce1 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6()
    48  __m512 vacce2 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6()
    49  __m512 vacce3 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6()
    50  __m512 vacce4 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6()
    51  __m512 vacce5 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6()
|
D | avx512f-p5-scalef-x160-acc5.c |
    38  const __m512 vminus_inf = _mm512_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc5() local
    45  __m512 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc5()
    46  __m512 vacce1 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc5()
    47  __m512 vacce2 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc5()
    48  __m512 vacce3 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc5()
    49  __m512 vacce4 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc5()
|
D | avx512f-p5-scalef-x128-acc4.c |
    38  const __m512 vminus_inf = _mm512_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc4() local
    44  __m512 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc4()
    45  __m512 vacce1 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc4()
    46  __m512 vacce2 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc4()
    47  __m512 vacce3 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc4()
|
D | avx2-p5-x96-acc6.c |
    35  const __m256 vminus_inf = _mm256_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6() local
    50  __m256 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
    51  __m256 vacce1 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
    52  __m256 vacce2 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
    53  __m256 vacce3 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
    54  __m256 vacce4 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
    55  __m256 vacce5 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
|
D | avx2-p5-x64-acc4.c |
    35  const __m256 vminus_inf = _mm256_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4() local
    48  __m256 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
    49  __m256 vacce1 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
    50  __m256 vacce2 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
    51  __m256 vacce3 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
|
D | avx2-p5-x80-acc5.c |
    35  const __m256 vminus_inf = _mm256_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5() local
    49  __m256 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
    50  __m256 vacce1 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
    51  __m256 vacce2 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
    52  __m256 vacce3 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
    53  __m256 vacce4 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
|
D | avx512f-p5-scalef-x144-acc3.c |
    38  const __m512 vminus_inf = _mm512_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144_acc3() local
    43  __m512 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144_acc3()
    44  __m512 vacce1 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144_acc3()
    45  __m512 vacce2 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144_acc3()
|
D | avx512f-p5-scalef-x128-acc2.c |
    38  const __m512 vminus_inf = _mm512_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc2() local
    42  __m512 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc2()
    43  __m512 vacce1 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc2()
|
D | avx512f-p5-scalef-x192-acc3.c |
    38  const __m512 vminus_inf = _mm512_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc3() local
    43  __m512 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc3()
    44  __m512 vacce1 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc3()
    45  __m512 vacce2 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc3()
|
D | avx2-p5-x72-acc3.c |
    35  const __m256 vminus_inf = _mm256_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3() local
    47  __m256 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
    48  __m256 vacce1 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
    49  __m256 vacce2 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
|
D | avx512f-p5-scalef-x160-acc2.c |
    38  const __m512 vminus_inf = _mm512_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc2() local
    42  __m512 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc2()
    43  __m512 vacce1 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc2()
|
D | avx2-p5-x96-acc3.c |
    35  const __m256 vminus_inf = _mm256_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3() local
    47  __m256 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
    48  __m256 vacce1 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
    49  __m256 vacce2 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
|
D | avx512f-p5-scalef-x192-acc2.c |
    38  const __m512 vminus_inf = _mm512_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc2() local
    42  __m512 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc2()
    43  __m512 vacce1 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc2()
|
D | avx2-p5-x64-acc2.c |
    35  const __m256 vminus_inf = _mm256_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2() local
    46  __m256 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
    47  __m256 vacce1 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
|
D | avx2-p5-x80-acc2.c |
    35  const __m256 vminus_inf = _mm256_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2() local
    46  __m256 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
    47  __m256 vacce1 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
|
D | avx512f-p5-scalef-x128.c |
    38  const __m512 vminus_inf = _mm512_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128() local
    41  __m512 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128()
|
D | avx512f-p5-scalef-x144.c |
    38  const __m512 vminus_inf = _mm512_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144() local
    41  __m512 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144()
|
D | avx2-p5-x96-acc2.c |
    35  const __m256 vminus_inf = _mm256_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2() local
    46  __m256 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
    47  __m256 vacce1 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
|
D | avx512f-p5-scalef-x160.c |
    38  const __m512 vminus_inf = _mm512_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160() local
    41  __m512 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160()
|
D | avx2-p5-x64.c |
    35  const __m256 vminus_inf = _mm256_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx2_p5_x64() local
    45  __m256 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
|
D | avx2-p5-x72.c |
    35  const __m256 vminus_inf = _mm256_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx2_p5_x72() local
    45  __m256 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
|
D | avx512f-p5-scalef-x192.c |
    38  const __m512 vminus_inf = _mm512_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192() local
    41  __m512 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192()
|
D | avx2-p5-x80.c |
    35  const __m256 vminus_inf = _mm256_set1_ps(-INFINITY);  in xnn_f32_raddextexp_ukernel__avx2_p5_x80() local
    45  __m256 vacce0 = vminus_inf;  in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
|
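Every generated kernel above shows the same pattern: raddextexp computes sum(exp(x[i])) in extended-exponent form, carrying the running sum as a (value, exponent) pair so the total is value * 2^exponent and cannot overflow for large inputs. Each unrolled exponent accumulator vacceK is seeded with vminus_inf because v * 2^-INFINITY == 0: minus infinity is the exponent that makes the pair represent an empty sum. Below is a minimal scalar sketch of this scheme, under stated assumptions: the helper name is hypothetical, plain expf stands in for the kernels' degree-5 polynomial, and a single ln(2) constant replaces their hi/lo split.

    #include <math.h>
    #include <stddef.h>

    /* Hypothetical scalar model of the accumulation the kernels above
       vectorize: the running sum of exp(x[i]) is the pair (acc_v, acc_e),
       meaning acc_v * 2^acc_e.  Seeding acc_e with -INFINITY makes the
       pair represent zero, just like the vacceK = vminus_inf lines. */
    static void raddextexp_scalar(size_t n, const float* x, float sum[2]) {
      float acc_v = 0.0f;
      float acc_e = -INFINITY;  /* scalar counterpart of vminus_inf */
      for (size_t i = 0; i < n; i++) {
        /* exp(x) = 2^e * exp(r) with e = round(x * log2(e)), so exp(r)
           stays in a narrow, overflow-free range around 1. */
        const float e = roundf(x[i] * 0x1.715476p+0f);  /* log2(e) */
        const float r = x[i] - e * 0x1.62E430p-1f;      /* ln(2), one constant */
        const float v = expf(r);  /* stand-in for the p5 polynomial */
        /* Rescale both terms to the larger exponent before adding;
           exp2f(-INFINITY) == 0.0f, so the first iteration loads (v, e). */
        const float emax = fmaxf(acc_e, e);
        acc_v = acc_v * exp2f(acc_e - emax) + v * exp2f(e - emax);
        acc_e = emax;
      }
      sum[0] = acc_v;  /* total == sum[0] * 2^sum[1] */
      sum[1] = acc_e;
    }

The accN variants are this loop split across N independent (value, exponent) pairs to hide FMA latency; the pairs are merged with the same max-and-rescale step before the final horizontal reduction.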
/external/XNNPACK/src/f32-raddextexp/ |
D | avx512f-p5-scalef.c.in |
    38  const __m512 vminus_inf = _mm512_set1_ps(-INFINITY);
    43  __m512 vacce${K} = vminus_inf;
|
D | avx2-p5.c.in |
    35  const __m256 vminus_inf = _mm256_set1_ps(-INFINITY);
    47  __m256 vacce${K} = vminus_inf;
|
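The two .c.in files above are the templates from which the per-width kernels in gen/ are produced; ${K} is the accumulator index filled in by XNNPACK's code generator. Presumably the initialization sits in a template loop along these lines (the $for syntax is assumed from xngen conventions, not quoted from these files):

    /* Assumed template context: emits one exponent accumulator per
       unroll lane, all seeded with -INFINITY. */
    $for K in range(ACCUMULATORS):
      __m512 vacce${K} = vminus_inf;

With ACCUMULATORS = 6 this would expand to exactly the vacce0 through vacce5 block listed for avx512f-p5-scalef-x192-acc6.c above.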