/external/XNNPACK/src/f32-sigmoid/gen/

D | avx-rr2-p5-div-x16.c
    56 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x16() | local
    57 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x16()

D | avx-rr2-p5-nr2-x16.c
    57 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x16() | local
    58 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x16()

D | avx-rr2-p5-div-x24.c
    59 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x24() | local
    60 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x24()

D | avx-rr2-p5-div-x32.c
    62 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x32() | local
    63 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x32()

D | avx-rr2-p5-nr2-x24.c
    60 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x24() | local
    61 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x24()

D | avx-rr2-p5-nr2-x32.c
    63 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x32() | local
    64 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x32()

D | avx-rr2-p5-div-x40.c
    65 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x40() | local
    66 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x40()

D | avx-rr2-p5-nr2-x40.c
    66 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x40() | local
    67 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x40()

D | avx-rr2-p5-div-x48.c
    68 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x48() | local
    69 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x48()

D | avx-rr2-p5-div-x56.c
    71 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x56() | local
    72 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x56()

D | avx-rr2-p5-div-x64.c
    74 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x64() | local
    75 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x64()

D | avx-rr2-p5-nr2-x48.c
    69 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x48() | local
    70 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x48()

D | avx-rr2-p5-nr2-x56.c
    72 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x56() | local
    73 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x56()

D | avx-rr2-p5-div-x72.c
    77 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x72() | local
    78 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x72()

D | avx-rr2-p5-nr2-x64.c
    75 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x64() | local
    76 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x64()

D | avx-rr2-p5-div-x80.c
    80 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x80() | local
    81 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x80()

D | avx-rr2-p5-nr2-x72.c
    78 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72() | local
    79 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()

D | avx-rr2-p5-nr2-x80.c
    81 | …const __m128 vs_hi1 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn1, … | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x80() | local
    82 | const __m256 vs1 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo1), vs_hi1, 1); | in xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x80()
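
Every hit above is the same pattern in XNNPACK's plain-AVX sigmoid kernels: the 256-bit vector vn is split into two 128-bit halves, each half's lanes are shifted left by 23 bits with SSE2 integer instructions so the value lands in the float exponent field, and the halves are merged back with vinsertf128 to form the scale factor vs. The sketch below only illustrates that split/shift/merge step; the wrapper name scale_from_exponent_avx and the assumption that vn already carries the magic-bias-encoded exponent in its low bits are mine, not taken from the listing, and it is not the XNNPACK kernel code itself.

    /*
     * Minimal sketch of the matched pattern, assuming vn's lanes already hold the
     * biased exponent of 2**n in their low bits (the magic-bias encoding these
     * kernels rely on). Plain AVX has no 256-bit integer shift, so each 128-bit
     * half is shifted with SSE2 and the halves are reassembled with vinsertf128.
     */
    #include <immintrin.h>

    static inline __m256 scale_from_exponent_avx(__m256 vn) {
      /* Low half: reinterpret as int32, shift the exponent into bits 30:23, back to float. */
      const __m128 vs_lo = _mm_castsi128_ps(
          _mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn)), 23));
      /* High half: the same shift on the upper 128 bits extracted with vextractf128. */
      const __m128 vs_hi = _mm_castsi128_ps(
          _mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn, 1)), 23));
      /* Reassemble the two halves; each lane of the result is the float 2**n. */
      return _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo), vs_hi, 1);
    }

With AVX2 the same reconstruction can be done in a single 256-bit shift (_mm256_slli_epi32), so only the plain-AVX kernel variants need this split/shift/merge detour.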