/external/XNNPACK/src/f32-sigmoid/gen/

Every hit below matches the same two-line pattern: vs_hi2 is declared as a local, then consumed on the next line to assemble the 256-bit vector vs2:

  …const __m128 vs_hi2 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn2, …
  const __m256 vs2 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo2), vs_hi2, 1);

File                  | Lines | Function
avx-rr2-p5-div-x24.c  | 62-63 | xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x24()
avx-rr2-p5-div-x32.c  | 65-66 | xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x32()
avx-rr2-p5-nr2-x24.c  | 63-64 | xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x24()
avx-rr2-p5-nr2-x32.c  | 66-67 | xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x32()
avx-rr2-p5-div-x40.c  | 68-69 | xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x40()
avx-rr2-p5-nr2-x40.c  | 69-70 | xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x40()
avx-rr2-p5-div-x48.c  | 71-72 | xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x48()
avx-rr2-p5-div-x56.c  | 74-75 | xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x56()
avx-rr2-p5-div-x64.c  | 77-78 | xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x64()
avx-rr2-p5-nr2-x48.c  | 72-73 | xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x48()
avx-rr2-p5-nr2-x56.c  | 75-76 | xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x56()
avx-rr2-p5-div-x72.c  | 80-81 | xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x72()
avx-rr2-p5-nr2-x64.c  | 78-79 | xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x64()
avx-rr2-p5-div-x80.c  | 83-84 | xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x80()
avx-rr2-p5-nr2-x72.c  | 81-82 | xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72()
avx-rr2-p5-nr2-x80.c  | 84-85 | xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x80()
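The truncated declarations all cut off mid-expression, but the shape of the pattern is visible: these plain-AVX (pre-AVX2) kernels have no 256-bit integer shift, so they split vn2 into two 128-bit halves, shift each half's bits with SSE2, and stitch the result back into the 256-bit vs2. Below is a minimal, self-contained sketch of that lane-split trick, not the XNNPACK source: the helper name, the 23-bit shift amount (elided by the "…" above), and the test values in main are assumptions for illustration.

/*
 * Minimal sketch (assumed, not the XNNPACK source): rebuilding vs = 2**n per
 * lane on AVX1, where only SSE2 offers the 32-bit integer shift.
 * Build with: cc -mavx sketch.c
 */
#include <immintrin.h>
#include <stdio.h>

/* Hypothetical helper name; the real kernels inline this per unrolled block. */
static __m256 scale_from_biased_exponent(__m256 vn) {
  /* Low 128-bit half: shift the biased exponent into the float exponent field. */
  const __m128 vs_lo = _mm_castsi128_ps(
      _mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn)), 23));
  /* High 128-bit half: extract lane 1, apply the same SSE2 shift
     (the step the truncated vs_hi2 declarations above perform). */
  const __m128 vs_hi = _mm_castsi128_ps(
      _mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn, 1)), 23));
  /* Reassemble the 256-bit result, matching the vs2 = _mm256_insertf128_ps(...) lines. */
  return _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo), vs_hi, 1);
}

int main(void) {
  /* Lanes hold biased exponents n + 127 for n = 0..7, i.e. the low bits a
     magic-bias rounding step would leave behind (values chosen for illustration). */
  const __m256i vbiased = _mm256_setr_epi32(127, 128, 129, 130, 131, 132, 133, 134);
  float out[8];
  _mm256_storeu_ps(out, scale_from_biased_exponent(_mm256_castsi256_ps(vbiased)));
  for (int i = 0; i < 8; i++) {
    printf("2^%d = %g\n", i, (double) out[i]);  /* expect 1, 2, 4, ..., 128 */
  }
  return 0;
}

The split disappears on AVX2, where _mm256_slli_epi32 shifts all eight lanes at once, so only the plain-AVX variants of these kernels need the vs_lo2/vs_hi2 pair.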