Searched refs:vs_lo4 (Results 1 – 12 of 12) sorted by relevance
/external/XNNPACK/src/f32-sigmoid/gen/
avx-rr2-p5-div-x40.c (xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x40):
    73: …const __m128 vs_lo4 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn4)…  [local]
    75: const __m256 vs4 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo4), vs_hi4, 1);

avx-rr2-p5-nr2-x40.c (xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x40):
    74: …const __m128 vs_lo4 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn4)…  [local]
    76: const __m256 vs4 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo4), vs_hi4, 1);

avx-rr2-p5-div-x48.c (xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x48):
    76: …const __m128 vs_lo4 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn4)…  [local]
    78: const __m256 vs4 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo4), vs_hi4, 1);

avx-rr2-p5-div-x56.c (xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x56):
    79: …const __m128 vs_lo4 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn4)…  [local]
    81: const __m256 vs4 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo4), vs_hi4, 1);

avx-rr2-p5-div-x64.c (xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x64):
    82: …const __m128 vs_lo4 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn4)…  [local]
    84: const __m256 vs4 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo4), vs_hi4, 1);

avx-rr2-p5-nr2-x48.c (xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x48):
    77: …const __m128 vs_lo4 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn4)…  [local]
    79: const __m256 vs4 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo4), vs_hi4, 1);

avx-rr2-p5-nr2-x56.c (xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x56):
    80: …const __m128 vs_lo4 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn4)…  [local]
    82: const __m256 vs4 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo4), vs_hi4, 1);

avx-rr2-p5-div-x72.c (xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x72):
    85: …const __m128 vs_lo4 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn4)…  [local]
    87: const __m256 vs4 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo4), vs_hi4, 1);

avx-rr2-p5-nr2-x64.c (xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x64):
    83: …const __m128 vs_lo4 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn4)…  [local]
    85: const __m256 vs4 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo4), vs_hi4, 1);

avx-rr2-p5-div-x80.c (xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x80):
    88: …const __m128 vs_lo4 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn4)…  [local]
    90: const __m256 vs4 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo4), vs_hi4, 1);

avx-rr2-p5-nr2-x72.c (xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x72):
    86: …const __m128 vs_lo4 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn4)…  [local]
    88: const __m256 vs4 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo4), vs_hi4, 1);

avx-rr2-p5-nr2-x80.c (xnn_f32_sigmoid_ukernel__avx_rr2_p5_nr2_x80):
    89: …const __m128 vs_lo4 = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn4)…  [local]
    91: const __m256 vs4 = _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo4), vs_hi4, 1);
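
All twelve hits are the same pair of generated lines, one per unrolled f32 sigmoid microkernel. Below is a minimal sketch of what that pair computes, assuming the usual magic-bias exp2 reconstruction; the helper name scale_from_biased_n is made up for illustration, and the 23-bit shift plus the _mm256_extractf128_ps half feeding vs_hi4 are inferred, since the search display truncates the vs_lo4 line.

    #include <immintrin.h>

    // Hypothetical helper illustrating the vs_lo4/vs4 pattern in the hits above.
    // vn is assumed to hold n plus a magic bias, so n sits in vn's low mantissa bits.
    static inline __m256 scale_from_biased_n(__m256 vn) {
      // Shift n into the exponent field of each lane; reinterpreted as float, each lane is 2**n.
      const __m128 vs_lo = _mm_castsi128_ps(
          _mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vn)), 23));
      const __m128 vs_hi = _mm_castsi128_ps(
          _mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(vn, 1)), 23));
      // Reassemble the two 128-bit halves into one 256-bit vector (the vs4 line in the hits).
      return _mm256_insertf128_ps(_mm256_castps128_ps256(vs_lo), vs_hi, 1);
    }

The lo/hi split is forced by AVX1: _mm_slli_epi32 only operates on 128-bit vectors, so each half of vn is shifted separately and then recombined with _mm256_insertf128_ps, which is exactly the vs4 line the search matches in every file.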