Home
last modified time | relevance | path

Searched refs:vidx3_lo (Results 1 – 3 of 3) sorted by relevance

/external/XNNPACK/src/f32-velu/gen/
velu-avx-rr2-lut16-p3-x32.c:151 const __m128i vidx3_lo = _mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vidx3)), 2); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x32() local
154 const uint64_t vidx3_ll = (uint64_t) _mm_cvtsi128_si64(vidx3_lo); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x32()
155 const uint64_t vidx3_lh = (uint64_t) _mm_extract_epi64(vidx3_lo, 1); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x32()
167 …const void*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + (uint32_t) _mm_cvtsi128_si32(vidx3_lo))); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x32()
168 …st void*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + (uint32_t) _mm_extract_epi32(vidx3_lo, 2))); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x32()
171 …int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + (uint32_t) _mm_extract_epi32(vidx3_lo, 1))), 1); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x32()
172 …int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + (uint32_t) _mm_extract_epi32(vidx3_lo, 3))), 1); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x32()
velu-avx-rr2-lut16-p3-x40.c:154 const __m128i vidx3_lo = _mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vidx3)), 2); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x40() local
157 const uint64_t vidx3_ll = (uint64_t) _mm_cvtsi128_si64(vidx3_lo); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x40()
158 const uint64_t vidx3_lh = (uint64_t) _mm_extract_epi64(vidx3_lo, 1); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x40()
170 …const void*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + (uint32_t) _mm_cvtsi128_si32(vidx3_lo))); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x40()
171 …st void*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + (uint32_t) _mm_extract_epi32(vidx3_lo, 2))); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x40()
174 …int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + (uint32_t) _mm_extract_epi32(vidx3_lo, 1))), 1); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x40()
175 …int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + (uint32_t) _mm_extract_epi32(vidx3_lo, 3))), 1); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x40()
velu-avx-rr2-lut16-p3-x48.c:157 const __m128i vidx3_lo = _mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(vidx3)), 2); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x48() local
160 const uint64_t vidx3_ll = (uint64_t) _mm_cvtsi128_si64(vidx3_lo); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x48()
161 const uint64_t vidx3_lh = (uint64_t) _mm_extract_epi64(vidx3_lo, 1); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x48()
173 …const void*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + (uint32_t) _mm_cvtsi128_si32(vidx3_lo))); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x48()
174 …st void*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + (uint32_t) _mm_extract_epi32(vidx3_lo, 2))); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x48()
177 …int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + (uint32_t) _mm_extract_epi32(vidx3_lo, 1))), 1); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x48()
178 …int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + (uint32_t) _mm_extract_epi32(vidx3_lo, 3))), 1); in xnn_f32_velu_ukernel__avx_rr2_lut16_p3_x48()