
Searched refs:ven1 (Results 1 – 25 of 50) sorted by relevance
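
Reading the hits: in XNNPACK's generated f32 ELU microkernels, ven1 holds the exponent bits used to reconstruct the scale s = 2**n for the second register block of the unrolled loop (the trailing 1; block 0 has a matching ven0). The file names encode the variant: rr1/rr2 for one- or two-constant range reduction, lutK-pM for a K-entry exp2 table plus a degree-M polynomial, perm/gather for how the table is read, and xW for the number of elements handled per loop iteration.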

/external/XNNPACK/src/f32-velu/gen/

velu-avx-rr2-lut4-p4-perm-x16.c  (in xnn_f32_velu_ukernel__avx_rr2_lut4_p4_perm_x16)
   60  __m256 ven1 = _mm256_andnot_ps(vindex_mask, vn1);
   62  const __m128 ven1_lo = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(ven1)), 21));
   67  const __m128 ven1_hi = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(ven1, 1)), 21));
   72  ven1 = _mm256_insertf128_ps(_mm256_castps128_ps256(ven1_lo), ven1_hi, 1);
   77  __m256 vs1 = _mm256_mul_ps(vl1, ven1);

velu-avx-rr2-lut4-p4-perm-x24.c  (in xnn_f32_velu_ukernel__avx_rr2_lut4_p4_perm_x24)
   63  __m256 ven1 = _mm256_andnot_ps(vindex_mask, vn1);
   65  const __m128 ven1_lo = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(ven1)), 21));
   73  const __m128 ven1_hi = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(ven1, 1)), 21));
   80  ven1 = _mm256_insertf128_ps(_mm256_castps128_ps256(ven1_lo), ven1_hi, 1);
   87  __m256 vs1 = _mm256_mul_ps(vl1, ven1);

velu-avx-rr2-lut4-p4-perm-x32.c  (in xnn_f32_velu_ukernel__avx_rr2_lut4_p4_perm_x32)
   66  __m256 ven1 = _mm256_andnot_ps(vindex_mask, vn1);
   68  const __m128 ven1_lo = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(ven1)), 21));
   79  const __m128 ven1_hi = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(ven1, 1)), 21));
   88  ven1 = _mm256_insertf128_ps(_mm256_castps128_ps256(ven1_lo), ven1_hi, 1);
   97  __m256 vs1 = _mm256_mul_ps(vl1, ven1);
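
Note on the AVX entries above: AVX1 has no 256-bit integer shift, so these kernels clear the LUT index bits with _mm256_andnot_ps, shift each 128-bit half separately, stitch the halves back together, and apply the scale with a float multiply rather than the integer add used by the AVX2/AVX512F variants further down. A minimal sketch of that split-shift idiom, consolidated into a hypothetical helper (the shift count 21 matches these lut4 kernels):

    #include <immintrin.h>

    /* Hypothetical helper: shift every 32-bit lane of a __m256 (viewed as
     * integers) left by 21 using only AVX1, one 128-bit half at a time. */
    static inline __m256 slli21_avx(__m256 v) {
      const __m128 lo = _mm_castsi128_ps(
          _mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(v)), 21));
      const __m128 hi = _mm_castsi128_ps(
          _mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(v, 1)), 21));
      return _mm256_insertf128_ps(_mm256_castps128_ps256(lo), hi, 1);
    }
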
velu-wasm-rr2-lut16-p3-x2.c  (in xnn_f32_velu_ukernel__wasm_rr2_lut16_p3_x2)
   57  const uint32_t ven1 = fp32_to_bits(vn1) << 19;
   64  float vs1 = fp32_from_bits(xnn_table_exp2minus_k_over_16[vidx1] + ven1);
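
The scalar and wasm lut16 hits (this one and those below) all share the same two lines: ven1 is built by shifting the bits of the magic-biased float vn1 left by 19, then integer-added to a table entry to materialize the scale s = 2**n. Below is a self-contained C sketch of the trick. For simplicity it reconstructs 2**x directly, skipping the x*log2(e) step the kernels do first; the fp32_to_bits/fp32_from_bits helpers mirror the ones in the hits; the LUT is a stand-in built at runtime and constructed so the bit arithmetic works out (the kernels' precomputed xnn_table_exp2minus_k_over_16 plays this role, though its exact encoding is not shown in these results); and the magic bias 0x1.800000p19f is assumed from the lut16 scheme, since its ulp is exactly 1/16:

    #include <math.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    static uint32_t fp32_to_bits(float f) { uint32_t u; memcpy(&u, &f, sizeof u); return u; }
    static float fp32_from_bits(uint32_t u) { float f; memcpy(&f, &u, sizeof f); return f; }

    int main(void) {
      /* Stand-in LUT: entry k holds bits(2**(k/16)) pre-decremented by k << 19.
       * Adding ven restores k << 19 and drops floor(n) into the exponent
       * field, so the sum is exactly the bit pattern of 2**n. */
      uint32_t table[16];
      for (uint32_t k = 0; k < 16; k++) {
        table[k] = fp32_to_bits(exp2f((float) k / 16.0f)) - (k << 19);
      }

      const float x = -1.3f;                         /* demo input */
      const float vmagic_bias = 0x1.800000p19f;      /* ulp(bias) == 1/16 */
      const float vn = x + vmagic_bias;              /* n = round(16*x)/16, in the low bits */
      const uint32_t ven = fp32_to_bits(vn) << 19;   /* exponent-reconstruction bits */
      const uint32_t vidx = fp32_to_bits(vn) & 0xF;  /* fractional sixteenths -> LUT index */
      const float vs = fp32_from_bits(table[vidx] + ven);
      printf("s = %.8f, exp2f(n) = %.8f\n", vs, exp2f(vn - vmagic_bias));
      return 0;
    }

The full kernels then finish the ELU by evaluating the degree-3 polynomial (the p3 in the name) in the residual t = z - n*ln2 and combining it with vs.
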
velu-avx-rr2-lut4-p4-perm-x40.c  (in xnn_f32_velu_ukernel__avx_rr2_lut4_p4_perm_x40)
   69  __m256 ven1 = _mm256_andnot_ps(vindex_mask, vn1);
   71  const __m128 ven1_lo = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(ven1)), 21));
   85  const __m128 ven1_hi = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(ven1, 1)), 21));
   96  ven1 = _mm256_insertf128_ps(_mm256_castps128_ps256(ven1_lo), ven1_hi, 1);
  107  __m256 vs1 = _mm256_mul_ps(vl1, ven1);

velu-scalar-rr2-lut16-p3-x2.c  (in xnn_f32_velu_ukernel__scalar_rr2_lut16_p3_x2)
   57  const uint32_t ven1 = fp32_to_bits(vn1) << 19;
   64  float vs1 = fp32_from_bits(xnn_table_exp2minus_k_over_16[vidx1] + ven1);

velu-wasm-rr2-lut16-p3-x3.c  (in xnn_f32_velu_ukernel__wasm_rr2_lut16_p3_x3)
   60  const uint32_t ven1 = fp32_to_bits(vn1) << 19;
   70  float vs1 = fp32_from_bits(xnn_table_exp2minus_k_over_16[vidx1] + ven1);

velu-scalar-rr2-lut16-p3-x3.c  (in xnn_f32_velu_ukernel__scalar_rr2_lut16_p3_x3)
   60  const uint32_t ven1 = fp32_to_bits(vn1) << 19;
   70  float vs1 = fp32_from_bits(xnn_table_exp2minus_k_over_16[vidx1] + ven1);

velu-avx-rr2-lut4-p4-perm-x48.c  (in xnn_f32_velu_ukernel__avx_rr2_lut4_p4_perm_x48)
   72  __m256 ven1 = _mm256_andnot_ps(vindex_mask, vn1);
   74  const __m128 ven1_lo = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_castps256_ps128(ven1)), 21));
   91  const __m128 ven1_hi = _mm_castsi128_ps(_mm_slli_epi32(_mm_castps_si128(_mm256_extractf128_ps(ven1, 1)), 21));
  104  ven1 = _mm256_insertf128_ps(_mm256_castps128_ps256(ven1_lo), ven1_hi, 1);
  117  __m256 vs1 = _mm256_mul_ps(vl1, ven1);

velu-scalar-rr2-lut16-p3-x4.c  (in xnn_f32_velu_ukernel__scalar_rr2_lut16_p3_x4)
   63  const uint32_t ven1 = fp32_to_bits(vn1) << 19;
   76  float vs1 = fp32_from_bits(xnn_table_exp2minus_k_over_16[vidx1] + ven1);

velu-wasm-rr2-lut16-p3-x4.c  (in xnn_f32_velu_ukernel__wasm_rr2_lut16_p3_x4)
   63  const uint32_t ven1 = fp32_to_bits(vn1) << 19;
   76  float vs1 = fp32_from_bits(xnn_table_exp2minus_k_over_16[vidx1] + ven1);

velu-avx512f-rr1-lut16-p3-perm-x32.c  (in xnn_f32_velu_ukernel__avx512f_rr1_lut16_p3_perm_x32)
   55  const __m512i ven1 = _mm512_slli_epi32(_mm512_castps_si512(vn1), 19);
   60  __m512 vs1 = _mm512_castsi512_ps(_mm512_add_epi32(vl1, ven1));

velu-scalar-rr2-lut16-p3-x5.c  (in xnn_f32_velu_ukernel__scalar_rr2_lut16_p3_x5)
   66  const uint32_t ven1 = fp32_to_bits(vn1) << 19;
   82  float vs1 = fp32_from_bits(xnn_table_exp2minus_k_over_16[vidx1] + ven1);

velu-wasm-rr2-lut16-p3-x5.c  (in xnn_f32_velu_ukernel__wasm_rr2_lut16_p3_x5)
   66  const uint32_t ven1 = fp32_to_bits(vn1) << 19;
   82  float vs1 = fp32_from_bits(xnn_table_exp2minus_k_over_16[vidx1] + ven1);

velu-avx2-rr1-lut16-p3-gather-x16.c  (in xnn_f32_velu_ukernel__avx2_rr1_lut16_p3_gather_x16)
   60  const __m256i ven1 = _mm256_slli_epi32(_mm256_castps_si256(vn1), 19);
   65  __m256 vs1 = _mm256_castsi256_ps(_mm256_add_epi32(vl1, ven1));
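
The gather and perm suffixes mark the two ways these AVX2/AVX512F kernels fetch the table value vl1 that ven1 is added into: the perm variants appear to keep the whole LUT in a single vector register and select lanes with a permute (which is why they stop at 4, 8, or 16 entries), while the gather variants index a table in memory with hardware gather loads. The ven1 lines themselves are identical across both.
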
velu-avx2-rr1-lut8-p4-perm-x16.c  (in xnn_f32_velu_ukernel__avx2_rr1_lut8_p4_perm_x16)
   56  const __m256i ven1 = _mm256_slli_epi32(_mm256_castps_si256(vn1), 20);
   62  __m256 vs1 = _mm256_castsi256_ps(_mm256_add_epi32(vl1, ven1));

velu-scalar-rr2-lut16-p3-x6.c  (in xnn_f32_velu_ukernel__scalar_rr2_lut16_p3_x6)
   69  const uint32_t ven1 = fp32_to_bits(vn1) << 19;
   88  float vs1 = fp32_from_bits(xnn_table_exp2minus_k_over_16[vidx1] + ven1);

velu-avx2-rr1-lut4-p4-perm-x16.c  (in xnn_f32_velu_ukernel__avx2_rr1_lut4_p4_perm_x16)
   57  const __m256i ven1 = _mm256_slli_epi32(_mm256_castps_si256(vn1), 21);
   63  __m256 vs1 = _mm256_castsi256_ps(_mm256_add_epi32(vl1, ven1));
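
With the lut4 entry above, all three shift counts in these results have appeared, and they follow one rule: the shift must move the integer part of n past the LUT index bits into the float32 exponent field at bit 23, so shift = 23 - log2(LUT size). That gives 23 - 4 = 19 for lut16, 23 - 3 = 20 for lut8, and 23 - 2 = 21 for lut4, exactly the immediates in the _mm256_slli_epi32/_mm512_slli_epi32 calls above. On AVX2 and AVX512F this is a single full-width shift plus an integer add into the table value; only the AVX1 kernels need the per-half workaround noted earlier.
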
velu-wasm-rr2-lut16-p3-x6.c  (in xnn_f32_velu_ukernel__wasm_rr2_lut16_p3_x6)
   69  const uint32_t ven1 = fp32_to_bits(vn1) << 19;
   88  float vs1 = fp32_from_bits(xnn_table_exp2minus_k_over_16[vidx1] + ven1);

velu-avx512f-rr1-lut16-p3-perm-x48.c  (in xnn_f32_velu_ukernel__avx512f_rr1_lut16_p3_perm_x48)
   58  const __m512i ven1 = _mm512_slli_epi32(_mm512_castps_si512(vn1), 19);
   65  __m512 vs1 = _mm512_castsi512_ps(_mm512_add_epi32(vl1, ven1));

velu-avx2-rr1-lut16-p3-gather-x24.c  (in xnn_f32_velu_ukernel__avx2_rr1_lut16_p3_gather_x24)
   65  const __m256i ven1 = _mm256_slli_epi32(_mm256_castps_si256(vn1), 19);
   72  __m256 vs1 = _mm256_castsi256_ps(_mm256_add_epi32(vl1, ven1));

velu-avx2-rr1-lut4-p4-perm-x24.c  (in xnn_f32_velu_ukernel__avx2_rr1_lut4_p4_perm_x24)
   60  const __m256i ven1 = _mm256_slli_epi32(_mm256_castps_si256(vn1), 21);
   69  __m256 vs1 = _mm256_castsi256_ps(_mm256_add_epi32(vl1, ven1));

velu-avx2-rr1-lut8-p4-perm-x24.c  (in xnn_f32_velu_ukernel__avx2_rr1_lut8_p4_perm_x24)
   59  const __m256i ven1 = _mm256_slli_epi32(_mm256_castps_si256(vn1), 20);
   68  __m256 vs1 = _mm256_castsi256_ps(_mm256_add_epi32(vl1, ven1));

velu-avx2-rr1-lut4-p4-perm-x32.c  (in xnn_f32_velu_ukernel__avx2_rr1_lut4_p4_perm_x32)
   63  const __m256i ven1 = _mm256_slli_epi32(_mm256_castps_si256(vn1), 21);
   75  __m256 vs1 = _mm256_castsi256_ps(_mm256_add_epi32(vl1, ven1));

velu-avx2-rr1-lut16-p3-gather-x32.c  (in xnn_f32_velu_ukernel__avx2_rr1_lut16_p3_gather_x32)
   70  const __m256i ven1 = _mm256_slli_epi32(_mm256_castps_si256(vn1), 19);
   79  __m256 vs1 = _mm256_castsi256_ps(_mm256_add_epi32(vl1, ven1));
