
Searched refs: _mm_cvtsi32_si128 (Results 1 – 25 of 282) sorted by relevance
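Background on the searched intrinsic: _mm_cvtsi32_si128 maps to the SSE2 MOVD instruction. It copies a 32-bit integer into the lowest lane of an __m128i and zeroes the upper three lanes; _mm_cvtsi128_si32 is the reverse direction. A minimal, self-contained sketch:

#include <emmintrin.h>  // SSE2 intrinsics

int main(void) {
  // Low lane gets 0x12345678, lanes 1..3 are zeroed.
  const __m128i v = _mm_cvtsi32_si128(0x12345678);
  // Read the low lane back out; returns 0x12345678.
  return _mm_cvtsi128_si32(v) == 0x12345678 ? 0 : 1;
}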


/external/XNNPACK/src/f32-sigmoid/gen/
sse2-lut64-p2-div-x20.c:73 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x20()
74 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x20()
75 …const __m128i vl1 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x20()
77 …const __m128i vl3 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x20()
82 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x20()
83 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x20()
84 …const __m128i vl5 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x20()
86 …const __m128i vl7 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x20()
91 …const __m128i vl8 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x20()
92 …const __m128i vlA = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x20()
[all …]
sse2-lut64-p2-div-x24.c:78 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x24()
79 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x24()
80 …const __m128i vl1 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x24()
82 …const __m128i vl3 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x24()
87 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x24()
88 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x24()
89 …const __m128i vl5 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x24()
91 …const __m128i vl7 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x24()
96 …const __m128i vl8 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x24()
97 …const __m128i vlA = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x24()
[all …]
sse2-lut64-p2-div-x12.c:63 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x12()
64 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x12()
65 …const __m128i vl1 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x12()
67 …const __m128i vl3 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x12()
72 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x12()
73 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x12()
74 …const __m128i vl5 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x12()
76 …const __m128i vl7 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x12()
81 …const __m128i vl8 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x12()
82 …const __m128i vlA = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x12()
[all …]
sse2-lut64-p2-div-x16.c:68 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x16()
69 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x16()
70 …const __m128i vl1 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x16()
72 …const __m128i vl3 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x16()
77 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x16()
78 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x16()
79 …const __m128i vl5 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x16()
81 …const __m128i vl7 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x16()
86 …const __m128i vl8 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x16()
87 …const __m128i vlA = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x16()
[all …]
sse2-lut64-p2-div-x8.c:58 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x8()
59 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x8()
60 …const __m128i vl1 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x8()
62 …const __m128i vl3 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x8()
67 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x8()
68 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x8()
69 …const __m128i vl5 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x8()
71 …const __m128i vl7 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x8()
79 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x8()
80 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x8()
[all …]
sse2-lut64-p2-div-x4.c:51 …const __m128i vl_ll = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_6… in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x4()
52 …const __m128i vl_hl = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x4()
53 …const __m128i vl_lh = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x4()
55 …const __m128i vl_hh = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x4()
58 …const __m128i vl_ll = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x4()
59 …const __m128i vl_hl = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x4()
60 …const __m128i vl_lh = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x4()
62 …const __m128i vl_hh = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x4()
98 …const __m128i vl_ll = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_6… in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x4()
99 …const __m128i vl_hl = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse2_lut64_p2_div_x4()
[all …]
sse41-lut64-p2-div-x24.c:78 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
79 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
85 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
86 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
92 …const __m128i vl8 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
93 …const __m128i vlA = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
99 …const __m128i vlC = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
100 …const __m128i vlE = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
106 …const __m128i vlG = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
107 …const __m128i vlI = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x24()
[all …]
sse41-lut64-p2-div-x16.c:68 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x16()
69 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x16()
75 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x16()
76 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x16()
82 …const __m128i vl8 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x16()
83 …const __m128i vlA = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x16()
89 …const __m128i vlC = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x16()
90 …const __m128i vlE = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x16()
99 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x16()
100 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x16()
[all …]
sse41-lut64-p2-div-x20.c:73 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x20()
74 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x20()
80 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x20()
81 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x20()
87 …const __m128i vl8 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x20()
88 …const __m128i vlA = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x20()
94 …const __m128i vlC = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x20()
95 …const __m128i vlE = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x20()
101 …const __m128i vlG = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x20()
102 …const __m128i vlI = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x20()
[all …]
sse41-lut64-p2-div-x12.c:63 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x12()
64 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x12()
70 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x12()
71 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x12()
77 …const __m128i vl8 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x12()
78 …const __m128i vlA = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x12()
87 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x12()
88 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x12()
96 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x12()
97 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_f32_sigmoid_ukernel__sse41_lut64_p2_div_x12()
[all …]
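All of the f32-sigmoid matches above follow the same pattern: each 32-bit entry of the xnn_table_exp2minus_k_over_64 lookup table is fetched with a scalar load and moved into the low lane of an XMM register via _mm_cvtsi32_si128. In the untruncated sources the four scalars are then merged into one vector with unpack instructions; a hedged sketch of that gather (lut64 is a hypothetical stand-in for the XNNPACK table):

#include <emmintrin.h>  // SSE2 intrinsics
#include <stdint.h>

// Hypothetical 64-entry table standing in for xnn_table_exp2minus_k_over_64.
extern const uint32_t lut64[64];

// Gather four table entries into one __m128i: one _mm_cvtsi32_si128 load per
// element, then pairwise unpacks to combine the low lanes.
static __m128i gather4_lut64(uint32_t i0, uint32_t i1, uint32_t i2, uint32_t i3) {
  const __m128i vl0 = _mm_cvtsi32_si128((int) lut64[i0]);
  const __m128i vl1 = _mm_cvtsi32_si128((int) lut64[i1]);
  const __m128i vl2 = _mm_cvtsi32_si128((int) lut64[i2]);
  const __m128i vl3 = _mm_cvtsi32_si128((int) lut64[i3]);
  const __m128i vl01 = _mm_unpacklo_epi32(vl0, vl1);  // lanes: l0 l1 x x
  const __m128i vl23 = _mm_unpacklo_epi32(vl2, vl3);  // lanes: l2 l3 x x
  return _mm_unpacklo_epi64(vl01, vl23);              // lanes: l0 l1 l2 l3
}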
/external/XNNPACK/src/f32-velu/gen/
velu-sse2-rr2-lut16-p3-x12.c:69 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x12()
70 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x12()
71 …const __m128i vl1 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x12()
73 …const __m128i vl3 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x12()
78 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x12()
79 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x12()
80 …const __m128i vl5 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x12()
82 …const __m128i vl7 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x12()
87 …const __m128i vl8 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x12()
88 …const __m128i vlA = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x12()
[all …]
velu-sse2-rr2-lut16-p3-x16.c:74 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x16()
75 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x16()
76 …const __m128i vl1 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x16()
78 …const __m128i vl3 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x16()
83 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x16()
84 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x16()
85 …const __m128i vl5 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x16()
87 …const __m128i vl7 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x16()
92 …const __m128i vl8 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x16()
93 …const __m128i vlA = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x16()
[all …]
velu-sse2-rr2-lut16-p3-x20.c:79 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x20()
80 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x20()
81 …const __m128i vl1 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x20()
83 …const __m128i vl3 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x20()
88 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x20()
89 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x20()
90 …const __m128i vl5 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x20()
92 …const __m128i vl7 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x20()
97 …const __m128i vl8 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x20()
98 …const __m128i vlA = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x20()
[all …]
velu-sse2-rr2-lut16-p3-x24.c:84 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x24()
85 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x24()
86 …const __m128i vl1 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x24()
88 …const __m128i vl3 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x24()
93 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x24()
94 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x24()
95 …const __m128i vl5 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x24()
97 …const __m128i vl7 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x24()
102 …const __m128i vl8 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x24()
103 …const __m128i vlA = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x24()
[all …]
velu-sse2-rr2-lut16-p3-x8.c:64 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x8()
65 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x8()
66 …const __m128i vl1 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x8()
68 …const __m128i vl3 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x8()
73 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x8()
74 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x8()
75 …const __m128i vl5 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x8()
77 …const __m128i vl7 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x8()
85 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x8()
86 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x8()
[all …]
velu-sse2-rr2-lut16-p3-x4.c:58 …const __m128i vl_ll = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_1… in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x4()
59 …const __m128i vl_hl = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x4()
60 …const __m128i vl_lh = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x4()
62 …const __m128i vl_hh = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x4()
65 …const __m128i vl_ll = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x4()
66 …const __m128i vl_hl = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x4()
67 …const __m128i vl_lh = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x4()
69 …const __m128i vl_hh = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x4()
106 …const __m128i vl_ll = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_1… in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x4()
107 …const __m128i vl_hl = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse2_rr2_lut16_p3_x4()
[all …]
velu-sse41-rr2-lut16-p3-x20.c:79 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x20()
80 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x20()
86 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x20()
87 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x20()
93 …const __m128i vl8 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x20()
94 …const __m128i vlA = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x20()
100 …const __m128i vlC = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x20()
101 …const __m128i vlE = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x20()
107 …const __m128i vlG = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x20()
108 …const __m128i vlI = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x20()
[all …]
velu-sse41-rr2-lut16-p3-x24.c:84 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
85 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
91 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
92 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
98 …const __m128i vl8 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
99 …const __m128i vlA = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
105 …const __m128i vlC = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
106 …const __m128i vlE = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
112 …const __m128i vlG = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
113 …const __m128i vlI = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x24()
[all …]
velu-sse41-rr2-lut16-p3-x16.c:74 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x16()
75 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x16()
81 …const __m128i vl4 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x16()
82 …const __m128i vl6 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x16()
88 …const __m128i vl8 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x16()
89 …const __m128i vlA = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x16()
95 …const __m128i vlC = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x16()
96 …const __m128i vlE = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x16()
105 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x16()
106 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_f32_velu_ukernel__sse41_rr2_lut16_p3_x16()
[all …]
/external/flac/src/libFLAC/
lpc_intrin_sse2.c:58 const __m128i cnt = _mm_cvtsi32_si128(lp_quantization); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
68 … q0 = _mm_cvtsi32_si128(0xffff & qlp_coeff[0]); q0 = _mm_shuffle_epi32(q0, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
69 … q1 = _mm_cvtsi32_si128(0xffff & qlp_coeff[1]); q1 = _mm_shuffle_epi32(q1, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
70 … q2 = _mm_cvtsi32_si128(0xffff & qlp_coeff[2]); q2 = _mm_shuffle_epi32(q2, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
71 … q3 = _mm_cvtsi32_si128(0xffff & qlp_coeff[3]); q3 = _mm_shuffle_epi32(q3, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
72 … q4 = _mm_cvtsi32_si128(0xffff & qlp_coeff[4]); q4 = _mm_shuffle_epi32(q4, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
73 … q5 = _mm_cvtsi32_si128(0xffff & qlp_coeff[5]); q5 = _mm_shuffle_epi32(q5, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
74 … q6 = _mm_cvtsi32_si128(0xffff & qlp_coeff[6]); q6 = _mm_shuffle_epi32(q6, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
75 … q7 = _mm_cvtsi32_si128(0xffff & qlp_coeff[7]); q7 = _mm_shuffle_epi32(q7, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
76 … q8 = _mm_cvtsi32_si128(0xffff & qlp_coeff[8]); q8 = _mm_shuffle_epi32(q8, _MM_SHUFFLE(0,0,0,0)); in FLAC__lpc_compute_residual_from_qlp_coefficients_16_intrin_sse2()
[all …]
lpc_intrin_sse41.c:59 const __m128i cnt = _mm_cvtsi32_si128(lp_quantization); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
138 xmm5 = _mm_cvtsi32_si128(qlp_coeff[10]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
149 xmm7 = _mm_cvtsi32_si128(data[i-11]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
253 xmm4 = _mm_cvtsi32_si128(qlp_coeff[8]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
263 xmm7 = _mm_cvtsi32_si128(data[i-9]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
352 xmm3 = _mm_cvtsi32_si128(qlp_coeff[6]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
361 xmm7 = _mm_cvtsi32_si128(data[i-7]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
431 xmm2 = _mm_cvtsi32_si128(qlp_coeff[4]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
439 xmm7 = _mm_cvtsi32_si128(data[i-5]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
494 xmm1 = _mm_cvtsi32_si128(qlp_coeff[2]); in FLAC__lpc_compute_residual_from_qlp_coefficients_wide_intrin_sse41()
[all …]
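The FLAC LPC kernels use the intrinsic for scalar broadcasts: a quantized coefficient is moved into the low lane and then splatted to all four lanes with _mm_shuffle_epi32, as on lines 68–76 of lpc_intrin_sse2.c above. A small sketch of that idiom (broadcast_epi32 is a hypothetical helper name):

#include <emmintrin.h>  // SSE2 intrinsics

// SSE2 has no single instruction to broadcast a general-purpose register, so
// the usual idiom is _mm_cvtsi32_si128 followed by a lane shuffle.
static __m128i broadcast_epi32(int x) {
  const __m128i v = _mm_cvtsi32_si128(x);
  return _mm_shuffle_epi32(v, _MM_SHUFFLE(0, 0, 0, 0));  // all lanes = x
}

// Usage in the spirit of the masked coefficient loads above:
//   __m128i q0 = broadcast_epi32(0xffff & qlp_coeff[0]);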
/external/XNNPACK/src/math/
expm1minus-sse2-rr2-lut16-p3.c:75 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_math_f32_expm1minus__sse2_rr2_lut16_p3()
76 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_math_f32_expm1minus__sse2_rr2_lut16_p3()
77 …const __m128i vl1 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_math_f32_expm1minus__sse2_rr2_lut16_p3()
78 …const __m128i vl3 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_math_f32_expm1minus__sse2_rr2_lut16_p3()
84 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_math_f32_expm1minus__sse2_rr2_lut16_p3()
85 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_math_f32_expm1minus__sse2_rr2_lut16_p3()
86 …const __m128i vl1 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_math_f32_expm1minus__sse2_rr2_lut16_p3()
87 …const __m128i vl3 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_16 + … in xnn_math_f32_expm1minus__sse2_rr2_lut16_p3()
sigmoid-sse2-rr2-lut64-p2-div.c:80 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_math_f32_sigmoid__sse2_rr2_lut64_p2_div()
81 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_math_f32_sigmoid__sse2_rr2_lut64_p2_div()
82 …const __m128i vl1 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_math_f32_sigmoid__sse2_rr2_lut64_p2_div()
83 …const __m128i vl3 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_math_f32_sigmoid__sse2_rr2_lut64_p2_div()
89 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_math_f32_sigmoid__sse2_rr2_lut64_p2_div()
90 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_math_f32_sigmoid__sse2_rr2_lut64_p2_div()
91 …const __m128i vl1 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_math_f32_sigmoid__sse2_rr2_lut64_p2_div()
92 …const __m128i vl3 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_math_f32_sigmoid__sse2_rr2_lut64_p2_div()
sigmoid-sse2-rr2-lut64-p2-nr2.c:81 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_math_f32_sigmoid__sse2_rr2_lut64_p2_nr2()
82 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_math_f32_sigmoid__sse2_rr2_lut64_p2_nr2()
83 …const __m128i vl1 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_math_f32_sigmoid__sse2_rr2_lut64_p2_nr2()
84 …const __m128i vl3 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_math_f32_sigmoid__sse2_rr2_lut64_p2_nr2()
90 …const __m128i vl0 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_math_f32_sigmoid__sse2_rr2_lut64_p2_nr2()
91 …const __m128i vl2 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_math_f32_sigmoid__sse2_rr2_lut64_p2_nr2()
92 …const __m128i vl1 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_math_f32_sigmoid__sse2_rr2_lut64_p2_nr2()
93 …const __m128i vl3 = _mm_cvtsi32_si128(*((const int*) ((uintptr_t) xnn_table_exp2minus_k_over_64 + … in xnn_math_f32_sigmoid__sse2_rr2_lut64_p2_nr2()
/external/libaom/libaom/av1/common/x86/
convolve_sse2.c:87 const __m128i round_shift = _mm_cvtsi32_si128(FILTER_BITS); in av1_convolve_y_sr_sse2()
103 src6 = _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 6 * src_stride)); in av1_convolve_y_sr_sse2()
105 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 0 * src_stride)), in av1_convolve_y_sr_sse2()
106 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 1 * src_stride))); in av1_convolve_y_sr_sse2()
108 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 1 * src_stride)), in av1_convolve_y_sr_sse2()
109 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 2 * src_stride))); in av1_convolve_y_sr_sse2()
111 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 2 * src_stride)), in av1_convolve_y_sr_sse2()
112 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 3 * src_stride))); in av1_convolve_y_sr_sse2()
114 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 3 * src_stride)), in av1_convolve_y_sr_sse2()
115 _mm_cvtsi32_si128(*(uint32_t *)(src_ptr + 4 * src_stride))); in av1_convolve_y_sr_sse2()
[all …]
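The libaom convolution matches show two further uses: loading four consecutive 8-bit pixels as one 32-bit value into the low lane, and placing a run-time shift count into a vector register, since _mm_sra_epi32 takes its count from an __m128i. A hedged sketch (the real av1_convolve_y_sr_sse2 also folds in rounding; the helper names below are hypothetical):

#include <emmintrin.h>  // SSE2 intrinsics
#include <stdint.h>
#include <string.h>

// Load 4 pixels (bytes) into the low 32 bits of an __m128i. memcpy replaces
// the *(uint32_t *) cast seen above to stay strict-aliasing clean.
static __m128i load_u8_4x1(const uint8_t *p) {
  uint32_t v;
  memcpy(&v, p, sizeof(v));
  return _mm_cvtsi32_si128((int) v);
}

// Arithmetic right shift of four 32-bit sums by a run-time count such as
// FILTER_BITS; the count must be supplied in a vector register.
static __m128i shift_right_s32(__m128i sum, int bits) {
  const __m128i count = _mm_cvtsi32_si128(bits);
  return _mm_sra_epi32(sum, count);
}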
