/*===--------------- avx10_2convertintrin.h - AVX10_2CONVERT ---------------===
 *
 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
 * See https://llvm.org/LICENSE.txt for license information.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 *
 *===-----------------------------------------------------------------------===
 */
#ifndef __IMMINTRIN_H
#error \
    "Never use <avx10_2convertintrin.h> directly; include <immintrin.h> instead."
#endif // __IMMINTRIN_H

#ifdef __SSE2__

#ifndef __AVX10_2CONVERTINTRIN_H
#define __AVX10_2CONVERTINTRIN_H

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS128                                                 \
  __attribute__((__always_inline__, __nodebug__, __target__("avx10.2-256"),  \
                 __min_vector_width__(128)))
#define __DEFAULT_FN_ATTRS256                                                 \
  __attribute__((__always_inline__, __nodebug__, __target__("avx10.2-256"),  \
                 __min_vector_width__(256)))

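/* Convert two vectors of packed single-precision values to a single vector of
   packed FP16 values (VCVT2PS2PHX). The mask forms blend unselected elements
   from __W; the maskz forms zero them. */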
static __inline__ __m128h __DEFAULT_FN_ATTRS128 _mm_cvtx2ps_ph(__m128 __A,
                                                               __m128 __B) {
  return (__m128h)__builtin_ia32_vcvt2ps2phx128_mask(
      (__v4sf)__A, (__v4sf)__B, (__v8hf)_mm_setzero_ph(), (__mmask8)(-1));
}

static __inline__ __m128h __DEFAULT_FN_ATTRS128
_mm_mask_cvtx2ps_ph(__m128h __W, __mmask8 __U, __m128 __A, __m128 __B) {
  return (__m128h)__builtin_ia32_vcvt2ps2phx128_mask(
      (__v4sf)__A, (__v4sf)__B, (__v8hf)__W, (__mmask8)__U);
}

static __inline__ __m128h __DEFAULT_FN_ATTRS128
_mm_maskz_cvtx2ps_ph(__mmask8 __U, __m128 __A, __m128 __B) {
  return (__m128h)__builtin_ia32_vcvt2ps2phx128_mask(
      (__v4sf)__A, (__v4sf)__B, (__v8hf)_mm_setzero_ph(), (__mmask8)__U);
}

static __inline__ __m256h __DEFAULT_FN_ATTRS256 _mm256_cvtx2ps_ph(__m256 __A,
                                                                  __m256 __B) {
  return (__m256h)__builtin_ia32_vcvt2ps2phx256_mask(
      (__v8sf)__A, (__v8sf)__B, (__v16hf)_mm256_setzero_ph(), (__mmask16)(-1),
      _MM_FROUND_CUR_DIRECTION);
}

static __inline__ __m256h __DEFAULT_FN_ATTRS256
_mm256_mask_cvtx2ps_ph(__m256h __W, __mmask16 __U, __m256 __A, __m256 __B) {
  return (__m256h)__builtin_ia32_vcvt2ps2phx256_mask(
      (__v8sf)__A, (__v8sf)__B, (__v16hf)__W, (__mmask16)__U,
      _MM_FROUND_CUR_DIRECTION);
}

static __inline__ __m256h __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtx2ps_ph(__mmask16 __U, __m256 __A, __m256 __B) {
  return (__m256h)__builtin_ia32_vcvt2ps2phx256_mask(
      (__v8sf)__A, (__v8sf)__B, (__v16hf)_mm256_setzero_ph(), (__mmask16)__U,
      _MM_FROUND_CUR_DIRECTION);
}

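/* 256-bit VCVT2PS2PHX conversions with an explicit rounding-mode argument R,
   e.g. _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC. */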
#define _mm256_cvtx_round2ps_ph(A, B, R)                                      \
  ((__m256h)__builtin_ia32_vcvt2ps2phx256_mask(                              \
      (__v8sf)(A), (__v8sf)(B), (__v16hf)_mm256_undefined_ph(),              \
      (__mmask16)(-1), (const int)(R)))

#define _mm256_mask_cvtx_round2ps_ph(W, U, A, B, R)                           \
  ((__m256h)__builtin_ia32_vcvt2ps2phx256_mask(                              \
      (__v8sf)(A), (__v8sf)(B), (__v16hf)(W), (__mmask16)(U), (const int)(R)))

#define _mm256_maskz_cvtx_round2ps_ph(U, A, B, R)                             \
  ((__m256h)__builtin_ia32_vcvt2ps2phx256_mask(                              \
      (__v8sf)(A), (__v8sf)(B), (__v16hf)(_mm256_setzero_ph()),              \
      (__mmask16)(U), (const int)(R)))

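/* Convert packed FP16 values in __B to packed BF8 (E5M2) values, using
   per-element bias values from __A to control rounding (VCVTBIASPH2BF8). */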
static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_cvtbiasph_pbf8(__m128i __A, __m128h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2bf8_128_mask(
      (__v16qi)__A, (__v8hf)__B, (__v16qi)_mm_undefined_si128(), (__mmask8)-1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_cvtbiasph_pbf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2bf8_128_mask(
      (__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)__W, (__mmask8)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_cvtbiasph_pbf8(__mmask8 __U, __m128i __A, __m128h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2bf8_128_mask(
      (__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
      (__mmask8)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_cvtbiasph_pbf8(__m256i __A, __m256h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2bf8_256_mask(
      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_undefined_si128(),
      (__mmask16)-1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtbiasph_pbf8(
    __m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2bf8_256_mask(
      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)__W, (__mmask16)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtbiasph_pbf8(__mmask16 __U, __m256i __A, __m256h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2bf8_256_mask(
      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
      (__mmask16)__U);
}

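/* Saturating variant of the biased conversion: FP16 to BF8 (E5M2) with
   saturation (VCVTBIASPH2BF8S). */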
static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_cvtbiassph_pbf8(__m128i __A, __m128h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_128_mask(
      (__v16qi)__A, (__v8hf)__B, (__v16qi)_mm_undefined_si128(), (__mmask8)-1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_cvtbiassph_pbf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_128_mask(
      (__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)__W, (__mmask8)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_cvtbiassph_pbf8(__mmask8 __U, __m128i __A, __m128h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_128_mask(
      (__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
      (__mmask8)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_cvtbiassph_pbf8(__m256i __A, __m256h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_256_mask(
      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_undefined_si128(),
      (__mmask16)-1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtbiassph_pbf8(
    __m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_256_mask(
      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)__W, (__mmask16)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtbiassph_pbf8(__mmask16 __U, __m256i __A, __m256h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2bf8s_256_mask(
      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
      (__mmask16)__U);
}

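/* Convert packed FP16 values in __B to packed HF8 (E4M3) values, using
   per-element bias values from __A (VCVTBIASPH2HF8). */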
static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_cvtbiasph_phf8(__m128i __A, __m128h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2hf8_128_mask(
      (__v16qi)__A, (__v8hf)__B, (__v16qi)_mm_undefined_si128(), (__mmask8)-1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_cvtbiasph_phf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2hf8_128_mask(
      (__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)__W, (__mmask8)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_cvtbiasph_phf8(__mmask8 __U, __m128i __A, __m128h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2hf8_128_mask(
      (__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
      (__mmask8)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_cvtbiasph_phf8(__m256i __A, __m256h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2hf8_256_mask(
      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_undefined_si128(),
      (__mmask16)-1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtbiasph_phf8(
    __m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2hf8_256_mask(
      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)__W, (__mmask16)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtbiasph_phf8(__mmask16 __U, __m256i __A, __m256h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2hf8_256_mask(
      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
      (__mmask16)__U);
}

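/* Saturating variant: FP16 to HF8 (E4M3) with bias and saturation
   (VCVTBIASPH2HF8S). */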
static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_cvtbiassph_phf8(__m128i __A, __m128h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_128_mask(
      (__v16qi)__A, (__v8hf)__B, (__v16qi)_mm_undefined_si128(), (__mmask8)-1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_cvtbiassph_phf8(__m128i __W, __mmask8 __U, __m128i __A, __m128h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_128_mask(
      (__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)__W, (__mmask8)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_cvtbiassph_phf8(__mmask8 __U, __m128i __A, __m128h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_128_mask(
      (__v16qi)__A, (__v8hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
      (__mmask8)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_cvtbiassph_phf8(__m256i __A, __m256h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_256_mask(
      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_undefined_si128(),
      (__mmask16)-1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtbiassph_phf8(
    __m128i __W, __mmask16 __U, __m256i __A, __m256h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_256_mask(
      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)__W, (__mmask16)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtbiassph_phf8(__mmask16 __U, __m256i __A, __m256h __B) {
  return (__m128i)__builtin_ia32_vcvtbiasph2hf8s_256_mask(
      (__v32qi)__A, (__v16hf)__B, (__v16qi)(__m128i)_mm_setzero_si128(),
      (__mmask16)__U);
}

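/* Convert two vectors of packed FP16 values to a single vector of packed BF8
   (E5M2) values (VCVTNE2PH2BF8). */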
static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtne2ph_pbf8(__m128h __A,
                                                                  __m128h __B) {
  return (__m128i)__builtin_ia32_vcvtne2ph2bf8_128((__v8hf)(__A),
                                                   (__v8hf)(__B));
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_cvtne2ph_pbf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
  return (__m128i)__builtin_ia32_selectb_128(
      (__mmask16)__U, (__v16qi)_mm_cvtne2ph_pbf8(__A, __B), (__v16qi)__W);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_cvtne2ph_pbf8(__mmask16 __U, __m128h __A, __m128h __B) {
  return (__m128i)__builtin_ia32_selectb_128(
      (__mmask16)__U, (__v16qi)_mm_cvtne2ph_pbf8(__A, __B),
      (__v16qi)(__m128i)_mm_setzero_si128());
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_cvtne2ph_pbf8(__m256h __A, __m256h __B) {
  return (__m256i)__builtin_ia32_vcvtne2ph2bf8_256((__v16hf)(__A),
                                                   (__v16hf)(__B));
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtne2ph_pbf8(
    __m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
  return (__m256i)__builtin_ia32_selectb_256(
      (__mmask32)__U, (__v32qi)_mm256_cvtne2ph_pbf8(__A, __B), (__v32qi)__W);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtne2ph_pbf8(__mmask32 __U, __m256h __A, __m256h __B) {
  return (__m256i)__builtin_ia32_selectb_256(
      (__mmask32)__U, (__v32qi)_mm256_cvtne2ph_pbf8(__A, __B),
      (__v32qi)(__m256i)_mm256_setzero_si256());
}

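/* Saturating variant: two FP16 vectors to one BF8 (E5M2) vector
   (VCVTNE2PH2BF8S). */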
static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_cvtnes2ph_pbf8(__m128h __A, __m128h __B) {
  return (__m128i)__builtin_ia32_vcvtne2ph2bf8s_128((__v8hf)(__A),
                                                    (__v8hf)(__B));
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_cvtnes2ph_pbf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
  return (__m128i)__builtin_ia32_selectb_128(
      (__mmask16)__U, (__v16qi)_mm_cvtnes2ph_pbf8(__A, __B), (__v16qi)__W);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_cvtnes2ph_pbf8(__mmask16 __U, __m128h __A, __m128h __B) {
  return (__m128i)__builtin_ia32_selectb_128(
      (__mmask16)__U, (__v16qi)_mm_cvtnes2ph_pbf8(__A, __B),
      (__v16qi)(__m128i)_mm_setzero_si128());
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_cvtnes2ph_pbf8(__m256h __A, __m256h __B) {
  return (__m256i)__builtin_ia32_vcvtne2ph2bf8s_256((__v16hf)(__A),
                                                    (__v16hf)(__B));
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtnes2ph_pbf8(
    __m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
  return (__m256i)__builtin_ia32_selectb_256(
      (__mmask32)__U, (__v32qi)_mm256_cvtnes2ph_pbf8(__A, __B), (__v32qi)__W);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtnes2ph_pbf8(__mmask32 __U, __m256h __A, __m256h __B) {
  return (__m256i)__builtin_ia32_selectb_256(
      (__mmask32)__U, (__v32qi)_mm256_cvtnes2ph_pbf8(__A, __B),
      (__v32qi)(__m256i)_mm256_setzero_si256());
}

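/* Convert two vectors of packed FP16 values to a single vector of packed HF8
   (E4M3) values (VCVTNE2PH2HF8). */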
static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtne2ph_phf8(__m128h __A,
                                                                  __m128h __B) {
  return (__m128i)__builtin_ia32_vcvtne2ph2hf8_128((__v8hf)(__A),
                                                   (__v8hf)(__B));
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_cvtne2ph_phf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
  return (__m128i)__builtin_ia32_selectb_128(
      (__mmask16)__U, (__v16qi)_mm_cvtne2ph_phf8(__A, __B), (__v16qi)__W);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_cvtne2ph_phf8(__mmask16 __U, __m128h __A, __m128h __B) {
  return (__m128i)__builtin_ia32_selectb_128(
      (__mmask16)__U, (__v16qi)_mm_cvtne2ph_phf8(__A, __B),
      (__v16qi)(__m128i)_mm_setzero_si128());
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_cvtne2ph_phf8(__m256h __A, __m256h __B) {
  return (__m256i)__builtin_ia32_vcvtne2ph2hf8_256((__v16hf)(__A),
                                                   (__v16hf)(__B));
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtne2ph_phf8(
    __m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
  return (__m256i)__builtin_ia32_selectb_256(
      (__mmask32)__U, (__v32qi)_mm256_cvtne2ph_phf8(__A, __B), (__v32qi)__W);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtne2ph_phf8(__mmask32 __U, __m256h __A, __m256h __B) {
  return (__m256i)__builtin_ia32_selectb_256(
      (__mmask32)__U, (__v32qi)_mm256_cvtne2ph_phf8(__A, __B),
      (__v32qi)(__m256i)_mm256_setzero_si256());
}

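/* Saturating variant: two FP16 vectors to one HF8 (E4M3) vector
   (VCVTNE2PH2HF8S). */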
static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_cvtnes2ph_phf8(__m128h __A, __m128h __B) {
  return (__m128i)__builtin_ia32_vcvtne2ph2hf8s_128((__v8hf)(__A),
                                                    (__v8hf)(__B));
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_cvtnes2ph_phf8(__m128i __W, __mmask16 __U, __m128h __A, __m128h __B) {
  return (__m128i)__builtin_ia32_selectb_128(
      (__mmask16)__U, (__v16qi)_mm_cvtnes2ph_phf8(__A, __B), (__v16qi)__W);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_cvtnes2ph_phf8(__mmask16 __U, __m128h __A, __m128h __B) {
  return (__m128i)__builtin_ia32_selectb_128(
      (__mmask16)__U, (__v16qi)_mm_cvtnes2ph_phf8(__A, __B),
      (__v16qi)(__m128i)_mm_setzero_si128());
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_cvtnes2ph_phf8(__m256h __A, __m256h __B) {
  return (__m256i)__builtin_ia32_vcvtne2ph2hf8s_256((__v16hf)(__A),
                                                    (__v16hf)(__B));
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256 _mm256_mask_cvtnes2ph_phf8(
    __m256i __W, __mmask32 __U, __m256h __A, __m256h __B) {
  return (__m256i)__builtin_ia32_selectb_256(
      (__mmask32)__U, (__v32qi)_mm256_cvtnes2ph_phf8(__A, __B), (__v32qi)__W);
}

static __inline__ __m256i __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtnes2ph_phf8(__mmask32 __U, __m256h __A, __m256h __B) {
  return (__m256i)__builtin_ia32_selectb_256(
      (__mmask32)__U, (__v32qi)_mm256_cvtnes2ph_phf8(__A, __B),
      (__v32qi)(__m256i)_mm256_setzero_si256());
}

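/* Widen packed HF8 (E4M3) values to packed FP16 values (VCVTHF82PH). */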
static __inline__ __m128h __DEFAULT_FN_ATTRS128 _mm_cvtnehf8_ph(__m128i __A) {
  return (__m128h)__builtin_ia32_vcvthf8_2ph128_mask(
      (__v16qi)__A, (__v8hf)(__m128h)_mm_undefined_ph(), (__mmask8)-1);
}

static __inline__ __m128h __DEFAULT_FN_ATTRS128
_mm_mask_cvtnehf8_ph(__m128h __W, __mmask8 __U, __m128i __A) {
  return (__m128h)__builtin_ia32_vcvthf8_2ph128_mask(
      (__v16qi)__A, (__v8hf)(__m128h)__W, (__mmask8)__U);
}

static __inline__ __m128h __DEFAULT_FN_ATTRS128
_mm_maskz_cvtnehf8_ph(__mmask8 __U, __m128i __A) {
  return (__m128h)__builtin_ia32_vcvthf8_2ph128_mask(
      (__v16qi)__A, (__v8hf)(__m128h)_mm_setzero_ph(), (__mmask8)__U);
}

static __inline__ __m256h __DEFAULT_FN_ATTRS256
_mm256_cvtnehf8_ph(__m128i __A) {
  return (__m256h)__builtin_ia32_vcvthf8_2ph256_mask(
      (__v16qi)__A, (__v16hf)(__m256h)_mm256_undefined_ph(), (__mmask16)-1);
}

static __inline__ __m256h __DEFAULT_FN_ATTRS256
_mm256_mask_cvtnehf8_ph(__m256h __W, __mmask16 __U, __m128i __A) {
  return (__m256h)__builtin_ia32_vcvthf8_2ph256_mask(
      (__v16qi)__A, (__v16hf)(__m256h)__W, (__mmask16)__U);
}

static __inline__ __m256h __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtnehf8_ph(__mmask16 __U, __m128i __A) {
  return (__m256h)__builtin_ia32_vcvthf8_2ph256_mask(
      (__v16qi)__A, (__v16hf)(__m256h)_mm256_setzero_ph(), (__mmask16)__U);
}

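/* Convert packed FP16 values to packed BF8 (E5M2) values (VCVTNEPH2BF8). The
   128-bit source form writes its eight result bytes to the low 64 bits of the
   destination. */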
static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtneph_pbf8(__m128h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2bf8_128_mask(
      (__v8hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask8)-1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_cvtneph_pbf8(__m128i __W, __mmask8 __U, __m128h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2bf8_128_mask(
      (__v8hf)__A, (__v16qi)(__m128i)__W, (__mmask8)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_cvtneph_pbf8(__mmask8 __U, __m128h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2bf8_128_mask(
      (__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_cvtneph_pbf8(__m256h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2bf8_256_mask(
      (__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_mask_cvtneph_pbf8(__m128i __W, __mmask16 __U, __m256h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2bf8_256_mask(
      (__v16hf)__A, (__v16qi)(__m128i)__W, (__mmask16)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtneph_pbf8(__mmask16 __U, __m256h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2bf8_256_mask(
      (__v16hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask16)__U);
}

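/* Saturating variant: FP16 to BF8 (E5M2) with saturation (VCVTNEPH2BF8S). */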
static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtnesph_pbf8(__m128h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2bf8s_128_mask(
      (__v8hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask8)-1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_cvtnesph_pbf8(__m128i __W, __mmask8 __U, __m128h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2bf8s_128_mask(
      (__v8hf)__A, (__v16qi)(__m128i)__W, (__mmask8)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_cvtnesph_pbf8(__mmask8 __U, __m128h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2bf8s_128_mask(
      (__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_cvtnesph_pbf8(__m256h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2bf8s_256_mask(
      (__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_mask_cvtnesph_pbf8(__m128i __W, __mmask16 __U, __m256h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2bf8s_256_mask(
      (__v16hf)__A, (__v16qi)(__m128i)__W, (__mmask16)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtnesph_pbf8(__mmask16 __U, __m256h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2bf8s_256_mask(
      (__v16hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask16)__U);
}

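/* Convert packed FP16 values to packed HF8 (E4M3) values (VCVTNEPH2HF8). */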
static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtneph_phf8(__m128h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2hf8_128_mask(
      (__v8hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask8)-1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_cvtneph_phf8(__m128i __W, __mmask8 __U, __m128h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2hf8_128_mask(
      (__v8hf)__A, (__v16qi)(__m128i)__W, (__mmask8)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_cvtneph_phf8(__mmask8 __U, __m128h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2hf8_128_mask(
      (__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_cvtneph_phf8(__m256h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2hf8_256_mask(
      (__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_mask_cvtneph_phf8(__m128i __W, __mmask16 __U, __m256h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2hf8_256_mask(
      (__v16hf)__A, (__v16qi)(__m128i)__W, (__mmask16)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtneph_phf8(__mmask16 __U, __m256h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2hf8_256_mask(
      (__v16hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask16)__U);
}

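/* Saturating variant: FP16 to HF8 (E4M3) with saturation (VCVTNEPH2HF8S). */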
static __inline__ __m128i __DEFAULT_FN_ATTRS128 _mm_cvtnesph_phf8(__m128h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2hf8s_128_mask(
      (__v8hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask8)-1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_mask_cvtnesph_phf8(__m128i __W, __mmask8 __U, __m128h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2hf8s_128_mask(
      (__v8hf)__A, (__v16qi)(__m128i)__W, (__mmask8)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS128
_mm_maskz_cvtnesph_phf8(__mmask8 __U, __m128h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2hf8s_128_mask(
      (__v8hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask8)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_cvtnesph_phf8(__m256h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2hf8s_256_mask(
      (__v16hf)__A, (__v16qi)(__m128i)_mm_undefined_si128(), (__mmask16)-1);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_mask_cvtnesph_phf8(__m128i __W, __mmask16 __U, __m256h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2hf8s_256_mask(
      (__v16hf)__A, (__v16qi)(__m128i)__W, (__mmask16)__U);
}

static __inline__ __m128i __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtnesph_phf8(__mmask16 __U, __m256h __A) {
  return (__m128i)__builtin_ia32_vcvtneph2hf8s_256_mask(
      (__v16hf)__A, (__v16qi)(__m128i)_mm_setzero_si128(), (__mmask16)__U);
}

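/* Widen packed BF8 (E5M2) values to packed FP16 values. E5M2 is
   bit-compatible with the upper byte of an IEEE FP16 value, so each byte is
   simply widened to 16 bits and shifted into the high byte of its lane; no
   dedicated instruction is required. */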
static __inline__ __m128h __DEFAULT_FN_ATTRS128 _mm_cvtpbf8_ph(__m128i __A) {
  return _mm_castsi128_ph(_mm_slli_epi16(_mm_cvtepi8_epi16(__A), 8));
}

static __inline__ __m128h __DEFAULT_FN_ATTRS128
_mm_mask_cvtpbf8_ph(__m128h __S, __mmask8 __U, __m128i __A) {
  return _mm_castsi128_ph(
      _mm_mask_slli_epi16((__m128i)__S, __U, _mm_cvtepi8_epi16(__A), 8));
}

static __inline__ __m128h __DEFAULT_FN_ATTRS128
_mm_maskz_cvtpbf8_ph(__mmask8 __U, __m128i __A) {
  return _mm_castsi128_ph(_mm_slli_epi16(_mm_maskz_cvtepi8_epi16(__U, __A), 8));
}

static __inline__ __m256h __DEFAULT_FN_ATTRS256 _mm256_cvtpbf8_ph(__m128i __A) {
  return _mm256_castsi256_ph(_mm256_slli_epi16(_mm256_cvtepi8_epi16(__A), 8));
}

static __inline__ __m256h __DEFAULT_FN_ATTRS256
_mm256_mask_cvtpbf8_ph(__m256h __S, __mmask16 __U, __m128i __A) {
  return _mm256_castsi256_ph(
      _mm256_mask_slli_epi16((__m256i)__S, __U, _mm256_cvtepi8_epi16(__A), 8));
}

static __inline__ __m256h __DEFAULT_FN_ATTRS256
_mm256_maskz_cvtpbf8_ph(__mmask16 __U, __m128i __A) {
  return _mm256_castsi256_ph(
      _mm256_slli_epi16(_mm256_maskz_cvtepi8_epi16(__U, __A), 8));
}

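/* Illustrative usage sketch (not part of this header's API): round-trip eight
 * FP16 values through the BF8 (E5M2) format. Variable names are hypothetical.
 *
 *   __m128h fp16 = _mm_set1_ph((_Float16)1.5f);
 *   __m128i bf8 = _mm_cvtneph_pbf8(fp16); // FP16 -> BF8, low 64 bits used
 *   __m128h back = _mm_cvtpbf8_ph(bf8);   // BF8 -> FP16, lossless widening
 */
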
#undef __DEFAULT_FN_ATTRS128
#undef __DEFAULT_FN_ATTRS256

#endif // __AVX10_2CONVERTINTRIN_H
#endif // __SSE2__