/* ===-------- Intrin.h ---------------------------------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */

/* Only include this if we're compiling for the windows platform. */
#ifndef _MSC_VER
#include_next <Intrin.h>
#else

#ifndef __INTRIN_H
#define __INTRIN_H

/* First include the standard intrinsics. */
#if defined(__i386__) || defined(__x86_64__)
#include <x86intrin.h>
#endif

/* For the definition of jmp_buf. */
#if __STDC_HOSTED__
#include <setjmp.h>
#endif

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__))

#ifdef __cplusplus
extern "C" {
#endif

#if defined(__MMX__)
/* And the random ones that aren't in those files. */
__m64 _m_from_float(float);
float _m_to_float(__m64);
#endif

/* Other assorted instruction intrinsics. */
void __addfsbyte(unsigned long, unsigned char);
void __addfsdword(unsigned long, unsigned long);
void __addfsword(unsigned long, unsigned short);
void __code_seg(const char *);
static __inline__
void __cpuid(int[4], int);
static __inline__
void __cpuidex(int[4], int, int);
void __debugbreak(void);
__int64 __emul(int, int);
unsigned __int64 __emulu(unsigned int, unsigned int);
void __cdecl __fastfail(unsigned int);
unsigned int __getcallerseflags(void);
static __inline__
void __halt(void);
unsigned char __inbyte(unsigned short);
void __inbytestring(unsigned short, unsigned char *, unsigned long);
void __incfsbyte(unsigned long);
void __incfsdword(unsigned long);
void __incfsword(unsigned long);
unsigned long __indword(unsigned short);
void __indwordstring(unsigned short, unsigned long *, unsigned long);
void __int2c(void);
void __invlpg(void *);
unsigned short __inword(unsigned short);
void __inwordstring(unsigned short, unsigned short *, unsigned long);
void __lidt(void *);
unsigned __int64 __ll_lshift(unsigned __int64, int);
__int64 __ll_rshift(__int64, int);
void __llwpcb(void *);
unsigned char __lwpins32(unsigned int, unsigned int, unsigned int);
void __lwpval32(unsigned int, unsigned int, unsigned int);
unsigned int __lzcnt(unsigned int);
unsigned short __lzcnt16(unsigned short);
static __inline__
void __movsb(unsigned char *, unsigned char const *, size_t);
static __inline__
void __movsd(unsigned long *, unsigned long const *, size_t);
static __inline__
void __movsw(unsigned short *, unsigned short const *, size_t);
void __nop(void);
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
void __outbyte(unsigned short, unsigned char);
void __outbytestring(unsigned short, unsigned char *, unsigned long);
void __outdword(unsigned short, unsigned long);
void __outdwordstring(unsigned short, unsigned long *, unsigned long);
void __outword(unsigned short, unsigned short);
void __outwordstring(unsigned short, unsigned short *, unsigned long);
static __inline__
unsigned int __popcnt(unsigned int);
static __inline__
unsigned short __popcnt16(unsigned short);
unsigned long __readcr0(void);
unsigned long __readcr2(void);
static __inline__
unsigned long __readcr3(void);
unsigned long __readcr4(void);
unsigned long __readcr8(void);
unsigned int __readdr(unsigned int);
#ifdef __i386__
static __inline__
unsigned char __readfsbyte(unsigned long);
static __inline__
unsigned long __readfsdword(unsigned long);
static __inline__
unsigned __int64 __readfsqword(unsigned long);
static __inline__
unsigned short __readfsword(unsigned long);
#endif
static __inline__
unsigned __int64 __readmsr(unsigned long);
unsigned __int64 __readpmc(unsigned long);
unsigned long __segmentlimit(unsigned long);
void __sidt(void *);
void *__slwpcb(void);
static __inline__
void __stosb(unsigned char *, unsigned char, size_t);
static __inline__
void __stosd(unsigned long *, unsigned long, size_t);
static __inline__
void __stosw(unsigned short *, unsigned short, size_t);
void __svm_clgi(void);
void __svm_invlpga(void *, int);
void __svm_skinit(int);
void __svm_stgi(void);
void __svm_vmload(size_t);
void __svm_vmrun(size_t);
void __svm_vmsave(size_t);
void __ud2(void);
unsigned __int64 __ull_rshift(unsigned __int64, int);
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 *);
void __wbinvd(void);
void __writecr0(unsigned int);
static __inline__
void __writecr3(unsigned int);
void __writecr4(unsigned int);
void __writecr8(unsigned int);
void __writedr(unsigned int, unsigned int);
void __writefsbyte(unsigned long, unsigned char);
void __writefsdword(unsigned long, unsigned long);
void __writefsqword(unsigned long, unsigned __int64);
void __writefsword(unsigned long, unsigned short);
void __writemsr(unsigned long, unsigned __int64);
static __inline__
void *_AddressOfReturnAddress(void);
static __inline__
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _bittest(long const *, long);
static __inline__
unsigned char _bittestandcomplement(long *, long);
static __inline__
unsigned char _bittestandreset(long *, long);
static __inline__
unsigned char _bittestandset(long *, long);
unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
unsigned long __cdecl _byteswap_ulong(unsigned long);
unsigned short __cdecl _byteswap_ushort(unsigned short);
void __cdecl _disable(void);
void __cdecl _enable(void);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
static __inline__
long _InterlockedAnd(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedAnd16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedAnd8(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset(long volatile *, long);
static __inline__
unsigned char _interlockedbittestandset(long volatile *, long);
static __inline__
long __cdecl _InterlockedCompareExchange(long volatile *_Destination,
                                         long _Exchange, long _Comparand);
long _InterlockedCompareExchange_HLEAcquire(long volatile *, long, long);
long _InterlockedCompareExchange_HLERelease(long volatile *, long, long);
static __inline__
short _InterlockedCompareExchange16(short volatile *_Destination,
                                    short _Exchange, short _Comparand);
static __inline__
__int64 _InterlockedCompareExchange64(__int64 volatile *_Destination,
                                      __int64 _Exchange, __int64 _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
static __inline__
char _InterlockedCompareExchange8(char volatile *_Destination, char _Exchange,
                                  char _Comparand);
void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
                                                    void *);
void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
                                                    void *);
static __inline__
long __cdecl _InterlockedDecrement(long volatile *_Addend);
static __inline__
short _InterlockedDecrement16(short volatile *_Addend);
long _InterlockedExchange(long volatile *_Target, long _Value);
static __inline__
short _InterlockedExchange16(short volatile *_Target, short _Value);
static __inline__
char _InterlockedExchange8(char volatile *_Target, char _Value);
static __inline__
long __cdecl _InterlockedExchangeAdd(long volatile *_Addend, long _Value);
long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
static __inline__
short _InterlockedExchangeAdd16(short volatile *_Addend, short _Value);
__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
static __inline__
char _InterlockedExchangeAdd8(char volatile *_Addend, char _Value);
static __inline__
long __cdecl _InterlockedIncrement(long volatile *_Addend);
static __inline__
short _InterlockedIncrement16(short volatile *_Addend);
static __inline__
long _InterlockedOr(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedOr16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedOr8(char volatile *_Value, char _Mask);
static __inline__
long _InterlockedXor(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedXor16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedXor8(char volatile *_Value, char _Mask);
void __cdecl _invpcid(unsigned int, void *);
static __inline__
unsigned long __cdecl _lrotl(unsigned long, int);
static __inline__
unsigned long __cdecl _lrotr(unsigned long, int);
static __inline__
void _ReadBarrier(void);
static __inline__
void _ReadWriteBarrier(void);
static __inline__
void *_ReturnAddress(void);
unsigned int _rorx_u32(unsigned int, const unsigned int);
static __inline__
unsigned int __cdecl _rotl(unsigned int _Value, int _Shift);
static __inline__
unsigned short _rotl16(unsigned short _Value, unsigned char _Shift);
static __inline__
unsigned __int64 __cdecl _rotl64(unsigned __int64 _Value, int _Shift);
static __inline__
unsigned char _rotl8(unsigned char _Value, unsigned char _Shift);
static __inline__
unsigned int __cdecl _rotr(unsigned int _Value, int _Shift);
static __inline__
unsigned short _rotr16(unsigned short _Value, unsigned char _Shift);
static __inline__
unsigned __int64 __cdecl _rotr64(unsigned __int64 _Value, int _Shift);
static __inline__
unsigned char _rotr8(unsigned char _Value, unsigned char _Shift);
int _sarx_i32(int, unsigned int);
#if __STDC_HOSTED__
int __cdecl _setjmp(jmp_buf);
#endif
unsigned int _shlx_u32(unsigned int, unsigned int);
unsigned int _shrx_u32(unsigned int, unsigned int);
void _Store_HLERelease(long volatile *, long);
void _Store64_HLERelease(__int64 volatile *, __int64);
void _StorePointer_HLERelease(void *volatile *, void *);
static __inline__
void _WriteBarrier(void);
unsigned __int32 _xbegin(void);
void _xend(void);
#define _XCR_XFEATURE_ENABLED_MASK 0
static __inline__
unsigned __int64 __cdecl _xgetbv(unsigned int);
void __cdecl _xsetbv(unsigned int, unsigned __int64);

/* These additional intrinsics are turned on in x64/amd64/x86_64 mode. */
#ifdef __x86_64__
void __addgsbyte(unsigned long, unsigned char);
void __addgsdword(unsigned long, unsigned long);
void __addgsqword(unsigned long, unsigned __int64);
void __addgsword(unsigned long, unsigned short);
static __inline__
void __faststorefence(void);
void __incgsbyte(unsigned long);
void __incgsdword(unsigned long);
void __incgsqword(unsigned long);
void __incgsword(unsigned long);
unsigned char __lwpins64(unsigned __int64, unsigned int, unsigned int);
void __lwpval64(unsigned __int64, unsigned int, unsigned int);
unsigned __int64 __lzcnt64(unsigned __int64);
static __inline__
void __movsq(unsigned long long *, unsigned long long const *, size_t);
__int64 __mulh(__int64, __int64);
static __inline__
unsigned __int64 __popcnt64(unsigned __int64);
static __inline__
unsigned char __readgsbyte(unsigned long);
static __inline__
unsigned long __readgsdword(unsigned long);
static __inline__
unsigned __int64 __readgsqword(unsigned long);
static __inline__
unsigned short __readgsword(unsigned long);
unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
                                unsigned __int64 _HighPart,
                                unsigned char _Shift);
unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
                                 unsigned __int64 _HighPart,
                                 unsigned char _Shift);
static __inline__
void __stosq(unsigned __int64 *, unsigned __int64, size_t);
unsigned char __vmx_on(unsigned __int64 *);
unsigned char __vmx_vmclear(unsigned __int64 *);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 *);
unsigned char __vmx_vmread(size_t, size_t *);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t, size_t);
void __writegsbyte(unsigned long, unsigned char);
void __writegsdword(unsigned long, unsigned long);
void __writegsqword(unsigned long, unsigned __int64);
void __writegsword(unsigned long, unsigned short);
static __inline__
unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _bittest64(__int64 const *, __int64);
static __inline__
unsigned char _bittestandcomplement64(__int64 *, __int64);
static __inline__
unsigned char _bittestandreset64(__int64 *, __int64);
static __inline__
unsigned char _bittestandset64(__int64 *, __int64);
unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
static __inline__
unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
                                    long _Comparand);
unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
                                             __int64 _ExchangeHigh,
                                             __int64 _ExchangeLow,
                                             __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
short _InterlockedCompareExchange16_np(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
void *_InterlockedCompareExchangePointer(void *volatile *_Destination,
                                         void *_Exchange, void *_Comparand);
void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
                                            void *_Exchange, void *_Comparand);
static __inline__
__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
static __inline__
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
static __inline__
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
void *_InterlockedExchangePointer(void *volatile *_Target, void *_Value);
static __inline__
__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
static __inline__
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
static __inline__
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
static __inline__
__int64 _mul128(__int64 _Multiplier, __int64 _Multiplicand,
                __int64 *_HighProduct);
unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
__int64 _sarx_i64(__int64, unsigned int);
#if __STDC_HOSTED__
int __cdecl _setjmpex(jmp_buf);
#endif
unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
/*
 * Multiply two 64-bit integers to obtain a 128-bit product.
 * The low half is returned directly and the high half is stored in an out
 * parameter.
 */
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
_umul128(unsigned __int64 _Multiplier, unsigned __int64 _Multiplicand,
         unsigned __int64 *_HighProduct) {
  unsigned __int128 _FullProduct =
      (unsigned __int128)_Multiplier * (unsigned __int128)_Multiplicand;
  *_HighProduct = _FullProduct >> 64;
  return _FullProduct;
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__umulh(unsigned __int64 _Multiplier, unsigned __int64 _Multiplicand) {
  unsigned __int128 _FullProduct =
      (unsigned __int128)_Multiplier * (unsigned __int128)_Multiplicand;
  return _FullProduct >> 64;
}
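/*
 * Usage sketch (illustrative only; the variable names and values below are
 * hypothetical): _umul128 splits the full 128-bit product of two 64-bit
 * operands, while __umulh returns just the high half.
 *
 *   unsigned __int64 __hi, __lo;
 *   __lo = _umul128(0xFFFFFFFFFFFFFFFFULL, 2ULL, &__hi);
 *   // __lo == 0xFFFFFFFFFFFFFFFEULL and __hi == 1 (the product is 2^65 - 2).
 *   unsigned __int64 __hi_only = __umulh(0xFFFFFFFFFFFFFFFFULL, 2ULL);
 *   // __hi_only == 1, the same value stored in __hi above.
 */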

#endif /* __x86_64__ */

/*----------------------------------------------------------------------------*\
|* Multiplication
\*----------------------------------------------------------------------------*/
static __inline__ __int64 __DEFAULT_FN_ATTRS
__emul(int __in1, int __in2) {
  return (__int64)__in1 * (__int64)__in2;
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__emulu(unsigned int __in1, unsigned int __in2) {
  return (unsigned __int64)__in1 * (unsigned __int64)__in2;
}
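/*
 * Usage sketch (illustrative only; values are hypothetical): __emul and
 * __emulu widen their 32-bit operands before multiplying, so the full product
 * is kept even when it does not fit in 32 bits.
 *
 *   __int64 __sprod = __emul(-100000, 100000);
 *   // __sprod == -10000000000, with no 32-bit overflow.
 *   unsigned __int64 __uprod = __emulu(0xFFFFFFFFu, 2u);
 *   // __uprod == 0x1FFFFFFFEULL.
 */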
/*----------------------------------------------------------------------------*\
|* Bit Twiddling
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_rotl8(unsigned char _Value, unsigned char _Shift) {
  _Shift &= 0x7;
  return _Shift ? (_Value << _Shift) | (_Value >> (8 - _Shift)) : _Value;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_rotr8(unsigned char _Value, unsigned char _Shift) {
  _Shift &= 0x7;
  return _Shift ? (_Value >> _Shift) | (_Value << (8 - _Shift)) : _Value;
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
_rotl16(unsigned short _Value, unsigned char _Shift) {
  _Shift &= 0xf;
  return _Shift ? (_Value << _Shift) | (_Value >> (16 - _Shift)) : _Value;
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
_rotr16(unsigned short _Value, unsigned char _Shift) {
  _Shift &= 0xf;
  return _Shift ? (_Value >> _Shift) | (_Value << (16 - _Shift)) : _Value;
}
static __inline__ unsigned int __DEFAULT_FN_ATTRS
_rotl(unsigned int _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
}
static __inline__ unsigned int __DEFAULT_FN_ATTRS
_rotr(unsigned int _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
}
static __inline__ unsigned long __DEFAULT_FN_ATTRS
_lrotl(unsigned long _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
}
static __inline__ unsigned long __DEFAULT_FN_ATTRS
_lrotr(unsigned long _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
_rotl64(unsigned __int64 _Value, int _Shift) {
  _Shift &= 0x3f;
  return _Shift ? (_Value << _Shift) | (_Value >> (64 - _Shift)) : _Value;
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
_rotr64(unsigned __int64 _Value, int _Shift) {
  _Shift &= 0x3f;
  return _Shift ? (_Value >> _Shift) | (_Value << (64 - _Shift)) : _Value;
}
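/*
 * Usage sketch (illustrative only; values are hypothetical): the rotate
 * helpers operate on exactly 8, 16, 32, or 64 bits and mask the shift count
 * to the operand width.
 *
 *   unsigned char __b = _rotl8(0x81, 1);       // 10000001 -> 00000011 == 0x03
 *   unsigned int __w = _rotr(0x00000001u, 1);  // wraps around to 0x80000000
 *   unsigned __int64 __q = _rotl64(1ULL, 63);  // 0x8000000000000000
 *   // Because the count is masked, _rotl(__w, 32) simply returns __w.
 */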
/*----------------------------------------------------------------------------*\
|* Bit Counting and Testing
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_BitScanForward(unsigned long *_Index, unsigned long _Mask) {
  if (!_Mask)
    return 0;
  *_Index = __builtin_ctzl(_Mask);
  return 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_BitScanReverse(unsigned long *_Index, unsigned long _Mask) {
  if (!_Mask)
    return 0;
  *_Index = 31 - __builtin_clzl(_Mask);
  return 1;
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
__popcnt16(unsigned short _Value) {
  return __builtin_popcount((int)_Value);
}
static __inline__ unsigned int __DEFAULT_FN_ATTRS
__popcnt(unsigned int _Value) {
  return __builtin_popcount(_Value);
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest(long const *_BitBase, long _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
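/*
 * Usage sketch (illustrative only; variable names are hypothetical):
 * _BitScanForward and _BitScanReverse locate the lowest and highest set bit.
 * The return value must be checked, because a zero mask returns 0 and leaves
 * the index unwritten.
 *
 *   unsigned long __idx;
 *   unsigned char __found = _BitScanForward(&__idx, 0x50ul);
 *   // __found == 1 and __idx == 4 (lowest set bit of 01010000).
 *   __found = _BitScanReverse(&__idx, 0x50ul);
 *   // __found == 1 and __idx == 6 (highest set bit).
 *   // __popcnt(0x50u) == 2.
 */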
#ifdef __x86_64__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask) {
  if (!_Mask)
    return 0;
  *_Index = __builtin_ctzll(_Mask);
  return 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask) {
  if (!_Mask)
    return 0;
  *_Index = 63 - __builtin_clzll(_Mask);
  return 1;
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__popcnt64(unsigned __int64 _Value) {
  return __builtin_popcountll(_Value);
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest64(__int64 const *_BitBase, __int64 _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset64(__int64 volatile *_BitBase, __int64 _BitPos) {
  long long _PrevVal =
      __atomic_fetch_or(_BitBase, 1ll << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Add
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
}
#endif
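/*
 * Usage sketch (illustrative only; names are hypothetical): the exchange-add
 * family atomically adds a value and returns the value the location held
 * before the addition.
 *
 *   static short volatile __counter = 5;
 *   short __old = _InterlockedExchangeAdd16(&__counter, 3);
 *   // __old == 5 and __counter == 8; the update is a single atomic
 *   // read-modify-write with sequentially consistent ordering.
 */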
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Sub
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeSub8(char volatile *_Subend, char _Value) {
  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeSub16(short volatile *_Subend, short _Value) {
  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeSub(long volatile *_Subend, long _Value) {
  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value) {
  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Increment
\*----------------------------------------------------------------------------*/
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Decrement
\*----------------------------------------------------------------------------*/
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_SEQ_CST);
}
#endif
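/*
 * Usage sketch (illustrative only; names are hypothetical): unlike the
 * exchange-add family, the increment/decrement intrinsics return the new
 * value, which makes them convenient for reference counting.
 *
 *   static short volatile __refcount = 1;
 *   short __now = _InterlockedIncrement16(&__refcount);   // __now == 2
 *   __now = _InterlockedDecrement16(&__refcount);         // __now == 1
 *   // A decrement result of 0 would mean the last reference was released.
 */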
/*----------------------------------------------------------------------------*\
|* Interlocked And
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8(char volatile *_Value, char _Mask) {
  return __atomic_and_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16(short volatile *_Value, short _Mask) {
  return __atomic_and_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd(long volatile *_Value, long _Mask) {
  return __atomic_and_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_and_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Or
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8(char volatile *_Value, char _Mask) {
  return __atomic_or_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16(short volatile *_Value, short _Mask) {
  return __atomic_or_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr(long volatile *_Value, long _Mask) {
  return __atomic_or_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_or_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Xor
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8(char volatile *_Value, char _Mask) {
  return __atomic_xor_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16(short volatile *_Value, short _Mask) {
  return __atomic_xor_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor(long volatile *_Value, long _Mask) {
  return __atomic_xor_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_xor_fetch(_Value, _Mask, __ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
  return _Value;
}
#ifdef __x86_64__
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
  return _Value;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Compare Exchange
\*----------------------------------------------------------------------------*/
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8(char volatile *_Destination,
                             char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16(short volatile *_Destination,
                              short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64(__int64 volatile *_Destination,
                              __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  return _Comparand;
}
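/*
 * Usage sketch (illustrative only; names are hypothetical): a classic
 * compare-and-swap retry loop.  The compare-exchange intrinsics return the
 * value observed at the destination, so the swap succeeded exactly when the
 * returned value equals the comparand that was passed in.
 *
 *   static short volatile __flags = 0;
 *   short __seen, __want;
 *   do {
 *     __seen = __flags;          // snapshot the current value
 *     __want = __seen | 0x0004;  // compute the desired new value
 *   } while (_InterlockedCompareExchange16(&__flags, __want, __seen) != __seen);
 */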
/*----------------------------------------------------------------------------*\
|* Barriers
\*----------------------------------------------------------------------------*/
static __inline__ void __DEFAULT_FN_ATTRS
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadWriteBarrier(void) {
  __atomic_signal_fence(__ATOMIC_SEQ_CST);
}
static __inline__ void __DEFAULT_FN_ATTRS
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadBarrier(void) {
  __atomic_signal_fence(__ATOMIC_SEQ_CST);
}
static __inline__ void __DEFAULT_FN_ATTRS
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_WriteBarrier(void) {
  __atomic_signal_fence(__ATOMIC_SEQ_CST);
}
#ifdef __x86_64__
static __inline__ void __DEFAULT_FN_ATTRS
__faststorefence(void) {
  __atomic_thread_fence(__ATOMIC_SEQ_CST);
}
#endif
/*----------------------------------------------------------------------------*\
|* readfs, readgs
|* (Pointers in address space #256 and #257 are relative to the GS and FS
|* segment registers, respectively.)
\*----------------------------------------------------------------------------*/
#define __ptr_to_addr_space(__addr_space_nbr, __type, __offset)              \
    ((volatile __type __attribute__((__address_space__(__addr_space_nbr)))*) \
    (__offset))

#ifdef __i386__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
__readfsbyte(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned char, __offset);
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readfsqword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned __int64, __offset);
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
__readfsword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned short, __offset);
}
#endif
#ifdef __x86_64__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
__readgsbyte(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned char, __offset);
}
static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readgsdword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned long, __offset);
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readgsqword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned __int64, __offset);
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
__readgsword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned short, __offset);
}
#endif
#undef __ptr_to_addr_space
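/*
 * Usage sketch (illustrative only): on 32-bit Windows the FS segment points
 * at the Thread Environment Block and on 64-bit Windows GS does, so these
 * readers are commonly used to fetch TEB fields.  The offsets below are the
 * documented TEB self-pointer slots; that layout is an assumption about the
 * target OS, not something this header guarantees.
 *
 *   #ifdef __i386__
 *   unsigned long __teb = __readfsdword(0x18);      // TEB self pointer (x86)
 *   #endif
 *   #ifdef __x86_64__
 *   unsigned __int64 __teb = __readgsqword(0x30);   // TEB self pointer (x64)
 *   #endif
 */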
/*----------------------------------------------------------------------------*\
|* movs, stos
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__movsb(unsigned char *__dst, unsigned char const *__src, size_t __n) {
  __asm__("rep movsb" : : "D"(__dst), "S"(__src), "c"(__n)
                        : "%edi", "%esi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsd(unsigned long *__dst, unsigned long const *__src, size_t __n) {
  __asm__("rep movsl" : : "D"(__dst), "S"(__src), "c"(__n)
                        : "%edi", "%esi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsw(unsigned short *__dst, unsigned short const *__src, size_t __n) {
  __asm__("rep movsw" : : "D"(__dst), "S"(__src), "c"(__n)
                        : "%edi", "%esi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosb(unsigned char *__dst, unsigned char __x, size_t __n) {
  __asm__("rep stosb" : : "D"(__dst), "a"(__x), "c"(__n)
                        : "%edi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosd(unsigned long *__dst, unsigned long __x, size_t __n) {
  __asm__("rep stosl" : : "D"(__dst), "a"(__x), "c"(__n)
                        : "%edi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosw(unsigned short *__dst, unsigned short __x, size_t __n) {
  __asm__("rep stosw" : : "D"(__dst), "a"(__x), "c"(__n)
                        : "%edi", "%ecx");
}
#endif
#ifdef __x86_64__
static __inline__ void __DEFAULT_FN_ATTRS
__movsq(unsigned long long *__dst, unsigned long long const *__src, size_t __n) {
  __asm__("rep movsq" : : "D"(__dst), "S"(__src), "c"(__n)
                        : "%edi", "%esi", "%ecx");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosq(unsigned __int64 *__dst, unsigned __int64 __x, size_t __n) {
  __asm__("rep stosq" : : "D"(__dst), "a"(__x), "c"(__n)
                        : "%edi", "%ecx");
}
#endif
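/*
 * Usage sketch (illustrative only; buffers and sizes are hypothetical): the
 * rep movs/stos wrappers behave like element-wise memcpy/memset, and the
 * count is given in elements of the named width, not in bytes.
 *
 *   unsigned char __bytes[64];
 *   unsigned long __dwords[16];
 *   __stosb(__bytes, 0xCC, sizeof(__bytes));                  // 64 bytes of 0xCC
 *   __stosd(__dwords, 0xDEADBEEFul, 16);                      // 16 dwords
 *   __movsb(__bytes, (unsigned char const *)__dwords, 32);    // copy 32 bytes
 */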

/*----------------------------------------------------------------------------*\
|* Misc
\*----------------------------------------------------------------------------*/
static __inline__ void * __DEFAULT_FN_ATTRS
_AddressOfReturnAddress(void) {
  return (void*)((char*)__builtin_frame_address(0) + sizeof(void*));
}
static __inline__ void * __DEFAULT_FN_ATTRS
_ReturnAddress(void) {
  return __builtin_return_address(0);
}
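/*
 * Usage sketch (illustrative only; the function below is hypothetical):
 * _ReturnAddress yields the address the current function will return to, and
 * _AddressOfReturnAddress yields the stack slot that holds it.
 *
 *   static void __log_caller(void) {
 *     void *__ret = _ReturnAddress();
 *     void **__slot = (void **)_AddressOfReturnAddress();
 *     // While __log_caller is active, *__slot holds the same address as
 *     // __ret (assuming a conventional frame layout).
 *   }
 */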
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__cpuid(int __info[4], int __level) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b" (__info[1]), "=c"(__info[2]), "=d"(__info[3])
                   : "a"(__level));
}
static __inline__ void __DEFAULT_FN_ATTRS
__cpuidex(int __info[4], int __level, int __ecx) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b" (__info[1]), "=c"(__info[2]), "=d"(__info[3])
                   : "a"(__level), "c"(__ecx));
}
static __inline__ unsigned __int64 __cdecl __DEFAULT_FN_ATTRS
_xgetbv(unsigned int __xcr_no) {
  unsigned int __eax, __edx;
  __asm__ ("xgetbv" : "=a" (__eax), "=d" (__edx) : "c" (__xcr_no));
  return ((unsigned __int64)__edx << 32) | __eax;
}
static __inline__ void __DEFAULT_FN_ATTRS
__halt(void) {
  __asm__ volatile ("hlt");
}
#endif
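/*
 * Usage sketch (illustrative only; names are hypothetical): CPUID leaf 0
 * reports the highest supported leaf in EAX and the vendor string in EBX,
 * EDX, ECX, which land in __info[1], __info[3], __info[2] with this layout.
 *
 *   int __info[4];
 *   char __vendor[13];
 *   __cpuid(__info, 0);
 *   __builtin_memcpy(&__vendor[0], &__info[1], 4);   // EBX
 *   __builtin_memcpy(&__vendor[4], &__info[3], 4);   // EDX
 *   __builtin_memcpy(&__vendor[8], &__info[2], 4);   // ECX
 *   __vendor[12] = '\0';   // e.g. "GenuineIntel" or "AuthenticAMD"
 */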

/*----------------------------------------------------------------------------*\
|* Privileged intrinsics
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readmsr(unsigned long __register) {
  // Loads the contents of a 64-bit model specific register (MSR) specified in
  // the ECX register into registers EDX:EAX. The EDX register is loaded with
  // the high-order 32 bits of the MSR and the EAX register is loaded with the
  // low-order 32 bits. If less than 64 bits are implemented in the MSR being
  // read, the values returned to EDX:EAX in unimplemented bit locations are
  // undefined.
  unsigned long __edx;
  unsigned long __eax;
  __asm__ ("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}

static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readcr3(void) {
  unsigned long __cr3_val;
  __asm__ __volatile__ ("mov %%cr3, %0" : "=q"(__cr3_val) : : "memory");
  return __cr3_val;
}

static __inline__ void __DEFAULT_FN_ATTRS
__writecr3(unsigned int __cr3_val) {
  __asm__ ("mov %0, %%cr3" : : "q"(__cr3_val) : "memory");
}
#endif

#ifdef __cplusplus
}
#endif

#undef __DEFAULT_FN_ATTRS

#endif /* __INTRIN_H */
#endif /* _MSC_VER */