1 /**
2  * This file has no copyright assigned and is placed in the Public Domain.
3  * This file is part of the mingw-w64 runtime package.
4  * No warranty is given; refer to the file DISCLAIMER.PD within this package.
5  */
6 
7 /* There are 3 separate ways this file is intended to be used:
8 
9    1) Included from intrin.h.  In this case, all intrinsics in this file get declarations and
10       implementations.  No special #defines are needed for this case.
11 
12    2) Included from the library versions of these functions (ie mingw-w64-crt\intrincs\*.c).  All
13       intrinsics in this file must also be included in the library.  In this case, only the
14       specific functions requested will get defined, and they will not be defined as inline.  If
15       you have followed the instructions (below) for adding functions to this file, then all you
16       need to have in the .c file is the following:
17 
18       #define __INTRINSIC_ONLYSPECIAL
19       #define __INTRINSIC_SPECIAL___stosb // Causes code generation in intrin-impl.h
20 
21       #include <intrin.h>
22 
23    3) Included from various platform sdk headers.  Some platform sdk headers (such as winnt.h)
24       define a subset of intrinsics.  To avoid potential conflicts, this file is designed to
25       allow for specific subsets of functions to be defined.  This is done by defining the
26       appropriate variable before including this file:
27 
28       #define __INTRINSIC_GROUP_WINNT
29       #include <psdk_inc/intrin-impl.h>
30 
31    In all cases, it is acceptable to include this file multiple times in any order (ie include
32    winnt.h to get its subset, then include intrin.h to get everything, or vice versa).
33 
34    See also the comments at the top of intrin.h.
35 */
36 
37 /* To add an implementation for a new intrinsic to this file, you should comment out the current prototype in intrin.h.
38    If the function you are adding is not in intrin.h, you should not be adding it to this file.  This file is only
39    for MSVC intrinsics.
40 
41    Make sure you put your definition in the right section (x86 vs x64), and use this outline when adding definitions
42    to this file:
43 
44 #if __INTRINSIC_PROLOG(__int2c)
45 
46 <prototype goes here>
47 
48 __INTRINSICS_USEINLINE
49 <code goes here>
50 
51 #define __INTRINSIC_DEFINED___int2c
52 #endif
53 */
54 
55 /* Note that there is no file-wide #if to prevent intrin-impl.h from being
56    included multiple times.  This is because this file might be included multiple
57    times to define various subsets of the functions it contains. */
58 
59 /* However we do check for __MINGW_INTRIN_INLINE.  In theory this means we
60    can work with other compilers.  */
61 
62 #ifdef __MINGW_INTRIN_INLINE
63 
/* Clang has support for MSVC builtins, GCC doesn't.  Save the current
   definition of __has_builtin (presumably restored with pop_macro later in
   this header — outside this view) and supply a fallback that answers
   "no builtin" for compilers without __has_builtin support. */
#pragma push_macro("__has_builtin")
#ifndef __has_builtin
  #define __has_builtin(x) 0
#endif
69 
70 /* These macros are used by the routines below.  While this file may be included
71    multiple times, these macros only need to be defined once. */
72 #ifndef _INTRIN_MAC_
73 #define _INTRIN_MAC_
74 
/* GCC v6 added support for outputting flags ("=@cc<cond>" asm constraints).
   This allows better code to be produced for a number of intrinsics. */
#ifndef __GCC_ASM_FLAG_OUTPUTS__
/* Fallback: materialize the carry flag into a byte register/memory operand
   by appending an explicit SETC to the asm template, and clobber "cc". */
#define __FLAGCONSTRAINT "=qm"
#define __FLAGSET "\n\tsetc %[old]"
#define __FLAGCLOBBER1 , "cc"
#define __FLAGCLOBBER2 "cc"
#else
/* Flag outputs available: read the carry flag directly via "=@ccc";
   no SETC instruction and no explicit "cc" clobber are needed. */
#define __FLAGCONSTRAINT "=@ccc"
#define __FLAGSET
#define __FLAGCLOBBER1
#define __FLAGCLOBBER2
#endif
88 
/* This macro is used by __stosb, __stosw, __stosd, __stosq */

/* Parameters: (FunctionName, DataType, InstructionSize)
   FunctionName: Any valid function name
   DataType: BYTE, WORD, DWORD or DWORD64
   InstructionSize: b|b, w|w, l|d, q|q (AT&T|Intel suffix pair, spliced
   into the "rep stos{...}" multi-dialect template) */

/* While we don't need the output values for Dest or Count, we
   must still inform the compiler the asm changes them ("+D"/"+c"). */
#define __buildstos(x, y, z) void x(y *Dest, y Data, size_t Count) \
{ \
   __asm__ __volatile__ ("rep stos{" z "}" \
      : "+D" (Dest), "+c" (Count) \
      : [Data] "a" (Data) \
      : "memory"); \
}
105 
/* This macro is used by InterlockedAnd, InterlockedOr, InterlockedXor, InterlockedAnd64, InterlockedOr64, InterlockedXor64 */

/* Parameters: (FunctionName, DataType, Operator)
   FunctionName: Any valid function name
   DataType: __LONG32 or __int64
   Operator: One of xor, or, and (token-pasted onto __sync_fetch_and_,
   so the intrinsic returns the PREVIOUS value, per the GCC builtin). */
#define __buildlogicali(x, y, o) y x(volatile y *Destination, y Value) \
{ \
    return __sync_fetch_and_ ## o(Destination, Value); \
}
116 
/* This macro is used by InterlockedBitTestAndSet, InterlockedBitTestAndReset, InterlockedBitTestAndComplement,
   InterlockedBitTestAndSet64, InterlockedBitTestAndReset64, InterlockedBitTestAndComplement64
   _interlockedbittestandset, _interlockedbittestandreset, _interlockedbittestandcomplement
   _interlockedbittestandset64, _interlockedbittestandreset64, _interlockedbittestandcomplement64 */

/* Parameters: (FunctionName, DataType, AsmCode, OffsetConstraint)
   FunctionName: Any valid function name
   DataType: __LONG32 or __int64
   AsmCode: on x86/x64 the full locked bit-test template; on ARM/AArch64 a
            3-operand ALU mnemonic applied to the bit mask (supplied by callers)
   OffsetConstraint: either "I" for 32bit data types or "J" for 64. */
#if defined(__x86_64__) || defined(_AMD64_) || defined(__i386__) || defined(_X86_)
/* x86/x64: the previous bit value lands in the carry flag; it is retrieved
   through __FLAGCONSTRAINT/__FLAGSET (SETC or "=@ccc", see above). */
#define __buildbittesti(x, y, z, a) unsigned char x(y volatile *Base, y Offset) \
{ \
   unsigned char old; \
   __asm__ __volatile__ (z \
      : [old] __FLAGCONSTRAINT (old), [Base] "+m" (*Base) \
      : [Offset] a "r" (Offset) \
      : "memory" __FLAGCLOBBER1); \
   return old; \
}
#elif defined(__arm__) || defined(_ARM_)
/* ARM: emulate the atomic bit operation with an ldrex/strex retry loop
   bracketed by dmb barriers; the old bit is extracted from the loaded word. */
#define __buildbittesti(x, y, z, a) unsigned char x(y volatile *Base, y Offset) \
{ \
   unsigned int old, tmp1, tmp2; \
   unsigned int bit = 1 << Offset; \
   __asm__ __volatile__ ("dmb	sy\n\t" \
        "1: ldrex	%[old], %[Base]\n\t" \
        "mov	%[tmp1], %[old]\n\t" \
        z "	%[tmp1], %[tmp1], %[bit]\n\t" \
        "strex	%[tmp2], %[tmp1], %[Base]\n\t" \
        "cmp	%[tmp2], #0\n\t" \
        "bne	1b\n\t" \
        "dmb	sy" \
      : [old] "=&r" (old), [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [Base] "+m" (*Base) \
      : [bit] a "r" (bit) \
      : "memory", "cc"); \
   return (old >> Offset) & 1; \
}
#elif defined(__aarch64__) || defined(_ARM64_)
/* AArch64: same exclusive-load/store loop using 32-bit (w-register) operands. */
#define __buildbittesti(x, y, z, a) unsigned char x(y volatile *Base, y Offset) \
{ \
   unsigned int old, tmp1, tmp2; \
   unsigned int bit = 1 << Offset; \
   __asm__ __volatile__ ("dmb	sy\n\t" \
        "1: ldxr	%w[old], %[Base]\n\t" \
        "mov	%w[tmp1], %w[old]\n\t" \
        z "	%w[tmp1], %w[tmp1], %w[bit]\n\t" \
        "stxr	%w[tmp2], %w[tmp1], %[Base]\n\t" \
        "cmp	%w[tmp2], #0\n\t" \
        "b.ne	1b\n\t" \
        "dmb	sy" \
      : [old] "=&r" (old), [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [Base] "+m" (*Base) \
      : [bit] a "r" (bit) \
      : "memory", "cc"); \
   return (old >> Offset) & 1; \
}
/* AArch64 only: 64-bit variant — 64-bit data/mask, 32-bit strex status. */
#define __buildbittesti64(x, y, z, a) unsigned char x(y volatile *Base, y Offset) \
{ \
   unsigned __int64 old, tmp1; \
   unsigned int tmp2; \
   unsigned __int64 bit = 1ULL << Offset; \
   __asm__ __volatile__ ("dmb	sy\n\t" \
        "1: ldxr	%[old], %[Base]\n\t" \
        "mov	%[tmp1], %[old]\n\t" \
        z "	%[tmp1], %[tmp1], %[bit]\n\t" \
        "stxr	%w[tmp2], %[tmp1], %[Base]\n\t" \
        "cmp	%w[tmp2], #0\n\t" \
        "b.ne	1b\n\t" \
        "dmb	sy" \
      : [old] "=&r" (old), [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2), [Base] "+m" (*Base) \
      : [bit] a "r" (bit) \
      : "memory", "cc"); \
   return (old >> Offset) & 1; \
}
#endif /* defined(__x86_64__) || defined(_AMD64_) || defined(__i386__) || defined(_X86_) */
191 
/* This macro is used by YieldProcessor when compiling x86 w/o SSE2.
It generates the same opcodes as _mm_pause ("rep nop" encodes PAUSE). */
#define __buildpause() __asm__ __volatile__("rep nop")

/* This macro is used by DbgRaiseAssertionFailure and __int2c

Parameters: (IntNum)
IntNum: Interrupt number in hex (stringized into the INT immediate) */
#define __buildint(a) __asm__ __volatile__("int {$}" #a :)

/* This macro is used by MemoryBarrier when compiling x86 w/o SSE2.
Note that on i386, xchg performs an implicit lock, so a locked exchange
with a dummy stack byte serves as a full memory barrier. */
#define __buildmemorybarrier() \
{ \
unsigned char Barrier; \
__asm__ __volatile__("xchg{b %%| }al, %0" :"=m" (Barrier) : /* no inputs */ : "eax", "memory"); \
}
209 
/* This macro is used by __readfsbyte, __readfsword, __readfsdword
                         __readgsbyte, __readgsword, __readgsdword, __readgsqword

Parameters: (FunctionName, DataType, Segment, Type)
   FunctionName: Any valid function name
   DataType: char, short, __LONG32 or __int64
   Segment: fs or gs
   Type: b, w, l, q (AT&T operand-size suffix)
   */

#define __buildreadseg(x, y, z, a) y x(unsigned __LONG32 Offset) { \
    y ret; \
    __asm__ ("mov{" a " %%" z ":%[offset], %[ret] | %[ret], %%" z ":%[offset]}" \
        : [ret] "=r" (ret) \
        : [offset] "m" ((*(y *) (size_t) Offset))); \
    return ret; \
}
227 
/* This macro is used by __writefsbyte, __writefsword, __writefsdword
                         __writegsbyte, __writegsword, __writegsdword, __writegsqword

Parameters: (FunctionName, DataType, Segment, Type)
   FunctionName: Any valid function name
   DataType: char, short, __LONG32 or __int64
   Segment: fs or gs
   Type: b, w, l, q (AT&T operand-size suffix)
   */

#define __buildwriteseg(x, y, z, a) void x(unsigned __LONG32 Offset, y Data) { \
    __asm__ ("mov{" a " %[Data], %%" z ":%[offset] | %%" z ":%[offset], %[Data]}" \
        : [offset] "=m" ((*(y *) (size_t) Offset)) \
        : [Data] "ri" (Data)); \
}
243 
/* This macro is used by _BitScanForward, _BitScanForward64, _BitScanReverse _BitScanReverse64

Parameters: (FunctionName, DataType, Statement)
   FunctionName: Any valid function name
   DataType: unsigned __LONG32 or unsigned __int64
   Statement: full asm template using BSF or BSR */

/* GCC v6 added support for outputting flags.  This allows better code to be
   produced for a number of intrinsics. */
#ifndef __GCC_ASM_FLAG_OUTPUTS__
/* No flag outputs: derive the "bit found" result from the input mask
   (Mask != 0) instead of reading ZF set by BSF/BSR. */
#define __buildbitscan(x, y, z) unsigned char x(unsigned __LONG32 *Index, y Mask) \
{ \
   y n; \
   __asm__ (z \
      : [Index] "=r" (n) \
      : [Mask] "r" (Mask) \
      : "cc"); \
   *Index = n; \
   return Mask!=0; \
}
#else
/* Flag outputs: read "not zero" directly from the flags via "=@ccnz". */
#define __buildbitscan(x, y, z) unsigned char x(unsigned __LONG32 *Index, y Mask) \
{ \
   y n; \
   unsigned char old; \
   __asm__ (z \
      : "=@ccnz" (old), [Index] "=r" (n) \
      : [Mask] "r" (Mask)); \
   *Index = n; \
   return old; \
}
#endif
276 
/* This macro is used by _bittest & _bittest64

Parameters: (FunctionName, DataType, Type, OffsetConstraint)
   FunctionName: Any valid function name
   DataType: __LONG32 or __int64
   Type: l, q (AT&T operand-size suffix for BT)
   OffsetConstraint: either "I" for 32bit data types or "J" for 64.

   The tested bit is returned via the carry flag
   (__FLAGCONSTRAINT/__FLAGSET, see above). */
#define __buildbittest(x, y, z, a) unsigned char x(const y *Base, y Offset) \
{ \
   unsigned char old; \
   __asm__ ("bt{" z " %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET \
      : [old] __FLAGCONSTRAINT (old) \
      : [Offset] a "r" (Offset), [Base] "rm" (*Base) \
      : __FLAGCLOBBER2); \
   return old; \
}
295 
/* This macro is used by _bittestandset, _bittestandreset, _bittestandcomplement,
   _bittestandset64, _bittestandreset64, _bittestandcomplement64

Parameters: (FunctionName, DataType, Statement, OffsetConstraint, Type)
   FunctionName: Any valid function name
   DataType: __LONG32 or __int64
   Statement: asm statement (bts, btr, btc)
   OffsetConstraint: either "I" for 32bit data types or "J" for 64.
   Type: l, q (AT&T operand-size suffix)

   Non-atomic (no lock prefix); the previous bit value comes back in CF. */
#define __buildbittestand(x, y, z, a, b) unsigned char x(y *Base, y Offset) \
{ \
   unsigned char old; \
   __asm__ (z "{" b " %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET \
      : [old] __FLAGCONSTRAINT (old), [Base] "+rm" (*Base) \
      : [Offset] a "r" (Offset) \
      : __FLAGCLOBBER2); \
   return old; \
}
315 
/* This macro is used by __inbyte, __inword, __indword

Parameters: (FunctionName, DataType, Type)
   FunctionName: Any valid function name
   DataType: unsigned char, unsigned short, unsigned __LONG32
   Type: b, w, l (AT&T operand-size suffix for IN)
   */
#define __build_inport(x, y, z) y x(unsigned short Port) { \
   y value; \
      __asm__ __volatile__ ("in{" z " %w[port],%[value]| %[value],%w[port]}" \
          : [value] "=a" (value) \
          : [port] "Nd" (Port)); \
      return value; \
   }
330 
/* This macro is used by __outbyte, __outword, __outdword

Parameters: (FunctionName, DataType, Type)
   FunctionName: Any valid function name
   DataType: unsigned char, unsigned short, unsigned __LONG32
   Type: b, w, l (AT&T operand-size suffix for OUT)
   */
#define __build_outport(x, y, z) void x(unsigned short Port, y Data) { \
      __asm__ __volatile__ ("out{" z " %[data],%w[port]| %w[port],%[data]}" \
          : \
          : [data] "a" (Data), [port] "Nd" (Port)); \
   }
343 
/* This macro is used by __inbytestring, __inwordstring, __indwordstring

Parameters: (FunctionName, DataType, InstructionSizeAtt, InstructionSizeIntel)
   FunctionName: Any valid function name
   DataType: unsigned char, unsigned short, unsigned __LONG32
   InstructionSizeAtt: b, w, l
   InstructionSizeIntel: b, w, d (not b,w,l)

   CLD first so the string operation walks forward regardless of DF. */
#define __build_inportstring(x, y, z, a) void x(unsigned short Port, y *Buffer, unsigned __LONG32 Count) { \
   __asm__ __volatile__ ("cld ; rep ins{" z "|" a "}" \
      : "=D" (Buffer), "=c" (Count) \
      : "d"(Port), "0"(Buffer), "1" (Count) \
      : "memory"); \
   }
358 
/* This macro is used by __outbytestring, __outwordstring, __outdwordstring

Parameters: (FunctionName, DataType, InstructionSizeAtt, InstructionSizeIntel)
   FunctionName: Any valid function name
   DataType: unsigned char, unsigned short, unsigned __LONG32
   InstructionSizeAtt: b, w, l
   InstructionSizeIntel: b, w, d (not b,w,l)

   CLD first so the string operation walks forward regardless of DF. */
#define __build_outportstring(x, y, z, a) void x(unsigned short Port, y *Buffer, unsigned __LONG32 Count) { \
   __asm__ __volatile__ ("cld ; rep outs{" z "|" a "}" \
      : "=S" (Buffer), "=c" (Count) \
      : "d"(Port), "0"(Buffer), "1" (Count) \
      : "memory"); \
  }
374 
/* This macro is used by __readcr0, __readcr2, __readcr3, __readcr4, __readcr8

Parameters: (FunctionName, DataType, RegisterNumber)
   FunctionName: Any valid function name
   DataType: unsigned __LONG32, unsigned __int64
   RegisterNumber: 0, 2, 3, 4, 8 (spliced into the %%crN register name)

   */
#define __build_readcr(x, y, z) y x(void) { \
      y value; \
      __asm__ __volatile__ ("mov {%%cr" z ", %[value] | %[value], %%cr" z "}" \
          : [value] "=q" (value)); \
      return value; \
  }
389 
/* This macro is used by __writecr0, __writecr2, __writecr3, __writecr4, __writecr8

Parameters: (FunctionName, DataType, RegisterNumber)
   FunctionName: Any valid function name
   DataType: unsigned __LONG32, unsigned __int64
   RegisterNumber: 0, 2, 3, 4, 8 (spliced into the %%crN register name)

   */
#define __build_writecr(x, y, z) void x(y Data) { \
   __asm__ __volatile__ ("mov {%[Data], %%cr" z "|%%cr" z ", %[Data]}" \
       : \
       : [Data] "q" (Data) \
       : "memory"); \
   }
404 
/* This macro is used by __movsb, __movsd, __movsq, __movsw

Parameters: (FunctionName, DataType, InstructionSize)
   FunctionName: Any valid function name
   DataType: unsigned char, unsigned short, unsigned __LONG32, unsigned __int64
   InstructionSize: b, w, d, q (suffix appended to "rep movs")

   */
#define __buildmov(x, y, z) void x(y *Destination, y const *Source, size_t Count) \
{ \
  __asm__ __volatile__ ( \
    "rep movs" z \
       : "=D" (Destination), "=S" (Source), "=c" (Count) \
       : "0" (Destination), "1" (Source), "2" (Count) \
       : "memory"); \
}
421 
422 #endif /* _INTRIN_MAC_ */
423 
/* The Barrier functions can never be in the library.  Since gcc only
supports ReadWriteBarrier, map all 3 to do the same.  This is a
compiler-level barrier only (empty asm with a "memory" clobber); it emits
no fence instruction. */
#ifndef _ReadWriteBarrier

#define _ReadWriteBarrier() __asm__ __volatile__ ("" ::: "memory")
#define _ReadBarrier _ReadWriteBarrier
#define _WriteBarrier _ReadWriteBarrier

#endif
433 
/* The logic for this macro is:
   if the function is not yet defined AND
   (
       (if we are not just defining special OR
           (we are defining special AND this is one of the ones we are defining)
       )
   )

   NOTE(review): the expansion contains defined() operators, so this macro is
   only meaningful in #if/#elif; relying on defined() produced by macro
   expansion is not strictly portable C, but behaves as intended on GCC/Clang. */
#define __INTRINSIC_PROLOG(name) (!defined(__INTRINSIC_DEFINED_ ## name)) && ((!defined (__INTRINSIC_ONLYSPECIAL)) || (defined (__INTRINSIC_ONLYSPECIAL) && defined(__INTRINSIC_SPECIAL_ ## name)))

#ifdef __INTRINSIC_ONLYSPECIAL
/* Library build (case 2): emit real, non-inline definitions. */
#define __INTRINSICS_USEINLINE
#else
/* Header use (cases 1 and 3): emit inline definitions. */
#define __INTRINSICS_USEINLINE __MINGW_INTRIN_INLINE
#endif
449 
450 /* Normally __INTRINSIC_ONLYSPECIAL is used to indicate that we are
451    being included in the library version of the intrinsic (case 2).  However,
452    that really only affects the definition of __INTRINSICS_USEINLINE.
453    So here we are letting it serve an additional purpose of only defining
454    the intrinsics for a certain file (case 3).  For example, to create the
455    intrinsics for the functions in winnt.h, define __INTRINSIC_GROUP_WINNT.
456 
457    Note that this file can be included multiple times, and as a result
458    there can be overlap (definitions that appear in more than one
459    file).  This is handled by __INTRINSIC_DEFINED_*
460 
461    If no groups are defined (such as what happens when including intrin.h),
462    all intrinsics are defined.   */
463 
464 /* If __INTRINSIC_ONLYSPECIAL is defined at this point, we are processing case 2.  In
465    that case, don't go looking for groups */
466 #ifndef __INTRINSIC_ONLYSPECIAL
467 
#ifdef __INTRINSIC_GROUP_WINNT
#undef __INTRINSIC_GROUP_WINNT /* Remove this for efficiency if intrin-impl.h is included again */

/* Note that this gets undefined at the end of this file.  Selecting the
   WINNT group means: define only the intrinsics listed below. */
#define __INTRINSIC_ONLYSPECIAL

#define __INTRINSIC_SPECIAL___faststorefence
#define __INTRINSIC_SPECIAL___int2c
#define __INTRINSIC_SPECIAL___stosb
#define __INTRINSIC_SPECIAL___stosd
#define __INTRINSIC_SPECIAL___stosq
#define __INTRINSIC_SPECIAL___stosw
#define __INTRINSIC_SPECIAL__InterlockedAnd
#define __INTRINSIC_SPECIAL__InterlockedAnd64
#define __INTRINSIC_SPECIAL__interlockedbittestandcomplement
#define __INTRINSIC_SPECIAL__interlockedbittestandcomplement64
#define __INTRINSIC_SPECIAL__interlockedbittestandreset
#define __INTRINSIC_SPECIAL__interlockedbittestandreset64
#define __INTRINSIC_SPECIAL__interlockedbittestandset
#define __INTRINSIC_SPECIAL__interlockedbittestandset64
#define __INTRINSIC_SPECIAL__InterlockedOr
#define __INTRINSIC_SPECIAL__InterlockedOr64
#define __INTRINSIC_SPECIAL__InterlockedXor
#define __INTRINSIC_SPECIAL__InterlockedXor64
#define __INTRINSIC_SPECIAL_InterlockedBitTestAndComplement
#define __INTRINSIC_SPECIAL_InterlockedBitTestAndComplement64
#define __INTRINSIC_SPECIAL_InterlockedBitTestAndReset
#define __INTRINSIC_SPECIAL_InterlockedBitTestAndReset64
#define __INTRINSIC_SPECIAL_InterlockedBitTestAndSet
#define __INTRINSIC_SPECIAL_InterlockedBitTestAndSet64
#define __INTRINSIC_SPECIAL__InterlockedIncrement16
#define __INTRINSIC_SPECIAL__InterlockedDecrement16
#define __INTRINSIC_SPECIAL__InterlockedCompareExchange16
#define __INTRINSIC_SPECIAL__InterlockedIncrement
#define __INTRINSIC_SPECIAL__InterlockedDecrement
#define __INTRINSIC_SPECIAL__InterlockedAdd
#define __INTRINSIC_SPECIAL__InterlockedExchange
#define __INTRINSIC_SPECIAL__InterlockedExchangeAdd
#define __INTRINSIC_SPECIAL__InterlockedCompareExchange
#define __INTRINSIC_SPECIAL__InterlockedIncrement64
#define __INTRINSIC_SPECIAL__InterlockedDecrement64
#define __INTRINSIC_SPECIAL__InterlockedAdd64
#define __INTRINSIC_SPECIAL__InterlockedExchangeAdd64
#define __INTRINSIC_SPECIAL__InterlockedExchange64
#define __INTRINSIC_SPECIAL__InterlockedCompareExchange64
#define __INTRINSIC_SPECIAL__InterlockedExchangePointer
#define __INTRINSIC_SPECIAL__InterlockedCompareExchangePointer
#define __INTRINSIC_SPECIAL___readgsbyte
#define __INTRINSIC_SPECIAL___readgsword
#define __INTRINSIC_SPECIAL___readgsdword
#define __INTRINSIC_SPECIAL___readgsqword
#define __INTRINSIC_SPECIAL___writegsbyte
#define __INTRINSIC_SPECIAL___writegsword
#define __INTRINSIC_SPECIAL___writegsdword
#define __INTRINSIC_SPECIAL___writegsqword
#define __INTRINSIC_SPECIAL___readfsbyte
#define __INTRINSIC_SPECIAL___readfsword
#define __INTRINSIC_SPECIAL___readfsdword
#define __INTRINSIC_SPECIAL___writefsbyte
#define __INTRINSIC_SPECIAL___writefsword
#define __INTRINSIC_SPECIAL___writefsdword
#define __INTRINSIC_SPECIAL__BitScanForward
#define __INTRINSIC_SPECIAL__BitScanForward64
#define __INTRINSIC_SPECIAL__BitScanReverse
#define __INTRINSIC_SPECIAL__BitScanReverse64
#define __INTRINSIC_SPECIAL__bittest
#define __INTRINSIC_SPECIAL__bittestandset
#define __INTRINSIC_SPECIAL__bittestandreset
#define __INTRINSIC_SPECIAL__bittestandcomplement
#define __INTRINSIC_SPECIAL__bittest64
#define __INTRINSIC_SPECIAL__bittestandset64
#define __INTRINSIC_SPECIAL__bittestandreset64
#define __INTRINSIC_SPECIAL__bittestandcomplement64
#define __INTRINSIC_SPECIAL___movsb
#define __INTRINSIC_SPECIAL___movsw
#define __INTRINSIC_SPECIAL___movsd
#define __INTRINSIC_SPECIAL___movsq

#endif /* __INTRINSIC_GROUP_WINNT */
547 
#ifdef __INTRINSIC_GROUP_WINBASE
#undef __INTRINSIC_GROUP_WINBASE /* Remove this for efficiency if intrin-impl.h is included again */

/* Note that this gets undefined at the end of this file.  Selecting the
   WINBASE group means: define only the intrinsics listed below. */
#define __INTRINSIC_ONLYSPECIAL

#define __INTRINSIC_SPECIAL__InterlockedIncrement
#define __INTRINSIC_SPECIAL__InterlockedDecrement
#define __INTRINSIC_SPECIAL__InterlockedAdd
#define __INTRINSIC_SPECIAL__InterlockedExchange
#define __INTRINSIC_SPECIAL__InterlockedExchangeAdd
#define __INTRINSIC_SPECIAL__InterlockedCompareExchange
#define __INTRINSIC_SPECIAL__InterlockedCompareExchangePointer
#define __INTRINSIC_SPECIAL__InterlockedExchangePointer
#define __INTRINSIC_SPECIAL__InterlockedAnd64
#define __INTRINSIC_SPECIAL__InterlockedOr64
#define __INTRINSIC_SPECIAL__InterlockedXor64
#define __INTRINSIC_SPECIAL__InterlockedIncrement64
#define __INTRINSIC_SPECIAL__InterlockedDecrement64
#define __INTRINSIC_SPECIAL__InterlockedAdd64
#define __INTRINSIC_SPECIAL__InterlockedExchange64
#define __INTRINSIC_SPECIAL__InterlockedExchangeAdd64
#define __INTRINSIC_SPECIAL__InterlockedCompareExchange64

#endif /* __INTRINSIC_GROUP_WINBASE */
573 
574 /* To add an additional group, put the #ifdef and definitions here. */
575 
576 #endif /* __INTRINSIC_ONLYSPECIAL */
577 
578 #ifdef __cplusplus
579 extern "C" {
580 #endif
581 
582 /* Before 4.9.2, ia32intrin.h had broken versions of these. */
583 #undef _lrotl
584 #undef _lrotr
585 
586 #if __INTRINSIC_PROLOG(_lrotl)
587 unsigned long _lrotl(unsigned long __X, int __C);
588 #if !__has_builtin(_lrotl)
589 __INTRINSICS_USEINLINE
_lrotl(unsigned long __X,int __C)590 unsigned long _lrotl(unsigned long __X, int __C)
591 {
592   return (__X << __C) | (__X >> ((sizeof(long) * 8) - __C));
593 }
594 #endif
595 #define __INTRINSIC_DEFINED__lrotl
596 #endif /* __INTRINSIC_PROLOG */
597 
598 #if __INTRINSIC_PROLOG(_lrotr)
599 unsigned long _lrotr(unsigned long __X, int __C);
600 #if !__has_builtin(_lrotr)
601 __INTRINSICS_USEINLINE
_lrotr(unsigned long __X,int __C)602 unsigned long _lrotr(unsigned long __X, int __C)
603 {
604   return (__X >> __C) | (__X << ((sizeof(long) * 8) - __C));
605 }
606 #endif
607 #define __INTRINSIC_DEFINED__lrotr
608 #endif /* __INTRINSIC_PROLOG */
609 
610 #if defined(__x86_64__) || defined(_AMD64_)
611 
612 #if __INTRINSIC_PROLOG(__faststorefence)
613 void __faststorefence(void);
614 #if !__has_builtin(__faststorefence)
615 __INTRINSICS_USEINLINE
__faststorefence(void)616 void __faststorefence(void) {
617     /* Turns out this is actually faster than MS's "trick" on newer cpus.  Note
618     that this builtin performs an implicit ReadWriteBarrier. */
619     __builtin_ia32_sfence();
620 }
621 #endif
622 #define __INTRINSIC_DEFINED___faststorefence
623 #endif /* __INTRINSIC_PROLOG */
624 
#if __INTRINSIC_PROLOG(__stosq)
/* Fill Count quadwords at Dest with Data (rep stosq). */
__MINGW_EXTENSION void __stosq(unsigned __int64 *, unsigned __int64, size_t);
#if !__has_builtin(__stosq)
__INTRINSICS_USEINLINE
__buildstos(__stosq, unsigned __int64, "q|q")
#endif
#define __INTRINSIC_DEFINED___stosq
#endif /* __INTRINSIC_PROLOG */
633 
/* Atomic 64-bit bit-test intrinsics: each atomically tests bit b of *a,
   then sets/resets/complements it (lock bts/btr/btc), returning the
   previous bit value.  The Interlocked* spellings are identical. */
#if __INTRINSIC_PROLOG(_interlockedbittestandset64)
__MINGW_EXTENSION unsigned char _interlockedbittestandset64(__int64 volatile *a, __int64 b);
#if !__has_builtin(_interlockedbittestandset64)
__INTRINSICS_USEINLINE
__buildbittesti(_interlockedbittestandset64, __int64, "lock bts{q %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "J")
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandset64
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(_interlockedbittestandreset64)
__MINGW_EXTENSION unsigned char _interlockedbittestandreset64(__int64 volatile *a, __int64 b);
#if !__has_builtin(_interlockedbittestandreset64)
__INTRINSICS_USEINLINE
__buildbittesti(_interlockedbittestandreset64, __int64, "lock btr{q %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "J")
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandreset64
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(_interlockedbittestandcomplement64)
__MINGW_EXTENSION unsigned char _interlockedbittestandcomplement64(__int64 volatile *a, __int64 b);
#if !__has_builtin(_interlockedbittestandcomplement64)
__INTRINSICS_USEINLINE
__buildbittesti(_interlockedbittestandcomplement64, __int64, "lock btc{q %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "J")
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandcomplement64
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(InterlockedBitTestAndSet64)
__MINGW_EXTENSION unsigned char InterlockedBitTestAndSet64(volatile __int64 *a, __int64 b);
#if !__has_builtin(InterlockedBitTestAndSet64)
__INTRINSICS_USEINLINE
__buildbittesti(InterlockedBitTestAndSet64, __int64, "lock bts{q %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "J")
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndSet64
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(InterlockedBitTestAndReset64)
__MINGW_EXTENSION unsigned char InterlockedBitTestAndReset64(volatile __int64 *a, __int64 b);
#if !__has_builtin(InterlockedBitTestAndReset64)
__INTRINSICS_USEINLINE
__buildbittesti(InterlockedBitTestAndReset64, __int64, "lock btr{q %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "J")
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndReset64
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(InterlockedBitTestAndComplement64)
__MINGW_EXTENSION unsigned char InterlockedBitTestAndComplement64(volatile __int64 *a, __int64 b);
#if !__has_builtin(InterlockedBitTestAndComplement64)
__INTRINSICS_USEINLINE
__buildbittesti(InterlockedBitTestAndComplement64, __int64, "lock btc{q %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "J")
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndComplement64
#endif /* __INTRINSIC_PROLOG */
687 
/* Atomic 64-bit logical ops; each returns the PREVIOUS value of *Destination
   (via __sync_fetch_and_* inside __buildlogicali). */
#if __INTRINSIC_PROLOG(_InterlockedAnd64)
__MINGW_EXTENSION __int64 _InterlockedAnd64(__int64 volatile *, __int64);
#if !__has_builtin(_InterlockedAnd64)
__INTRINSICS_USEINLINE
__buildlogicali(_InterlockedAnd64, __int64, and)
#endif
#define __INTRINSIC_DEFINED__InterlockedAnd64
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(_InterlockedOr64)
__MINGW_EXTENSION __int64 _InterlockedOr64(__int64 volatile *, __int64);
#if !__has_builtin(_InterlockedOr64)
__INTRINSICS_USEINLINE
__buildlogicali(_InterlockedOr64, __int64, or)
#endif
#define __INTRINSIC_DEFINED__InterlockedOr64
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(_InterlockedXor64)
__MINGW_EXTENSION __int64 _InterlockedXor64(__int64 volatile *, __int64);
#if !__has_builtin(_InterlockedXor64)
__INTRINSICS_USEINLINE
__buildlogicali(_InterlockedXor64, __int64, xor)
#endif
#define __INTRINSIC_DEFINED__InterlockedXor64
#endif /* __INTRINSIC_PROLOG */
714 
715 #if __INTRINSIC_PROLOG(_InterlockedIncrement64)
716 __MINGW_EXTENSION __int64 _InterlockedIncrement64(__int64 volatile *Addend);
717 #if !__has_builtin(_InterlockedIncrement64)
718 __MINGW_EXTENSION __INTRINSICS_USEINLINE
_InterlockedIncrement64(__int64 volatile * Addend)719 __int64 _InterlockedIncrement64(__int64 volatile *Addend) {
720     return __sync_add_and_fetch(Addend, 1);
721 }
722 #endif
723 #define __INTRINSIC_DEFINED__InterlockedIncrement64
724 #endif /* __INTRINSIC_PROLOG */
725 
726 #if __INTRINSIC_PROLOG(_InterlockedDecrement64)
727 __MINGW_EXTENSION __int64 _InterlockedDecrement64(__int64 volatile *Addend);
728 #if !__has_builtin(_InterlockedDecrement64)
729 __MINGW_EXTENSION __INTRINSICS_USEINLINE
_InterlockedDecrement64(__int64 volatile * Addend)730 __int64 _InterlockedDecrement64(__int64 volatile *Addend) {
731     return __sync_sub_and_fetch(Addend, 1);
732 }
733 #endif
734 #define __INTRINSIC_DEFINED__InterlockedDecrement64
735 #endif /* __INTRINSIC_PROLOG */
736 
737 #if __INTRINSIC_PROLOG(_InterlockedExchange64)
738 __MINGW_EXTENSION __int64 _InterlockedExchange64(__int64 volatile *Target, __int64 Value);
739 #if !__has_builtin(_InterlockedExchange64)
740 __MINGW_EXTENSION __INTRINSICS_USEINLINE
_InterlockedExchange64(__int64 volatile * Target,__int64 Value)741 __int64 _InterlockedExchange64(__int64 volatile *Target, __int64 Value) {
742     return __sync_lock_test_and_set(Target, Value);
743 }
744 #endif
745 #define __INTRINSIC_DEFINED__InterlockedExchange64
746 #endif /* __INTRINSIC_PROLOG */
747 
748 #if __INTRINSIC_PROLOG(_InterlockedExchangeAdd64)
749 __MINGW_EXTENSION __int64 _InterlockedExchangeAdd64(__int64 volatile *Addend, __int64 Value);
750 #if !__has_builtin(_InterlockedExchangeAdd64)
751 __MINGW_EXTENSION __INTRINSICS_USEINLINE
_InterlockedExchangeAdd64(__int64 volatile * Addend,__int64 Value)752 __int64 _InterlockedExchangeAdd64(__int64 volatile *Addend, __int64 Value) {
753     return __sync_fetch_and_add(Addend, Value);
754 }
755 #endif
756 #define __INTRINSIC_DEFINED__InterlockedExchangeAdd64
757 #endif /* __INTRINSIC_PROLOG */
758 
/* gs-relative loads: read a value of the given width from gs:[Offset]. */
#if __INTRINSIC_PROLOG(__readgsbyte)
unsigned char __readgsbyte(unsigned __LONG32 Offset);
#if !__has_builtin(__readgsbyte)
__INTRINSICS_USEINLINE
__buildreadseg(__readgsbyte, unsigned char, "gs", "b")
#endif
#define __INTRINSIC_DEFINED___readgsbyte
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__readgsword)
unsigned short __readgsword(unsigned __LONG32 Offset);
#if !__has_builtin(__readgsword)
__INTRINSICS_USEINLINE
__buildreadseg(__readgsword, unsigned short, "gs", "w")
#endif
#define __INTRINSIC_DEFINED___readgsword
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__readgsdword)
unsigned __LONG32 __readgsdword(unsigned __LONG32 Offset);
#if !__has_builtin(__readgsdword)
__INTRINSICS_USEINLINE
__buildreadseg(__readgsdword, unsigned __LONG32, "gs", "l")
#endif
#define __INTRINSIC_DEFINED___readgsdword
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__readgsqword)
__MINGW_EXTENSION unsigned __int64 __readgsqword(unsigned __LONG32 Offset);
#if !__has_builtin(__readgsqword)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
__buildreadseg(__readgsqword, unsigned __int64, "gs", "q")
#endif
#define __INTRINSIC_DEFINED___readgsqword
#endif /* __INTRINSIC_PROLOG */
794 
795 #if __INTRINSIC_PROLOG(__writegsbyte)
796 void __writegsbyte(unsigned __LONG32 Offset,unsigned char Data);
797 #if !__has_builtin(__writegsbyte)
798 __INTRINSICS_USEINLINE
799 __buildwriteseg(__writegsbyte, unsigned char, "gs", "b")
800 #endif
801 #define __INTRINSIC_DEFINED___writegsbyte
802 #endif /* __INTRINSIC_PROLOG */
803 
804 #if __INTRINSIC_PROLOG(__writegsword)
805 void __writegsword(unsigned __LONG32 Offset,unsigned short Data);
806 #if !__has_builtin(__writegsword)
807 __INTRINSICS_USEINLINE
808 __buildwriteseg(__writegsword, unsigned short, "gs", "w")
809 #endif
810 #define __INTRINSIC_DEFINED___writegsword
811 #endif /* __INTRINSIC_PROLOG */
812 
813 #if __INTRINSIC_PROLOG(__writegsdword)
814 void __writegsdword(unsigned __LONG32 Offset,unsigned __LONG32 Data);
815 #if !__has_builtin(__writegsdword)
816 __INTRINSICS_USEINLINE
817 __buildwriteseg(__writegsdword, unsigned __LONG32, "gs", "l")
818 #endif
819 #define __INTRINSIC_DEFINED___writegsdword
820 #endif /* __INTRINSIC_PROLOG */
821 
822 #if __INTRINSIC_PROLOG(__writegsqword)
823 __MINGW_EXTENSION void __writegsqword(unsigned __LONG32 Offset,unsigned __int64 Data);
824 #if !__has_builtin(__writegsqword)
825 __MINGW_EXTENSION __INTRINSICS_USEINLINE
826 __buildwriteseg(__writegsqword, unsigned __int64, "gs", "q")
827 #endif
828 #define __INTRINSIC_DEFINED___writegsqword
829 #endif /* __INTRINSIC_PROLOG */
830 
831 #if __INTRINSIC_PROLOG(_BitScanForward64)
832 __MINGW_EXTENSION unsigned char _BitScanForward64(unsigned __LONG32 *Index, unsigned __int64 Mask);
833 #if !__has_builtin(_BitScanForward64)
834 __MINGW_EXTENSION __INTRINSICS_USEINLINE
835 __buildbitscan(_BitScanForward64, unsigned __int64, "bsf{q %[Mask],%[Index] | %[Index],%[Mask]}")
836 #endif
837 #define __INTRINSIC_DEFINED__BitScanForward64
838 #endif /* __INTRINSIC_PROLOG */
839 
840 #if __INTRINSIC_PROLOG(_BitScanReverse64)
841 __MINGW_EXTENSION unsigned char _BitScanReverse64(unsigned __LONG32 *Index, unsigned __int64 Mask);
842 #if !__has_builtin(_BitScanReverse64)
843 __MINGW_EXTENSION __INTRINSICS_USEINLINE
844 __buildbitscan(_BitScanReverse64, unsigned __int64, "bsr{q %[Mask],%[Index] | %[Index],%[Mask]}")
845 #endif
846 #define __INTRINSIC_DEFINED__BitScanReverse64
847 #endif /* __INTRINSIC_PROLOG */
848 
849 #if __INTRINSIC_PROLOG(_bittest64)
850 __MINGW_EXTENSION unsigned char _bittest64(__int64 const *a, __int64 b);
851 #if !__has_builtin(_bittest64)
852 __MINGW_EXTENSION __INTRINSICS_USEINLINE
853 __buildbittest(_bittest64, __int64, "q", "J")
854 #endif
855 #define __INTRINSIC_DEFINED__bittest64
856 #endif /* __INTRINSIC_PROLOG */
857 
858 #if __INTRINSIC_PROLOG(_bittestandset64)
859 __MINGW_EXTENSION unsigned char _bittestandset64(__int64 *a, __int64 b);
860 #if !__has_builtin(_bittestandset64)
861 __MINGW_EXTENSION __INTRINSICS_USEINLINE
862 __buildbittestand(_bittestandset64, __int64, "bts", "J", "q")
863 #endif
864 #define __INTRINSIC_DEFINED__bittestandset64
865 #endif /* __INTRINSIC_PROLOG */
866 
867 #if __INTRINSIC_PROLOG(_bittestandreset64)
868 __MINGW_EXTENSION unsigned char _bittestandreset64(__int64 *a, __int64 b);
869 #if !__has_builtin(_bittestandreset64)
870 __MINGW_EXTENSION __INTRINSICS_USEINLINE
871 __buildbittestand(_bittestandreset64, __int64, "btr", "J", "q")
872 #endif
873 #define __INTRINSIC_DEFINED__bittestandreset64
874 #endif /* __INTRINSIC_PROLOG */
875 
876 #if __INTRINSIC_PROLOG(_bittestandcomplement64)
877 __MINGW_EXTENSION unsigned char _bittestandcomplement64(__int64 *a, __int64 b);
878 #if !__has_builtin(_bittestandcomplement64)
879 __MINGW_EXTENSION __INTRINSICS_USEINLINE
880 __buildbittestand(_bittestandcomplement64, __int64, "btc", "J", "q")
881 #endif
882 #define __INTRINSIC_DEFINED__bittestandcomplement64
883 #endif /* __INTRINSIC_PROLOG */
884 
885 #if __INTRINSIC_PROLOG(__readcr0)
886 __MINGW_EXTENSION unsigned __int64 __readcr0(void);
887 #if !__has_builtin(__readcr0)
888 __INTRINSICS_USEINLINE
889 __build_readcr(__readcr0, unsigned __int64, "0")
890 #endif
891 #define __INTRINSIC_DEFINED___readcr0
892 #endif /* __INTRINSIC_PROLOG */
893 
894 #if __INTRINSIC_PROLOG(__readcr2)
895 __MINGW_EXTENSION unsigned __int64 __readcr2(void);
896 #if !__has_builtin(__readcr2)
897 __INTRINSICS_USEINLINE
898 __build_readcr(__readcr2, unsigned __int64, "2")
899 #endif
900 #define __INTRINSIC_DEFINED___readcr2
901 #endif /* __INTRINSIC_PROLOG */
902 
903 #if __INTRINSIC_PROLOG(__readcr3)
904 __MINGW_EXTENSION unsigned __int64 __readcr3(void);
905 #if !__has_builtin(__readcr3)
906 __INTRINSICS_USEINLINE
907 __build_readcr(__readcr3, unsigned __int64, "3")
908 #endif
909 #define __INTRINSIC_DEFINED___readcr3
910 #endif /* __INTRINSIC_PROLOG */
911 
912 #if __INTRINSIC_PROLOG(__readcr4)
913 __MINGW_EXTENSION unsigned __int64 __readcr4(void);
914 #if !__has_builtin(__readcr4)
915 __INTRINSICS_USEINLINE
916 __build_readcr(__readcr4, unsigned __int64, "4")
917 #endif
918 #define __INTRINSIC_DEFINED___readcr4
919 #endif /* __INTRINSIC_PROLOG */
920 
921 #if __INTRINSIC_PROLOG(__readcr8)
922 __MINGW_EXTENSION unsigned __int64 __readcr8(void);
923 #if !__has_builtin(__readcr8)
924 __INTRINSICS_USEINLINE
925 __build_readcr(__readcr8, unsigned __int64, "8")
926 #endif
927 #define __INTRINSIC_DEFINED___readcr8
928 #endif /* __INTRINSIC_PROLOG */
929 
930 #if __INTRINSIC_PROLOG(__writecr0)
931 __MINGW_EXTENSION void __writecr0(unsigned __int64);
932 #if !__has_builtin(__writecr0)
933 __INTRINSICS_USEINLINE
934 __build_writecr(__writecr0, unsigned __int64, "0")
935 #endif
936 #define __INTRINSIC_DEFINED___writecr0
937 #endif /* __INTRINSIC_PROLOG */
938 
939 #if __INTRINSIC_PROLOG(__writecr3)
940 __MINGW_EXTENSION void __writecr3(unsigned __int64);
941 #if !__has_builtin(__writecr3)
942 __INTRINSICS_USEINLINE
943 __build_writecr(__writecr3, unsigned __int64, "3")
944 #endif
945 #define __INTRINSIC_DEFINED___writecr3
946 #endif /* __INTRINSIC_PROLOG */
947 
948 #if __INTRINSIC_PROLOG(__writecr4)
949 __MINGW_EXTENSION void __writecr4(unsigned __int64);
950 #if !__has_builtin(__writecr4)
951 __INTRINSICS_USEINLINE
952 __build_writecr(__writecr4, unsigned __int64, "4")
953 #endif
954 #define __INTRINSIC_DEFINED___writecr4
955 #endif /* __INTRINSIC_PROLOG */
956 
957 #if __INTRINSIC_PROLOG(__writecr8)
958 __MINGW_EXTENSION void __writecr8(unsigned __int64);
959 #if !__has_builtin(__writecr8)
960 __INTRINSICS_USEINLINE
961 __build_writecr(__writecr8, unsigned __int64, "8")
962 #endif
963 #define __INTRINSIC_DEFINED___writecr8
964 #endif /* __INTRINSIC_PROLOG */
965 
966 #if __INTRINSIC_PROLOG(__movsq)
967 __MINGW_EXTENSION void __movsq(unsigned __int64 *Dest, unsigned __int64 const *Source, size_t Count);
968 #if !__has_builtin(__movsq)
969 __MINGW_EXTENSION __INTRINSICS_USEINLINE
970 __buildmov(__movsq, unsigned __int64, "q")
971 #endif
972 #define __INTRINSIC_DEFINED___movsq
973 #endif /* __INTRINSIC_PROLOG */
974 
975 #if __INTRINSIC_PROLOG(_umul128)
976 unsigned __int64 _umul128(unsigned __int64, unsigned __int64, unsigned __int64 *);
977 #if !__has_builtin(_umul128)
978 __INTRINSICS_USEINLINE
_umul128(unsigned __int64 a,unsigned __int64 b,unsigned __int64 * hi)979 unsigned __int64 _umul128(unsigned __int64 a, unsigned __int64 b, unsigned __int64 *hi)
980 {
981    __MINGW_EXTENSION union { unsigned __int128 v; unsigned __int64 sv[2]; } var;
982    var.v = a;
983    var.v *= b;
984    if (hi) *hi = var.sv[1];
985    return var.sv[0];
986 }
987 #endif
988 #define __INTRINSIC_DEFINED__umul128
989 #endif /* __INTRINSIC_PROLOG */
990 
991 #if __INTRINSIC_PROLOG(_mul128)
992 __int64 _mul128(__int64, __int64, __int64 *);
993 #if !__has_builtin(_mul128)
994 __INTRINSICS_USEINLINE
_mul128(__int64 a,__int64 b,__int64 * hi)995 __int64 _mul128(__int64 a, __int64 b, __int64 *hi)
996 {
997    __MINGW_EXTENSION union { __int128 v; __int64 sv[2]; } var;
998    var.v = a;
999    var.v *= b;
1000    if (hi) *hi = var.sv[1];
1001    return var.sv[0];
1002 }
1003 #endif
1004 #define __INTRINSIC_DEFINED__mul128
1005 #endif /* __INTRINSIC_PROLOG */
1006 
1007 #if __INTRINSIC_PROLOG(__shiftleft128)
1008 unsigned __int64 __shiftleft128(unsigned __int64  LowPart, unsigned __int64 HighPart, unsigned char Shift);
1009 #if !__has_builtin(__shiftleft128)
1010 __INTRINSICS_USEINLINE
__shiftleft128(unsigned __int64 LowPart,unsigned __int64 HighPart,unsigned char Shift)1011 unsigned __int64 __shiftleft128 (unsigned __int64  LowPart, unsigned __int64 HighPart, unsigned char Shift)
1012 {
1013    unsigned __int64 ret;
1014 
1015    __asm__ ("shld {%[Shift],%[LowPart],%[HighPart]|%[HighPart], %[LowPart], %[Shift]}"
1016       : [ret] "=r" (ret)
1017       : [LowPart] "r" (LowPart), [HighPart] "0" (HighPart), [Shift] "Jc" (Shift)
1018       : "cc");
1019 
1020    return ret;
1021 }
1022 #endif
1023 #define __INTRINSIC_DEFINED___shiftleft128
1024 #endif /* __INTRINSIC_PROLOG */
1025 
1026 #if __INTRINSIC_PROLOG(__shiftright128)
1027 unsigned __int64 __shiftright128 (unsigned __int64  LowPart, unsigned __int64 HighPart, unsigned char Shift);
1028 #if !__has_builtin(__shiftright128)
1029 __INTRINSICS_USEINLINE
__shiftright128(unsigned __int64 LowPart,unsigned __int64 HighPart,unsigned char Shift)1030 unsigned __int64 __shiftright128 (unsigned __int64  LowPart, unsigned __int64 HighPart, unsigned char Shift)
1031 {
1032    unsigned __int64 ret;
1033 
1034    __asm__ ("shrd {%[Shift],%[HighPart],%[LowPart]|%[LowPart], %[HighPart], %[Shift]}"
1035       : [ret] "=r" (ret)
1036       : [LowPart] "0" (LowPart), [HighPart] "r" (HighPart), [Shift] "Jc" (Shift)
1037       : "cc");
1038 
1039    return ret;
1040 }
1041 #endif
1042 #define __INTRINSIC_DEFINED___shiftright128
1043 #endif /* __INTRINSIC_PROLOG */
1044 
1045 #endif /* defined(__x86_64__) || defined(_AMD64_) */
1046 
1047 /* ***************************************************** */
1048 
1049 #if defined(__arm__) || defined(_ARM_)
1050 
1051 #if __INTRINSIC_PROLOG(_interlockedbittestandset)
1052 unsigned char _interlockedbittestandset(__LONG32 volatile *a, __LONG32 b);
1053 #if !__has_builtin(_interlockedbittestandset)
1054 __INTRINSICS_USEINLINE
1055 __buildbittesti(_interlockedbittestandset, __LONG32, "orr", /* unused param */)
1056 #endif
1057 #define __INTRINSIC_DEFINED__interlockedbittestandset
1058 #endif /* __INTRINSIC_PROLOG */
1059 
1060 #if __INTRINSIC_PROLOG(_interlockedbittestandreset)
1061 unsigned char _interlockedbittestandreset(__LONG32 volatile *a, __LONG32 b);
1062 __INTRINSICS_USEINLINE
1063 #if !__has_builtin(_interlockedbittestandreset)
1064 __buildbittesti(_interlockedbittestandreset, __LONG32, "bic", /* unused param */)
1065 #endif
1066 #define __INTRINSIC_DEFINED__interlockedbittestandreset
1067 #endif /* __INTRINSIC_PROLOG */
1068 
1069 #if __INTRINSIC_PROLOG(_interlockedbittestandcomplement)
1070 unsigned char _interlockedbittestandcomplement(__LONG32 volatile *a, __LONG32 b);
1071 #if !__has_builtin(_interlockedbittestandcomplement)
1072 __INTRINSICS_USEINLINE
1073 __buildbittesti(_interlockedbittestandcomplement, __LONG32, "eor", /* unused param */)
1074 #endif
1075 #define __INTRINSIC_DEFINED__interlockedbittestandcomplement
1076 #endif /* __INTRINSIC_PROLOG */
1077 
1078 #if __INTRINSIC_PROLOG(InterlockedBitTestAndSet)
1079 unsigned char InterlockedBitTestAndSet(volatile __LONG32 *a, __LONG32 b);
1080 #if !__has_builtin(InterlockedBitTestAndSet)
1081 __INTRINSICS_USEINLINE
1082 __buildbittesti(InterlockedBitTestAndSet, __LONG32, "orr", /* unused param */)
1083 #endif
1084 #define __INTRINSIC_DEFINED_InterlockedBitTestAndSet
1085 #endif /* __INTRINSIC_PROLOG */
1086 
1087 #if __INTRINSIC_PROLOG(InterlockedBitTestAndReset)
1088 unsigned char InterlockedBitTestAndReset(volatile __LONG32 *a, __LONG32 b);
1089 #if !__has_builtin(InterlockedBitTestAndReset)
1090 __INTRINSICS_USEINLINE
1091 __buildbittesti(InterlockedBitTestAndReset, __LONG32, "bic", /* unused param */)
1092 #endif
1093 #define __INTRINSIC_DEFINED_InterlockedBitTestAndReset
1094 #endif /* __INTRINSIC_PROLOG */
1095 
1096 #if __INTRINSIC_PROLOG(InterlockedBitTestAndComplement)
1097 unsigned char InterlockedBitTestAndComplement(volatile __LONG32 *a, __LONG32 b);
1098 #if !__has_builtin(InterlockedBitTestAndComplement)
1099 __INTRINSICS_USEINLINE
1100 __buildbittesti(InterlockedBitTestAndComplement, __LONG32, "eor", /* unused param */)
1101 #endif
1102 #define __INTRINSIC_DEFINED_InterlockedBitTestAndComplement
1103 #endif /* __INTRINSIC_PROLOG */
1104 
1105 #if __INTRINSIC_PROLOG(_BitScanForward)
1106 __MINGW_EXTENSION unsigned char _BitScanForward(unsigned __LONG32 *Index, unsigned __LONG32 Mask);
1107 #if !__has_builtin(_BitScanForward)
1108 __MINGW_EXTENSION __INTRINSICS_USEINLINE
_BitScanForward(unsigned __LONG32 * Index,unsigned __LONG32 Mask)1109 unsigned char _BitScanForward(unsigned __LONG32 *Index, unsigned __LONG32 Mask)
1110 {
1111     if (Mask == 0)
1112         return 0;
1113     *Index = __builtin_ctz(Mask);
1114     return 1;
1115 }
1116 #endif
1117 #define __INTRINSIC_DEFINED__BitScanForward
1118 #endif /* __INTRINSIC_PROLOG */
1119 
1120 #if __INTRINSIC_PROLOG(_BitScanReverse)
1121 __MINGW_EXTENSION unsigned char _BitScanReverse(unsigned __LONG32 *Index, unsigned __LONG32 Mask);
1122 #if !__has_builtin(_BitScanReverse)
1123 __MINGW_EXTENSION __INTRINSICS_USEINLINE
_BitScanReverse(unsigned __LONG32 * Index,unsigned __LONG32 Mask)1124 unsigned char _BitScanReverse(unsigned __LONG32 *Index, unsigned __LONG32 Mask)
1125 {
1126     if (Mask == 0)
1127         return 0;
1128     *Index = 31 - __builtin_clz(Mask);
1129     return 1;
1130 }
1131 #endif
1132 #define __INTRINSIC_DEFINED__BitScanReverse
1133 #endif /* __INTRINSIC_PROLOG */
1134 
1135 #endif /* defined(__arm__) || defined(_ARM_) */
1136 
1137 #if defined(__aarch64__) || defined(_ARM64_)
1138 
1139 #if __INTRINSIC_PROLOG(_interlockedbittestandset)
1140 unsigned char _interlockedbittestandset(__LONG32 volatile *a, __LONG32 b);
1141 #if !__has_builtin(_interlockedbittestandset)
1142 __INTRINSICS_USEINLINE
1143 __buildbittesti(_interlockedbittestandset, __LONG32, "orr", /* unused param */)
1144 #endif
1145 #define __INTRINSIC_DEFINED__interlockedbittestandset
1146 #endif /* __INTRINSIC_PROLOG */
1147 
1148 #if __INTRINSIC_PROLOG(_interlockedbittestandreset)
1149 unsigned char _interlockedbittestandreset(__LONG32 volatile *a, __LONG32 b);
1150 __INTRINSICS_USEINLINE
1151 #if !__has_builtin(_interlockedbittestandreset)
1152 __buildbittesti(_interlockedbittestandreset, __LONG32, "bic", /* unused param */)
1153 #endif
1154 #define __INTRINSIC_DEFINED__interlockedbittestandreset
1155 #endif /* __INTRINSIC_PROLOG */
1156 
1157 #if __INTRINSIC_PROLOG(_interlockedbittestandcomplement)
1158 unsigned char _interlockedbittestandcomplement(__LONG32 volatile *a, __LONG32 b);
1159 #if !__has_builtin(_interlockedbittestandcomplement)
1160 __INTRINSICS_USEINLINE
1161 __buildbittesti(_interlockedbittestandcomplement, __LONG32, "eor", /* unused param */)
1162 #endif
1163 #define __INTRINSIC_DEFINED__interlockedbittestandcomplement
1164 #endif /* __INTRINSIC_PROLOG */
1165 
1166 #if __INTRINSIC_PROLOG(InterlockedBitTestAndSet)
1167 unsigned char InterlockedBitTestAndSet(volatile __LONG32 *a, __LONG32 b);
1168 #if !__has_builtin(InterlockedBitTestAndSet)
1169 __INTRINSICS_USEINLINE
1170 __buildbittesti(InterlockedBitTestAndSet, __LONG32, "orr", /* unused param */)
1171 #endif
1172 #define __INTRINSIC_DEFINED_InterlockedBitTestAndSet
1173 #endif /* __INTRINSIC_PROLOG */
1174 
1175 #if __INTRINSIC_PROLOG(InterlockedBitTestAndReset)
1176 unsigned char InterlockedBitTestAndReset(volatile __LONG32 *a, __LONG32 b);
1177 #if !__has_builtin(InterlockedBitTestAndReset)
1178 __INTRINSICS_USEINLINE
1179 __buildbittesti(InterlockedBitTestAndReset, __LONG32, "bic", /* unused param */)
1180 #endif
1181 #define __INTRINSIC_DEFINED_InterlockedBitTestAndReset
1182 #endif /* __INTRINSIC_PROLOG */
1183 
1184 #if __INTRINSIC_PROLOG(InterlockedBitTestAndComplement)
1185 unsigned char InterlockedBitTestAndComplement(volatile __LONG32 *a, __LONG32 b);
1186 #if !__has_builtin(InterlockedBitTestAndComplement)
1187 __INTRINSICS_USEINLINE
1188 __buildbittesti(InterlockedBitTestAndComplement, __LONG32, "eor", /* unused param */)
1189 #endif
1190 #define __INTRINSIC_DEFINED_InterlockedBitTestAndComplement
1191 #endif /* __INTRINSIC_PROLOG */
1192 
1193 #if __INTRINSIC_PROLOG(_interlockedbittestandset64)
1194 unsigned char _interlockedbittestandset64(__int64 volatile *a, __int64 b);
1195 #if !__has_builtin(_interlockedbittestandset64)
1196 __INTRINSICS_USEINLINE
1197 __buildbittesti64(_interlockedbittestandset64, __int64, "orr", /* unused param */)
1198 #endif
1199 #define __INTRINSIC_DEFINED__interlockedbittestandset64
1200 #endif /* __INTRINSIC_PROLOG */
1201 
1202 #if __INTRINSIC_PROLOG(_interlockedbittestandreset64)
1203 unsigned char _interlockedbittestandreset64(__int64 volatile *a, __int64 b);
1204 __INTRINSICS_USEINLINE
1205 #if !__has_builtin(_interlockedbittestandreset64)
1206 __buildbittesti64(_interlockedbittestandreset64, __int64, "bic", /* unused param */)
1207 #endif
1208 #define __INTRINSIC_DEFINED__interlockedbittestandreset64
1209 #endif /* __INTRINSIC_PROLOG */
1210 
1211 #if __INTRINSIC_PROLOG(_interlockedbittestandcomplement64)
1212 unsigned char _interlockedbittestandcomplement64(__int64 volatile *a, __int64 b);
1213 #if !__has_builtin(_interlockedbittestandcomplement64)
1214 __INTRINSICS_USEINLINE
1215 __buildbittesti64(_interlockedbittestandcomplement64, __int64, "eor", /* unused param */)
1216 #endif
1217 #define __INTRINSIC_DEFINED__interlockedbittestandcomplement64
1218 #endif /* __INTRINSIC_PROLOG */
1219 
1220 #if __INTRINSIC_PROLOG(InterlockedBitTestAndSet64)
1221 unsigned char InterlockedBitTestAndSet64(volatile __int64 *a, __int64 b);
1222 #if !__has_builtin(InterlockedBitTestAndSet64)
1223 __INTRINSICS_USEINLINE
1224 __buildbittesti64(InterlockedBitTestAndSet64, __int64, "orr", /* unused param */)
1225 #endif
1226 #define __INTRINSIC_DEFINED_InterlockedBitTestAndSet64
1227 #endif /* __INTRINSIC_PROLOG */
1228 
1229 #if __INTRINSIC_PROLOG(InterlockedBitTestAndReset64)
1230 unsigned char InterlockedBitTestAndReset64(volatile __int64 *a, __int64 b);
1231 #if !__has_builtin(InterlockedBitTestAndReset64)
1232 __INTRINSICS_USEINLINE
1233 __buildbittesti64(InterlockedBitTestAndReset64, __int64, "bic", /* unused param */)
1234 #endif
1235 #define __INTRINSIC_DEFINED_InterlockedBitTestAndReset64
1236 #endif /* __INTRINSIC_PROLOG */
1237 
1238 #if __INTRINSIC_PROLOG(InterlockedBitTestAndComplement64)
1239 unsigned char InterlockedBitTestAndComplement64(volatile __int64 *a, __int64 b);
1240 #if !__has_builtin(InterlockedBitTestAndComplement64)
1241 __INTRINSICS_USEINLINE
1242 __buildbittesti64(InterlockedBitTestAndComplement64, __int64, "eor", /* unused param */)
1243 #endif
1244 #define __INTRINSIC_DEFINED_InterlockedBitTestAndComplement64
1245 #endif /* __INTRINSIC_PROLOG */
1246 
1247 #if __INTRINSIC_PROLOG(_InterlockedAnd64)
1248 __MINGW_EXTENSION __int64 _InterlockedAnd64(__int64 volatile *, __int64);
1249 #if !__has_builtin(_InterlockedAnd64)
1250 __INTRINSICS_USEINLINE
1251 __buildlogicali(_InterlockedAnd64, __int64, and)
1252 #endif
1253 #define __INTRINSIC_DEFINED__InterlockedAnd64
1254 #endif /* __INTRINSIC_PROLOG */
1255 
1256 #if __INTRINSIC_PROLOG(_InterlockedOr64)
1257 __MINGW_EXTENSION __int64 _InterlockedOr64(__int64 volatile *, __int64);
1258 #if !__has_builtin(_InterlockedOr64)
1259 __INTRINSICS_USEINLINE
1260 __buildlogicali(_InterlockedOr64, __int64, or)
1261 #endif
1262 #define __INTRINSIC_DEFINED__InterlockedOr64
1263 #endif /* __INTRINSIC_PROLOG */
1264 
1265 #if __INTRINSIC_PROLOG(_InterlockedXor64)
1266 __MINGW_EXTENSION __int64 _InterlockedXor64(__int64 volatile *, __int64);
1267 #if !__has_builtin(_InterlockedXor64)
1268 __INTRINSICS_USEINLINE
1269 __buildlogicali(_InterlockedXor64, __int64, xor)
1270 #endif
1271 #define __INTRINSIC_DEFINED__InterlockedXor64
1272 #endif /* __INTRINSIC_PROLOG */
1273 
1274 #if __INTRINSIC_PROLOG(_InterlockedIncrement64)
1275 __MINGW_EXTENSION __int64 _InterlockedIncrement64(__int64 volatile *Addend);
1276 #if !__has_builtin(_InterlockedIncrement64)
1277 __MINGW_EXTENSION __INTRINSICS_USEINLINE
_InterlockedIncrement64(__int64 volatile * Addend)1278 __int64 _InterlockedIncrement64(__int64 volatile *Addend) {
1279     return __sync_add_and_fetch(Addend, 1);
1280 }
1281 #endif
1282 #define __INTRINSIC_DEFINED__InterlockedIncrement64
1283 #endif /* __INTRINSIC_PROLOG */
1284 
1285 #if __INTRINSIC_PROLOG(_InterlockedDecrement64)
1286 __MINGW_EXTENSION __int64 _InterlockedDecrement64(__int64 volatile *Addend);
1287 #if !__has_builtin(_InterlockedDecrement64)
1288 __MINGW_EXTENSION __INTRINSICS_USEINLINE
_InterlockedDecrement64(__int64 volatile * Addend)1289 __int64 _InterlockedDecrement64(__int64 volatile *Addend) {
1290     return __sync_sub_and_fetch(Addend, 1);
1291 }
1292 #endif
1293 #define __INTRINSIC_DEFINED__InterlockedDecrement64
1294 #endif /* __INTRINSIC_PROLOG */
1295 
1296 #if __INTRINSIC_PROLOG(_InterlockedExchange64)
1297 __MINGW_EXTENSION __int64 _InterlockedExchange64(__int64 volatile *Target, __int64 Value);
1298 #if !__has_builtin(_InterlockedExchange64)
1299 __MINGW_EXTENSION __INTRINSICS_USEINLINE
_InterlockedExchange64(__int64 volatile * Target,__int64 Value)1300 __int64 _InterlockedExchange64(__int64 volatile *Target, __int64 Value) {
1301     return __sync_lock_test_and_set(Target, Value);
1302 }
1303 #endif
1304 #define __INTRINSIC_DEFINED__InterlockedExchange64
1305 #endif /* __INTRINSIC_PROLOG */
1306 
1307 #if __INTRINSIC_PROLOG(_InterlockedExchangeAdd64)
1308 __MINGW_EXTENSION __int64 _InterlockedExchangeAdd64(__int64 volatile *Addend, __int64 Value);
1309 #if !__has_builtin(_InterlockedExchangeAdd64)
1310 __MINGW_EXTENSION __INTRINSICS_USEINLINE
_InterlockedExchangeAdd64(__int64 volatile * Addend,__int64 Value)1311 __int64 _InterlockedExchangeAdd64(__int64 volatile *Addend, __int64 Value) {
1312     return __sync_fetch_and_add(Addend, Value);
1313 }
1314 #endif
1315 #define __INTRINSIC_DEFINED__InterlockedExchangeAdd64
1316 #endif /* __INTRINSIC_PROLOG */
1317 
1318 #if __INTRINSIC_PROLOG(_BitScanForward)
1319 __MINGW_EXTENSION unsigned char _BitScanForward(unsigned __LONG32 *Index, unsigned __LONG32 Mask);
1320 #if !__has_builtin(_BitScanForward)
1321 __MINGW_EXTENSION __INTRINSICS_USEINLINE
_BitScanForward(unsigned __LONG32 * Index,unsigned __LONG32 Mask)1322 unsigned char _BitScanForward(unsigned __LONG32 *Index, unsigned __LONG32 Mask)
1323 {
1324     if (Mask == 0)
1325         return 0;
1326     *Index = __builtin_ctz(Mask);
1327     return 1;
1328 }
1329 #endif
1330 #define __INTRINSIC_DEFINED__BitScanForward
1331 #endif /* __INTRINSIC_PROLOG */
1332 
1333 #if __INTRINSIC_PROLOG(_BitScanReverse)
1334 __MINGW_EXTENSION unsigned char _BitScanReverse(unsigned __LONG32 *Index, unsigned __LONG32 Mask);
1335 #if !__has_builtin(_BitScanReverse)
1336 __MINGW_EXTENSION __INTRINSICS_USEINLINE
_BitScanReverse(unsigned __LONG32 * Index,unsigned __LONG32 Mask)1337 unsigned char _BitScanReverse(unsigned __LONG32 *Index, unsigned __LONG32 Mask)
1338 {
1339     if (Mask == 0)
1340         return 0;
1341     *Index = 31 - __builtin_clz(Mask);
1342     return 1;
1343 }
1344 #endif
1345 #define __INTRINSIC_DEFINED__BitScanReverse
1346 #endif /* __INTRINSIC_PROLOG */
1347 
1348 #if __INTRINSIC_PROLOG(_BitScanForward64)
1349 __MINGW_EXTENSION unsigned char _BitScanForward64(unsigned __LONG32 *Index, unsigned __int64 Mask);
1350 #if !__has_builtin(_BitScanForward64)
1351 __MINGW_EXTENSION __INTRINSICS_USEINLINE
_BitScanForward64(unsigned __LONG32 * Index,unsigned __int64 Mask)1352 unsigned char _BitScanForward64(unsigned __LONG32 *Index, unsigned __int64 Mask)
1353 {
1354     if (Mask == 0)
1355         return 0;
1356     *Index = __builtin_ctzll(Mask);
1357     return 1;
1358 }
1359 #endif
1360 #define __INTRINSIC_DEFINED__BitScanForward64
1361 #endif /* __INTRINSIC_PROLOG */
1362 
1363 #if __INTRINSIC_PROLOG(_BitScanReverse64)
1364 __MINGW_EXTENSION unsigned char _BitScanReverse64(unsigned __LONG32 *Index, unsigned __int64 Mask);
1365 #if !__has_builtin(_BitScanReverse64)
1366 __MINGW_EXTENSION __INTRINSICS_USEINLINE
_BitScanReverse64(unsigned __LONG32 * Index,unsigned __int64 Mask)1367 unsigned char _BitScanReverse64(unsigned __LONG32 *Index, unsigned __int64 Mask)
1368 {
1369     if (Mask == 0)
1370         return 0;
1371     *Index = 63 - __builtin_clzll(Mask);
1372     return 1;
1373 }
1374 #endif
1375 #define __INTRINSIC_DEFINED__BitScanReverse64
1376 #endif /* __INTRINSIC_PROLOG */
1377 
1378 #endif /* defined(__aarch64__) || define(_ARM64_) */
1379 /* ***************************************************** */
1380 
1381 #if defined(__x86_64__) || defined(_AMD64_) || defined(__i386__) || defined(_X86_) || defined(__arm__) || defined(_ARM_) || defined(__aarch64__) || defined(_ARM64_)
1382 
1383 #if __INTRINSIC_PROLOG(__popcnt16)
1384 unsigned short __popcnt16(unsigned short);
1385 #if !__has_builtin(__popcnt16)
1386 __INTRINSICS_USEINLINE
__popcnt16(unsigned short value)1387 unsigned short __popcnt16(unsigned short value)
1388 {
1389     return __builtin_popcount(value);
1390 }
1391 #endif
1392 #define __INTRINSIC_DEFINED___popcnt16
1393 #endif /* __INTRINSIC_PROLOG */
1394 
1395 #if __INTRINSIC_PROLOG(__popcnt)
1396 unsigned int __popcnt(unsigned int);
1397 #if !__has_builtin(__popcnt)
1398 __INTRINSICS_USEINLINE
__popcnt(unsigned int value)1399 unsigned int __popcnt(unsigned int value)
1400 {
1401     return __builtin_popcount(value);
1402 }
1403 #endif
1404 #define __INTRINSIC_DEFINED___popcnt
1405 #endif /* __INTRINSIC_PROLOG */
1406 
1407 #if __INTRINSIC_PROLOG(__popcnt64)
1408 unsigned __int64 __popcnt64(unsigned __int64);
1409 #if !__has_builtin(__popcnt64)
1410 __INTRINSICS_USEINLINE
__popcnt64(unsigned __int64 value)1411 unsigned __int64 __popcnt64(unsigned __int64 value)
1412 {
1413     return __builtin_popcountll(value);
1414 }
1415 #endif
1416 #define __INTRINSIC_DEFINED___popcnt64
1417 #endif /* __INTRINSIC_PROLOG */
1418 
1419 #if __INTRINSIC_PROLOG(_InterlockedAnd)
1420 __LONG32 _InterlockedAnd(__LONG32 volatile *, __LONG32);
1421 #if !__has_builtin(_InterlockedAnd)
1422 __INTRINSICS_USEINLINE
1423 __buildlogicali(_InterlockedAnd, __LONG32, and)
1424 #endif
1425 #define __INTRINSIC_DEFINED__InterlockedAnd
1426 #endif /* __INTRINSIC_PROLOG */
1427 
1428 #if __INTRINSIC_PROLOG(_InterlockedOr)
1429 __LONG32 _InterlockedOr(__LONG32 volatile *, __LONG32);
1430 #if !__has_builtin(_InterlockedOr)
1431 __INTRINSICS_USEINLINE
1432 __buildlogicali(_InterlockedOr, __LONG32, or)
1433 #endif
1434 #define __INTRINSIC_DEFINED__InterlockedOr
1435 #endif /* __INTRINSIC_PROLOG */
1436 
1437 #if __INTRINSIC_PROLOG(_InterlockedXor)
1438 __LONG32 _InterlockedXor(__LONG32 volatile *, __LONG32);
1439 #if !__has_builtin(_InterlockedXor)
1440 __INTRINSICS_USEINLINE
1441 __buildlogicali(_InterlockedXor, __LONG32, xor)
1442 #endif
1443 #define __INTRINSIC_DEFINED__InterlockedXor
1444 #endif /* __INTRINSIC_PROLOG */
1445 
1446 #if __INTRINSIC_PROLOG(_InterlockedIncrement16)
1447 short _InterlockedIncrement16(short volatile *Addend);
1448 #if !__has_builtin(_InterlockedIncrement16)
1449 __INTRINSICS_USEINLINE
_InterlockedIncrement16(short volatile * Addend)1450 short _InterlockedIncrement16(short volatile *Addend) {
1451     return __sync_add_and_fetch(Addend, 1);
1452 }
1453 #endif
1454 #define __INTRINSIC_DEFINED__InterlockedIncrement16
1455 #endif /* __INTRINSIC_PROLOG */
1456 
1457 #if __INTRINSIC_PROLOG(_InterlockedDecrement16)
1458 short _InterlockedDecrement16(short volatile *Addend);
1459 #if !__has_builtin(_InterlockedDecrement16)
1460 __INTRINSICS_USEINLINE
_InterlockedDecrement16(short volatile * Addend)1461 short _InterlockedDecrement16(short volatile *Addend) {
1462     return __sync_sub_and_fetch(Addend, 1);
1463 }
1464 #endif
1465 #define __INTRINSIC_DEFINED__InterlockedDecrement16
1466 #endif /* __INTRINSIC_PROLOG */
1467 
1468 #if __INTRINSIC_PROLOG(_InterlockedCompareExchange16)
1469 short _InterlockedCompareExchange16(short volatile *Destination, short ExChange, short Comperand);
1470 #if !__has_builtin(_InterlockedCompareExchange16)
1471 __INTRINSICS_USEINLINE
_InterlockedCompareExchange16(short volatile * Destination,short ExChange,short Comperand)1472 short _InterlockedCompareExchange16(short volatile *Destination, short ExChange, short Comperand) {
1473     return __sync_val_compare_and_swap(Destination, Comperand, ExChange);
1474 }
1475 #endif
1476 #define __INTRINSIC_DEFINED__InterlockedCompareExchange16
1477 #endif /* __INTRINSIC_PROLOG */
1478 
1479 #if __INTRINSIC_PROLOG(_InterlockedExchangeAdd)
1480 __LONG32 _InterlockedExchangeAdd(__LONG32 volatile *Addend, __LONG32 Value);
1481 #if !__has_builtin(_InterlockedExchangeAdd)
1482 __INTRINSICS_USEINLINE
_InterlockedExchangeAdd(__LONG32 volatile * Addend,__LONG32 Value)1483 __LONG32 _InterlockedExchangeAdd(__LONG32 volatile *Addend, __LONG32 Value) {
1484     return __sync_fetch_and_add(Addend, Value);
1485 }
1486 #endif
1487 #define __INTRINSIC_DEFINED__InterlockedExchangeAdd
1488 #endif /* __INTRINSIC_PROLOG */
1489 
1490 #if __INTRINSIC_PROLOG(_InterlockedCompareExchange)
1491 __LONG32 _InterlockedCompareExchange(__LONG32 volatile *Destination, __LONG32 ExChange, __LONG32 Comperand);
1492 #if !__has_builtin(_InterlockedCompareExchange)
1493 __INTRINSICS_USEINLINE
_InterlockedCompareExchange(__LONG32 volatile * Destination,__LONG32 ExChange,__LONG32 Comperand)1494 __LONG32 _InterlockedCompareExchange(__LONG32 volatile *Destination, __LONG32 ExChange, __LONG32 Comperand) {
1495     return __sync_val_compare_and_swap(Destination, Comperand, ExChange);
1496 }
1497 #endif
1498 #define __INTRINSIC_DEFINED__InterlockedCompareExchange
1499 #endif /* __INTRINSIC_PROLOG */
1500 
1501 #if __INTRINSIC_PROLOG(_InterlockedIncrement)
1502 __LONG32 _InterlockedIncrement(__LONG32 volatile *Addend);
1503 #if !__has_builtin(_InterlockedIncrement)
1504 __INTRINSICS_USEINLINE
_InterlockedIncrement(__LONG32 volatile * Addend)1505 __LONG32 _InterlockedIncrement(__LONG32 volatile *Addend) {
1506    return __sync_add_and_fetch(Addend, 1);
1507 }
1508 #endif
1509 #define __INTRINSIC_DEFINED__InterlockedIncrement
1510 #endif /* __INTRINSIC_PROLOG */
1511 
1512 #if __INTRINSIC_PROLOG(_InterlockedDecrement)
1513 __LONG32 _InterlockedDecrement(__LONG32 volatile *Addend);
1514 #if !__has_builtin(_InterlockedDecrement)
1515 __INTRINSICS_USEINLINE
_InterlockedDecrement(__LONG32 volatile * Addend)1516 __LONG32 _InterlockedDecrement(__LONG32 volatile *Addend) {
1517    return __sync_sub_and_fetch(Addend, 1);
1518 }
1519 #endif
1520 #define __INTRINSIC_DEFINED__InterlockedDecrement
1521 #endif /* __INTRINSIC_PROLOG */
1522 
#if __INTRINSIC_PROLOG(_InterlockedAdd)
__LONG32 _InterlockedAdd(__LONG32 volatile *Addend, __LONG32 Value);
#if !__has_builtin(_InterlockedAdd)
__INTRINSICS_USEINLINE
/* Atomically adds Value to *Addend and returns the resulting sum (the new
   value, unlike _InterlockedExchangeAdd which returns the old one). */
__LONG32 _InterlockedAdd(__LONG32 volatile *Addend, __LONG32 Value) {
    return __sync_add_and_fetch(Addend, Value);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedAdd
#endif /* __INTRINSIC_PROLOG */
1533 
#if __INTRINSIC_PROLOG(_InterlockedAdd64)
__MINGW_EXTENSION __int64 _InterlockedAdd64(__int64 volatile *Addend, __int64 Value);
#if !__has_builtin(_InterlockedAdd64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
/* 64-bit variant of _InterlockedAdd: atomically adds Value to *Addend and
   returns the resulting sum.  __MINGW_EXTENSION silences pedantic warnings
   about the non-standard __int64 type. */
__int64 _InterlockedAdd64(__int64 volatile *Addend, __int64 Value) {
    return __sync_add_and_fetch(Addend, Value);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedAdd64
#endif /* __INTRINSIC_PROLOG */
1544 
#if __INTRINSIC_PROLOG(_InterlockedExchange)
__LONG32 _InterlockedExchange(__LONG32 volatile *Target, __LONG32 Value);
#if !__has_builtin(_InterlockedExchange)
__INTRINSICS_USEINLINE
/* Atomically stores Value into *Target and returns the previous value.
   NOTE(review): per GCC documentation __sync_lock_test_and_set is only an
   acquire barrier, which is weaker than MSVC's full-barrier semantics for
   _InterlockedExchange — confirm this is acceptable for callers. */
__LONG32 _InterlockedExchange(__LONG32 volatile *Target, __LONG32 Value) {
    return __sync_lock_test_and_set(Target, Value);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedExchange
#endif /* __INTRINSIC_PROLOG */
1555 
#if __INTRINSIC_PROLOG(_InterlockedCompareExchange64)
__MINGW_EXTENSION __int64 _InterlockedCompareExchange64(__int64 volatile *Destination, __int64 ExChange, __int64 Comperand);
#if !__has_builtin(_InterlockedCompareExchange64)
__MINGW_EXTENSION __INTRINSICS_USEINLINE
/* 64-bit atomic compare-and-swap; returns the prior value of *Destination.
   Argument order is swapped relative to the GCC builtin (see the 32-bit
   variant above for details). */
__int64 _InterlockedCompareExchange64(__int64 volatile *Destination, __int64 ExChange, __int64 Comperand) {
    return __sync_val_compare_and_swap(Destination, Comperand, ExChange);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedCompareExchange64
#endif /* __INTRINSIC_PROLOG */
1566 
#if __INTRINSIC_PROLOG(_InterlockedCompareExchangePointer)
void *_InterlockedCompareExchangePointer(void * volatile *Destination, void *ExChange, void *Comperand);
#if !__has_builtin(_InterlockedCompareExchangePointer)
__INTRINSICS_USEINLINE
/* Pointer-sized atomic compare-and-swap; returns the prior value of
   *Destination. */
void *_InterlockedCompareExchangePointer(void *volatile *Destination, void *ExChange, void *Comperand) {
    return __sync_val_compare_and_swap(Destination, Comperand, ExChange);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedCompareExchangePointer
#endif /* __INTRINSIC_PROLOG */
1577 
#if __INTRINSIC_PROLOG(_InterlockedExchangePointer)
void *_InterlockedExchangePointer(void *volatile *Target,void *Value);
#if !__has_builtin(_InterlockedExchangePointer)
__INTRINSICS_USEINLINE
/* Atomically stores Value into *Target and returns the previous pointer.
   NOTE(review): same acquire-only barrier caveat as _InterlockedExchange. */
void *_InterlockedExchangePointer(void *volatile *Target,void *Value) {
    return __sync_lock_test_and_set(Target, Value);
}
#endif
#define __INTRINSIC_DEFINED__InterlockedExchangePointer
#endif /* __INTRINSIC_PROLOG */
1588 
1589 #endif /* defined(__x86_64__) || defined(_AMD64_) || defined(__i386__) || defined(_X86_) || defined(__arm__) || defined(_ARM_) || defined(__aarch64__) || defined(_ARM64_) */
1590 
1591 #if defined(__x86_64__) || defined(_AMD64_) || defined(__i386__) || defined(_X86_)
1592 
#if __INTRINSIC_PROLOG(__int2c)
void __int2c(void);
#if !__has_builtin(__int2c)
__INTRINSICS_USEINLINE
/* Raises software interrupt 0x2c via the __buildint helper macro (defined
   earlier in this file, outside this chunk). */
void __int2c(void) {
    __buildint(0x2c);
}
#endif
#define __INTRINSIC_DEFINED___int2c
#endif /* __INTRINSIC_PROLOG */
1603 
#if __INTRINSIC_PROLOG(__stosb)
void __stosb(unsigned char *, unsigned char, size_t);
#if !__has_builtin(__stosb)
__INTRINSICS_USEINLINE
/* Fill Count bytes at the destination with the given byte value.  The
   definition is generated by the __buildstos macro (defined earlier in this
   file); the "b|b" argument selects the byte-sized instruction suffix for
   AT&T | Intel syntax. */
__buildstos(__stosb, unsigned char, "b|b")
#endif
#define __INTRINSIC_DEFINED___stosb
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__stosw)
void __stosw(unsigned short *, unsigned short, size_t);
#if !__has_builtin(__stosw)
__INTRINSICS_USEINLINE
/* 16-bit variant of __stosb: fills Count 16-bit words. */
__buildstos(__stosw, unsigned short, "w|w")
#endif
#define __INTRINSIC_DEFINED___stosw
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__stosd)
void __stosd(unsigned __LONG32 *, unsigned __LONG32, size_t);
#if !__has_builtin(__stosd)
__INTRINSICS_USEINLINE
/* 32-bit variant of __stosb: fills Count 32-bit dwords. */
__buildstos(__stosd, unsigned __LONG32, "l|d")
#endif
#define __INTRINSIC_DEFINED___stosd
#endif /* __INTRINSIC_PROLOG */
1630 
#if __INTRINSIC_PROLOG(_interlockedbittestandset)
unsigned char _interlockedbittestandset(__LONG32 volatile *a, __LONG32 b);
#if !__has_builtin(_interlockedbittestandset)
__INTRINSICS_USEINLINE
/* Atomically sets bit b of *a using "lock bts" and returns the bit's prior
   value.  Definition generated by the __buildbittesti macro; the asm
   template carries both AT&T and Intel operand orders ({ | }) and appends
   the flag-read suffix via __FLAGSET. */
__buildbittesti(_interlockedbittestandset, __LONG32, "lock bts{l %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "I")
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandset
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(_interlockedbittestandreset)
unsigned char _interlockedbittestandreset(__LONG32 volatile *a, __LONG32 b);
#if !__has_builtin(_interlockedbittestandreset)
__INTRINSICS_USEINLINE
/* Atomically clears bit b of *a ("lock btr") and returns its prior value. */
__buildbittesti(_interlockedbittestandreset, __LONG32, "lock btr{l %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "I")
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandreset
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(_interlockedbittestandcomplement)
unsigned char _interlockedbittestandcomplement(__LONG32 volatile *a, __LONG32 b);
#if !__has_builtin(_interlockedbittestandcomplement)
__INTRINSICS_USEINLINE
/* Atomically toggles bit b of *a ("lock btc") and returns its prior value. */
__buildbittesti(_interlockedbittestandcomplement, __LONG32, "lock btc{l %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "I")
#endif
#define __INTRINSIC_DEFINED__interlockedbittestandcomplement
#endif /* __INTRINSIC_PROLOG */
1657 
#if __INTRINSIC_PROLOG(InterlockedBitTestAndSet)
unsigned char InterlockedBitTestAndSet(volatile __LONG32 *a, __LONG32 b);
#if !__has_builtin(InterlockedBitTestAndSet)
__INTRINSICS_USEINLINE
/* Win32-API-named alias generated identically to _interlockedbittestandset
   above: atomic "lock bts", returns the bit's prior value. */
__buildbittesti(InterlockedBitTestAndSet, __LONG32, "lock bts{l %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "I")
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndSet
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(InterlockedBitTestAndReset)
unsigned char InterlockedBitTestAndReset(volatile __LONG32 *a, __LONG32 b);
#if !__has_builtin(InterlockedBitTestAndReset)
__INTRINSICS_USEINLINE
/* Atomic "lock btr": clears bit b of *a, returns its prior value. */
__buildbittesti(InterlockedBitTestAndReset, __LONG32, "lock btr{l %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "I")
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndReset
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(InterlockedBitTestAndComplement)
unsigned char InterlockedBitTestAndComplement(volatile __LONG32 *a, __LONG32 b);
#if !__has_builtin(InterlockedBitTestAndComplement)
__INTRINSICS_USEINLINE
/* Atomic "lock btc": toggles bit b of *a, returns its prior value. */
__buildbittesti(InterlockedBitTestAndComplement, __LONG32, "lock btc{l %[Offset],%[Base] | %[Base],%[Offset]}" __FLAGSET, "I")
#endif
#define __INTRINSIC_DEFINED_InterlockedBitTestAndComplement
#endif /* __INTRINSIC_PROLOG */
1684 
#if __INTRINSIC_PROLOG(_BitScanForward)
unsigned char _BitScanForward(unsigned __LONG32 *Index, unsigned __LONG32 Mask);
#if !__has_builtin(_BitScanForward)
__INTRINSICS_USEINLINE
/* Finds the lowest set bit of Mask via "bsf", storing its position in
   *Index.  Definition generated by the __buildbitscan macro (defined
   earlier in this file). */
__buildbitscan(_BitScanForward, unsigned __LONG32, "bsf{l %[Mask],%[Index] | %[Index],%[Mask]}")
#endif
#define __INTRINSIC_DEFINED__BitScanForward
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(_BitScanReverse)
unsigned char _BitScanReverse(unsigned __LONG32 *Index, unsigned __LONG32 Mask);
#if !__has_builtin(_BitScanReverse)
__INTRINSICS_USEINLINE
/* Finds the highest set bit of Mask via "bsr", storing its position in
   *Index. */
__buildbitscan(_BitScanReverse, unsigned __LONG32, "bsr{l %[Mask],%[Index] | %[Index],%[Mask]}")
#endif
#define __INTRINSIC_DEFINED__BitScanReverse
#endif /* __INTRINSIC_PROLOG */
1702 
#if __INTRINSIC_PROLOG(_bittest)
unsigned char _bittest(__LONG32 const *a, __LONG32 b);
#if !__has_builtin(_bittest)
__INTRINSICS_USEINLINE
/* Non-atomic test of bit b of *a; returns 0 or 1.  Definition generated by
   the __buildbittest macro (defined earlier in this file). */
__buildbittest(_bittest, __LONG32, "l", "I")
#endif
#define __INTRINSIC_DEFINED__bittest
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(_bittestandset)
unsigned char _bittestandset(__LONG32 *a, __LONG32 b);
#if !__has_builtin(_bittestandset)
__INTRINSICS_USEINLINE
/* Non-atomic "bts" (no lock prefix): sets bit b of *a, returns its prior
   value.  See the _interlockedbittestandset family for the atomic form. */
__buildbittestand(_bittestandset, __LONG32, "bts", "I", "l")
#endif
#define __INTRINSIC_DEFINED__bittestandset
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(_bittestandreset)
unsigned char _bittestandreset(__LONG32 *a, __LONG32 b);
#if !__has_builtin(_bittestandreset)
__INTRINSICS_USEINLINE
/* Non-atomic "btr": clears bit b of *a, returns its prior value. */
__buildbittestand(_bittestandreset, __LONG32, "btr", "I", "l")
#endif
#define __INTRINSIC_DEFINED__bittestandreset
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(_bittestandcomplement)
unsigned char _bittestandcomplement(__LONG32 *a, __LONG32 b);
#if !__has_builtin(_bittestandcomplement)
__INTRINSICS_USEINLINE
/* Non-atomic "btc": toggles bit b of *a, returns its prior value. */
__buildbittestand(_bittestandcomplement, __LONG32, "btc", "I", "l")
#endif
#define __INTRINSIC_DEFINED__bittestandcomplement
#endif /* __INTRINSIC_PROLOG */
1738 
#if __INTRINSIC_PROLOG(__inbyte)
unsigned char __inbyte(unsigned short Port);
#if !__has_builtin(__inbyte)
__INTRINSICS_USEINLINE
/* Reads one byte from the given x86 I/O port.  Definition generated by the
   __build_inport macro (defined earlier in this file); the last argument is
   the instruction size suffix.  Port I/O is privileged on Windows: these
   are for kernel-mode / driver code. */
__build_inport(__inbyte, unsigned char, "b")
#endif
#define __INTRINSIC_DEFINED___inbyte
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__inword)
unsigned short __inword(unsigned short Port);
#if !__has_builtin(__inword)
__INTRINSICS_USEINLINE
/* Reads one 16-bit word from an I/O port. */
__build_inport(__inword, unsigned short, "w")
#endif
#define __INTRINSIC_DEFINED___inword
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__indword)
unsigned __LONG32 __indword(unsigned short Port);
#if !__has_builtin(__indword)
__INTRINSICS_USEINLINE
/* Reads one 32-bit dword from an I/O port. */
__build_inport(__indword, unsigned __LONG32, "l")
#endif
#define __INTRINSIC_DEFINED___indword
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__outbyte)
void __outbyte(unsigned short Port, unsigned char Data);
#if !__has_builtin(__outbyte)
__INTRINSICS_USEINLINE
/* Writes one byte to an I/O port (generated by __build_outport). */
__build_outport(__outbyte, unsigned char, "b")
#endif
#define __INTRINSIC_DEFINED___outbyte
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__outword)
void __outword(unsigned short Port, unsigned short Data);
#if !__has_builtin(__outword)
__INTRINSICS_USEINLINE
/* Writes one 16-bit word to an I/O port. */
__build_outport(__outword, unsigned short, "w")
#endif
#define __INTRINSIC_DEFINED___outword
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__outdword)
void __outdword(unsigned short Port, unsigned __LONG32 Data);
#if !__has_builtin(__outdword)
__INTRINSICS_USEINLINE
/* Writes one 32-bit dword to an I/O port. */
__build_outport(__outdword, unsigned __LONG32, "l")
#endif
#define __INTRINSIC_DEFINED___outdword
#endif /* __INTRINSIC_PROLOG */
1792 
#if __INTRINSIC_PROLOG(__inbytestring)
void __inbytestring(unsigned short Port, unsigned char *Buffer, unsigned __LONG32 Count);
#if !__has_builtin(__inbytestring)
__INTRINSICS_USEINLINE
/* Reads Count bytes from an I/O port into Buffer.  Definition generated by
   the __build_inportstring macro (defined earlier in this file); the two
   string arguments are size suffixes for the AT&T | Intel asm dialects. */
__build_inportstring(__inbytestring, unsigned char, "b", "b")
#endif
#define __INTRINSIC_DEFINED___inbytestring
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__inwordstring)
void __inwordstring(unsigned short Port, unsigned short *Buffer, unsigned __LONG32 Count);
#if !__has_builtin(__inwordstring)
__INTRINSICS_USEINLINE
/* Reads Count 16-bit words from an I/O port into Buffer. */
__build_inportstring(__inwordstring, unsigned short, "w", "w")
#endif
#define __INTRINSIC_DEFINED___inwordstring
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__indwordstring)
void __indwordstring(unsigned short Port, unsigned __LONG32 *Buffer, unsigned __LONG32 Count);
#if !__has_builtin(__indwordstring)
__INTRINSICS_USEINLINE
/* Reads Count 32-bit dwords from an I/O port into Buffer. */
__build_inportstring(__indwordstring, unsigned __LONG32, "l", "d")
#endif
#define __INTRINSIC_DEFINED___indwordstring
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__outbytestring)
void __outbytestring(unsigned short Port, unsigned char *Buffer, unsigned __LONG32 Count);
#if !__has_builtin(__outbytestring)
__INTRINSICS_USEINLINE
/* Writes Count bytes from Buffer to an I/O port (__build_outportstring). */
__build_outportstring(__outbytestring, unsigned char, "b", "b")
#endif
#define __INTRINSIC_DEFINED___outbytestring
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__outwordstring)
void __outwordstring(unsigned short Port, unsigned short *Buffer, unsigned __LONG32 Count);
#if !__has_builtin(__outwordstring)
__INTRINSICS_USEINLINE
/* Writes Count 16-bit words from Buffer to an I/O port. */
__build_outportstring(__outwordstring, unsigned short, "w", "w")
#endif
#define __INTRINSIC_DEFINED___outwordstring
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__outdwordstring)
void __outdwordstring(unsigned short Port, unsigned __LONG32 *Buffer, unsigned __LONG32 Count);
#if !__has_builtin(__outdwordstring)
__INTRINSICS_USEINLINE
/* Writes Count 32-bit dwords from Buffer to an I/O port. */
__build_outportstring(__outdwordstring, unsigned __LONG32, "l", "d")
#endif
#define __INTRINSIC_DEFINED___outdwordstring
#endif /* __INTRINSIC_PROLOG */
1846 
1847 #if __INTRINSIC_PROLOG(__cpuid)
1848 void __cpuid(int CPUInfo[4], int InfoType);
1849 #if !__has_builtin(__cpuid)
1850 __INTRINSICS_USEINLINE
__cpuid(int CPUInfo[4],int InfoType)1851 void __cpuid(int CPUInfo[4], int InfoType) {
1852    __asm__ __volatile__ (
1853       "cpuid"
1854       : "=a" (CPUInfo [0]), "=b" (CPUInfo [1]), "=c" (CPUInfo [2]), "=d" (CPUInfo [3])
1855       : "a" (InfoType));
1856 }
1857 #endif
1858 #define __INTRINSIC_DEFINED___cpuid
1859 #endif /* __INTRINSIC_PROLOG */
1860 
#if __INTRINSIC_PROLOG(__cpuidex)
void __cpuidex(int CPUInfo[4], int, int);
#if !__has_builtin(__cpuidex)
__INTRINSICS_USEINLINE
/* Executes CPUID with EAX = function_id and ECX = subfunction_id (the
   sub-leaf selector), storing EAX, EBX, ECX, EDX into CPUInfo[0..3]. */
void __cpuidex(int CPUInfo[4], int function_id, int subfunction_id) {
   __asm__ __volatile__ (
      "cpuid"
      : "=a" (CPUInfo [0]), "=b" (CPUInfo [1]), "=c" (CPUInfo [2]), "=d" (CPUInfo [3])
      : "a" (function_id), "c" (subfunction_id));
}
#endif
#define __INTRINSIC_DEFINED___cpuidex
#endif /* __INTRINSIC_PROLOG */
1874 
#if __INTRINSIC_PROLOG(__readmsr)
__MINGW_EXTENSION unsigned __int64 __readmsr(unsigned __LONG32);
#if !__has_builtin(__readmsr)
__INTRINSICS_USEINLINE
/* Reads the model-specific register selected by 'msr' via RDMSR and
   returns EDX:EAX combined into a single 64-bit value.  RDMSR is a
   privileged instruction — usable only from kernel-mode code. */
unsigned __int64 __readmsr(unsigned __LONG32 msr)
{
/* On x86-64 the "=a"/"=d" outputs are 64-bit registers (upper halves are
   zeroed by RDMSR), so 64-bit locals are used; on x86-32 they are 32-bit. */
#if defined(__x86_64__) || defined(_AMD64_)
   unsigned __int64 val1, val2;
#else
   unsigned __LONG32 val1, val2;
#endif /* defined(__x86_64__) || defined(_AMD64_) */

   __asm__ __volatile__(
      "rdmsr"
      : "=a" (val1), "=d" (val2)
      : "c" (msr));

   /* val1 = EAX (low 32 bits), val2 = EDX (high 32 bits). */
   return ((unsigned __int64) val1) | (((unsigned __int64)val2) << 32);
}
#endif
#define __INTRINSIC_DEFINED___readmsr
#endif /* __INTRINSIC_PROLOG */
1897 
#if __INTRINSIC_PROLOG(__writemsr)
__MINGW_EXTENSION void __writemsr(unsigned __LONG32, unsigned __int64);
#if !__has_builtin(__writemsr)
__INTRINSICS_USEINLINE
/* Writes Value to the model-specific register selected by 'msr' via WRMSR
   (EDX:EAX = Value, ECX = msr).  Privileged; kernel-mode only. */
void __writemsr(unsigned __LONG32 msr, unsigned __int64 Value)
{
   /* Split the 64-bit value into the low (EAX) and high (EDX) halves. */
   unsigned __LONG32 val1 = Value, val2 = Value >> 32;
   __asm__ __volatile__ (
      "wrmsr"
      :
      : "c" (msr), "a" (val1), "d" (val2));
}
#endif
#define __INTRINSIC_DEFINED___writemsr
#endif /* __INTRINSIC_PROLOG */
1913 
#if __INTRINSIC_PROLOG(__movsb)
void __movsb(unsigned char *Destination, unsigned char const *Source, size_t Count);
#if !__has_builtin(__movsb)
__INTRINSICS_USEINLINE
/* Copies Count bytes from Source to Destination.  Definition generated by
   the __buildmov macro (defined earlier in this file; presumably emits a
   "rep movsb" — confirm against the macro definition). */
__buildmov(__movsb, unsigned char, "b")
#endif
#define __INTRINSIC_DEFINED___movsb
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__movsw)
void __movsw(unsigned short *Dest, unsigned short const *Source, size_t Count);
#if !__has_builtin(__movsw)
__INTRINSICS_USEINLINE
/* 16-bit variant: copies Count 16-bit words. */
__buildmov(__movsw, unsigned short, "w")
#endif
#define __INTRINSIC_DEFINED___movsw
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__movsd)
void __movsd(unsigned __LONG32 *Dest, unsigned __LONG32 const *Source, size_t Count);
#if !__has_builtin(__movsd)
__INTRINSICS_USEINLINE
/* 32-bit variant: copies Count 32-bit dwords. */
__buildmov(__movsd, unsigned __LONG32, "d")
#endif
#define __INTRINSIC_DEFINED___movsd
#endif /* __INTRINSIC_PROLOG */
1940 
/* GCC 8 has already defined _xgetbv, Clang 9 has _xgetbv defined as a macro
 * redirecting to the __builtin_ia32_xgetbv builtin. */
#if (!defined(__GNUC__) || __GNUC__ < 8) && !defined(_xgetbv)
/* NOTE: This should be in immintrin.h */
#if __INTRINSIC_PROLOG(_xgetbv)
unsigned __int64 _xgetbv(unsigned int);
#if !__has_builtin(_xgetbv)
__INTRINSICS_USEINLINE
/* Reads the extended control register selected by 'index' (XCR; index 0 is
   XCR0) via XGETBV and returns EDX:EAX as one 64-bit value. */
unsigned __int64 _xgetbv(unsigned int index)
{
/* Same 64-/32-bit local-width split as __readmsr above. */
#if defined(__x86_64__) || defined(_AMD64_)
   unsigned __int64 val1, val2;
#else
   unsigned __LONG32 val1, val2;
#endif /* defined(__x86_64__) || defined(_AMD64_) */

   __asm__ __volatile__(
      "xgetbv"
      : "=a" (val1), "=d" (val2)
      : "c" (index));

   /* val1 = EAX (low half), val2 = EDX (high half). */
   return (((unsigned __int64)val2) << 32) | val1;
}
#endif
#define __INTRINSIC_DEFINED__xgetbv
#endif /* __INTRINSIC_PROLOG */
#endif /* __GNUC__ < 8 */
1968 
1969 #endif /* defined(__x86_64__) || defined(_AMD64_) || defined(__i386__) || defined(_X86_) */
1970 
1971 /* ***************************************************** */
1972 
1973 #if defined(__i386__) || defined(_X86_)
1974 
#if __INTRINSIC_PROLOG(__readfsbyte)
unsigned char __readfsbyte(unsigned __LONG32 Offset);
#if !__has_builtin(__readfsbyte)
__INTRINSICS_USEINLINE
/* Reads a byte at the given offset from the FS segment base.  Definition
   generated by the __buildreadseg macro (defined earlier in this file).
   NOTE(review): on 32-bit Windows FS is conventionally the TEB base —
   confirm against the winnt.h callers. */
__buildreadseg(__readfsbyte, unsigned char, "fs", "b")
#endif
#define __INTRINSIC_DEFINED___readfsbyte
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__readfsword)
unsigned short __readfsword(unsigned __LONG32 Offset);
#if !__has_builtin(__readfsword)
__INTRINSICS_USEINLINE
/* 16-bit FS-relative read. */
__buildreadseg(__readfsword, unsigned short, "fs", "w")
#endif
#define __INTRINSIC_DEFINED___readfsword
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__readfsdword)
unsigned __LONG32 __readfsdword(unsigned __LONG32 Offset);
#if !__has_builtin(__readfsdword)
__INTRINSICS_USEINLINE
/* 32-bit FS-relative read. */
__buildreadseg(__readfsdword, unsigned __LONG32, "fs", "l")
#endif
#define __INTRINSIC_DEFINED___readfsdword
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__writefsbyte)
void __writefsbyte(unsigned __LONG32 Offset,unsigned char Data);
#if !__has_builtin(__writefsbyte)
__INTRINSICS_USEINLINE
/* Writes a byte at the given offset from the FS segment base
   (__buildwriteseg counterpart of __readfsbyte). */
__buildwriteseg(__writefsbyte, unsigned char, "fs", "b")
#endif
#define __INTRINSIC_DEFINED___writefsbyte
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__writefsword)
void __writefsword(unsigned __LONG32 Offset,unsigned short Data);
#if !__has_builtin(__writefsword)
__INTRINSICS_USEINLINE
/* 16-bit FS-relative write. */
__buildwriteseg(__writefsword, unsigned short, "fs", "w")
#endif
#define __INTRINSIC_DEFINED___writefsword
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__writefsdword)
void __writefsdword(unsigned __LONG32 Offset,unsigned __LONG32 Data);
#if !__has_builtin(__writefsdword)
__INTRINSICS_USEINLINE
/* 32-bit FS-relative write. */
__buildwriteseg(__writefsdword, unsigned __LONG32, "fs", "l")
#endif
#define __INTRINSIC_DEFINED___writefsdword
#endif /* __INTRINSIC_PROLOG */
2028 
#if __INTRINSIC_PROLOG(__readcr0)
unsigned __LONG32 __readcr0(void);
#if !__has_builtin(__readcr0)
__INTRINSICS_USEINLINE
/* Returns the value of control register CR0.  Definition generated by the
   __build_readcr macro (defined earlier in this file); the string argument
   selects the register number.  Privileged; kernel-mode only. */
__build_readcr(__readcr0, unsigned __LONG32, "0")
#endif
#define __INTRINSIC_DEFINED___readcr0
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__readcr2)
unsigned __LONG32 __readcr2(void);
#if !__has_builtin(__readcr2)
__INTRINSICS_USEINLINE
/* Returns CR2 (page-fault linear address register). */
__build_readcr(__readcr2, unsigned __LONG32, "2")
#endif
#define __INTRINSIC_DEFINED___readcr2
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__readcr3)
unsigned __LONG32 __readcr3(void);
#if !__has_builtin(__readcr3)
__INTRINSICS_USEINLINE
/* Returns CR3 (page-directory base register). */
__build_readcr(__readcr3, unsigned __LONG32, "3")
#endif
#define __INTRINSIC_DEFINED___readcr3
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__readcr4)
unsigned __LONG32 __readcr4(void);
#if !__has_builtin(__readcr4)
__INTRINSICS_USEINLINE
/* Returns CR4 (processor feature-control register). */
__build_readcr(__readcr4, unsigned __LONG32, "4")
#endif
#define __INTRINSIC_DEFINED___readcr4
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__readcr8)
unsigned __LONG32 __readcr8(void);
#if !__has_builtin(__readcr8)
__INTRINSICS_USEINLINE
/* Returns CR8 (task-priority register). */
__build_readcr(__readcr8, unsigned __LONG32, "8")
#endif
#define __INTRINSIC_DEFINED___readcr8
#endif /* __INTRINSIC_PROLOG */
2073 
#if __INTRINSIC_PROLOG(__writecr0)
void __writecr0(unsigned __LONG32);
#if !__has_builtin(__writecr0)
__INTRINSICS_USEINLINE
/* Writes a value to control register CR0.  Definition generated by the
   __build_writecr macro (defined earlier in this file).  Privileged;
   kernel-mode only.  Note there is intentionally no __writecr2: it has no
   MSVC intrinsic counterpart. */
__build_writecr(__writecr0, unsigned __LONG32, "0")
#endif
#define __INTRINSIC_DEFINED___writecr0
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__writecr3)
void __writecr3(unsigned __LONG32);
#if !__has_builtin(__writecr3)
__INTRINSICS_USEINLINE
/* Writes CR3 (page-directory base register). */
__build_writecr(__writecr3, unsigned __LONG32, "3")
#endif
#define __INTRINSIC_DEFINED___writecr3
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__writecr4)
void __writecr4(unsigned __LONG32);
#if !__has_builtin(__writecr4)
__INTRINSICS_USEINLINE
/* Writes CR4 (processor feature-control register). */
__build_writecr(__writecr4, unsigned __LONG32, "4")
#endif
#define __INTRINSIC_DEFINED___writecr4
#endif /* __INTRINSIC_PROLOG */

#if __INTRINSIC_PROLOG(__writecr8)
void __writecr8(unsigned __LONG32);
#if !__has_builtin(__writecr8)
__INTRINSICS_USEINLINE
/* Writes CR8 (task-priority register). */
__build_writecr(__writecr8, unsigned __LONG32, "8")
#endif
#define __INTRINSIC_DEFINED___writecr8
#endif /* __INTRINSIC_PROLOG */
2109 
2110 #endif /* defined(__i386__) || defined(_X86_) */
2111 
2112 #ifdef __cplusplus
2113 }
2114 #endif
2115 
2116 #undef __INTRINSIC_ONLYSPECIAL
2117 #undef __INTRINSIC_PROLOG
2118 #undef __INTRINSIC_EPILOG
2119 #undef __INTRINSICS_USEINLINE
2120 #undef __FLAGCONSTRAINT
2121 #undef __FLAGSET
2122 #undef __FLAGCLOBBER1
2123 #undef __FLAGCLOBBER2
2124 
2125 #pragma pop_macro("__has_builtin")
2126 
2127 #endif /* __MINGW_INTRIN_INLINE */
2128