/*===---- bmiintrin.h - BMI intrinsics -------------------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */

#if !defined __X86INTRIN_H && !defined __IMMINTRIN_H
#error "Never use <bmiintrin.h> directly; include <x86intrin.h> instead."
#endif

#ifndef __BMIINTRIN_H
#define __BMIINTRIN_H

#define _tzcnt_u16(a) (__tzcnt_u16((a)))
#define _andn_u32(a, b) (__andn_u32((a), (b)))
/* _bextr_u32 is not an alias for __bextr_u32: the Intel form takes separate
   start and length operands, so it is defined as a function below. */
#define _blsi_u32(a) (__blsi_u32((a)))
#define _blsmsk_u32(a) (__blsmsk_u32((a)))
#define _blsr_u32(a) (__blsr_u32((a)))
#define _tzcnt_u32(a) (__tzcnt_u32((a)))

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__, __target__("bmi")))

/* Allow using the tzcnt intrinsics even for non-BMI targets. Since the TZCNT
   instruction behaves as BSF on non-BMI targets, there is code that expects
   to use it as a potentially faster version of BSF. */
#define __RELAXED_FN_ATTRS __attribute__((__always_inline__, __nodebug__))

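/* Counts the number of trailing zero bits in a 16-bit value (TZCNT); returns
   16 when the input is 0. */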
static __inline__ unsigned short __RELAXED_FN_ATTRS
__tzcnt_u16(unsigned short __X)
{
  return __X ? __builtin_ctzs(__X) : 16;
}

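/* Computes the bitwise AND of the second operand with the one's complement of
   the first operand (ANDN). */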
static __inline__ unsigned int __DEFAULT_FN_ATTRS
__andn_u32(unsigned int __X, unsigned int __Y)
{
  return ~__X & __Y;
}

/* AMD-specified, double-leading-underscore version of BEXTR */
static __inline__ unsigned int __DEFAULT_FN_ATTRS
__bextr_u32(unsigned int __X, unsigned int __Y)
{
  return __builtin_ia32_bextr_u32(__X, __Y);
}

/* Intel-specified, single-leading-underscore version of BEXTR */
static __inline__ unsigned int __DEFAULT_FN_ATTRS
_bextr_u32(unsigned int __X, unsigned int __Y, unsigned int __Z)
{
  return __builtin_ia32_bextr_u32(__X, ((__Y & 0xff) | ((__Z & 0xff) << 8)));
}

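/* Isolates the lowest set bit of the source operand; all other bits are
   cleared (BLSI). */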
static __inline__ unsigned int __DEFAULT_FN_ATTRS
__blsi_u32(unsigned int __X)
{
  return __X & -__X;
}

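/* Produces a mask with all bits up to and including the lowest set bit of the
   source operand set to 1 (BLSMSK). */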
static __inline__ unsigned int __DEFAULT_FN_ATTRS
__blsmsk_u32(unsigned int __X)
{
  return __X ^ (__X - 1);
}

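/* Clears the lowest set bit of the source operand (BLSR). */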
static __inline__ unsigned int __DEFAULT_FN_ATTRS
__blsr_u32(unsigned int __X)
{
  return __X & (__X - 1);
}

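/* Counts the number of trailing zero bits in a 32-bit value (TZCNT); returns
   32 when the input is 0. */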
static __inline__ unsigned int __RELAXED_FN_ATTRS
__tzcnt_u32(unsigned int __X)
{
  return __X ? __builtin_ctz(__X) : 32;
}

#ifdef __x86_64__

#define _andn_u64(a, b) (__andn_u64((a), (b)))
/* _bextr_u64 is not an alias for __bextr_u64: the Intel form takes separate
   start and length operands, so it is defined as a function below. */
#define _blsi_u64(a) (__blsi_u64((a)))
#define _blsmsk_u64(a) (__blsmsk_u64((a)))
#define _blsr_u64(a) (__blsr_u64((a)))
#define _tzcnt_u64(a) (__tzcnt_u64((a)))

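/* 64-bit ANDN: bitwise AND of __Y with the one's complement of __X. */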
static __inline__ unsigned long long __DEFAULT_FN_ATTRS
__andn_u64(unsigned long long __X, unsigned long long __Y)
{
  return ~__X & __Y;
}

/* AMD-specified, double-leading-underscore version of BEXTR */
static __inline__ unsigned long long __DEFAULT_FN_ATTRS
__bextr_u64(unsigned long long __X, unsigned long long __Y)
{
  return __builtin_ia32_bextr_u64(__X, __Y);
}

/* Intel-specified, single-leading-underscore version of BEXTR */
static __inline__ unsigned long long __DEFAULT_FN_ATTRS
_bextr_u64(unsigned long long __X, unsigned int __Y, unsigned int __Z)
{
  return __builtin_ia32_bextr_u64(__X, ((__Y & 0xff) | ((__Z & 0xff) << 8)));
}

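/* 64-bit BLSI: isolates the lowest set bit of the source operand. */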
static __inline__ unsigned long long __DEFAULT_FN_ATTRS
__blsi_u64(unsigned long long __X)
{
  return __X & -__X;
}

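/* 64-bit BLSMSK: mask of all bits up to and including the lowest set bit. */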
static __inline__ unsigned long long __DEFAULT_FN_ATTRS
__blsmsk_u64(unsigned long long __X)
{
  return __X ^ (__X - 1);
}

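/* 64-bit BLSR: clears the lowest set bit of the source operand. */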
static __inline__ unsigned long long __DEFAULT_FN_ATTRS
__blsr_u64(unsigned long long __X)
{
  return __X & (__X - 1);
}

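/* 64-bit TZCNT: counts trailing zero bits; returns 64 when the input is 0. */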
static __inline__ unsigned long long __RELAXED_FN_ATTRS
__tzcnt_u64(unsigned long long __X)
{
  return __X ? __builtin_ctzll(__X) : 64;
}

#endif /* __x86_64__ */

#undef __DEFAULT_FN_ATTRS
#undef __RELAXED_FN_ATTRS

#endif /* __BMIINTRIN_H */