1 /**************************************************************************
2 *
3 * Copyright 2008 VMware, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28
29 #ifndef BITSCAN_H
30 #define BITSCAN_H
31
32 #include <assert.h>
33 #include <stdint.h>
34 #include <stdbool.h>
35 #include <string.h>
36
37 #if defined(_MSC_VER)
38 #include <intrin.h>
39 #endif
40
41 #if defined(__POPCNT__)
42 #include <popcntintrin.h>
43 #endif
44
45 #include "c99_compat.h"
46
47 #ifdef __cplusplus
48 extern "C" {
49 #endif
50
51
52 /**
53 * Find first bit set in word. Least significant bit is 1.
54 * Return 0 if no bits set.
55 */
/* Prefer the compiler builtin when the build system detected it. */
#ifdef HAVE___BUILTIN_FFS
#define ffs __builtin_ffs
#elif defined(_MSC_VER) && (_M_IX86 || _M_ARM || _M_AMD64 || _M_IA64)
/* MSVC provides no ffs(); emulate it with the _BitScanForward intrinsic. */
static inline
int ffs(int i)
{
   unsigned long index;
   if (_BitScanForward(&index, i))
      /* index is the zero-based position of the lowest set bit, while
       * ffs() numbers bits starting at 1.
       */
      return index + 1;
   else
      /* _BitScanForward returned 0: no bit set in i. */
      return 0;
}
#else
/* Fall back to the C library's ffs() (POSIX, <strings.h>). */
extern
int ffs(int i);
#endif
72
/* 64-bit counterpart of ffs(): prefer the compiler builtin when available. */
#ifdef HAVE___BUILTIN_FFSLL
#define ffsll __builtin_ffsll
#elif defined(_MSC_VER) && (_M_AMD64 || _M_ARM64 || _M_IA64)
/* MSVC on 64-bit targets: emulate via the _BitScanForward64 intrinsic.
 * Note the narrower architecture list than ffs() above — the 64-bit
 * intrinsic does not exist on 32-bit x86/ARM.
 */
static inline int
ffsll(long long int i)
{
   unsigned long index;
   if (_BitScanForward64(&index, i))
      /* Convert zero-based bit position to ffs()'s 1-based numbering. */
      return index + 1;
   else
      /* No bit set. */
      return 0;
}
#else
/* Fall back to the C library's ffsll() (glibc/BSD extension). */
extern int
ffsll(long long int val);
#endif
89
90
91 /* Destructively loop over all of the bits in a mask as in:
92 *
93 * while (mymask) {
94 * int i = u_bit_scan(&mymask);
95 * ... process element i
96 * }
97 *
98 */
static inline int
u_bit_scan(unsigned *mask)
{
   /* The destructive-loop contract (see comment above) guarantees the
    * caller only invokes this while *mask is non-zero.  For *mask == 0,
    * ffs() returns 0, i would be -1, and the shift below would be
    * undefined behavior — catch that misuse in debug builds.
    */
   assert(*mask != 0);

   const int i = ffs(*mask) - 1;  /* index of lowest set bit */
   *mask ^= (1u << i);            /* clear it for the next iteration */
   return i;
}
106
static inline int
u_bit_scan64(uint64_t *mask)
{
   /* Same contract as u_bit_scan(): caller must ensure *mask != 0,
    * otherwise ffsll() returns 0 and the shift by -1 below would be
    * undefined behavior.
    */
   assert(*mask != 0);

   const int i = ffsll(*mask) - 1;   /* index of lowest set bit */
   *mask ^= (((uint64_t)1) << i);    /* clear it for the next iteration */
   return i;
}
114
115 /* Determine if an unsigned value is a power of two.
116 *
117 * \note
118 * Zero is treated as a power of two.
119 */
static inline bool
util_is_power_of_two_or_zero(unsigned v)
{
   /* v - 1 clears the lowest set bit and sets every bit below it, so the
    * AND is zero exactly when at most one bit of v is set.
    */
   return !(v & (v - 1));
}
125
126 /* Determine if an uint64_t value is a power of two.
127 *
128 * \note
129 * Zero is treated as a power of two.
130 */
static inline bool
util_is_power_of_two_or_zero64(uint64_t v)
{
   /* Same lowest-set-bit trick as the 32-bit variant above. */
   return !(v & (v - 1));
}
136
137 /* Determine if an unsigned value is a power of two.
138 *
139 * \note
140 * Zero is \b not treated as a power of two.
141 */
static inline bool
util_is_power_of_two_nonzero(unsigned v)
{
   /* __POPCNT__ is distinct from HAVE___BUILTIN_POPCOUNT: the latter only
    * says __builtin_popcount exists (possibly as a library call), while the
    * former says _mm_popcnt_u32 is a single native instruction.  Gating on
    * SSE 4.2 instead would both require new build infrastructure (only 4.1
    * exists today) and exclude CPUs such as AMD Barcelona that have POPCNT
    * without SSE 4.2.
    */
#ifdef __POPCNT__
   return _mm_popcnt_u32(v) == 1;
#else
   /* Exactly one bit set: non-zero, and clearing the lowest bit gives 0. */
   return v && !(v & (v - 1));
#endif
}
160
161 /* For looping over a bitmask when you want to loop over consecutive bits
162 * manually, for example:
163 *
164 * while (mask) {
165 * int start, count, i;
166 *
167 * u_bit_scan_consecutive_range(&mask, &start, &count);
168 *
169 * for (i = 0; i < count; i++)
170 * ... process element (start+i)
171 * }
172 */
static inline void
u_bit_scan_consecutive_range(unsigned *mask, int *start, int *count)
{
   const unsigned bits = *mask;

   /* A fully set mask needs special handling: ~(bits >> 0) would be 0 and
    * ffs() could not express a run of 32 bits.
    */
   if (bits == 0xffffffff) {
      *start = 0;
      *count = 32;
      *mask = 0;
      return;
   }

   const int first = ffs(bits) - 1;           /* position of lowest set bit */
   const int len = ffs(~(bits >> first)) - 1; /* length of the run of 1s */

   *start = first;
   *count = len;
   /* Clear the consumed run so the caller's loop can continue. */
   *mask = bits & ~(((1u << len) - 1) << first);
}
186
static inline void
u_bit_scan_consecutive_range64(uint64_t *mask, int *start, int *count)
{
   const uint64_t bits = *mask;

   /* A fully set mask needs special handling: ~(bits >> 0) would be 0 and
    * ffsll() could not express a run of 64 bits.
    */
   if (bits == ~UINT64_C(0)) {
      *start = 0;
      *count = 64;
      *mask = 0;
      return;
   }

   const int first = ffsll(bits) - 1;           /* position of lowest set bit */
   const int len = ffsll(~(bits >> first)) - 1; /* length of the run of 1s */

   *start = first;
   *count = len;
   /* Clear the consumed run so the caller's loop can continue. */
   *mask = bits & ~(((UINT64_C(1) << len) - 1) << first);
}
200
201
202 /**
203 * Find last bit set in a word. The least significant bit is 1.
204 * Return 0 if no bits are set.
205 * Essentially ffs() in the reverse direction.
206 */
static inline unsigned
util_last_bit(unsigned u)
{
#if defined(HAVE___BUILTIN_CLZ)
   /* __builtin_clz(0) is undefined, so 0 must be special-cased. */
   return u ? 32 - __builtin_clz(u) : 0;
#elif defined(_MSC_VER) && (_M_IX86 || _M_ARM || _M_AMD64 || _M_IA64)
   /* _BitScanReverse yields the zero-based index of the highest set bit
    * and returns 0 when u == 0.
    */
   unsigned long index;
   return _BitScanReverse(&index, u) ? index + 1 : 0;
#else
   /* Portable fallback: shift u down until it is empty, counting steps. */
   unsigned pos;
   for (pos = 0; u != 0; u >>= 1)
      pos++;
   return pos;
#endif
}
227
228 /**
229 * Find last bit set in a word. The least significant bit is 1.
230 * Return 0 if no bits are set.
231 * Essentially ffsll() in the reverse direction.
232 */
static inline unsigned
util_last_bit64(uint64_t u)
{
#if defined(HAVE___BUILTIN_CLZLL)
   /* __builtin_clzll(0) is undefined, so 0 must be special-cased. */
   return u ? 64 - __builtin_clzll(u) : 0;
#elif defined(_MSC_VER) && (_M_AMD64 || _M_ARM64 || _M_IA64)
   /* _BitScanReverse64 yields the zero-based index of the highest set bit
    * and returns 0 when u == 0.
    */
   unsigned long index;
   return _BitScanReverse64(&index, u) ? index + 1 : 0;
#else
   /* Portable fallback: shift u down until it is empty, counting steps. */
   unsigned pos;
   for (pos = 0; u != 0; u >>= 1)
      pos++;
   return pos;
#endif
}
253
254 /**
255 * Find last bit in a word that does not match the sign bit. The least
256 * significant bit is 1.
257 * Return 0 if no bits are set.
258 */
static inline unsigned
util_last_bit_signed(int i)
{
   /* For negative values, complementing flips the sign bits into zeros so
    * util_last_bit() finds the highest bit differing from the sign bit.
    */
   return util_last_bit(i >= 0 ? (unsigned)i : ~(unsigned)i);
}
267
268 /* Returns a bitfield in which the first count bits starting at start are
269 * set.
270 */
static inline unsigned
u_bit_consecutive(unsigned start, unsigned count)
{
   assert(start + count <= 32);
   /* count == 32 must be special-cased: 1u << 32 is undefined. */
   return count == 32 ? ~0u : ((1u << count) - 1) << start;
}
279
static inline uint64_t
u_bit_consecutive64(unsigned start, unsigned count)
{
   assert(start + count <= 64);
   /* count == 64 must be special-cased: shifting by 64 is undefined. */
   return count == 64 ? ~UINT64_C(0) : ((UINT64_C(1) << count) - 1) << start;
}
288
289 /**
290 * Return number of bits set in n.
291 */
static inline unsigned
util_bitcount(unsigned n)
{
#if defined(HAVE___BUILTIN_POPCOUNT)
   return __builtin_popcount(n);
#else
   /* Brian Kernighan's method: n &= n - 1 strips the lowest set bit, so
    * the loop executes once per set bit rather than once per bit position
    * up to the highest set bit.
    */
   unsigned count = 0;
   while (n) {
      n &= n - 1;
      count++;
   }
   return count;
#endif
}
311
static inline unsigned
util_bitcount64(uint64_t n)
{
#ifdef HAVE___BUILTIN_POPCOUNTLL
   return __builtin_popcountll(n);
#else
   /* No 64-bit builtin available: sum the popcounts of each 32-bit half. */
   return util_bitcount((unsigned)n) + util_bitcount((unsigned)(n >> 32));
#endif
}
321
322 #ifdef __cplusplus
323 }
324 #endif
325
326 #endif /* BITSCAN_H */
327