/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#ifndef __ASSEMBLY__

#include <linux/types.h>
#include <linux/compiler.h>
#include <asm/barrier.h>

/*
 * Hardware assisted read-modify-write using ARC700 LLOCK/SCOND insns.
 * The Kconfig glue ensures that in SMP, this is only set if the container
 * SoC/platform has cross-core coherent LLOCK/SCOND
 */
#if defined(CONFIG_ARC_HAS_LLSC)

static inline void set_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned int temp;

	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	__asm__ __volatile__(
	"1:	llock   %0, [%1]	\n"
	"	bset    %0, %0, %2	\n"
	"	scond   %0, [%1]	\n"
	"	bnz     1b		\n"
	: "=&r"(temp)
	: "r"(m), "ir"(nr)
	: "cc");
}
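
/*
 * Illustrative caller sketch (hypothetical bitmap, not part of this header).
 * If two CPUs race on the same word, the loser's scond fails and bnz loops
 * back to retry the llock/scond sequence, so the update is atomic:
 *
 *	static unsigned long pending_map[2];
 *
 *	set_bit(5, pending_map);	// bit 5 of word 0
 *	set_bit(40, pending_map);	// nr >> 5 == 1, so bit 8 of word 1
 */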

static inline void clear_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned int temp;

	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	__asm__ __volatile__(
	"1:	llock   %0, [%1]	\n"
	"	bclr    %0, %0, %2	\n"
	"	scond   %0, [%1]	\n"
	"	bnz     1b		\n"
	: "=&r"(temp)
	: "r"(m), "ir"(nr)
	: "cc");
}

static inline void change_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned int temp;

	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	__asm__ __volatile__(
	"1:	llock   %0, [%1]	\n"
	"	bxor    %0, %0, %2	\n"
	"	scond   %0, [%1]	\n"
	"	bnz     1b		\n"
	: "=&r"(temp)
	: "r"(m), "ir"(nr)
	: "cc");
}

/*
 * Semantically:
 *    Test the bit
 *    if clear
 *        set it and return 0 (old value)
 *    else
 *        return 1 (old value).
 *
 * Since ARC lacks an equivalent h/w primitive, the bit is set unconditionally
 * and the old value of the bit is returned
 */
static inline int test_and_set_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long old, temp;

	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	/*
	 * Explicit full memory barrier needed before/after as
	 * LLOCK/SCOND themselves don't provide any such semantics
	 */
	smp_mb();

	__asm__ __volatile__(
	"1:	llock   %0, [%2]	\n"
	"	bset    %1, %0, %3	\n"
	"	scond   %1, [%2]	\n"
	"	bnz     1b		\n"
	: "=&r"(old), "=&r"(temp)
	: "r"(m), "ir"(nr)
	: "cc");

	smp_mb();

	return (old & (1 << nr)) != 0;
}
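
/*
 * Typical claim-once usage sketch (hypothetical names): a 0 return means
 * this caller flipped the bit from 0 to 1 and thus owns the resource; a
 * non-zero return means it was already taken:
 *
 *	static unsigned long busy;
 *
 *	if (!test_and_set_bit(0, &busy))
 *		...		// we atomically claimed it
 *	else
 *		...		// already in use elsewhere
 */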

static inline int
test_and_clear_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned int old, temp;

	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	smp_mb();

	__asm__ __volatile__(
	"1:	llock   %0, [%2]	\n"
	"	bclr    %1, %0, %3	\n"
	"	scond   %1, [%2]	\n"
	"	bnz     1b		\n"
	: "=&r"(old), "=&r"(temp)
	: "r"(m), "ir"(nr)
	: "cc");

	smp_mb();

	return (old & (1 << nr)) != 0;
}

static inline int
test_and_change_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned int old, temp;

	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	smp_mb();

	__asm__ __volatile__(
	"1:	llock   %0, [%2]	\n"
	"	bxor    %1, %0, %3	\n"
	"	scond   %1, [%2]	\n"
	"	bnz     1b		\n"
	: "=&r"(old), "=&r"(temp)
	: "r"(m), "ir"(nr)
	: "cc");

	smp_mb();

	return (old & (1 << nr)) != 0;
}

#else	/* !CONFIG_ARC_HAS_LLSC */

#include <asm/smp.h>

/*
 * Non hardware assisted Atomic-R-M-W
 * Locking would change to irq-disabling only (UP) and spinlocks (SMP)
 *
 * There's "significant" micro-optimization in writing our own variants of
 * bitops (over the generic variants)
 *
 * (1) The generic APIs have "signed" @nr while we have it "unsigned"
 *     This avoids extra code being generated for pointer arithmetic, since
 *     the compiler is "not sure" that the index is not negative
 * (2) Utilize the fact that ARCompact bit fiddling insns (BSET/BCLR/ASL) etc
 *     only consider the bottom 5 bits of @nr, so there's NO need to mask
 *     them off.
 *     (GCC Quirk: however for constant @nr we still need to do the masking
 *             at compile time)
 */
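
/*
 * For orientation (assumption, paraphrasing <asm/smp.h>): bitops_lock()/
 * bitops_unlock() are expected to reduce to local_irq_save()/restore() on
 * UP, plus a global arch spinlock on SMP, so the load-op-store sequences
 * below are atomic w.r.t. both other CPUs and local interrupts.
 */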

static inline void set_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long temp, flags;
	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	bitops_lock(flags);

	temp = *m;
	*m = temp | (1UL << nr);

	bitops_unlock(flags);
}

static inline void clear_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long temp, flags;
	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	bitops_lock(flags);

	temp = *m;
	*m = temp & ~(1UL << nr);

	bitops_unlock(flags);
}

static inline void change_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long temp, flags;
	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	bitops_lock(flags);

	temp = *m;
	*m = temp ^ (1UL << nr);

	bitops_unlock(flags);
}

static inline int test_and_set_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long old, flags;
	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	/*
	 * spin lock/unlock provide the needed smp_mb() before/after
	 */
	bitops_lock(flags);

	old = *m;
	*m = old | (1 << nr);

	bitops_unlock(flags);

	return (old & (1 << nr)) != 0;
}

static inline int
test_and_clear_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long old, flags;
	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	bitops_lock(flags);

	old = *m;
	*m = old & ~(1 << nr);

	bitops_unlock(flags);

	return (old & (1 << nr)) != 0;
}

static inline int
test_and_change_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long old, flags;
	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	bitops_lock(flags);

	old = *m;
	*m = old ^ (1 << nr);

	bitops_unlock(flags);

	return (old & (1 << nr)) != 0;
}

#endif /* CONFIG_ARC_HAS_LLSC */

/***************************************
 * Non atomic variants
 **************************************/
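
/*
 * The __xxx forms below skip atomicity entirely; callers must provide
 * their own exclusion. A hypothetical usage sketch, with the bitmap
 * protected by a lock the caller already holds:
 *
 *	spin_lock(&map_lock);
 *	__set_bit(nr, map);		// plain load/op/store, no RMW cost
 *	__clear_bit(nr + 1, map);
 *	spin_unlock(&map_lock);
 */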

static inline void __set_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long temp;
	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	temp = *m;
	*m = temp | (1UL << nr);
}

static inline void __clear_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long temp;
	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	temp = *m;
	*m = temp & ~(1UL << nr);
}

static inline void __change_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long temp;
	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	temp = *m;
	*m = temp ^ (1UL << nr);
}

static inline int
__test_and_set_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long old;
	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	old = *m;
	*m = old | (1 << nr);

	return (old & (1 << nr)) != 0;
}

static inline int
__test_and_clear_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long old;
	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	old = *m;
	*m = old & ~(1 << nr);

	return (old & (1 << nr)) != 0;
}

static inline int
__test_and_change_bit(unsigned long nr, volatile unsigned long *m)
{
	unsigned long old;
	m += nr >> 5;

	if (__builtin_constant_p(nr))
		nr &= 0x1f;

	old = *m;
	*m = old ^ (1 << nr);

	return (old & (1 << nr)) != 0;
}

/*
 * This routine doesn't need to be atomic.
 */
static inline int
__constant_test_bit(unsigned int nr, const volatile unsigned long *addr)
{
	return ((1UL << (nr & 31)) &
		(((const volatile unsigned int *)addr)[nr >> 5])) != 0;
}

static inline int
__test_bit(unsigned int nr, const volatile unsigned long *addr)
{
	unsigned long mask;

	addr += nr >> 5;

	/* ARC700 only considers 5 bits in bit-fiddling insn */
	mask = 1 << nr;

	return ((mask & *addr) != 0);
}

#define test_bit(nr, addr)	(__builtin_constant_p(nr) ? \
					__constant_test_bit((nr), (addr)) : \
					__test_bit((nr), (addr)))
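
/*
 * Dispatch note: __builtin_constant_p() folds at compile time, so a
 * constant @nr resolves to __constant_test_bit() (computable entirely by
 * the compiler) while a runtime @nr goes through __test_bit().
 */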

/*
 * Count the number of zeros, starting from MSB
 * Helper for fls( ) friends
 * This is a pure count, so the (1-32) or (0-31) range conventions don't apply
 * The result can be 0 to 32, based on the number of leading zeroes:
 * clz(0x8000_0000) = 0, clz(0xFFFF_FFFF) = 0, clz(0) = 32, clz(1) = 31
 */
static inline __attribute__ ((const)) int clz(unsigned int x)
{
	unsigned int res;

	__asm__ __volatile__(
	"	norm.f  %0, %1		\n"
	"	mov.n   %0, 0		\n"
	"	add.p   %0, %0, 1	\n"
	: "=r"(res)
	: "r"(x)
	: "cc");

	return res;
}
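
/*
 * The exact NORM flag/condition semantics are hardware-defined; as an
 * illustrative reference (not used by the kernel), a portable C
 * equivalent of clz() would be:
 *
 *	int clz_ref(unsigned int x)
 *	{
 *		int n = 0;
 *		if (!x)
 *			return 32;
 *		while (!(x & 0x80000000u)) {
 *			x <<= 1;
 *			n++;
 *		}
 *		return n;
 *	}
 */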

static inline int constant_fls(int x)
{
	int r = 32;

	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}

/*
 * fls = Find Last Set in word
 * @result: [1-32]
 * fls(1) = 1, fls(0x80000000) = 32, fls(0) = 0
 */
static inline __attribute__ ((const)) int fls(unsigned long x)
{
	if (__builtin_constant_p(x))
		return constant_fls(x);

	return 32 - clz(x);
}
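
/*
 * Quick check of the identity (illustrative): for x = 0x10, clz() = 27,
 * so fls() = 32 - 27 = 5, the 1-based position of bit 4.
 */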

/*
 * __fls: Similar to fls, but zero based (0-31)
 */
static inline __attribute__ ((const)) int __fls(unsigned long x)
{
	if (!x)
		return 0;
	else
		return fls(x) - 1;
}

/*
 * ffs = Find First Set in word (LSB to MSB)
 * @result: [1-32], 0 if all 0's
 */
#define ffs(x)	({ unsigned long __t = (x); fls(__t & -__t); })
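
/*
 * The (__t & -__t) trick isolates the lowest set bit: e.g. for x = 0b101000,
 * x & -x = 0b1000, and fls(0b1000) = 4, so ffs(x) = 4.
 */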

/*
 * __ffs: Similar to ffs, but zero based (0-31)
 */
static inline __attribute__ ((const)) int __ffs(unsigned long word)
{
	if (!word)
		return word;

	return ffs(word) - 1;
}

/*
 * ffz = Find First Zero in word.
 * @return: [0-31], 32 if all 1's
 */
#define ffz(x)	__ffs(~(x))

#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/lock.h>

#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* !__ASSEMBLY__ */

#endif /* _ASM_BITOPS_H */