#ifndef _ARCH_I386_LOCAL_H
#define _ARCH_I386_LOCAL_H

#include <linux/percpu.h>
#include <asm/system.h>
#include <asm/atomic.h>

/*
 * local_t is a signed long counter whose operations are atomic with
 * respect to the CPU that owns it (including its interrupts), so no
 * LOCK prefix is needed.  Only the owning CPU may update it.
 */
typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l,i)	atomic_long_set(&(l)->a, (i))

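/*
 * Illustrative sketch (not part of the original header; "hits" is a
 * hypothetical variable): declaring, initialising and accessing a
 * local_t.
 *
 *	local_t hits = LOCAL_INIT(0);
 *
 *	local_set(&hits, 10);
 *	printk("hits = %ld\n", local_read(&hits));
 */
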
/**
 * local_inc - increment local variable
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1.
 */
static __inline__ void local_inc(local_t *l)
{
	__asm__ __volatile__(
		"incl %0"
		:"+m" (l->a.counter));
}

/**
 * local_dec - decrement local variable
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1.
 */
static __inline__ void local_dec(local_t *l)
{
	__asm__ __volatile__(
		"decl %0"
		:"+m" (l->a.counter));
}

/**
 * local_add - add long to local variable
 * @i: long value to add
 * @l: pointer of type local_t
 *
 * Atomically adds @i to @l.
 */
static __inline__ void local_add(long i, local_t *l)
{
	__asm__ __volatile__(
		"addl %1,%0"
		:"+m" (l->a.counter)
		:"ir" (i));
}

/**
 * local_sub - subtract long from local variable
 * @i: long value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l.
 */
static __inline__ void local_sub(long i, local_t *l)
{
	__asm__ __volatile__(
		"subl %1,%0"
		:"+m" (l->a.counter)
		:"ir" (i));
}
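
/*
 * Usage sketch (hypothetical counter "bytes_pending"): adjusting a
 * local counter from code pinned to the owning CPU, e.g. an interrupt
 * handler.  No LOCK prefix is needed since only this CPU updates it.
 *
 *	local_inc(&bytes_pending);
 *	local_add(len, &bytes_pending);
 *	local_sub(len, &bytes_pending);
 *	local_dec(&bytes_pending);
 */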

/**
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int local_sub_and_test(long i, local_t *l)
{
	unsigned char c;

	__asm__ __volatile__(
		"subl %2,%0; sete %1"
		:"+m" (l->a.counter), "=qm" (c)
		:"ir" (i) : "memory");
	return c;
}
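
/*
 * Usage sketch ("credits" and refill_credits() are hypothetical):
 * consume a budget in chunks and react once it is exhausted.
 *
 *	if (local_sub_and_test(batch, &credits))
 *		refill_credits();
 */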

/**
 * local_dec_and_test - decrement and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int local_dec_and_test(local_t *l)
{
	unsigned char c;

	__asm__ __volatile__(
		"decl %0; sete %1"
		:"+m" (l->a.counter), "=qm" (c)
		: : "memory");
	return c != 0;
}
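
/*
 * Usage sketch: the classic last-put pattern, here for a reference
 * count owned by a single CPU ("obj" and free_obj() are hypothetical;
 * a count shared across CPUs would need atomic_t instead).
 *
 *	if (local_dec_and_test(&obj->refs))
 *		free_obj(obj);
 */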

/**
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int local_inc_and_test(local_t *l)
{
	unsigned char c;

	__asm__ __volatile__(
		"incl %0; sete %1"
		:"+m" (l->a.counter), "=qm" (c)
		: : "memory");
	return c != 0;
}
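
/*
 * Usage sketch: preset the counter to a negative value and let it
 * count up towards zero, e.g. to notice the last of nr_events
 * completions ("pending" and all_done() are hypothetical).
 *
 *	local_set(&pending, -nr_events);
 *	...
 *	if (local_inc_and_test(&pending))
 *		all_done();
 */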

/**
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int local_add_negative(long i, local_t *l)
{
	unsigned char c;

	__asm__ __volatile__(
		"addl %2,%0; sets %1"
		:"+m" (l->a.counter), "=qm" (c)
		:"ir" (i) : "memory");
	return c;
}
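
/*
 * Usage sketch ("balance" and handle_overdraft() are hypothetical):
 * spot a signed counter dropping below zero after an adjustment.
 *
 *	if (local_add_negative(-cost, &balance))
 *		handle_overdraft();
 */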

/**
 * local_add_return - add and return
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns @i + @l.
 */
static __inline__ long local_add_return(long i, local_t *l)
{
	long __i;
#ifdef CONFIG_M386
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	__asm__ __volatile__(
		"xaddl %0, %1;"
		:"+r" (i), "+m" (l->a.counter)
		: : "memory");
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	local_irq_save(flags);
	__i = local_read(l);
	local_set(l, i + __i);
	local_irq_restore(flags);
	return i + __i;
#endif
}
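
/*
 * Usage sketch: local_add_return() yields the value after the
 * addition, which suits per-CPU sequence or ticket numbers ("seq" is
 * hypothetical; local_inc_return() below is the common shorthand).
 *
 *	long ticket = local_add_return(1, &seq);
 */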

static __inline__ long local_sub_return(long i, local_t *l)
{
	return local_add_return(-i,l);
}

#define local_inc_return(l)  (local_add_return(1,l))
#define local_dec_return(l)  (local_sub_return(1,l))

#define local_cmpxchg(l, o, n) \
	(cmpxchg_local(&((l)->a.counter), (o), (n)))
/* Always has a lock prefix */
#define local_xchg(l, n) (xchg(&((l)->a.counter), (n)))
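
/*
 * Usage sketch: local_xchg() can harvest and reset a statistics
 * counter in one step, so no events are lost between the read and the
 * clear ("nr_packets" is hypothetical).
 *
 *	long snapshot = local_xchg(&nr_packets, 0);
 */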

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read(l);					\
	for (;;) {						\
		if (unlikely(c == (u)))				\
			break;					\
		old = local_cmpxchg((l), c, c + (a));		\
		if (likely(old == c))				\
			break;					\
		c = old;					\
	}							\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
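
/*
 * Usage sketch: local_inc_not_zero() is the usual lookup-side guard,
 * taking a reference only while the count has not already hit zero
 * ("obj" is hypothetical).
 *
 *	if (!local_inc_not_zero(&obj->refs))
 *		obj = NULL;	(object is already being torn down)
 */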

/* On x86, these are no better than the atomic variants. */
#define __local_inc(l)		local_inc(l)
#define __local_dec(l)		local_dec(l)
#define __local_add(i,l)	local_add((i),(l))
#define __local_sub(i,l)	local_sub((i),(l))

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note they take
 * a variable, not an address.
 */

/* We need to disable preemption around the cpu-local counters; otherwise
 * we could still end up accessing a variable of a previous CPU in a
 * non-atomic way. */
#define cpu_local_wrap_v(l)		\
	({ local_t res__;		\
	   preempt_disable();		\
	   res__ = (l);			\
	   preempt_enable();		\
	   res__; })
#define cpu_local_wrap(l)		\
	({ preempt_disable();		\
	   l;				\
	   preempt_enable(); })

#define cpu_local_read(l)    cpu_local_wrap_v(local_read(&__get_cpu_var(l)))
#define cpu_local_set(l, i)  cpu_local_wrap(local_set(&__get_cpu_var(l), (i)))
#define cpu_local_inc(l)     cpu_local_wrap(local_inc(&__get_cpu_var(l)))
#define cpu_local_dec(l)     cpu_local_wrap(local_dec(&__get_cpu_var(l)))
#define cpu_local_add(i, l)  cpu_local_wrap(local_add((i), &__get_cpu_var(l)))
#define cpu_local_sub(i, l)  cpu_local_wrap(local_sub((i), &__get_cpu_var(l)))
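
/*
 * Usage sketch: the cpu_local_* helpers take a per-CPU variable, not
 * an address.  A hypothetical per-CPU event counter:
 *
 *	static DEFINE_PER_CPU(local_t, nr_events) = LOCAL_INIT(0);
 *
 *	cpu_local_inc(nr_events);
 *	printk("%ld\n", cpu_local_read(nr_events));
 */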

#define __cpu_local_inc(l)	cpu_local_inc(l)
#define __cpu_local_dec(l)	cpu_local_dec(l)
#define __cpu_local_add(i, l)	cpu_local_add((i), (l))
#define __cpu_local_sub(i, l)	cpu_local_sub((i), (l))

#endif /* _ARCH_I386_LOCAL_H */