#ifndef _ASM_M32R_ATOMIC_H
#define _ASM_M32R_ATOMIC_H

/*
 *  linux/include/asm-m32r/atomic.h
 *
 *  M32R version:
 *    Copyright (C) 2001, 2002  Hitoshi Yamamoto
 *    Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
 */

#include <linux/types.h>
#include <asm/assembler.h>
#include <asm/cmpxchg.h>
#include <asm/dcache_clear.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)	ACCESS_ONCE((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)	(((v)->counter) = (i))

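/*
 * Illustrative sketch, not part of the original header: typical use of
 * ATOMIC_INIT, atomic_read() and atomic_set().  All names here are
 * hypothetical.
 */
static atomic_t __example_nr_users = ATOMIC_INIT(0);

static __inline__ int __example_reset_users(void)
{
	int old = atomic_read(&__example_nr_users);	/* one-shot snapshot */
	atomic_set(&__example_nr_users, 0);		/* plain store, not an RMW */
	return old;
}
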
#ifdef CONFIG_CHIP_M32700_TS1
/*
 * The M32700 TS1 errata workaround (DCACHE_CLEAR below) uses r4 as a
 * scratch register, so r4 must appear in the asm clobber lists.
 */
#define __ATOMIC_CLOBBER	, "r4"
#else
#define __ATOMIC_CLOBBER
#endif

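/*
 * M32R_LOCK/M32R_UNLOCK come from <asm/assembler.h>: on SMP builds they
 * map to the m32r locked load/store instructions, while on UP builds they
 * reduce to plain ld/st, with atomicity provided by the surrounding
 * local_irq_save()/local_irq_restore() pair.
 */
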
#define ATOMIC_OP(op)							\
static __inline__ void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
	int result;							\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_" #op "		\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%1")				\
		M32R_LOCK" %0, @%1;		\n\t"			\
		#op " %0, %2;			\n\t"			\
		M32R_UNLOCK" %0, @%1;		\n\t"			\
		: "=&r" (result)					\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
}									\

#define ATOMIC_OP_RETURN(op)						\
static __inline__ int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int result;							\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_" #op "_return	\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%1")				\
		M32R_LOCK" %0, @%1;		\n\t"			\
		#op " %0, %2;			\n\t"			\
		M32R_UNLOCK" %0, @%1;		\n\t"			\
		: "=&r" (result)					\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
									\
	return result;							\
}

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

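/*
 * The ATOMIC_OPS() expansions above generate:
 *
 *	void atomic_add(int i, atomic_t *v);
 *	int  atomic_add_return(int i, atomic_t *v);
 *	void atomic_sub(int i, atomic_t *v);
 *	int  atomic_sub_return(int i, atomic_t *v);
 *
 * each a locked read-modify-write on v->counter with interrupts disabled.
 */
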
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/**
 * atomic_inc_return - increment atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns the result.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_inc_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1;		\n\t"
		"addi	%0, #1;			\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
		__ATOMIC_CLOBBER
	);
	local_irq_restore(flags);

	return result;
}

/**
 * atomic_dec_return - decrement atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns the result.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_dec_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1;		\n\t"
		"addi	%0, #-1;		\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
		__ATOMIC_CLOBBER
	);
	local_irq_restore(flags);

	return result;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) ((void)atomic_inc_return(v))

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) ((void)atomic_dec_return(v))

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all
 * other cases.
 */
#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)

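/*
 * Illustrative sketch, not part of the original header: the classic
 * reference-count "put" built on atomic_dec_and_test().  The destructor
 * callback and object pointer are hypothetical.
 */
static __inline__ void __example_put(atomic_t *refcount,
				     void (*release)(void *), void *obj)
{
	/* Only the thread that drops the final reference sees zero. */
	if (atomic_dec_and_test(refcount))
		release(obj);
}
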
/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

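/*
 * atomic_cmpxchg(): if *v equals o, store n; either way, return the old
 * value.  atomic_xchg(): unconditionally store new and return the old
 * value.  __atomic_add_unless() below shows the canonical cmpxchg retry
 * loop.
 */
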
/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}

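/*
 * Illustrative sketch, not part of the original header: the common
 * "increment unless zero" idiom that the generic kernel headers build
 * on top of __atomic_add_unless().
 */
static __inline__ int __example_inc_not_zero(atomic_t *v)
{
	/* Nonzero iff the counter was nonzero and has been incremented. */
	return __atomic_add_unless(v, 1, 0) != 0;
}
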
static __inline__ void atomic_clear_mask(unsigned long mask, atomic_t *addr)
{
	unsigned long flags;
	unsigned long tmp;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_clear_mask		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")	/* r4: covered by __ATOMIC_CLOBBER */
		M32R_LOCK" %0, @%1;		\n\t"
		"and	%0, %2;			\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (tmp)
		: "r" (addr), "r" (~mask)
		: "memory"
		__ATOMIC_CLOBBER
	);
	local_irq_restore(flags);
}

static __inline__ void atomic_set_mask(unsigned long mask, atomic_t *addr)
{
	unsigned long flags;
	unsigned long tmp;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_set_mask		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")	/* r4: covered by __ATOMIC_CLOBBER */
		M32R_LOCK" %0, @%1;		\n\t"
		"or	%0, %2;			\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (tmp)
		: "r" (addr), "r" (mask)
		: "memory"
		__ATOMIC_CLOBBER
	);
	local_irq_restore(flags);
}

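/*
 * Illustrative sketch, not part of the original header: using the mask
 * helpers to manage flag bits stored in an atomic_t.  The flag value is
 * hypothetical.
 */
#define __EXAMPLE_FLAG_BUSY	(1UL << 0)

static __inline__ void __example_mark_busy(atomic_t *flags)
{
	atomic_set_mask(__EXAMPLE_FLAG_BUSY, flags);	/* counter |= mask */
}

static __inline__ void __example_mark_idle(atomic_t *flags)
{
	atomic_clear_mask(__EXAMPLE_FLAG_BUSY, flags);	/* counter &= ~mask */
}
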
#endif	/* _ASM_M32R_ATOMIC_H */