#ifndef _ASM_M32R_ATOMIC_H
#define _ASM_M32R_ATOMIC_H

/*
 *  linux/include/asm-m32r/atomic.h
 *
 *  M32R version:
 *    Copyright (C) 2001, 2002  Hitoshi Yamamoto
 *    Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
 */

#include <linux/types.h>
#include <asm/assembler.h>
#include <asm/cmpxchg.h>
#include <asm/dcache_clear.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)	READ_ONCE((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)	WRITE_ONCE(((v)->counter), (i))
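
/*
 * Illustrative usage (not part of this header): a counter is declared
 * with ATOMIC_INIT() and then only touched through the accessors.
 *
 *	static atomic_t users = ATOMIC_INIT(0);
 *
 *	atomic_set(&users, 1);
 *	if (atomic_read(&users) == 1)
 *		...
 */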

#ifdef CONFIG_CHIP_M32700_TS1
#define __ATOMIC_CLOBBER	, "r4"
#else
#define __ATOMIC_CLOBBER
#endif

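/*
 * Implementation note for the helpers below: each read-modify-write
 * runs with interrupts disabled, and the load/store pair is emitted
 * through M32R_LOCK/M32R_UNLOCK (see <asm/assembler.h>), which on SMP
 * builds are the locked load/store instructions that make the sequence
 * atomic across CPUs.  On CONFIG_CHIP_M32700_TS1 parts the
 * DCACHE_CLEAR() workaround uses r4 as a scratch register, which is
 * why __ATOMIC_CLOBBER above adds "r4" to the asm clobber list for
 * those chips.
 */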
#define ATOMIC_OP(op)							\
static __inline__ void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
	int result;							\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_" #op "		\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%1")				\
		M32R_LOCK" %0, @%1;		\n\t"			\
		#op " %0, %2;			\n\t"			\
		M32R_UNLOCK" %0, @%1;		\n\t"			\
		: "=&r" (result)					\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
}									\

#define ATOMIC_OP_RETURN(op)						\
static __inline__ int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int result;							\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_" #op "_return	\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%1")				\
		M32R_LOCK" %0, @%1;		\n\t"			\
		#op " %0, %2;			\n\t"			\
		M32R_UNLOCK" %0, @%1;		\n\t"			\
		: "=&r" (result)					\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
									\
	return result;							\
}

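/*
 * atomic_fetch_<op>() must return the value the counter held before
 * the operation: the old value is loaded into %1, copied aside into
 * the result register with "mv %0, %1", and the operation is applied
 * to %1 before it is stored back.
 */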
#define ATOMIC_FETCH_OP(op)						\
static __inline__ int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int result, val;						\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_fetch_" #op "		\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%2")				\
		M32R_LOCK" %1, @%2;		\n\t"			\
		"mv %0, %1			\n\t"			\
		#op " %1, %3;			\n\t"			\
		M32R_UNLOCK" %1, @%2;		\n\t"			\
		: "=&r" (result), "=&r" (val)				\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
									\
	return result;							\
}

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)
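
/*
 * The expansions above provide atomic_add(), atomic_sub(),
 * atomic_add_return(), atomic_sub_return(), atomic_fetch_add() and
 * atomic_fetch_sub(); the bitwise ops get atomic_and(), atomic_or(),
 * atomic_xor() and their atomic_fetch_*() forms only, since the
 * generic atomic API defines no *_return variants for them.
 */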

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

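/*
 * atomic_inc_return() and atomic_dec_return() below are open-coded
 * instead of generated from ATOMIC_OP_RETURN, presumably so that the
 * constant can be encoded directly as an "addi" immediate rather than
 * loaded into a register first (note the decrement is "addi #-1").
 */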
/**
 * atomic_inc_return - increment atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns the result.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_inc_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1;		\n\t"
		"addi	%0, #1;			\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
		__ATOMIC_CLOBBER
	);
	local_irq_restore(flags);

	return result;
}

/**
 * atomic_dec_return - decrement atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns the result.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_dec_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1;		\n\t"
		"addi	%0, #-1;		\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
		__ATOMIC_CLOBBER
	);
	local_irq_restore(flags);

	return result;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) ((void)atomic_inc_return(v))

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) ((void)atomic_dec_return(v))

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all
 * other cases.
 */
#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
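
/*
 * Both wrappers defer to the generic cmpxchg()/xchg() helpers from
 * <asm/cmpxchg.h>, applied to the embedded counter.  Illustrative use
 * (not part of this header):
 *
 *	old = atomic_cmpxchg(&v, expected, new);
 *	if (old == expected)
 *		... the swap took place ...
 */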

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
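
/*
 * The function above is the classic cmpxchg retry loop: the counter is
 * reread whenever another CPU changed it between the atomic_read() and
 * the atomic_cmpxchg().  Generic code in <linux/atomic.h> builds
 * atomic_add_unless() and atomic_inc_not_zero() on top of this helper.
 */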

#endif	/* _ASM_M32R_ATOMIC_H */