#ifndef __ARCH_H8300_ATOMIC__
#define __ARCH_H8300_ATOMIC__

#include <linux/types.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v, i)	(((v)->counter) = (i))

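/*
 * Illustrative sketch (not part of the original header; the names are
 * hypothetical): a counter initialised at compile time, then updated
 * and read back with the accessors above.
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_users, 1);
 *	if (atomic_read(&nr_users) == 1)
 *		single_user_mode();
 */
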
#include <asm/system.h>
#include <linux/kernel.h>

/*
 * The H8/300 port is uniprocessor, so disabling interrupts around the
 * read-modify-write is enough to make these operations atomic.
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter += i;
	local_irq_restore(flags);
	return ret;
}

#define atomic_add(i, v) atomic_add_return(i, v)
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

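/*
 * Sketch (hypothetical use, not from the original source):
 * atomic_add_negative() suits counters where dropping below zero
 * signals exhaustion, e.g. spending from a credit budget.
 *
 *	if (atomic_add_negative(-cost, &budget))
 *		throttle_sender();
 */
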
static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter -= i;
	local_irq_restore(flags);
	return ret;
}

#define atomic_sub(i, v) atomic_sub_return(i, v)
#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)

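/*
 * Typical pattern (sketch; obj and obj_destroy are hypothetical): drop
 * a reference and free the object once the count reaches zero.
 *
 *	if (atomic_sub_and_test(1, &obj->refcnt))
 *		obj_destroy(obj);
 */
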
static __inline__ int atomic_inc_return(atomic_t *v)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	v->counter++;
	ret = v->counter;
	local_irq_restore(flags);
	return ret;
}

#define atomic_inc(v) atomic_inc_return(v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	--v->counter;
	ret = v->counter;
	local_irq_restore(flags);
	return ret;
}

#define atomic_dec(v) atomic_dec_return(v)

static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	--v->counter;
	ret = v->counter;
	local_irq_restore(flags);
	return ret == 0;
}

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);
	return ret;
}

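/*
 * Sketch (not part of this header): atomic_cmpxchg() can build other
 * read-modify-write primitives via a retry loop.  atomic_inc_below()
 * is a hypothetical example: increment only while under a limit.
 *
 *	static inline int atomic_inc_below(atomic_t *v, int limit)
 *	{
 *		int old;
 *
 *		do {
 *			old = atomic_read(v);
 *			if (old >= limit)
 *				return 0;
 *		} while (atomic_cmpxchg(v, old, old + 1) != old);
 *		return 1;
 *	}
 */
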
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	local_irq_restore(flags);
	return ret != u;
}
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

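/*
 * Typical use (sketch; obj is hypothetical): take a reference only if
 * the object is still live, i.e. its count has not already dropped to
 * zero, as in lock-free lookup schemes.
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;
 */
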
/*
 * Clear the bits in @mask from the word at @v.  Interrupts are masked
 * for the duration by setting the I bit in CCR.
 */
static __inline__ void atomic_clear_mask(unsigned long mask, unsigned long *v)
{
	__asm__ __volatile__("stc ccr,r1l\n\t"		/* save CCR */
			     "orc #0x80,ccr\n\t"	/* set I bit: disable IRQs */
			     "mov.l %0,er0\n\t"
			     "and.l %1,er0\n\t"
			     "mov.l er0,%0\n\t"
			     "ldc r1l,ccr"		/* restore CCR */
			     : "+m" (*v) : "g" (~(mask)) : "er0", "er1");
}

/*
 * Set the bits in @mask in the word at @v, likewise with interrupts
 * masked via the I bit in CCR.
 */
static __inline__ void atomic_set_mask(unsigned long mask, unsigned long *v)
{
	__asm__ __volatile__("stc ccr,r1l\n\t"		/* save CCR */
			     "orc #0x80,ccr\n\t"	/* set I bit: disable IRQs */
			     "mov.l %0,er0\n\t"
			     "or.l %1,er0\n\t"
			     "mov.l er0,%0\n\t"
			     "ldc r1l,ccr"		/* restore CCR */
			     : "+m" (*v) : "g" (mask) : "er0", "er1");
}

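/*
 * Sketch (hypothetical mask word; IRQ_BIT is illustrative): these two
 * operate on a plain unsigned long rather than an atomic_t, e.g. a
 * word of pending-event bits shared with an interrupt handler.
 *
 *	static unsigned long pending;
 *
 *	atomic_set_mask(1UL << IRQ_BIT, &pending);
 *	atomic_clear_mask(1UL << IRQ_BIT, &pending);
 */
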
/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>
#endif /* __ARCH_H8300_ATOMIC__ */