/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_CMPXCHG_H
#error Do not include xchg.h directly!
#else
/*
 * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code
 * except that the local versions do not have the expensive memory
 * barrier. So this file is included twice from asm/cmpxchg.h.
 */
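
/*
 * A sketch of that double-inclusion mechanism (illustrative only; see
 * asm/cmpxchg.h for the actual macro definitions):
 *
 *	#define ____xchg(type, args...)	__xchg ## type ## _local(args)
 *	#include <asm/xchg.h>		(barrier-free local variants)
 *	#undef ____xchg
 *	#define ____xchg(type, args...)	__xchg ## type(args)
 *	#include <asm/xchg.h>		(fully ordered variants)
 */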

/*
 * Atomic exchange.
 * Since it can be used to implement critical sections
 * it must clobber "memory" (also for interrupts in UP).
 */
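
/*
 * All four widths below follow the same load-locked/store-conditional
 * retry pattern; an illustrative C rendering (not real code, ll/sc has
 * no C equivalent):
 *
 *	do {
 *		ret = load_locked(m);		(ldq_l / ldl_l)
 *	} while (!store_conditional(m, val));	(stq_c / stl_c, 0 on contention)
 *	return ret;
 *
 * The retry branch lives in .subsection 2 so the hot path stays
 * straight-line code.
 */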

static inline unsigned long
____xchg(_u8, volatile char *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	insbl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extbl	%2,%4,%0\n"
	"	mskbl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}
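
/*
 * Alpha has no byte-granular ll/sc, so the _u8/_u16 variants operate on
 * the aligned quadword containing *m.  Roughly, for the byte case above
 * (illustrative; insbl/extbl/mskbl derive the shift from the low address
 * bits in one instruction each):
 *
 *	u64 *q = (u64 *)((long)m & ~7);			(andnot)
 *	int shift = ((long)m & 7) * 8;
 *	ret = (*q >> shift) & 0xff;			(extbl)
 *	*q = (*q & ~(0xffUL << shift))			(mskbl)
 *	     | ((u64)val << shift);			(insbl + or)
 */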

static inline unsigned long
____xchg(_u16, volatile short *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	inswl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extwl	%2,%4,%0\n"
	"	mskwl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

static inline unsigned long
____xchg(_u32, volatile int *m, unsigned long val)
{
	unsigned long dummy;

	/* "bis $31,x,y" computes y = $31 | x; since $31 always reads as
	   zero, it is the canonical Alpha register move. */
	__asm__ __volatile__(
	"1:	ldl_l	%0,%4\n"
	"	bis	$31,%3,%1\n"
	"	stl_c	%1,%2\n"
	"	beq	%1,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

static inline unsigned long
____xchg(_u64, volatile long *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ldq_l	%0,%4\n"
	"	bis	$31,%3,%1\n"
	"	stq_c	%1,%2\n"
	"	beq	%1,2f\n"
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg(). */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____xchg(, volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
		return ____xchg(_u8, ptr, x);
	case 2:
		return ____xchg(_u16, ptr, x);
	case 4:
		return ____xchg(_u32, ptr, x);
	case 8:
		return ____xchg(_u64, ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}
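
/*
 * asm/cmpxchg.h wraps this dispatcher into the public xchg() macro,
 * passing sizeof(*ptr) for 'size', so a typical call site looks like
 * (illustrative, with a hypothetical lock word):
 *
 *	old = xchg(&lock->state, LOCKED);
 *
 * With a constant size the switch folds at compile time; an unsupported
 * size leaves the call to __xchg_called_with_bad_pointer() behind and
 * the mistake surfaces at link time instead of at run time.
 */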

/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 */
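
/*
 * Typical lock-free read-modify-write built on top of this primitive
 * (illustrative sketch using the generic cmpxchg() wrapper and a
 * hypothetical counter):
 *
 *	long old, new;
 *	do {
 *		old = READ_ONCE(counter);
 *		new = old + 1;
 *	} while (cmpxchg(&counter, old, new) != old);
 */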

static inline unsigned long
____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	insbl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extbl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskbl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	inswl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extwl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskwl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}
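
/*
 * Note on the 32-bit variant below: ldl_l sign-extends the loaded
 * longword to 64 bits, so 'old' is passed through "(long) old" to get
 * the matching sign extension before the full-register cmpeq.
 */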

static inline unsigned long
____cmpxchg(_u32, volatile int *m, int old, int new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldl_l	%0,%5\n"
	"	cmpeq	%0,%3,%1\n"
	"	beq	%1,2f\n"
	"	mov	%4,%1\n"
	"	stl_c	%1,%2\n"
	"	beq	%1,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldq_l	%0,%5\n"
	"	cmpeq	%0,%3,%1\n"
	"	beq	%1,2f\n"
	"	mov	%4,%1\n"
	"	stq_c	%1,%2\n"
	"	beq	%1,3f\n"
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg(). */
extern void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____cmpxchg(, volatile void *ptr, unsigned long old, unsigned long new,
	    int size)
{
	switch (size) {
	case 1:
		return ____cmpxchg(_u8, ptr, old, new);
	case 2:
		return ____cmpxchg(_u16, ptr, old, new);
	case 4:
		return ____cmpxchg(_u32, ptr, old, new);
	case 8:
		return ____cmpxchg(_u64, ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}
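
/*
 * At a call site, success is detected by comparing the return value
 * with the expected old value; e.g. a one-shot claim of a flag
 * (illustrative, via the public cmpxchg() wrapper from asm/cmpxchg.h):
 *
 *	if (cmpxchg(&claimed, 0, 1) == 0)
 *		... we were first, proceed ...
 */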

#endif