1 /*
2 * This file is subject to the terms and conditions of the GNU General Public
3 * License. See the file "COPYING" in the main directory of this archive
4 * for more details.
5 *
6 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
7 */
8 #ifndef __ASM_CMPXCHG_H
9 #define __ASM_CMPXCHG_H
10
11 #include <linux/bug.h>
12 #include <linux/irqflags.h>
13 #include <asm/compiler.h>
14 #include <asm/war.h>
15
/*
 * Using a branch-likely instruction to check the result of an sc instruction
 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
 * cause ll-sc sequences to execute non-atomically.
 *
 * "beqzl" is the branch-likely form of "beqz"; both are used below only
 * to retry the ll/sc loop when the store-conditional fails.
 */
#if R10000_LLSC_WAR
# define __scbeqz "beqzl"
#else
# define __scbeqz "beqz"
#endif
26
/*
 * These functions don't exist, so if they are called you'll either:
 *
 * - Get an error at compile-time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link-time due to the call to the missing function.
 */
37 extern unsigned long __cmpxchg_called_with_bad_pointer(void)
38 __compiletime_error("Bad argument size for cmpxchg");
39 extern unsigned long __xchg_called_with_bad_pointer(void)
40 __compiletime_error("Bad argument size for xchg");
41
/*
 * Atomically exchange the value at memory location m with val, evaluating
 * to the value previously stored at m.
 *
 * ld/st name the load-linked and store-conditional instructions to use
 * ("ll"/"sc" for 32-bit operands, "lld"/"scd" for 64-bit). The ll/sc
 * loop retries (via __scbeqz) until the store-conditional succeeds.
 * On kernels where kernel_uses_llsc is false, the exchange is emulated
 * by a plain load/store with local interrupts disabled (atomic only
 * with respect to the local CPU).
 *
 * ".set noat" is needed because $1 (the assembler temporary) is used
 * explicitly to hold the value being stored.
 */
#define __xchg_asm(ld, st, m, val)					\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"1:	" ld "	%0, %2		# __xchg_asm	\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z3				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)			\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		*m = val;						\
		raw_local_irq_restore(__flags);			\
	}								\
									\
	__ret;								\
})
72
73 extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
74 unsigned int size);
75
76 static __always_inline
__xchg(volatile void * ptr,unsigned long x,int size)77 unsigned long __xchg(volatile void *ptr, unsigned long x, int size)
78 {
79 switch (size) {
80 case 1:
81 case 2:
82 return __xchg_small(ptr, x, size);
83
84 case 4:
85 return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);
86
87 case 8:
88 if (!IS_ENABLED(CONFIG_64BIT))
89 return __xchg_called_with_bad_pointer();
90
91 return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);
92
93 default:
94 return __xchg_called_with_bad_pointer();
95 }
96 }
97
/*
 * xchg() with full ordering: a barrier is issued before the ll/sc
 * sequence (smp_mb__before_llsc) and after it (smp_llsc_mb), so the
 * exchange is ordered against surrounding memory accesses on SMP.
 * The result is cast back to the pointed-to type of ptr.
 */
#define xchg(ptr, x)							\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
									\
	__res = (__typeof__(*(ptr)))					\
		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));	\
									\
	smp_llsc_mb();							\
									\
	__res;								\
})
111
/*
 * Atomic compare-and-exchange on *m: if the value loaded from m equals
 * old, store new (retrying the ll/sc loop on sc failure); otherwise
 * branch out and leave *m untouched. Evaluates to the value read from
 * m, which equals old exactly when the exchange took place.
 *
 * ld/st name the load-linked/store-conditional instruction pair
 * ("ll"/"sc" or "lld"/"scd"). On kernels where kernel_uses_llsc is
 * false, the operation is emulated with local interrupts disabled
 * (atomic only with respect to the local CPU).
 */
#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm \n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:						\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)	\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);			\
	}								\
									\
	__ret;								\
})
145
146 extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
147 unsigned long new, unsigned int size);
148
149 static __always_inline
__cmpxchg(volatile void * ptr,unsigned long old,unsigned long new,unsigned int size)150 unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
151 unsigned long new, unsigned int size)
152 {
153 switch (size) {
154 case 1:
155 case 2:
156 return __cmpxchg_small(ptr, old, new, size);
157
158 case 4:
159 return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
160 (u32)old, new);
161
162 case 8:
163 /* lld/scd are only available for MIPS64 */
164 if (!IS_ENABLED(CONFIG_64BIT))
165 return __cmpxchg_called_with_bad_pointer();
166
167 return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
168 (u64)old, new);
169
170 default:
171 return __cmpxchg_called_with_bad_pointer();
172 }
173 }
174
/*
 * cmpxchg without SMP memory barriers: atomic only with respect to the
 * local CPU. old and new are first cast through the pointed-to type so
 * they are truncated/extended consistently with *ptr before being
 * widened to unsigned long for __cmpxchg().
 */
#define cmpxchg_local(ptr, old, new)					\
	((__typeof__(*(ptr)))						\
		__cmpxchg((ptr),					\
			  (unsigned long)(__typeof__(*(ptr)))(old),	\
			  (unsigned long)(__typeof__(*(ptr)))(new),	\
			  sizeof(*(ptr))))
181
/*
 * Fully-ordered cmpxchg(): cmpxchg_local() bracketed by the same
 * barrier pair used by xchg() above, ordering the operation against
 * surrounding memory accesses on SMP.
 */
#define cmpxchg(ptr, old, new)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
	__res = cmpxchg_local((ptr), (old), (new));			\
	smp_llsc_mb();							\
									\
	__res;								\
})
192
#ifdef CONFIG_64BIT
/*
 * On 64-bit kernels the native cmpxchg paths handle 8-byte operands
 * directly; just enforce the operand size at compile time.
 */
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })

#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })
#else
/*
 * 32-bit kernels fall back to the generic interrupt-disabling
 * implementation for the local variant. No SMP-safe 64-bit cmpxchg is
 * provided here, so cmpxchg64() only exists when !CONFIG_SMP.
 */
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#ifndef CONFIG_SMP
#define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
#endif
#endif
212
213 #undef __scbeqz
214
215 #endif /* __ASM_CMPXCHG_H */
216