/* Generic fallback definitions for the per-CPU accessor operations. */
1 #ifndef _ASM_GENERIC_PERCPU_H_
2 #define _ASM_GENERIC_PERCPU_H_
3 
4 #include <linux/compiler.h>
5 #include <linux/threads.h>
6 #include <linux/percpu-defs.h>
7 
#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
/* One base offset per possible CPU, filled in by early boot code. */
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more effective
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
/* smp_processor_id() adds a preemption-safety check when debugging. */
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
/* Arch hook to allocate and initialize the per-CPU areas at boot. */
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */

/*
 * Linker-section and attribute defaults for per-CPU variable
 * definitions; an arch may override any of these before this point.
 */
#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
/* UP builds keep per-CPU data in the ordinary data section. */
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif

/*
 * Generic "raw" per-CPU operations.  These provide no protection at all
 * against preemption or interrupts; the caller is responsible for
 * ensuring the task cannot migrate to another CPU (or does not care)
 * while the operation runs.
 */
#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)

/* Add @val to the current CPU's instance and return the new value. */
#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	raw_cpu_add(pcp, val);						\
	raw_cpu_read(pcp);						\
})

/* Store @nval and return the previous value. */
#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	typeof(pcp) __ret;						\
	__ret = raw_cpu_read(pcp);					\
	raw_cpu_write(pcp, nval);					\
	__ret;								\
})

/* Store @nval only if the current value equals @oval; return the old value. */
#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	__ret = raw_cpu_read(pcp);					\
	if (__ret == (oval))						\
		raw_cpu_write(pcp, nval);				\
	__ret;								\
})

/*
 * Update both variables only if both current values match their expected
 * values; evaluates to 1 on success, 0 otherwise.
 */
#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	int __ret = 0;							\
	if (raw_cpu_read(pcp1) == (oval1) &&				\
			 raw_cpu_read(pcp2)  == (oval2)) {		\
		raw_cpu_write(pcp1, nval1);				\
		raw_cpu_write(pcp2, nval2);				\
		__ret = 1;						\
	}								\
	(__ret);							\
})

/*
 * Read the current CPU's instance of a per-CPU variable without the
 * caller having disabled preemption.  For native word sizes a single
 * load is coherent, so disabling preemption around it is enough.
 */
#define __this_cpu_generic_read_nopreempt(pcp)				\
({									\
	typeof(pcp) __ret;						\
	preempt_disable();						\
	__ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
	preempt_enable();						\
	__ret;								\
})

/*
 * Non-native sizes may require multiple loads; block interrupts so the
 * value cannot change mid-read.
 */
#define __this_cpu_generic_read_noirq(pcp)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = *raw_cpu_ptr(&(pcp));					\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

/* Pick the cheapest safe read based on whether pcp is a native word. */
#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	if (__native_word(pcp))						\
		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
	else								\
		__ret = __this_cpu_generic_read_noirq(pcp);		\
	__ret;								\
})

/* IRQ-safe read-modify-write of the current CPU's instance. */
#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	*raw_cpu_ptr(&(pcp)) op val;					\
	raw_local_irq_restore(__flags);					\
} while (0)

/*
 * IRQ-protected variants of the raw generic operations above: each one
 * runs with local interrupts disabled so the read-modify-write sequence
 * on this CPU's instance cannot be interleaved with an interrupt handler.
 */
#define this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_add(pcp, val);						\
	__ret = raw_cpu_read(pcp);					\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_read(pcp);					\
	raw_cpu_write(pcp, nval);					\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_read(pcp);					\
	if (__ret == (oval))						\
		raw_cpu_write(pcp, nval);				\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

/* Double-compare-and-swap under IRQ protection; see the raw variant. */
#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
({									\
	int __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

/*
 * Size-specific raw_cpu_* fallbacks.  The _1/_2/_4/_8 suffix is the
 * operand size in bytes.  An arch supplies optimized versions by
 * defining the macros before this point; anything left undefined maps
 * to the generic implementations above.
 */
#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp)		(*raw_cpu_ptr(&(pcp)))
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp)		(*raw_cpu_ptr(&(pcp)))
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp)		(*raw_cpu_ptr(&(pcp)))
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp)		(*raw_cpu_ptr(&(pcp)))
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

/*
 * Size-specific this_cpu_* fallbacks (preemption/IRQ-safe counterparts
 * of the raw_cpu_* group above).  Same override protocol: an arch
 * defines its own versions before this point, otherwise the generic
 * IRQ-protected implementations are used.
 */
#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

#endif /* _ASM_GENERIC_PERCPU_H_ */
441