Searched refs:ptr (Results 1 – 25 of 415) sorted by relevance


/arch/arm/include/asm/
cmpxchg.h
30 __arch_xchg(unsigned long x, volatile void *ptr, int size) in __arch_xchg() argument
41 prefetchw((const void *)ptr); in __arch_xchg()
53 : "r" (x), "r" (ptr) in __arch_xchg()
63 : "r" (x), "r" (ptr) in __arch_xchg()
74 : "r" (x), "r" (ptr) in __arch_xchg()
83 ret = *(volatile unsigned char *)ptr; in __arch_xchg()
84 *(volatile unsigned char *)ptr = x; in __arch_xchg()
90 ret = *(volatile unsigned long *)ptr; in __arch_xchg()
91 *(volatile unsigned long *)ptr = x; in __arch_xchg()
99 : "r" (x), "r" (ptr) in __arch_xchg()
[all …]
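
For context, the ARM hits above show the classic shape of __arch_xchg(): switch on the operand size, emit a suitable instruction sequence, and wrap the whole thing in a type-preserving arch_xchg() macro. Below is a minimal user-space sketch of that shape, assuming the GCC/Clang __atomic builtins in place of the LDREX/STREX assembly; my_arch_xchg() and my_xchg() are invented names, not kernel API.

/*
 * Hypothetical user-space analog of the size-dispatch pattern above,
 * using compiler __atomic builtins instead of architecture assembly.
 */
#include <stdint.h>
#include <stdio.h>

static inline unsigned long my_arch_xchg(unsigned long x, volatile void *ptr, int size)
{
        switch (size) {
        case 1:
                return __atomic_exchange_n((volatile uint8_t *)ptr, (uint8_t)x, __ATOMIC_SEQ_CST);
        case 2:
                return __atomic_exchange_n((volatile uint16_t *)ptr, (uint16_t)x, __ATOMIC_SEQ_CST);
        case 4:
                return __atomic_exchange_n((volatile uint32_t *)ptr, (uint32_t)x, __ATOMIC_SEQ_CST);
        default:
                __builtin_trap();       /* the kernel turns bad sizes into build errors instead */
        }
}

/* Type-preserving wrapper, mirroring the arch_xchg() macro shape. */
#define my_xchg(ptr, v) \
        ((__typeof__(*(ptr)))my_arch_xchg((unsigned long)(v), (ptr), sizeof(*(ptr))))

int main(void)
{
        uint32_t word = 1;
        uint32_t old = my_xchg(&word, 2u);      /* old == 1, word == 2 */
        printf("old=%u new=%u\n", old, word);
        return 0;
}
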
/arch/parisc/include/asm/
cmpxchg.h
25 __arch_xchg(unsigned long x, volatile void *ptr, int size) in __arch_xchg() argument
29 case 8: return __xchg64(x, (volatile unsigned long *) ptr); in __arch_xchg()
31 case 4: return __xchg32((int) x, (volatile int *) ptr); in __arch_xchg()
32 case 1: return __xchg8((char) x, (volatile char *) ptr); in __arch_xchg()
47 #define arch_xchg(ptr, x) \ argument
49 __typeof__(*(ptr)) __ret; \
50 __typeof__(*(ptr)) _x_ = (x); \
51 __ret = (__typeof__(*(ptr))) \
52 __arch_xchg((unsigned long)_x_, (ptr), sizeof(*(ptr))); \
60 extern u8 __cmpxchg_u8(volatile u8 *ptr, u8 old, u8 new_);
[all …]
uaccess.h
23 #define LDD_USER(sr, val, ptr) __get_user_asm64(sr, val, ptr) argument
24 #define STD_USER(sr, x, ptr) __put_user_asm64(sr, x, ptr) argument
26 #define LDD_USER(sr, val, ptr) __get_user_asm(sr, val, "ldd", ptr) argument
27 #define STD_USER(sr, x, ptr) __put_user_asm(sr, "std", x, ptr) argument
30 #define __get_user_internal(sr, val, ptr) \ argument
34 switch (sizeof(*(ptr))) { \
35 case 1: __get_user_asm(sr, val, "ldb", ptr); break; \
36 case 2: __get_user_asm(sr, val, "ldh", ptr); break; \
37 case 4: __get_user_asm(sr, val, "ldw", ptr); break; \
38 case 8: LDD_USER(sr, val, ptr); break; \
[all …]
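
The parisc uaccess.h hits show the same idea applied to user accessors: __get_user_internal() switches on sizeof(*(ptr)) to pick a load helper. Here is a hedged, user-space-only sketch of that dispatch, with memcpy() standing in for the ldb/ldh/ldw/ldd assembler helpers and their exception-table fixups; my_get_user() and the __get_N helpers are made-up names.

/* Illustrative only: size dispatch without real user-space access or fault handling. */
#include <string.h>

static inline int __get_1(void *dst, const void *src) { memcpy(dst, src, 1); return 0; }
static inline int __get_2(void *dst, const void *src) { memcpy(dst, src, 2); return 0; }
static inline int __get_4(void *dst, const void *src) { memcpy(dst, src, 4); return 0; }
static inline int __get_8(void *dst, const void *src) { memcpy(dst, src, 8); return 0; }

#define my_get_user(x, ptr)                                             \
({                                                                      \
        int __gu_err;                                                   \
        switch (sizeof(*(ptr))) {                                       \
        case 1: __gu_err = __get_1(&(x), (ptr)); break;                 \
        case 2: __gu_err = __get_2(&(x), (ptr)); break;                 \
        case 4: __gu_err = __get_4(&(x), (ptr)); break;                 \
        case 8: __gu_err = __get_8(&(x), (ptr)); break;                 \
        default: __gu_err = -1; break;  /* kernel: build error / -EFAULT path */ \
        }                                                               \
        __gu_err;                                                       \
})

int main(void)
{
        int src = 42, dst = 0;
        return my_get_user(dst, &src) ? 1 : (dst != 42);
}
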
/arch/x86/include/asm/
cmpxchg.h
42 #define __xchg_op(ptr, arg, op, lock) \ argument
44 __typeof__ (*(ptr)) __ret = (arg); \
45 switch (sizeof(*(ptr))) { \
48 : "+q" (__ret), "+m" (*(ptr)) \
53 : "+r" (__ret), "+m" (*(ptr)) \
58 : "+r" (__ret), "+m" (*(ptr)) \
63 : "+r" (__ret), "+m" (*(ptr)) \
78 #define arch_xchg(ptr, v) __xchg_op((ptr), (v), xchg, "") argument
85 #define __raw_cmpxchg(ptr, old, new, size, lock) \ argument
87 __typeof__(*(ptr)) __ret; \
[all …]
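
The x86 __xchg_op()/__raw_cmpxchg() macros above are typically consumed through a read/modify/compare-and-swap retry loop. The sketch below shows that usage pattern with the compiler's __atomic builtins rather than LOCK-prefixed assembly; atomic_add_sat() is an invented example function, not a kernel API.

#include <stdint.h>
#include <stdio.h>

static uint32_t atomic_add_sat(volatile uint32_t *p, uint32_t add, uint32_t limit)
{
        uint32_t old = __atomic_load_n(p, __ATOMIC_RELAXED);
        uint32_t new;

        do {
                new = old + add;
                if (new > limit)
                        new = limit;            /* saturate instead of wrapping */
                /* on failure, 'old' is refreshed with the current value */
        } while (!__atomic_compare_exchange_n(p, &old, new, 0,
                                              __ATOMIC_SEQ_CST, __ATOMIC_RELAXED));
        return old;                             /* previous value, like cmpxchg */
}

int main(void)
{
        volatile uint32_t v = 250;
        atomic_add_sat(&v, 10, 255);
        printf("%u\n", v);                      /* prints 255 */
        return 0;
}
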
/arch/xtensa/variants/test_kc705_hifi/include/variant/
tie-asm.h
77 .macro xchal_ncp_store ptr at1 at2 at3 at4 continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
81 xchal_sa_align \ptr, 0, 1020, 4, 4
83 s32i \at1, \ptr, .Lxchal_ofs_+0
86 xchal_sa_align \ptr, 0, 1020, 4, 4
91 xchal_sa_align \ptr, 0, 1016, 4, 4
93 s32i \at1, \ptr, .Lxchal_ofs_+0
95 s32i \at1, \ptr, .Lxchal_ofs_+4
98 xchal_sa_align \ptr, 0, 1016, 4, 4
103 xchal_sa_align \ptr, 0, 1000, 4, 4
105 s32i \at1, \ptr, .Lxchal_ofs_+0
[all …]
/arch/loongarch/include/asm/
cmpxchg.h
25 static inline unsigned int __xchg_small(volatile void *ptr, unsigned int val, in __xchg_small() argument
41 shift = (unsigned long)ptr & 0x3; in __xchg_small()
49 ptr32 = (volatile u32 *)((unsigned long)ptr & ~0x3); in __xchg_small()
65 __arch_xchg(volatile void *ptr, unsigned long x, int size) in __arch_xchg() argument
70 return __xchg_small(ptr, x, size); in __arch_xchg()
73 return __xchg_asm("amswap_db.w", (volatile u32 *)ptr, (u32)x); in __arch_xchg()
76 return __xchg_asm("amswap_db.d", (volatile u64 *)ptr, (u64)x); in __arch_xchg()
85 #define arch_xchg(ptr, x) \ argument
87 __typeof__(*(ptr)) __res; \
89 __res = (__typeof__(*(ptr))) \
[all …]
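
__xchg_small() above handles 1- and 2-byte operands by aligning down to the containing 32-bit word and retrying a word-sized compare-and-swap under a shifted mask. Below is a rough, little-endian-only user-space illustration of that technique, built on __atomic builtins rather than amswap/ll-sc; xchg_small() here is a stand-in, not the LoongArch code.

#include <stdint.h>

static unsigned int xchg_small(volatile void *ptr, unsigned int val, unsigned int size)
{
        unsigned int shift = ((uintptr_t)ptr & 0x3) * 8;        /* bit offset in the word */
        uint32_t mask = (size == 1 ? 0xffu : 0xffffu) << shift;
        volatile uint32_t *ptr32 = (volatile uint32_t *)((uintptr_t)ptr & ~(uintptr_t)0x3);
        uint32_t old = __atomic_load_n(ptr32, __ATOMIC_RELAXED);
        uint32_t new;

        do {
                new = (old & ~mask) | ((val << shift) & mask);
        } while (!__atomic_compare_exchange_n(ptr32, &old, new, 0,
                                              __ATOMIC_SEQ_CST, __ATOMIC_RELAXED));

        return (old & mask) >> shift;           /* the previous sub-word value */
}

int main(void)
{
        union { uint32_t word; unsigned char bytes[4]; } u = { .word = 0 };
        u.bytes[1] = 3;
        unsigned int old = xchg_small(&u.bytes[1], 9, 1);       /* swap a single byte */
        return !(old == 3 && u.bytes[1] == 9);
}
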
uaccess.h
45 #define get_user(x, ptr) \ argument
47 const __typeof__(*(ptr)) __user *__p = (ptr); \
71 #define put_user(x, ptr) \ argument
73 __typeof__(*(ptr)) __user *__p = (ptr); \
100 #define __get_user(x, ptr) \ argument
104 __chk_user_ptr(ptr); \
105 __get_user_common((x), sizeof(*(ptr)), ptr); \
129 #define __put_user(x, ptr) \ argument
132 __typeof__(*(ptr)) __pu_val; \
135 __chk_user_ptr(ptr); \
[all …]
/arch/csky/include/asm/
cmpxchg.h
11 #define __xchg_relaxed(new, ptr, size) \ argument
13 __typeof__(ptr) __ptr = (ptr); \
15 __typeof__(*(ptr)) __ret; \
22 __ptr = (__typeof__(ptr))((ulong)__ptr & ~2); \
34 __ret = (__typeof__(*(ptr))) \
54 #define arch_xchg_relaxed(ptr, x) \ argument
55 (__xchg_relaxed((x), (ptr), sizeof(*(ptr))))
57 #define __cmpxchg_relaxed(ptr, old, new, size) \ argument
59 __typeof__(ptr) __ptr = (ptr); \
63 __typeof__(*(ptr)) __ret; \
[all …]
uaccess.h
11 #define __put_user_asm_b(x, ptr, err) \ argument
24 : "=r"(err), "=r"(x), "=r"(ptr), "=r"(errcode) \
25 : "0"(err), "1"(x), "2"(ptr), "3"(-EFAULT) \
29 #define __put_user_asm_h(x, ptr, err) \ argument
42 : "=r"(err), "=r"(x), "=r"(ptr), "=r"(errcode) \
43 : "0"(err), "1"(x), "2"(ptr), "3"(-EFAULT) \
47 #define __put_user_asm_w(x, ptr, err) \ argument
60 : "=r"(err), "=r"(x), "=r"(ptr), "=r"(errcode) \
61 : "0"(err), "1"(x), "2"(ptr), "3"(-EFAULT) \
65 #define __put_user_asm_64(x, ptr, err) \ argument
[all …]
/arch/mips/include/asm/
cmpxchg.h
67 extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
71 unsigned long __arch_xchg(volatile void *ptr, unsigned long x, int size) in __arch_xchg() argument
76 return __xchg_small(ptr, x, size); in __arch_xchg()
79 return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x); in __arch_xchg()
85 return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x); in __arch_xchg()
92 #define arch_xchg(ptr, x) \ argument
94 __typeof__(*(ptr)) __res; \
104 __res = (__typeof__(*(ptr))) \
105 __arch_xchg((ptr), (unsigned long)(x), sizeof(*(ptr))); \
148 extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
[all …]
/arch/sparc/include/asm/
cmpxchg_64.h
55 #define arch_xchg(ptr,x) \ argument
56 ({ __typeof__(*(ptr)) __ret; \
57 __ret = (__typeof__(*(ptr))) \
58 __arch_xchg((unsigned long)(x), (ptr), sizeof(*(ptr))); \
75 unsigned int *ptr = (unsigned int *) (maddr & ~2); in xchg16() local
79 load32 = *ptr; in xchg16()
84 load32 = __cmpxchg_u32(ptr, old32, new32); in xchg16()
91 __arch_xchg(unsigned long x, __volatile__ void * ptr, int size) in __arch_xchg() argument
95 return xchg16(ptr, x); in __arch_xchg()
97 return xchg32(ptr, x); in __arch_xchg()
[all …]
cmpxchg_32.h
18 static __always_inline unsigned long __arch_xchg(unsigned long x, __volatile__ void * ptr, int size) in __arch_xchg() argument
22 return __xchg_u32(ptr, x); in __arch_xchg()
28 #define arch_xchg(ptr,x) ({(__typeof__(*(ptr)))__arch_xchg((unsigned long)(x),(ptr),sizeof(*(ptr)))… argument
47 __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new_, int size) in __cmpxchg() argument
50 size == 1 ? __cmpxchg_u8(ptr, old, new_) : in __cmpxchg()
51 size == 2 ? __cmpxchg_u16(ptr, old, new_) : in __cmpxchg()
52 size == 4 ? __cmpxchg_u32(ptr, old, new_) : in __cmpxchg()
56 #define arch_cmpxchg(ptr, o, n) \ argument
58 __typeof__(*(ptr)) _o_ = (o); \
59 __typeof__(*(ptr)) _n_ = (n); \
[all …]
/arch/alpha/include/asm/
cmpxchg.h
100 ____xchg(volatile void *ptr, unsigned long x, int size) in ____xchg() argument
103 size == 1 ? ____xchg_u8(ptr, x) : in ____xchg()
104 size == 2 ? ____xchg_u16(ptr, x) : in ____xchg()
105 size == 4 ? ____xchg_u32(ptr, x) : in ____xchg()
106 size == 8 ? ____xchg_u64(ptr, x) : in ____xchg()
217 ____cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, in ____cmpxchg() argument
221 size == 1 ? ____cmpxchg_u8(ptr, old, new) : in ____cmpxchg()
222 size == 2 ? ____cmpxchg_u16(ptr, old, new) : in ____cmpxchg()
223 size == 4 ? ____cmpxchg_u32(ptr, old, new) : in ____cmpxchg()
224 size == 8 ? ____cmpxchg_u64(ptr, old, new) : in ____cmpxchg()
[all …]
/arch/arc/include/asm/
cmpxchg.h
22 #define __cmpxchg(ptr, old, new) \ argument
24 __typeof__(*(ptr)) _prev; \
33 : "r"(ptr), /* Not "m": llock only supports reg */ \
42 #define arch_cmpxchg_relaxed(ptr, old, new) \ argument
44 __typeof__(ptr) _p_ = (ptr); \
45 __typeof__(*(ptr)) _o_ = (old); \
46 __typeof__(*(ptr)) _n_ = (new); \
47 __typeof__(*(ptr)) _prev_; \
51 …_prev_ = (__typeof__(*(ptr)))cmpxchg_emu_u8((volatile u8 *__force)_p_, (uintptr_t)_o_, (uintptr_t)…
64 #define arch_cmpxchg(ptr, old, new) \ argument
[all …]
/arch/riscv/include/asm/
cmpxchg.h
52 #define _arch_xchg(ptr, new, sc_sfx, swap_sfx, prepend, \ argument
55 __typeof__(ptr) __ptr = (ptr); \
79 #define arch_xchg_relaxed(ptr, x) \ argument
80 _arch_xchg(ptr, x, "", "", "", "", "")
82 #define arch_xchg_acquire(ptr, x) \ argument
83 _arch_xchg(ptr, x, "", "", "", \
86 #define arch_xchg_release(ptr, x) \ argument
87 _arch_xchg(ptr, x, "", "", RISCV_RELEASE_BARRIER, "", "")
89 #define arch_xchg(ptr, x) \ argument
90 _arch_xchg(ptr, x, ".rl", ".aqrl", "", RISCV_FULL_BARRIER, "")
[all …]
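
The RISC-V variants above differ only in the fences and .aq/.rl suffixes glued into the asm. One rough way to read them is as the C11 memory orders; the mapping below is an approximation (the kernel's fully ordered arch_xchg() is rendered here as SEQ_CST), and the helper names are invented for the sketch.

#include <stdint.h>

static inline uint32_t xchg_relaxed32(volatile uint32_t *p, uint32_t v)
{ return __atomic_exchange_n(p, v, __ATOMIC_RELAXED); }        /* arch_xchg_relaxed() */

static inline uint32_t xchg_acquire32(volatile uint32_t *p, uint32_t v)
{ return __atomic_exchange_n(p, v, __ATOMIC_ACQUIRE); }        /* arch_xchg_acquire() */

static inline uint32_t xchg_release32(volatile uint32_t *p, uint32_t v)
{ return __atomic_exchange_n(p, v, __ATOMIC_RELEASE); }        /* arch_xchg_release() */

static inline uint32_t xchg_full32(volatile uint32_t *p, uint32_t v)
{ return __atomic_exchange_n(p, v, __ATOMIC_SEQ_CST); }        /* arch_xchg(), approximately */
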
/arch/xtensa/variants/test_kc705_be/include/variant/
tie-asm.h
76 .macro xchal_ncp_store ptr at1 at2 at3 at4 continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
80 xchal_sa_align \ptr, 0, 1020, 4, 4
82 s32i \at1, \ptr, .Lxchal_ofs_+0
85 xchal_sa_align \ptr, 0, 1020, 4, 4
90 xchal_sa_align \ptr, 0, 1016, 4, 4
92 s32i \at1, \ptr, .Lxchal_ofs_+0
94 s32i \at1, \ptr, .Lxchal_ofs_+4
97 xchal_sa_align \ptr, 0, 1016, 4, 4
102 xchal_sa_align \ptr, 0, 1000, 4, 4
104 s32i \at1, \ptr, .Lxchal_ofs_+0
[all …]
/arch/arm/mach-s3c/
pm-common.c
27 void s3c_pm_do_save(struct sleep_save *ptr, int count) in s3c_pm_do_save() argument
29 for (; count > 0; count--, ptr++) { in s3c_pm_do_save()
30 ptr->val = readl_relaxed(ptr->reg); in s3c_pm_do_save()
31 S3C_PMDBG("saved %p value %08lx\n", ptr->reg, ptr->val); in s3c_pm_do_save()
46 void s3c_pm_do_restore(const struct sleep_save *ptr, int count) in s3c_pm_do_restore() argument
48 for (; count > 0; count--, ptr++) { in s3c_pm_do_restore()
50 ptr->reg, ptr->val, readl_relaxed(ptr->reg)); in s3c_pm_do_restore()
52 writel_relaxed(ptr->val, ptr->reg); in s3c_pm_do_restore()
69 void s3c_pm_do_restore_core(const struct sleep_save *ptr, int count) in s3c_pm_do_restore_core() argument
71 for (; count > 0; count--, ptr++) in s3c_pm_do_restore_core()
[all …]
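
s3c_pm_do_save()/s3c_pm_do_restore() above walk a table of sleep_save entries, stashing register values before suspend and writing them back afterwards. Here is a minimal stand-alone analog of that pattern, with plain memory accesses standing in for readl_relaxed()/writel_relaxed(); only the struct and field names are borrowed from the snippet for readability.

#include <stdio.h>

struct sleep_save {
        volatile unsigned long *reg;    /* would be an ioremap'd register */
        unsigned long val;              /* saved contents */
};

static void do_save(struct sleep_save *ptr, int count)
{
        for (; count > 0; count--, ptr++)
                ptr->val = *ptr->reg;
}

static void do_restore(const struct sleep_save *ptr, int count)
{
        for (; count > 0; count--, ptr++)
                *ptr->reg = ptr->val;
}

int main(void)
{
        static volatile unsigned long regA = 0x11, regB = 0x22;
        struct sleep_save save[] = { { &regA, 0 }, { &regB, 0 } };

        do_save(save, 2);
        regA = 0; regB = 0;                     /* "lost" across suspend */
        do_restore(save, 2);
        printf("%lx %lx\n", regA, regB);        /* prints 11 22 */
        return 0;
}
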
/arch/openrisc/include/asm/
uaccess.h
45 #define get_user(x, ptr) \ argument
46 __get_user_check((x), (ptr), sizeof(*(ptr)))
47 #define put_user(x, ptr) \ argument
48 __put_user_check((__typeof__(*(ptr)))(x), (ptr), sizeof(*(ptr)))
50 #define __get_user(x, ptr) \ argument
51 __get_user_nocheck((x), (ptr), sizeof(*(ptr)))
52 #define __put_user(x, ptr) \ argument
53 __put_user_nocheck((__typeof__(*(ptr)))(x), (ptr), sizeof(*(ptr)))
57 #define __put_user_nocheck(x, ptr, size) \ argument
60 __put_user_size((x), (ptr), (size), __pu_err); \
[all …]
/arch/m68k/include/asm/
cmpxchg.h
13 static inline unsigned long __arch_xchg(unsigned long x, volatile void * ptr, int size) in __arch_xchg() argument
21 swap(*(u8 *)ptr, x); in __arch_xchg()
24 swap(*(u16 *)ptr, x); in __arch_xchg()
27 swap(*(u32 *)ptr, x); in __arch_xchg()
30 x = __invalid_xchg_size(x, ptr, size); in __arch_xchg()
38 static inline unsigned long __arch_xchg(unsigned long x, volatile void * ptr, int size) in __arch_xchg() argument
47 : "=&d" (x) : "d" (x), "m" (*__xg(u8, ptr)) : "memory"); in __arch_xchg()
55 : "=&d" (x) : "d" (x), "m" (*__xg(u16, ptr)) : "memory"); in __arch_xchg()
63 : "=&d" (x) : "d" (x), "m" (*__xg(u32, ptr)) : "memory"); in __arch_xchg()
66 x = __invalid_xchg_size(x, ptr, size); in __arch_xchg()
[all …]
/arch/xtensa/variants/test_mmuhifi_c3/include/variant/
tie-asm.h
36 .macro xchal_ncp_store ptr at1 at2 at3 at4 continue=0 ofs=-1 select=XTHAL_SAS_ALL
39 xchal_sa_align \ptr, 0, 1024-4, 4, 4
41 s32i \at1, \ptr, .Lxchal_ofs_ + 0
45 xchal_sa_align \ptr, 0, 1024-4, 4, 4
47 s32i \at1, \ptr, .Lxchal_ofs_ + 0
51 xchal_sa_align \ptr, 0, 1024-4, 4, 4
53 s32i \at1, \ptr, .Lxchal_ofs_ + 0
63 .macro xchal_ncp_load ptr at1 at2 at3 at4 continue=0 ofs=-1 select=XTHAL_SAS_ALL
66 xchal_sa_align \ptr, 0, 1024-4, 4, 4
67 l32i \at1, \ptr, .Lxchal_ofs_ + 0
[all …]
/arch/parisc/lib/
bitops.c
21 unsigned long notrace __xchg64(unsigned long x, volatile unsigned long *ptr) in __xchg64() argument
25 _atomic_spin_lock_irqsave(ptr, flags); in __xchg64()
26 temp = *ptr; in __xchg64()
27 *ptr = x; in __xchg64()
28 _atomic_spin_unlock_irqrestore(ptr, flags); in __xchg64()
33 unsigned long notrace __xchg32(int x, volatile int *ptr) in __xchg32() argument
38 _atomic_spin_lock_irqsave(ptr, flags); in __xchg32()
39 temp = (long) *ptr; /* XXX - sign extension wanted? */ in __xchg32()
40 *ptr = x; in __xchg32()
41 _atomic_spin_unlock_irqrestore(ptr, flags); in __xchg32()
[all …]
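
The parisc helpers above fall back to a lock when the CPU has no wide enough atomic instruction: take an IRQ-safe hashed spinlock, do the plain load and store, release it. A user-space sketch under that assumption follows, with a single pthread mutex replacing _atomic_spin_lock_irqsave(); build with -pthread.

#include <pthread.h>

static pthread_mutex_t xchg_lock = PTHREAD_MUTEX_INITIALIZER;

static unsigned long locked_xchg(unsigned long x, volatile unsigned long *ptr)
{
        unsigned long temp;

        pthread_mutex_lock(&xchg_lock);         /* kernel: hashed spinlock + IRQ disable */
        temp = *ptr;
        *ptr = x;
        pthread_mutex_unlock(&xchg_lock);
        return temp;
}

int main(void)
{
        volatile unsigned long v = 1;
        return locked_xchg(2, &v) != 1 || v != 2;
}
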
/arch/sh/include/asm/
uaccess.h
20 #define put_user(x,ptr) __put_user_check((x), (ptr), sizeof(*(ptr))) argument
21 #define get_user(x,ptr) __get_user_check((x), (ptr), sizeof(*(ptr))) argument
28 #define __put_user(x,ptr) __put_user_nocheck((x), (ptr), sizeof(*(ptr))) argument
29 #define __get_user(x,ptr) __get_user_nocheck((x), (ptr), sizeof(*(ptr))) argument
34 #define __get_user_nocheck(x,ptr,size) \ argument
38 const __typeof__(*(ptr)) __user *__gu_addr = (ptr); \
39 __chk_user_ptr(ptr); \
41 (x) = (__force __typeof__(*(ptr)))__gu_val; \
45 #define __get_user_check(x,ptr,size) \ argument
49 const __typeof__(*(ptr)) __user *__gu_addr = (ptr); \
[all …]
cmpxchg.h
26 #define __arch_xchg(ptr, x, size) \ argument
29 volatile void *__xchg_ptr = (ptr); \
49 #define arch_xchg(ptr,x) \ argument
50 ((__typeof__(*(ptr)))__arch_xchg((ptr),(unsigned long)(x), sizeof(*(ptr))))
56 static inline unsigned long __cmpxchg(volatile void * ptr, unsigned long old, in __cmpxchg() argument
61 return cmpxchg_emu_u8(ptr, old, new); in __cmpxchg()
63 return __cmpxchg_u32(ptr, old, new); in __cmpxchg()
69 #define arch_cmpxchg(ptr,o,n) \ argument
71 __typeof__(*(ptr)) _o_ = (o); \
72 __typeof__(*(ptr)) _n_ = (n); \
[all …]
/arch/powerpc/include/asm/
cmpxchg.h
218 __xchg_local(void *ptr, unsigned long x, unsigned int size) in __xchg_local() argument
222 return __xchg_u8_local(ptr, x); in __xchg_local()
224 return __xchg_u16_local(ptr, x); in __xchg_local()
226 return __xchg_u32_local(ptr, x); in __xchg_local()
229 return __xchg_u64_local(ptr, x); in __xchg_local()
237 __xchg_relaxed(void *ptr, unsigned long x, unsigned int size) in __xchg_relaxed() argument
241 return __xchg_u8_relaxed(ptr, x); in __xchg_relaxed()
243 return __xchg_u16_relaxed(ptr, x); in __xchg_relaxed()
245 return __xchg_u32_relaxed(ptr, x); in __xchg_relaxed()
248 return __xchg_u64_relaxed(ptr, x); in __xchg_relaxed()
[all …]
/arch/s390/include/asm/
atomic_ops.h
63 static __always_inline op_type op_name(op_type val, op_type *ptr) \
70 : [old] "=d" (old), [ptr] "+QS" (*ptr) \
93 static __always_inline void op_name(op_type val, op_type *ptr) \
98 : [ptr] "+QS" (*ptr) : [val] "i" (val) : "cc", "memory");\
114 static __always_inline int op_name(int val, int *ptr) \
123 : [old] "=d" (old), [new] "=&d" (new), [ptr] "+Q" (*ptr)\
124 : [val] "d" (val), "0" (*ptr) : "cc", "memory"); \
140 static __always_inline long op_name(long val, long *ptr) \
149 : [old] "=d" (old), [new] "=&d" (new), [ptr] "+QS" (*ptr)\
150 : [val] "d" (val), "0" (*ptr) : "cc", "memory"); \
[all …]
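
The s390 macro templates above stamp out two families per type: fetch-style operations that return the old value and plain ones whose result is discarded. The same split expressed with __atomic builtins instead of the laa/laag inline assembly; the function names are invented for the sketch and the ordering shown is only an approximation of the kernel's.

static inline int fetch_add_int(int val, int *ptr)
{
        return __atomic_fetch_add(ptr, val, __ATOMIC_SEQ_CST); /* returns the old value */
}

static inline void add_int(int val, int *ptr)
{
        (void)__atomic_fetch_add(ptr, val, __ATOMIC_RELAXED);  /* result discarded */
}
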
