/include/asm-generic/ |
D | cmpxchg.h |
    24  unsigned long __generic_xchg(unsigned long x, volatile void *ptr, int size)   in __generic_xchg() argument
    31  return __xchg_u8(x, ptr);   in __generic_xchg()
    34  ret = *(volatile u8 *)ptr;   in __generic_xchg()
    35  *(volatile u8 *)ptr = (x & 0xffu);   in __generic_xchg()
    42  return __xchg_u16(x, ptr);   in __generic_xchg()
    45  ret = *(volatile u16 *)ptr;   in __generic_xchg()
    46  *(volatile u16 *)ptr = (x & 0xffffu);   in __generic_xchg()
    53  return __xchg_u32(x, ptr);   in __generic_xchg()
    56  ret = *(volatile u32 *)ptr;   in __generic_xchg()
    57  *(volatile u32 *)ptr = (x & 0xffffffffu);   in __generic_xchg()
    [all …]
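
__generic_xchg() dispatches on the operand size, returns the old value, and stores the new one; atomicity comes from the arch hooks (or IRQ masking on UP), not from the C shown here. A standalone sketch of the same size-dispatch shape, with an invented name (sketch_xchg) and no atomicity guarantees:

#include <stdint.h>
#include <stdio.h>

/* Sketch of the size-dispatch pattern used by __generic_xchg():
 * read the old value, store the new one, return the old value.
 * Unlike the kernel helper, nothing here is atomic. */
static unsigned long sketch_xchg(unsigned long x, volatile void *ptr, int size)
{
    unsigned long ret;

    switch (size) {
    case 1:
        ret = *(volatile uint8_t *)ptr;
        *(volatile uint8_t *)ptr = (uint8_t)x;
        return ret;
    case 2:
        ret = *(volatile uint16_t *)ptr;
        *(volatile uint16_t *)ptr = (uint16_t)x;
        return ret;
    case 4:
        ret = *(volatile uint32_t *)ptr;
        *(volatile uint32_t *)ptr = (uint32_t)x;
        return ret;
    default:
        return 0;   /* unsupported size */
    }
}

int main(void)
{
    uint32_t v = 5;
    unsigned long old = sketch_xchg(9, &v, sizeof(v));

    printf("old=%lu new=%u\n", old, (unsigned)v);   /* old=5 new=9 */
    return 0;
}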
|
D | cmpxchg-local.h |
    8   extern unsigned long wrong_size_cmpxchg(volatile void *ptr)
    15  static inline unsigned long __generic_cmpxchg_local(volatile void *ptr,   in __generic_cmpxchg_local() argument
    24  wrong_size_cmpxchg(ptr);   in __generic_cmpxchg_local()
    28  case 1: prev = *(u8 *)ptr;   in __generic_cmpxchg_local()
    30  *(u8 *)ptr = (new & 0xffu);   in __generic_cmpxchg_local()
    32  case 2: prev = *(u16 *)ptr;   in __generic_cmpxchg_local()
    34  *(u16 *)ptr = (new & 0xffffu);   in __generic_cmpxchg_local()
    36  case 4: prev = *(u32 *)ptr;   in __generic_cmpxchg_local()
    38  *(u32 *)ptr = (new & 0xffffffffu);   in __generic_cmpxchg_local()
    40  case 8: prev = *(u64 *)ptr;   in __generic_cmpxchg_local()
    [all …]
|
D | uaccess.h |
    104  #define __put_user(x, ptr) \   argument
    106  __typeof__(*(ptr)) __x = (x); \
    108  __chk_user_ptr(ptr); \
    109  switch (sizeof (*(ptr))) { \
    114  __pu_err = __put_user_fn(sizeof (*(ptr)), \
    115  ptr, &__x); \
    124  #define put_user(x, ptr) \   argument
    126  void __user *__p = (ptr); \
    128  access_ok(__p, sizeof(*ptr)) ? \
    129  __put_user((x), ((__typeof__(*(ptr)) __user *)__p)) : \
    [all …]
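
put_user() validates the user pointer with access_ok() and then stores one value of the pointee's size, returning 0 on success or -EFAULT. A hedged kernel-context sketch of typical use; the helper name (report_status) and its role are invented for illustration:

#include <linux/uaccess.h>
#include <linux/errno.h>

/* Illustrative only: copy a driver's 32-bit status word to user space.
 * put_user() fails with a non-zero result if the pointer is not writable. */
static long report_status(u32 status, u32 __user *uptr)
{
    if (put_user(status, uptr))
        return -EFAULT;
    return 0;
}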
|
D | unaligned.h |
    12  #define __get_unaligned_t(type, ptr) ({ \   argument
    13  const struct { type x; } __packed *__pptr = (typeof(__pptr))(ptr); \
    17  #define __put_unaligned_t(type, val, ptr) do { \   argument
    18  struct { type x; } __packed *__pptr = (typeof(__pptr))(ptr); \
    22  #define get_unaligned(ptr) __get_unaligned_t(typeof(*(ptr)), (ptr))   argument
    23  #define put_unaligned(val, ptr) __put_unaligned_t(typeof(*(ptr)), (val), (ptr))   argument
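
get_unaligned()/put_unaligned() wrap the access in a one-member __packed struct so the compiler emits loads and stores that are safe for any alignment. The same trick in standalone C, using GCC/Clang's packed attribute (the names here are illustrative):

#include <stdint.h>
#include <stdio.h>

/* Same idea as __get_unaligned_t()/__put_unaligned_t(): a packed
 * single-member struct forces the compiler to assume no alignment. */
struct una_u32 {
    uint32_t x;
} __attribute__((packed));

static uint32_t get_unaligned_u32(const void *p)
{
    return ((const struct una_u32 *)p)->x;
}

static void put_unaligned_u32(uint32_t val, void *p)
{
    ((struct una_u32 *)p)->x = val;
}

int main(void)
{
    unsigned char buf[8] = { 0 };

    put_unaligned_u32(0x12345678u, buf + 1);            /* misaligned by one byte */
    printf("%#x\n", (unsigned)get_unaligned_u32(buf + 1)); /* 0x12345678 */
    return 0;
}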
|
/include/linux/ |
D | kmemleak.h |
    18  extern void kmemleak_alloc(const void *ptr, size_t size, int min_count,
    20  extern void kmemleak_alloc_percpu(const void __percpu *ptr, size_t size,
    24  extern void kmemleak_free(const void *ptr) __ref;
    25  extern void kmemleak_free_part(const void *ptr, size_t size) __ref;
    26  extern void kmemleak_free_percpu(const void __percpu *ptr) __ref;
    27  extern void kmemleak_update_trace(const void *ptr) __ref;
    28  extern void kmemleak_not_leak(const void *ptr) __ref;
    29  extern void kmemleak_ignore(const void *ptr) __ref;
    30  extern void kmemleak_scan_area(const void *ptr, size_t size, gfp_t gfp) __ref;
    31  extern void kmemleak_no_scan(const void *ptr) __ref;
    [all …]
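
These hooks annotate objects the kmemleak scanner would otherwise misjudge, for example buffers whose only remaining reference lives somewhere the scanner cannot see. A hedged kernel-context sketch; the surrounding helper (alloc_hw_buffer) is invented:

#include <linux/slab.h>
#include <linux/kmemleak.h>

/* Illustrative only: the pointer will be stored where kmemleak cannot
 * scan it (e.g. programmed into a device register as a DMA address),
 * so tell the scanner not to report the object as a leak. */
static void *alloc_hw_buffer(size_t size)
{
    void *buf = kmalloc(size, GFP_KERNEL);

    if (buf)
        kmemleak_not_leak(buf);
    return buf;
}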
|
D | err.h |
    49   static inline long __must_check PTR_ERR(__force const void *ptr)   in PTR_ERR() argument
    51   return (long) ptr;   in PTR_ERR()
    59   static inline bool __must_check IS_ERR(__force const void *ptr)   in IS_ERR() argument
    61   return IS_ERR_VALUE((unsigned long)ptr);   in IS_ERR()
    70   static inline bool __must_check IS_ERR_OR_NULL(__force const void *ptr)   in IS_ERR_OR_NULL() argument
    72   return unlikely(!ptr) || IS_ERR_VALUE((unsigned long)ptr);   in IS_ERR_OR_NULL()
    82   static inline void * __must_check ERR_CAST(__force const void *ptr)   in ERR_CAST() argument
    85   return (void *) ptr;   in ERR_CAST()
    105  static inline int __must_check PTR_ERR_OR_ZERO(__force const void *ptr)   in PTR_ERR_OR_ZERO() argument
    107  if (IS_ERR(ptr))   in PTR_ERR_OR_ZERO()
    [all …]
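
err.h encodes a small negative errno in the very top of the pointer range, so a function can hand back either a valid object or an error through a single pointer. A hedged usage sketch; struct foo and both helpers are invented for illustration:

#include <linux/err.h>
#include <linux/slab.h>

struct foo {
    int id;
};

/* Illustrative only: return a valid object or an errno encoded as a
 * pointer; callers separate the two cases with IS_ERR(). */
static struct foo *foo_create(void)
{
    struct foo *f = kzalloc(sizeof(*f), GFP_KERNEL);

    if (!f)
        return ERR_PTR(-ENOMEM);
    return f;
}

static int foo_use(void)
{
    struct foo *f = foo_create();

    if (IS_ERR(f))
        return PTR_ERR(f);  /* e.g. -ENOMEM */
    kfree(f);
    return 0;
}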
|
D | container_of.h |
    18  #define container_of(ptr, type, member) ({ \   argument
    19  void *__mptr = (void *)(ptr); \
    20  static_assert(__same_type(*(ptr), ((type *)0)->member) || \
    21  __same_type(*(ptr), void), \
    32  #define container_of_const(ptr, type, member) \   argument
    33  _Generic(ptr, \
    34  const typeof(*(ptr)) *: ((const type *)container_of(ptr, type, member)),\
    35  default: ((type *)container_of(ptr, type, member)) \
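
container_of() subtracts the member's offset from the member's address to recover the enclosing structure; the static_assert only verifies that ptr really points at that member's type. The core arithmetic in standalone C with offsetof() (my_container_of and the structs are illustrative):

#include <stddef.h>
#include <stdio.h>

struct item {
    int key;
    struct list_node {
        struct list_node *next;
    } node;
};

/* Same arithmetic as container_of(): subtract the member offset from
 * the member's address to get back to the enclosing structure. */
#define my_container_of(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

int main(void)
{
    struct item it = { .key = 42 };
    struct list_node *n = &it.node;
    struct item *back = my_container_of(n, struct item, node);

    printf("%d\n", back->key);  /* 42 */
    return 0;
}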
|
D | kcsan-checks.h |
    37   void __kcsan_check_access(const volatile void *ptr, size_t size, int type);
    142  const volatile void *ptr;   member
    173  kcsan_begin_scoped_access(const volatile void *ptr, size_t size, int type,
    189  static inline void __kcsan_check_access(const volatile void *ptr, size_t size,   in __kcsan_check_access() argument
    209  kcsan_begin_scoped_access(const volatile void *ptr, size_t size, int type,   in kcsan_begin_scoped_access() argument
    229  static inline void kcsan_check_access(const volatile void *ptr, size_t size,   in kcsan_check_access() argument
    276  #define __kcsan_check_read(ptr, size) __kcsan_check_access(ptr, size, 0)   argument
    284  #define __kcsan_check_write(ptr, size) \   argument
    285  __kcsan_check_access(ptr, size, KCSAN_ACCESS_WRITE)
    293  #define __kcsan_check_read_write(ptr, size) \   argument
    [all …]
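
The __kcsan_check_read()/__kcsan_check_write() macros let code report accesses to KCSAN explicitly, outside the compiler's automatic instrumentation. A minimal, hedged sketch; snapshot_counter() is an invented example of an intentionally lockless read:

#include <linux/kcsan-checks.h>

/* Illustrative only: explicitly report a word-sized read to KCSAN
 * before performing it without any locking. */
static unsigned long snapshot_counter(const unsigned long *counter)
{
    __kcsan_check_read(counter, sizeof(*counter));
    return *counter;
}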
|
D | if_tun.h |
    19  void *ptr;   member
    30  static inline bool tun_is_xdp_frame(void *ptr)   in tun_is_xdp_frame() argument
    32  return (unsigned long)ptr & TUN_XDP_FLAG;   in tun_is_xdp_frame()
    38  static inline struct xdp_frame *tun_ptr_to_xdp(void *ptr)   in tun_ptr_to_xdp() argument
    40  return (void *)((unsigned long)ptr & ~TUN_XDP_FLAG);   in tun_ptr_to_xdp()
    42  void tun_ptr_free(void *ptr);
    56  static inline bool tun_is_xdp_frame(void *ptr)   in tun_is_xdp_frame() argument
    64  static inline struct xdp_frame *tun_ptr_to_xdp(void *ptr)   in tun_ptr_to_xdp() argument
    68  static inline void tun_ptr_free(void *ptr)   in tun_ptr_free() argument
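
tun stores either an sk_buff or an xdp_frame in the same ring slot and tells them apart by setting the pointer's low bit (TUN_XDP_FLAG). The tagging trick itself in standalone C, with invented names standing in for the tun helpers:

#include <stdint.h>
#include <stdio.h>

#define XDP_FLAG 0x1UL  /* plays the role of TUN_XDP_FLAG */

/* Tag a pointer by setting its low bit; valid because the pointed-to
 * objects are at least 2-byte aligned, so that bit is otherwise zero. */
static void *tag_xdp(void *frame)
{
    return (void *)((uintptr_t)frame | XDP_FLAG);
}

static int is_xdp(void *ptr)
{
    return (uintptr_t)ptr & XDP_FLAG;
}

static void *untag(void *ptr)
{
    return (void *)((uintptr_t)ptr & ~XDP_FLAG);
}

int main(void)
{
    static int frame;                   /* stand-in for an xdp_frame */
    void *slot = tag_xdp(&frame);

    printf("xdp=%d same=%d\n", is_xdp(slot), untag(slot) == (void *)&frame);
    return 0;
}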
|
D | list_nulls.h |
    29  #define INIT_HLIST_NULLS_HEAD(ptr, nulls) \   argument
    30  ((ptr)->first = (struct hlist_nulls_node *) NULLS_MARKER(nulls))
    32  #define hlist_nulls_entry(ptr, type, member) container_of(ptr,type,member)   argument
    34  #define hlist_nulls_entry_safe(ptr, type, member) \   argument
    35  ({ typeof(ptr) ____ptr = (ptr); \
    43  static inline int is_a_nulls(const struct hlist_nulls_node *ptr)   in is_a_nulls() argument
    45  return ((unsigned long)ptr & 1);   in is_a_nulls()
    54  static inline unsigned long get_nulls_value(const struct hlist_nulls_node *ptr)   in get_nulls_value() argument
    56  return ((unsigned long)ptr) >> 1;   in get_nulls_value()
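
A nulls list ends not in NULL but in an odd "pointer" whose upper bits carry a marker (typically the hash bucket number), so a lockless reader that was moved to another chain can detect it from the terminator it reaches. The encoding in standalone C, mirroring NULLS_MARKER()/is_a_nulls()/get_nulls_value():

#include <stdio.h>

/* Terminator encoding: an odd value carrying a marker in bits 1..N. */
#define MY_NULLS_MARKER(value)  (1UL | (((unsigned long)(value)) << 1))

static int is_a_nulls(unsigned long ptr)
{
    return ptr & 1;
}

static unsigned long get_nulls_value(unsigned long ptr)
{
    return ptr >> 1;
}

int main(void)
{
    unsigned long end = MY_NULLS_MARKER(7);     /* e.g. hash bucket 7 */

    printf("nulls=%d value=%lu\n", is_a_nulls(end), get_nulls_value(end));
    return 0;
}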
|
D | adfs_fs.h |
    12  static inline int adfs_checkbblk(unsigned char *ptr)   in adfs_checkbblk() argument
    15  unsigned char *p = ptr + 511;   in adfs_checkbblk()
    20  } while (p != ptr);   in adfs_checkbblk()
    22  return (result & 0xff) != ptr[511];   in adfs_checkbblk()
|
D | ptr_ring.h |
    104  static inline int __ptr_ring_produce(struct ptr_ring *r, void *ptr)   in __ptr_ring_produce() argument
    113  WRITE_ONCE(r->queue[r->producer++], ptr);   in __ptr_ring_produce()
    124  static inline int ptr_ring_produce(struct ptr_ring *r, void *ptr)   in ptr_ring_produce() argument
    129  ret = __ptr_ring_produce(r, ptr);   in ptr_ring_produce()
    135  static inline int ptr_ring_produce_irq(struct ptr_ring *r, void *ptr)   in ptr_ring_produce_irq() argument
    140  ret = __ptr_ring_produce(r, ptr);   in ptr_ring_produce_irq()
    146  static inline int ptr_ring_produce_any(struct ptr_ring *r, void *ptr)   in ptr_ring_produce_any() argument
    152  ret = __ptr_ring_produce(r, ptr);   in ptr_ring_produce_any()
    158  static inline int ptr_ring_produce_bh(struct ptr_ring *r, void *ptr)   in ptr_ring_produce_bh() argument
    163  ret = __ptr_ring_produce(r, ptr);   in ptr_ring_produce_bh()
    [all …]
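
A ptr_ring is a fixed-size FIFO of void pointers with separate producer and consumer locks; the _irq/_bh/_any variants only differ in how they mask interrupts around the producer lock. A hedged kernel-context sketch of one produce/consume round (the ring size and item handling are invented):

#include <linux/ptr_ring.h>
#include <linux/slab.h>

/* Illustrative only: a 256-slot ring passing driver-private items. */
static int ring_demo(void)
{
    struct ptr_ring ring;
    void *item, *out;
    int err;

    err = ptr_ring_init(&ring, 256, GFP_KERNEL);
    if (err)
        return err;

    item = kmalloc(64, GFP_KERNEL);
    if (item && ptr_ring_produce(&ring, item))  /* non-zero means ring full */
        kfree(item);

    out = ptr_ring_consume(&ring);              /* NULL if the ring is empty */
    kfree(out);

    ptr_ring_cleanup(&ring, NULL);
    return 0;
}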
|
D | kallsyms.h |
    49  static inline void *dereference_symbol_descriptor(void *ptr)   in dereference_symbol_descriptor() argument
    54  ptr = dereference_kernel_function_descriptor(ptr);   in dereference_symbol_descriptor()
    55  if (is_ksym_addr((unsigned long)ptr))   in dereference_symbol_descriptor()
    56  return ptr;   in dereference_symbol_descriptor()
    59  mod = __module_address((unsigned long)ptr);   in dereference_symbol_descriptor()
    63  ptr = dereference_module_function_descriptor(mod, ptr);   in dereference_symbol_descriptor()
    65  return ptr;   in dereference_symbol_descriptor()
|
D | bpf_mem_alloc.h |
    30  void bpf_mem_free(struct bpf_mem_alloc *ma, void *ptr);
    31  void bpf_mem_free_rcu(struct bpf_mem_alloc *ma, void *ptr);
    35  void bpf_mem_cache_free(struct bpf_mem_alloc *ma, void *ptr);
    36  void bpf_mem_cache_free_rcu(struct bpf_mem_alloc *ma, void *ptr);
    37  void bpf_mem_cache_raw_free(void *ptr);
|
/include/rdma/ |
D | iba.h |
    12  static inline u32 _iba_get8(const u8 *ptr)   in _iba_get8() argument
    14  return *ptr;   in _iba_get8()
    17  static inline void _iba_set8(u8 *ptr, u32 mask, u32 prep_value)   in _iba_set8() argument
    19  *ptr = (*ptr & ~mask) | prep_value;   in _iba_set8()
    22  static inline u16 _iba_get16(const __be16 *ptr)   in _iba_get16() argument
    24  return be16_to_cpu(*ptr);   in _iba_get16()
    27  static inline void _iba_set16(__be16 *ptr, u16 mask, u16 prep_value)   in _iba_set16() argument
    29  *ptr = cpu_to_be16((be16_to_cpu(*ptr) & ~mask) | prep_value);   in _iba_set16()
    32  static inline u32 _iba_get32(const __be32 *ptr)   in _iba_get32() argument
    34  return be32_to_cpu(*ptr);   in _iba_get32()
    [all …]
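
The IBA accessors read and update big-endian wire words under a mask, so one field of a management datagram can change without disturbing its neighbours. The same read-modify-write shape in standalone C for a 16-bit field, using htons()/ntohs() in place of cpu_to_be16()/be16_to_cpu() (field_get16/field_set16 are invented names):

#include <stdint.h>
#include <stdio.h>
#include <arpa/inet.h>

/* Same shape as _iba_get16()/_iba_set16(): convert from big endian,
 * clear the masked field, OR in the new (pre-shifted) value, convert back. */
static uint16_t field_get16(const uint16_t *be_ptr)
{
    return ntohs(*be_ptr);
}

static void field_set16(uint16_t *be_ptr, uint16_t mask, uint16_t prep_value)
{
    *be_ptr = htons((ntohs(*be_ptr) & ~mask) | prep_value);
}

int main(void)
{
    uint16_t word = htons(0xabcd);

    field_set16(&word, 0x00f0, 0x0010);         /* replace one nibble */
    printf("%#x\n", (unsigned)field_get16(&word));  /* 0xab1d */
    return 0;
}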
|
/include/linux/unaligned/ |
D | packed_struct.h |
    12  const struct __una_u16 *ptr = (const struct __una_u16 *)p;   in __get_unaligned_cpu16() local
    13  return ptr->x;   in __get_unaligned_cpu16()
    18  const struct __una_u32 *ptr = (const struct __una_u32 *)p;   in __get_unaligned_cpu32() local
    19  return ptr->x;   in __get_unaligned_cpu32()
    24  const struct __una_u64 *ptr = (const struct __una_u64 *)p;   in __get_unaligned_cpu64() local
    25  return ptr->x;   in __get_unaligned_cpu64()
    30  struct __una_u16 *ptr = (struct __una_u16 *)p;   in __put_unaligned_cpu16() local
    31  ptr->x = val;   in __put_unaligned_cpu16()
    36  struct __una_u32 *ptr = (struct __una_u32 *)p;   in __put_unaligned_cpu32() local
    37  ptr->x = val;   in __put_unaligned_cpu32()
    [all …]
|
/include/net/netns/ |
D | generic.h |
    36  DECLARE_FLEX_ARRAY(void *, ptr);
    43  void *ptr;   in net_generic() local
    47  ptr = ng->ptr[id];   in net_generic()
    50  return ptr;   in net_generic()
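
net_generic() returns a module's per-network-namespace private area, indexed by an id assigned when its pernet_operations are registered. A hedged kernel-context sketch of the usual pattern (the demo_net* names are invented):

#include <net/net_namespace.h>
#include <net/netns/generic.h>

/* Illustrative only: per-netns private data for a hypothetical module. */
struct demo_net {
    int counter;
};

static unsigned int demo_net_id __read_mostly;

static struct pernet_operations demo_net_ops = {
    .id   = &demo_net_id,
    .size = sizeof(struct demo_net),
};

/* After register_pernet_subsys(&demo_net_ops), every namespace owns one
 * struct demo_net, reachable through the id that registration assigned. */
static struct demo_net *demo_pernet(struct net *net)
{
    return net_generic(net, demo_net_id);
}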
|
/include/linux/mlx4/ |
D | doorbell.h |
    52  #define MLX4_INIT_DOORBELL_LOCK(ptr) do { } while (0)   argument
    53  #define MLX4_GET_DOORBELL_LOCK(ptr) (NULL)   argument
    70  #define MLX4_INIT_DOORBELL_LOCK(ptr) spin_lock_init(ptr)   argument
    71  #define MLX4_GET_DOORBELL_LOCK(ptr) (ptr)   argument
|
/include/net/ |
D | snmp.h |
    146  __typeof__((mib->mibs) + 0) ptr = mib->mibs; \
    147  this_cpu_inc(ptr[basefield##PKTS]); \
    148  this_cpu_add(ptr[basefield##OCTETS], addend); \
    152  __typeof__((mib->mibs) + 0) ptr = mib->mibs; \
    153  __this_cpu_inc(ptr[basefield##PKTS]); \
    154  __this_cpu_add(ptr[basefield##OCTETS], addend); \
    162  __typeof__(*mib) *ptr = raw_cpu_ptr(mib); \
    163  u64_stats_update_begin(&ptr->syncp); \
    164  ptr->mibs[field] += addend; \
    165  u64_stats_update_end(&ptr->syncp); \
    [all …]
|
/include/trace/events/ |
D | percpu.h |
    16  void __percpu *ptr, size_t bytes_alloc, gfp_t gfp_flags),
    19  ptr, bytes_alloc, gfp_flags),
    29  __field( void __percpu *, ptr )
    41  __entry->ptr = ptr;
    50  __entry->base_addr, __entry->off, __entry->ptr,
    56  TP_PROTO(void *base_addr, int off, void __percpu *ptr),
    58  TP_ARGS(base_addr, off, ptr),
    63  __field( void __percpu *, ptr )
    69  __entry->ptr = ptr;
    73  __entry->base_addr, __entry->off, __entry->ptr)
|
D | kmem.h |
    15  const void *ptr,
    20  TP_ARGS(call_site, ptr, s, gfp_flags, node),
    24  __field( const void *, ptr )
    34  __entry->ptr = ptr;
    46  __entry->ptr,
    57  const void *ptr,
    63  TP_ARGS(call_site, ptr, bytes_req, bytes_alloc, gfp_flags, node),
    67  __field( const void *, ptr )
    76  __entry->ptr = ptr;
    85  __entry->ptr,
    [all …]
|
D | tegra_apb_dma.h |
    28  TP_PROTO(struct dma_chan *dc, int count, void *ptr),
    29  TP_ARGS(dc, count, ptr),
    33  __field(void *, ptr)
    38  __entry->ptr = ptr;
    41  __get_str(chan), __entry->count, __entry->ptr)
|
/include/linux/soc/marvell/octeontx2/ |
D | asm.h |
    39  static inline u64 otx2_atomic64_fetch_add(u64 incr, u64 *ptr)   in otx2_atomic64_fetch_add() argument
    45  : [r] "=r" (result), "+m" (*ptr)   in otx2_atomic64_fetch_add()
    46  : [i] "r" (incr), [b] "r" (ptr)   in otx2_atomic64_fetch_add()
    54  #define otx2_atomic64_fetch_add(incr, ptr) ({ incr; })   argument
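
On OcteonTX2 this helper wraps an atomic fetch-and-add in inline assembly; the #define at the end shows it degrading to a stub on other configurations. For comparison, the portable equivalent written with the GCC/Clang __atomic builtins (standalone, invented names):

#include <stdint.h>
#include <stdio.h>

/* Portable counterpart of an atomic fetch-and-add: atomically add
 * 'incr' to *ptr and return the value *ptr held before the addition. */
static uint64_t fetch_add64(uint64_t incr, uint64_t *ptr)
{
    return __atomic_fetch_add(ptr, incr, __ATOMIC_ACQUIRE);
}

int main(void)
{
    uint64_t counter = 40;
    uint64_t old = fetch_add64(2, &counter);

    printf("old=%llu new=%llu\n",
           (unsigned long long)old, (unsigned long long)counter);  /* 40, 42 */
    return 0;
}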
|
/include/drm/ |
D | drm_atomic.h |
    161  struct drm_plane *ptr;   member
    166  struct drm_crtc *ptr;   member
    184  struct drm_connector *ptr;   member
    344  struct drm_private_obj *ptr;   member
    766  for_each_if ((__state)->connectors[__i].ptr && \
    767  ((connector) = (__state)->connectors[__i].ptr, \
    788  for_each_if ((__state)->connectors[__i].ptr && \
    789  ((connector) = (__state)->connectors[__i].ptr, \
    809  for_each_if ((__state)->connectors[__i].ptr && \
    810  ((connector) = (__state)->connectors[__i].ptr, \
    [all …]
|
/include/linux/atomic/ |
D | atomic-instrumented.h |
    4711  #define xchg(ptr, ...) \   argument
    4713  typeof(ptr) __ai_ptr = (ptr); \
    4719  #define xchg_acquire(ptr, ...) \   argument
    4721  typeof(ptr) __ai_ptr = (ptr); \
    4726  #define xchg_release(ptr, ...) \   argument
    4728  typeof(ptr) __ai_ptr = (ptr); \
    4734  #define xchg_relaxed(ptr, ...) \   argument
    4736  typeof(ptr) __ai_ptr = (ptr); \
    4741  #define cmpxchg(ptr, ...) \   argument
    4743  typeof(ptr) __ai_ptr = (ptr); \
    [all …]
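
These generated wrappers add KASAN/KCSAN instrumentation around the raw xchg()/cmpxchg() family and are called exactly like the raw operations. A hedged kernel-context sketch of the classic cmpxchg() retry loop (update_max is an invented example):

#include <linux/atomic.h>
#include <linux/compiler.h>

/* Illustrative only: raise a shared maximum with cmpxchg(), retrying
 * if another CPU updated the value between the read and the exchange. */
static void update_max(unsigned long *max, unsigned long val)
{
    unsigned long old = READ_ONCE(*max);

    while (old < val) {
        unsigned long prev = cmpxchg(max, old, val);

        if (prev == old)    /* exchange happened */
            break;
        old = prev;         /* lost the race, re-check */
    }
}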
|