/include/linux/unaligned/
generic.h

     10  #define __get_unaligned_le(ptr) ((__force typeof(*(ptr)))({ \
     11          __builtin_choose_expr(sizeof(*(ptr)) == 1, *(ptr), \
     12          __builtin_choose_expr(sizeof(*(ptr)) == 2, get_unaligned_le16((ptr)), \
     13          __builtin_choose_expr(sizeof(*(ptr)) == 4, get_unaligned_le32((ptr)), \
     14          __builtin_choose_expr(sizeof(*(ptr)) == 8, get_unaligned_le64((ptr)), \
     18  #define __get_unaligned_be(ptr) ((__force typeof(*(ptr)))({ \
     19          __builtin_choose_expr(sizeof(*(ptr)) == 1, *(ptr), \
     20          __builtin_choose_expr(sizeof(*(ptr)) == 2, get_unaligned_be16((ptr)), \
     21          __builtin_choose_expr(sizeof(*(ptr)) == 4, get_unaligned_be32((ptr)), \
     22          __builtin_choose_expr(sizeof(*(ptr)) == 8, get_unaligned_be64((ptr)), \
     [all …]
packed_struct.h

     12  const struct __una_u16 *ptr = (const struct __una_u16 *)p;   /* in __get_unaligned_cpu16() */
     13  return ptr->x;
     18  const struct __una_u32 *ptr = (const struct __una_u32 *)p;   /* in __get_unaligned_cpu32() */
     19  return ptr->x;
     24  const struct __una_u64 *ptr = (const struct __una_u64 *)p;   /* in __get_unaligned_cpu64() */
     25  return ptr->x;
     30  struct __una_u16 *ptr = (struct __una_u16 *)p;                /* in __put_unaligned_cpu16() */
     31  ptr->x = val;
     36  struct __una_u32 *ptr = (struct __una_u32 *)p;                /* in __put_unaligned_cpu32() */
     37  ptr->x = val;
     [all …]
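Together these helpers back the get_unaligned_le*/be*() and put_unaligned_*() accessors used by drivers. A minimal usage sketch, assuming a received byte buffer with a 32-bit little-endian field at a misaligned offset (the offset and function names are illustrative):

    #include <linux/types.h>
    #include <asm/unaligned.h>

    /* Read a 32-bit LE length field that sits at a misaligned offset
     * inside a received byte buffer (offset 3 is illustrative). */
    static u32 my_parse_len(const u8 *pkt)
    {
            return get_unaligned_le32(pkt + 3);
    }

    /* Store it back the same way; the accessor expands to either the
     * packed-struct access above or byte-wise loads/stores, depending
     * on what the architecture selects. */
    static void my_store_len(u8 *pkt, u32 len)
    {
            put_unaligned_le32(len, pkt + 3);
    }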
/include/linux/
kmemleak.h

     27  extern void kmemleak_alloc(const void *ptr, size_t size, int min_count,
     29  extern void kmemleak_alloc_percpu(const void __percpu *ptr, size_t size) __ref;
     30  extern void kmemleak_free(const void *ptr) __ref;
     31  extern void kmemleak_free_part(const void *ptr, size_t size) __ref;
     32  extern void kmemleak_free_percpu(const void __percpu *ptr) __ref;
     33  extern void kmemleak_padding(const void *ptr, unsigned long offset,
     35  extern void kmemleak_not_leak(const void *ptr) __ref;
     36  extern void kmemleak_ignore(const void *ptr) __ref;
     37  extern void kmemleak_scan_area(const void *ptr, size_t size, gfp_t gfp) __ref;
     38  extern void kmemleak_no_scan(const void *ptr) __ref;
     [all …]
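These hooks let code annotate objects the kmemleak scanner would otherwise misjudge. A hedged sketch (the driver scenario and names are made up; the kmemleak_* signatures match the declarations above):

    #include <linux/slab.h>
    #include <linux/kmemleak.h>

    /* Allocate a buffer whose only reference ends up in a device register,
     * where the scanner cannot see it. */
    static void *my_alloc_dev_scratch(size_t size)
    {
            void *buf = kmalloc(size, GFP_KERNEL);

            if (!buf)
                    return NULL;

            kmemleak_not_leak(buf); /* do not report it as unreferenced */
            kmemleak_no_scan(buf);  /* contents are device-owned, skip scanning */
            return buf;
    }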
err.h

     27  static inline long __must_check PTR_ERR(const void *ptr)
     29          return (long) ptr;
     32  static inline long __must_check IS_ERR(const void *ptr)
     34          return IS_ERR_VALUE((unsigned long)ptr);
     37  static inline long __must_check IS_ERR_OR_NULL(const void *ptr)
     39          return !ptr || IS_ERR_VALUE((unsigned long)ptr);
     49  static inline void * __must_check ERR_CAST(const void *ptr)
     52          return (void *) ptr;
     55  static inline int __must_check PTR_RET(const void *ptr)
     57          if (IS_ERR(ptr))
     [all …]
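The usual calling convention built on these helpers, as a hedged sketch (my_lookup(), struct my_obj and the error choice are illustrative; ERR_PTR() is the companion encoder from the same header):

    #include <linux/err.h>
    #include <linux/slab.h>

    struct my_obj {
            int id;
    };

    static struct my_obj *my_lookup(int id)
    {
            struct my_obj *obj = kzalloc(sizeof(*obj), GFP_KERNEL);

            if (!obj)
                    return ERR_PTR(-ENOMEM);        /* encode an errno in the pointer */
            obj->id = id;
            return obj;
    }

    static int my_use(int id)
    {
            struct my_obj *obj = my_lookup(id);

            if (IS_ERR(obj))
                    return PTR_ERR(obj);            /* decode it back, e.g. -ENOMEM */
            kfree(obj);
            return 0;
    }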
prio_tree.h

     58  #define __INIT_PRIO_TREE_ROOT(ptr, _raw) \
     60          (ptr)->prio_tree_node = NULL; \
     61          (ptr)->index_bits = 1; \
     62          (ptr)->raw = (_raw); \
     65  #define INIT_PRIO_TREE_ROOT(ptr)      __INIT_PRIO_TREE_ROOT(ptr, 0)
     66  #define INIT_RAW_PRIO_TREE_ROOT(ptr)  __INIT_PRIO_TREE_ROOT(ptr, 1)
     68  #define INIT_PRIO_TREE_NODE(ptr) \
     70          (ptr)->left = (ptr)->right = (ptr)->parent = (ptr); \
     73  #define INIT_PRIO_TREE_ITER(ptr) \
     75          (ptr)->cur = NULL; \
     [all …]
list_nulls.h

     24  #define INIT_HLIST_NULLS_HEAD(ptr, nulls) \
     25          ((ptr)->first = (struct hlist_nulls_node *) (1UL | (((long)nulls) << 1)))
     27  #define hlist_nulls_entry(ptr, type, member) container_of(ptr,type,member)
     33  static inline int is_a_nulls(const struct hlist_nulls_node *ptr)
     35          return ((unsigned long)ptr & 1);
     44  static inline unsigned long get_nulls_value(const struct hlist_nulls_node *ptr)
     46          return ((unsigned long)ptr) >> 1;
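A hedged sketch of what the encoding buys you: an empty bucket whose terminating "NULL" carries the bucket number, so a lockless reader can tell which chain it fell off of (the bucket naming is illustrative):

    #include <linux/list_nulls.h>
    #include <linux/bug.h>

    #define MY_BUCKET_ID 42

    static struct hlist_nulls_head my_bucket;

    static void my_bucket_init(void)
    {
            INIT_HLIST_NULLS_HEAD(&my_bucket, MY_BUCKET_ID);

            /* The terminator has bit 0 set, so it is never a valid node
             * pointer, and shifting it back down recovers the bucket id. */
            BUG_ON(!is_a_nulls(my_bucket.first));
            BUG_ON(get_nulls_value(my_bucket.first) != MY_BUCKET_ID);
    }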
i2c-pxa.h

     12  void (*event)(void *ptr, i2c_slave_event_t event);
     13  int (*read) (void *ptr);
     14  void (*write)(void *ptr, unsigned int val);
compiler-intel.h

     21  #define RELOC_HIDE(ptr, off) \
     23          __ptr = (unsigned long) (ptr); \
     24          (typeof(ptr)) (__ptr + (off)); })
adfs_fs.h

     49  static inline int adfs_checkbblk(unsigned char *ptr)
     52          unsigned char *p = ptr + 511;
     57          } while (p != ptr);
     59          return (result & 0xff) != ptr[511];
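The excerpt elides the loop body, but the visible frame is an end-around-carry byte sum over the 512-byte boot block, compared against the checksum stored at offset 511. A hedged standalone sketch of that idea (a reconstruction for illustration, not a quote of adfs_fs.h):

    /* Fold the running sum's carry back into the low byte, then add the
     * next byte, walking from offset 510 down to 0; the stored checksum
     * at offset 511 is excluded from the sum it must match. */
    static int my_adfs_checkbblk(const unsigned char *ptr)
    {
            unsigned int result = 0;
            const unsigned char *p = ptr + 511;

            do {
                    result = (result & 0xff) + (result >> 8);
                    result = result + *--p;
            } while (p != ptr);

            return (result & 0xff) != ptr[511];     /* non-zero means a bad block */
    }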
kmemcheck.h

     70  #define kmemcheck_annotate_bitfield(ptr, name) \
     74          if (!ptr) \
     77          _n = (long) &((ptr)->name##_end) \
     78                  - (long) &((ptr)->name##_begin); \
     81          kmemcheck_mark_initialized(&((ptr)->name##_begin), _n); \
    161  #define kmemcheck_annotate_bitfield(ptr, name) \
async.h

     19  extern async_cookie_t async_schedule(async_func_ptr *ptr, void *data);
     20  extern async_cookie_t async_schedule_domain(async_func_ptr *ptr, void *data,
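A hedged sketch of the common pattern: kick off slow initialization in the background and synchronize before the result is needed (struct my_dev and the function names are illustrative):

    #include <linux/async.h>

    struct my_dev {
            int ready;
    };

    /* Matches the async_func_ptr signature: (void *data, async_cookie_t cookie). */
    static void my_async_init(void *data, async_cookie_t cookie)
    {
            struct my_dev *dev = data;

            /* ... slow probing/firmware work runs concurrently ... */
            dev->ready = 1;
    }

    static void my_probe(struct my_dev *dev)
    {
            async_schedule(my_async_init, dev);

            /* ... unrelated setup continues ... */

            async_synchronize_full();       /* wait for all outstanding async work */
    }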
rculist.h

    225  #define list_entry_rcu(ptr, type, member) \
    226          ({typeof (*ptr) __rcu *__ptr = (typeof (*ptr) __rcu __force *)ptr; \
    227          container_of((typeof(ptr))rcu_dereference_raw(__ptr), type, member); \
    241  #define list_first_entry_rcu(ptr, type, member) \
    242          list_entry_rcu((ptr)->next, type, member)
    255  #define list_first_or_null_rcu(ptr, type, member) \
    256          ({struct list_head *__ptr = (ptr); \
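A hedged read-side/write-side sketch around these macros (struct my_item, my_list and my_lock are illustrative):

    #include <linux/rculist.h>
    #include <linux/rcupdate.h>
    #include <linux/spinlock.h>

    struct my_item {
            int key;
            struct list_head node;
    };

    static LIST_HEAD(my_list);
    static DEFINE_SPINLOCK(my_lock);

    static void my_add(struct my_item *it)
    {
            spin_lock(&my_lock);
            list_add_rcu(&it->node, &my_list);      /* writers still serialize */
            spin_unlock(&my_lock);
    }

    static int my_first_key(void)
    {
            struct my_item *it;
            int key = -1;

            rcu_read_lock();
            it = list_first_or_null_rcu(&my_list, struct my_item, node);
            if (it)
                    key = it->key;  /* only dereference inside the read section */
            rcu_read_unlock();

            return key;
    }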
/include/asm-generic/
cmpxchg.h

     25  unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
     32                  return __xchg_u8(x, ptr);
     35                  ret = *(volatile u8 *)ptr;
     36                  *(volatile u8 *)ptr = x;
     43                  return __xchg_u16(x, ptr);
     46                  ret = *(volatile u16 *)ptr;
     47                  *(volatile u16 *)ptr = x;
     54                  return __xchg_u32(x, ptr);
     57                  ret = *(volatile u32 *)ptr;
     58                  *(volatile u32 *)ptr = x;
     [all …]
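__xchg() is the size-dispatched backend behind the xchg() macro, which stores a new value and hands back the previous one. A hedged sketch of a classic use, assuming xchg() is pulled in through the arch's atomic headers (the flag is illustrative):

    #include <linux/atomic.h>

    static unsigned long my_initialized;

    static void my_init_once(void)
    {
            /* Set the flag and observe the previous value in one step; only
             * the first caller sees 0 and performs the one-time setup. */
            if (xchg(&my_initialized, 1) == 0) {
                    /* ... one-time initialization ... */
            }
    }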
cmpxchg-local.h

      7  extern unsigned long wrong_size_cmpxchg(volatile void *ptr);
     13  static inline unsigned long __cmpxchg_local_generic(volatile void *ptr,
     22                  wrong_size_cmpxchg(ptr);
     26          case 1: prev = *(u8 *)ptr;
     28                  *(u8 *)ptr = (u8)new;
     30          case 2: prev = *(u16 *)ptr;
     32                  *(u16 *)ptr = (u16)new;
     34          case 4: prev = *(u32 *)ptr;
     36                  *(u32 *)ptr = (u32)new;
     38          case 8: prev = *(u64 *)ptr;
     [all …]
uaccess-unaligned.h

      8  #define __get_user_unaligned(x, ptr) \
     10          __typeof__ (*(ptr)) __x; \
     11          __copy_from_user(&__x, (ptr), sizeof(*(ptr))) ? -EFAULT : 0; \
     20  #define __put_user_unaligned(x, ptr) \
     22          __typeof__ (*(ptr)) __x = (x); \
     23          __copy_to_user((ptr), &__x, sizeof(*(ptr))) ? -EFAULT : 0; \
uaccess.h

    143  #define __put_user(x, ptr) \
    145          __typeof__(*(ptr)) __x = (x); \
    147          __chk_user_ptr(ptr); \
    148          switch (sizeof (*(ptr))) { \
    153          __pu_err = __put_user_fn(sizeof (*(ptr)), \
    154                  ptr, &__x); \
    163  #define put_user(x, ptr) \
    166          access_ok(VERIFY_WRITE, ptr, sizeof(*ptr)) ? \
    167          __put_user(x, ptr) : \
    171  static inline int __put_user_fn(size_t size, void __user *ptr, void *x)
     [all …]
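A hedged sketch of put_user() from a driver's read() handler (my_value and the surrounding wiring are illustrative):

    #include <linux/fs.h>
    #include <linux/uaccess.h>

    static int my_value = 42;

    static ssize_t my_read(struct file *file, char __user *buf,
                           size_t count, loff_t *ppos)
    {
            if (count < sizeof(my_value))
                    return -EINVAL;

            /* put_user() performs the access_ok() check shown above and
             * returns non-zero on a faulting user pointer. */
            if (put_user(my_value, (int __user *)buf))
                    return -EFAULT;

            return sizeof(my_value);
    }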
percpu.h

     60  #define __this_cpu_ptr(ptr)  SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
     63  #define this_cpu_ptr(ptr)    SHIFT_PERCPU_PTR(ptr, my_cpu_offset)
     65  #define this_cpu_ptr(ptr)    __this_cpu_ptr(ptr)
     85  #define this_cpu_ptr(ptr)    per_cpu_ptr(ptr, 0)
     86  #define __this_cpu_ptr(ptr)  this_cpu_ptr(ptr)
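A hedged sketch of this_cpu_ptr() against a DEFINE_PER_CPU variable (the statistics structure is illustrative):

    #include <linux/percpu.h>
    #include <linux/preempt.h>

    struct my_stats {
            unsigned long hits;
    };

    static DEFINE_PER_CPU(struct my_stats, my_stats);

    static void my_count_hit(void)
    {
            struct my_stats *s;

            preempt_disable();              /* stay on this CPU's copy */
            s = this_cpu_ptr(&my_stats);
            s->hits++;
            preempt_enable();
    }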
atomic.h

    136  #define atomic_xchg(ptr, v)  (xchg(&(ptr)->counter, (v)))
    139  #define cmpxchg_local(ptr, o, n) \
    140          ((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
    141                  (unsigned long)(n), sizeof(*(ptr))))
    143  #define cmpxchg64_local(ptr, o, n)  __cmpxchg64_local_generic((ptr), (o), (n))
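A hedged sketch of a lock-free state flip using atomic_cmpxchg() and the atomic_xchg() wrapper above (the state values are illustrative):

    #include <linux/atomic.h>
    #include <linux/bug.h>

    #define MY_IDLE     0
    #define MY_RUNNING  1

    static atomic_t my_state = ATOMIC_INIT(MY_IDLE);

    static int my_try_start(void)
    {
            /* Succeeds only for the caller that moved IDLE -> RUNNING. */
            return atomic_cmpxchg(&my_state, MY_IDLE, MY_RUNNING) == MY_IDLE;
    }

    static void my_stop(void)
    {
            int old = atomic_xchg(&my_state, MY_IDLE);      /* unconditional reset */

            WARN_ON(old != MY_RUNNING);
    }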
/include/net/netns/
generic.h

     31  void *ptr[0];                   /* struct member */
     37  void *ptr;                      /* local in net_generic() */
     42  ptr = ng->ptr[id - 1];
     45  BUG_ON(!ptr);
     46  return ptr;
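A hedged sketch of how a subsystem typically uses this per-namespace pointer array: register pernet_operations with .id/.size so the core allocates a slot, then fetch it back with net_generic() (names are illustrative):

    #include <net/net_namespace.h>
    #include <net/netns/generic.h>

    struct my_net {
            int packets_seen;
    };

    static int my_net_id;

    static int __net_init my_net_init(struct net *net)
    {
            struct my_net *mn = net_generic(net, my_net_id);

            mn->packets_seen = 0;   /* slot was allocated per .size below */
            return 0;
    }

    static struct pernet_operations my_net_ops = {
            .init = my_net_init,
            .id   = &my_net_id,
            .size = sizeof(struct my_net),
    };

    /* register_pernet_subsys(&my_net_ops) from the module init path. */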
/include/trace/events/
kmem.h

     14          const void *ptr,
     19          TP_ARGS(call_site, ptr, bytes_req, bytes_alloc, gfp_flags),
     23                  __field( const void *, ptr )
     31                  __entry->ptr = ptr;
     39                  __entry->ptr,
     47          TP_PROTO(unsigned long call_site, const void *ptr,
     50          TP_ARGS(call_site, ptr, bytes_req, bytes_alloc, gfp_flags)
     55          TP_PROTO(unsigned long call_site, const void *ptr,
     58          TP_ARGS(call_site, ptr, bytes_req, bytes_alloc, gfp_flags)
     64          const void *ptr,
     [all …]
/include/linux/mlx4/
doorbell.h

     52  #define MLX4_INIT_DOORBELL_LOCK(ptr)  do { } while (0)
     53  #define MLX4_GET_DOORBELL_LOCK(ptr)   (NULL)
     70  #define MLX4_INIT_DOORBELL_LOCK(ptr)  spin_lock_init(ptr)
     71  #define MLX4_GET_DOORBELL_LOCK(ptr)   (ptr)
/include/xen/
xencomm.h

     38  extern struct xencomm_handle *xencomm_map(void *ptr, unsigned long bytes);
     39  extern struct xencomm_handle *__xencomm_map_no_alloc(void *ptr,
     63  #define xencomm_map_no_alloc(ptr, bytes) \
     65          __xencomm_map_no_alloc(ptr, bytes, xc_desc); })
     70  static inline void *xencomm_pa(void *ptr)
     72          return (void *)xencomm_vtop((unsigned long)ptr);
/include/drm/
drm_mem_util.h

     57  static __inline void drm_free_large(void *ptr)
     59          if (!is_vmalloc_addr(ptr))
     60                  return kfree(ptr);
     62          vfree(ptr);
/include/net/
snmp.h

    171          __typeof__(*mib[0]) *ptr = __this_cpu_ptr((mib)[0]); \
    172          u64_stats_update_begin(&ptr->syncp); \
    173          ptr->mibs[field] += addend; \
    174          u64_stats_update_end(&ptr->syncp); \
    192          __typeof__(*mib[0]) *ptr; \
    193          ptr = __this_cpu_ptr((mib)[0]); \
    194          u64_stats_update_begin(&ptr->syncp); \
    195          ptr->mibs[basefield##PKTS]++; \
    196          ptr->mibs[basefield##OCTETS] += addend; \
    197          u64_stats_update_end(&ptr->syncp); \
/include/linux/mmc/
ioctl.h

     46  #define mmc_ioc_cmd_set_data(ic, ptr)  ic.data_ptr = (__u64)(unsigned long) ptr
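A hedged user-space sketch of issuing an MMC command through this interface; the opcode and argument are illustrative, and the response/command-type bits for .flags are left out because they come from non-exported headers:

    #include <stdint.h>
    #include <string.h>
    #include <sys/ioctl.h>
    #include <linux/mmc/ioctl.h>

    static int my_mmc_read_block(int fd, uint32_t block, void *buf, unsigned int blksz)
    {
            struct mmc_ioc_cmd ic;

            memset(&ic, 0, sizeof(ic));
            ic.opcode = 17;                 /* assumed: READ_SINGLE_BLOCK */
            ic.arg = block;
            /* ic.flags must carry the proper response/command-type bits here. */
            ic.blksz = blksz;
            ic.blocks = 1;
            mmc_ioc_cmd_set_data(ic, buf);  /* stores the buffer pointer as a __u64 */

            return ioctl(fd, MMC_IOC_CMD, &ic);
    }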