/include/linux/
find.h
    21  extern unsigned long _find_first_bit(const unsigned long *addr, unsigned long size);
    22  unsigned long __find_nth_bit(const unsigned long *addr, unsigned long size, unsigned long n);
    24  unsigned long size, unsigned long n);
    26  unsigned long size, unsigned long n);
    28  const unsigned long *addr3, unsigned long size,
    31  const unsigned long *addr2, unsigned long size);
    33  const unsigned long *addr3, unsigned long size);
    34  extern unsigned long _find_first_zero_bit(const unsigned long *addr, unsigned long size);
    35  extern unsigned long _find_last_bit(const unsigned long *addr, unsigned long size);
    38  unsigned long _find_first_zero_bit_le(const unsigned long *addr, unsigned long size);
    [all …]
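The _find_* primitives above back the public find_first_bit()/find_next_bit() family; a minimal sketch of those wrappers in use (the bitmap and its 64-bit size are illustrative, not from find.h):

#include <linux/bitmap.h>
#include <linux/bitops.h>
#include <linux/printk.h>

/* Illustrative only: exercise the find_*_bit() wrappers that sit on top
 * of the _find_* primitives listed above. */
static void find_bit_example(void)
{
        DECLARE_BITMAP(mask, 64);
        unsigned long first, next, hole;

        bitmap_zero(mask, 64);
        set_bit(3, mask);
        set_bit(17, mask);

        first = find_first_bit(mask, 64);           /* -> 3  */
        next  = find_next_bit(mask, 64, first + 1); /* -> 17 */
        hole  = find_first_zero_bit(mask, 64);      /* -> 0  */
        /* Each of these returns the size argument (64) when nothing matches. */

        pr_info("first=%lu next=%lu hole=%lu\n", first, next, hole);
}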
instrumented.h
    24  static __always_inline void instrument_read(const volatile void *v, size_t size)
    26  kasan_check_read(v, size);
    27  kcsan_check_read(v, size);
    38  static __always_inline void instrument_write(const volatile void *v, size_t size)
    40  kasan_check_write(v, size);
    41  kcsan_check_write(v, size);
    52  static __always_inline void instrument_read_write(const volatile void *v, size_t size)
    54  kasan_check_write(v, size);
    55  kcsan_check_read_write(v, size);
    66  static __always_inline void instrument_atomic_read(const volatile void *v, size_t size)
    [all …]
circ_buf.h
    16  #define CIRC_CNT(head,tail,size) (((head) - (tail)) & ((size)-1))
    21  #define CIRC_SPACE(head,tail,size) CIRC_CNT((tail),((head)+1),(size))
    26  #define CIRC_CNT_TO_END(head,tail,size) \
    27  ({int end = (size) - (tail); \
    28  int n = ((head) + end) & ((size)-1); \
    32  #define CIRC_SPACE_TO_END(head,tail,size) \
    33  ({int end = (size) - 1 - (head); \
    34  int n = (end + (tail)) & ((size)-1); \
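The CIRC_* macros above assume a power-of-two buffer size and rely on index masking; a minimal single-producer/single-consumer sketch built on them (RING_SIZE, struct ring and the helpers are illustrative, and the memory barriers a real cross-CPU ring needs are omitted):

#include <linux/circ_buf.h>

#define RING_SIZE 256                    /* must be a power of two */

struct ring {
        char buf[RING_SIZE];
        int head;                        /* written by the producer */
        int tail;                        /* written by the consumer */
};

static int ring_put(struct ring *r, char c)
{
        if (CIRC_SPACE(r->head, r->tail, RING_SIZE) == 0)
                return -1;               /* full */
        r->buf[r->head] = c;
        r->head = (r->head + 1) & (RING_SIZE - 1);
        return 0;
}

static int ring_get(struct ring *r, char *c)
{
        if (CIRC_CNT(r->head, r->tail, RING_SIZE) == 0)
                return -1;               /* empty */
        *c = r->buf[r->tail];
        r->tail = (r->tail + 1) & (RING_SIZE - 1);
        return 0;
}

The masking in the macros only works because size is a power of two, which is why RING_SIZE is chosen that way.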
fortify-string.h
    20  # define fortify_panic(func, write, avail, size, retfail) \
    21  __fortify_panic(FORTIFY_REASON(func, write), avail, size)
    55  void __fortify_report(const u8 reason, const size_t avail, const size_t size);
    56  void __fortify_panic(const u8 reason, const size_t avail, const size_t size) __cold __noreturn;
    81  extern void *__underlying_memset(void *p, int c, __kernel_size_t size) __RENAME(memset);
    82  extern void *__underlying_memmove(void *p, const void *q, __kernel_size_t size) __RENAME(memmove);
    83  extern void *__underlying_memcpy(void *p, const void *q, __kernel_size_t size) __RENAME(memcpy);
    85  extern void *__underlying_memset(void *p, int c, __kernel_size_t size) __RENAME(__asan_memset);
    86  extern void *__underlying_memmove(void *p, const void *q, __kernel_size_t size) __RENAME(__asan_mem…
    87  extern void *__underlying_memcpy(void *p, const void *q, __kernel_size_t size) __RENAME(__asan_memc…
    [all …]
genalloc.h
    49  unsigned long size,
    102  phys_addr_t phys, size_t size, int nid)
    104  return gen_pool_add_owner(pool, addr, phys, size, nid, NULL);
    120  size_t size, int nid)
    122  return gen_pool_add_virt(pool, addr, -1, size, nid);
    125  unsigned long gen_pool_alloc_algo_owner(struct gen_pool *pool, size_t size,
    129  size_t size, void **owner)
    131  return gen_pool_alloc_algo_owner(pool, size, pool->algo, pool->data,
    136  size_t size, genpool_algo_t algo, void *data)
    138  return gen_pool_alloc_algo_owner(pool, size, algo, data, NULL);
    [all …]
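gen_pool_add() and the gen_pool_alloc_* helpers above are typically used to sub-allocate a fixed region such as on-chip SRAM; a hedged sketch (the region, its size and the order-5 granule are assumptions, not from genalloc.h):

#include <linux/errno.h>
#include <linux/genalloc.h>

/* Illustrative only: carve small allocations out of one memory region. */
static struct gen_pool *sram_pool;

static int sram_pool_init(void *sram_virt, size_t sram_size)
{
        /* Minimum allocation granule 2^5 = 32 bytes; -1 = no NUMA node. */
        sram_pool = gen_pool_create(5, -1);
        if (!sram_pool)
                return -ENOMEM;

        /* Hand the whole region to the pool (wraps gen_pool_add_virt()). */
        return gen_pool_add(sram_pool, (unsigned long)sram_virt, sram_size, -1);
}

static void *sram_alloc(size_t size)
{
        /* Returns 0 on failure, otherwise an address inside the region. */
        return (void *)gen_pool_alloc(sram_pool, size);
}

static void sram_free(void *addr, size_t size)
{
        gen_pool_free(sram_pool, (unsigned long)addr, size);
}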
slab.h
    322  __kmem_cache_create(const char *name, unsigned int size, unsigned int align,
    330  return __kmem_cache_create_args(name, size, &kmem_args, flags);
    352  kmem_cache_create_usercopy(const char *name, unsigned int size,
    364  return __kmem_cache_create_args(name, size, &kmem_args, flags);
    369  __kmem_cache_default_args(const char *name, unsigned int size,
    379  return __kmem_cache_create_args(name, size, &kmem_default_args, flags);
    657  static __always_inline unsigned int __kmalloc_index(size_t size,
    660  if (!size)
    663  if (size <= KMALLOC_MIN_SIZE)
    666  if (KMALLOC_MIN_SIZE <= 32 && size > 64 && size <= 96)
    [all …]
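The __kmem_cache_create*() helpers above sit behind the usual kmem_cache_create() front end; a minimal caller-side sketch (struct foo and the cache name are illustrative):

#include <linux/errno.h>
#include <linux/slab.h>

/* Illustrative only: a dedicated slab cache for one small object type. */
struct foo {
        int id;
        char name[32];
};

static struct kmem_cache *foo_cache;

static int foo_cache_init(void)
{
        /* Dispatches to the __kmem_cache_create*() helpers listed above. */
        foo_cache = kmem_cache_create("foo_cache", sizeof(struct foo),
                                      0, SLAB_HWCACHE_ALIGN, NULL);
        return foo_cache ? 0 : -ENOMEM;
}

static struct foo *foo_alloc(void)
{
        return kmem_cache_alloc(foo_cache, GFP_KERNEL);
}

static void foo_free(struct foo *f)
{
        kmem_cache_free(foo_cache, f);
}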
dma-map-ops.h
    18  void *(*alloc)(struct device *dev, size_t size,
    21  void (*free)(struct device *dev, size_t size, void *vaddr,
    23  struct page *(*alloc_pages_op)(struct device *dev, size_t size,
    26  void (*free_pages)(struct device *dev, size_t size, struct page *vaddr,
    32  void *cpu_addr, dma_addr_t dma_addr, size_t size,
    36  unsigned long offset, size_t size,
    39  size_t size, enum dma_data_direction dir,
    51  size_t size, enum dma_data_direction dir,
    54  size_t size, enum dma_data_direction dir,
    57  size_t size, enum dma_data_direction dir);
    [all …]
kmemleak.h
    18  extern void kmemleak_alloc(const void *ptr, size_t size, int min_count,
    20  extern void kmemleak_alloc_percpu(const void __percpu *ptr, size_t size,
    22  extern void kmemleak_vmalloc(const struct vm_struct *area, size_t size,
    25  extern void kmemleak_free_part(const void *ptr, size_t size) __ref;
    30  extern void kmemleak_scan_area(const void *ptr, size_t size, gfp_t gfp) __ref;
    32  extern void kmemleak_alloc_phys(phys_addr_t phys, size_t size,
    34  extern void kmemleak_free_part_phys(phys_addr_t phys, size_t size) __ref;
    37  static inline void kmemleak_alloc_recursive(const void *ptr, size_t size,
    42  kmemleak_alloc(ptr, size, min_count, gfp);
    61  static inline void kmemleak_alloc(const void *ptr, size_t size, int min_count,
    [all …]
kcsan-checks.h
    37  void __kcsan_check_access(const volatile void *ptr, size_t size, int type);
    143  size_t size;
    173  kcsan_begin_scoped_access(const volatile void *ptr, size_t size, int type,
    189  static inline void __kcsan_check_access(const volatile void *ptr, size_t size,
    209  kcsan_begin_scoped_access(const volatile void *ptr, size_t size, int type,
    229  static inline void kcsan_check_access(const volatile void *ptr, size_t size,
    276  #define __kcsan_check_read(ptr, size) __kcsan_check_access(ptr, size, 0)
    284  #define __kcsan_check_write(ptr, size) \
    285  __kcsan_check_access(ptr, size, KCSAN_ACCESS_WRITE)
    293  #define __kcsan_check_read_write(ptr, size) \
    [all …]
nfs.h
    30  unsigned short size;
    40  return a->size != b->size || memcmp(a->data, b->data, a->size) != 0;
    45  target->size = source->size;
    46  memcpy(target->data, source->data, source->size);
    67  return ~crc32_le(0xFFFFFFFF, &fh->data[0], fh->size);
io-mapping.h
    25  unsigned long size;
    44  unsigned long size)
    48  if (iomap_create_wc(base, size, &prot))
    52  iomap->size = size;
    60  iomap_free(mapping->base, mapping->size);
    70  BUG_ON(offset >= mapping->size);
    96  BUG_ON(offset >= mapping->size);
    109  unsigned long size)
    113  BUG_ON(offset >= mapping->size);
    116  return ioremap_wc(phys_addr, size);
    [all …]
kasan-checks.h
    19  bool __kasan_check_read(const volatile void *p, unsigned int size);
    20  bool __kasan_check_write(const volatile void *p, unsigned int size);
    22  static inline bool __kasan_check_read(const volatile void *p, unsigned int size)
    26  static inline bool __kasan_check_write(const volatile void *p, unsigned int size)
    40  static inline bool kasan_check_read(const volatile void *p, unsigned int size)
    44  static inline bool kasan_check_write(const volatile void *p, unsigned int size)
posix_acl_xattr.h
    25  posix_acl_xattr_count(size_t size)
    27  if (size < sizeof(struct posix_acl_xattr_header))
    29  size -= sizeof(struct posix_acl_xattr_header);
    30  if (size % sizeof(struct posix_acl_xattr_entry))
    32  return size / sizeof(struct posix_acl_xattr_entry);
    37  const void *value, size_t size);
    41  size_t size)
    48  const struct posix_acl *acl, void *buffer, size_t size);
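posix_acl_xattr_count() above decodes a buffer laid out as one header followed by fixed-size entries; a small sketch of the inverse size calculation (the helper name is illustrative, not from this header):

#include <linux/posix_acl_xattr.h>

/* Illustrative only: buffer size needed for "count" ACL entries,
 * i.e. the inverse of posix_acl_xattr_count() shown above. */
static size_t example_acl_xattr_size(unsigned int count)
{
        return sizeof(struct posix_acl_xattr_header) +
               count * sizeof(struct posix_acl_xattr_entry);
}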
seq_buf.h
    23  size_t size;
    30  .size = SIZE, \
    36  if (s->size)
    41  seq_buf_init(struct seq_buf *s, char *buf, unsigned int size)
    44  s->size = size;
    55  return s->len > s->size;
    61  s->len = s->size + 1;
    73  return s->size - s->len;
    79  return min(s->len, s->size);
    100  if (WARN_ON(s->size == 0))
    [all …]
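seq_buf_init() and the size/len accessors above implement bounded string building; a hedged usage sketch (the 128-byte buffer and the formatted fields are illustrative):

#include <linux/printk.h>
#include <linux/seq_buf.h>

/* Illustrative only: accumulate formatted text in a fixed-size buffer and
 * detect overflow via the size/len bookkeeping shown above. */
static void seq_buf_example(void)
{
        char buf[128];
        struct seq_buf s;

        seq_buf_init(&s, buf, sizeof(buf));

        seq_buf_printf(&s, "threshold=%d ", 42);
        seq_buf_printf(&s, "state=%s\n", "ok");

        if (seq_buf_has_overflowed(&s))
                return;                  /* buffer was too small */

        /* seq_buf_used() is clamped to the buffer size. */
        pr_info("%.*s", (int)seq_buf_used(&s), buf);
}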
kasan.h
    69  int kasan_add_zero_shadow(void *start, unsigned long size);
    70  void kasan_remove_zero_shadow(void *start, unsigned long size);
    80  static inline int kasan_add_zero_shadow(void *start, unsigned long size)
    85  unsigned long size)
    105  void __kasan_unpoison_range(const void *addr, size_t size);
    106  static __always_inline void kasan_unpoison_range(const void *addr, size_t size)
    109  __kasan_unpoison_range(addr, size);
    255  size_t size, gfp_t flags);
    257  const void *object, size_t size, gfp_t flags)
    260  return __kasan_kmalloc(s, object, size, flags);
    [all …]
sockptr.h
    45  size_t offset, size_t size)
    48  return copy_from_user(dst, src.user + offset, size);
    49  memcpy(dst, src.kernel + offset, size);
    57  static inline int copy_from_sockptr(void *dst, sockptr_t src, size_t size)
    59  return copy_from_sockptr_offset(dst, src, 0, size);
    88  size_t size = min(ksize, usize);
    89  size_t rest = max(ksize, usize) - size;
    92  return copy_struct_from_user(dst, ksize, src.user, size);
    95  memset(dst + size, 0, rest);
    104  memcpy(dst, src.kernel, size);
    [all …]
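copy_from_sockptr() above lets a setsockopt()-style handler accept either a user or a kernel pointer; a hedged sketch (the option value and handler name are illustrative):

#include <linux/errno.h>
#include <linux/sockptr.h>

/* Illustrative only: parse an integer option in a setsockopt()-style
 * handler, where optval may point into user or kernel space. */
static int example_set_option(sockptr_t optval, unsigned int optlen, int *out)
{
        int val;

        if (optlen < sizeof(val))
                return -EINVAL;

        /* Dispatches to copy_from_user() or memcpy() as shown above. */
        if (copy_from_sockptr(&val, optval, sizeof(val)))
                return -EFAULT;

        *out = val;
        return 0;
}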
iommu-dma.h
    25  unsigned long offset, size_t size, enum dma_data_direction dir,
    28  size_t size, enum dma_data_direction dir, unsigned long attrs);
    33  void *iommu_dma_alloc(struct device *dev, size_t size, dma_addr_t *handle,
    36  void *cpu_addr, dma_addr_t dma_addr, size_t size,
    39  void *cpu_addr, dma_addr_t dma_addr, size_t size,
    44  void iommu_dma_free(struct device *dev, size_t size, void *cpu_addr,
    47  size_t size, enum dma_data_direction dir, unsigned long attrs);
    49  size_t size, enum dma_data_direction dir, unsigned long attrs);
    50  struct sg_table *iommu_dma_alloc_noncontiguous(struct device *dev, size_t size,
    52  void iommu_dma_free_noncontiguous(struct device *dev, size_t size,
    [all …]
memblock.h
    64  phys_addr_t size;
    114  int memblock_add_node(phys_addr_t base, phys_addr_t size, int nid,
    116  int memblock_add(phys_addr_t base, phys_addr_t size);
    117  int memblock_remove(phys_addr_t base, phys_addr_t size);
    118  int memblock_phys_free(phys_addr_t base, phys_addr_t size);
    119  int memblock_reserve(phys_addr_t base, phys_addr_t size);
    121  int memblock_physmem_add(phys_addr_t base, phys_addr_t size);
    127  phys_addr_t base, phys_addr_t size);
    129  int memblock_mark_hotplug(phys_addr_t base, phys_addr_t size);
    130  int memblock_clear_hotplug(phys_addr_t base, phys_addr_t size);
    [all …]
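The memblock calls above all take a physical (base, size) pair; a hedged early-boot sketch (the addresses and sizes are invented for illustration):

#include <linux/init.h>
#include <linux/memblock.h>
#include <linux/printk.h>
#include <linux/sizes.h>

/* Illustrative only: typical early-boot use of the (base, size) API. */
static void __init example_memblock_setup(void)
{
        void *buf;

        /* Tell memblock about a bank of usable RAM... */
        memblock_add(0x80000000UL, SZ_256M);

        /* ...and carve out a firmware region that must not be allocated. */
        memblock_reserve(0x80000000UL, SZ_1M);

        /* Early allocations then come from the remaining free ranges. */
        buf = memblock_alloc(SZ_64K, SZ_4K);
        if (!buf)
                pr_warn("example: early allocation failed\n");
}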
dma-mapping.h
    121  size_t offset, size_t size, enum dma_data_direction dir,
    123  void dma_unmap_page_attrs(struct device *dev, dma_addr_t addr, size_t size,
    133  size_t size, enum dma_data_direction dir, unsigned long attrs);
    134  void dma_unmap_resource(struct device *dev, dma_addr_t addr, size_t size,
    136  void *dma_alloc_attrs(struct device *dev, size_t size, dma_addr_t *dma_handle,
    138  void dma_free_attrs(struct device *dev, size_t size, void *cpu_addr,
    140  void *dmam_alloc_attrs(struct device *dev, size_t size, dma_addr_t *dma_handle,
    142  void dmam_free_coherent(struct device *dev, size_t size, void *vaddr,
    145  void *cpu_addr, dma_addr_t dma_addr, size_t size,
    148  void *cpu_addr, dma_addr_t dma_addr, size_t size,
    [all …]
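dma_alloc_attrs()/dma_free_attrs() above are the size-taking entry points behind the common dma_alloc_coherent() wrapper; a hedged driver-side sketch (the ring-buffer naming is illustrative):

#include <linux/dma-mapping.h>
#include <linux/gfp.h>

/* Illustrative only: allocate and free a coherent DMA buffer using the
 * size-taking entry points listed above. */
static void *example_alloc_ring(struct device *dev, size_t ring_size,
                                dma_addr_t *ring_dma)
{
        /* Equivalent to dma_alloc_coherent(dev, ring_size, ring_dma, GFP_KERNEL). */
        return dma_alloc_attrs(dev, ring_size, ring_dma, GFP_KERNEL, 0);
}

static void example_free_ring(struct device *dev, size_t ring_size,
                              void *ring, dma_addr_t ring_dma)
{
        dma_free_attrs(dev, ring_size, ring, ring_dma, 0);
}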
regset.h
    25  static inline int membuf_zero(struct membuf *s, size_t size)
    28  if (size > s->left)
    29  size = s->left;
    30  memset(s->p, 0, size);
    31  s->p += size;
    32  s->left -= size;
    37  static inline int membuf_write(struct membuf *s, const void *v, size_t size)
    40  if (size > s->left)
    41  size = s->left;
    42  memcpy(s->p, v, size);
    [all …]
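membuf_write() and membuf_zero() above clamp every copy to the space left in the destination; a hedged sketch of filling a membuf the way a regset get handler typically does (struct example_regs is made up):

#include <linux/regset.h>

/* Illustrative only: copy a register block into a caller-provided membuf
 * and zero-fill whatever room remains. */
struct example_regs {
        unsigned long gpr[16];
        unsigned long pc;
};

static int example_fill_regs(struct membuf to, const struct example_regs *regs)
{
        /* membuf_write() silently clamps to the space left in "to". */
        membuf_write(&to, regs, sizeof(*regs));

        /* Pad the remainder of the destination with zeroes. */
        return membuf_zero(&to, to.left);
}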
/include/crypto/
utils.h
    14  void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int size);
    16  static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
    19  __builtin_constant_p(size) &&
    20  (size % sizeof(unsigned long)) == 0) {
    25  while (size > 0) {
    28  size -= sizeof(unsigned long);
    31  __crypto_xor(dst, dst, src, size);
    36  unsigned int size)
    39  __builtin_constant_p(size) &&
    40  (size % sizeof(unsigned long)) == 0) {
    [all …]
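crypto_xor() and crypto_xor_cpy() above XOR byte strings in place or into a separate destination; a short hedged sketch of the usual keystream application (function and buffer names are illustrative):

#include <crypto/utils.h>
#include <linux/types.h>

/* Illustrative only: apply a keystream to a data block, as a CTR- or
 * stream-cipher implementation would. */
static void apply_keystream(u8 *block, const u8 *keystream, unsigned int len)
{
        /* In place: block ^= keystream (word-at-a-time when the
         * compile-time size allows, as shown above). */
        crypto_xor(block, keystream, len);
}

static void apply_keystream_cpy(u8 *dst, const u8 *src, const u8 *keystream,
                                unsigned int len)
{
        /* Out of place: dst = src ^ keystream. */
        crypto_xor_cpy(dst, src, keystream, len);
}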
/include/asm-generic/
getorder.h
    29  static __always_inline __attribute_const__ int get_order(unsigned long size)
    31  if (__builtin_constant_p(size)) {
    32  if (!size)
    35  if (size < (1UL << PAGE_SHIFT))
    38  return ilog2((size) - 1) - PAGE_SHIFT + 1;
    41  size--;
    42  size >>= PAGE_SHIFT;
    44  return fls(size);
    46  return fls64(size);
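get_order() above rounds a byte count up to a power-of-two number of pages and returns the exponent; a hedged sketch of the usual pairing with the page allocator (the sizes are illustrative; with 4 KiB pages, 20 KiB maps to order 3, i.e. 8 pages):

#include <linux/gfp.h>

/* Illustrative only: allocate a physically contiguous buffer whose size
 * is not a power of two. get_order() normally arrives via asm/page.h. */
static unsigned long example_alloc(size_t bytes)
{
        int order = get_order(bytes);

        return __get_free_pages(GFP_KERNEL, order);
}

static void example_free(unsigned long addr, size_t bytes)
{
        free_pages(addr, get_order(bytes));
}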
early_ioremap.h
    12  unsigned long size);
    14  unsigned long size);
    16  unsigned long size);
    18  unsigned long size, unsigned long prot_val);
    19  extern void early_iounmap(void __iomem *addr, unsigned long size);
    20  extern void early_memunmap(void *addr, unsigned long size);
    39  unsigned long size);
/include/uapi/asm-generic/
ioctl.h
    69  #define _IOC(dir,type,nr,size) \
    73  ((size) << _IOC_SIZESHIFT))
    86  #define _IOR(type,nr,size) _IOC(_IOC_READ,(type),(nr),(_IOC_TYPECHECK(size)))
    87  #define _IOW(type,nr,size) _IOC(_IOC_WRITE,(type),(nr),(_IOC_TYPECHECK(size)))
    88  #define _IOWR(type,nr,size) _IOC(_IOC_READ|_IOC_WRITE,(type),(nr),(_IOC_TYPECHECK(size)))
    89  #define _IOR_BAD(type,nr,size) _IOC(_IOC_READ,(type),(nr),sizeof(size))
    90  #define _IOW_BAD(type,nr,size) _IOC(_IOC_WRITE,(type),(nr),sizeof(size))
    91  #define _IOWR_BAD(type,nr,size) _IOC(_IOC_READ|_IOC_WRITE,(type),(nr),sizeof(size))
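_IOR()/_IOW()/_IOWR() above pack the transfer direction, a driver-chosen magic byte, a command number and sizeof(the argument type) into a single ioctl number; a hedged sketch of a driver UAPI header using them (the 'E' magic, command numbers and struct are invented):

#include <linux/ioctl.h>
#include <linux/types.h>

/* Illustrative only: ioctl definitions for a hypothetical device. */
struct example_config {
        __u32 rate;
        __u32 flags;
};

#define EXAMPLE_IOC_MAGIC       'E'

/* Userspace reads the current config; the size field encodes
 * sizeof(struct example_config). */
#define EXAMPLE_IOC_GET_CONFIG  _IOR(EXAMPLE_IOC_MAGIC, 0x01, struct example_config)

/* Userspace writes a new config. */
#define EXAMPLE_IOC_SET_CONFIG  _IOW(EXAMPLE_IOC_MAGIC, 0x02, struct example_config)

/* Bidirectional: pass a value in, get a result back in the same struct. */
#define EXAMPLE_IOC_XFER        _IOWR(EXAMPLE_IOC_MAGIC, 0x03, struct example_config)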
/include/trace/events/
intel-sst.h
    106  TP_PROTO(unsigned int size),
    108  TP_ARGS(size),
    111  __field( unsigned int, size )
    115  __entry->size = size;
    118  TP_printk("Mailbox bytes 0x%8.8x", (unsigned int)__entry->size)
    123  TP_PROTO(unsigned int size),
    125  TP_ARGS(size)
    131  TP_PROTO(unsigned int size),
    133  TP_ARGS(size)
    139  TP_PROTO(unsigned int size),
    [all …]