/include/linux/
slub_def.h:

    int size;	/* The size of an object including meta data */

    static __always_inline int kmalloc_index(size_t size)
        if (!size)
        if (size <= KMALLOC_MIN_SIZE)
        if (KMALLOC_MIN_SIZE <= 32 && size > 64 && size <= 96)
        if (KMALLOC_MIN_SIZE <= 64 && size > 128 && size <= 192)
        if (size <= 8) return 3;
        if (size <= 16) return 4;
        if (size <= 32) return 5;
        if (size <= 64) return 6;
    […]

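kmalloc_index() maps a requested size to the index of the backing kmalloc cache. A worked example may make the special 96- and 192-byte caches easier to follow; the concrete numbers assume KMALLOC_MIN_SIZE == 8 and KMALLOC_SHIFT_LOW == 3 (the common SLUB configuration), which is not shown in the excerpt above.

    /*
     * Assumed configuration: KMALLOC_MIN_SIZE == 8, KMALLOC_SHIFT_LOW == 3.
     *   kmalloc_index(8)   -> 3   (kmalloc-8)
     *   kmalloc_index(72)  -> 1   (the odd-sized kmalloc-96 cache)
     *   kmalloc_index(100) -> 7   (rounded up to kmalloc-128)
     *   kmalloc_index(150) -> 2   (the odd-sized kmalloc-192 cache)
     * For constant sizes the compiler folds this, so a kmalloc(100, GFP_KERNEL)
     * in the caller turns into a direct allocation from the 128-byte cache.
     */
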
circ_buf.h:

    #define CIRC_CNT(head,tail,size) (((head) - (tail)) & ((size)-1))
    #define CIRC_SPACE(head,tail,size) CIRC_CNT((tail),((head)+1),(size))
    #define CIRC_CNT_TO_END(head,tail,size) \
        ({int end = (size) - (tail); \
          int n = ((head) + end) & ((size)-1); \
    #define CIRC_SPACE_TO_END(head,tail,size) \
        ({int end = (size) - 1 - (head); \
          int n = (end + (tail)) & ((size)-1); \

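These macros only work when size is a power of two, since fill count and free space are computed with a mask rather than a modulo. A minimal single-producer/single-consumer sketch follows; the ring, sizes, and helper names are hypothetical, and the memory barriers a real lockless ring needs are omitted for brevity.

    #include <linux/circ_buf.h>
    #include <linux/errno.h>

    #define MY_RING_SIZE 256                /* must be a power of two */

    static struct circ_buf my_ring;         /* .buf points at MY_RING_SIZE bytes */

    static int my_ring_put(char c)
    {
            if (CIRC_SPACE(my_ring.head, my_ring.tail, MY_RING_SIZE) < 1)
                    return -EAGAIN;         /* ring full */
            my_ring.buf[my_ring.head] = c;
            my_ring.head = (my_ring.head + 1) & (MY_RING_SIZE - 1);
            return 0;
    }

    static int my_ring_get(char *c)
    {
            if (CIRC_CNT(my_ring.head, my_ring.tail, MY_RING_SIZE) < 1)
                    return -EAGAIN;         /* ring empty */
            *c = my_ring.buf[my_ring.tail];
            my_ring.tail = (my_ring.tail + 1) & (MY_RING_SIZE - 1);
            return 0;
    }
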
vmalloc.h:

    unsigned long size;
    extern void *vmalloc(unsigned long size);
    extern void *vzalloc(unsigned long size);
    extern void *vmalloc_user(unsigned long size);
    extern void *vmalloc_node(unsigned long size, int node);
    extern void *vzalloc_node(unsigned long size, int node);
    extern void *vmalloc_exec(unsigned long size);
    extern void *vmalloc_32(unsigned long size);
    extern void *vmalloc_32_user(unsigned long size);
    extern void *__vmalloc(unsigned long size, gfp_t gfp_mask, pgprot_t prot);
    […]

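The vmalloc() family returns memory that is virtually contiguous but may be physically scattered, so it suits large allocations that only the CPU ever touches. A minimal sketch under that assumption; the table and helper names are invented, and vfree() is assumed from the same header even though it is not among the matched lines above.

    #include <linux/vmalloc.h>
    #include <linux/types.h>
    #include <linux/errno.h>

    static u32 *big_table;

    static int big_table_alloc(unsigned long nentries)
    {
            /* zeroed, virtually contiguous, no physically contiguous pages needed */
            big_table = vzalloc(nentries * sizeof(*big_table));
            return big_table ? 0 : -ENOMEM;
    }

    static void big_table_free(void)
    {
            vfree(big_table);
            big_table = NULL;
    }
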
slab.h:

    static inline void *kmalloc_array(size_t n, size_t size, gfp_t flags)
        if (size != 0 && n > ULONG_MAX / size)
        return __kmalloc(n * size, flags);
    static inline void *kcalloc(size_t n, size_t size, gfp_t flags)
        return kmalloc_array(n, size, flags | __GFP_ZERO);
    static inline void *kmalloc_node(size_t size, gfp_t flags, int node)
        return kmalloc(size, flags);
    static inline void *__kmalloc_node(size_t size, gfp_t flags, int node)
        return __kmalloc(size, flags);
    #define kmalloc_track_caller(size, flags) \
    […]

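kmalloc_array() refuses requests where n * size would overflow (returning NULL instead of silently wrapping to a small allocation), and kcalloc() adds __GFP_ZERO on top. A small sketch; the descriptor type and helper name are hypothetical.

    #include <linux/slab.h>
    #include <linux/types.h>

    struct foo_desc {
            u64 addr;
            u32 len;
    };

    static struct foo_desc *foo_alloc_descs(size_t n)
    {
            /* overflow-checked and zero-initialised; NULL if n is absurdly large */
            return kcalloc(n, sizeof(struct foo_desc), GFP_KERNEL);
    }
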
kmemleak.h:

    extern void kmemleak_alloc(const void *ptr, size_t size, int min_count,
    extern void kmemleak_alloc_percpu(const void __percpu *ptr, size_t size) __ref;
    extern void kmemleak_free_part(const void *ptr, size_t size) __ref;
                               size_t size) __ref;
    extern void kmemleak_scan_area(const void *ptr, size_t size, gfp_t gfp) __ref;
    static inline void kmemleak_alloc_recursive(const void *ptr, size_t size,
        kmemleak_alloc(ptr, size, min_count, gfp);
    static inline void kmemleak_alloc(const void *ptr, size_t size, int min_count,
    static inline void kmemleak_alloc_recursive(const void *ptr, size_t size,
    static inline void kmemleak_alloc_percpu(const void __percpu *ptr, size_t size)
    […]

posix_acl_xattr.h:

    posix_acl_xattr_count(size_t size)
        if (size < sizeof(posix_acl_xattr_header))
        size -= sizeof(posix_acl_xattr_header);
        if (size % sizeof(posix_acl_xattr_entry))
        return size / sizeof(posix_acl_xattr_entry);

    struct posix_acl *posix_acl_from_xattr(const void *value, size_t size);
    int posix_acl_to_xattr(const struct posix_acl *acl, void *buffer, size_t size);

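posix_acl_xattr_count() derives the number of ACL entries from the raw xattr value length: strip the header, then require the remainder to be an exact multiple of the entry size. A worked example, assuming the usual layout of a 4-byte header followed by 8-byte entries (the struct sizes are not shown in the excerpt):

    /*
     * Assumed sizes: sizeof(posix_acl_xattr_header) == 4,
     *                sizeof(posix_acl_xattr_entry)  == 8.
     *   size == 28  ->  (28 - 4) / 8 = 3 ACL entries
     *   size == 26  ->  (26 - 4) % 8 != 0, treated as a malformed value
     *   size ==  2  ->  shorter than the header, also rejected
     */
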
slab_def.h:

    void *__kmalloc(size_t size, gfp_t flags);
    extern void *kmem_cache_alloc_trace(size_t size,
    kmem_cache_alloc_trace(size_t size, struct kmem_cache *cachep, gfp_t flags)
    static __always_inline void *kmalloc(size_t size, gfp_t flags)
        if (__builtin_constant_p(size)) {
            if (!size)
            if (size <= x) \
        ret = kmem_cache_alloc_trace(size, cachep, flags);
        return __kmalloc(size, flags);
    extern void *__kmalloc_node(size_t size, gfp_t flags, int node);
    […]

slob_def.h:

    void *__kmalloc_node(size_t size, gfp_t flags, int node);
    static __always_inline void *kmalloc_node(size_t size, gfp_t flags, int node)
        return __kmalloc_node(size, flags, node);
    static __always_inline void *kmalloc(size_t size, gfp_t flags)
        return __kmalloc_node(size, flags, -1);
    static __always_inline void *__kmalloc(size_t size, gfp_t flags)
        return kmalloc(size, flags);

bitops.h:

    #define for_each_set_bit(bit, addr, size) \
        for ((bit) = find_first_bit((addr), (size)); \
             (bit) < (size); \
             (bit) = find_next_bit((addr), (size), (bit) + 1))

    #define for_each_set_bit_from(bit, addr, size) \
        for ((bit) = find_next_bit((addr), (size), (bit)); \
             (bit) < (size); \
             (bit) = find_next_bit((addr), (size), (bit) + 1))

    #define for_each_clear_bit(bit, addr, size) \
        for ((bit) = find_first_zero_bit((addr), (size)); \
    […]

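In these iterators, size is the bitmap length in bits, not bytes. A small sketch of the common pattern; the bitmap and handler are hypothetical.

    #include <linux/bitops.h>
    #include <linux/bitmap.h>

    #define NR_MY_EVENTS 64

    static DECLARE_BITMAP(pending_events, NR_MY_EVENTS);

    static void dispatch_pending(void (*handle)(unsigned long))
    {
            unsigned long bit;

            /* visit every bit currently set in pending_events[] */
            for_each_set_bit(bit, pending_events, NR_MY_EVENTS)
                    handle(bit);
    }
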
bootmem.h:

                             unsigned long size);
    extern void free_bootmem(unsigned long addr, unsigned long size);
    extern void free_bootmem_late(unsigned long addr, unsigned long size);
                             unsigned long size,
                             unsigned long size,
    extern void *__alloc_bootmem(unsigned long size,
    extern void *__alloc_bootmem_nopanic(unsigned long size,
                             unsigned long size,
                             unsigned long size,
                             unsigned long size,
    […]

iommu-helper.h:

    static inline unsigned long iommu_device_max_index(unsigned long size,
        if (size + offset > dma_mask)
        return size;
    extern unsigned long iommu_area_alloc(unsigned long *map, unsigned long size,
        unsigned long size = (addr & (io_page_size - 1)) + len;	/* in iommu_num_pages() */
        return DIV_ROUND_UP(size, io_page_size);

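iommu_num_pages() counts how many IOMMU pages a DMA buffer spans, including the partial page implied by the buffer's offset within its first page. A worked example with an assumed 4 KiB IOMMU page size:

    /*
     * io_page_size = 4096:
     *   addr = 0x10000c00, len = 0x1000
     *   size = (addr & 0xfff) + len = 0xc00 + 0x1000 = 0x1c00
     *   DIV_ROUND_UP(0x1c00, 0x1000) = 2
     * A one-page transfer that straddles a page boundary therefore
     * consumes two IOMMU mapping pages.
     */
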
tty_flip.h:

    extern int tty_buffer_request_room(struct tty_struct *tty, size_t size);
    …p_string_flags(struct tty_struct *tty, const unsigned char *chars, const char *flags, size_t size);
    …flip_string_fixed_flag(struct tty_struct *tty, const unsigned char *chars, char flag, size_t size);
    extern int tty_prepare_flip_string(struct tty_struct *tty, unsigned char **chars, size_t size);
    …repare_flip_string_flags(struct tty_struct *tty, unsigned char **chars, char **flags, size_t size);
        if (tb && tb->used < tb->size) {	/* in tty_insert_flip_char() */
    … inline int tty_insert_flip_string(struct tty_struct *tty, const unsigned char *chars, size_t size)
        return tty_insert_flip_string_fixed_flag(tty, chars, TTY_NORMAL, size);

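tty_insert_flip_string() copies received characters into the tty's flip buffer with a TTY_NORMAL flag per byte; the driver then pushes the buffer toward the line discipline. A sketch of the usual receive path for this (tty_struct-based) era of the API; the function name is invented and tty_flip_buffer_push() is assumed to be declared in the same header.

    #include <linux/tty.h>
    #include <linux/tty_flip.h>

    static void my_uart_receive(struct tty_struct *tty,
                                const unsigned char *buf, size_t len)
    {
            int copied;

            copied = tty_insert_flip_string(tty, buf, len);	/* may copy < len */
            if (copied)
                    tty_flip_buffer_push(tty);
    }
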
dma-debug.h:

                               size_t offset, size_t size,
                               size_t size, int direction, bool map_single);
    extern void debug_dma_alloc_coherent(struct device *dev, size_t size,
    extern void debug_dma_free_coherent(struct device *dev, size_t size,
                               dma_addr_t dma_handle, size_t size,
                               size_t size, int direction);
                               size_t size,
                               size_t size, int direction);
        size_t offset, size_t size,		/* in debug_dma_map_page() */
        size_t size, int direction,		/* in debug_dma_unmap_page() */
    […]

uid_stat.h:

    int uid_stat_tcp_snd(uid_t uid, int size);
    int uid_stat_tcp_rcv(uid_t uid, int size);
    #define uid_stat_tcp_snd(uid, size) do {} while (0);
    #define uid_stat_tcp_rcv(uid, size) do {} while (0);

memblock.h:

    phys_addr_t size;
                             phys_addr_t size, phys_addr_t align, int nid);
                             phys_addr_t size, phys_addr_t align);
    int memblock_add_node(phys_addr_t base, phys_addr_t size, int nid);
    int memblock_add(phys_addr_t base, phys_addr_t size);
    int memblock_remove(phys_addr_t base, phys_addr_t size);
    int memblock_free(phys_addr_t base, phys_addr_t size);
    int memblock_reserve(phys_addr_t base, phys_addr_t size);
    int memblock_set_node(phys_addr_t base, phys_addr_t size, int nid);
    phys_addr_t memblock_alloc_nid(phys_addr_t size, phys_addr_t align, int nid);
    […]

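memblock deals in physical addresses and is only usable early in boot, before the page allocator takes over. A minimal sketch using memblock_reserve()/memblock_free() from the excerpt; the framebuffer scenario and helper names are invented.

    #include <linux/memblock.h>
    #include <linux/init.h>
    #include <linux/kernel.h>

    #define MY_FB_SIZE (16UL << 20)	/* 16 MiB */

    static void __init my_fb_reserve(phys_addr_t base)
    {
            /* keep the buddy allocator away from the firmware framebuffer */
            if (memblock_reserve(base, MY_FB_SIZE))
                    pr_warn("framebuffer reservation failed\n");
    }

    static void __init my_fb_release(phys_addr_t base)
    {
            /* nothing claimed it, hand the range back */
            memblock_free(base, MY_FB_SIZE);
    }
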
dma-mapping.h:

    void* (*alloc)(struct device *dev, size_t size,
    void (*free)(struct device *dev, size_t size,
                   unsigned long offset, size_t size,
                   size_t size, enum dma_data_direction dir,
                   dma_addr_t dma_handle, size_t size,
                   dma_addr_t dma_handle, size_t size,
                   unsigned int size)			/* in dma_set_max_seg_size() */
        dev->dma_parms->max_segment_size = size;
    static inline void *dma_zalloc_coherent(struct device *dev, size_t size,
        void *ret = dma_alloc_coherent(dev, size, dma_handle, flag);
    […]

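dma_zalloc_coherent() is the zeroing wrapper around dma_alloc_coherent(); coherent memory is the usual choice for structures that the CPU and the device both access continuously, such as descriptor rings. A sketch under that assumption; the ring type and helpers are hypothetical, and dma_free_coherent() is assumed from the same header.

    #include <linux/dma-mapping.h>
    #include <linux/errno.h>

    struct my_ring {
            void       *desc;
            dma_addr_t  desc_dma;
            size_t      len;
    };

    static int my_ring_alloc(struct device *dev, struct my_ring *r, size_t len)
    {
            r->len  = len;
            r->desc = dma_zalloc_coherent(dev, len, &r->desc_dma, GFP_KERNEL);
            return r->desc ? 0 : -ENOMEM;
    }

    static void my_ring_free(struct device *dev, struct my_ring *r)
    {
            dma_free_coherent(dev, r->len, r->desc, r->desc_dma);
    }
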
/include/drm/
drm_mem_util.h:

    static __inline__ void *drm_calloc_large(size_t nmemb, size_t size)
        if (size != 0 && nmemb > ULONG_MAX / size)
        if (size * nmemb <= PAGE_SIZE)
            return kcalloc(nmemb, size, GFP_KERNEL);
        return __vmalloc(size * nmemb,

    static __inline__ void *drm_malloc_ab(size_t nmemb, size_t size)
        if (size != 0 && nmemb > ULONG_MAX / size)
        if (size * nmemb <= PAGE_SIZE)
            return kmalloc(nmemb * size, GFP_KERNEL);
        return __vmalloc(size * nmemb,

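Both helpers overflow-check nmemb * size, use kcalloc()/kmalloc() for arrays that fit in a page, and fall back to __vmalloc() for larger ones. A usage sketch; the page-array scenario is invented, and drm_free_large() is assumed to be the matching free helper in the same header.

    #include <drm/drm_mem_util.h>

    static struct page **my_alloc_page_array(size_t npages)
    {
            /* element count is derived from object size, so use the checked helper */
            return drm_calloc_large(npages, sizeof(struct page *));
    }

    static void my_free_page_array(struct page **pages)
    {
            drm_free_large(pages);	/* kfree() or vfree(), whichever matches */
    }
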
drm_mm.h:

    unsigned long size;
                                  unsigned long size,
                                  unsigned long size,
                                  unsigned long size,		/* in drm_mm_get_block() */
        return drm_mm_get_block_generic(parent, size, alignment, 0);
                                  unsigned long size,		/* in drm_mm_get_block_atomic() */
        return drm_mm_get_block_generic(parent, size, alignment, 1);
                                  unsigned long size,		/* in drm_mm_get_block_range() */
        return drm_mm_get_block_range_generic(parent, size, alignment,
                                  unsigned long size,		/* in drm_mm_get_block_atomic_range() */
    […]

/include/asm-generic/bitops/
find.h:

                              size, unsigned long offset);
                              long size, unsigned long offset);
                              unsigned long size);
                              unsigned long size);
    #define find_first_bit(addr, size) find_next_bit((addr), (size), 0)
    #define find_first_zero_bit(addr, size) find_next_zero_bit((addr), (size), 0)

le.h:

                              unsigned long size, unsigned long offset)	/* in find_next_zero_bit_le() */
        return find_next_zero_bit(addr, size, offset);
                              unsigned long size, unsigned long offset)	/* in find_next_bit_le() */
        return find_next_bit(addr, size, offset);
                              unsigned long size)				/* in find_first_zero_bit_le() */
        return find_first_zero_bit(addr, size);
                              unsigned long size, unsigned long offset);
                              unsigned long size, unsigned long offset);
    #define find_first_zero_bit_le(addr, size) \
        find_next_zero_bit_le((addr), (size), 0)

/include/trace/events/
mmc.h:

    TP_PROTO(unsigned int cmd, unsigned int addr, unsigned int size),
    TP_ARGS(cmd, addr, size),
        __field(unsigned int, size)
        __entry->size = size;
        __entry->cmd, __entry->addr, __entry->size)
    TP_PROTO(unsigned int cmd, unsigned int addr, unsigned int size),
    TP_ARGS(cmd, addr, size));
    TP_PROTO(unsigned int cmd, unsigned int addr, unsigned int size),
    TP_ARGS(cmd, addr, size));
        __field(unsigned int, size)
    […]

/include/asm-generic/
ioctl.h:

    #define _IOC(dir,type,nr,size) \
         ((size) << _IOC_SIZESHIFT))
    #define _IOR(type,nr,size)	_IOC(_IOC_READ,(type),(nr),(_IOC_TYPECHECK(size)))
    #define _IOW(type,nr,size)	_IOC(_IOC_WRITE,(type),(nr),(_IOC_TYPECHECK(size)))
    #define _IOWR(type,nr,size)	_IOC(_IOC_READ|_IOC_WRITE,(type),(nr),(_IOC_TYPECHECK(size)))
    #define _IOR_BAD(type,nr,size)	_IOC(_IOC_READ,(type),(nr),sizeof(size))
    #define _IOW_BAD(type,nr,size)	_IOC(_IOC_WRITE,(type),(nr),sizeof(size))
    #define _IOWR_BAD(type,nr,size)	_IOC(_IOC_READ|_IOC_WRITE,(type),(nr),sizeof(size))

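In _IOR/_IOW/_IOWR the size argument is a type name, and _IOC_TYPECHECK() folds it to sizeof(type) at compile time. A sketch of how a driver would define its ioctl numbers; the magic byte, command numbers, and structure are all invented, and _IO() is assumed from the same header.

    #include <linux/ioctl.h>
    #include <linux/types.h>

    struct ex_config {
            __u32 rate;
            __u32 flags;
    };

    #define EX_IOC_MAGIC       'E'	/* assumed-unused magic byte */
    #define EX_IOC_RESET       _IO(EX_IOC_MAGIC, 0)
    #define EX_IOC_SET_CONFIG  _IOW(EX_IOC_MAGIC, 1, struct ex_config)
    #define EX_IOC_GET_CONFIG  _IOR(EX_IOC_MAGIC, 2, struct ex_config)
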
getorder.h:

    int __get_order(unsigned long size)
        size--;
        size >>= PAGE_SHIFT;
        order = fls(size);
        order = fls64(size);

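__get_order() rounds a byte count up to a power-of-two number of pages and returns the exponent, using fls() or fls64() depending on word size (the excerpt elides that branch). A worked example, assuming PAGE_SIZE == 4096:

    /*
     * PAGE_SHIFT == 12:
     *   get_order(4096)  : (4095 >> 12) = 0, fls(0) = 0  -> order 0 (1 page)
     *   get_order(4097)  : (4096 >> 12) = 1, fls(1) = 1  -> order 1 (2 pages)
     *   get_order(16384) : (16383 >> 12) = 3, fls(3) = 2 -> order 2 (4 pages)
     * i.e. the result is the smallest n with size <= (PAGE_SIZE << n),
     * which is the "order" expected by __get_free_pages()/alloc_pages().
     */
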
dma-mapping-common.h:

                                          size_t size,	/* in dma_map_single_attrs() */
        kmemcheck_mark_initialized(ptr, size);
                     (unsigned long)ptr & ~PAGE_MASK, size,
                     (unsigned long)ptr & ~PAGE_MASK, size,
                                          size_t size,	/* in dma_unmap_single_attrs() */
        ops->unmap_page(dev, addr, size, dir, attrs);
        debug_dma_unmap_page(dev, addr, size, dir, true);
                                          size_t offset, size_t size,	/* in dma_map_page() */
        kmemcheck_mark_initialized(page_address(page) + offset, size);
        addr = ops->map_page(dev, page, offset, size, dir, NULL);
    […]

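These are the streaming-DMA entry points: an already-allocated buffer is mapped around a transfer instead of being allocated coherent. A sketch of the usual pattern from a driver's point of view; the transmit scenario is invented and dma_mapping_error() is assumed from the generic DMA API.

    #include <linux/dma-mapping.h>
    #include <linux/errno.h>

    static int my_xmit(struct device *dev, void *buf, size_t len)
    {
            dma_addr_t dma;

            dma = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
            if (dma_mapping_error(dev, dma))
                    return -EIO;

            /* ... point the hardware at 'dma' and wait for completion ... */

            dma_unmap_single(dev, dma, len, DMA_TO_DEVICE);
            return 0;
    }
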
pci-dma-compat.h:

    pci_alloc_consistent(struct pci_dev *hwdev, size_t size,
        return dma_alloc_coherent(hwdev == NULL ? NULL : &hwdev->dev, size, dma_handle, GFP_ATOMIC);
    pci_free_consistent(struct pci_dev *hwdev, size_t size,
        dma_free_coherent(hwdev == NULL ? NULL : &hwdev->dev, size, vaddr, dma_handle);
    pci_map_single(struct pci_dev *hwdev, void *ptr, size_t size, int direction)
        …return dma_map_single(hwdev == NULL ? NULL : &hwdev->dev, ptr, size, (enum dma_data_direction)dire…
        size_t size, int direction)		/* in pci_unmap_single() */
        …dma_unmap_single(hwdev == NULL ? NULL : &hwdev->dev, dma_addr, size, (enum dma_data_direction)dire…
        unsigned long offset, size_t size, int direction)	/* in pci_map_page() */
        …return dma_map_page(hwdev == NULL ? NULL : &hwdev->dev, page, offset, size, (enum dma_data_directi…
    […]
