/mm/kasan/

kasan.h
    208  size_t access_size;
    233  size_t size;    /* Size of the global variable. */
    234  size_t size_with_redzone;    /* Size of the variable + size of the redzone. 32 bytes aligned. */
    282  size_t size;
    290  size_t size;
    319  bool kasan_check_range(unsigned long addr, size_t size, bool write,
    331  void *kasan_find_first_bad_addr(void *addr, size_t size);
    353  bool kasan_report(unsigned long addr, size_t size,
    445  static inline void kasan_poison(const void *addr, size_t size, u8 value, bool init)    in kasan_poison()
    461  static inline void kasan_unpoison(const void *addr, size_t size, bool init)    in kasan_unpoison()
    [all …]

generic.c
    82   size_t size)    in bytes_is_nonzero()
    124  size_t size)    in memory_is_poisoned_n()
    142  static __always_inline bool memory_is_poisoned(unsigned long addr, size_t size)    in memory_is_poisoned()
    163  size_t size, bool write,    in check_region_inline()
    186  bool kasan_check_range(unsigned long addr, size_t size, bool write,    in kasan_check_range()
    212  size_t aligned_size = round_up(global->size, KASAN_GRANULE_SIZE);    in register_global()
    221  void __asan_register_globals(struct kasan_global *globals, size_t size)    in __asan_register_globals()
    230  void __asan_unregister_globals(struct kasan_global *globals, size_t size)    in __asan_unregister_globals()
    259  void __asan_loadN(unsigned long addr, size_t size)    in __asan_loadN()
    266  void __asan_loadN_noabort(unsigned long, size_t);
    [all …]

kasan_test.c
    129  size_t size = 128 - KASAN_GRANULE_SIZE - 5;    in kmalloc_oob_right()
    158  size_t size = 15;    in kmalloc_oob_left()
    171  size_t size = 4096;    in kmalloc_node_oob_right()
    190  size_t size = KMALLOC_MAX_CACHE_SIZE + 10;    in kmalloc_pagealloc_oob_right()
    206  size_t size = KMALLOC_MAX_CACHE_SIZE + 10;    in kmalloc_pagealloc_uaf()
    220  size_t size = KMALLOC_MAX_CACHE_SIZE + 10;    in kmalloc_pagealloc_invalid_free()
    234  size_t order = 4;    in pagealloc_oob_right()
    235  size_t size = (1UL << (PAGE_SHIFT + order));    in pagealloc_oob_right()
    256  size_t order = 4;    in pagealloc_uaf()
    269  size_t size = KMALLOC_MAX_CACHE_SIZE - 256;    in kmalloc_large_oob_right()
    [all …]

shadow.c
    42   void *memset(void *addr, int c, size_t len)    in memset()
    52   void *memmove(void *dest, const void *src, size_t len)    in memmove()
    63   void *memcpy(void *dest, const void *src, size_t len)    in memcpy()
    72   void kasan_poison(const void *addr, size_t size, u8 value, bool init)    in kasan_poison()
    103  void kasan_poison_last_granule(const void *addr, size_t size)    in kasan_poison_last_granule()
    115  void kasan_unpoison(const void *addr, size_t size, bool init)    in kasan_unpoison()
    536  int kasan_alloc_module_shadow(void *addr, size_t size, gfp_t gfp_mask)    in kasan_alloc_module_shadow()
    539  size_t scaled_size;    in kasan_alloc_module_shadow()
    540  size_t shadow_size;    in kasan_alloc_module_shadow()

report_generic.c
    33   void *kasan_find_first_bad_addr(void *addr, size_t size)    in kasan_find_first_bad_addr()
    181  char *token, size_t max_tok_len,    in tokenize_frame_descr()
    190  const size_t tok_len = sep - *frame_descr;    in tokenize_frame_descr()
    359  void __asan_report_load_n_noabort(unsigned long addr, size_t size)    in __asan_report_load_n_noabort()
    365  void __asan_report_store_n_noabort(unsigned long addr, size_t size)    in __asan_report_store_n_noabort()

common.c
    63   void __kasan_unpoison_range(const void *address, size_t size)    in __kasan_unpoison_range()
    326  const void *object, size_t size, gfp_t flags)    in ____kasan_kmalloc()
    373  size_t size, gfp_t flags)    in __kasan_kmalloc()
    379  void * __must_check __kasan_kmalloc_large(const void *ptr, size_t size,    in __kasan_kmalloc_large()
    414  void * __must_check __kasan_krealloc(const void *object, size_t size, gfp_t flags)    in __kasan_krealloc()

quarantine.c
    36   size_t bytes;
    54   size_t size)    in qlist_put()
    249  size_t total_size, new_quarantine_size, percpu_quarantines;    in kasan_quarantine_reduce()
    281  WRITE_ONCE(quarantine_batch_size, max((size_t)QUARANTINE_PERCPU_SIZE,    in kasan_quarantine_reduce()

/mm/

util.c
    57   size_t len;    in kstrdup()
    101  char *kstrndup(const char *s, size_t max, gfp_t gfp)    in kstrndup()
    103  size_t len;    in kstrndup()
    128  void *kmemdup(const void *src, size_t len, gfp_t gfp)    in kmemdup()
    148  char *kmemdup_nul(const char *s, size_t len, gfp_t gfp)    in kmemdup_nul()
    173  void *memdup_user(const void __user *src, size_t len)    in memdup_user()
    199  void *vmemdup_user(const void __user *src, size_t len)    in vmemdup_user()
    255  void *memdup_user_nul(const void __user *src, size_t len)    in memdup_user_nul()
    598  void *kvmalloc_node(size_t size, gfp_t flags, int node)    in kvmalloc_node()
    677  void kvfree_sensitive(const void *addr, size_t len)    in kvfree_sensitive()
    [all …]

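The util.c helpers listed above share one pattern: take a size_t length, allocate, copy, and return NULL on failure. A minimal illustrative sketch of two of them follows; the struct blob type and the function names are invented for the example.

```c
#include <linux/slab.h>
#include <linux/mm.h>
#include <linux/string.h>

/* Hypothetical blob of data; only for illustration. */
struct blob {
	void *data;
	size_t len;
};

static int blob_clone(struct blob *dst, const struct blob *src)
{
	/* kmemdup() kmalloc()s src->len bytes and copies them over. */
	dst->data = kmemdup(src->data, src->len, GFP_KERNEL);
	if (!dst->data)
		return -ENOMEM;
	dst->len = src->len;
	return 0;
}

static void *blob_alloc_big(size_t size)
{
	/*
	 * kvmalloc() (the non-node form of kvmalloc_node() above) tries
	 * kmalloc() first and falls back to vmalloc() for large sizes.
	 */
	return kvmalloc(size, GFP_KERNEL);
}
```

Buffers from the kv* family are released with kvfree() (or kvfree_sensitive() when the contents must be wiped first), which handles both the kmalloc() and vmalloc() cases.
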
percpu.c
    141   static size_t pcpu_chunk_struct_size __ro_after_init;
    156   static const size_t *pcpu_group_sizes __ro_after_init;
    322   size_t align)    in pcpu_check_block_hint()
    508   static void *pcpu_mem_zalloc(size_t size, gfp_t gfp)    in pcpu_mem_zalloc()
    1107  size_t align, bool pop_only)    in pcpu_find_block_fit()
    1213  size_t align, int start)    in pcpu_alloc_area()
    1216  size_t align_mask = (align) ? (align - 1) : 0;    in pcpu_alloc_area()
    1347  size_t alloc_size;    in pcpu_alloc_first_chunk()
    1626  static bool pcpu_memcg_pre_alloc_hook(size_t size, gfp_t gfp,    in pcpu_memcg_pre_alloc_hook()
    1649  size_t size)    in pcpu_memcg_post_alloc_hook()
    [all …]

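The percpu.c hits above are internal allocator plumbing (block fitting, area allocation, memcg hooks); callers normally go through alloc_percpu()/free_percpu() and per_cpu_ptr(). A hedged usage sketch, with a made-up counter name:

```c
#include <linux/percpu.h>
#include <linux/cpumask.h>

/* Hypothetical per-CPU counter, shown only to illustrate the API. */
static u64 __percpu *hit_count;

static int hit_count_init(void)
{
	hit_count = alloc_percpu(u64);	/* one u64 per possible CPU */
	return hit_count ? 0 : -ENOMEM;
}

static u64 hit_count_total(void)
{
	u64 sum = 0;
	int cpu;

	/* Sum each CPU's private copy of the counter. */
	for_each_possible_cpu(cpu)
		sum += *per_cpu_ptr(hit_count, cpu);
	return sum;
}

static void hit_count_exit(void)
{
	free_percpu(hit_count);
}
```
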
slab.h
    89   struct kmem_cache *kmalloc_slab(size_t, gfp_t);
    212  size_t count, loff_t *ppos);
    220  void __kmem_cache_free_bulk(struct kmem_cache *, size_t, void **);
    221  int __kmem_cache_alloc_bulk(struct kmem_cache *, gfp_t, size_t, void **);
    290  static inline size_t obj_full_size(struct kmem_cache *s)    in obj_full_size()
    304  size_t objects, gfp_t flags)    in memcg_slab_pre_alloc_hook()
    329  gfp_t flags, size_t size,    in memcg_slab_post_alloc_hook()
    334  size_t i;    in memcg_slab_post_alloc_hook()
    421  size_t objects, gfp_t flags)    in memcg_slab_pre_alloc_hook()
    428  gfp_t flags, size_t size,    in memcg_slab_post_alloc_hook()
    [all …]

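The __kmem_cache_alloc_bulk()/__kmem_cache_free_bulk() declarations above back the public bulk API exported from <linux/slab.h>. A sketch of how a caller uses it, assuming a kmem_cache created elsewhere (req_cache is hypothetical):

```c
#include <linux/slab.h>
#include <linux/kernel.h>

/* 'req_cache' is a hypothetical cache created with kmem_cache_create(). */
static int process_batch(struct kmem_cache *req_cache)
{
	void *objs[16];
	/* Returns the number of objects actually allocated; 0 means failure. */
	int got = kmem_cache_alloc_bulk(req_cache, GFP_KERNEL, ARRAY_SIZE(objs), objs);

	if (!got)
		return -ENOMEM;

	/* ... use objs[0..got-1] ... */

	kmem_cache_free_bulk(req_cache, got, objs);
	return 0;
}
```
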
dmapool.c
    46   size_t size;
    48   size_t allocation;
    49   size_t boundary;
    131  size_t size, size_t align, size_t boundary)    in dma_pool_create()
    134  size_t allocation;    in dma_pool_create()
    148  allocation = max_t(size_t, size, PAGE_SIZE);    in dma_pool_create()
    319  size_t offset;    in dma_pool_alloc()
    499  size_t size, size_t align, size_t allocation)    in dmam_pool_create()

slob.c
    237  static void *slob_page_alloc(struct page *sp, size_t size, int align,    in slob_page_alloc()
    301  static void *slob_alloc(size_t size, gfp_t gfp, int align, int node,    in slob_alloc()
    477  __do_kmalloc_node(size_t size, gfp_t gfp, int node, unsigned long caller)    in __do_kmalloc_node()
    480  int minalign = max_t(size_t, ARCH_KMALLOC_MINALIGN, ARCH_SLAB_MINALIGN);    in __do_kmalloc_node()
    524  void *__kmalloc(size_t size, gfp_t gfp)    in __kmalloc()
    530  void *__kmalloc_track_caller(size_t size, gfp_t gfp, unsigned long caller)    in __kmalloc_track_caller()
    537  void *__kmalloc_node_track_caller(size_t size, gfp_t gfp,    in __kmalloc_node_track_caller()
    557  int align = max_t(size_t, ARCH_KMALLOC_MINALIGN, ARCH_SLAB_MINALIGN);    in kfree()
    571  size_t __ksize(const void *block)    in __ksize()
    585  align = max_t(size_t, ARCH_KMALLOC_MINALIGN, ARCH_SLAB_MINALIGN);    in __ksize()
    [all …]

maccess.c
    10   size_t size)    in copy_from_kernel_nofault_allowed()
    25   long copy_from_kernel_nofault(void *dst, const void *src, size_t size)    in copy_from_kernel_nofault()
    59   long copy_to_kernel_nofault(void *dst, const void *src, size_t size)    in copy_to_kernel_nofault()
    121  long copy_from_kernel_nofault(void *dst, const void *src, size_t size)    in copy_from_kernel_nofault()
    151  long copy_to_kernel_nofault(void *dst, const void *src, size_t size)    in copy_to_kernel_nofault()
    221  long copy_from_user_nofault(void *dst, const void __user *src, size_t size)    in copy_from_user_nofault()
    248  long copy_to_user_nofault(void __user *dst, const void *src, size_t size)    in copy_to_user_nofault()

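maccess.c appears to carry two build-dependent implementations of the same entry points, which is why copy_from_kernel_nofault() and copy_to_kernel_nofault() each show up twice above. These helpers access kernel memory without risking a fault-induced oops; a small illustrative wrapper (the function name is made up):

```c
#include <linux/uaccess.h>
#include <linux/types.h>

/*
 * Safely read one word from a kernel address that may be bogus.
 * copy_from_kernel_nofault() returns 0 on success and a negative
 * errno if the address cannot be accessed.
 */
static bool read_word_safely(const void *maybe_bad_addr, unsigned long *out)
{
	return copy_from_kernel_nofault(out, maybe_bad_addr, sizeof(*out)) == 0;
}
```
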
percpu-internal.h
    37   size_t max_alloc_size;    /* largest allocation size */
    127  size_t min_alloc_size;    /* min allocation size */
    128  size_t max_alloc_size;    /* max allocation size */
    153  static inline void pcpu_stats_area_alloc(struct pcpu_chunk *chunk, size_t size)    in pcpu_stats_area_alloc()
    221  static inline void pcpu_stats_area_alloc(struct pcpu_chunk *chunk, size_t size)    in pcpu_stats_area_alloc()

process_vm_access.c
    29   size_t len,    in process_vm_rw_pages()
    36   size_t copy = PAGE_SIZE - offset;    in process_vm_rw_pages()
    37   size_t copied;    in process_vm_rw_pages()
    97   size_t bytes;    in process_vm_rw_single_vec()
    164  size_t total_len = iov_iter_count(iter);    in process_vm_rw_core()
    187  process_pages = kmalloc(min_t(size_t, PVM_MAX_KMALLOC_PAGES,    in process_vm_rw_core()

balloon_compaction.c
    40   size_t balloon_page_list_enqueue(struct balloon_dev_info *b_dev_info,    in balloon_page_list_enqueue()
    45   size_t n_pages = 0;    in balloon_page_list_enqueue()
    76   size_t balloon_page_list_dequeue(struct balloon_dev_info *b_dev_info,    in balloon_page_list_dequeue()
    77   struct list_head *pages, size_t n_req_pages)    in balloon_page_list_dequeue()
    81   size_t n_pages = 0;    in balloon_page_list_dequeue()

slab_common.c
    116   void __kmem_cache_free_bulk(struct kmem_cache *s, size_t nr, void **p)    in __kmem_cache_free_bulk()
    118   size_t i;    in __kmem_cache_free_bulk()
    128   int __kmem_cache_alloc_bulk(struct kmem_cache *s, gfp_t flags, size_t nr,    in __kmem_cache_alloc_bulk()
    131   size_t i;    in __kmem_cache_alloc_bulk()
    744   struct kmem_cache *kmalloc_slab(size_t size, gfp_t flags)    in kmalloc_slab()
    958   void *kmalloc_order(size_t size, gfp_t flags, unsigned int order)    in kmalloc_order()
    981   void *kmalloc_order_trace(size_t size, gfp_t flags, unsigned int order)    in kmalloc_order_trace()
    1200  static __always_inline void *__do_krealloc(const void *p, size_t new_size,    in __do_krealloc()
    1204  size_t ks;    in __do_krealloc()
    1244  void *krealloc(const void *p, size_t new_size, gfp_t flags)    in krealloc()
    [all …]

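krealloc() above resizes a kmalloc()ed buffer and, on failure, leaves the original allocation intact. A sketch of the usual calling pattern, with a hypothetical growable-buffer struct:

```c
#include <linux/slab.h>

/* Hypothetical growable buffer; for illustration only. */
struct growbuf {
	char *data;
	size_t len;
};

static int growbuf_resize(struct growbuf *b, size_t new_len)
{
	/* On failure krealloc() returns NULL and b->data is still valid. */
	char *tmp = krealloc(b->data, new_len, GFP_KERNEL);

	if (!tmp)
		return -ENOMEM;

	b->data = tmp;
	b->len = new_len;
	return 0;
}
```
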
slab.c
    401   static unsigned int cache_estimate(unsigned long gfporder, size_t buffer_size,    in cache_estimate()
    402   slab_flags_t flags, size_t *left_over)    in cache_estimate()
    405   size_t slab_size = PAGE_SIZE << gfporder;    in cache_estimate()
    536   size_t memsize = sizeof(void *) * entries + sizeof(struct array_cache);    in alloc_arraycache()
    647   size_t memsize = sizeof(void *) * entries + sizeof(struct alien_cache);    in __alloc_alien_cache()
    1665  static size_t calculate_slab_order(struct kmem_cache *cachep,    in calculate_slab_order()
    1666  size_t size, slab_flags_t flags)    in calculate_slab_order()
    1668  size_t left_over = 0;    in calculate_slab_order()
    1673  size_t remainder;    in calculate_slab_order()
    1685  size_t freelist_size;    in calculate_slab_order()
    [all …]

kmemleak.c
    125  size_t size;
    149  size_t size;
    266  size_t len, bool ascii)    in warn_or_seq_hex_dump()
    286  size_t len;    in hex_dump_object()
    289  len = min_t(size_t, object->size, HEX_MAX_LINES * HEX_ROW_SIZE);    in hex_dump_object()
    577  static struct kmemleak_object *create_object(unsigned long ptr, size_t size,    in create_object()
    709  static void delete_object_part(unsigned long ptr, size_t size)    in delete_object_part()
    794  static void add_scan_area(unsigned long ptr, size_t size, gfp_t gfp)    in add_scan_area()
    901  void __ref kmemleak_alloc(const void *ptr, size_t size, int min_count,    in kmemleak_alloc()
    920  void __ref kmemleak_alloc_percpu(const void __percpu *ptr, size_t size,    in kmemleak_alloc_percpu()
    [all …]

mempool.c
    25   static void poison_error(mempool_t *pool, void *element, size_t size,    in poison_error()
    26   size_t byte)    in poison_error()
    42   static void __check_element(mempool_t *pool, void *element, size_t size)    in __check_element()
    45   size_t i;    in __check_element()
    73   static void __poison_element(void *element, size_t size)    in __poison_element()
    530  size_t size = (size_t)pool_data;    in mempool_kmalloc()

mlock.c
    560  static int apply_vma_lock_flags(unsigned long start, size_t len,    in apply_vma_lock_flags()
    617  unsigned long start, size_t len)    in count_mm_mlocked_page_nr()
    648  static __must_check int do_mlock(unsigned long start, size_t len, vm_flags_t flags)    in do_mlock()
    695  SYSCALL_DEFINE2(mlock, unsigned long, start, size_t, len)    in SYSCALL_DEFINE2() argument
    700  SYSCALL_DEFINE3(mlock2, unsigned long, start, size_t, len, int, flags)    in SYSCALL_DEFINE3() argument
    713  SYSCALL_DEFINE2(munlock, unsigned long, start, size_t, len)    in SYSCALL_DEFINE2() argument
    822  int user_shm_lock(size_t size, struct ucounts *ucounts)    in user_shm_lock()
    850  void user_shm_unlock(size_t size, struct ucounts *ucounts)    in user_shm_unlock()

pgsize_migration.c
    46   const char *buf, size_t n)    in store_pgsize_migration_enabled()
    130  size_t str_len = strlen(str);    in str_has_suffix()
    131  size_t suffix_len = strlen(suffix);    in str_has_suffix()

/mm/kfence/

kfence_test.c
    45   static void probe_console(void *ignore, const char *buf, size_t len)    in probe_console()
    180  static size_t setup_test_cache(struct kunit *test, size_t size, slab_flags_t flags,    in setup_test_cache()
    209  static inline size_t kmalloc_cache_alignment(size_t size)    in kmalloc_cache_alignment()
    238  static void *test_alloc(struct kunit *test, size_t size, gfp_t gfp, enum allocation_policy policy)    in test_alloc()
    314  size_t size = 32;    in test_out_of_bounds_read()
    348  size_t size = 32;    in test_out_of_bounds_write()
    366  const size_t size = 32;    in test_use_after_free_read()
    382  const size_t size = 32;    in test_double_free()
    397  const size_t size = 32;    in test_invalid_addr_free()
    414  size_t size = 32;    in test_corruption()
    [all …]

core.c
    134  #define UNIQUE_ALLOC_STACK_DEPTH ((size_t)8)
    176  static u32 get_alloc_stack_hash(unsigned long *stack_entries, size_t num_entries)    in get_alloc_stack_hash()
    252  unsigned long *stack_entries, size_t num_stack_entries)    in metadata_update_state()
    331  static void *kfence_guarded_alloc(struct kmem_cache *cache, size_t size, gfp_t gfp,    in kfence_guarded_alloc()
    332  unsigned long *stack_entries, size_t num_stack_entries,    in kfence_guarded_alloc()
    847  void *__kfence_alloc(struct kmem_cache *s, size_t size, gfp_t flags)    in __kfence_alloc()
    850  size_t num_stack_entries;    in __kfence_alloc()
    913  size_t kfence_ksize(const void *addr)    in kfence_ksize()

/mm/damon/

dbgfs.c
    26   static char *user_input_str(const char __user *buf, size_t count, loff_t *ppos)    in user_input_str()
    50   char __user *buf, size_t count, loff_t *ppos)    in dbgfs_attrs_read()
    67   const char __user *buf, size_t count, loff_t *ppos)    in dbgfs_attrs_write()
    132  size_t count, loff_t *ppos)    in dbgfs_schemes_read()
    241  size_t count, loff_t *ppos)    in dbgfs_schemes_write()
    308  char __user *buf, size_t count, loff_t *ppos)    in dbgfs_target_ids_read()
    362  const char __user *buf, size_t count, loff_t *ppos)    in dbgfs_target_ids_write()
    463  size_t count, loff_t *ppos)    in dbgfs_init_regions_read()
    560  const char __user *buf, size_t count,    in dbgfs_init_regions_write()
    589  char __user *buf, size_t count, loff_t *ppos)    in dbgfs_kdamond_pid_read()
    [all …]