Searched refs:mem (Results 1 – 9 of 9) sorted by relevance
/mm/ |
D | cma_debug.c |
      71  static void cma_add_to_cma_mem_list(struct cma *cma, struct cma_mem *mem)    in cma_add_to_cma_mem_list() argument
      74      hlist_add_head(&mem->node, &cma->mem_head);    in cma_add_to_cma_mem_list()
      80      struct cma_mem *mem = NULL;    in cma_get_entry_from_list() local
      84      mem = hlist_entry(cma->mem_head.first, struct cma_mem, node);    in cma_get_entry_from_list()
      85      hlist_del_init(&mem->node);    in cma_get_entry_from_list()
      89      return mem;    in cma_get_entry_from_list()
      94      struct cma_mem *mem = NULL;    in cma_free_mem() local
      97      mem = cma_get_entry_from_list(cma);    in cma_free_mem()
      98      if (mem == NULL)    in cma_free_mem()
     101      if (mem->n <= count) {    in cma_free_mem()
     [all …]
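The cma_debug.c hits above keep a list of debugfs-allocated chunks on an hlist. A minimal sketch of that hlist pattern, with hypothetical names (my_item, my_head, my_add, my_pop) and without the locking the real code takes around the list:

    #include <linux/list.h>

    struct my_item {                        /* hypothetical element type */
            struct hlist_node node;
            unsigned long n;
    };

    static HLIST_HEAD(my_head);             /* empty list head */

    static void my_add(struct my_item *item)
    {
            hlist_add_head(&item->node, &my_head);  /* push at the front */
    }

    static struct my_item *my_pop(void)
    {
            struct my_item *item = NULL;

            if (!hlist_empty(&my_head)) {
                    /* map the first node back to its containing structure */
                    item = hlist_entry(my_head.first, struct my_item, node);
                    hlist_del_init(&item->node);    /* unlink; node stays reusable */
            }
            return item;
    }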
|
D | memory_hotplug.c |
    1305  static int online_memory_block(struct memory_block *mem, void *arg)    in online_memory_block() argument
    1307      mem->online_type = mhp_default_online_type;    in online_memory_block()
    1308      return device_online(&mem->dev);    in online_memory_block()
    2097  static int check_memblock_offlined_cb(struct memory_block *mem, void *arg)    in check_memblock_offlined_cb() argument
    2099      int ret = !is_memblock_offlined(mem);    in check_memblock_offlined_cb()
    2102      *nid = mem->nid;    in check_memblock_offlined_cb()
    2106      beginpa = PFN_PHYS(section_nr_to_pfn(mem->start_section_nr));    in check_memblock_offlined_cb()
    2116  static int get_nr_vmemmap_pages_cb(struct memory_block *mem, void *arg)    in get_nr_vmemmap_pages_cb() argument
    2121      return mem->nr_vmemmap_pages;    in get_nr_vmemmap_pages_cb()
    2140  static int check_no_memblock_for_node_cb(struct memory_block *mem, void *arg)    in check_no_memblock_for_node_cb() argument
    [all …]
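online_memory_block(), check_memblock_offlined_cb() and the other hits are callbacks handed to walk_memory_blocks(), which invokes them once per memory block in a physical range and stops on a non-zero return. A hedged sketch of that callback shape (my_count_offline and my_scan are made-up names, and the sketch ignores the hotplug locking the real callers hold):

    #include <linux/memory.h>
    #include <linux/printk.h>

    /* Invoked once per memory block; returning non-zero stops the walk. */
    static int my_count_offline(struct memory_block *mem, void *arg)
    {
            int *offline = arg;

            if (mem->state == MEM_OFFLINE)
                    (*offline)++;
            return 0;
    }

    static int my_scan(unsigned long start, unsigned long size)
    {
            int offline = 0;
            int ret = walk_memory_blocks(start, size, &offline, my_count_offline);

            pr_info("%d offline memory blocks in range\n", offline);
            return ret;
    }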
|
D | page_poison.c |
      49  static void check_poison_mem(struct page *page, unsigned char *mem, size_t bytes)    in check_poison_mem() argument
      55      start = memchr_inv(mem, PAGE_POISON, bytes);    in check_poison_mem()
      59      for (end = mem + bytes - 1; end > start; end--) {    in check_poison_mem()
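check_poison_mem() leans on memchr_inv(), which returns a pointer to the first byte in the range that does not equal the given value, or NULL when every byte matches. A small sketch of the same kind of check on a hypothetical buffer (my_poison_intact is an invented helper):

    #include <linux/string.h>
    #include <linux/poison.h>

    /* True if every byte of the buffer still carries the PAGE_POISON pattern. */
    static bool my_poison_intact(const void *buf, size_t len)
    {
            return memchr_inv(buf, PAGE_POISON, len) == NULL;
    }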
|
D | mempool.c |
     511      struct kmem_cache *mem = pool_data;    in mempool_alloc_slab() local
     512      VM_BUG_ON(mem->ctor);    in mempool_alloc_slab()
     513      return kmem_cache_alloc(mem, gfp_mask);    in mempool_alloc_slab()
     519      struct kmem_cache *mem = pool_data;    in mempool_free_slab() local
     520      kmem_cache_free(mem, element);    in mempool_free_slab()
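mempool_alloc_slab() and mempool_free_slab() are the stock callbacks for a mempool backed by a kmem_cache: the cache is passed as pool_data, which is what the lines above cast and hand to kmem_cache_alloc()/kmem_cache_free(). A minimal usage sketch (struct my_obj, the cache name and the reserve of 16 objects are assumptions):

    #include <linux/mempool.h>
    #include <linux/slab.h>

    struct my_obj { int x; };               /* hypothetical object type */

    static struct kmem_cache *my_cache;
    static mempool_t *my_pool;

    static int my_pool_init(void)
    {
            my_cache = kmem_cache_create("my_obj", sizeof(struct my_obj), 0, 0, NULL);
            if (!my_cache)
                    return -ENOMEM;

            /* Reserve 16 objects; the kmem_cache is handed in as pool_data. */
            my_pool = mempool_create(16, mempool_alloc_slab, mempool_free_slab, my_cache);
            if (!my_pool) {
                    kmem_cache_destroy(my_cache);
                    return -ENOMEM;
            }
            return 0;
    }

mempool_create_slab_pool() is a convenience wrapper for exactly this pairing.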
|
D | vmalloc.c |
    2150  void vm_unmap_ram(const void *mem, unsigned int count)    in vm_unmap_ram() argument
    2153      unsigned long addr = (unsigned long)kasan_reset_tag(mem);    in vm_unmap_ram()
    2162      kasan_poison_vmalloc(mem, size);    in vm_unmap_ram()
    2165      debug_check_no_locks_freed(mem, size);    in vm_unmap_ram()
    2196      void *mem;    in vm_map_ram() local
    2199      mem = vb_alloc(size, GFP_KERNEL);    in vm_map_ram()
    2200      if (IS_ERR(mem))    in vm_map_ram()
    2202      addr = (unsigned long)mem;    in vm_map_ram()
    2211      mem = (void *)addr;    in vm_map_ram()
    2216      vm_unmap_ram(mem, count);    in vm_map_ram()
    [all …]
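vm_map_ram()/vm_unmap_ram() shown above provide a transient, virtually contiguous mapping over an array of pages. A hedged caller-side sketch, assuming a recent kernel where vm_map_ram() takes (pages, count, node) and returns NULL on failure; the same count must be passed back to vm_unmap_ram():

    #include <linux/vmalloc.h>
    #include <linux/mm.h>
    #include <linux/numa.h>
    #include <linux/string.h>

    /* Map 'count' already-allocated pages contiguously, use the mapping, unmap. */
    static int my_use_pages(struct page **pages, unsigned int count)
    {
            void *mem = vm_map_ram(pages, count, NUMA_NO_NODE);

            if (!mem)
                    return -ENOMEM;

            memset(mem, 0, (size_t)count * PAGE_SIZE); /* any access through the mapping */
            vm_unmap_ram(mem, count);                  /* count must match the map call */
            return 0;
    }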
|
D | slab_common.c |
    1286      void *mem = (void *)p;    in kfree_sensitive() local
    1288      ks = ksize(mem);    in kfree_sensitive()
    1290      memzero_explicit(mem, ks);    in kfree_sensitive()
    1291      kfree(mem);    in kfree_sensitive()
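kfree_sensitive() (formerly kzfree()) wipes the whole usable size of the allocation via ksize() and memzero_explicit() before freeing it, so secret material does not linger in the slab. A small usage sketch (my_handle_key is a made-up caller):

    #include <linux/slab.h>
    #include <linux/string.h>

    static int my_handle_key(const void *key, size_t len)
    {
            u8 *tmp = kmemdup(key, len, GFP_KERNEL);

            if (!tmp)
                    return -ENOMEM;

            /* ... use the temporary copy ... */

            kfree_sensitive(tmp);   /* zeroes the full ksize() of the buffer, then frees */
            return 0;
    }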
|
D | vmstat.c |
     197      int mem; /* memory in 128 MB units */    in calculate_normal_threshold() local
     229      mem = zone_managed_pages(zone) >> (27 - PAGE_SHIFT);    in calculate_normal_threshold()
     231      threshold = 2 * fls(num_online_cpus()) * (1 + fls(mem));    in calculate_normal_threshold()
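As a worked example of the threshold formula above (fls() gives the position of the highest set bit): a zone managing 4 GB is mem = 4096 MB / 128 MB = 32 units, so with 8 online CPUs the threshold is 2 * fls(8) * (1 + fls(32)) = 2 * 4 * (1 + 6) = 56 per-CPU events before the delta is folded into the zone counter; the real function also caps the result.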
|
D | filemap.c |
    1487  static inline bool clear_bit_unlock_is_negative_byte(long nr, volatile void *mem)    in clear_bit_unlock_is_negative_byte() argument
    1489      clear_bit_unlock(nr, mem);    in clear_bit_unlock_is_negative_byte()
    1491      return test_bit(PG_waiters, mem);    in clear_bit_unlock_is_negative_byte()
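clear_bit_unlock_is_negative_byte() above is filemap.c's generic fallback for releasing PG_locked while simultaneously learning whether PG_waiters is set, so the unlock path only wakes waiters when someone is actually waiting. A rough sketch of the same two-step pattern using the generic bitops on a private flags word (MY_LOCK and MY_WAITERS are invented bit numbers, not the page flags):

    #include <linux/bitops.h>

    #define MY_LOCK     0
    #define MY_WAITERS  7

    /* Release the lock bit with release semantics, then report whether a
     * waiter flagged itself; the page-flag helper fuses these two steps. */
    static bool my_unlock_and_check(unsigned long *flags)
    {
            clear_bit_unlock(MY_LOCK, flags);
            return test_bit(MY_WAITERS, flags);
    }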
|
D | nommu.c |
     336  void vm_unmap_ram(const void *mem, unsigned int count)    in vm_unmap_ram() argument
|