/mm/
D | slab_common.c |
    140   unsigned int align, unsigned int size)  in calculate_alignment() argument
    155   align = max(align, ralign);  in calculate_alignment()
    158   align = max(align, arch_slab_minalign());  in calculate_alignment()
    160   return ALIGN(align, sizeof(void *));  in calculate_alignment()
    186   struct kmem_cache *find_mergeable(unsigned int size, unsigned int align,  in find_mergeable() argument
    198   align = calculate_alignment(flags, align, size);  in find_mergeable()
    199   size = ALIGN(size, align);  in find_mergeable()
    218   if ((s->size & ~(align - 1)) != s->size)  in find_mergeable()
    224   if (IS_ENABLED(CONFIG_SLAB) && align &&  in find_mergeable()
    225   (align > s->align || s->align % align))  in find_mergeable()
    [all …]
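The calculate_alignment() hits above sketch the slab allocators' recipe for settling a cache's alignment: raise the requested alignment for hardware-cacheline requests, never drop below the architecture minimum, then round to a pointer-size multiple. Below is a minimal, self-contained sketch of that recipe; the CACHE_LINE_SIZE and ARCH_SLAB_MINALIGN constants are stand-ins for the kernel's cache_line_size() and arch_slab_minalign(), not their real values.

#include <stddef.h>
#include <stdio.h>

#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((size_t)(a) - 1))

/* Stand-ins for the kernel's cache_line_size() and arch_slab_minalign(). */
#define CACHE_LINE_SIZE    64u
#define ARCH_SLAB_MINALIGN 8u

/*
 * Sketch of the calculate_alignment() recipe seen in the hits:
 *  - for hardware-cacheline alignment, use the largest cache-line
 *    fraction that still fits the object size,
 *  - never go below the architecture minimum,
 *  - round the result up to a multiple of sizeof(void *).
 */
static size_t calc_align(int hwcache_align, size_t align, size_t size)
{
    if (hwcache_align) {
        size_t ralign = CACHE_LINE_SIZE;

        while (size <= ralign / 2)
            ralign /= 2;
        if (ralign > align)
            align = ralign;
    }
    if (align < ARCH_SLAB_MINALIGN)
        align = ARCH_SLAB_MINALIGN;

    return ALIGN_UP(align, sizeof(void *));
}

int main(void)
{
    /* A 20-byte object with cacheline alignment lands on a 32-byte boundary. */
    printf("%zu\n", calc_align(1, 0, 20));
    return 0;
}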
|
D | memblock.c |
    212   phys_addr_t size, phys_addr_t align, int nid,  in __memblock_find_range_bottom_up() argument
    222   cand = round_up(this_start, align);  in __memblock_find_range_bottom_up()
    247   phys_addr_t size, phys_addr_t align, int nid,  in __memblock_find_range_top_down() argument
    261   cand = round_down(this_end - size, align);  in __memblock_find_range_top_down()
    285   phys_addr_t align, phys_addr_t start,  in memblock_find_in_range_node() argument
    299   return __memblock_find_range_bottom_up(start, end, size, align,  in memblock_find_in_range_node()
    302   return __memblock_find_range_top_down(start, end, size, align,  in memblock_find_in_range_node()
    321   phys_addr_t align)  in memblock_find_in_range() argument
    327   ret = memblock_find_in_range_node(size, align, start, end,  in memblock_find_in_range()
    1328  phys_addr_t align, phys_addr_t start,  in memblock_alloc_range_nid() argument
    [all …]
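The memblock hits show the two search directions: the bottom-up walker rounds a candidate start up to the requested alignment, the top-down walker rounds (end - size) down, and memblock_find_in_range_node() picks between them. A hedged sketch of just the candidate computation, using plain integers instead of phys_addr_t and assuming power-of-two alignment (which memblock callers normally pass); the real code additionally iterates over free memblock regions.

#include <stdint.h>
#include <stdio.h>

typedef uint64_t phys_t;

/* These helpers assume a power-of-two alignment. */
static phys_t round_up_to(phys_t x, phys_t align)   { return (x + align - 1) & ~(align - 1); }
static phys_t round_down_to(phys_t x, phys_t align) { return x & ~(align - 1); }

/* Bottom-up: lowest aligned address inside [start, end) that fits size. */
static phys_t find_bottom_up(phys_t start, phys_t end, phys_t size, phys_t align)
{
    phys_t cand = round_up_to(start, align);

    return (cand + size <= end) ? cand : 0;
}

/* Top-down: highest aligned address inside [start, end) that fits size. */
static phys_t find_top_down(phys_t start, phys_t end, phys_t size, phys_t align)
{
    if (end < size)
        return 0;

    phys_t cand = round_down_to(end - size, align);

    return (cand >= start) ? cand : 0;
}

int main(void)
{
    /* 0x1000..0xC000, 0x2000 bytes, 0x4000-aligned:
     * bottom-up picks 0x4000, top-down picks 0x8000. */
    printf("%#llx %#llx\n",
           (unsigned long long)find_bottom_up(0x1000, 0xC000, 0x2000, 0x4000),
           (unsigned long long)find_top_down(0x1000, 0xC000, 0x2000, 0x4000));
    return 0;
}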
|
D | slob.c |
    237   static void *slob_page_alloc(struct page *sp, size_t size, int align,  in slob_page_alloc() argument
    254   if (align) {  in slob_page_alloc()
    256   (ALIGN((unsigned long)cur + align_offset, align)  in slob_page_alloc()
    301   static void *slob_alloc(size_t size, gfp_t gfp, int align, int node,  in slob_alloc() argument
    333   b = slob_page_alloc(sp, size, align, align_offset, &page_removed_from_list);  in slob_alloc()
    370   b = slob_page_alloc(sp, size, align, align_offset, &_unused);  in slob_alloc()
    483   int align = minalign;  in __do_kmalloc_node() local
    490   align = max_t(unsigned int, minalign, size);  in __do_kmalloc_node()
    495   m = slob_alloc(size + minalign, gfp, align, node, minalign);  in __do_kmalloc_node()
    552   unsigned int align = max_t(unsigned int,  in kfree() local
    [all …]
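The __do_kmalloc_node() hits show SLOB's kmalloc alignment scheme: the object is allocated with a minalign-sized size header in front (hence slob_alloc(size + minalign, ..., minalign) with an alignment offset), and the alignment can be bumped to the object size so kmalloc() returns naturally aligned memory. A hedged sketch of just the alignment choice; the is_power_of_2() gate is an assumption taken from the surrounding SLOB code, not visible in the hits above.

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

static bool is_power_of_2(size_t n)
{
    return n != 0 && (n & (n - 1)) == 0;
}

/*
 * Sketch: pick the alignment a SLOB-style kmalloc would request.
 * minalign covers the size header stored just before the object;
 * power-of-two sizes get natural alignment.
 */
static size_t kmalloc_align(size_t size, size_t minalign)
{
    size_t align = minalign;

    if (is_power_of_2(size) && size > minalign)
        align = size;   /* natural alignment for power-of-two sizes */

    return align;
}

int main(void)
{
    /* With an 8-byte minalign: 24 bytes stays at 8, 64 bytes gets 64. */
    printf("%zu %zu\n", kmalloc_align(24, 8), kmalloc_align(64, 8));
    return 0;
}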
|
D | vmalloc.c |
    835   unsigned long align, unsigned long vstart)  in is_within_this_va() argument
    840   nva_start_addr = ALIGN(va->va_start, align);  in is_within_this_va()
    842   nva_start_addr = ALIGN(vstart, align);  in is_within_this_va()
    859   unsigned long align, unsigned long vstart)  in find_vmap_lowest_match() argument
    869   length = size + align - 1;  in find_vmap_lowest_match()
    878   if (is_within_this_va(va, size, align, vstart))  in find_vmap_lowest_match()
    898   if (is_within_this_va(va, size, align, vstart))  in find_vmap_lowest_match()
    918   unsigned long align, unsigned long vstart)  in find_vmap_lowest_linear_match()  argument
    923   if (!is_within_this_va(va, size, align, vstart))  in find_vmap_lowest_linear_match()
    1090  __alloc_vmap_area(unsigned long size, unsigned long align,  in __alloc_vmap_area() argument
    [all …]
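In the vmalloc hits, is_within_this_va() aligns either the area's own start or the caller's lower bound before checking the fit, and find_vmap_lowest_match() searches with length = size + align - 1 so any free range at least that long is guaranteed to contain an aligned block. A small sketch of the fit check with a simplified vmap_area; the tree walk around it is omitted.

#include <stdbool.h>
#include <stdio.h>

#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((unsigned long)(a) - 1))

struct vmap_area {
    unsigned long va_start;
    unsigned long va_end;
};

/*
 * Sketch of the is_within_this_va() test: align the usable start of the
 * area (or the lower search bound, whichever is higher) and check whether
 * the aligned request still ends before va_end.
 */
static bool fits_in_va(const struct vmap_area *va, unsigned long size,
                       unsigned long align, unsigned long vstart)
{
    unsigned long nva_start_addr;

    if (va->va_start > vstart)
        nva_start_addr = ALIGN_UP(va->va_start, align);
    else
        nva_start_addr = ALIGN_UP(vstart, align);

    /* Reject overflow and requests that spill past the area's end. */
    if (nva_start_addr + size < nva_start_addr ||
        nva_start_addr + size > va->va_end)
        return false;

    return true;
}

int main(void)
{
    struct vmap_area va = { .va_start = 0x100200, .va_end = 0x110000 };

    /* A 0x1000-aligned, 0x2000-byte request fits at 0x101000. */
    printf("%d\n", fits_in_va(&va, 0x2000, 0x1000, 0x100000));
    return 0;
}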
|
D | cma.c |
    437   struct page *cma_alloc(struct cma *cma, size_t count, unsigned int align,  in cma_alloc() argument
    458   (void *)cma, count, align, gfp_mask);  in cma_alloc()
    463   trace_cma_alloc_start(cma->name, count, align);  in cma_alloc()
    465   mask = cma_bitmap_aligned_mask(cma, align);  in cma_alloc()
    466   offset = cma_bitmap_aligned_offset(cma, align);  in cma_alloc()
    542   count, align);  in cma_alloc()
    556   trace_cma_alloc_finish(cma->name, pfn, page, count, align);  in cma_alloc()
    557   trace_cma_alloc_info(cma->name, page, count, align, &cma_info);  in cma_alloc()
    577   trace_android_vh_cma_alloc_finish(cma, page, count, align, gfp_mask, ts);  in cma_alloc()
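In cma_alloc(), align is a page order: it is translated into a bitmap search mask (and an offset that compensates for an unaligned base_pfn) so that the bitmap search only returns ranges whose first page is 2^align-page aligned. A hedged sketch of the mask part only, assuming each bitmap bit covers 2^order_per_bit pages; the kernel's cma_bitmap_aligned_mask() helper does something close to this, but the sketch is illustrative rather than a copy.

#include <stdio.h>

/*
 * Each bitmap bit stands for 2^order_per_bit pages. To hand out a range
 * aligned to 2^align_order pages, the search mask forces the chosen bit
 * offset to be a multiple of 2^(align_order - order_per_bit).
 */
static unsigned long bitmap_aligned_mask(unsigned int align_order,
                                         unsigned int order_per_bit)
{
    if (align_order <= order_per_bit)
        return 0;
    return (1UL << (align_order - order_per_bit)) - 1;
}

int main(void)
{
    /* Order-4 (16-page) alignment with one page per bit: mask of 0xf. */
    printf("%#lx\n", bitmap_aligned_mask(4, 0));
    return 0;
}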
|
D | percpu.c |
    400   int align, int *bit_off, int *bits)  in pcpu_next_fit_region() argument
    419   *bits = ALIGN(block->contig_hint_start, align) -  in pcpu_next_fit_region()
    439   align);  in pcpu_next_fit_region()
    462   #define pcpu_for_each_fit_region(chunk, alloc_bits, align, bit_off, bits) \  argument
    463   for (pcpu_next_fit_region((chunk), (alloc_bits), (align), &(bit_off), \
    467   pcpu_next_fit_region((chunk), (alloc_bits), (align), &(bit_off), \
    1059  size_t align, bool pop_only)  in pcpu_find_block_fit() argument
    1070  bit_off = ALIGN(chunk_md->contig_hint_start, align) -  in pcpu_find_block_fit()
    1077  pcpu_for_each_fit_region(chunk, alloc_bits, align, bit_off, bits) {  in pcpu_find_block_fit()
    1168  size_t align, int start)  in pcpu_alloc_area() argument
    [all …]
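The percpu.c hits show the bitmap allocator's alignment trick: to decide whether a contiguous free region starting at contig_hint_start can hold an aligned allocation, it computes the skip needed to reach the next aligned offset (ALIGN(start, align) - start) and requires that skip plus the requested bits still fit. A simplified, hedged sketch of that test, detached from the chunk/block metadata.

#include <stdbool.h>
#include <stdio.h>

#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((unsigned long)(a) - 1))

/*
 * Sketch: can a free run of `contig` bits starting at bit offset `start`
 * hold `alloc_bits` bits placed at an `align`-aligned offset?
 */
static bool region_fits(unsigned long start, unsigned long contig,
                        unsigned long alloc_bits, unsigned long align)
{
    unsigned long skip = ALIGN_UP(start, align) - start;

    return contig >= skip + alloc_bits;
}

int main(void)
{
    /* A 10-bit free run at offset 5: 4 bits at 8-bit alignment fit (skip 3)... */
    printf("%d ", region_fits(5, 10, 4, 8));
    /* ...but 8 bits do not (3 + 8 > 10). */
    printf("%d\n", region_fits(5, 10, 8, 8));
    return 0;
}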
|
D | sparse-vmemmap.c |
    41    unsigned long align,  in __earlyonly_bootmem_alloc() argument
    44    return memblock_alloc_try_nid_raw(size, align, goal,  in __earlyonly_bootmem_alloc()
    93    + altmap->align;  in vmem_altmap_next_pfn()
    98    unsigned long allocated = altmap->alloc + altmap->align;  in vmem_altmap_nr_free()
    124   altmap->align += nr_align;  in altmap_alloc_block_buf()
    128   __func__, pfn, altmap->alloc, altmap->align, nr_pfns);  in altmap_alloc_block_buf()
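The sparse-vmemmap.c hits track alignment waste in the altmap: the next free pfn is derived from base + reserve + alloc + align, and when a request needs a higher alignment the skipped pages are charged to ->align rather than ->alloc. A hedged sketch of that bookkeeping; the field names mirror struct vmem_altmap, but the structure and allocator here are simplified stand-ins.

#include <stdio.h>

#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((unsigned long)(a) - 1))

struct altmap {
    unsigned long base_pfn;
    unsigned long reserve;
    unsigned long free;   /* pages available for allocation */
    unsigned long alloc;  /* pages handed out */
    unsigned long align;  /* pages lost to alignment padding */
};

static unsigned long next_pfn(const struct altmap *a)
{
    return a->base_pfn + a->reserve + a->alloc + a->align;
}

/* Allocate nr_pfns pages aligned to align_pfns pages; returns a pfn or 0. */
static unsigned long altmap_alloc(struct altmap *a, unsigned long nr_pfns,
                                  unsigned long align_pfns)
{
    unsigned long pfn = next_pfn(a);
    unsigned long nr_align = ALIGN_UP(pfn, align_pfns) - pfn;

    if (a->free - (a->alloc + a->align) < nr_pfns + nr_align)
        return 0;

    a->align += nr_align;   /* padding is accounted separately from real use */
    a->alloc += nr_pfns;
    return pfn + nr_align;
}

int main(void)
{
    struct altmap a = { .base_pfn = 0x1000, .free = 512 };

    /* The first 3-page, 4-page-aligned allocation skips nothing;
     * the second must skip one pfn to stay 4-page aligned. */
    unsigned long p1 = altmap_alloc(&a, 3, 4);
    unsigned long p2 = altmap_alloc(&a, 3, 4);

    printf("%#lx %#lx\n", p1, p2);
    return 0;
}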
|
D | dmapool.c |
    131   size_t size, size_t align, size_t boundary)  in dma_pool_create() argument
    137   if (align == 0)  in dma_pool_create()
    138   align = 1;  in dma_pool_create()
    139   else if (align & (align - 1))  in dma_pool_create()
    147   size = ALIGN(size, align);  in dma_pool_create()
    499   size_t size, size_t align, size_t allocation)  in dmam_pool_create() argument
    507   pool = *ptr = dma_pool_create(name, dev, size, align, allocation);  in dmam_pool_create()
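dma_pool_create() treats align == 0 as "no constraint" (1), rejects any alignment that is not a power of two with the align & (align - 1) test, and pads the block size up to a multiple of the alignment so every block in the pool starts aligned. A hedged sketch of just that parameter handling; the boundary/allocation checks of the real function are left out.

#include <stddef.h>
#include <stdio.h>

#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((size_t)(a) - 1))

/*
 * Sketch of the dma_pool_create() argument handling seen above:
 * returns the padded block size, or 0 for an invalid request.
 */
static size_t pool_block_size(size_t size, size_t align)
{
    if (align == 0)
        align = 1;                 /* no alignment requested */
    else if (align & (align - 1))
        return 0;                  /* not a power of two: reject */

    if (size == 0)
        return 0;

    return ALIGN_UP(size, align);  /* every block starts aligned */
}

int main(void)
{
    /* 24 bytes at 16-byte alignment pads to 32; align = 24 is rejected. */
    printf("%zu %zu\n", pool_block_size(24, 16), pool_block_size(24, 24));
    return 0;
}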
|
D | slab.h |
    23    unsigned int align; /* Alignment as calculated */  member
    127   struct kmem_cache *find_mergeable(unsigned size, unsigned align,
    131   __kmem_cache_alias(const char *name, unsigned int size, unsigned int align,
    138   __kmem_cache_alias(const char *name, unsigned int size, unsigned int align,  in __kmem_cache_alias() argument
|
D | sparse.c |
    258   unsigned long size, align;  in memory_present() local
    261   align = 1 << (INTERNODE_CACHE_SHIFT);  in memory_present()
    262   mem_section = memblock_alloc(size, align);  in memory_present()
    265   __func__, size, align);  in memory_present()
|
D | slab.c |
    1799  __kmem_cache_alias(const char *name, unsigned int size, unsigned int align,  in __kmem_cache_alias() argument
    1804  cachep = find_mergeable(size, align, flags, name, ctor);  in __kmem_cache_alias()
    1959  if (ralign < cachep->align) {  in __kmem_cache_create()
    1960  ralign = cachep->align;  in __kmem_cache_create()
    1968  cachep->align = ralign;  in __kmem_cache_create()
    1971  if (cachep->colour_off < cachep->align)  in __kmem_cache_create()
    1972  cachep->colour_off = cachep->align;  in __kmem_cache_create()
    2004  size = ALIGN(size, cachep->align);  in __kmem_cache_create()
    2010  size = ALIGN(SLAB_OBJ_MIN_SIZE, cachep->align);  in __kmem_cache_create()
|
D | nommu.c |
    153   void *__vmalloc_node_range(unsigned long size, unsigned long align,  in __vmalloc_node_range() argument
    161   void *__vmalloc_node(unsigned long size, unsigned long align, gfp_t gfp_mask,  in __vmalloc_node() argument
|
D | slub.c |
    3810  s->red_left_pad = ALIGN(s->red_left_pad, s->align);  in calculate_sizes()
    3820  size = ALIGN(size, s->align);  in calculate_sizes()
    4483  __kmem_cache_alias(const char *name, unsigned int size, unsigned int align,  in __kmem_cache_alias() argument
    4488  s = find_mergeable(size, align, flags, name, ctor);  in __kmem_cache_alias()
    5038  return sprintf(buf, "%u\n", s->align);  in align_show()
    5040  SLAB_ATTR_RO(align);
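The calculate_sizes() hits show that with redzoning the left redzone (red_left_pad) is itself rounded up to the cache alignment and the final object size is rounded to s->align, so every object in a SLUB slab starts on an aligned boundary. A hedged sketch of only that alignment-related part of the layout; poisoning, allocation tracking, and the other size components of the real function are omitted.

#include <stddef.h>
#include <stdio.h>

#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((size_t)(a) - 1))

/*
 * Sketch: pad the object layout the way the slub.c hits suggest.
 * With redzoning, both the leading redzone and the per-object size
 * are rounded up to the cache's alignment.
 */
static size_t layout_size(size_t object_size, size_t align, int redzone,
                          size_t red_left_pad)
{
    size_t size = object_size;

    if (redzone) {
        red_left_pad = ALIGN_UP(red_left_pad, align);
        size += red_left_pad;          /* left redzone before the object */
    }

    return ALIGN_UP(size, align);      /* each object starts aligned */
}

int main(void)
{
    /* A 40-byte object, 16-byte alignment, 8-byte left redzone -> 64 bytes. */
    printf("%zu\n", layout_size(40, 16, 1, 8));
    return 0;
}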
|
D | page_alloc.c |
    2060  .align = PAGES_PER_SECTION,  in deferred_init_memmap()
|
/mm/kfence/
D | kfence_test.c |
    200   return kmalloc_caches[kmalloc_type(GFP_KERNEL)][kmalloc_index(size)]->align;  in kmalloc_cache_alignment()
    439   const size_t align = kmalloc_cache_alignment(size);  in test_kmalloc_aligned_oob_read() local
    464   expect.addr = buf + size + align;  in test_kmalloc_aligned_oob_read()
|
D | core.c |
    308   meta->addr = ALIGN_DOWN(meta->addr, cache->align);  in kfence_guarded_alloc()
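The kfence core.c hit rounds a guarded object's address down to the cache's alignment when the object is placed against the end of its data page, which is why the kfence_test.c hit above expects an out-of-bounds read to fault at buf + size + align rather than immediately after the object. A hedged, KFENCE-style sketch of that right-aligned placement; the PAGE_SIZE value and the placement helper are assumptions for illustration.

#include <stdio.h>

#define PAGE_SIZE 4096UL
#define ALIGN_DOWN_TO(x, a) ((x) & ~((unsigned long)(a) - 1))

/*
 * Sketch: place an object at the end of its data page, rounding the
 * address down to the cache alignment. This can leave up to (align - 1)
 * accessible bytes between the object's end and the guard page.
 */
static unsigned long place_right_aligned(unsigned long page_base,
                                         unsigned long size,
                                         unsigned long align)
{
    unsigned long addr = page_base + PAGE_SIZE - size;

    return ALIGN_DOWN_TO(addr, align);
}

int main(void)
{
    /* A 73-byte object with 64-byte alignment: 4096 - 73 = 4023 rounds
     * down to 3968, leaving 55 bytes of slack before the guard page. */
    unsigned long addr = place_right_aligned(0, 73, 64);

    printf("offset %lu, slack %lu\n", addr, PAGE_SIZE - (addr + 73));
    return 0;
}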
|