/mm/
slab_common.c:
  282  unsigned int align, unsigned int size)  // in calculate_alignment(), argument
  297  align = max(align, ralign);  // in calculate_alignment()
  300  if (align < ARCH_SLAB_MINALIGN)  // in calculate_alignment()
  301  align = ARCH_SLAB_MINALIGN;  // in calculate_alignment()
  303  return ALIGN(align, sizeof(void *));  // in calculate_alignment()
  332  struct kmem_cache *find_mergeable(unsigned int size, unsigned int align,  // in find_mergeable(), argument
  344  align = calculate_alignment(flags, align, size);  // in find_mergeable()
  345  size = ALIGN(size, align);  // in find_mergeable()
  364  if ((s->size & ~(align - 1)) != s->size)  // in find_mergeable()
  370  if (IS_ENABLED(CONFIG_SLAB) && align &&  // in find_mergeable()
  [all …]
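The calculate_alignment() hits show the rule applied to every new cache: the requested alignment is raised to the architecture minimum and then rounded up to a multiple of sizeof(void *). Below is a minimal userspace model of just those lines, with an illustrative ARCH_SLAB_MINALIGN of 8 (the real value is per-architecture, and the real function also folds in SLAB_HWCACHE_ALIGN handling via its flags argument, elided here), plus the merge test from line 364:

    #include <assert.h>
    #include <stdio.h>

    #define ARCH_SLAB_MINALIGN 8                        /* illustrative; per-arch in the kernel */
    #define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))  /* power-of-two round-up */

    static unsigned int calculate_alignment(unsigned int align)
    {
        if (align < ARCH_SLAB_MINALIGN)
            align = ARCH_SLAB_MINALIGN;
        return ALIGN(align, sizeof(void *));
    }

    int main(void)
    {
        assert(calculate_alignment(1) == ARCH_SLAB_MINALIGN);
        assert(calculate_alignment(24) == 24);

        /* The merge test at line 364: an existing cache is only reusable
         * if its object size is already a multiple of the new alignment. */
        unsigned int s_size = 96, align = 32;
        printf("mergeable: %s\n",
               (s_size & ~(align - 1)) == s_size ? "yes" : "no");
        return 0;
    }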
slob.c:
  237  static void *slob_page_alloc(struct page *sp, size_t size, int align,  // in slob_page_alloc(), argument
  254  if (align) {  // in slob_page_alloc()
  256  (ALIGN((unsigned long)cur + align_offset, align)  // in slob_page_alloc()
  301  static void *slob_alloc(size_t size, gfp_t gfp, int align, int node,  // in slob_alloc(), argument
  333  b = slob_page_alloc(sp, size, align, align_offset, &page_removed_from_list);  // in slob_alloc()
  370  b = slob_page_alloc(sp, size, align, align_offset, &_unused);  // in slob_alloc()
  481  int align = minalign;  // in __do_kmalloc_node(), local
  488  align = max(minalign, (int) size);  // in __do_kmalloc_node()
  493  m = slob_alloc(size + minalign, gfp, align, node, minalign);  // in __do_kmalloc_node()
  548  int align = max_t(size_t, ARCH_KMALLOC_MINALIGN, ARCH_SLAB_MINALIGN);  // in kfree(), local
  [all …]
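The expression at line 256 is the interesting one: SLOB stores a small size header in front of each allocation, so it aligns the payload address (cur + align_offset) and then steps back over the header to find where the block itself must start. A sketch with made-up addresses, using a userspace stand-in for the kernel's ALIGN():

    #include <stdint.h>
    #include <stdio.h>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((uintptr_t)(a) - 1))

    int main(void)
    {
        uintptr_t cur = 0x1004;     /* current free-list position */
        uintptr_t align = 64;       /* requested payload alignment */
        uintptr_t align_offset = 8; /* size header stored before the payload */

        /* Align the payload address, then step back over the header to
         * find where the block itself must start. */
        uintptr_t block = ALIGN(cur + align_offset, align) - align_offset;

        printf("block %#lx, payload %#lx (64-byte aligned)\n",
               (unsigned long)block, (unsigned long)(block + align_offset));
        return 0;
    }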
memblock.c:
  191  phys_addr_t size, phys_addr_t align, int nid,  // in __memblock_find_range_bottom_up(), argument
  201  cand = round_up(this_start, align);  // in __memblock_find_range_bottom_up()
  226  phys_addr_t size, phys_addr_t align, int nid,  // in __memblock_find_range_top_down(), argument
  240  cand = round_down(this_end - size, align);  // in __memblock_find_range_top_down()
  272  phys_addr_t align, phys_addr_t start,  // in memblock_find_in_range_node(), argument
  300  size, align, nid, flags);  // in memblock_find_in_range_node()
  318  return __memblock_find_range_top_down(start, end, size, align, nid,  // in memblock_find_in_range_node()
  337  phys_addr_t align)  // in memblock_find_in_range(), argument
  343  ret = memblock_find_in_range_node(size, align, start, end,  // in memblock_find_in_range()
  1344  phys_addr_t align, phys_addr_t start,  // in memblock_alloc_range_nid(), argument
  [all …]
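The two search directions differ only in the rounding applied to a candidate address: bottom-up rounds the region start up to the alignment (line 201), top-down rounds end - size down (line 240). A toy model, with plain unsigned long standing in for phys_addr_t and division-based rounding helpers:

    #include <stdio.h>

    #define round_up(x, a)   ((((x) + (a) - 1) / (a)) * (a))
    #define round_down(x, a) (((x) / (a)) * (a))

    int main(void)
    {
        unsigned long start = 0x1200, end = 0x8000;
        unsigned long size = 0x1000, align = 0x1000;

        unsigned long bottom_up = round_up(start, align);        /* 0x2000 */
        unsigned long top_down  = round_down(end - size, align); /* 0x7000 */

        if (bottom_up + size <= end)
            printf("bottom-up candidate: %#lx\n", bottom_up);
        if (top_down >= start)
            printf("top-down candidate:  %#lx\n", top_down);
        return 0;
    }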
vmalloc.c:
  765  unsigned long align, unsigned long vstart)  // in is_within_this_va(), argument
  770  nva_start_addr = ALIGN(va->va_start, align);  // in is_within_this_va()
  772  nva_start_addr = ALIGN(vstart, align);  // in is_within_this_va()
  789  unsigned long align, unsigned long vstart)  // in find_vmap_lowest_match(), argument
  799  length = size + align - 1;  // in find_vmap_lowest_match()
  808  if (is_within_this_va(va, size, align, vstart))  // in find_vmap_lowest_match()
  828  if (is_within_this_va(va, size, align, vstart))  // in find_vmap_lowest_match()
  848  unsigned long align, unsigned long vstart)  // in find_vmap_lowest_linear_match(), argument
  853  if (!is_within_this_va(va, size, align, vstart))  // in find_vmap_lowest_linear_match()
  1007  __alloc_vmap_area(unsigned long size, unsigned long align,  // in __alloc_vmap_area(), argument
  [all …]
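Line 799 is a classic trick: by searching for a free area of size + align - 1 bytes, the allocator guarantees that an aligned block of size bytes fits somewhere inside it, wherever the area happens to start. A quick exhaustive check of that claim, assuming power-of-two alignment:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((uintptr_t)(a) - 1))

    int main(void)
    {
        uintptr_t size = 0x2000, align = 0x1000;

        /* Try every start offset within one alignment period: a free area
         * of size + align - 1 bytes always contains an aligned block. */
        for (uintptr_t start = 0x10000; start < 0x10000 + align; start++) {
            uintptr_t end = start + size + align - 1;
            assert(ALIGN(start, align) + size <= end);
        }
        printf("padded length always fits an aligned block\n");
        return 0;
    }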
dmapool.c:
  131  size_t size, size_t align, size_t boundary)  // in dma_pool_create(), argument
  137  if (align == 0)  // in dma_pool_create()
  138  align = 1;  // in dma_pool_create()
  139  else if (align & (align - 1))  // in dma_pool_create()
  147  if ((size % align) != 0)  // in dma_pool_create()
  148  size = ALIGN(size, align);  // in dma_pool_create()
  507  size_t size, size_t align, size_t allocation)  // in dmam_pool_create(), argument
  515  pool = *ptr = dma_pool_create(name, dev, size, align, allocation);  // in dmam_pool_create()
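Lines 137-148 are the pool's parameter validation: a zero alignment defaults to 1, a non-power-of-two alignment is rejected with the classic align & (align - 1) test, and the block size is rounded up so consecutive blocks stay aligned. A sketch of that validation (pool_params_ok() is an invented name):

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((size_t)(a) - 1))

    static bool pool_params_ok(size_t *size, size_t *align)
    {
        if (*align == 0)
            *align = 1;
        else if (*align & (*align - 1))
            return false; /* not a power of two */
        if (*size % *align)
            *size = ALIGN(*size, *align);
        return true;
    }

    int main(void)
    {
        size_t size = 100, align = 32;
        if (pool_params_ok(&size, &align))
            printf("size rounded up to %zu\n", size); /* 128 */
        return 0;
    }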
percpu.c:
  420  int align, int *bit_off, int *bits)  // in pcpu_next_fit_region(), argument
  439  *bits = ALIGN(block->contig_hint_start, align) -  // in pcpu_next_fit_region()
  459  align);  // in pcpu_next_fit_region()
  482  #define pcpu_for_each_fit_region(chunk, alloc_bits, align, bit_off, bits) \  // macro argument
  483  for (pcpu_next_fit_region((chunk), (alloc_bits), (align), &(bit_off), \
  487  pcpu_next_fit_region((chunk), (alloc_bits), (align), &(bit_off), \
  1078  size_t align, bool pop_only)  // in pcpu_find_block_fit(), argument
  1089  bit_off = ALIGN(chunk_md->contig_hint_start, align) -  // in pcpu_find_block_fit()
  1096  pcpu_for_each_fit_region(chunk, alloc_bits, align, bit_off, bits) {  // in pcpu_find_block_fit()
  1187  size_t align, int start)  // in pcpu_alloc_area(), argument
  [all …]
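The ALIGN(...contig_hint_start, align) expressions at lines 439 and 1089 account for the bits wasted when a free run in the allocation bitmap does not start on an aligned boundary. A small model of that fit test, with invented names and values:

    #include <stdbool.h>
    #include <stdio.h>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((unsigned int)(a) - 1))

    /* A free run of `len` bits starts at bit `off`; aligning the start
     * burns ALIGN(off, align) - off bits before the allocation begins. */
    static bool fits(unsigned int off, unsigned int len,
                     unsigned int alloc_bits, unsigned int align)
    {
        unsigned int skipped = ALIGN(off, align) - off;
        return len >= skipped + alloc_bits;
    }

    int main(void)
    {
        printf("%d\n", fits(5, 12, 8, 4)); /* 1: skip 3 bits, 9 remain */
        printf("%d\n", fits(5, 10, 8, 4)); /* 0: skip 3 bits, only 7 remain */
        return 0;
    }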
sparse-vmemmap.c:
  42  unsigned long align,  // in __earlyonly_bootmem_alloc(), argument
  45  return memblock_alloc_try_nid_raw(size, align, goal,  // in __earlyonly_bootmem_alloc()
  86  + altmap->align;  // in vmem_altmap_next_pfn()
  91  unsigned long allocated = altmap->alloc + altmap->align;  // in vmem_altmap_nr_free()
  124  altmap->align += nr_align;  // in altmap_alloc_block_buf()
  128  __func__, pfn, altmap->alloc, altmap->align, nr_pfns);  // in altmap_alloc_block_buf()
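The altmap lines are pure bookkeeping: pfns are handed out linearly, and pfns burned purely for alignment (align) are counted separately from real allocations (alloc). Below is a simplified stand-in for struct vmem_altmap with invented values, kept only to show the arithmetic of lines 86 and 91:

    #include <stdio.h>

    /* Simplified stand-in for struct vmem_altmap; all values invented. */
    struct altmap {
        unsigned long base_pfn, reserve, free, align, alloc;
    };

    /* Next free pfn: everything handed out so far, including alignment
     * padding, sits between base_pfn and here. */
    static unsigned long next_pfn(const struct altmap *a)
    {
        return a->base_pfn + a->reserve + a->alloc + a->align;
    }

    static unsigned long nr_free(const struct altmap *a)
    {
        return a->free - (a->alloc + a->align);
    }

    int main(void)
    {
        struct altmap a = { .base_pfn = 0x1000, .reserve = 2,
                            .free = 512, .align = 6, .alloc = 64 };
        printf("next pfn %#lx, %lu pfns left\n", next_pfn(&a), nr_free(&a));
        return 0;
    }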
cma.c:
  419  struct page *cma_alloc(struct cma *cma, size_t count, unsigned int align,  // in cma_alloc(), argument
  434  count, align);  // in cma_alloc()
  439  mask = cma_bitmap_aligned_mask(cma, align);  // in cma_alloc()
  440  offset = cma_bitmap_aligned_offset(cma, align);  // in cma_alloc()
  484  trace_cma_alloc(pfn, page, count, align);  // in cma_alloc()
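cma_bitmap_aligned_mask() at line 439 turns a page-order alignment into a bitmap search mask. The sketch below is a reconstruction under the assumption that each bitmap bit covers 2^order_per_bit pages, which is CMA's granularity; it is not the verbatim kernel function:

    #include <stdio.h>

    /* An allocation aligned to 2^align_order pages needs its bitmap
     * offset to be a multiple of 2^(align_order - order_per_bit); the
     * returned low-bit mask expresses that. */
    static unsigned long aligned_mask(unsigned int align_order,
                                      unsigned int order_per_bit)
    {
        if (align_order <= order_per_bit)
            return 0;
        return (1UL << (align_order - order_per_bit)) - 1;
    }

    int main(void)
    {
        /* 16-page alignment, one page per bit: low mask 0xf. */
        printf("mask %#lx\n", aligned_mask(4, 0));
        return 0;
    }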
slab.h:
  23  unsigned int align; /* Alignment as calculated */  // member
  167  struct kmem_cache *find_mergeable(unsigned size, unsigned align,
  171  __kmem_cache_alias(const char *name, unsigned int size, unsigned int align,
  179  __kmem_cache_alias(const char *name, unsigned int size, unsigned int align,  // in __kmem_cache_alias(), argument
sparse.c:
  264  unsigned long size, align;  // in memory_present(), local
  267  align = 1 << (INTERNODE_CACHE_SHIFT);  // in memory_present()
  268  mem_section = memblock_alloc(size, align);  // in memory_present()
  271  __func__, size, align);  // in memory_present()
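Line 267 derives the allocation alignment from a cache shift so that the mem_section array does not straddle internode cachelines. INTERNODE_CACHE_SHIFT is configuration-dependent; a typical 64-byte-line value is assumed below:

    #include <stdio.h>

    #define INTERNODE_CACHE_SHIFT 6 /* assumed: 64-byte cachelines */

    int main(void)
    {
        unsigned long align = 1UL << INTERNODE_CACHE_SHIFT;
        printf("mem_section array aligned to %lu bytes\n", align);
        return 0;
    }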
slab.c:
  1789  __kmem_cache_alias(const char *name, unsigned int size, unsigned int align,  // in __kmem_cache_alias(), argument
  1794  cachep = find_mergeable(size, align, flags, name, ctor);  // in __kmem_cache_alias()
  1949  if (ralign < cachep->align) {  // in __kmem_cache_create()
  1950  ralign = cachep->align;  // in __kmem_cache_create()
  1958  cachep->align = ralign;  // in __kmem_cache_create()
  1961  if (cachep->colour_off < cachep->align)  // in __kmem_cache_create()
  1962  cachep->colour_off = cachep->align;  // in __kmem_cache_create()
  1994  size = ALIGN(size, cachep->align);  // in __kmem_cache_create()
  2000  size = ALIGN(SLAB_OBJ_MIN_SIZE, cachep->align);  // in __kmem_cache_create()
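Lines 1949-1962 negotiate the final alignment: the effective value is the larger of the caller's request (ralign) and what the allocator already derived, and the slab colouring step is raised to at least that alignment so coloured objects stay aligned. The same logic in miniature, with invented values:

    #include <stdio.h>

    int main(void)
    {
        unsigned int ralign = 8;       /* caller's requested alignment */
        unsigned int cache_align = 32; /* value the allocator already derived */
        unsigned int colour_off = 16;  /* initial colouring step */

        /* Take the larger of the two alignments... */
        if (ralign < cache_align)
            ralign = cache_align;
        /* ...and raise the colouring step so coloured slabs keep their
         * objects aligned. */
        if (colour_off < ralign)
            colour_off = ralign;

        printf("align %u, colour_off %u\n", ralign, colour_off);
        return 0;
    }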
slub.c:
  3569  s->red_left_pad = ALIGN(s->red_left_pad, s->align);  // in calculate_sizes()
  3579  size = ALIGN(size, s->align);  // in calculate_sizes()
  4271  __kmem_cache_alias(const char *name, unsigned int size, unsigned int align,  // in __kmem_cache_alias(), argument
  4276  s = find_mergeable(size, align, flags, name, ctor);  // in __kmem_cache_alias()
  4932  return sprintf(buf, "%u\n", s->align);  // in align_show()
  4934  SLAB_ATTR_RO(align);
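Line 3579 rounds the object size itself to s->align, not just the first object's address. That matters because SLUB lays objects out back to back, so the stride between objects must itself be a multiple of the alignment. A small demonstration:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((uintptr_t)(a) - 1))

    int main(void)
    {
        uintptr_t align = 64, raw_size = 200;
        uintptr_t size = ALIGN(raw_size, align); /* 256 */
        uintptr_t base = 0x10000;                /* slab start, already aligned */

        /* If the stride were not a multiple of the alignment, only the
         * first object would be aligned. */
        for (int i = 0; i < 8; i++)
            assert((base + i * size) % align == 0);

        printf("all 8 objects aligned with stride %lu\n",
               (unsigned long)size);
        return 0;
    }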