Searched refs: align (Results 1 – 13 of 13) sorted by relevance

/mm/
nobootmem.c
35 static void * __init __alloc_memory_core_early(int nid, u64 size, u64 align, in __alloc_memory_core_early() argument
46 addr = memblock_find_in_range_node(size, align, goal, limit, nid, in __alloc_memory_core_early()
235 unsigned long align, in ___alloc_bootmem_nopanic() argument
246 ptr = __alloc_memory_core_early(NUMA_NO_NODE, size, align, goal, limit); in ___alloc_bootmem_nopanic()
272 void * __init __alloc_bootmem_nopanic(unsigned long size, unsigned long align, in __alloc_bootmem_nopanic() argument
277 return ___alloc_bootmem_nopanic(size, align, goal, limit); in __alloc_bootmem_nopanic()
280 static void * __init ___alloc_bootmem(unsigned long size, unsigned long align, in ___alloc_bootmem() argument
283 void *mem = ___alloc_bootmem_nopanic(size, align, goal, limit); in ___alloc_bootmem()
308 void * __init __alloc_bootmem(unsigned long size, unsigned long align, in __alloc_bootmem() argument
313 return ___alloc_bootmem(size, align, goal, limit); in __alloc_bootmem()
[all …]
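
The panic/nopanic split above is a deliberate layering: ___alloc_bootmem_nopanic() returns NULL on failure, and ___alloc_bootmem() wraps it and panics instead. A minimal userspace sketch of the same pattern, with hypothetical names and C11 aligned_alloc() standing in for the memblock-backed allocator:

    #include <stdio.h>
    #include <stdlib.h>

    /* Failure-tolerant variant: returns NULL on failure, mirroring
     * ___alloc_bootmem_nopanic(). Assumes 'align' is a power of two. */
    static void *my_alloc_nopanic(size_t size, size_t align)
    {
            /* C11 aligned_alloc() wants size to be a multiple of align. */
            size_t rounded = (size + align - 1) & ~(align - 1);
            return aligned_alloc(align, rounded);
    }

    /* Panicking variant: wraps the nopanic one and aborts on failure,
     * the way ___alloc_bootmem() calls panic(). */
    static void *my_alloc(size_t size, size_t align)
    {
            void *mem = my_alloc_nopanic(size, align);
            if (!mem) {
                    fprintf(stderr, "out of memory\n");
                    abort();
            }
            return mem;
    }

    int main(void)
    {
            void *p = my_alloc(100, 64);
            printf("got %p\n", p);
            free(p);
            return 0;
    }
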
bootmem.c
491 unsigned long off, unsigned long align) in align_off() argument
497 return ALIGN(base + off, align) - base; in align_off()
501 unsigned long size, unsigned long align, in alloc_bootmem_bdata() argument
509 align, goal, limit); in alloc_bootmem_bdata()
512 BUG_ON(align & (align - 1)); in alloc_bootmem_bdata()
529 step = max(align >> PAGE_SHIFT, 1UL); in alloc_bootmem_bdata()
570 start_off = align_off(bdata, bdata->last_end_off, align); in alloc_bootmem_bdata()
608 unsigned long align, in alloc_bootmem_core() argument
624 region = alloc_bootmem_bdata(bdata, size, align, goal, limit); in alloc_bootmem_core()
633 unsigned long align, in ___alloc_bootmem_nopanic() argument
[all …]
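
align_off() above aligns an offset relative to the node's physical base rather than to absolute zero, so that base plus the returned offset lands on an 'align' boundary; alloc_bootmem_bdata() also asserts the alignment is a power of two. A standalone sketch of the same computation, with ALIGN defined as in the kernel:

    #include <assert.h>
    #include <stdio.h>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((unsigned long)(a) - 1))

    /* Align 'off' so that base + off is 'align'-aligned, as align_off()
     * does against the node's base address. */
    static unsigned long align_off(unsigned long base, unsigned long off,
                                   unsigned long align)
    {
            return ALIGN(base + off, align) - base;
    }

    int main(void)
    {
            /* base 0x1003, off 5: base + off = 0x1008, the next 16-byte
             * boundary is 0x1010, so the aligned offset is 0xd. */
            unsigned long off = align_off(0x1003, 5, 16);
            assert((0x1003 + off) % 16 == 0);
            printf("aligned off = %#lx\n", off);
            return 0;
    }
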
memblock.c
125 phys_addr_t size, phys_addr_t align, int nid, in __memblock_find_range_bottom_up() argument
135 cand = round_up(this_start, align); in __memblock_find_range_bottom_up()
159 phys_addr_t size, phys_addr_t align, int nid, in __memblock_find_range_top_down() argument
173 cand = round_down(this_end - size, align); in __memblock_find_range_top_down()
196 phys_addr_t align, phys_addr_t start, in memblock_find_in_range_node() argument
208 return __memblock_find_range_bottom_up(start, end, size, align, in memblock_find_in_range_node()
211 return __memblock_find_range_top_down(start, end, size, align, in memblock_find_in_range_node()
229 phys_addr_t align) in memblock_find_in_range() argument
235 ret = memblock_find_in_range_node(size, align, start, end, in memblock_find_in_range()
1121 phys_addr_t align, phys_addr_t start, in memblock_alloc_range_nid() argument
[all …]
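
The two memblock search directions differ only in how the candidate address is derived from a free range: bottom-up rounds the range start up to the alignment, top-down rounds (end - size) down, as in the round_up()/round_down() calls above. A sketch over one free range, assuming a power-of-two align:

    #include <stdio.h>

    #define round_up(x, a)   (((x) + (a) - 1) & ~((unsigned long)(a) - 1))
    #define round_down(x, a) ((x) & ~((unsigned long)(a) - 1))

    int main(void)
    {
            unsigned long this_start = 0x1234, this_end = 0x9000;
            unsigned long size = 0x2000, align = 0x1000;

            /* Bottom-up: lowest aligned address inside the range. */
            unsigned long cand = round_up(this_start, align);
            if (cand + size <= this_end)
                    printf("bottom-up candidate: %#lx\n", cand);   /* 0x2000 */

            /* Top-down: highest aligned address that still fits 'size'. */
            cand = round_down(this_end - size, align);
            if (cand >= this_start)
                    printf("top-down candidate:  %#lx\n", cand);   /* 0x7000 */
            return 0;
    }
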
slob.c
217 static void *slob_page_alloc(struct page *sp, size_t size, int align) in slob_page_alloc() argument
225 if (align) { in slob_page_alloc()
226 aligned = (slob_t *)ALIGN((unsigned long)cur, align); in slob_page_alloc()
268 static void *slob_alloc(size_t size, gfp_t gfp, int align, int node) in slob_alloc() argument
300 b = slob_page_alloc(sp, size, align); in slob_alloc()
328 b = slob_page_alloc(sp, size, align); in slob_alloc()
430 int align = max_t(size_t, ARCH_KMALLOC_MINALIGN, ARCH_SLAB_MINALIGN); in __do_kmalloc_node() local
437 if (size < PAGE_SIZE - align) { in __do_kmalloc_node()
441 m = slob_alloc(size + align, gfp, align, node); in __do_kmalloc_node()
446 ret = (void *)m + align; in __do_kmalloc_node()
[all …]
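
In __do_kmalloc_node() above, SLOB over-allocates by 'align' bytes and stores the request size in the slack ahead of the returned pointer (ret = (void *)m + align), which is how the free path finds the size again later. A userspace sketch of that header trick, with hypothetical names and malloc() standing in for slob_alloc():

    #include <stdlib.h>

    #define MINALIGN sizeof(unsigned long)   /* stand-in for the max_t() above */

    /* Allocate size + MINALIGN, record the size in the first word, and
     * return the address just past that header. */
    static void *tiny_kmalloc(size_t size)
    {
            unsigned long *m = malloc(size + MINALIGN);
            if (!m)
                    return NULL;
            *m = size;                        /* header: object size */
            return (char *)m + MINALIGN;      /* payload starts after it */
    }

    /* Step back over the header to free the underlying allocation. */
    static void tiny_kfree(void *ptr)
    {
            if (ptr)
                    free((char *)ptr - MINALIGN);
    }

    int main(void)
    {
            char *s = tiny_kmalloc(16);
            tiny_kfree(s);
            return 0;
    }
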
slab_common.c
248 struct kmem_cache *find_mergeable(size_t size, size_t align, in find_mergeable() argument
260 align = calculate_alignment(flags, align, size); in find_mergeable()
261 size = ALIGN(size, align); in find_mergeable()
280 if ((s->size & ~(align - 1)) != s->size) in find_mergeable()
286 if (IS_ENABLED(CONFIG_SLAB) && align && in find_mergeable()
287 (align > s->align || s->align % align)) in find_mergeable()
300 unsigned long align, unsigned long size) in calculate_alignment() argument
313 align = max(align, ralign); in calculate_alignment()
316 if (align < ARCH_SLAB_MINALIGN) in calculate_alignment()
317 align = ARCH_SLAB_MINALIGN; in calculate_alignment()
[all …]
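
calculate_alignment() clamps the requested alignment to at least ARCH_SLAB_MINALIGN, and find_mergeable() refuses to merge with a cache whose object size is not a multiple of that alignment; the (s->size & ~(align - 1)) != s->size test above is exactly that multiple check done with a mask. A small demonstration:

    #include <stdio.h>

    /* Nonzero when 'size' is not a multiple of the power-of-two 'align':
     * masking off the low bits changes the value iff any were set. */
    static int size_misaligned(unsigned long size, unsigned long align)
    {
            return (size & ~(align - 1)) != size;
    }

    int main(void)
    {
            printf("%d\n", size_misaligned(96, 32));   /* 0: 96 = 3 * 32 */
            printf("%d\n", size_misaligned(96, 64));   /* 1: 96 % 64 != 0 */
            return 0;
    }
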
vmalloc.c
352 unsigned long align, in alloc_vmap_area() argument
364 BUG_ON(!is_power_of_2(align)); in alloc_vmap_area()
391 align < cached_align) { in alloc_vmap_area()
398 cached_align = align; in alloc_vmap_area()
403 addr = ALIGN(first->va_end, align); in alloc_vmap_area()
410 addr = ALIGN(vstart, align); in alloc_vmap_area()
437 addr = ALIGN(first->va_end, align); in alloc_vmap_area()
463 BUG_ON(va->va_start & (align-1)); in alloc_vmap_area()
1189 void __init vm_area_register_early(struct vm_struct *vm, size_t align) in vm_area_register_early() argument
1194 addr = ALIGN(VMALLOC_START + vm_init_off, align); in vm_area_register_early()
[all …]
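
alloc_vmap_area() BUG()s unless the alignment is a power of two, and it invalidates its search cache when the requested alignment is smaller than the one the cache was built with (the align < cached_align test above). The power-of-two test is the classic clear-the-lowest-bit check:

    #include <assert.h>

    /* Matches the kernel's is_power_of_2(): n & (n - 1) clears the lowest
     * set bit, so the result is zero only for exact powers of two. */
    static int is_power_of_2(unsigned long n)
    {
            return n != 0 && (n & (n - 1)) == 0;
    }

    int main(void)
    {
            assert(is_power_of_2(4096));
            assert(!is_power_of_2(0));
            assert(!is_power_of_2(24));
            return 0;
    }
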
dmapool.c
132 size_t size, size_t align, size_t boundary) in dma_pool_create() argument
138 if (align == 0) in dma_pool_create()
139 align = 1; in dma_pool_create()
140 else if (align & (align - 1)) in dma_pool_create()
148 if ((size % align) != 0) in dma_pool_create()
149 size = ALIGN(size, align); in dma_pool_create()
505 size_t size, size_t align, size_t allocation) in dmam_pool_create() argument
513 pool = *ptr = dma_pool_create(name, dev, size, align, allocation); in dmam_pool_create()
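
dma_pool_create() normalizes its parameters up front: a zero alignment defaults to 1, a non-power-of-two alignment is rejected, and the block size is rounded up to a multiple of the alignment so consecutive blocks stay aligned. A sketch of that fixup, with a hypothetical helper name:

    #include <stdio.h>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((size_t)(a) - 1))

    /* Returns 0 for an invalid (non-power-of-two) alignment, otherwise
     * fixes up *size and *align the way dma_pool_create() does. */
    static int fixup_pool_params(size_t *size, size_t *align)
    {
            if (*align == 0)
                    *align = 1;
            else if (*align & (*align - 1))
                    return 0;
            if ((*size % *align) != 0)
                    *size = ALIGN(*size, *align);
            return 1;
    }

    int main(void)
    {
            size_t size = 100, align = 64;
            if (fixup_pool_params(&size, &align))
                    printf("size=%zu align=%zu\n", size, align);  /* 128 64 */
            return 0;
    }
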
percpu.c
503 int size, int align, bool pop_only) in pcpu_fit_in_area() argument
508 int head = ALIGN(cand_off, align) - off; in pcpu_fit_in_area()
554 static int pcpu_alloc_area(struct pcpu_chunk *chunk, int size, int align, in pcpu_alloc_area() argument
573 head = pcpu_fit_in_area(chunk, off, this_size, size, align, in pcpu_alloc_area()
871 static void __percpu *pcpu_alloc(size_t size, size_t align, bool reserved, in pcpu_alloc() argument
887 if (unlikely(align < 2)) in pcpu_alloc()
888 align = 2; in pcpu_alloc()
892 if (unlikely(!size || size > PCPU_MIN_UNIT_SIZE || align > PAGE_SIZE)) { in pcpu_alloc()
894 size, align); in pcpu_alloc()
922 off = pcpu_alloc_area(chunk, size, align, is_atomic, in pcpu_alloc()
[all …]
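
pcpu_alloc() raises any alignment below 2 up to 2 and rejects alignments above PAGE_SIZE, while pcpu_fit_in_area() computes a 'head' of padding that brings the candidate offset up to the requested alignment before the allocation is placed. The head computation in isolation:

    #include <stdio.h>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((unsigned long)(a) - 1))

    int main(void)
    {
            /* As in pcpu_fit_in_area(): padding needed so the allocation
             * starts on an 'align' boundary within the chunk. */
            int off = 12, align = 8;
            int head = ALIGN(off, align) - off;   /* ALIGN(12, 8) = 16 -> 4 */
            printf("head padding = %d bytes\n", head);
            return 0;
    }
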
slab.h
22 unsigned int align; /* Alignment as calculated */ member
70 unsigned long align, unsigned long size);
91 struct kmem_cache *find_mergeable(size_t size, size_t align,
95 __kmem_cache_alias(const char *name, size_t size, size_t align,
103 __kmem_cache_alias(const char *name, size_t size, size_t align, in __kmem_cache_alias() argument
cma.c
379 struct page *cma_alloc(struct cma *cma, size_t count, unsigned int align) in cma_alloc() argument
392 count, align); in cma_alloc()
397 mask = cma_bitmap_aligned_mask(cma, align); in cma_alloc()
398 offset = cma_bitmap_aligned_offset(cma, align); in cma_alloc()
438 trace_cma_alloc(pfn, page, count, align); in cma_alloc()
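
In cma_alloc() the 'align' argument is an allocation order rather than a byte count: the returned pages must start on a 2^align-page boundary, which cma_bitmap_aligned_mask() and cma_bitmap_aligned_offset() translate into bitmap terms. A sketch of that order-based rounding, assuming one bitmap bit per page (an order_per_bit of 0):

    #include <stdio.h>

    int main(void)
    {
            unsigned int align = 3;                   /* 2^3 = 8-page boundary */
            unsigned long mask = (1UL << align) - 1;  /* low bits: 0x7 */
            unsigned long pfn = 21;
            unsigned long aligned = (pfn + mask) & ~mask;
            printf("mask=%#lx aligned pfn=%lu\n", mask, aligned);  /* 0x7, 24 */
            return 0;
    }
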
slab.c
468 static size_t calculate_freelist_size(int nr_objs, size_t align) in calculate_freelist_size() argument
473 if (align) in calculate_freelist_size()
474 freelist_size = ALIGN(freelist_size, align); in calculate_freelist_size()
480 size_t idx_size, size_t align) in calculate_nr_objs() argument
501 freelist_size = calculate_freelist_size(nr_objs, align); in calculate_nr_objs()
512 size_t align, int flags, size_t *left_over, in cache_estimate() argument
539 sizeof(freelist_idx_t), align); in cache_estimate()
540 mgmt_size = calculate_freelist_size(nr_objs, align); in cache_estimate()
1932 size_t size, size_t align, unsigned long flags) in calculate_slab_order() argument
1942 cache_estimate(gfporder, size, align, flags, &remainder, &num); in calculate_slab_order()
[all …]
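
calculate_freelist_size() above sizes the on-slab freelist as one index per object, padded up to the cache alignment when one is set; calculate_nr_objs() and cache_estimate() then work out how many objects plus freelist fit in a slab of a given order. A sketch of the freelist sizing, assuming a one-byte freelist_idx_t:

    #include <stdio.h>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((size_t)(a) - 1))

    typedef unsigned char freelist_idx_t;   /* assumed one byte here */

    static size_t calc_freelist_size(int nr_objs, size_t align)
    {
            size_t freelist_size = nr_objs * sizeof(freelist_idx_t);
            if (align)
                    freelist_size = ALIGN(freelist_size, align);
            return freelist_size;
    }

    int main(void)
    {
            printf("%zu\n", calc_freelist_size(21, 8));   /* 21 -> 24 */
            return 0;
    }
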
sparse-vmemmap.c
40 unsigned long align, in __earlyonly_bootmem_alloc() argument
43 return memblock_virt_alloc_try_nid(size, align, goal, in __earlyonly_bootmem_alloc()
slub.c
3389 s->red_left_pad = ALIGN(s->red_left_pad, s->align); in calculate_sizes()
3399 size = ALIGN(size, s->align); in calculate_sizes()
4071 __kmem_cache_alias(const char *name, size_t size, size_t align, in __kmem_cache_alias() argument
4076 s = find_mergeable(size, align, flags, name, ctor); in __kmem_cache_alias()
4757 return sprintf(buf, "%d\n", s->align); in align_show()
4759 SLAB_ATTR_RO(align);
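
SLUB's calculate_sizes() rounds both the red-zone padding and the final object size up to s->align, so every object in a slab starts on an aligned boundary; the align_show() attribute simply reports that value through sysfs. The rounding step in isolation:

    #include <stdio.h>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((size_t)(a) - 1))

    int main(void)
    {
            /* As in calculate_sizes(): size = ALIGN(size, s->align). */
            size_t size = 100, align = 64;
            size = ALIGN(size, align);
            printf("object size = %zu\n", size);   /* 100 -> 128 */
            return 0;
    }
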