Lines matching refs:size in mm/percpu.c (the kernel's per-cpu memory allocator)
208 static int __pcpu_size_to_slot(int size) in __pcpu_size_to_slot() argument
210 int highbit = fls(size); /* size is in bytes */ in __pcpu_size_to_slot()
214 static int pcpu_size_to_slot(int size) in pcpu_size_to_slot() argument
216 if (size == pcpu_unit_size) in pcpu_size_to_slot()
218 return __pcpu_size_to_slot(size); in pcpu_size_to_slot()
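The two helpers at lines 208-218 bucket a free-size hint into a chunk-list slot by the position of its highest set bit. Below is a minimal userspace sketch of that bucketing, assuming PCPU_SLOT_BASE_SHIFT is 5, a 32 KiB unit size and a kernel-style pcpu_nr_slots derivation; fls_int(), max_int(), size_to_slot() and chunk_slot_for() are stand-ins for the kernel's fls(), max() and the two helpers above, so treat the exact values as illustrative rather than authoritative.

/* Userspace sketch of the fls()-based size-to-slot bucketing used for the
 * per-cpu allocator's free-chunk lists.  All constants are assumptions. */
#include <stdio.h>

#define PCPU_SLOT_BASE_SHIFT 5          /* assumed: sizes 1..31 share slot 1 */

static int pcpu_unit_size = 32 << 10;   /* stand-in unit size: 32 KiB */
static int pcpu_nr_slots;               /* derived in main(), kernel-style */

static int fls_int(int x)               /* 1-based index of the highest set bit */
{
    int bit = 0;

    while (x) {
        bit++;
        x >>= 1;
    }
    return bit;
}

static int max_int(int a, int b) { return a > b ? a : b; }

static int size_to_slot(int size)
{
    int highbit = fls_int(size);        /* size is in bytes */

    return max_int(highbit - PCPU_SLOT_BASE_SHIFT + 2, 1);
}

static int chunk_slot_for(int free_size)
{
    /* a completely free chunk goes on the top slot */
    if (free_size == pcpu_unit_size)
        return pcpu_nr_slots - 1;
    return size_to_slot(free_size);
}

int main(void)
{
    int sizes[] = { 16, 64, 256, 4096, 32 << 10 };

    pcpu_nr_slots = size_to_slot(pcpu_unit_size) + 2;

    for (unsigned int i = 0; i < sizeof(sizes) / sizeof(sizes[0]); i++)
        printf("free size %6d -> slot %d\n", sizes[i], chunk_slot_for(sizes[i]));
    return 0;
}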
297 static void *pcpu_mem_zalloc(size_t size) in pcpu_mem_zalloc() argument
302 if (size <= PAGE_SIZE) in pcpu_mem_zalloc()
303 return kzalloc(size, GFP_KERNEL); in pcpu_mem_zalloc()
305 return vzalloc(size); in pcpu_mem_zalloc()
315 static void pcpu_mem_free(void *ptr, size_t size) in pcpu_mem_free() argument
317 if (size <= PAGE_SIZE) in pcpu_mem_free()
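pcpu_mem_zalloc() and pcpu_mem_free() (lines 297-317) keep the allocator's own metadata allocations simple: anything that fits in one page comes from the slab allocator, anything larger from vmalloc, and the caller hands the size back in on free so the matching release path can be chosen. A userspace analogue of that threshold decision, with calloc()/free() standing in for kzalloc()/vzalloc() and kfree()/vfree() so the sketch actually runs:

#include <stdio.h>
#include <stdlib.h>

#define PAGE_SIZE 4096UL                /* assumed page size */

static void *mem_zalloc(size_t size)
{
    if (size <= PAGE_SIZE) {
        printf("%zu bytes: kzalloc() path\n", size);
        return calloc(1, size);         /* kzalloc(size, GFP_KERNEL) in-kernel */
    }
    printf("%zu bytes: vzalloc() path\n", size);
    return calloc(1, size);             /* vzalloc(size) in-kernel */
}

static void mem_free(void *ptr, size_t size)
{
    if (size <= PAGE_SIZE)
        free(ptr);                      /* kfree(ptr) in-kernel */
    else
        free(ptr);                      /* vfree(ptr) in-kernel */
}

int main(void)
{
    void *small = mem_zalloc(128);
    void *large = mem_zalloc(8 * PAGE_SIZE);

    mem_free(small, 128);
    mem_free(large, 8 * PAGE_SIZE);
    return 0;
}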
502 int size, int align, bool pop_only) in pcpu_fit_in_area() argument
510 if (this_size < head + size) in pcpu_fit_in_area()
522 page_end = PFN_UP(head + off + size); in pcpu_fit_in_area()
553 static int pcpu_alloc_area(struct pcpu_chunk *chunk, int size, int align, in pcpu_alloc_area() argument
572 head = pcpu_fit_in_area(chunk, off, this_size, size, align, in pcpu_alloc_area()
600 tail = this_size - head - size; in pcpu_alloc_area()
603 size = this_size - head; in pcpu_alloc_area()
625 p[1] = off + size; in pcpu_alloc_area()
640 chunk->free_size -= size; in pcpu_alloc_area()
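pcpu_fit_in_area() and pcpu_alloc_area() (lines 502-640) carve an allocation out of a candidate free area by computing an alignment head, checking that head + size still fits, and leaving whatever remains as a tail; the chunk's free_size then shrinks by what was taken (line 640). The sketch below models only that head/tail arithmetic; the area-map bookkeeping, the rules that can fold a tiny tail back into the allocation (lines 600-603), the page-boundary checks and the pop_only/atomic handling are deliberately left out, and ALIGN_UP()/struct fit are illustrative names.

#include <stdio.h>

#define ALIGN_UP(x, a)  (((x) + (a) - 1) & ~((a) - 1))

struct fit {
    int head;   /* padding before the allocation, for alignment */
    int tail;   /* free bytes left after the allocation */
};

/* Returns 0 and fills *f if `size` bytes aligned to `align` fit into a free
 * area of `this_size` bytes starting at offset `off`, else -1. */
static int fit_in_area(int off, int this_size, int size, int align,
                       struct fit *f)
{
    int head = ALIGN_UP(off, align) - off;

    if (this_size < head + size)
        return -1;

    f->head = head;
    f->tail = this_size - head - size;
    return 0;
}

int main(void)
{
    struct fit f;

    /* free area of 96 bytes at offset 20; want 32 bytes aligned to 16 */
    if (fit_in_area(20, 96, 32, 16, &f) == 0)
        printf("head=%d tail=%d alloc at off=%d\n",
               f.head, f.tail, 20 + f.head);
    return 0;
}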
816 static int pcpu_populate_chunk(struct pcpu_chunk *chunk, int off, int size);
817 static void pcpu_depopulate_chunk(struct pcpu_chunk *chunk, int off, int size);
870 static void __percpu *pcpu_alloc(size_t size, size_t align, bool reserved, in pcpu_alloc() argument
889 size = ALIGN(size, 2); in pcpu_alloc()
891 if (unlikely(!size || size > PCPU_MIN_UNIT_SIZE || align > PAGE_SIZE)) { in pcpu_alloc()
893 "percpu allocation\n", size, align); in pcpu_alloc()
906 if (size > chunk->contig_hint) { in pcpu_alloc()
921 off = pcpu_alloc_area(chunk, size, align, is_atomic, in pcpu_alloc()
932 for (slot = pcpu_size_to_slot(size); slot < pcpu_nr_slots; slot++) { in pcpu_alloc()
934 if (size > chunk->contig_hint) in pcpu_alloc()
955 off = pcpu_alloc_area(chunk, size, align, is_atomic, in pcpu_alloc()
995 page_end = PFN_UP(off + size); in pcpu_alloc()
1026 memset((void *)pcpu_chunk_addr(chunk, cpu, 0) + off, 0, size); in pcpu_alloc()
1029 kmemleak_alloc_percpu(ptr, size, gfp); in pcpu_alloc()
1037 size, align, is_atomic, err); in pcpu_alloc()
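pcpu_alloc() (lines 870-1037) is the common entry point: it rounds the request up to 2 bytes, rejects zero-sized, oversized or over-aligned requests with a warning, and then scans the chunk slots starting at pcpu_size_to_slot(size), skipping any chunk whose contig_hint already shows the request cannot fit before calling pcpu_alloc_area(). On success it may populate backing pages (line 995), zeroes the new area in every CPU's unit (line 1026) and registers the allocation with kmemleak (line 1029). The toy model below covers only the front-end checks and the slot scan; struct chunk, toy_pcpu_alloc() and the constants are made-up stand-ins.

#include <stdio.h>
#include <stddef.h>

#define PAGE_SIZE          4096
#define PCPU_MIN_UNIT_SIZE (32 << 10)   /* assumed minimum unit size */

struct chunk {
    int slot;           /* which free-size slot the chunk currently sits on */
    int contig_hint;    /* largest contiguous free area, in bytes */
};

/* Toy stand-in for the per-slot chunk lists. */
static struct chunk chunks[] = {
    { .slot = 3, .contig_hint = 200 },
    { .slot = 5, .contig_hint = 1500 },
    { .slot = 9, .contig_hint = 12000 },
};

static struct chunk *toy_pcpu_alloc(size_t size, size_t align, int start_slot)
{
    size = (size + 1) & ~(size_t)1;     /* size = ALIGN(size, 2) */

    if (!size || size > PCPU_MIN_UNIT_SIZE || align > PAGE_SIZE) {
        fprintf(stderr, "illegal size (%zu) or align (%zu)\n", size, align);
        return NULL;
    }

    /* scan from the slot matching the request toward larger slots
     * (16 is a toy upper bound standing in for pcpu_nr_slots) */
    for (int slot = start_slot; slot < 16; slot++)
        for (size_t i = 0; i < sizeof(chunks) / sizeof(chunks[0]); i++)
            if (chunks[i].slot == slot && size <= chunks[i].contig_hint)
                return &chunks[i];

    return NULL;        /* the real allocator would create a new chunk here */
}

int main(void)
{
    struct chunk *c = toy_pcpu_alloc(1024, 8, 4);

    if (c)
        printf("picked chunk on slot %d (contig_hint=%d)\n",
               c->slot, c->contig_hint);
    return 0;
}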
1065 void __percpu *__alloc_percpu_gfp(size_t size, size_t align, gfp_t gfp) in __alloc_percpu_gfp() argument
1067 return pcpu_alloc(size, align, false, gfp); in __alloc_percpu_gfp()
1078 void __percpu *__alloc_percpu(size_t size, size_t align) in __alloc_percpu() argument
1080 return pcpu_alloc(size, align, false, GFP_KERNEL); in __alloc_percpu()
1100 void __percpu *__alloc_reserved_percpu(size_t size, size_t align) in __alloc_reserved_percpu() argument
1102 return pcpu_alloc(size, align, true, GFP_KERNEL); in __alloc_reserved_percpu()
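The exported wrappers at lines 1065-1102 are thin: __alloc_percpu_gfp() and __alloc_percpu() request dynamic per-cpu space, __alloc_reserved_percpu() draws from the reserved first-chunk region, and all three simply forward to pcpu_alloc(). Below is a kernel-side usage sketch (not a standalone program) of how a caller normally reaches them through the alloc_percpu() macro; struct hit_counter and the function names are illustrative only.

/* Kernel-side usage sketch: alloc_percpu() expands to __alloc_percpu() with
 * the type's size and alignment, which lands in pcpu_alloc(..., false,
 * GFP_KERNEL).  Illustrative, not taken from the source above. */
#include <linux/percpu.h>
#include <linux/errno.h>

struct hit_counter {
    unsigned long hits;
};

static struct hit_counter __percpu *counters;

static int counters_init(void)
{
    counters = alloc_percpu(struct hit_counter);
    if (!counters)
        return -ENOMEM;
    return 0;
}

static unsigned long counters_sum(void)
{
    unsigned long sum = 0;
    int cpu;

    /* read every CPU's copy of the counter */
    for_each_possible_cpu(cpu)
        sum += per_cpu_ptr(counters, cpu)->hits;
    return sum;
}

static void counters_exit(void)
{
    free_percpu(counters);
}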
2139 vm.size = num_possible_cpus() * ai->unit_size; in pcpu_page_first_chunk()
2202 static void * __init pcpu_dfl_fc_alloc(unsigned int cpu, size_t size, in pcpu_dfl_fc_alloc() argument
2206 size, align, __pa(MAX_DMA_ADDRESS)); in pcpu_dfl_fc_alloc()
2209 static void __init pcpu_dfl_fc_free(void *ptr, size_t size) in pcpu_dfl_fc_free() argument
2211 memblock_free_early(__pa(ptr), size); in pcpu_dfl_fc_free()
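Lines 2139-2211 belong to first-chunk bootstrapping: pcpu_page_first_chunk() sizes its vmalloc area as num_possible_cpus() * ai->unit_size, and pcpu_dfl_fc_alloc()/pcpu_dfl_fc_free() are the default callbacks the generic setup code uses to obtain and release early memory from memblock, with __pa(MAX_DMA_ADDRESS) as the allocation goal. The userspace sketch below shows only the callback shape; fc_alloc_fn_t, fc_free_fn_t, toy_first_chunk() and all constants are stand-ins, with calloc()/free() taking the place of the memblock calls.

#include <stdio.h>
#include <stdlib.h>

typedef void *(*fc_alloc_fn_t)(unsigned int cpu, size_t size, size_t align);
typedef void  (*fc_free_fn_t)(void *ptr, size_t size);

static void *dfl_fc_alloc(unsigned int cpu, size_t size, size_t align)
{
    (void)cpu;
    (void)align;                  /* malloc alignment is enough for the toy */
    return calloc(1, size);       /* the kernel pulls this from memblock */
}

static void dfl_fc_free(void *ptr, size_t size)
{
    (void)size;
    free(ptr);                    /* memblock_free_early(__pa(ptr), size) */
}

/* Allocate one unit per "CPU" through the callbacks, then release them. */
static void toy_first_chunk(unsigned int nr_cpus, size_t unit_size,
                            fc_alloc_fn_t alloc_fn, fc_free_fn_t free_fn)
{
    for (unsigned int cpu = 0; cpu < nr_cpus; cpu++) {
        void *unit = alloc_fn(cpu, unit_size, 64);

        printf("cpu %u: unit of %zu bytes at %p\n", cpu, unit_size, unit);
        free_fn(unit, unit_size);
    }
}

int main(void)
{
    toy_first_chunk(4, 16 << 10, dfl_fc_alloc, dfl_fc_free);
    return 0;
}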
2291 const size_t size = PERCPU_DYNAMIC_EARLY_SLOTS * sizeof(map[0]); in percpu_init_late() local
2293 BUILD_BUG_ON(size > PAGE_SIZE); in percpu_init_late()
2295 map = pcpu_mem_zalloc(size); in percpu_init_late()
2299 memcpy(map, chunk->map, size); in percpu_init_late()
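percpu_init_late() (lines 2291-2299) runs once the regular allocators are available and copies each early chunk's statically sized area map into memory obtained from pcpu_mem_zalloc(); the BUILD_BUG_ON at line 2293 keeps that copy within a page, i.e. on the kzalloc() side of pcpu_mem_zalloc(). A userspace sketch of that hand-over, where EARLY_SLOTS, struct toy_chunk and the 128-entry map are illustrative stand-ins:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define EARLY_SLOTS 128                     /* PERCPU_DYNAMIC_EARLY_SLOTS stand-in */

struct toy_chunk {
    int *map;                               /* points at static storage early on */
};

static int early_map[EARLY_SLOTS];          /* build-time map used before init */

int main(void)
{
    struct toy_chunk chunk = { .map = early_map };
    const size_t size = EARLY_SLOTS * sizeof(chunk.map[0]);

    int *map = calloc(1, size);             /* pcpu_mem_zalloc(size) in-kernel */
    if (!map)
        return 1;

    memcpy(map, chunk.map, size);           /* take over the early map */
    chunk.map = map;

    printf("map migrated to %p (%zu bytes)\n", (void *)chunk.map, size);
    free(map);
    return 0;
}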