Lines Matching full:ai
1542 static int __init pcpu_verify_alloc_info(const struct pcpu_alloc_info *ai);
2240 * Allocate ai which is large enough for @nr_groups groups containing
2241 * @nr_units units. The returned ai's groups[0].cpu_map points to the
2253 struct pcpu_alloc_info *ai; in pcpu_alloc_alloc_info() local
2258 base_size = ALIGN(struct_size(ai, groups, nr_groups), in pcpu_alloc_alloc_info()
2259 __alignof__(ai->groups[0].cpu_map[0])); in pcpu_alloc_alloc_info()
2260 ai_size = base_size + nr_units * sizeof(ai->groups[0].cpu_map[0]); in pcpu_alloc_alloc_info()
2265 ai = ptr; in pcpu_alloc_alloc_info()
2268 ai->groups[0].cpu_map = ptr; in pcpu_alloc_alloc_info()
2271 ai->groups[0].cpu_map[unit] = NR_CPUS; in pcpu_alloc_alloc_info()
2273 ai->nr_groups = nr_groups; in pcpu_alloc_alloc_info()
2274 ai->__ai_size = PFN_ALIGN(ai_size); in pcpu_alloc_alloc_info()
2276 return ai; in pcpu_alloc_alloc_info()
2281 * @ai: pcpu_alloc_info to free
2283 * Free @ai which was allocated by pcpu_alloc_alloc_info().
2285 void __init pcpu_free_alloc_info(struct pcpu_alloc_info *ai) in pcpu_free_alloc_info() argument
2287 memblock_free_early(__pa(ai), ai->__ai_size); in pcpu_free_alloc_info()
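For orientation, a minimal userspace sketch of the sizing and layout logic matched above, assuming calloc in place of the memblock-backed early allocator and hand-rolled ALIGN_UP/PAGE_SIZE/NR_CPUS stand-ins; the struct and helper names (alloc_info, group_info, alloc_info_new) are illustrative, not the kernel's.

#include <stdlib.h>
#include <stddef.h>

#define NR_CPUS   64		/* assumed; config dependent in the kernel */
#define PAGE_SIZE 4096UL
#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((size_t)(a) - 1))

struct group_info {
	int nr_units;
	size_t base_offset;
	unsigned int *cpu_map;
};

struct alloc_info {
	size_t static_size, reserved_size, dyn_size;
	size_t unit_size, atom_size, alloc_size;
	size_t __size;			/* whole-block footprint, like __ai_size */
	int nr_groups;
	struct group_info groups[];	/* flexible array, like the kernel struct */
};

/* One block holds the struct, its flexible groups[] array and a shared
 * cpu_map[] tail; groups[0].cpu_map points at that tail and every slot
 * starts as NR_CPUS ("no cpu"), mirroring pcpu_alloc_alloc_info(). */
static struct alloc_info *alloc_info_new(int nr_groups, int nr_units)
{
	size_t base = ALIGN_UP(sizeof(struct alloc_info) +
			       nr_groups * sizeof(struct group_info),
			       _Alignof(unsigned int));
	size_t total = ALIGN_UP(base + nr_units * sizeof(unsigned int), PAGE_SIZE);
	struct alloc_info *ai = calloc(1, total);

	if (!ai)
		return NULL;
	ai->groups[0].cpu_map = (unsigned int *)((char *)ai + base);
	for (int unit = 0; unit < nr_units; unit++)
		ai->groups[0].cpu_map[unit] = NR_CPUS;
	ai->nr_groups = nr_groups;
	ai->__size = total;
	return ai;
}

The matching teardown in this sketch is just free(ai); the kernel instead hands the page-aligned block back via memblock_free_early(__pa(ai), ai->__ai_size), as the pcpu_free_alloc_info() line above shows.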
2293 * @ai: allocation info to dump
2295 * Print out information about @ai using loglevel @lvl.
2298 const struct pcpu_alloc_info *ai) in pcpu_dump_alloc_info() argument
2306 v = ai->nr_groups; in pcpu_dump_alloc_info()
2315 upa = ai->alloc_size / ai->unit_size; in pcpu_dump_alloc_info()
2320 lvl, ai->static_size, ai->reserved_size, ai->dyn_size, in pcpu_dump_alloc_info()
2321 ai->unit_size, ai->alloc_size / ai->atom_size, ai->atom_size); in pcpu_dump_alloc_info()
2323 for (group = 0; group < ai->nr_groups; group++) { in pcpu_dump_alloc_info()
2324 const struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_dump_alloc_info()
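As a hedged worked example with assumed numbers: if ai->alloc_size is 2 MiB (one PMD-sized atom) and ai->unit_size is 512 KiB, then upa = 2 MiB / 512 KiB = 4, so the dump wraps the per-unit cpu listing roughly four entries to a row and reports alloc_size / atom_size = 1 allocation per atom alongside the static, reserved and dynamic sizes.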
2349  * @ai: pcpu_alloc_info describing how the percpu area is shaped
2356 * @ai contains all information necessary to initialize the first
2359 * @ai->static_size is the size of static percpu area.
2361 * @ai->reserved_size, if non-zero, specifies the amount of bytes to
2369 * @ai->dyn_size determines the number of bytes available for dynamic
2370 * allocation in the first chunk. The area between @ai->static_size +
2371 * @ai->reserved_size + @ai->dyn_size and @ai->unit_size is unused.
2373 * @ai->unit_size specifies unit size and must be aligned to PAGE_SIZE
2374 * and equal to or larger than @ai->static_size + @ai->reserved_size +
2375 * @ai->dyn_size.
2377 * @ai->atom_size is the allocation atom size and used as alignment
2380 * @ai->alloc_size is the allocation size and always multiple of
2381 * @ai->atom_size. This is larger than @ai->atom_size if
2382 * @ai->unit_size is larger than @ai->atom_size.
2384 * @ai->nr_groups and @ai->groups describe virtual memory layout of
2387 * groupings. If @ai->nr_groups is zero, a single group containing
2401 void __init pcpu_setup_first_chunk(const struct pcpu_alloc_info *ai, in pcpu_setup_first_chunk() argument
2404 size_t size_sum = ai->static_size + ai->reserved_size + ai->dyn_size; in pcpu_setup_first_chunk()
2423 pcpu_dump_alloc_info(KERN_EMERG, ai); \ in pcpu_setup_first_chunk()
2429 PCPU_SETUP_BUG_ON(ai->nr_groups <= 0); in pcpu_setup_first_chunk()
2431 PCPU_SETUP_BUG_ON(!ai->static_size); in pcpu_setup_first_chunk()
2436 PCPU_SETUP_BUG_ON(ai->unit_size < size_sum); in pcpu_setup_first_chunk()
2437 PCPU_SETUP_BUG_ON(offset_in_page(ai->unit_size)); in pcpu_setup_first_chunk()
2438 PCPU_SETUP_BUG_ON(ai->unit_size < PCPU_MIN_UNIT_SIZE); in pcpu_setup_first_chunk()
2439 PCPU_SETUP_BUG_ON(!IS_ALIGNED(ai->unit_size, PCPU_BITMAP_BLOCK_SIZE)); in pcpu_setup_first_chunk()
2440 PCPU_SETUP_BUG_ON(ai->dyn_size < PERCPU_DYNAMIC_EARLY_SIZE); in pcpu_setup_first_chunk()
2441 PCPU_SETUP_BUG_ON(!ai->dyn_size); in pcpu_setup_first_chunk()
2442 PCPU_SETUP_BUG_ON(!IS_ALIGNED(ai->reserved_size, PCPU_MIN_ALLOC_SIZE)); in pcpu_setup_first_chunk()
2445 PCPU_SETUP_BUG_ON(pcpu_verify_alloc_info(ai) < 0); in pcpu_setup_first_chunk()
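A minimal sketch of the size contract spelled out in the comment above, reusing the illustrative struct alloc_info from the earlier sketch; it restates the documented invariants and the PCPU_SETUP_BUG_ON() checks just listed, it is not the kernel's own verification code.

#include <stdbool.h>
#include <stddef.h>

/* Assumes struct alloc_info and PAGE_SIZE from the earlier sketch. */
static bool alloc_info_sane(const struct alloc_info *ai)
{
	size_t size_sum = ai->static_size + ai->reserved_size + ai->dyn_size;

	return ai->nr_groups > 0 &&
	       ai->static_size > 0 &&			/* a static area must exist */
	       ai->dyn_size > 0 &&
	       ai->unit_size >= size_sum &&		/* each unit holds all three areas */
	       ai->unit_size % PAGE_SIZE == 0 &&	/* page-aligned units */
	       ai->alloc_size % ai->atom_size == 0 &&	/* whole multiple of the atom */
	       ai->alloc_size >= ai->unit_size;
}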
2448 alloc_size = ai->nr_groups * sizeof(group_offsets[0]); in pcpu_setup_first_chunk()
2454 alloc_size = ai->nr_groups * sizeof(group_sizes[0]); in pcpu_setup_first_chunk()
2478 for (group = 0, unit = 0; group < ai->nr_groups; group++, unit += i) { in pcpu_setup_first_chunk()
2479 const struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_setup_first_chunk()
2482 group_sizes[group] = gi->nr_units * ai->unit_size; in pcpu_setup_first_chunk()
2494 unit_off[cpu] = gi->base_offset + i * ai->unit_size; in pcpu_setup_first_chunk()
2512 pcpu_dump_alloc_info(KERN_DEBUG, ai); in pcpu_setup_first_chunk()
2514 pcpu_nr_groups = ai->nr_groups; in pcpu_setup_first_chunk()
2521 pcpu_unit_pages = ai->unit_size >> PAGE_SHIFT; in pcpu_setup_first_chunk()
2523 pcpu_atom_size = ai->atom_size; in pcpu_setup_first_chunk()
2527 pcpu_stats_save_ai(ai); in pcpu_setup_first_chunk()
2555 static_size = ALIGN(ai->static_size, PCPU_MIN_ALLOC_SIZE); in pcpu_setup_first_chunk()
2556 dyn_size = ai->dyn_size - (static_size - ai->static_size); in pcpu_setup_first_chunk()
2567 map_size = ai->reserved_size ?: dyn_size; in pcpu_setup_first_chunk()
2571 if (ai->reserved_size) { in pcpu_setup_first_chunk()
2575 ai->reserved_size; in pcpu_setup_first_chunk()
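A hedged standalone example of the two computations above, with made-up sizes: each CPU's offset is the group's base_offset plus its unit index times unit_size, and rounding static_size up to PCPU_MIN_ALLOC_SIZE shrinks the dynamic area by the same amount. The value 4 for PCPU_MIN_ALLOC_SIZE is assumed here, not taken from the listing.

#include <stdio.h>
#include <stddef.h>

#define PCPU_MIN_ALLOC_SIZE 4	/* assumed; arch/config dependent in the kernel */

int main(void)
{
	/* Assumed example layout: one group, base_offset 0, 64 KiB units. */
	size_t unit_size = 64 * 1024, base_offset = 0;
	size_t static_size = 10001, reserved_size = 8192, dyn_size = 28 * 1024;

	/* unit_off[cpu] = gi->base_offset + i * ai->unit_size */
	for (int i = 0; i < 4; i++)
		printf("unit %d starts at offset %zu\n", i,
		       base_offset + i * unit_size);

	/* Rounding static_size up eats into the dynamic area, as in the code above. */
	size_t aligned_static = (static_size + PCPU_MIN_ALLOC_SIZE - 1) /
				PCPU_MIN_ALLOC_SIZE * PCPU_MIN_ALLOC_SIZE;
	size_t first_dyn = dyn_size - (aligned_static - static_size);
	size_t map_size = reserved_size ? reserved_size : first_dyn;

	printf("aligned static %zu, dyn %zu, first chunk map_size %zu\n",
	       aligned_static, first_dyn, map_size);
	return 0;
}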
2678 struct pcpu_alloc_info *ai; in pcpu_build_alloc_info() local
2762 ai = pcpu_alloc_alloc_info(nr_groups, nr_units); in pcpu_build_alloc_info()
2763 if (!ai) in pcpu_build_alloc_info()
2765 cpu_map = ai->groups[0].cpu_map; in pcpu_build_alloc_info()
2768 ai->groups[group].cpu_map = cpu_map; in pcpu_build_alloc_info()
2772 ai->static_size = static_size; in pcpu_build_alloc_info()
2773 ai->reserved_size = reserved_size; in pcpu_build_alloc_info()
2774 ai->dyn_size = dyn_size; in pcpu_build_alloc_info()
2775 ai->unit_size = alloc_size / upa; in pcpu_build_alloc_info()
2776 ai->atom_size = atom_size; in pcpu_build_alloc_info()
2777 ai->alloc_size = alloc_size; in pcpu_build_alloc_info()
2780 struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_build_alloc_info()
2787 gi->base_offset = unit * ai->unit_size; in pcpu_build_alloc_info()
2797 return ai; in pcpu_build_alloc_info()
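A minimal sketch of how the groups slice up the single trailing cpu_map array and receive their base offsets, reusing the illustrative types from the first sketch; the real pcpu_build_alloc_info() also derives upa and group membership from CPU distances, which is omitted here.

/* Assumes struct alloc_info / struct group_info and alloc_info_new()
 * from the sketch near pcpu_alloc_alloc_info() above. */
static void fill_groups(struct alloc_info *ai, size_t unit_size,
			const int *units_per_group)
{
	unsigned int *cpu_map = ai->groups[0].cpu_map;
	int unit = 0;

	for (int g = 0; g < ai->nr_groups; g++) {
		struct group_info *gi = &ai->groups[g];

		gi->cpu_map = cpu_map;			/* slice of the shared tail array */
		gi->nr_units = units_per_group[g];
		gi->base_offset = unit * unit_size;	/* groups laid out back to back */
		cpu_map += gi->nr_units;
		unit += gi->nr_units;
	}
	ai->unit_size = unit_size;
}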
2842 struct pcpu_alloc_info *ai; in pcpu_embed_first_chunk() local
2847 ai = pcpu_build_alloc_info(reserved_size, dyn_size, atom_size, in pcpu_embed_first_chunk()
2849 if (IS_ERR(ai)) in pcpu_embed_first_chunk()
2850 return PTR_ERR(ai); in pcpu_embed_first_chunk()
2852 size_sum = ai->static_size + ai->reserved_size + ai->dyn_size; in pcpu_embed_first_chunk()
2853 areas_size = PFN_ALIGN(ai->nr_groups * sizeof(void *)); in pcpu_embed_first_chunk()
2863 for (group = 0; group < ai->nr_groups; group++) { in pcpu_embed_first_chunk()
2864 struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_embed_first_chunk()
2873 ptr = alloc_fn(cpu, gi->nr_units * ai->unit_size, atom_size); in pcpu_embed_first_chunk()
2887 max_distance += ai->unit_size * ai->groups[highest_group].nr_units; in pcpu_embed_first_chunk()
2905 for (group = 0; group < ai->nr_groups; group++) { in pcpu_embed_first_chunk()
2906 struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_embed_first_chunk()
2909 for (i = 0; i < gi->nr_units; i++, ptr += ai->unit_size) { in pcpu_embed_first_chunk()
2912 free_fn(ptr, ai->unit_size); in pcpu_embed_first_chunk()
2916 memcpy(ptr, __per_cpu_load, ai->static_size); in pcpu_embed_first_chunk()
2917 free_fn(ptr + size_sum, ai->unit_size - size_sum); in pcpu_embed_first_chunk()
2922 for (group = 0; group < ai->nr_groups; group++) { in pcpu_embed_first_chunk()
2923 ai->groups[group].base_offset = areas[group] - base; in pcpu_embed_first_chunk()
2927 PFN_DOWN(size_sum), ai->static_size, ai->reserved_size, in pcpu_embed_first_chunk()
2928 ai->dyn_size, ai->unit_size); in pcpu_embed_first_chunk()
2930 pcpu_setup_first_chunk(ai, base); in pcpu_embed_first_chunk()
2934 for (group = 0; group < ai->nr_groups; group++) in pcpu_embed_first_chunk()
2937 ai->groups[group].nr_units * ai->unit_size); in pcpu_embed_first_chunk()
2939 pcpu_free_alloc_info(ai); in pcpu_embed_first_chunk()
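A hedged userspace analogue of the per-group copy step above: one allocation of nr_units * unit_size per group, a copy of the static image into every unit, and a tail beyond static + reserved + dynamic that the kernel returns to the early allocator. aligned_alloc and memcpy stand in for the arch-supplied alloc_fn and for copying from __per_cpu_load; this is not the kernel interface, and unit_size is assumed page aligned as the kernel requires.

#include <stdlib.h>
#include <string.h>

/* Lay one group's units out in a single allocation and seed each unit
 * with the static per-cpu image, as the loop above does per group. */
static void *embed_group(int nr_units, size_t unit_size,
			 const void *static_image, size_t static_size,
			 size_t reserved_size, size_t dyn_size)
{
	size_t size_sum = static_size + reserved_size + dyn_size;
	char *base = aligned_alloc(4096, nr_units * unit_size);

	if (!base)
		return NULL;
	for (int i = 0; i < nr_units; i++) {
		char *unit = base + (size_t)i * unit_size;

		memcpy(unit, static_image, static_size);
		/* The kernel gives [size_sum, unit_size) of each unit back to
		 * the early allocator via free_fn(); userspace malloc cannot
		 * free a sub-range, so the sketch leaves that tail in place. */
	}
	(void)size_sum;
	return base;
}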
2969 struct pcpu_alloc_info *ai; in pcpu_page_first_chunk() local
2980 ai = pcpu_build_alloc_info(reserved_size, 0, PAGE_SIZE, NULL); in pcpu_page_first_chunk()
2981 if (IS_ERR(ai)) in pcpu_page_first_chunk()
2982 return PTR_ERR(ai); in pcpu_page_first_chunk()
2983 BUG_ON(ai->nr_groups != 1); in pcpu_page_first_chunk()
2984 upa = ai->alloc_size / ai->unit_size; in pcpu_page_first_chunk()
2986 if (WARN_ON(ai->groups[0].nr_units != nr_g0_units)) { in pcpu_page_first_chunk()
2987 pcpu_free_alloc_info(ai); in pcpu_page_first_chunk()
2991 unit_pages = ai->unit_size >> PAGE_SHIFT; in pcpu_page_first_chunk()
3004 unsigned int cpu = ai->groups[0].cpu_map[unit]; in pcpu_page_first_chunk()
3022 vm.size = num_possible_cpus() * ai->unit_size; in pcpu_page_first_chunk()
3027 (unsigned long)vm.addr + unit * ai->unit_size; in pcpu_page_first_chunk()
3047 memcpy((void *)unit_addr, __per_cpu_load, ai->static_size); in pcpu_page_first_chunk()
3052 unit_pages, psize_str, ai->static_size, in pcpu_page_first_chunk()
3053 ai->reserved_size, ai->dyn_size); in pcpu_page_first_chunk()
3055 pcpu_setup_first_chunk(ai, vm.addr); in pcpu_page_first_chunk()
3064 pcpu_free_alloc_info(ai); in pcpu_page_first_chunk()
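A hedged worked example with assumed numbers for the page-granular path above: with PAGE_SIZE = 4 KiB and ai->unit_size = 32 KiB, unit_pages = 32 KiB >> PAGE_SHIFT = 8 pages per unit; with 16 possible CPUs the reserved vmalloc area is vm.size = 16 * 32 KiB = 512 KiB, and unit 5's pages are mapped starting at vm.addr + 5 * 32 KiB before its static image is copied in.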
3132 struct pcpu_alloc_info *ai; in setup_per_cpu_areas() local
3135 ai = pcpu_alloc_alloc_info(1, 1); in setup_per_cpu_areas()
3137 if (!ai || !fc) in setup_per_cpu_areas()
3142 ai->dyn_size = unit_size; in setup_per_cpu_areas()
3143 ai->unit_size = unit_size; in setup_per_cpu_areas()
3144 ai->atom_size = unit_size; in setup_per_cpu_areas()
3145 ai->alloc_size = unit_size; in setup_per_cpu_areas()
3146 ai->groups[0].nr_units = 1; in setup_per_cpu_areas()
3147 ai->groups[0].cpu_map[0] = 0; in setup_per_cpu_areas()
3149 pcpu_setup_first_chunk(ai, fc); in setup_per_cpu_areas()
3150 pcpu_free_alloc_info(ai); in setup_per_cpu_areas()
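A minimal sketch of the UP fallback above, reusing alloc_info_new() and struct alloc_info from the first sketch: one group, one unit, and every size field set to the same unit_size so the whole unit is dynamic space. It mirrors the generic setup_per_cpu_areas() lines listed here; it is not that code.

/* Assumes alloc_info_new() and struct alloc_info from the first sketch. */
static struct alloc_info *build_up_fallback(size_t unit_size)
{
	struct alloc_info *ai = alloc_info_new(1, 1);	/* one group, one unit */

	if (!ai)
		return NULL;
	/* No static or reserved area in this sketch: the whole unit is
	 * dynamic space and one unit is exactly one allocation. */
	ai->dyn_size = ai->unit_size = ai->atom_size = ai->alloc_size = unit_size;
	ai->groups[0].nr_units = 1;
	ai->groups[0].cpu_map[0] = 0;			/* the only CPU */
	return ai;
}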