Lines Matching refs:size
167 static inline phys_addr_t memblock_cap_size(phys_addr_t base, phys_addr_t *size) in memblock_cap_size() argument
169 return *size = min(*size, PHYS_ADDR_MAX - base); in memblock_cap_size()
182 phys_addr_t base, phys_addr_t size) in memblock_overlaps_region() argument
186 memblock_cap_size(base, &size); in memblock_overlaps_region()
189 if (memblock_addrs_overlap(base, size, type->regions[i].base, in memblock_overlaps_region()
190 type->regions[i].size)) in memblock_overlaps_region()
212 phys_addr_t size, phys_addr_t align, int nid, in __memblock_find_range_bottom_up() argument
223 if (cand < this_end && this_end - cand >= size) in __memblock_find_range_bottom_up()
247 phys_addr_t size, phys_addr_t align, int nid, in __memblock_find_range_top_down() argument
258 if (this_end < size) in __memblock_find_range_top_down()
261 cand = round_down(this_end - size, align); in __memblock_find_range_top_down()
284 static phys_addr_t __init_memblock memblock_find_in_range_node(phys_addr_t size, in memblock_find_in_range_node() argument
299 return __memblock_find_range_bottom_up(start, end, size, align, in memblock_find_in_range_node()
302 return __memblock_find_range_top_down(start, end, size, align, in memblock_find_in_range_node()
320 phys_addr_t end, phys_addr_t size, in memblock_find_in_range() argument
327 ret = memblock_find_in_range_node(size, align, start, end, in memblock_find_in_range()
332 &size); in memblock_find_in_range()
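
__memblock_find_range_bottom_up() rounds each candidate start up to the requested alignment and accepts it when the hole still holds `size` bytes (line 223); __memblock_find_range_top_down() instead rounds `this_end - size` down (lines 258-261). memblock_find_in_range_node() picks between the two scans, and memblock_find_in_range() is the wrapper without a NUMA constraint. As a hedged sketch against the version shown (the caller name and sizes are invented, and later kernels make this wrapper private to memblock), a find-then-reserve pattern looks like:

        #include <linux/memblock.h>
        #include <linux/sizes.h>

        /* Hypothetical caller: locate a 16 MiB, 2 MiB-aligned window below
         * 4 GiB.  memblock_find_in_range() only finds a candidate; the
         * caller must still memblock_reserve() it before using it. */
        static phys_addr_t __init claim_low_window(void)
        {
                phys_addr_t base;

                base = memblock_find_in_range(0, SZ_4G, SZ_16M, SZ_2M);
                if (base)
                        memblock_reserve(base, SZ_16M);
                return base;
        }
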
342 type->total_size -= type->regions[r].size; in memblock_remove_region()
352 type->regions[0].size = 0; in memblock_remove_region()
364 phys_addr_t addr, size; in memblock_discard() local
368 size = PAGE_ALIGN(sizeof(struct memblock_region) * in memblock_discard()
373 __memblock_free_late(addr, size); in memblock_discard()
378 size = PAGE_ALIGN(sizeof(struct memblock_region) * in memblock_discard()
383 __memblock_free_late(addr, size); in memblock_discard()
512 if (this->base + this->size != next->base || in memblock_merge_regions()
516 BUG_ON(this->base + this->size > next->base); in memblock_merge_regions()
521 this->size += next->size; in memblock_merge_regions()
542 phys_addr_t size, in memblock_insert_region() argument
551 rgn->size = size; in memblock_insert_region()
555 type->total_size += size; in memblock_insert_region()
575 phys_addr_t base, phys_addr_t size, in memblock_add_range() argument
580 phys_addr_t end = base + memblock_cap_size(base, &size); in memblock_add_range()
584 if (!size) in memblock_add_range()
588 if (type->regions[0].size == 0) { in memblock_add_range()
591 type->regions[0].size = size; in memblock_add_range()
594 type->total_size = size; in memblock_add_range()
608 phys_addr_t rend = rbase + rgn->size; in memblock_add_range()
650 if (memblock_double_array(type, obase, size) < 0) in memblock_add_range()
672 int __init_memblock memblock_add_node(phys_addr_t base, phys_addr_t size, in memblock_add_node() argument
675 return memblock_add_range(&memblock.memory, base, size, nid, 0); in memblock_add_node()
689 int __init_memblock memblock_add(phys_addr_t base, phys_addr_t size) in memblock_add() argument
691 phys_addr_t end = base + size - 1; in memblock_add()
696 return memblock_add_range(&memblock.memory, base, size, MAX_NUMNODES, 0); in memblock_add()
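
memblock_add() and memblock_add_node() both funnel into memblock_add_range() on &memblock.memory; the only difference is whether a NUMA node id is recorded (MAX_NUMNODES means "no node"), and the `end = base + size - 1` lines in the public entry points only feed the debug print of the inclusive range. A typical early-boot use is registering the RAM banks reported by firmware, roughly as below (the bank structure is made up for illustration):

        #include <linux/memblock.h>

        /* Hypothetical firmware-provided RAM bank description. */
        struct fw_ram_bank {
                phys_addr_t base;
                phys_addr_t size;
                int nid;
        };

        static void __init register_ram_banks(const struct fw_ram_bank *bank, int nr)
        {
                int i;

                for (i = 0; i < nr; i++) {
                        if (IS_ENABLED(CONFIG_NUMA))
                                memblock_add_node(bank[i].base, bank[i].size,
                                                  bank[i].nid);
                        else
                                memblock_add(bank[i].base, bank[i].size);
                }
        }
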
716 phys_addr_t base, phys_addr_t size, in memblock_isolate_range() argument
719 phys_addr_t end = base + memblock_cap_size(base, &size); in memblock_isolate_range()
725 if (!size) in memblock_isolate_range()
730 if (memblock_double_array(type, base, size) < 0) in memblock_isolate_range()
735 phys_addr_t rend = rbase + rgn->size; in memblock_isolate_range()
748 rgn->size -= base - rbase; in memblock_isolate_range()
759 rgn->size -= end - rbase; in memblock_isolate_range()
776 phys_addr_t base, phys_addr_t size) in memblock_remove_range() argument
781 ret = memblock_isolate_range(type, base, size, &start_rgn, &end_rgn); in memblock_remove_range()
790 int __init_memblock memblock_remove(phys_addr_t base, phys_addr_t size) in memblock_remove() argument
792 phys_addr_t end = base + size - 1; in memblock_remove()
797 return memblock_remove_range(&memblock.memory, base, size); in memblock_remove()
808 int __init_memblock memblock_free(phys_addr_t base, phys_addr_t size) in memblock_free() argument
810 phys_addr_t end = base + size - 1; in memblock_free()
815 kmemleak_free_part_phys(base, size); in memblock_free()
816 return memblock_remove_range(&memblock.reserved, base, size); in memblock_free()
822 int __init_memblock memblock_reserve(phys_addr_t base, phys_addr_t size) in memblock_reserve() argument
824 phys_addr_t end = base + size - 1; in memblock_reserve()
829 return memblock_add_range(&memblock.reserved, base, size, MAX_NUMNODES, 0); in memblock_reserve()
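
memblock_remove() drops a range from &memblock.memory, while memblock_reserve() and memblock_free() add to and remove from &memblock.reserved (memblock_free() also notifies kmemleak, line 815); note that in this version memblock_free() still takes a physical range, not a virtual pointer. The classic pairing is reserving a boot image early and releasing it once it has been consumed; a sketch, with the address variables assumed to come from the boot protocol:

        /* Early boot: keep the boot-time allocator away from the initrd image. */
        memblock_reserve(initrd_start_phys, initrd_size);

        /* ... later, once the contents have been unpacked, hand the range back. */
        memblock_free(initrd_start_phys, initrd_size);
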
833 int __init_memblock memblock_physmem_add(phys_addr_t base, phys_addr_t size) in memblock_physmem_add() argument
835 phys_addr_t end = base + size - 1; in memblock_physmem_add()
840 return memblock_add_range(&physmem, base, size, MAX_NUMNODES, 0); in memblock_physmem_add()
856 phys_addr_t size, int set, int flag) in memblock_setclr_flag() argument
861 ret = memblock_isolate_range(type, base, size, &start_rgn, &end_rgn); in memblock_setclr_flag()
885 int __init_memblock memblock_mark_hotplug(phys_addr_t base, phys_addr_t size) in memblock_mark_hotplug() argument
887 return memblock_setclr_flag(base, size, 1, MEMBLOCK_HOTPLUG); in memblock_mark_hotplug()
897 int __init_memblock memblock_clear_hotplug(phys_addr_t base, phys_addr_t size) in memblock_clear_hotplug() argument
899 return memblock_setclr_flag(base, size, 0, MEMBLOCK_HOTPLUG); in memblock_clear_hotplug()
909 int __init_memblock memblock_mark_mirror(phys_addr_t base, phys_addr_t size) in memblock_mark_mirror() argument
913 return memblock_setclr_flag(base, size, 1, MEMBLOCK_MIRROR); in memblock_mark_mirror()
923 int __init_memblock memblock_mark_nomap(phys_addr_t base, phys_addr_t size) in memblock_mark_nomap() argument
925 return memblock_setclr_flag(base, size, 1, MEMBLOCK_NOMAP); in memblock_mark_nomap()
935 int __init_memblock memblock_clear_nomap(phys_addr_t base, phys_addr_t size) in memblock_clear_nomap() argument
937 return memblock_setclr_flag(base, size, 0, MEMBLOCK_NOMAP); in memblock_clear_nomap()
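
memblock_setclr_flag() isolates the range with memblock_isolate_range() and then sets or clears one region flag across it; the wrappers above map to MEMBLOCK_HOTPLUG, MEMBLOCK_MIRROR and MEMBLOCK_NOMAP. Firmware-described attributes are typically applied like this (the surrounding parsing context is invented):

        /* Hotpluggable memory: keep early allocations out of it so it can be
         * offlined later (honored when movable_node is in effect). */
        memblock_mark_hotplug(hp_base, hp_size);

        /* This range must not be covered by the kernel's linear mapping. */
        memblock_mark_nomap(nomap_base, nomap_size);

        /* Mirrored (more reliable) RAM, preferred for kernel allocations. */
        memblock_mark_mirror(mirror_base, mirror_size);
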
1012 phys_addr_t m_end = m->base + m->size; in __next_mem_range()
1037 r_start = idx_b ? r[-1].base + r[-1].size : 0; in __next_mem_range()
1116 phys_addr_t m_end = m->base + m->size; in __next_mem_range_rev()
1141 r_start = idx_b ? r[-1].base + r[-1].size : 0; in __next_mem_range_rev()
1187 if (PFN_UP(r->base) >= PFN_DOWN(r->base + r->size)) in __next_mem_pfn_range()
1200 *out_end_pfn = PFN_DOWN(r->base + r->size); in __next_mem_pfn_range()
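
__next_mem_range() and __next_mem_range_rev() walk the intersection of one memory type with the gaps of a second type (the `r[-1].base + r[-1].size` computations above), and __next_mem_pfn_range() skips regions too small to contain a full page (line 1187). These workers back the for_each_* iterators in <linux/memblock.h>; assuming the usual for_each_mem_pfn_range() macro wraps __next_mem_pfn_range(), walking registered memory looks roughly like:

        unsigned long start_pfn, end_pfn;
        int i, nid;

        /* Visit every registered memory region as a page-frame range,
         * with no node filter (MAX_NUMNODES). */
        for_each_mem_pfn_range(i, MAX_NUMNODES, &start_pfn, &end_pfn, &nid)
                pr_info("mem: pfn %lx-%lx on node %d\n", start_pfn, end_pfn, nid);
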
1218 int __init_memblock memblock_set_node(phys_addr_t base, phys_addr_t size, in memblock_set_node() argument
1225 ret = memblock_isolate_range(type, base, size, &start_rgn, &end_rgn); in memblock_set_node()
1327 phys_addr_t __init memblock_alloc_range_nid(phys_addr_t size, in memblock_alloc_range_nid() argument
1345 found = memblock_find_in_range_node(size, align, start, end, nid, in memblock_alloc_range_nid()
1347 if (found && !memblock_reserve(found, size)) in memblock_alloc_range_nid()
1351 found = memblock_find_in_range_node(size, align, start, in memblock_alloc_range_nid()
1354 if (found && !memblock_reserve(found, size)) in memblock_alloc_range_nid()
1361 &size); in memblock_alloc_range_nid()
1376 kmemleak_alloc_phys(found, size, 0, 0); in memblock_alloc_range_nid()
1393 phys_addr_t __init memblock_phys_alloc_range(phys_addr_t size, in memblock_phys_alloc_range() argument
1399 __func__, (u64)size, (u64)align, &start, &end, in memblock_phys_alloc_range()
1401 return memblock_alloc_range_nid(size, align, start, end, NUMA_NO_NODE, in memblock_phys_alloc_range()
1418 phys_addr_t __init memblock_phys_alloc_try_nid(phys_addr_t size, phys_addr_t align, int nid) in memblock_phys_alloc_try_nid() argument
1420 return memblock_alloc_range_nid(size, align, 0, in memblock_phys_alloc_try_nid()
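
memblock_alloc_range_nid() is the allocation core: it searches on the requested node, retries on any node (lines 1345-1354), reserves what it finds, and registers the range with kmemleak (line 1376). memblock_phys_alloc_range() and memblock_phys_alloc_try_nid() are thin wrappers that return the physical address. A sketch of both (the node, sizes and panic message are illustrative):

        int nid = 0;                    /* illustrative node */
        phys_addr_t pa;

        /* 1 MiB, page aligned, preferably on this node; the core falls back
         * to other nodes if the requested one has no free memory. */
        pa = memblock_phys_alloc_try_nid(SZ_1M, PAGE_SIZE, nid);
        if (!pa)
                panic("early per-node buffer allocation failed");

        /* Or constrain the physical placement, e.g. below 4 GiB. */
        pa = memblock_phys_alloc_range(SZ_1M, PAGE_SIZE, 0, SZ_4G);
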
1445 phys_addr_t size, phys_addr_t align, in memblock_alloc_internal() argument
1457 return kzalloc_node(size, GFP_NOWAIT, nid); in memblock_alloc_internal()
1462 alloc = memblock_alloc_range_nid(size, align, min_addr, max_addr, nid, in memblock_alloc_internal()
1467 alloc = memblock_alloc_range_nid(size, align, 0, max_addr, nid, in memblock_alloc_internal()
1495 phys_addr_t size, phys_addr_t align, in memblock_alloc_exact_nid_raw() argument
1502 __func__, (u64)size, (u64)align, nid, &min_addr, in memblock_alloc_exact_nid_raw()
1505 ptr = memblock_alloc_internal(size, align, in memblock_alloc_exact_nid_raw()
1507 if (ptr && size > 0) in memblock_alloc_exact_nid_raw()
1508 page_init_poison(ptr, size); in memblock_alloc_exact_nid_raw()
1533 phys_addr_t size, phys_addr_t align, in memblock_alloc_try_nid_raw() argument
1540 __func__, (u64)size, (u64)align, nid, &min_addr, in memblock_alloc_try_nid_raw()
1543 ptr = memblock_alloc_internal(size, align, in memblock_alloc_try_nid_raw()
1545 if (ptr && size > 0) in memblock_alloc_try_nid_raw()
1546 page_init_poison(ptr, size); in memblock_alloc_try_nid_raw()
1569 phys_addr_t size, phys_addr_t align, in memblock_alloc_try_nid() argument
1576 __func__, (u64)size, (u64)align, nid, &min_addr, in memblock_alloc_try_nid()
1578 ptr = memblock_alloc_internal(size, align, in memblock_alloc_try_nid()
1581 memset(ptr, 0, size); in memblock_alloc_try_nid()
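
The memblock_alloc_*() family returns a virtual pointer instead of a physical address: memblock_alloc_internal() falls back to kzalloc_node() once the slab allocator is up (line 1457) and retries without the lower address bound (line 1467), the *_raw variants leave the memory uninitialized apart from page_init_poison() (lines 1508 and 1546), and memblock_alloc_try_nid() zeroes the result (line 1581). A hedged sketch of the zeroing variant, assuming the usual MEMBLOCK_LOW_LIMIT and MEMBLOCK_ALLOC_ACCESSIBLE constants from memblock.h (the node, table and size are illustrative):

        int nid = 0;                            /* illustrative node */
        unsigned long table_size = PAGE_SIZE;   /* illustrative size */
        void *table;

        /* Early, zeroed allocation with a node preference and no upper
         * address limit. */
        table = memblock_alloc_try_nid(table_size, SMP_CACHE_BYTES,
                                       MEMBLOCK_LOW_LIMIT,
                                       MEMBLOCK_ALLOC_ACCESSIBLE, nid);
        if (!table)
                panic("%s: failed to allocate %lu bytes\n", __func__, table_size);
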
1595 void __init __memblock_free_late(phys_addr_t base, phys_addr_t size) in __memblock_free_late() argument
1599 end = base + size - 1; in __memblock_free_late()
1602 kmemleak_free_part_phys(base, size); in __memblock_free_late()
1604 end = PFN_DOWN(base + size); in __memblock_free_late()
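
__memblock_free_late() is for the window after memblock allocations are frozen but the page allocator is already usable: it notifies kmemleak (line 1602), converts the range to page frames (line 1604) and hands the pages straight to the buddy allocator instead of editing the reserved array. memblock_discard() at lines 364-383 uses it to release the region arrays themselves. A caller simply returns a range it no longer needs (the variable names are illustrative):

        /* A buffer reserved early that is no longer needed; release it after
         * mm_init(), when the page allocator is live. */
        __memblock_free_late(early_buf_phys, early_buf_size);
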
1636 return (memblock.memory.regions[idx].base + memblock.memory.regions[idx].size); in memblock_end_of_DRAM()
1651 if (limit <= r->size) { in __find_max_addr()
1655 limit -= r->size; in __find_max_addr()
1681 void __init memblock_cap_memory_range(phys_addr_t base, phys_addr_t size) in memblock_cap_memory_range() argument
1686 if (!size) in memblock_cap_memory_range()
1689 ret = memblock_isolate_range(&memblock.memory, base, size, in memblock_cap_memory_range()
1706 base + size, PHYS_ADDR_MAX); in memblock_cap_memory_range()
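
memblock_cap_memory_range() isolates [base, base + size) and then removes everything below base and everything from base + size up to PHYS_ADDR_MAX (line 1706). This kind of capping is what a capture/crash kernel does when only a window of RAM may be touched; as a hedged sketch (variable names invented):

        /* Keep only the window handed over to the capture kernel; discard the
         * rest of the memory map. */
        memblock_cap_memory_range(cap_base, cap_size);
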
1735 type->regions[mid].size)) in memblock_search()
1772 *end_pfn = PFN_DOWN(type->regions[mid].base + type->regions[mid].size); in memblock_search_pfn_nid()
1787 bool __init_memblock memblock_is_region_memory(phys_addr_t base, phys_addr_t size) in memblock_is_region_memory() argument
1790 phys_addr_t end = base + memblock_cap_size(base, &size); in memblock_is_region_memory()
1795 memblock.memory.regions[idx].size) >= end; in memblock_is_region_memory()
1809 bool __init_memblock memblock_is_region_reserved(phys_addr_t base, phys_addr_t size) in memblock_is_region_reserved() argument
1811 return memblock_overlaps_region(&memblock.reserved, base, size); in memblock_is_region_reserved()
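
The two query helpers answer different questions: memblock_is_region_memory() requires the whole range to fall inside a single registered memory region (lines 1790-1795), while memblock_is_region_reserved() only asks whether the range overlaps anything in &memblock.reserved. A common validation pattern before claiming a fixed physical window (addresses are illustrative):

        /* The window must be real RAM and must not clash with an existing
         * reservation before we claim it. */
        if (!memblock_is_region_memory(fixed_base, fixed_size) ||
            memblock_is_region_reserved(fixed_base, fixed_size))
                return -EINVAL;

        memblock_reserve(fixed_base, fixed_size);
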
1821 orig_end = r->base + r->size; in memblock_trim_memory()
1830 r->size = end - start; in memblock_trim_memory()
1851 phys_addr_t base, end, size; in memblock_dump() local
1862 size = rgn->size; in memblock_dump()
1863 end = base + size - 1; in memblock_dump()
1871 type->name, idx, &base, &end, &size, nid_buf, flags); in memblock_dump()
2024 end = reg->base + reg->size - 1; in memblock_debug_show()
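
memblock_dump() prints each region's base, inclusive end, size, node and flags (lines 1851-1871), and memblock_debug_show() exposes the same tables through debugfs (memblock/memory and memblock/reserved). Booting with memblock=debug additionally logs every add/reserve/free call; from code the tables can be dumped with:

        /* Dump the memory and reserved region tables to the kernel log; this
         * only produces output when booted with memblock=debug. */
        memblock_dump_all();
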