Lines matching refs:u64. Each hit below is shown as the source line number, the matching source text, and the enclosing function; the lmb_* names and the lmb.memory/lmb.reserved tables point at the kernel's old LMB boot-memory allocator (lib/lmb.c, the predecessor of today's memblock).
61 static unsigned long lmb_addrs_overlap(u64 base1, u64 size1, u64 base2, in lmb_addrs_overlap()
62 u64 size2) in lmb_addrs_overlap()
67 static long lmb_addrs_adjacent(u64 base1, u64 size1, u64 base2, u64 size2) in lmb_addrs_adjacent()
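The two helpers above (source lines 61 and 67) take plain base/size pairs, so they are almost certainly pure interval arithmetic. A minimal self-contained sketch of what they most likely compute, assuming the usual half-open [base, base + size) convention; this is an illustration, not text lifted from the indexed tree:

    typedef unsigned long long u64;    /* stand-in for the kernel's u64 */

    /* Non-zero when [base1, base1+size1) and [base2, base2+size2) intersect. */
    static unsigned long lmb_addrs_overlap(u64 base1, u64 size1,
                                           u64 base2, u64 size2)
    {
            return (base1 < base2 + size2) && (base2 < base1 + size1);
    }

    /* 1 if region 2 starts exactly where region 1 ends, -1 for the reverse,
     * 0 if the two ranges are not adjacent. */
    static long lmb_addrs_adjacent(u64 base1, u64 size1, u64 base2, u64 size2)
    {
            if (base2 == base1 + size1)
                    return 1;
            if (base1 == base2 + size2)
                    return -1;
            return 0;
    }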
80 u64 base1 = rgn->region[r1].base; in lmb_regions_adjacent()
81 u64 size1 = rgn->region[r1].size; in lmb_regions_adjacent()
82 u64 base2 = rgn->region[r2].base; in lmb_regions_adjacent()
83 u64 size2 = rgn->region[r2].size; in lmb_regions_adjacent()
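Source lines 80-83 show lmb_regions_adjacent() copying the base/size of two indexed entries out of an lmb_region table, presumably to hand them to lmb_addrs_adjacent(). A sketch under that assumption; the struct layout is a simplified stand-in for the real lmb.h definitions, and the array bound is made up for the example:

    struct lmb_property { u64 base; u64 size; };

    struct lmb_region {
            unsigned long cnt;
            struct lmb_property region[128];   /* bound chosen for the sketch */
    };

    static long lmb_regions_adjacent(struct lmb_region *rgn,
                                     unsigned long r1, unsigned long r2)
    {
            u64 base1 = rgn->region[r1].base;
            u64 size1 = rgn->region[r1].size;
            u64 base2 = rgn->region[r2].base;
            u64 size2 = rgn->region[r2].size;

            return lmb_addrs_adjacent(base1, size1, base2, size2);
    }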
132 static long lmb_add_region(struct lmb_region *rgn, u64 base, u64 size) in lmb_add_region()
145 u64 rgnbase = rgn->region[i].base; in lmb_add_region()
146 u64 rgnsize = rgn->region[i].size; in lmb_add_region()
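lmb_add_region() (source lines 132-146) walks the existing entries with per-entry rgnbase/rgnsize locals; the usual job of such a routine is to merge the new range with a touching entry or record it as a fresh one. A much-simplified sketch of that idea, reusing the types above; the real function also keeps the table base-sorted and can merge across two neighbours, which is omitted here:

    static long lmb_add_region(struct lmb_region *rgn, u64 base, u64 size)
    {
            unsigned long i;

            for (i = 0; i < rgn->cnt; i++) {
                    u64 rgnbase = rgn->region[i].base;
                    u64 rgnsize = rgn->region[i].size;

                    /* New range touches this entry: grow it in place. */
                    if (lmb_addrs_adjacent(base, size, rgnbase, rgnsize)) {
                            rgn->region[i].base = (base < rgnbase) ? base : rgnbase;
                            rgn->region[i].size += size;
                            return 0;
                    }
            }

            if (rgn->cnt >= 128)                /* matches the sketch's bound */
                    return -1;                  /* table full */

            rgn->region[rgn->cnt].base = base;
            rgn->region[rgn->cnt].size = size;
            rgn->cnt++;
            return 0;
    }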
196 long lmb_add(u64 base, u64 size) in lmb_add()
208 long lmb_remove(u64 base, u64 size) in lmb_remove()
211 u64 rgnbegin, rgnend; in lmb_remove()
212 u64 end = base + size; in lmb_remove()
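The rgnbegin/rgnend pair and end = base + size in lmb_remove() (source lines 208-212) point at the usual carve-out cases: find the entry containing [base, end), then delete it, trim one side, or split it. A sketch of those cases; the global lmb descriptor is reduced to its two tables, and lmb_remove_region() is a hypothetical compaction helper added for the sketch:

    /* Simplified stand-in for the kernel's global LMB descriptor. */
    static struct lmb {
            struct lmb_region memory;
            struct lmb_region reserved;
    } lmb;

    /* Hypothetical helper: close the gap left by a deleted entry. */
    static void lmb_remove_region(struct lmb_region *rgn, unsigned long r)
    {
            unsigned long i;

            for (i = r; i + 1 < rgn->cnt; i++)
                    rgn->region[i] = rgn->region[i + 1];
            rgn->cnt--;
    }

    long lmb_remove(u64 base, u64 size)
    {
            struct lmb_region *rgn = &lmb.memory;
            u64 rgnbegin = 0, rgnend = 0;
            u64 end = base + size;
            unsigned long i;

            /* Find the entry that fully contains [base, end). */
            for (i = 0; i < rgn->cnt; i++) {
                    rgnbegin = rgn->region[i].base;
                    rgnend = rgnbegin + rgn->region[i].size;
                    if (rgnbegin <= base && end <= rgnend)
                            break;
            }
            if (i == rgn->cnt)
                    return -1;                          /* nothing matched */

            if (rgnbegin == base && rgnend == end) {
                    /* Request covers the whole entry: delete it. */
                    lmb_remove_region(rgn, i);
            } else if (rgnbegin == base) {
                    /* Trim the front. */
                    rgn->region[i].base = end;
                    rgn->region[i].size -= size;
            } else if (rgnend == end) {
                    /* Trim the back. */
                    rgn->region[i].size -= size;
            } else {
                    /* Punch a hole: shrink this entry, add the tail as a new one. */
                    rgn->region[i].size = base - rgnbegin;
                    return lmb_add_region(rgn, end, rgnend - end);
            }
            return 0;
    }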
257 long __init lmb_reserve(u64 base, u64 size) in lmb_reserve()
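lmb_add() (line 196) and lmb_reserve() (line 257) share lmb_add_region()'s base/size signature, which suggests both are thin wrappers that record a range in the memory table and the reserved table respectively. A sketch under that assumption (the real lmb_add() may do a little extra platform bookkeeping):

    long lmb_add(u64 base, u64 size)
    {
            return lmb_add_region(&lmb.memory, base, size);
    }

    long lmb_reserve(u64 base, u64 size)
    {
            return lmb_add_region(&lmb.reserved, base, size);
    }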
266 long __init lmb_overlaps_region(struct lmb_region *rgn, u64 base, u64 size) in lmb_overlaps_region()
271 u64 rgnbase = rgn->region[i].base; in lmb_overlaps_region()
272 u64 rgnsize = rgn->region[i].size; in lmb_overlaps_region()
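lmb_overlaps_region() (lines 266-272) scans a table with the same rgnbase/rgnsize pattern as lmb_add_region(). Returning the index of the first overlapping entry, or a negative value when there is none, would match how the allocation paths further down use it; a sketch on that basis:

    long lmb_overlaps_region(struct lmb_region *rgn, u64 base, u64 size)
    {
            unsigned long i;

            for (i = 0; i < rgn->cnt; i++) {
                    u64 rgnbase = rgn->region[i].base;
                    u64 rgnsize = rgn->region[i].size;

                    if (lmb_addrs_overlap(base, size, rgnbase, rgnsize))
                            return (long)i;
            }
            return -1;
    }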
280 static u64 lmb_align_down(u64 addr, u64 size) in lmb_align_down()
285 static u64 lmb_align_up(u64 addr, u64 size) in lmb_align_up()
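The names and (addr, size) signatures at lines 280-285 are the classic power-of-two rounding helpers. A sketch, assuming size is a non-zero power of two:

    /* Round addr down / up to a boundary of `size`. */
    static u64 lmb_align_down(u64 addr, u64 size)
    {
            return addr & ~(size - 1);
    }

    static u64 lmb_align_up(u64 addr, u64 size)
    {
            return (addr + size - 1) & ~(size - 1);
    }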
290 static u64 __init lmb_alloc_nid_unreserved(u64 start, u64 end, in lmb_alloc_nid_unreserved()
291 u64 size, u64 align) in lmb_alloc_nid_unreserved()
293 u64 base, res_base; in lmb_alloc_nid_unreserved()
302 base = ~(u64)0; in lmb_alloc_nid_unreserved()
311 return ~(u64)0; in lmb_alloc_nid_unreserved()
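The base/res_base locals and the ~(u64)0 failure value in lmb_alloc_nid_unreserved() (lines 290-311) fit a top-down scan: start from the highest aligned base that fits in [start, end), and whenever the candidate collides with an entry in lmb.reserved, retry just below that reservation. A sketch of that loop, reusing the helpers above; claiming the range by adding it to lmb.reserved is an assumption based on how boot allocators of this kind usually work:

    static u64 lmb_alloc_nid_unreserved(u64 start, u64 end, u64 size, u64 align)
    {
            u64 base, res_base;
            long j;

            base = lmb_align_down(end - size, align);
            while (start <= base) {
                    j = lmb_overlaps_region(&lmb.reserved, base, size);
                    if (j < 0) {
                            /* Free of reservations: claim it and return. */
                            if (lmb_add_region(&lmb.reserved, base, size) < 0)
                                    return ~(u64)0;
                            return base;
                    }
                    /* Collides with reservation j: retry just below it. */
                    res_base = lmb.reserved.region[j].base;
                    if (res_base < size)
                            break;
                    base = lmb_align_down(res_base - size, align);
            }
            return ~(u64)0;
    }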
314 static u64 __init lmb_alloc_nid_region(struct lmb_property *mp, in lmb_alloc_nid_region() argument
315 u64 (*nid_range)(u64, u64, int *), in lmb_alloc_nid_region() argument
316 u64 size, u64 align, int nid) in lmb_alloc_nid_region()
318 u64 start, end; in lmb_alloc_nid_region()
325 u64 this_end; in lmb_alloc_nid_region()
330 u64 ret = lmb_alloc_nid_unreserved(start, this_end, in lmb_alloc_nid_region()
332 if (ret != ~(u64)0) in lmb_alloc_nid_region()
338 return ~(u64)0; in lmb_alloc_nid_region()
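lmb_alloc_nid_region() (lines 314-338) takes a nid_range callback of type u64 (*)(u64, u64, int *). The natural reading is that the callback reports how far from start the memory stays on one NUMA node, returning that boundary and the node id, so the loop only tries the sub-ranges that belong to the requested nid. A sketch on that reading:

    static u64 lmb_alloc_nid_region(struct lmb_property *mp,
                                    u64 (*nid_range)(u64, u64, int *),
                                    u64 size, u64 align, int nid)
    {
            u64 start = mp->base;
            u64 end = start + mp->size;

            start = lmb_align_up(start, align);
            while (start < end) {
                    u64 this_end;
                    int this_nid;

                    /* Callback: how far does memory at `start` stay on one node? */
                    this_end = nid_range(start, end, &this_nid);
                    if (this_nid == nid) {
                            u64 ret = lmb_alloc_nid_unreserved(start, this_end,
                                                               size, align);
                            if (ret != ~(u64)0)
                                    return ret;
                    }
                    start = this_end;
            }
            return ~(u64)0;
    }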
341 u64 __init lmb_alloc_nid(u64 size, u64 align, int nid, in lmb_alloc_nid()
342 u64 (*nid_range)(u64 start, u64 end, int *nid)) in lmb_alloc_nid()
352 u64 ret = lmb_alloc_nid_region(&mem->region[i], in lmb_alloc_nid()
355 if (ret != ~(u64)0) in lmb_alloc_nid()
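The outer entry point, lmb_alloc_nid() (lines 341-355), then just tries each registered memory region in turn, passing the caller's nid_range callback down and treating ~(u64)0 as "keep looking". A sketch; the fallback behaviour when no node-local memory exists is an assumption and is only noted in a comment:

    u64 lmb_alloc_nid(u64 size, u64 align, int nid,
                      u64 (*nid_range)(u64 start, u64 end, int *nid))
    {
            struct lmb_region *mem = &lmb.memory;
            unsigned long i;

            size = lmb_align_up(size, align);

            for (i = 0; i < mem->cnt; i++) {
                    u64 ret = lmb_alloc_nid_region(&mem->region[i], nid_range,
                                                   size, align, nid);
                    if (ret != ~(u64)0)
                            return ret;
            }
            /* No node-local memory found; a node-agnostic lmb_alloc() fallback
             * would be the obvious next step (assumption, not shown here). */
            return ~(u64)0;
    }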
362 u64 __init lmb_alloc(u64 size, u64 align) in lmb_alloc()
367 u64 __init lmb_alloc_base(u64 size, u64 align, u64 max_addr) in lmb_alloc_base()
369 u64 alloc; in lmb_alloc_base()
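lmb_alloc() (line 362) and lmb_alloc_base() (lines 367-369) differ only in the extra max_addr argument, which reads as an upper bound on the returned address. A hypothetical early-boot caller, just to show how the u64 parameters line up; the function name, sizes, and alignments here are invented for the example:

    u64 lmb_alloc(u64 size, u64 align);
    u64 lmb_alloc_base(u64 size, u64 align, u64 max_addr);

    void example_early_setup(void)
    {
            u64 table, dma_buf;

            /* 64 KiB, 4 KiB aligned, anywhere in registered memory. */
            table = lmb_alloc(0x10000, 0x1000);

            /* 1 MiB, 1 MiB aligned, kept below 4 GiB for a 32-bit device. */
            dma_buf = lmb_alloc_base(0x100000, 0x100000, 0x100000000ULL);

            (void)table;
            (void)dma_buf;
    }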
380 u64 __init __lmb_alloc_base(u64 size, u64 align, u64 max_addr) in __lmb_alloc_base()
383 u64 base = 0; in __lmb_alloc_base()
384 u64 res_base; in __lmb_alloc_base()
396 u64 lmbbase = lmb.memory.region[i].base; in __lmb_alloc_base()
397 u64 lmbsize = lmb.memory.region[i].size; in __lmb_alloc_base()
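__lmb_alloc_base() (lines 380-397) walks lmb.memory with lmbbase/lmbsize locals and a res_base, i.e. the same "highest fit below a ceiling, skipping reservations" scan as the nid path, with base starting at 0 (line 383) suggesting 0 is the failure value here. A much-simplified sketch of the outer loop, reusing lmb_alloc_nid_unreserved() from above even though the real function probably open-codes the inner walk:

    u64 __lmb_alloc_base(u64 size, u64 align, u64 max_addr)
    {
            long i;

            size = lmb_align_up(size, align);

            /* Prefer high addresses: scan the memory table from the top. */
            for (i = (long)lmb.memory.cnt - 1; i >= 0; i--) {
                    u64 lmbbase = lmb.memory.region[i].base;
                    u64 lmbsize = lmb.memory.region[i].size;
                    u64 top, base;

                    if (lmbbase >= max_addr)
                            continue;
                    top = lmbbase + lmbsize;
                    if (top > max_addr)
                            top = max_addr;
                    if (top - lmbbase < size)
                            continue;       /* window too small once clamped */

                    /* Same collision-avoiding walk as lmb_alloc_nid_unreserved(). */
                    base = lmb_alloc_nid_unreserved(lmbbase, top, size, align);
                    if (base != ~(u64)0)
                            return base;
            }
            return 0;       /* nothing fit below max_addr */
    }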
427 u64 __init lmb_phys_mem_size(void) in lmb_phys_mem_size()
432 u64 __init lmb_end_of_DRAM(void) in lmb_end_of_DRAM()
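The two queries at lines 427 and 432 return u64 summaries of the memory table: the total amount of registered memory and the address just past the highest registered range. Sketches; the real code may keep a cached running total rather than summing on each call:

    u64 lmb_phys_mem_size(void)
    {
            u64 total = 0;
            unsigned long i;

            for (i = 0; i < lmb.memory.cnt; i++)
                    total += lmb.memory.region[i].size;
            return total;
    }

    u64 lmb_end_of_DRAM(void)
    {
            unsigned long idx = lmb.memory.cnt - 1;

            /* Assumes a base-sorted table (the real lmb_add_region() keeps it so). */
            return lmb.memory.region[idx].base + lmb.memory.region[idx].size;
    }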
440 void __init lmb_enforce_memory_limit(u64 memory_limit) in lmb_enforce_memory_limit()
443 u64 limit; in lmb_enforce_memory_limit()
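lmb_enforce_memory_limit() (lines 440-443) keeps a u64 limit, which suggests it walks the memory table and truncates it once the running total reaches memory_limit. A sketch of that truncation only; any matching cleanup of the reserved table is omitted:

    void lmb_enforce_memory_limit(u64 memory_limit)
    {
            u64 limit = memory_limit;
            unsigned long i;

            /* Keep only the first memory_limit bytes of registered memory. */
            for (i = 0; i < lmb.memory.cnt; i++) {
                    if (limit >= lmb.memory.region[i].size) {
                            limit -= lmb.memory.region[i].size;
                            continue;
                    }
                    /* This entry crosses the limit: shrink it ... */
                    lmb.memory.region[i].size = limit;
                    /* ... and drop everything after it. */
                    lmb.memory.cnt = i + (limit ? 1 : 0);
                    break;
            }
    }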
483 int __init lmb_is_reserved(u64 addr) in lmb_is_reserved()
488 u64 upper = lmb.reserved.region[i].base + in lmb_is_reserved()
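The upper = lmb.reserved.region[i].base + ... fragment at line 488 makes lmb_is_reserved() (line 483) straightforward to reconstruct as an inclusive containment test against every reserved entry. A sketch:

    int lmb_is_reserved(u64 addr)
    {
            unsigned long i;

            for (i = 0; i < lmb.reserved.cnt; i++) {
                    u64 upper = lmb.reserved.region[i].base +
                                lmb.reserved.region[i].size - 1;

                    if (addr >= lmb.reserved.region[i].base && addr <= upper)
                            return 1;
            }
            return 0;
    }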
503 u64 rstart, rend; in lmb_find()
509 u64 start = lmb.memory.region[i].base; in lmb_find()
510 u64 end = start + lmb.memory.region[i].size - 1; in lmb_find()
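Finally, the rstart/rend and start/end locals in lmb_find() (lines 503-510) suggest it clamps a requested window to the registered memory map, intersecting the request with each range of lmb.memory. A sketch of that interpretation; reporting the result through a struct lmb_property in/out argument is an assumption, since the listed lines do not show the parameters:

    int lmb_find(struct lmb_property *res)
    {
            u64 rstart = res->base;
            u64 rend = rstart + res->size - 1;
            unsigned long i;

            for (i = 0; i < lmb.memory.cnt; i++) {
                    u64 start = lmb.memory.region[i].base;
                    u64 end = start + lmb.memory.region[i].size - 1;

                    if (start > rend)
                            break;              /* base-sorted table: no match */

                    if (end >= rstart && start <= rend) {
                            /* Clamp the request to this memory range. */
                            if (rstart < start)
                                    rstart = start;
                            if (rend > end)
                                    rend = end;
                            res->base = rstart;
                            res->size = rend - rstart + 1;
                            return 0;
                    }
            }
            return -1;
    }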