
Searched refs: base (Results 1 – 13 of 13) sorted by relevance

/mm/
memblock.c
74 static inline phys_addr_t memblock_cap_size(phys_addr_t base, phys_addr_t *size) in memblock_cap_size() argument
76 return *size = min(*size, (phys_addr_t)ULLONG_MAX - base); in memblock_cap_size()
89 phys_addr_t base, phys_addr_t size) in memblock_overlaps_region() argument
94 phys_addr_t rgnbase = type->regions[i].base; in memblock_overlaps_region()
96 if (memblock_addrs_overlap(base, size, rgnbase, rgnsize)) in memblock_overlaps_region()
270 type->regions[0].base = 0; in memblock_remove_region()
437 if (this->base + this->size != next->base || in memblock_merge_regions()
441 BUG_ON(this->base + this->size > next->base); in memblock_merge_regions()
466 int idx, phys_addr_t base, in memblock_insert_region() argument
474 rgn->base = base; in memblock_insert_region()
[all …]
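
The memblock fragments above show two uses of base: memblock_cap_size() clamps *size so that base + size cannot wrap past the top of the physical address space, and memblock_overlaps_region() walks the region array testing each [base, base+size) range for overlap. A minimal userspace sketch of that arithmetic follows; cap_size and regions_overlap are illustrative stand-ins, not the kernel helpers.

#include <stdint.h>
#include <stdio.h>

typedef uint64_t phys_addr_t;

/* Clamp size so that base + size never wraps past the top of the
 * address space, mirroring min(*size, ULLONG_MAX - base) above. */
static phys_addr_t cap_size(phys_addr_t base, phys_addr_t *size)
{
    phys_addr_t max = UINT64_MAX - base;

    if (*size > max)
        *size = max;
    return *size;
}

/* Half-open interval overlap test for [b1, b1+s1) vs [b2, b2+s2). */
static int regions_overlap(phys_addr_t b1, phys_addr_t s1,
                           phys_addr_t b2, phys_addr_t s2)
{
    return b1 < b2 + s2 && b2 < b1 + s1;
}

int main(void)
{
    phys_addr_t size = UINT64_MAX;          /* deliberately too large */

    cap_size(0x1000, &size);
    printf("capped size: %#llx\n", (unsigned long long)size);
    printf("overlap: %d\n", regions_overlap(0x1000, 0x1000, 0x1800, 0x100));
    return 0;
}
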
cma.c
169 int __init cma_init_reserved_mem(phys_addr_t base, phys_addr_t size, in cma_init_reserved_mem() argument
182 if (!size || !memblock_is_region_reserved(base, size)) in cma_init_reserved_mem()
193 if (ALIGN(base, alignment) != base || ALIGN(size, alignment) != size) in cma_init_reserved_mem()
201 cma->base_pfn = PFN_DOWN(base); in cma_init_reserved_mem()
229 int __init cma_declare_contiguous(phys_addr_t base, in cma_declare_contiguous() argument
251 __func__, &size, &base, &limit, &alignment); in cma_declare_contiguous()
272 base = ALIGN(base, alignment); in cma_declare_contiguous()
276 if (!base) in cma_declare_contiguous()
287 if (fixed && base < highmem_start && base + size > highmem_start) { in cma_declare_contiguous()
290 &base, &highmem_start); in cma_declare_contiguous()
[all …]
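
In the CMA lines above, cma_init_reserved_mem() rejects a reservation whose base or size is not already aligned to the CMA alignment, and cma_declare_contiguous() rounds a requested base up with ALIGN(). A hedged sketch of that alignment handling, with ALIGN reimplemented locally as ALIGN_UP and an invented 4 MiB alignment purely for illustration:

#include <stdint.h>
#include <stdio.h>

/* Round x up to the power-of-two boundary a, like the kernel's ALIGN(). */
#define ALIGN_UP(x, a) (((x) + ((a) - 1)) & ~((uint64_t)(a) - 1))

int main(void)
{
    uint64_t base = 0x10080000, size = 0x00200000;
    uint64_t alignment = 1ULL << 22;        /* illustrative CMA alignment */

    /* Mirrors the rejection in cma_init_reserved_mem(): both base and
     * size must already sit on the alignment boundary. */
    if (ALIGN_UP(base, alignment) != base || ALIGN_UP(size, alignment) != size)
        printf("rejected: base/size not aligned to %#llx\n",
               (unsigned long long)alignment);

    /* Mirrors cma_declare_contiguous(): round the requested base up. */
    base = ALIGN_UP(base, alignment);
    printf("aligned base: %#llx\n", (unsigned long long)base);
    return 0;
}
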
page_cgroup.c
28 struct page_cgroup *base; in lookup_page_cgroup() local
30 base = NODE_DATA(page_to_nid(page))->node_page_cgroup; in lookup_page_cgroup()
38 if (unlikely(!base)) in lookup_page_cgroup()
42 return base + offset; in lookup_page_cgroup()
47 struct page_cgroup *base; in alloc_node_page_cgroup() local
57 base = memblock_virt_alloc_try_nid_nopanic( in alloc_node_page_cgroup()
60 if (!base) in alloc_node_page_cgroup()
62 NODE_DATA(nid)->node_page_cgroup = base; in alloc_node_page_cgroup()
131 struct page_cgroup *base; in init_section_page_cgroup() local
140 base = alloc_page_cgroup(table_size, nid); in init_section_page_cgroup()
[all …]
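
lookup_page_cgroup() above fetches the node's base array and returns base + offset, where the offset (computed in lines elided here, presumably the page's pfn minus the node's start pfn in the flat-memory variant) is the page's index within that node. A small sketch of that per-node "base pointer plus offset" lookup; struct meta, node_meta, and lookup_meta are stand-ins invented for this example:

#include <stddef.h>
#include <stdio.h>

struct meta { int flags; };                 /* stand-in for struct page_cgroup */

#define NODE_PAGES 1024

static struct meta node_meta[NODE_PAGES];   /* stand-in for node_page_cgroup */

/* Index a per-node metadata array by the page's offset within the node,
 * the same base + offset pattern used in lookup_page_cgroup(). */
static struct meta *lookup_meta(struct meta *base, size_t pfn,
                                size_t node_start_pfn)
{
    if (!base)                              /* mirrors the unlikely(!base) check */
        return NULL;
    return base + (pfn - node_start_pfn);
}

int main(void)
{
    struct meta *m = lookup_meta(node_meta, 4105, 4096);

    printf("entry index: %td\n", m - node_meta);
    return 0;
}
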
iov_iter.c
304 const struct iovec *iov, size_t base, size_t bytes) in __iovec_copy_from_user_inatomic() argument
309 char __user *buf = iov->iov_base + base; in __iovec_copy_from_user_inatomic()
310 int copy = min(bytes, iov->iov_len - base); in __iovec_copy_from_user_inatomic()
312 base = 0; in __iovec_copy_from_user_inatomic()
360 size_t base = i->iov_offset; in advance_iovec() local
370 copy = min(bytes, iov->iov_len - base); in advance_iovec()
374 base += copy; in advance_iovec()
375 if (iov->iov_len == base) { in advance_iovec()
378 base = 0; in advance_iovec()
382 i->iov_offset = base; in advance_iovec()
[all …]
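
The iov_iter lines show the classic iovec walk: each step consumes min(bytes, iov->iov_len - base) from iov_base + base, advances base, and moves to the next iovec once the current one is exhausted, finally writing the offset back to i->iov_offset. A userspace sketch of that advance loop; advance_iov is an illustrative helper, not the kernel's advance_iovec():

#include <stdio.h>
#include <sys/uio.h>

/* Consume `bytes` bytes starting at offset `*base` inside `*iovp`,
 * stepping through the iovec array as advance_iovec() does above. */
static void advance_iov(const struct iovec **iovp, size_t *base, size_t bytes)
{
    const struct iovec *iov = *iovp;
    size_t off = *base;

    while (bytes) {
        size_t copy = bytes < iov->iov_len - off ? bytes : iov->iov_len - off;

        bytes -= copy;
        off += copy;
        if (off == iov->iov_len) {          /* current segment exhausted */
            iov++;
            off = 0;
        }
    }
    *iovp = iov;
    *base = off;
}

int main(void)
{
    char a[4], b[8];
    struct iovec vec[2] = { { a, sizeof(a) }, { b, sizeof(b) } };
    const struct iovec *iov = vec;
    size_t base = 0;

    advance_iov(&iov, &base, 6);            /* consume across the first segment */
    printf("segment %td, offset %zu\n", iov - vec, base);
    return 0;
}
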
internal.h
287 static inline struct page *mem_map_offset(struct page *base, int offset) in mem_map_offset() argument
290 return nth_page(base, offset); in mem_map_offset()
291 return base + offset; in mem_map_offset()
299 struct page *base, int offset) in mem_map_next() argument
302 unsigned long pfn = page_to_pfn(base) + offset; in mem_map_next()
bootmem.c
473 unsigned long base = bdata->node_min_pfn; in align_idx() local
480 return ALIGN(base + idx, step) - base; in align_idx()
486 unsigned long base = PFN_PHYS(bdata->node_min_pfn); in align_off() local
490 return ALIGN(base + off, align) - base; in align_off()
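
align_idx() and align_off() above align an index or offset relative to the node's base rather than to absolute zero: ALIGN(base + idx, step) - base. The point is that when base itself is not step-aligned, aligning the index alone would produce a misaligned absolute position. A small sketch with locally defined helpers (ALIGN_UP and align_idx here are illustrative):

#include <stdint.h>
#include <stdio.h>

#define ALIGN_UP(x, a) (((x) + ((a) - 1)) & ~((uint64_t)(a) - 1))

/* Align an index so that base + idx lands on a `step` boundary. */
static uint64_t align_idx(uint64_t base, uint64_t idx, uint64_t step)
{
    return ALIGN_UP(base + idx, step) - base;
}

int main(void)
{
    uint64_t base = 3;                      /* node starts at pfn 3 */

    /* base + 5 = 8 is already 4-aligned, so the relative index stays 5 ... */
    printf("relative: %llu\n", (unsigned long long)align_idx(base, 5, 4));
    /* ... whereas naively aligning the index alone gives 8 (pfn 11, misaligned). */
    printf("naive:    %llu\n", (unsigned long long)ALIGN_UP(5, 4));
    return 0;
}
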
vmalloc.c
2370 unsigned long base, start, end, last_end; in pcpu_get_vm_areas() local
2425 base = vmalloc_end - last_end; in pcpu_get_vm_areas()
2428 base = pvm_determine_end(&next, &prev, align) - end; in pcpu_get_vm_areas()
2431 BUG_ON(next && next->va_end <= base + end); in pcpu_get_vm_areas()
2432 BUG_ON(prev && prev->va_end > base + end); in pcpu_get_vm_areas()
2438 if (base + last_end < vmalloc_start + last_end) { in pcpu_get_vm_areas()
2452 if (next && next->va_start < base + end) { in pcpu_get_vm_areas()
2453 base = pvm_determine_end(&next, &prev, align) - end; in pcpu_get_vm_areas()
2463 if (prev && prev->va_end > base + start) { in pcpu_get_vm_areas()
2466 base = pvm_determine_end(&next, &prev, align) - end; in pcpu_get_vm_areas()
[all …]
percpu.c
1303 void __percpu *base = __addr_to_pcpu_ptr(pcpu_base_addr); in is_kernel_percpu_address() local
1307 void *start = per_cpu_ptr(base, cpu); in is_kernel_percpu_address()
1342 void __percpu *base = __addr_to_pcpu_ptr(pcpu_base_addr); in per_cpu_ptr_to_phys() local
1358 void *start = per_cpu_ptr(base, cpu); in per_cpu_ptr_to_phys()
1969 void *base = (void *)ULONG_MAX; in pcpu_embed_first_chunk() local
2009 base = min(ptr, base); in pcpu_embed_first_chunk()
2036 ai->groups[group].base_offset = areas[group] - base; in pcpu_embed_first_chunk()
2055 PFN_DOWN(size_sum), base, ai->static_size, ai->reserved_size, in pcpu_embed_first_chunk()
2058 rc = pcpu_setup_first_chunk(ai, base); in pcpu_embed_first_chunk()
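
is_kernel_percpu_address() and per_cpu_ptr_to_phys() above both start from a base per-CPU pointer and derive each CPU's copy as base plus that CPU's offset (via per_cpu_ptr(base, cpu)), then, in lines elided here, test whether an address falls inside that CPU's range. A userspace sketch of that range walk; the flat area array, AREA_SIZE, and cpu_offset table are invented for illustration, not the kernel's pcpu data:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define NR_CPUS   4
#define AREA_SIZE 256

static char area[NR_CPUS * AREA_SIZE];      /* stand-in per-CPU copies */
static ptrdiff_t cpu_offset[NR_CPUS];       /* offset of each copy from base */

/* For each CPU, compute start = base + offset and test whether addr lies
 * in [start, start + size), mirroring the walk in is_kernel_percpu_address(). */
static bool is_percpu_address(const char *base, const void *addr)
{
    uintptr_t a = (uintptr_t)addr;

    for (int cpu = 0; cpu < NR_CPUS; cpu++) {
        uintptr_t start = (uintptr_t)(base + cpu_offset[cpu]);

        if (a >= start && a < start + AREA_SIZE)
            return true;
    }
    return false;
}

int main(void)
{
    int stack_var;

    for (int cpu = 0; cpu < NR_CPUS; cpu++)
        cpu_offset[cpu] = cpu * AREA_SIZE;

    printf("%d %d\n",
           is_percpu_address(area, &area[2 * AREA_SIZE + 17]),
           is_percpu_address(area, &stack_var));
    return 0;
}
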
slob.c
147 slob_t *base = (slob_t *)((unsigned long)s & PAGE_MASK); in set_slob() local
148 slobidx_t offset = next - base; in set_slob()
172 slob_t *base = (slob_t *)((unsigned long)s & PAGE_MASK); in slob_next() local
179 return base+next; in slob_next()
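
set_slob() and slob_next() above recover the page-local base by masking the block address with PAGE_MASK, then store and follow the next-free link as an offset from that base, so the link fits in a small integer inside the block itself. A simplified userspace sketch of that offset encoding over an aligned buffer (the real set_slob() also encodes block sizes; unit_t, set_next, and get_next here are stand-ins):

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define PAGE_SIZE 4096UL
#define PAGE_MASK (~(PAGE_SIZE - 1))

typedef int16_t unit_t;                     /* stand-in for slobidx_t units */

/* Store `next` as a unit offset from the page base, as set_slob() does. */
static void set_next(unit_t *s, unit_t *next)
{
    unit_t *base = (unit_t *)((uintptr_t)s & PAGE_MASK);

    *s = (unit_t)(next - base);
}

/* Recover the pointer: page base plus the stored offset, as slob_next() does. */
static unit_t *get_next(unit_t *s)
{
    unit_t *base = (unit_t *)((uintptr_t)s & PAGE_MASK);

    return base + *s;
}

int main(void)
{
    unit_t *page = aligned_alloc(PAGE_SIZE, PAGE_SIZE);
    unit_t *a, *b;

    if (!page)
        return 1;
    a = page + 10;
    b = page + 50;
    set_next(a, b);
    printf("round trip ok: %d\n", get_next(a) == b);
    free(page);
    return 0;
}
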
nommu.c
1154 void *base; in do_mmap_private() local
1211 base = page_address(pages); in do_mmap_private()
1213 region->vm_start = (unsigned long) base; in do_mmap_private()
1230 ret = vma->vm_file->f_op->read(vma->vm_file, base, len, &fpos); in do_mmap_private()
1238 memset(base + ret, 0, len - ret); in do_mmap_private()
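
do_mmap_private() above allocates pages, takes base = page_address(pages), reads the file's contents into it, and zero-fills whatever the read did not cover with memset(base + ret, 0, len - ret). A userspace sketch of that read-then-zero-fill-tail pattern; plain read(2) on an ordinary file stands in for the f_op->read call, and the path is illustrative:

#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

int main(void)
{
    size_t len = 4096;
    char *base = malloc(len);
    int fd = open("/etc/hostname", O_RDONLY); /* illustrative short file */
    ssize_t ret;

    if (!base || fd < 0)
        return 1;

    /* Read as much as the file provides, then clear the remainder so the
     * private mapping is zero-filled past EOF, like memset(base + ret, ...). */
    ret = read(fd, base, len);
    if (ret < 0)
        ret = 0;
    if ((size_t)ret < len)
        memset(base + ret, 0, len - ret);

    printf("read %zd bytes, zeroed %zu\n", ret, len - (size_t)ret);
    close(fd);
    free(base);
    return 0;
}
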
slub.c
486 void *base; in check_valid_pointer() local
491 base = page_address(page); in check_valid_pointer()
493 if (object < base || object >= base + page->objects * s->size || in check_valid_pointer()
494 (object - base) % s->size) { in check_valid_pointer()
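
check_valid_pointer() above validates a free-list pointer with two tests: the object must lie inside [base, base + nr_objects * size), and (object - base) must be a multiple of the object size, i.e. the pointer sits exactly on an object boundary. A small sketch of that bounds-and-stride check; the slab layout values are invented for illustration:

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* An object pointer is valid only if it is inside the slab and aligned to
 * an object boundary, the same two tests check_valid_pointer() performs. */
static bool valid_object(const char *base, size_t objects, size_t size,
                         const char *object)
{
    if (object < base || object >= base + objects * size)
        return false;
    return (size_t)(object - base) % size == 0;
}

int main(void)
{
    static char slab[8 * 64];                 /* 8 objects of 64 bytes */

    printf("%d %d %d\n",
           valid_object(slab, 8, 64, slab + 192),     /* object 3: valid */
           valid_object(slab, 8, 64, slab + 200),     /* misaligned */
           valid_object(slab, 8, 64, slab + 8 * 64)); /* past the end */
    return 0;
}
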
hugetlb.c
3590 unsigned long base = addr & PUD_MASK; in vma_shareable() local
3591 unsigned long end = base + PUD_SIZE; in vma_shareable()
3597 vma->vm_start <= base && end <= vma->vm_end) in vma_shareable()
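
vma_shareable() above rounds the faulting address down to a PUD boundary, takes end = base + PUD_SIZE, and only allows hugetlb page-table sharing when the VMA fully covers that [base, end) window. A sketch of that containment test with an illustrative 1 GiB PUD size (range_shareable is a stand-in name):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define PUD_SIZE (1ULL << 30)                /* illustrative: 1 GiB, as on x86-64 */
#define PUD_MASK (~(PUD_SIZE - 1))

/* Sharing a PUD-sized page table only makes sense when the whole aligned
 * window around addr sits inside the VMA, as vma_shareable() checks. */
static bool range_shareable(uint64_t vm_start, uint64_t vm_end, uint64_t addr)
{
    uint64_t base = addr & PUD_MASK;
    uint64_t end = base + PUD_SIZE;

    return vm_start <= base && end <= vm_end;
}

int main(void)
{
    uint64_t start = 3ULL << 30;
    uint64_t stop = (7ULL << 30) + 4096;     /* VMA ends just past 7 GiB */

    printf("%d %d\n",
           range_shareable(start, stop, (4ULL << 30) + 123),  /* window covered */
           range_shareable(start, stop, (7ULL << 30) + 123)); /* VMA ends mid-window */
    return 0;
}
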
page_alloc.c
5180 usable_startpfn = PFN_DOWN(r->base); in find_zone_movable_pfns_for_nodes()