
Searched refs: page_address (Results 1 – 16 of 16), sorted by relevance

/mm/
highmem.c 242 if (page_address(page)) in map_new_virtual()
243 return (unsigned long)page_address(page); in map_new_virtual()
276 vaddr = (unsigned long)page_address(page); in kmap_high()
302 vaddr = (unsigned long)page_address(page); in kmap_high_get()
329 vaddr = (unsigned long)page_address(page); in kunmap_high()
556 return page_address(page); in __kmap_local_page_prot()
724 void *page_address(const struct page *page) in page_address() function
750 EXPORT_SYMBOL(page_address);
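
For orientation: page_address(), defined above at highmem.c 724, returns the kernel virtual address of a page when one exists (always for lowmem pages, only while mapped for highmem pages) and NULL otherwise, which is why map_new_virtual() and kmap_high() check its result before installing a new mapping. Below is a minimal caller-side sketch, not taken from the tree; the helper name zero_page_demo is invented, and kmap_local_page() is used as the fallback for an unmapped highmem page (its backend, __kmap_local_page_prot(), appears at highmem.c 556 above).

#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/highmem.h>
#include <linux/mm.h>
#include <linux/string.h>

/* Hypothetical demo helper: zero one freshly allocated page.
 * Lowmem pages are always in the kernel's linear mapping, so
 * page_address() returns a usable pointer; a highmem page that
 * is not currently kmap'ed makes it return NULL, in which case
 * a temporary kmap_local_page() mapping is needed instead.
 */
static int zero_page_demo(void)
{
	struct page *page = alloc_page(GFP_HIGHUSER);
	void *vaddr;

	if (!page)
		return -ENOMEM;

	vaddr = page_address(page);	/* NULL for an unmapped highmem page */
	if (vaddr) {
		memset(vaddr, 0, PAGE_SIZE);
	} else {
		vaddr = kmap_local_page(page);	/* short-lived per-CPU mapping */
		memset(vaddr, 0, PAGE_SIZE);
		kunmap_local(vaddr);
	}

	__free_page(page);
	return 0;
}

On configurations without CONFIG_HIGHMEM, page_address() cannot return NULL for an allocated page, so only the first branch is ever taken.
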
z3fold.c 388 struct z3fold_header *zhdr = page_address(page); in init_z3fold_page()
1338 zhdr = page_address(page); in z3fold_reclaim_page()
1573 zhdr = page_address(page); in z3fold_page_isolate()
1613 zhdr = page_address(page); in z3fold_page_migrate()
1627 new_zhdr = page_address(newpage); in z3fold_page_migrate()
1672 zhdr = page_address(page); in z3fold_page_putback()
slub.c 530 void *addr = page_address(page); in __fill_map()
636 base = page_address(page); in check_valid_pointer()
708 for_each_object(p, s, page_address(page), page->objects) { in get_each_object_track()
842 u8 *addr = page_address(page); in print_trailer()
936 u8 *addr = page_address(page); in check_bytes_and_report()
1031 start = page_address(page); in slab_pad_check()
1880 start = fixup_red_left(s, page_address(page)); in shuffle_freelist()
1957 start = page_address(page); in allocate_slab()
2008 for_each_object(p, s, page_address(page), in __free_slab()
4251 void *addr = page_address(page); in list_slab_objects()
[all …]
percpu-km.c 75 chunk->base_addr = page_address(pages); in pcpu_create_chunk()
swap_cgroup.c 72 sc = page_address(mappage); in __lookup_swap_cgroup()
secretmem.c 99 addr = (unsigned long)page_address(page); in secretmem_fault()
zbud.c 155 struct zbud_header *zhdr = page_address(page); in init_zbud_page()
sparse-vmemmap.c 413 return page_address(page); in vmemmap_alloc_block()
slob.c 207 return page_address(page); in slob_new_pages()
vmalloc.c 598 __pa(page_address(pages[i])), prot, in vmap_pages_range_noflush()
2560 if (page_address(area->pages[i])) in set_area_direct_map()
2594 unsigned long addr = (unsigned long)page_address(area->pages[i]); in vm_remove_mappings()
page_alloc.c 1475 debug_check_no_locks_freed(page_address(page), in free_pages_prepare()
1477 debug_check_no_obj_freed(page_address(page), in free_pages_prepare()
5816 return (unsigned long) page_address(page); in __get_free_pages()
5896 nc->va = page ? page_address(page) : NULL; in __page_frag_cache_refill()
6062 return make_alloc_exact((unsigned long)page_address(p), order, size); in alloc_pages_exact_nid()
8558 direct_map_addr = page_address(page); in free_reserved_area()
slab_common.c 977 ret = page_address(page); in kmalloc_order()
shmem.c 3115 memcpy(page_address(page), symname, len); in shmem_symlink()
3163 return page_address(page); in shmem_get_link()
percpu.c 3257 free_fn(page_address(pages[j]), PAGE_SIZE); in pcpu_page_first_chunk()
slab.c 2298 void *addr = page_address(page); in alloc_slabmgmt()
/mm/kasan/
common.c 113 kasan_unpoison(page_address(page), PAGE_SIZE << order, init); in __kasan_unpoison_pages()
119 kasan_poison(page_address(page), PAGE_SIZE << order, in __kasan_poison_pages()
255 kasan_poison(page_address(page), page_size(page), in __kasan_poison_slab()
378 if (ptr != page_address(virt_to_head_page(ptr))) { in ____kasan_kfree_large()
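
The KASAN callers above cover whole pages when poisoning and unpoisoning, and common.c 378 uses page_address() in the opposite direction to validate a large free: an allocation returned by kmalloc_order() (slab_common.c 977) must be freed via the address of the first byte of its head page. A rough sketch of that check, assuming ptr came from a page-order kmalloc(); the helper name is invented:

#include <linux/mm.h>
#include <linux/types.h>

/* Hypothetical sketch of the large-kfree sanity check: map the
 * pointer back to its head page and compare against that page's
 * linear address. Anything other than an exact match means the
 * caller is freeing from the middle of the allocation.
 */
static bool large_free_ptr_is_valid(const void *ptr)
{
	struct page *page = virt_to_head_page(ptr);

	return ptr == page_address(page);
}
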