
Searched refs:pfn (Results 1 – 9 of 9) sorted by relevance

/kernel/power/
snapshot.c  706  static int memory_bm_find_bit(struct memory_bitmap *bm, unsigned long pfn, in memory_bm_find_bit() argument
715 if (pfn >= zone->start_pfn && pfn < zone->end_pfn) in memory_bm_find_bit()
722 if (pfn >= curr->start_pfn && pfn < curr->end_pfn) { in memory_bm_find_bit()
744 ((pfn - zone->start_pfn) & ~BM_BLOCK_MASK) == bm->cur.node_pfn) in memory_bm_find_bit()
748 block_nr = (pfn - zone->start_pfn) >> BM_BLOCK_SHIFT; in memory_bm_find_bit()
763 bm->cur.node_pfn = (pfn - zone->start_pfn) & ~BM_BLOCK_MASK; in memory_bm_find_bit()
767 *bit_nr = (pfn - zone->start_pfn) & BM_BLOCK_MASK; in memory_bm_find_bit()
772 static void memory_bm_set_bit(struct memory_bitmap *bm, unsigned long pfn) in memory_bm_set_bit() argument
778 error = memory_bm_find_bit(bm, pfn, &addr, &bit); in memory_bm_set_bit()
783 static int mem_bm_set_bit_check(struct memory_bitmap *bm, unsigned long pfn) in mem_bm_set_bit_check() argument
[all …]
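
The memory_bm_find_bit() hits above translate a pfn into a (block, bit) position inside the zone's bitmap by subtracting the zone's start_pfn, then shifting and masking. A minimal sketch of that arithmetic follows; the block-size constants here are assumed values standing in for the real BM_BLOCK_SHIFT/BM_BLOCK_MASK defined in kernel/power/snapshot.c.

/* Illustrative only: the shift/mask split visible in lines 748 and 767 above.
 * BM_BLOCK_SHIFT is an assumed value, not the one from snapshot.c. */
#define BM_BLOCK_SHIFT 11
#define BM_BLOCK_MASK  ((1UL << BM_BLOCK_SHIFT) - 1)

static void pfn_to_block_and_bit(unsigned long pfn, unsigned long zone_start_pfn,
                                 unsigned long *block_nr, unsigned long *bit_nr)
{
        unsigned long off = pfn - zone_start_pfn;  /* page offset inside the zone */

        *block_nr = off >> BM_BLOCK_SHIFT;         /* which bitmap block holds the bit */
        *bit_nr   = off & BM_BLOCK_MASK;           /* bit position within that block */
}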
/kernel/dma/
debug.c  75  unsigned long pfn; member
395 return __pfn_to_phys(entry->pfn) + entry->offset; in phys_addr()
397 return page_to_phys(pfn_to_page(entry->pfn)) + entry->offset; in phys_addr()
419 phys_addr(entry), entry->pfn, in debug_dma_dump_mappings()
459 return (entry->pfn << CACHELINE_PER_PAGE_SHIFT) + in to_cacheline_number()
796 phys_addr(entry), entry->pfn, in dump_show()
1232 entry->pfn = page_to_pfn(page); in debug_dma_map_page()
1327 entry->pfn = page_to_pfn(sg_page(s)); in debug_dma_map_sg()
1374 .pfn = page_to_pfn(sg_page(s)), in debug_dma_unmap_sg()
1419 entry->pfn = vmalloc_to_pfn(virt); in debug_dma_alloc_coherent()
[all …]
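
The debug.c hits record each DMA mapping as a pfn plus an in-page offset and rebuild the physical address from those two fields on demand (phys_addr() at lines 395–397 above). A hedged sketch of that reconstruction, using a hypothetical entry structure:

/* Sketch only: struct mapping_entry is hypothetical; the conversion mirrors
 * the page_to_phys(pfn_to_page(...)) + offset expression in the hit above. */
#include <linux/mm.h>

struct mapping_entry {
        unsigned long pfn;   /* page frame backing the mapped buffer */
        size_t offset;       /* byte offset of the buffer within that page */
};

static phys_addr_t entry_phys(const struct mapping_entry *e)
{
        return page_to_phys(pfn_to_page(e->pfn)) + e->offset;
}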
swiotlb.c  421  unsigned long pfn = PFN_DOWN(orig_addr); in swiotlb_bounce() local
424 if (PageHighMem(pfn_to_page(pfn))) { in swiotlb_bounce()
435 buffer = kmap_atomic(pfn_to_page(pfn)); in swiotlb_bounce()
444 pfn++; in swiotlb_bounce()
coherent.c  275  unsigned long pfn = mem->pfn_base + start + off; in __dma_mmap_from_coherent() local
276 *ret = remap_pfn_range(vma, vma->vm_start, pfn, in __dma_mmap_from_coherent()
direct.c  456  unsigned long pfn = PHYS_PFN(dma_to_phys(dev, dma_addr)); in dma_direct_mmap() local
466 return remap_pfn_range(vma, vma->vm_start, pfn + vma->vm_pgoff, in dma_direct_mmap()
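
The coherent.c and direct.c hits both hand a pfn range to remap_pfn_range() to map DMA memory into a user vma, offsetting the base pfn by vma->vm_pgoff. A minimal sketch of that pattern, assuming a hypothetical driver with a physically contiguous buffer at buf_phys:

/* Sketch of the remap_pfn_range() usage seen above; my_dev_mmap and buf_phys
 * are illustrative, not kernel symbols. */
#include <linux/fs.h>
#include <linux/mm.h>
#include <linux/pfn.h>

static phys_addr_t buf_phys;   /* assumed: base physical address of the buffer */

static int my_dev_mmap(struct file *file, struct vm_area_struct *vma)
{
        unsigned long size = vma->vm_end - vma->vm_start;
        unsigned long pfn  = PHYS_PFN(buf_phys) + vma->vm_pgoff;

        return remap_pfn_range(vma, vma->vm_start, pfn, size, vma->vm_page_prot);
}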
/kernel/
iomem.c  33  unsigned long pfn = PHYS_PFN(offset); in try_ram_remap() local
36 if (pfn_valid(pfn) && !PageHighMem(pfn_to_page(pfn)) && in try_ram_remap()
resource.c  484  unsigned long pfn, end_pfn; in walk_system_ram_range() local
493 pfn = PFN_UP(res.start); in walk_system_ram_range()
495 if (end_pfn > pfn) in walk_system_ram_range()
496 ret = (*func)(pfn, end_pfn - pfn, arg); in walk_system_ram_range()
504 static int __is_ram(unsigned long pfn, unsigned long nr_pages, void *arg) in __is_ram() argument
513 int __weak page_is_ram(unsigned long pfn) in page_is_ram() argument
515 return walk_system_ram_range(pfn, 1, NULL, __is_ram) == 1; in page_is_ram()
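
page_is_ram() above is built on walk_system_ram_range(), which invokes a callback with a (start pfn, page count) pair for each chunk of System RAM in the requested range. A small sketch of that callback pattern; count_ram_pages and total_ram_pages_in are illustrative names:

/* Sketch only: accumulate how many System RAM pages fall in a pfn range. */
#include <linux/ioport.h>

static int count_ram_pages(unsigned long pfn, unsigned long nr_pages, void *arg)
{
        *(unsigned long *)arg += nr_pages;   /* pages in this System RAM chunk */
        return 0;                            /* 0 keeps the walk going */
}

static unsigned long total_ram_pages_in(unsigned long start_pfn, unsigned long nr_pages)
{
        unsigned long total = 0;

        walk_system_ram_range(start_pfn, nr_pages, &total, count_ram_pages);
        return total;
}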
kexec_core.c  377  unsigned long pfn, epfn, addr, eaddr; in kimage_alloc_normal_control_pages() local
382 pfn = page_to_boot_pfn(pages); in kimage_alloc_normal_control_pages()
383 epfn = pfn + count; in kimage_alloc_normal_control_pages()
384 addr = pfn << PAGE_SHIFT; in kimage_alloc_normal_control_pages()
/kernel/debug/kdb/
kdb_support.c  379  unsigned long pfn; in kdb_getphys() local
383 pfn = (addr >> PAGE_SHIFT); in kdb_getphys()
384 if (!pfn_valid(pfn)) in kdb_getphys()
386 page = pfn_to_page(pfn); in kdb_getphys()
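
kdb_getphys() above shows the usual guard before touching a frame: convert the address to a pfn, check pfn_valid(), and only then call pfn_to_page(). A hedged sketch of that chain; the function name is illustrative:

/* Sketch of the pfn_valid() guard pattern from kdb_getphys() above. */
#include <linux/mm.h>
#include <linux/pfn.h>

static struct page *phys_to_valid_page(phys_addr_t addr)
{
        unsigned long pfn = PFN_DOWN(addr);   /* addr >> PAGE_SHIFT */

        if (!pfn_valid(pfn))                  /* no struct page for this frame */
                return NULL;
        return pfn_to_page(pfn);
}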