
Searched refs:pfn (Results 1 – 9 of 9) sorted by relevance

/kernel/power/
snapshot.c
740 static int memory_bm_find_bit(struct memory_bitmap *bm, unsigned long pfn, in memory_bm_find_bit() argument
749 if (pfn >= zone->start_pfn && pfn < zone->end_pfn) in memory_bm_find_bit()
756 if (pfn >= curr->start_pfn && pfn < curr->end_pfn) { in memory_bm_find_bit()
778 ((pfn - zone->start_pfn) & ~BM_BLOCK_MASK) == bm->cur.node_pfn) in memory_bm_find_bit()
782 block_nr = (pfn - zone->start_pfn) >> BM_BLOCK_SHIFT; in memory_bm_find_bit()
797 bm->cur.node_pfn = (pfn - zone->start_pfn) & ~BM_BLOCK_MASK; in memory_bm_find_bit()
801 *bit_nr = (pfn - zone->start_pfn) & BM_BLOCK_MASK; in memory_bm_find_bit()
806 static void memory_bm_set_bit(struct memory_bitmap *bm, unsigned long pfn) in memory_bm_set_bit() argument
812 error = memory_bm_find_bit(bm, pfn, &addr, &bit); in memory_bm_set_bit()
817 static int mem_bm_set_bit_check(struct memory_bitmap *bm, unsigned long pfn) in mem_bm_set_bit_check() argument
[all …]
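
The snapshot.c hits show the hibernation bitmap translating a pfn into a per-zone block number and bit index. A minimal standalone sketch of that arithmetic (not the kernel code; the BM_BLOCK_SHIFT value is an assumption for the demo):

/*
 * Hypothetical demo of the split visible in memory_bm_find_bit() above:
 * make the pfn relative to the zone's first pfn, then derive a bitmap
 * block number and a bit index inside that block.
 */
#include <stdio.h>

#define PAGE_SHIFT     12
#define BM_BLOCK_SHIFT (PAGE_SHIFT + 3)               /* assumed: one page of bits per block */
#define BM_BLOCK_MASK  ((1UL << BM_BLOCK_SHIFT) - 1)

int main(void)
{
        unsigned long zone_start_pfn = 0x1000;
        unsigned long pfn = 0x1abcd;

        unsigned long block_nr = (pfn - zone_start_pfn) >> BM_BLOCK_SHIFT;
        unsigned long bit_nr   = (pfn - zone_start_pfn) & BM_BLOCK_MASK;

        printf("pfn %#lx -> block %lu, bit %lu\n", pfn, block_nr, bit_nr);
        return 0;
}
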
/kernel/dma/
debug.c
75 unsigned long pfn; member
395 return __pfn_to_phys(entry->pfn) + entry->offset; in phys_addr()
397 return page_to_phys(pfn_to_page(entry->pfn)) + entry->offset; in phys_addr()
419 phys_addr(entry), entry->pfn, in debug_dma_dump_mappings()
459 return (entry->pfn << CACHELINE_PER_PAGE_SHIFT) + in to_cacheline_number()
795 phys_addr(entry), entry->pfn, in dump_show()
1221 entry->pfn = page_to_pfn(page); in debug_dma_map_page()
1317 entry->pfn = page_to_pfn(sg_page(s)); in debug_dma_map_sg()
1364 .pfn = page_to_pfn(sg_page(s)), in debug_dma_unmap_sg()
1410 entry->pfn = vmalloc_to_pfn(virt); in debug_dma_alloc_coherent()
[all …]
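
dma-debug records each mapping as a pfn plus an in-page offset and rebuilds the physical address on demand, as phys_addr() above does with __pfn_to_phys(entry->pfn) + entry->offset. A minimal standalone sketch of that conversion, assuming PAGE_SHIFT == 12:

#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT 12

static uint64_t pfn_offset_to_phys(unsigned long pfn, unsigned long offset)
{
        /* __pfn_to_phys() is essentially pfn << PAGE_SHIFT */
        return ((uint64_t)pfn << PAGE_SHIFT) + offset;
}

int main(void)
{
        printf("phys = %#llx\n",
               (unsigned long long)pfn_offset_to_phys(0x12345, 0x80));
        return 0;
}
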
swiotlb.c
373 unsigned long pfn = PFN_DOWN(orig_addr); in swiotlb_bounce() local
407 if (PageHighMem(pfn_to_page(pfn))) { in swiotlb_bounce()
418 buffer = kmap_atomic(pfn_to_page(pfn)); in swiotlb_bounce()
427 pfn++; in swiotlb_bounce()
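
The swiotlb hit is the bounce-buffer copy path, which must cope with highmem pages that have no permanent kernel mapping. A hedged kernel-style sketch of that per-page pattern (the helper name is made up, this is not swiotlb's actual code):

/*
 * Derive the starting pfn with PFN_DOWN(), map each page with kmap_atomic()
 * (needed when the page may be highmem), and bump pfn as the copy crosses
 * page boundaries, as swiotlb_bounce() above does.
 */
#include <linux/highmem.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/pfn.h>
#include <linux/string.h>

static void copy_from_phys_buf(phys_addr_t orig_addr, void *dst, size_t len)
{
        unsigned long pfn = PFN_DOWN(orig_addr);
        unsigned int offset = offset_in_page(orig_addr);

        while (len) {
                size_t chunk = min_t(size_t, len, PAGE_SIZE - offset);
                char *vaddr = kmap_atomic(pfn_to_page(pfn));

                memcpy(dst, vaddr + offset, chunk);
                kunmap_atomic(vaddr);

                dst += chunk;
                len -= chunk;
                offset = 0;
                pfn++;                  /* next page of the source buffer */
        }
}
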
coherent.c
247 unsigned long pfn = mem->pfn_base + start + off; in __dma_mmap_from_coherent() local
248 *ret = remap_pfn_range(vma, vma->vm_start, pfn, in __dma_mmap_from_coherent()
direct.c
500 unsigned long pfn = PHYS_PFN(dma_to_phys(dev, dma_addr)); in dma_direct_mmap() local
512 return remap_pfn_range(vma, vma->vm_start, pfn + vma->vm_pgoff, in dma_direct_mmap()
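
Both the coherent.c and direct.c hits end in remap_pfn_range(), which inserts a run of pfns into a user vma. A hedged sketch of that call shape, with a made-up handler name and an assumed base pfn for the region being exported (illustrative only, not the actual coherent.c or direct.c code):

#include <linux/mm.h>

static int example_mmap(struct vm_area_struct *vma, unsigned long base_pfn)
{
        unsigned long size = vma->vm_end - vma->vm_start;

        /* honour the file offset the caller asked for, as dma_direct_mmap() does */
        return remap_pfn_range(vma, vma->vm_start, base_pfn + vma->vm_pgoff,
                               size, vma->vm_page_prot);
}
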
/kernel/
iomem.c
33 unsigned long pfn = PHYS_PFN(offset); in try_ram_remap() local
36 if (pfn_valid(pfn) && !PageHighMem(pfn_to_page(pfn)) && in try_ram_remap()
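
iomem.c uses the pfn to decide whether a physical range can be served from the existing direct map. A minimal kernel-style sketch of that check, under an assumed helper name:

/*
 * The test try_ram_remap() applies above: convert the physical address to a
 * pfn with PHYS_PFN(), require that a struct page exists for it, and reject
 * highmem pages, which have no permanent kernel virtual address.
 */
#include <linux/mm.h>
#include <linux/pfn.h>

static bool phys_is_lowmem_ram(phys_addr_t addr)
{
        unsigned long pfn = PHYS_PFN(addr);

        return pfn_valid(pfn) && !PageHighMem(pfn_to_page(pfn));
}
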
resource.c
446 unsigned long pfn, end_pfn; in walk_system_ram_range() local
454 pfn = PFN_UP(res.start); in walk_system_ram_range()
456 if (end_pfn > pfn) in walk_system_ram_range()
457 ret = (*func)(pfn, end_pfn - pfn, arg); in walk_system_ram_range()
465 static int __is_ram(unsigned long pfn, unsigned long nr_pages, void *arg) in __is_ram() argument
474 int __weak page_is_ram(unsigned long pfn) in page_is_ram() argument
476 return walk_system_ram_range(pfn, 1, NULL, __is_ram) == 1; in page_is_ram()
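
resource.c shows walk_system_ram_range() converting resources into pfn ranges and feeding them to a callback; page_is_ram() above is just a one-pfn walk whose callback returns 1. A hedged sketch of the callback pattern (the callback and wrapper are illustrative, not from resource.c):

/*
 * walk_system_ram_range() hands every System RAM chunk in the requested pfn
 * range to the callback as (start pfn, number of pages); a non-zero return
 * stops the walk.
 */
#include <linux/ioport.h>

static int count_ram_pages_cb(unsigned long start_pfn, unsigned long nr_pages,
                              void *arg)
{
        *(unsigned long *)arg += nr_pages;      /* accumulate and keep walking */
        return 0;
}

static unsigned long count_system_ram_pages(unsigned long start_pfn,
                                            unsigned long nr_pages)
{
        unsigned long count = 0;

        walk_system_ram_range(start_pfn, nr_pages, &count, count_ram_pages_cb);
        return count;
}
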
kexec_core.c
378 unsigned long pfn, epfn, addr, eaddr; in kimage_alloc_normal_control_pages() local
383 pfn = page_to_boot_pfn(pages); in kimage_alloc_normal_control_pages()
384 epfn = pfn + count; in kimage_alloc_normal_control_pages()
385 addr = pfn << PAGE_SHIFT; in kimage_alloc_normal_control_pages()
/kernel/debug/kdb/
kdb_support.c
320 unsigned long pfn; in kdb_getphys() local
324 pfn = (addr >> PAGE_SHIFT); in kdb_getphys()
325 if (!pfn_valid(pfn)) in kdb_getphys()
327 page = pfn_to_page(pfn); in kdb_getphys()
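
kdb resolves a raw physical address to a struct page before touching it. A hedged kernel-style sketch of that resolve-and-read sequence (illustrative helper, not kdb's exact code), assuming the read stays within a single page:

/*
 * Split the physical address into a pfn and an in-page offset, check the pfn
 * with pfn_valid(), then map the page and copy the bytes out, as
 * kdb_getphys() above does.
 */
#include <linux/highmem.h>
#include <linux/mm.h>
#include <linux/string.h>

static int read_phys(void *res, unsigned long addr, size_t size)
{
        unsigned long pfn = addr >> PAGE_SHIFT;
        struct page *page;
        char *vaddr;

        if (!pfn_valid(pfn))
                return 1;               /* no struct page: refuse */

        page = pfn_to_page(pfn);
        vaddr = kmap_atomic(page);
        memcpy(res, vaddr + (addr & (PAGE_SIZE - 1)), size);
        kunmap_atomic(vaddr);
        return 0;
}
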