Searched refs:start_pfn (Results 1 – 11 of 11) sorted by relevance
/drivers/hv/
hv_balloon.c
    436  unsigned long start_pfn;  member
    547  static void hv_bring_pgs_online(unsigned long start_pfn, unsigned long size)  in hv_bring_pgs_online() argument
    553  pg = pfn_to_page(start_pfn + i);  in hv_bring_pgs_online()
    566  unsigned long start_pfn;  in hv_mem_hot_add() local
    571  start_pfn = start + (i * HA_CHUNK);  in hv_mem_hot_add()
    587  nid = memory_add_physaddr_to_nid(PFN_PHYS(start_pfn));  in hv_mem_hot_add()
    588  ret = add_memory(nid, PFN_PHYS((start_pfn)),  in hv_mem_hot_add()
    656  static bool pfn_covered(unsigned long start_pfn, unsigned long pfn_cnt)  in pfn_covered() argument
    672  if ((start_pfn >= has->end_pfn))  in pfn_covered()
    678  if ((start_pfn + pfn_cnt) > has->end_pfn) {  in pfn_covered()
    [all …]
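The hv_mem_hot_add() hits above show the Hyper-V balloon driver stepping through the hot-add range one HA_CHUNK at a time, turning each chunk's starting PFN into a physical address with PFN_PHYS() and passing it to add_memory(). A minimal userspace sketch of that chunk arithmetic follows; the HA_CHUNK value and the stubbed add_memory() are illustrative assumptions, not the driver's definitions.

#include <stdio.h>

#define PAGE_SHIFT 12
#define PFN_PHYS(pfn) ((unsigned long long)(pfn) << PAGE_SHIFT)
/* Assumed chunk size: 128 MiB expressed in 4 KiB pages, for illustration only. */
#define HA_CHUNK (32 * 1024)

/* Stand-in for add_memory(nid, phys_addr, size); it only reports the request. */
static int add_memory_stub(int nid, unsigned long long phys, unsigned long long size)
{
	printf("node %d: hot-add phys 0x%llx + 0x%llx\n", nid, phys, size);
	return 0;
}

int main(void)
{
	unsigned long start = 0x100000;   /* first PFN of the hot-add region */
	unsigned long chunks = 4;         /* number of HA_CHUNK pieces to add */

	for (unsigned long i = 0; i < chunks; i++) {
		unsigned long start_pfn = start + (i * HA_CHUNK);
		add_memory_stub(0, PFN_PHYS(start_pfn), PFN_PHYS(HA_CHUNK));
	}
	return 0;
}

Compiled as plain C this just prints the chunk requests; in the driver each request goes to add_memory(), and hv_bring_pgs_online() later walks the same PFNs via pfn_to_page() to bring them online.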
/drivers/base/
memory.c
    210  static bool pages_correctly_reserved(unsigned long start_pfn)  in pages_correctly_reserved() argument
    214  unsigned long pfn = start_pfn;  in pages_correctly_reserved()
    248  unsigned long start_pfn;  in memory_block_action() local
    254  start_pfn = page_to_pfn(first_page);  in memory_block_action()
    258  if (!pages_correctly_reserved(start_pfn))  in memory_block_action()
    261  ret = online_pages(start_pfn, nr_pages, online_type);  in memory_block_action()
    264  ret = offline_pages(start_pfn, nr_pages);  in memory_block_action()
    512  int __weak arch_get_memory_phys_device(unsigned long start_pfn)  in arch_get_memory_phys_device() argument
    553  unsigned long start_pfn;  in init_memory_block() local
    568  start_pfn = section_nr_to_pfn(mem->start_section_nr);  in init_memory_block()
    [all …]
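In the memory.c hits, memory_block_action() derives start_pfn from the block's first page, checks pages_correctly_reserved(), and then calls online_pages() or offline_pages() over nr_pages, while init_memory_block() gets the same start_pfn from a section number via section_nr_to_pfn(). A small sketch of the section-to-PFN arithmetic and the online/offline dispatch; the section geometry and the two callbacks are assumed for illustration.

#include <stdio.h>

/* Assumed geometry: 128 MiB sections of 4 KiB pages (arch dependent in reality). */
#define SECTION_SIZE_BITS 27
#define PAGE_SHIFT        12
#define PFN_SECTION_SHIFT (SECTION_SIZE_BITS - PAGE_SHIFT)
#define PAGES_PER_SECTION (1UL << PFN_SECTION_SHIFT)

static unsigned long section_nr_to_pfn_model(unsigned long section_nr)
{
	return section_nr << PFN_SECTION_SHIFT;
}

/* Stand-ins for online_pages()/offline_pages(); they only report the range. */
static int online_pages_stub(unsigned long start_pfn, unsigned long nr_pages)
{
	printf("online  pfn %lu..%lu\n", start_pfn, start_pfn + nr_pages - 1);
	return 0;
}

static int offline_pages_stub(unsigned long start_pfn, unsigned long nr_pages)
{
	printf("offline pfn %lu..%lu\n", start_pfn, start_pfn + nr_pages - 1);
	return 0;
}

int main(void)
{
	unsigned long start_pfn = section_nr_to_pfn_model(8);   /* block starts at section 8 */
	unsigned long nr_pages = PAGES_PER_SECTION;

	online_pages_stub(start_pfn, nr_pages);
	offline_pages_stub(start_pfn, nr_pages);
	return 0;
}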
node.c
    465  unsigned long start_pfn = NODE_DATA(nid)->node_start_pfn;  in link_mem_sections() local
    466  unsigned long end_pfn = start_pfn + NODE_DATA(nid)->node_spanned_pages;  in link_mem_sections()
    471  for (pfn = start_pfn; pfn < end_pfn; pfn += PAGES_PER_SECTION) {  in link_mem_sections()
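link_mem_sections() walks the node's PFN span one memory section at a time, from node_start_pfn for node_spanned_pages pages. A short userspace model of that per-section loop, with the section size assumed:

#include <stdio.h>

#define PAGES_PER_SECTION (1UL << 15)   /* assumed: 128 MiB sections of 4 KiB pages */

int main(void)
{
	unsigned long start_pfn = 0x40000;               /* stands in for NODE_DATA(nid)->node_start_pfn */
	unsigned long spanned = 4 * PAGES_PER_SECTION;   /* stands in for node_spanned_pages */
	unsigned long end_pfn = start_pfn + spanned;

	/* Visit one section per step, as the link_mem_sections() hit at line 471 does. */
	for (unsigned long pfn = start_pfn; pfn < end_pfn; pfn += PAGES_PER_SECTION)
		printf("section starting at pfn 0x%lx\n", pfn);

	return 0;
}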
/drivers/iommu/
intel-iommu.c
    858  unsigned long start_pfn,  in dma_pte_clear_range() argument
    866  BUG_ON(addr_width < BITS_PER_LONG && start_pfn >> addr_width);  in dma_pte_clear_range()
    868  BUG_ON(start_pfn > last_pfn);  in dma_pte_clear_range()
    873  first_pte = pte = dma_pfn_level_pte(domain, start_pfn, 1, &large_page);  in dma_pte_clear_range()
    875  start_pfn = align_to_level(start_pfn + 1, large_page + 1);  in dma_pte_clear_range()
    880  start_pfn += lvl_to_nr_pages(large_page);  in dma_pte_clear_range()
    882  } while (start_pfn <= last_pfn && !first_pte_in_page(pte));  in dma_pte_clear_range()
    887  } while (start_pfn && start_pfn <= last_pfn);  in dma_pte_clear_range()
    895  unsigned long start_pfn,  in dma_pte_free_pagetable() argument
    905  BUG_ON(addr_width < BITS_PER_LONG && start_pfn >> addr_width);  in dma_pte_free_pagetable()
    [all …]
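The dma_pte_clear_range() hits show the VT-d code walking [start_pfn, last_pfn] and advancing start_pfn by the coverage of whatever page-table level it meets: judging by their names, align_to_level() rounds a PFN up to the next boundary of a level and lvl_to_nr_pages() returns how many base pages a PTE at that level maps. A sketch of just that stepping arithmetic, assuming the usual 9 bits of PFN per level; the real helpers live in intel-iommu.c and may differ in detail.

#include <stdio.h>

#define LEVEL_STRIDE 9   /* assumed: 512 entries per page-table level */

/* Number of base pages covered by one PTE at the given level (level 1 = 4 KiB page). */
static unsigned long lvl_to_nr_pages_model(unsigned int level)
{
	return 1UL << ((level - 1) * LEVEL_STRIDE);
}

/* Round pfn up to the next boundary of the given level. */
static unsigned long align_to_level_model(unsigned long pfn, unsigned int level)
{
	unsigned long span = lvl_to_nr_pages_model(level);

	return (pfn + span - 1) & ~(span - 1);
}

int main(void)
{
	unsigned long start_pfn = 0x12345, last_pfn = 0x52345;
	unsigned int large_page = 2;   /* level reported for this address, e.g. a 2 MiB entry */

	/* No PTE found: skip to the next boundary of the level above (mirrors hit 875). */
	printf("skip hole to 0x%lx\n", align_to_level_model(start_pfn + 1, large_page + 1));

	/* PTE found: advance by the base pages it covers (mirrors hit 880). */
	printf("step mapping to 0x%lx\n", start_pfn + lvl_to_nr_pages_model(large_page));

	printf("stop once past last_pfn 0x%lx\n", last_pfn);
	return 0;
}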
/drivers/net/ethernet/ibm/ehea/
ehea_qmr.c
    707  unsigned long pfn, start_pfn, end_pfn, nr_pages;  in ehea_create_busmap_callback() local
    714  start_pfn = initial_pfn;  in ehea_create_busmap_callback()
    716  pfn = start_pfn;  in ehea_create_busmap_callback()
    721  nr_pages = pfn - start_pfn;  in ehea_create_busmap_callback()
    722  ret = ehea_update_busmap(start_pfn, nr_pages,  in ehea_create_busmap_callback()
    729  start_pfn = pfn;  in ehea_create_busmap_callback()
    735  nr_pages = pfn - start_pfn;  in ehea_create_busmap_callback()
    736  return ehea_update_busmap(start_pfn, nr_pages, EHEA_BUSMAP_ADD_SECT);  in ehea_create_busmap_callback()
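ehea_create_busmap_callback() accumulates contiguous PFN runs: it extends a run from start_pfn, reports its length as nr_pages = pfn - start_pfn to ehea_update_busmap() when the run ends, and restarts the run with start_pfn = pfn. The ehca_mrmw.c hits further down follow the same shape. A small model of that run-length pattern over an assumed presence map; the map and the stubbed update function are illustrative only.

#include <stdio.h>

/* Stand-in for ehea_update_busmap(): record one contiguous run of PFNs. */
static int update_busmap_stub(unsigned long start_pfn, unsigned long nr_pages)
{
	printf("run: start_pfn=%lu nr_pages=%lu\n", start_pfn, nr_pages);
	return 0;
}

int main(void)
{
	/* 1 = memory present at this PFN, 0 = hole (assumed layout for the example). */
	int present[] = { 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1 };
	unsigned long end_pfn = sizeof(present) / sizeof(present[0]);
	unsigned long start_pfn, pfn = 0;

	while (pfn < end_pfn) {
		/* Skip any hole, then start a new run at the first present PFN. */
		while (pfn < end_pfn && !present[pfn])
			pfn++;
		start_pfn = pfn;

		/* Extend the run while PFNs stay present and contiguous. */
		while (pfn < end_pfn && present[pfn])
			pfn++;

		if (pfn > start_pfn)
			update_busmap_stub(start_pfn, pfn - start_pfn);
	}
	return 0;
}

The driver also flushes a final run after the scan ends, which is why nr_pages = pfn - start_pfn appears twice in the hits (lines 721 and 735).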
ehea_main.c
    3421  if (ehea_add_sect_bmap(arg->start_pfn, arg->nr_pages))  in ehea_mem_notifier()
    3428  if (ehea_rem_sect_bmap(arg->start_pfn, arg->nr_pages))  in ehea_mem_notifier()
/drivers/xen/
balloon.c
    565  static void __init balloon_add_region(unsigned long start_pfn,  in balloon_add_region() argument
    576  extra_pfn_end = min(max_pfn, start_pfn + pages);  in balloon_add_region()
    578  for (pfn = start_pfn; pfn < extra_pfn_end; pfn++) {  in balloon_add_region()
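balloon_add_region() clamps the region against max_pfn (extra_pfn_end = min(max_pfn, start_pfn + pages)) and then hands each PFN in [start_pfn, extra_pfn_end) to the balloon. A minimal model of that clamp-and-loop; the max_pfn value and the per-page handler are assumptions.

#include <stdio.h>

static unsigned long min_ul(unsigned long a, unsigned long b)
{
	return a < b ? a : b;
}

/* Stand-in for handing one page to the balloon. */
static void balloon_append_stub(unsigned long pfn)
{
	printf("ballooned pfn 0x%lx\n", pfn);
}

int main(void)
{
	unsigned long max_pfn = 0x1005;            /* assumed end of usable memory */
	unsigned long start_pfn = 0x1000, pages = 16;

	/* Clamp so the loop never walks past max_pfn, as balloon_add_region() does. */
	unsigned long extra_pfn_end = min_ul(max_pfn, start_pfn + pages);

	for (unsigned long pfn = start_pfn; pfn < extra_pfn_end; pfn++)
		balloon_append_stub(pfn);

	return 0;
}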
/drivers/s390/char/
sclp_cmd.c
    355  int arch_get_memory_phys_device(unsigned long start_pfn)  in arch_get_memory_phys_device() argument
    359  return PFN_PHYS(start_pfn) >> ilog2(rzm);  in arch_get_memory_phys_device()
    482  start = arg->start_pfn << PAGE_SHIFT;  in sclp_mem_notifier()
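The s390 arch_get_memory_phys_device() hit is a one-line formula: PFN_PHYS(start_pfn) converts the PFN to a physical address, and the shift by ilog2(rzm) divides by the storage-increment size to yield an increment number (reading rzm as that size is an assumption here). A small model of the arithmetic with an assumed increment size:

#include <stdio.h>

#define PAGE_SHIFT 12
#define PFN_PHYS(pfn) ((unsigned long long)(pfn) << PAGE_SHIFT)

/* ilog2() for a power of two: position of the highest set bit. */
static unsigned int ilog2_model(unsigned long long v)
{
	unsigned int log = 0;

	while (v >>= 1)
		log++;
	return log;
}

int main(void)
{
	unsigned long long rzm = 256ULL << 20;   /* assumed 256 MiB storage increment */
	unsigned long start_pfn = 0x30000;       /* 0x30000 << 12 = 768 MiB */

	/* Same shape as the hit at line 359: increment number = phys addr / rzm. */
	printf("phys device %llu\n", PFN_PHYS(start_pfn) >> ilog2_model(rzm));
	return 0;
}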
/drivers/infiniband/hw/ehca/
ehca_mrmw.c
    2426  unsigned long pfn, start_pfn, end_pfn, nr_pages;  in ehca_create_busmap_callback() local
    2432  start_pfn = initial_pfn;  in ehca_create_busmap_callback()
    2434  pfn = start_pfn;  in ehca_create_busmap_callback()
    2439  nr_pages = pfn - start_pfn;  in ehca_create_busmap_callback()
    2440  ret = ehca_update_busmap(start_pfn, nr_pages);  in ehca_create_busmap_callback()
    2445  start_pfn = pfn;  in ehca_create_busmap_callback()
    2451  nr_pages = pfn - start_pfn;  in ehca_create_busmap_callback()
    2452  return ehca_update_busmap(start_pfn, nr_pages);  in ehca_create_busmap_callback()
/drivers/gpu/drm/gma500/
mmu.c
    697  int psb_mmu_insert_pfn_sequence(struct psb_mmu_pd *pd, uint32_t start_pfn,  in psb_mmu_insert_pfn_sequence() argument
    722  pte = psb_mmu_mask_pte(start_pfn++, type);  in psb_mmu_insert_pfn_sequence()
psb_drv.h
    738  uint32_t start_pfn,