Searched refs:pfn (Results 1 – 25 of 414) sorted by relevance

/arch/x86/include/asm/xen/
page.h
46 extern int xen_alloc_p2m_entry(unsigned long pfn);
48 extern unsigned long get_phys_to_machine(unsigned long pfn);
49 extern bool set_phys_to_machine(unsigned long pfn, unsigned long mfn);
50 extern bool __set_phys_to_machine(unsigned long pfn, unsigned long mfn);
85 static inline unsigned long __pfn_to_mfn(unsigned long pfn) in __pfn_to_mfn() argument
89 if (pfn < xen_p2m_size) in __pfn_to_mfn()
90 mfn = xen_p2m_addr[pfn]; in __pfn_to_mfn()
91 else if (unlikely(pfn < xen_max_p2m_pfn)) in __pfn_to_mfn()
92 return get_phys_to_machine(pfn); in __pfn_to_mfn()
94 return IDENTITY_FRAME(pfn); in __pfn_to_mfn()
[all …]
/arch/x86/xen/
p2m.c
124 static inline unsigned p2m_top_index(unsigned long pfn) in p2m_top_index() argument
126 BUG_ON(pfn >= MAX_P2M_PFN); in p2m_top_index()
127 return pfn / (P2M_MID_PER_PAGE * P2M_PER_PAGE); in p2m_top_index()
130 static inline unsigned p2m_mid_index(unsigned long pfn) in p2m_mid_index() argument
132 return (pfn / P2M_PER_PAGE) % P2M_MID_PER_PAGE; in p2m_mid_index()
135 static inline unsigned p2m_index(unsigned long pfn) in p2m_index() argument
137 return pfn % P2M_PER_PAGE; in p2m_index()
172 static void p2m_init_identity(unsigned long *p2m, unsigned long pfn) in p2m_init_identity() argument
177 p2m[i] = IDENTITY_FRAME(pfn + i); in p2m_init_identity()
210 unsigned long pfn, mfn; in xen_build_mfn_list_list() local
[all …]
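
The three index helpers above (p2m_top_index, p2m_mid_index, p2m_index) split a guest pfn into its positions in Xen's three-level p2m tree. Below is a minimal standalone sketch of that arithmetic, not kernel code; the 512-entry sizes assume a 4 KiB page holding 8-byte entries, as on a 64-bit build.

/* Standalone sketch of the three-level p2m index arithmetic (assumed sizes). */
#include <stdio.h>

#define P2M_PER_PAGE     512UL   /* assumed: PAGE_SIZE / sizeof(unsigned long) */
#define P2M_MID_PER_PAGE 512UL   /* assumed: pointers per mid-level page */

static unsigned long p2m_top_index(unsigned long pfn)
{
        return pfn / (P2M_MID_PER_PAGE * P2M_PER_PAGE);  /* top-level slot */
}

static unsigned long p2m_mid_index(unsigned long pfn)
{
        return (pfn / P2M_PER_PAGE) % P2M_MID_PER_PAGE;  /* mid-level slot */
}

static unsigned long p2m_index(unsigned long pfn)
{
        return pfn % P2M_PER_PAGE;                       /* entry within leaf page */
}

int main(void)
{
        unsigned long pfn = 0x123456UL;

        printf("pfn %#lx -> top %lu, mid %lu, leaf %lu\n", pfn,
               p2m_top_index(pfn), p2m_mid_index(pfn), p2m_index(pfn));
        return 0;
}
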
/arch/arm/xen/
p2m.c
21 unsigned long pfn; member
42 if (new->pfn == entry->pfn) in xen_add_phys_to_mach_entry()
45 if (new->pfn < entry->pfn) in xen_add_phys_to_mach_entry()
57 __func__, &new->pfn, &new->mfn, &entry->pfn, &entry->mfn); in xen_add_phys_to_mach_entry()
62 unsigned long __pfn_to_mfn(unsigned long pfn) in __pfn_to_mfn() argument
71 if (entry->pfn <= pfn && in __pfn_to_mfn()
72 entry->pfn + entry->nr_pages > pfn) { in __pfn_to_mfn()
74 return entry->mfn + (pfn - entry->pfn); in __pfn_to_mfn()
76 if (pfn < entry->pfn) in __pfn_to_mfn()
119 bool __set_phys_to_machine_multi(unsigned long pfn, in __set_phys_to_machine_multi() argument
[all …]
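
The ARM side keeps the p2m as a tree of {pfn, mfn, nr_pages} ranges rather than a flat array, and __pfn_to_mfn() above walks it with the range test shown in its body. A standalone sketch of that test follows; the fixed table and the linear scan replace the kernel's rbtree walk and are purely illustrative.

/* Standalone sketch of the ARM pfn-to-mfn range lookup (hypothetical table). */
#include <stdio.h>

#define INVALID_P2M_ENTRY (~0UL)

struct p2m_range {
        unsigned long pfn;       /* first guest pfn of the range */
        unsigned long mfn;       /* first machine frame of the range */
        unsigned long nr_pages;  /* length of the range in pages */
};

static const struct p2m_range ranges[] = {   /* hypothetical entries */
        { 0x1000UL, 0x80000UL, 0x100UL },
        { 0x4000UL, 0x90000UL, 0x040UL },
};

static unsigned long pfn_to_mfn_sketch(unsigned long pfn)
{
        for (unsigned int i = 0; i < sizeof(ranges) / sizeof(ranges[0]); i++)
                if (ranges[i].pfn <= pfn &&
                    ranges[i].pfn + ranges[i].nr_pages > pfn)
                        return ranges[i].mfn + (pfn - ranges[i].pfn);
        return INVALID_P2M_ENTRY;            /* no range covers this pfn */
}

int main(void)
{
        printf("pfn 0x1042 -> mfn %#lx\n", pfn_to_mfn_sketch(0x1042UL));
        return 0;
}
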
/arch/m32r/include/asm/
mmzone.h
16 #define node_localnr(pfn, nid) ((pfn) - NODE_DATA(nid)->node_start_pfn) argument
27 #define pfn_valid(pfn) (1) argument
29 #define pfn_valid(pfn) ((pfn) < num_physpages) argument
36 static __inline__ int pfn_to_nid(unsigned long pfn) in pfn_to_nid() argument
41 if (pfn >= node_start_pfn(node) && pfn < node_end_pfn(node)) in pfn_to_nid()
47 static __inline__ struct pglist_data *pfn_to_pgdat(unsigned long pfn) in pfn_to_pgdat() argument
49 return(NODE_DATA(pfn_to_nid(pfn))); in pfn_to_pgdat()
/arch/parisc/include/asm/
mmzone.h
33 #define pfn_is_io(pfn) ((pfn & (0xf0000000UL >> PAGE_SHIFT)) == (0xf0000000UL >> PAGE_SHIFT)) argument
36 #define pfn_is_io(pfn) ((pfn & (0xf000000000000000UL >> PAGE_SHIFT)) == (0xf000000000000000UL >> PA… argument
39 static inline int pfn_to_nid(unsigned long pfn) in pfn_to_nid() argument
43 if (unlikely(pfn_is_io(pfn))) in pfn_to_nid()
46 i = pfn >> PFNNID_SHIFT; in pfn_to_nid()
52 static inline int pfn_valid(int pfn) in pfn_valid() argument
54 int nid = pfn_to_nid(pfn); in pfn_valid()
57 return (pfn < node_end_pfn(nid)); in pfn_valid()
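
The m32r and parisc helpers above follow the usual discontiguous-memory pattern: pfn_to_nid() maps a pfn to the node whose span contains it, and pfn_valid() accepts only pfns some node claims. A minimal standalone sketch of that pattern, with a hypothetical two-node layout standing in for node_start_pfn()/node_end_pfn():

/* Standalone sketch of the pfn_to_nid()/pfn_valid() pattern (hypothetical layout). */
#include <stdio.h>

struct node_span { unsigned long start_pfn, end_pfn; };

static const struct node_span nodes[] = {   /* hypothetical node layout */
        { 0x00000UL, 0x20000UL },           /* node 0 */
        { 0x80000UL, 0x90000UL },           /* node 1 */
};

static int pfn_to_nid(unsigned long pfn)
{
        for (unsigned int nid = 0; nid < sizeof(nodes) / sizeof(nodes[0]); nid++)
                if (pfn >= nodes[nid].start_pfn && pfn < nodes[nid].end_pfn)
                        return (int)nid;
        return -1;                          /* pfn not covered by any node */
}

static int pfn_valid(unsigned long pfn)
{
        return pfn_to_nid(pfn) >= 0;
}

int main(void)
{
        printf("pfn 0x85000 -> nid %d, valid %d\n",
               pfn_to_nid(0x85000UL), pfn_valid(0x85000UL));
        return 0;
}
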
/arch/arm/mach-gemini/
mm.c
23 .pfn =__phys_to_pfn(GEMINI_GLOBAL_BASE),
28 .pfn = __phys_to_pfn(GEMINI_UART_BASE),
33 .pfn = __phys_to_pfn(GEMINI_TIMER_BASE),
38 .pfn = __phys_to_pfn(GEMINI_INTERRUPT_BASE),
43 .pfn = __phys_to_pfn(GEMINI_POWER_CTRL_BASE),
48 .pfn = __phys_to_pfn(GEMINI_GPIO_BASE(0)),
53 .pfn = __phys_to_pfn(GEMINI_GPIO_BASE(1)),
58 .pfn = __phys_to_pfn(GEMINI_GPIO_BASE(2)),
63 .pfn = __phys_to_pfn(GEMINI_FLASH_CTRL_BASE),
68 .pfn = __phys_to_pfn(GEMINI_DRAM_CTRL_BASE),
[all …]
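
Every entry above is a static I/O mapping descriptor whose .pfn field is the peripheral's physical base run through __phys_to_pfn(), i.e. shifted right by PAGE_SHIFT. Below is a standalone sketch of that conversion with a table of the same shape; the simplified struct, the addresses and PAGE_SHIFT = 12 are illustrative assumptions rather than Gemini values.

/* Standalone sketch of filling an iotable with __phys_to_pfn() (assumed values). */
#include <stdio.h>

#define PAGE_SHIFT 12
#define __phys_to_pfn(phys) ((unsigned long)(phys) >> PAGE_SHIFT)

struct io_map_sketch {
        unsigned long virt;      /* virtual address the region is mapped at */
        unsigned long pfn;       /* physical frame of the region's base */
        unsigned long length;    /* size of the mapping in bytes */
};

static const struct io_map_sketch io_desc[] = {   /* hypothetical regions */
        { 0xf0000000UL, __phys_to_pfn(0x40000000UL), 0x00100000UL },
        { 0xf0100000UL, __phys_to_pfn(0x42000000UL), 0x00100000UL },
};

int main(void)
{
        for (unsigned int i = 0; i < sizeof(io_desc) / sizeof(io_desc[0]); i++)
                printf("virt %#lx -> pfn %#lx (%lu bytes)\n",
                       io_desc[i].virt, io_desc[i].pfn, io_desc[i].length);
        return 0;
}
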
/arch/arm/include/asm/xen/
page.h
14 #define phys_to_machine_mapping_valid(pfn) (1) argument
42 unsigned long __pfn_to_mfn(unsigned long pfn);
46 static inline unsigned long pfn_to_gfn(unsigned long pfn) in pfn_to_gfn() argument
48 return pfn; in pfn_to_gfn()
57 static inline unsigned long pfn_to_bfn(unsigned long pfn) in pfn_to_bfn() argument
62 mfn = __pfn_to_mfn(pfn); in pfn_to_bfn()
67 return pfn; in pfn_to_bfn()
105 bool __set_phys_to_machine(unsigned long pfn, unsigned long mfn);
106 bool __set_phys_to_machine_multi(unsigned long pfn, unsigned long mfn,
109 static inline bool set_phys_to_machine(unsigned long pfn, unsigned long mfn) in set_phys_to_machine() argument
[all …]
/arch/x86/include/asm/
mmzone_32.h
33 static inline int pfn_to_nid(unsigned long pfn) in pfn_to_nid() argument
36 return((int) physnode_map[(pfn) / PAGES_PER_SECTION]); in pfn_to_nid()
42 static inline int pfn_valid(int pfn) in pfn_valid() argument
44 int nid = pfn_to_nid(pfn); in pfn_valid()
47 return (pfn < node_end_pfn(nid)); in pfn_valid()
51 #define early_pfn_valid(pfn) pfn_valid((pfn)) argument
/arch/s390/kernel/
suspend.c
99 void page_key_read(unsigned long *pfn) in page_key_read() argument
103 addr = (unsigned long) page_address(pfn_to_page(*pfn)); in page_key_read()
104 *(unsigned char *) pfn = (unsigned char) page_get_storage_key(addr); in page_key_read()
111 void page_key_memorize(unsigned long *pfn) in page_key_memorize() argument
113 page_key_wp->data[page_key_wx] = *(unsigned char *) pfn; in page_key_memorize()
114 *(unsigned char *) pfn = 0; in page_key_memorize()
137 int pfn_is_nosave(unsigned long pfn) in pfn_is_nosave() argument
145 if (pfn <= LC_PAGES) in pfn_is_nosave()
147 if (pfn >= nosave_begin_pfn && pfn < nosave_end_pfn) in pfn_is_nosave()
150 if (pfn >= stext_pfn && pfn <= eshared_pfn) in pfn_is_nosave()
[all …]
/arch/arc/include/asm/
mmzone.h
17 static inline int pfn_to_nid(unsigned long pfn) in pfn_to_nid() argument
22 is_end_low = pfn <= virt_to_pfn(0xFFFFFFFFUL); in pfn_to_nid()
29 if (pfn >= ARCH_PFN_OFFSET && is_end_low) in pfn_to_nid()
35 static inline int pfn_valid(unsigned long pfn) in pfn_valid() argument
37 int nid = pfn_to_nid(pfn); in pfn_valid()
39 return (pfn <= node_end_pfn(nid)); in pfn_valid()
/arch/x86/power/
hibernate_32.c
83 unsigned long pfn; in resume_physical_mapping_init() local
91 pfn = 0; in resume_physical_mapping_init()
98 if (pfn >= max_low_pfn) in resume_physical_mapping_init()
102 if (pfn >= max_low_pfn) in resume_physical_mapping_init()
110 set_pmd(pmd, pfn_pmd(pfn, PAGE_KERNEL_LARGE_EXEC)); in resume_physical_mapping_init()
111 pfn += PTRS_PER_PTE; in resume_physical_mapping_init()
120 for (; pte < max_pte; pte++, pfn++) { in resume_physical_mapping_init()
121 if (pfn >= max_low_pfn) in resume_physical_mapping_init()
124 set_pte(pte, pfn_pte(pfn, PAGE_KERNEL_EXEC)); in resume_physical_mapping_init()
167 int pfn_is_nosave(unsigned long pfn) in pfn_is_nosave() argument
[all …]
/arch/alpha/include/asm/
mmzone.h
34 #define node_localnr(pfn, nid) ((pfn) - NODE_DATA(nid)->node_start_pfn) argument
81 unsigned long pfn; \
83 pfn = page_to_pfn(page) << 32; \
84 pte_val(pte) = pfn | pgprot_val(pgprot); \
103 #define pfn_to_nid(pfn) pa_to_nid(((u64)(pfn) << PAGE_SHIFT)) argument
104 #define pfn_valid(pfn) \ argument
105 (((pfn) - node_start_pfn(pfn_to_nid(pfn))) < \
106 node_spanned_pages(pfn_to_nid(pfn))) \
/arch/unicore32/kernel/
hibernate.c
75 unsigned long pfn; in resume_physical_mapping_init() local
83 pfn = 0; in resume_physical_mapping_init()
90 if (pfn >= max_low_pfn) in resume_physical_mapping_init()
96 if (pfn >= max_low_pfn) in resume_physical_mapping_init()
107 for (; pte < max_pte; pte++, pfn++) { in resume_physical_mapping_init()
108 if (pfn >= max_low_pfn) in resume_physical_mapping_init()
111 set_pte(pte, pfn_pte(pfn, PAGE_KERNEL_EXEC)); in resume_physical_mapping_init()
145 int pfn_is_nosave(unsigned long pfn) in pfn_is_nosave() argument
150 return (pfn >= begin_pfn) && (pfn < end_pfn); in pfn_is_nosave()
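
The pfn_is_nosave() implementations above all reduce to range tests: a pfn is left out of the hibernation image when it falls inside the declared nosave region (s390 additionally excludes its lowcore and shared kernel text ranges). A standalone sketch of the basic check, with hypothetical begin/end pfns:

/* Standalone sketch of a pfn_is_nosave()-style range test (hypothetical bounds). */
#include <stdio.h>

static const unsigned long nosave_begin_pfn = 0x100UL;  /* hypothetical, inclusive */
static const unsigned long nosave_end_pfn   = 0x180UL;  /* hypothetical, exclusive */

static int pfn_is_nosave(unsigned long pfn)
{
        return pfn >= nosave_begin_pfn && pfn < nosave_end_pfn;
}

int main(void)
{
        printf("pfn 0x120 nosave? %d, pfn 0x200 nosave? %d\n",
               pfn_is_nosave(0x120UL), pfn_is_nosave(0x200UL));
        return 0;
}
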
/arch/unicore32/mm/
ioremap.c
102 remap_area_sections(unsigned long virt, unsigned long pfn, in remap_area_sections() argument
118 set_pmd(pmd, __pmd(__pfn_to_phys(pfn) | type->prot_sect)); in remap_area_sections()
119 pfn += SZ_4M >> PAGE_SHIFT; in remap_area_sections()
129 void __iomem *__uc32_ioremap_pfn_caller(unsigned long pfn, in __uc32_ioremap_pfn_caller() argument
140 if (pfn >= 0x100000 && (__pfn_to_phys(pfn) & ~SECTION_MASK)) in __uc32_ioremap_pfn_caller()
146 if (pfn_valid(pfn)) { in __uc32_ioremap_pfn_caller()
168 if (!((__pfn_to_phys(pfn) | size | addr) & ~PMD_MASK)) { in __uc32_ioremap_pfn_caller()
170 err = remap_area_sections(addr, pfn, size, type); in __uc32_ioremap_pfn_caller()
172 err = ioremap_page_range(addr, addr + size, __pfn_to_phys(pfn), in __uc32_ioremap_pfn_caller()
189 unsigned long pfn = __phys_to_pfn(phys_addr); in __uc32_ioremap_caller() local
[all …]
/arch/arm/mach-omap2/
io.c
74 .pfn = __phys_to_pfn(L3_24XX_PHYS),
80 .pfn = __phys_to_pfn(L4_24XX_PHYS),
90 .pfn = __phys_to_pfn(DSP_MEM_2420_PHYS),
96 .pfn = __phys_to_pfn(DSP_IPI_2420_PHYS),
102 .pfn = __phys_to_pfn(DSP_MMU_2420_PHYS),
114 .pfn = __phys_to_pfn(L4_WK_243X_PHYS),
120 .pfn = __phys_to_pfn(OMAP243X_GPMC_PHYS),
126 .pfn = __phys_to_pfn(OMAP243X_SDRC_PHYS),
132 .pfn = __phys_to_pfn(OMAP243X_SMS_PHYS),
144 .pfn = __phys_to_pfn(L3_34XX_PHYS),
[all …]
/arch/ia64/include/asm/
page.h
98 extern int ia64_pfn_valid (unsigned long pfn);
100 # define ia64_pfn_valid(pfn) 1 argument
107 # define pfn_to_page(pfn) (vmem_map + (pfn)) argument
108 # define __pfn_to_phys(pfn) PFN_PHYS(pfn) argument
117 # define pfn_valid(pfn) (((pfn) < max_mapnr) && ia64_pfn_valid(pfn)) argument
121 # define pfn_valid(pfn) (((pfn) >= min_low_pfn) && ((pfn) < max_low_pfn) && ia64_pfn_valid(pfn)) argument
126 #define pfn_to_kaddr(pfn) __va((pfn) << PAGE_SHIFT) argument
/arch/x86/kernel/
crash_dump_32.c
17 static inline bool is_crashed_pfn_valid(unsigned long pfn) in is_crashed_pfn_valid() argument
27 return pte_pfn(pfn_pte(pfn, __pgprot(0))) == pfn; in is_crashed_pfn_valid()
50 ssize_t copy_oldmem_page(unsigned long pfn, char *buf, in copy_oldmem_page() argument
58 if (!is_crashed_pfn_valid(pfn)) in copy_oldmem_page()
61 vaddr = kmap_atomic_pfn(pfn); in copy_oldmem_page()
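
is_crashed_pfn_valid() above packs the pfn into a pte and reads it back; if the round trip changes the value, the pfn has high bits a non-PAE 32-bit pte cannot hold, so the old-memory page cannot be mapped with kmap_atomic_pfn(). Below is a standalone sketch of the same idea reduced to a mask test; the 20-bit pfn width is an assumption standing in for the pte format.

/* Standalone sketch of the pfn round-trip check as a mask test (assumed width). */
#include <stdio.h>

#define PTE_PFN_BITS 20U                              /* assumed pfn bits in a pte */
#define PTE_PFN_MASK ((1UL << PTE_PFN_BITS) - 1UL)

static int is_crashed_pfn_valid(unsigned long pfn)
{
        /* The pfn survives packing into a pte only if no high bits are lost. */
        return (pfn & PTE_PFN_MASK) == pfn;
}

int main(void)
{
        printf("pfn 0xfffff valid? %d, pfn 0x100000 valid? %d\n",
               is_crashed_pfn_valid(0xfffffUL), is_crashed_pfn_valid(0x100000UL));
        return 0;
}
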
/arch/tile/include/asm/
mmzone.h
38 static inline int pfn_to_nid(unsigned long pfn) in pfn_to_nid() argument
40 return highbits_to_node[__pfn_to_highbits(pfn)]; in pfn_to_nid()
45 static inline int pfn_valid(unsigned long pfn) in pfn_valid() argument
47 int nid = pfn_to_nid(pfn); in pfn_valid()
50 return (pfn < node_end_pfn(nid)); in pfn_valid()
/arch/arm/mm/
fault-armv.c
41 unsigned long pfn, pte_t *ptep) in do_adjust_pte() argument
56 flush_cache_page(vma, address, pfn); in do_adjust_pte()
57 outer_flush_range((pfn << PAGE_SHIFT), in do_adjust_pte()
58 (pfn << PAGE_SHIFT) + PAGE_SIZE); in do_adjust_pte()
93 unsigned long pfn) in adjust_pte() argument
123 ret = do_adjust_pte(vma, address, pfn, pte); in adjust_pte()
133 unsigned long addr, pte_t *ptep, unsigned long pfn) in make_coherent() argument
160 aliases += adjust_pte(mpnt, mpnt->vm_start + offset, pfn); in make_coherent()
164 do_adjust_pte(vma, addr, pfn, ptep); in make_coherent()
183 unsigned long pfn = pte_pfn(*ptep); in update_mmu_cache() local
[all …]
flush.c
41 static void flush_pfn_alias(unsigned long pfn, unsigned long vaddr) in flush_pfn_alias() argument
46 set_top_pte(to, pfn_pte(pfn, PAGE_KERNEL)); in flush_pfn_alias()
55 static void flush_icache_alias(unsigned long pfn, unsigned long vaddr, unsigned long len) in flush_icache_alias() argument
61 set_top_pte(va, pfn_pte(pfn, PAGE_KERNEL)); in flush_icache_alias()
101 void flush_cache_page(struct vm_area_struct *vma, unsigned long user_addr, unsigned long pfn) in flush_cache_page() argument
104 vivt_flush_cache_page(vma, user_addr, pfn); in flush_cache_page()
109 flush_pfn_alias(pfn, user_addr); in flush_cache_page()
118 #define flush_pfn_alias(pfn,vaddr) do { } while (0) argument
119 #define flush_icache_alias(pfn,vaddr,len) do { } while (0) argument
275 unsigned long pfn; in __sync_icache_dcache() local
[all …]
/arch/metag/include/asm/
mmzone.h
10 static inline int pfn_to_nid(unsigned long pfn) in pfn_to_nid() argument
15 if (pfn >= node_start_pfn(nid) && pfn <= node_end_pfn(nid)) in pfn_to_nid()
21 static inline struct pglist_data *pfn_to_pgdat(unsigned long pfn) in pfn_to_pgdat() argument
23 return NODE_DATA(pfn_to_nid(pfn)); in pfn_to_pgdat()
/arch/frv/include/asm/
page.h
40 #define devmem_is_allowed(pfn) 1 argument
45 #define pfn_to_kaddr(pfn) __va((pfn) << PAGE_SHIFT) argument
52 #define pfn_valid(pfn) ((pfn) < max_mapnr) argument
55 #define pfn_valid(pfn) ((pfn) >= min_low_pfn && (pfn) < max_low_pfn) argument
/arch/sh/include/asm/
mmzone.h
12 static inline int pfn_to_nid(unsigned long pfn) in pfn_to_nid() argument
17 if (pfn >= node_start_pfn(nid) && pfn <= node_end_pfn(nid)) in pfn_to_nid()
23 static inline struct pglist_data *pfn_to_pgdat(unsigned long pfn) in pfn_to_pgdat() argument
25 return NODE_DATA(pfn_to_nid(pfn)); in pfn_to_pgdat()
/arch/mn10300/include/asm/
page.h
107 #define pfn_to_kaddr(pfn) __va((pfn) << PAGE_SHIFT) argument
108 #define pfn_to_page(pfn) (mem_map + ((pfn) - __pfn_disp)) argument
110 #define __pfn_to_phys(pfn) PFN_PHYS(pfn) argument
112 #define pfn_valid(pfn) \ argument
114 unsigned long __pfn = (pfn) - __pfn_disp; \
/arch/m68k/include/asm/
page_no.h
23 #define pfn_to_virt(pfn) __va((pfn) << PAGE_SHIFT) argument
28 #define pfn_to_page(pfn) virt_to_page(pfn_to_virt(pfn)) argument
30 #define pfn_valid(pfn) ((pfn) < max_mapnr) argument
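
The frv, mn10300 and m68k page.h macros above convert between pfns and kernel virtual addresses with nothing more than a shift plus the linear-mapping offset. A standalone sketch of that arithmetic follows; PAGE_SHIFT = 12 and the PAGE_OFFSET value are assumptions for illustration.

/* Standalone sketch of flat pfn <-> kernel-virtual conversion (assumed constants). */
#include <stdio.h>

#define PAGE_SHIFT  12
#define PAGE_OFFSET 0xc0000000UL                       /* hypothetical linear offset */

#define __va(phys)         ((void *)((unsigned long)(phys) + PAGE_OFFSET))
#define pfn_to_kaddr(pfn)  __va((unsigned long)(pfn) << PAGE_SHIFT)
#define virt_to_pfn(kaddr) (((unsigned long)(kaddr) - PAGE_OFFSET) >> PAGE_SHIFT)

int main(void)
{
        unsigned long pfn = 0x1234UL;
        void *kaddr = pfn_to_kaddr(pfn);

        printf("pfn %#lx -> kaddr %p -> pfn %#lx\n",
               pfn, kaddr, virt_to_pfn(kaddr));
        return 0;
}
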
