
Searched refs:pgdp (Results 1 – 25 of 47) sorted by relevance

/arch/powerpc/include/asm/
pgtable-ppc64-4k.h
58 #define pgd_clear(pgdp) (pgd_val(*(pgdp)) = 0) argument
62 #define pud_offset(pgdp, addr) \ argument
63 (((pud_t *) pgd_page_vaddr(*(pgdp))) + \
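For context, the ppc64-4k definitions above keep the virtual address of a pud table in each pgd slot: pgd_clear() zeroes the slot and pud_offset() turns the stored value back into a pud pointer. A minimal lookup built on them, assuming the pre-4.12 four-level API where pud_offset() takes a pgd_t * (the function name is illustrative):

#include <linux/mm.h>

/* Sketch: return the pud entry covering addr, or NULL if the pgd slot is empty. */
static pud_t *pgdp_example_lookup(struct mm_struct *mm, unsigned long addr)
{
        pgd_t *pgdp = pgd_offset(mm, addr);     /* slot in mm->pgd for addr */

        if (pgd_none(*pgdp))                    /* nothing mapped at this level */
                return NULL;
        return pud_offset(pgdp, addr);          /* pud table + index for addr */
}
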
/arch/sh/mm/
gup.c
170 pgd_t *pgdp; in __get_user_pages_fast() local
186 pgdp = pgd_offset(mm, addr); in __get_user_pages_fast()
188 pgd_t pgd = *pgdp; in __get_user_pages_fast()
195 } while (pgdp++, addr = next, addr != end); in __get_user_pages_fast()
223 pgd_t *pgdp; in get_user_pages_fast() local
235 pgdp = pgd_offset(mm, addr); in get_user_pages_fast()
237 pgd_t pgd = *pgdp; in get_user_pages_fast()
244 } while (pgdp++, addr = next, addr != end); in get_user_pages_fast()
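The gup.c hits here (and the matching sparc, mips and x86 ones below) share the same lockless fast-GUP outer loop: snapshot each pgd entry, clamp the range with pgd_addr_end(), and bail out to the slow path on an empty entry. A condensed sketch of that loop, using the s390-style gup_pud_range() shown further down (other arches pass only the pgd value); the wrapper function name is illustrative:

#include <linux/mm.h>

/* Sketch of the __get_user_pages_fast() pgd-level loop from the hits above. */
static int pgdp_example_gup_walk(struct mm_struct *mm, unsigned long addr,
                                 unsigned long end, int write,
                                 struct page **pages, int *nr)
{
        unsigned long next;
        pgd_t *pgdp = pgd_offset(mm, addr);

        do {
                pgd_t pgd = *pgdp;              /* read the entry exactly once */

                next = pgd_addr_end(addr, end); /* end of this entry's span */
                if (pgd_none(pgd))
                        return 0;               /* caller falls back to slow GUP */
                if (!gup_pud_range(pgdp, pgd, addr, next, write, pages, nr))
                        return 0;
        } while (pgdp++, addr = next, addr != end);

        return 1;
}
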
/arch/s390/mm/
hugetlbpage.c
164 pgd_t *pgdp; in huge_pte_alloc() local
168 pgdp = pgd_offset(mm, addr); in huge_pte_alloc()
169 pudp = pud_alloc(mm, pgdp, addr); in huge_pte_alloc()
177 pgd_t *pgdp; in huge_pte_offset() local
181 pgdp = pgd_offset(mm, addr); in huge_pte_offset()
182 if (pgd_present(*pgdp)) { in huge_pte_offset()
183 pudp = pud_offset(pgdp, addr); in huge_pte_offset()
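The two s390 hugetlb walks differ only in whether they may allocate: huge_pte_alloc() populates the pud level on demand with pud_alloc(), while huge_pte_offset() descends only when the pgd entry is already present. A minimal sketch of the pair (function names are illustrative):

#include <linux/mm.h>

/* Allocating walk: create the pud table under this pgd slot if needed. */
static pud_t *pgdp_example_alloc(struct mm_struct *mm, unsigned long addr)
{
        pgd_t *pgdp = pgd_offset(mm, addr);

        return pud_alloc(mm, pgdp, addr);       /* may allocate; NULL on failure */
}

/* Read-only walk: never allocates, NULL when nothing is mapped yet. */
static pud_t *pgdp_example_find(struct mm_struct *mm, unsigned long addr)
{
        pgd_t *pgdp = pgd_offset(mm, addr);

        if (!pgd_present(*pgdp))
                return NULL;
        return pud_offset(pgdp, addr);
}
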
gup.c
141 static inline int gup_pud_range(pgd_t *pgdp, pgd_t pgd, unsigned long addr, in gup_pud_range() argument
147 pudp = (pud_t *) pgdp; in gup_pud_range()
176 pgd_t *pgdp, pgd; in __get_user_pages_fast() local
193 pgdp = pgd_offset(mm, addr); in __get_user_pages_fast()
195 pgd = *pgdp; in __get_user_pages_fast()
200 if (!gup_pud_range(pgdp, pgd, addr, next, write, pages, &nr)) in __get_user_pages_fast()
202 } while (pgdp++, addr = next, addr != end); in __get_user_pages_fast()
pageattr.c
45 pgd_t *pgdp; in walk_page_table() local
50 pgdp = pgd_offset_k(addr); in walk_page_table()
51 if (pgd_none(*pgdp)) in walk_page_table()
53 pudp = pud_offset(pgdp, addr); in walk_page_table()
/arch/sparc/mm/
gup.c
169 pgd_t *pgdp; in __get_user_pages_fast() local
178 pgdp = pgd_offset(mm, addr); in __get_user_pages_fast()
180 pgd_t pgd = *pgdp; in __get_user_pages_fast()
187 } while (pgdp++, addr = next, addr != end); in __get_user_pages_fast()
199 pgd_t *pgdp; in get_user_pages_fast() local
226 pgdp = pgd_offset(mm, addr); in get_user_pages_fast()
228 pgd_t pgd = *pgdp; in get_user_pages_fast()
235 } while (pgdp++, addr = next, addr != end); in get_user_pages_fast()
srmmu.c
109 static inline void srmmu_ctxd_set(ctxd_t *ctxp, pgd_t *pgdp) in srmmu_ctxd_set() argument
110 { set_pte((pte_t *)ctxp, (SRMMU_ET_PTD | (__nocache_pa((unsigned long) pgdp) >> 4))); } in srmmu_ctxd_set()
485 pgd_t *pgdp; in srmmu_mapioaddr() local
491 pgdp = pgd_offset_k(virt_addr); in srmmu_mapioaddr()
492 pmdp = pmd_offset(pgdp, virt_addr); in srmmu_mapioaddr()
520 pgd_t *pgdp; in srmmu_unmapioaddr() local
524 pgdp = pgd_offset_k(virt_addr); in srmmu_unmapioaddr()
525 pmdp = pmd_offset(pgdp, virt_addr); in srmmu_unmapioaddr()
662 pgd_t *pgdp; in srmmu_early_allocate_ptable_skeleton() local
667 pgdp = pgd_offset_k(start); in srmmu_early_allocate_ptable_skeleton()
[all …]
fault_64.c
94 pgd_t *pgdp = pgd_offset(current->mm, tpc); in get_user_insn() local
101 if (pgd_none(*pgdp) || unlikely(pgd_bad(*pgdp))) in get_user_insn()
103 pudp = pud_offset(pgdp, tpc); in get_user_insn()
io-unit.c
217 pgd_t *pgdp; in iounit_map_dma_area() local
222 pgdp = pgd_offset(&init_mm, addr); in iounit_map_dma_area()
223 pmdp = pmd_offset(pgdp, addr); in iounit_map_dma_area()
/arch/powerpc/mm/
gup.c
140 pgd_t *pgdp; in __get_user_pages_fast() local
175 pgdp = pgd_offset(mm, addr); in __get_user_pages_fast()
177 pgd_t pgd = ACCESS_ONCE(*pgdp); in __get_user_pages_fast()
185 if (!gup_hugepte((pte_t *)pgdp, PGDIR_SIZE, addr, next, in __get_user_pages_fast()
188 } else if (is_hugepd(pgdp)) { in __get_user_pages_fast()
189 if (!gup_hugepd((hugepd_t *)pgdp, PGDIR_SHIFT, in __get_user_pages_fast()
194 } while (pgdp++, addr = next, addr != end); in __get_user_pages_fast()
pgtable_64.c
96 pgd_t *pgdp; in map_kernel_page() local
102 pgdp = pgd_offset_k(ea); in map_kernel_page()
103 pudp = pud_alloc(&init_mm, pgdp, ea); in map_kernel_page()
120 pgdp = pgd_offset_k(ea); in map_kernel_page()
122 if (pgd_none(*pgdp)) { in map_kernel_page()
125 pgd_populate(&init_mm, pgdp, pudp); in map_kernel_page()
128 pudp = pud_offset(pgdp, ea); in map_kernel_page()
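map_kernel_page() walks init_mm's table for a kernel virtual address: pgd_offset_k() picks the pgd slot, then either pud_alloc() (once the allocators are up) or an explicit pgd_populate() of an early-allocated pud table fills it before descending. A condensed sketch of the allocating path, assuming slab is available (the function name is illustrative):

#include <linux/mm.h>

/* Sketch: ensure a pud table exists for kernel address ea, then return its entry. */
static pud_t *pgdp_example_kernel_pud(unsigned long ea)
{
        pgd_t *pgdp = pgd_offset_k(ea);         /* slot in the kernel pgd */

        return pud_alloc(&init_mm, pgdp, ea);   /* populate the slot if empty */
}
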
/arch/mips/mm/
gup.c
203 pgd_t *pgdp; in __get_user_pages_fast() local
232 pgdp = pgd_offset(mm, addr); in __get_user_pages_fast()
234 pgd_t pgd = *pgdp; in __get_user_pages_fast()
241 } while (pgdp++, addr = next, addr != end); in __get_user_pages_fast()
269 pgd_t *pgdp; in get_user_pages_fast() local
282 pgdp = pgd_offset(mm, addr); in get_user_pages_fast()
284 pgd_t pgd = *pgdp; in get_user_pages_fast()
291 } while (pgdp++, addr = next, addr != end); in get_user_pages_fast()
tlb-r8k.c
186 pgd_t *pgdp; in __update_tlb() local
203 pgdp = pgd_offset(vma->vm_mm, address); in __update_tlb()
204 pmdp = pmd_offset(pgdp, address); in __update_tlb()
c-r3k.c
242 pgd_t *pgdp; in r3k_flush_cache_page() local
254 pgdp = pgd_offset(mm, addr); in r3k_flush_cache_page()
255 pudp = pud_offset(pgdp, addr); in r3k_flush_cache_page()
c-tx39.c
173 pgd_t *pgdp; in tx39_flush_cache_page() local
186 pgdp = pgd_offset(mm, page); in tx39_flush_cache_page()
187 pudp = pud_offset(pgdp, page); in tx39_flush_cache_page()
/arch/x86/mm/
gup.c
264 pgd_t *pgdp; in __get_user_pages_fast() local
294 pgdp = pgd_offset(mm, addr); in __get_user_pages_fast()
296 pgd_t pgd = *pgdp; in __get_user_pages_fast()
303 } while (pgdp++, addr = next, addr != end); in __get_user_pages_fast()
331 pgd_t *pgdp; in get_user_pages_fast() local
366 pgdp = pgd_offset(mm, addr); in get_user_pages_fast()
368 pgd_t pgd = *pgdp; in get_user_pages_fast()
375 } while (pgdp++, addr = next, addr != end); in get_user_pages_fast()
/arch/score/mm/
cache.c
177 pgd_t *pgdp; in flush_cache_range() local
185 pgdp = pgd_offset(mm, start); in flush_cache_range()
186 pudp = pud_offset(pgdp, start); in flush_cache_range()
192 pgdp = pgd_offset(mm, start); in flush_cache_range()
193 pudp = pud_offset(pgdp, start); in flush_cache_range()
/arch/arm64/include/asm/
pgalloc.h
71 static inline void __pgd_populate(pgd_t *pgdp, phys_addr_t pud, pgdval_t prot) in __pgd_populate() argument
73 set_pgd(pgdp, __pgd(pud | prot)); in __pgd_populate()
81 static inline void __pgd_populate(pgd_t *pgdp, phys_addr_t pud, pgdval_t prot) in __pgd_populate() argument
/arch/sparc/include/asm/
pgalloc_32.h
30 static inline void pgd_set(pgd_t * pgdp, pmd_t * pmdp) in pgd_set() argument
34 set_pte((pte_t *)pgdp, (SRMMU_ET_PTD | (pa >> 4))); in pgd_set()
/arch/m68k/include/asm/
motorola_pgtable.h
120 static inline void pgd_set(pgd_t *pgdp, pmd_t *pmdp) in pgd_set() argument
122 pgd_val(*pgdp) = _PAGE_TABLE | _PAGE_ACCESSED | __pa(pmdp); in pgd_set()
153 #define pgd_clear(pgdp) ({ pgd_val(*pgdp) = 0; }) argument
pgtable_no.h
21 #define pgd_clear(pgdp) argument
sun3_pgtable.h
114 static inline void pgd_set(pgd_t *pgdp, pmd_t *pmdp) in pgd_set() argument
116 pgd_val(*pgdp) = virt_to_phys(pmdp); in pgd_set()
152 static inline void pgd_clear (pgd_t *pgdp) {} in pgd_clear() argument
/arch/alpha/include/asm/
pgtable.h
229 extern inline void pgd_set(pgd_t * pgdp, pmd_t * pmdp) in pgd_set() argument
230 { pgd_val(*pgdp) = _PAGE_TABLE | ((((unsigned long) pmdp) - PAGE_OFFSET) << (32-PAGE_SHIFT)); } in pgd_set()
262 extern inline void pgd_clear(pgd_t * pgdp) { pgd_val(*pgdp) = 0; } in pgd_clear() argument
/arch/x86/include/asm/
pgtable_64.h
109 static inline void native_set_pgd(pgd_t *pgdp, pgd_t pgd) in native_set_pgd() argument
111 *pgdp = pgd; in native_set_pgd()
/arch/c6x/include/asm/
pgtable.h
29 #define pgd_clear(pgdp) argument
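
The header hits in the last few groups are the primitives that write a pgd slot rather than walk it: x86-64's native_set_pgd() stores the new value directly, arm64's __pgd_populate() builds it from a physical address plus protection bits, the sparc/m68k/alpha pgd_set() variants encode a pmd table address, and pgd_clear() is an empty macro on ports whose upper levels are folded (pgtable_no.h, c6x). A tiny sketch of installing and tearing down one slot through the generic helpers (the wrapper function is illustrative):

#include <linux/mm.h>
#include <asm/pgalloc.h>

/* Sketch: point a pgd slot at a pud table, then clear it again. */
static void pgdp_example_set_clear(pgd_t *pgdp, pud_t *pudp)
{
        pgd_populate(&init_mm, pgdp, pudp);     /* arch helper writes the entry */
        pgd_clear(pgdp);                        /* zero it; no-op on folded ports */
}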
