Searched refs:PTRS_PER_PUD (Results 1 – 25 of 44) sorted by relevance
/arch/powerpc/include/asm/nohash/64/pgtable-4k.h
     26  #define PTRS_PER_PUD (1 << PUD_INDEX_SIZE)   macro
     83  (((addr) >> PUD_SHIFT) & (PTRS_PER_PUD - 1)))

/arch/mips/include/asm/pgtable-64.h
    133  #define PTRS_PER_PUD ((PAGE_SIZE << PUD_ORDER) / sizeof(pud_t))   macro
    151  min(PTRS_PER_PGD * PTRS_PER_PUD * PTRS_PER_PMD * PTRS_PER_PTE * PAGE_SIZE, \
    186  extern pud_t invalid_pud_table[PTRS_PER_PUD];
    214  #define pud_index(address) (((address) >> PUD_SHIFT) & (PTRS_PER_PUD - 1))
    323  #define __pud_offset(address) (((address) >> PUD_SHIFT) & (PTRS_PER_PUD-1))

/arch/mips/include/asm/pgtable-32.h
    203  #define __pud_offset(address) (((address) >> PUD_SHIFT) & (PTRS_PER_PUD-1))

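The MIPS header derives PTRS_PER_PUD from the page size instead of hard-coding it, and pud_index()/__pud_offset() pick the PUD slot out of a virtual address. A minimal user-space sketch of that arithmetic, assuming illustrative values (4 KiB page, 8-byte pud_t, PUD_SHIFT of 30); the real values differ per architecture and configuration:

    #include <stdio.h>
    #include <stdint.h>

    /* Assumed example values; each architecture/config picks its own. */
    #define PAGE_SIZE      4096UL
    #define PUD_ENTRY_SIZE 8UL                           /* stand-in for sizeof(pud_t) */
    #define PTRS_PER_PUD   (PAGE_SIZE / PUD_ENTRY_SIZE)  /* 512 with the values above */
    #define PUD_SHIFT      30                            /* 1 GiB per PUD entry here */

    /* Mirrors the pud_index()/__pud_offset() pattern in the hits above. */
    static unsigned long pud_index(uint64_t address)
    {
            return (address >> PUD_SHIFT) & (PTRS_PER_PUD - 1);
    }

    int main(void)
    {
            uint64_t addr = 0x0000003fc0000000ULL;       /* arbitrary example address */

            printf("PTRS_PER_PUD = %lu\n", PTRS_PER_PUD);
            printf("pud_index(0x%llx) = %lu\n",
                   (unsigned long long)addr, pud_index(addr));
            return 0;
    }
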
/arch/x86/mm/mem_encrypt_identity.c
    132  memset(pud, 0, sizeof(*pud) * PTRS_PER_PUD);   in sme_prepare_pgd()
    133  ppd->pgtable_area += sizeof(*pud) * PTRS_PER_PUD;   in sme_prepare_pgd()
    272  entries += (DIV_ROUND_UP(len, P4D_SIZE) + 1) * sizeof(pud_t) * PTRS_PER_PUD;   in sme_pgtable_calc()
    283  tables += DIV_ROUND_UP(entries, P4D_SIZE) * sizeof(pud_t) * PTRS_PER_PUD;   in sme_pgtable_calc()

/arch/x86/mm/dump_pagetables.c
    153  #define P4D_LEVEL_MULT (PTRS_PER_PUD * PUD_LEVEL_MULT)
    436  #if PTRS_PER_PUD > 1
    447  for (i = 0; i < PTRS_PER_PUD; i++) {   in walk_pud_level()

/arch/x86/mm/kasan_init_64.c
    286  for (i = 0; i < PTRS_PER_PUD; i++)   in kasan_early_init()

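The mem_encrypt_identity.c hits size the SME scratch page tables in units of sizeof(pud_t) * PTRS_PER_PUD, i.e. one full PUD page per P4D_SIZE chunk of the range being mapped. A rough user-space sketch of that sizing pattern, assuming x86-64-like constants (4 KiB pages, 8-byte entries, P4D_SIZE of 512 GiB); this only mirrors the shape of the calculation, not the kernel's exact accounting:

    #include <stdio.h>

    #define PAGE_SIZE    4096UL
    #define ENTRY_SIZE   8UL                          /* stand-in for sizeof(pud_t) */
    #define PTRS_PER_PUD (PAGE_SIZE / ENTRY_SIZE)     /* 512 */
    #define P4D_SIZE     (1ULL << 39)                 /* 512 GiB, the span of one full PUD page */

    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    int main(void)
    {
            unsigned long long len = 64ULL << 20;     /* example: identity-map 64 MiB */

            /* One PUD page (ENTRY_SIZE * PTRS_PER_PUD bytes) per P4D_SIZE chunk,
             * plus one extra, mirroring the "+ 1" in the hit at line 272
             * (presumably for a range that straddles a P4D boundary). */
            unsigned long long pud_bytes =
                    (DIV_ROUND_UP(len, P4D_SIZE) + 1) * ENTRY_SIZE * PTRS_PER_PUD;

            printf("PUD table bytes: %llu (%llu page(s))\n",
                   pud_bytes, pud_bytes / PAGE_SIZE);
            return 0;
    }
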
/arch/x86/kernel/head64.c
    204  pud[(i + 0) % PTRS_PER_PUD] = (pudval_t)pmd + pgtable_flags;   in __startup_64()
    205  pud[(i + 1) % PTRS_PER_PUD] = (pudval_t)pmd + pgtable_flags;   in __startup_64()
    347  memset(pud_p, 0, sizeof(*pud_p) * PTRS_PER_PUD);   in __early_make_pgtable()

/arch/x86/kernel/espfix_64.c
     65  static __page_aligned_bss pud_t espfix_pud_page[PTRS_PER_PUD]

/arch/x86/kernel/head_64.S
     42  #define pud_index(x) (((x) >> PUD_SHIFT) & (PTRS_PER_PUD-1))

/arch/x86/include/asm/pgtable_64_types.h
     84  #define PTRS_PER_PUD 512   macro

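On x86-64 the value is simply 512: a 4 KiB page holds 512 eight-byte pud_t entries, each mapping 1 GiB, so one PUD page spans 512 GiB. Multiplying all four levels together gives the same product the MIPS pgtable-64.h hit at line 151 clamps against, which for these assumed x86-64 4-level values works out to 256 TiB of virtual address space. A quick check of that arithmetic:

    #include <stdio.h>

    int main(void)
    {
            /* Assumed x86-64 4-level values: 4 KiB pages, 512 entries per level. */
            unsigned long long page_size    = 4096;
            unsigned long long ptrs_per_pte = 512, ptrs_per_pmd = 512;
            unsigned long long ptrs_per_pud = 512, ptrs_per_pgd = 512;

            /* One PUD entry covers a full PMD page worth of PTE pages: 1 GiB. */
            unsigned long long pud_entry_bytes = ptrs_per_pmd * ptrs_per_pte * page_size;
            unsigned long long pud_page_bytes  = ptrs_per_pud * pud_entry_bytes;
            unsigned long long total           = ptrs_per_pgd * pud_page_bytes;

            printf("per PUD entry: %llu GiB\n", pud_entry_bytes >> 30);  /* 1   */
            printf("per PUD page:  %llu GiB\n", pud_page_bytes  >> 30);  /* 512 */
            printf("total reach:   %llu TiB\n", total >> 40);            /* 256 */
            return 0;
    }
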
/arch/mips/mm/pgtable-64.c
     75  end = p + PTRS_PER_PUD;   in pud_init()

/arch/mips/mm/init.c
    249  for ( ; (j < PTRS_PER_PUD) && (vaddr < end); pud++, j++) {   in fixrange_init()
    519  pud_t invalid_pud_table[PTRS_PER_PUD] __page_aligned_bss;

/arch/x86/xen/xen-head.S
     97  ELFNOTE(Xen, XEN_ELFNOTE_INIT_P2M, .quad (PUD_SIZE * PTRS_PER_PUD))

/arch/x86/xen/mmu_pv.c
    100  static pud_t level3_user_vsyscall[PTRS_PER_PUD] __page_aligned_bss;
    596  nr = last ? pud_index(limit) + 1 : PTRS_PER_PUD;   in xen_pud_walk()
    624  if (PTRS_PER_PUD > 1)   in xen_p4d_walk()
   1195  for (i = 0; i < PTRS_PER_PUD; i++) {   in xen_cleanmfnmap_p4d()
   2098  for (idx_pmd = 0; idx_pmd < min(n_pmd, PTRS_PER_PUD);   in xen_relocate_p2m()
   2129  n_pmd -= PTRS_PER_PUD;   in xen_relocate_p2m()

/arch/powerpc/mm/book3s64/hash_hugetlbpage.c
     77  offset = PTRS_PER_PUD;   in __hash_page_huge()

/arch/powerpc/mm/book3s64/hash_tlb.c
     68  offset = PTRS_PER_PUD;   in hpte_need_flush()

/arch/csky/mm/highmem.c
    139  for ( ; (j < PTRS_PER_PUD) && (vaddr != end); pud++, j++) {   in fixrange_init()

/arch/ia64/include/asm/pgtable.h
    113  #define PTRS_PER_PUD (1UL << (PTRS_PER_PTD_SHIFT))   macro
    391  ((pud_t *) pgd_page_vaddr(*(dir)) + (((addr) >> PUD_SHIFT) & (PTRS_PER_PUD - 1)))

/arch/arm64/include/asm/pgtable-hwdef.h
     62  #define PTRS_PER_PUD PTRS_PER_PTE   macro

/arch/s390/mm/dump_pagetables.c
    196  for (i = 0; i < PTRS_PER_PUD && addr < max_addr; i++, pud++) {   in walk_pud_level()

/arch/sh/include/asm/pgtable_64.h
     55  #define __pud_offset(address) (((address) >> PUD_SHIFT) & (PTRS_PER_PUD-1))

/arch/csky/include/asm/pgtable.h
    236  #define __pud_offset(address) (((address) >> PUD_SHIFT) & (PTRS_PER_PUD-1))

/arch/sh/mm/init.c
    182  for ( ; (j < PTRS_PER_PUD) && (vaddr != end); pud++, j++) {   in page_table_range_init()

/arch/sh/mm/fault.c
     69  if (PTRS_PER_PUD != 1)   in show_pte()

/arch/powerpc/mm/ptdump/ptdump.c
    287  for (i = 0; i < PTRS_PER_PUD; i++, pud++) {   in walk_pud()