
Searched refs: PMD_SIZE (Results 1 – 25 of 57), sorted by relevance


/arch/sh/include/asm/
pgtable-3level.h:22 #define PMD_SIZE (1UL << PMD_SHIFT) macro
23 #define PMD_MASK (~(PMD_SIZE-1))
25 #define PTRS_PER_PMD ((1 << PGDIR_SHIFT) / PMD_SIZE)
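The three definitions above follow the pattern repeated by most of the architecture headers in this result list: PMD_SIZE is the span of virtual memory covered by one PMD entry, PMD_MASK clears the offset within that span, and PTRS_PER_PMD falls out of the PGDIR/PMD split. A minimal standalone sketch of the arithmetic, using hypothetical shift values chosen only to make the numbers concrete (real values differ per architecture and configuration):

#include <stdio.h>

/* Hypothetical shift values for illustration; each architecture defines its own. */
#define PMD_SHIFT   21
#define PGDIR_SHIFT 30

#define PMD_SIZE     (1UL << PMD_SHIFT)                 /* bytes mapped by one PMD entry */
#define PMD_MASK     (~(PMD_SIZE - 1))                  /* clears the offset within a PMD */
#define PTRS_PER_PMD ((1UL << PGDIR_SHIFT) / PMD_SIZE)  /* PMD entries per PGD entry */

int main(void)
{
	printf("PMD_SIZE     = %lu KiB\n", PMD_SIZE >> 10);  /* 2048 KiB with these shifts */
	printf("PTRS_PER_PMD = %lu\n", PTRS_PER_PMD);        /* 512 with these shifts */
	return 0;
}

With PMD_SHIFT = 21 this prints a 2 MiB PMD span and 512 PMD entries per PGD entry, which is why many of the call sites below step or align addresses in PMD_SIZE chunks.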
/arch/s390/kvm/
gaccess.h:183 size = PMD_SIZE - (guestdest & ~PMD_MASK); in __copy_to_guest_fast()
196 while (n >= PMD_SIZE) { in __copy_to_guest_fast()
202 r = copy_to_user(uptr, from, PMD_SIZE); in __copy_to_guest_fast()
208 from += PMD_SIZE; in __copy_to_guest_fast()
209 n -= PMD_SIZE; in __copy_to_guest_fast()
210 guestdest += PMD_SIZE; in __copy_to_guest_fast()
309 size = PMD_SIZE - (guestsrc & ~PMD_MASK); in __copy_from_guest_fast()
322 while (n >= PMD_SIZE) { in __copy_from_guest_fast()
328 r = copy_from_user(to, uptr, PMD_SIZE); in __copy_from_guest_fast()
334 to += PMD_SIZE; in __copy_from_guest_fast()
[all …]
/arch/powerpc/include/asm/
pgtable-ppc64-64k.h:27 #define PMD_SIZE (1UL << PMD_SHIFT) macro
28 #define PMD_MASK (~(PMD_SIZE-1))
pgtable-ppc64-4k.h:27 #define PMD_SIZE (1UL << PMD_SHIFT) macro
28 #define PMD_MASK (~(PMD_SIZE-1))
/arch/x86/include/asm/
pgtable_32_types.h:11 # define PMD_SIZE (1UL << PMD_SHIFT) macro
12 # define PMD_MASK (~(PMD_SIZE - 1))
pgtable_64_types.h:47 #define PMD_SIZE (_AC(1, UL) << PMD_SHIFT) macro
48 #define PMD_MASK (~(PMD_SIZE - 1))
/arch/x86/mm/
init.c:62 pmds += (extra + PMD_SIZE - 1) >> PMD_SHIFT; in find_early_table_space()
64 pmds += (range + PMD_SIZE - 1) >> PMD_SHIFT; in find_early_table_space()
70 extra += PMD_SIZE; in find_early_table_space()
191 end_pfn = ((pos + (PMD_SIZE - 1))>>PMD_SHIFT) in init_memory_mapping()
194 end_pfn = ((pos + (PMD_SIZE - 1)) >> PMD_SHIFT) in init_memory_mapping()
205 start_pfn = ((pos + (PMD_SIZE - 1))>>PMD_SHIFT) in init_memory_mapping()
235 start_pfn = ((pos + (PMD_SIZE - 1))>>PMD_SHIFT) in init_memory_mapping()
init_64.c:260 for (; size; phys += PMD_SIZE, size -= PMD_SIZE) { in __init_extra_mapping()
306 unsigned long end = roundup((unsigned long)_brk_end, PMD_SIZE) - 1; in cleanup_highmap()
309 for (; vaddr + PMD_SIZE - 1 < vaddr_end; pmd++, vaddr += PMD_SIZE) { in cleanup_highmap()
415 for (; i < PTRS_PER_PMD; i++, address += PMD_SIZE) { in phys_pmd_init()
465 last_map_addr = (address & PMD_MASK) + PMD_SIZE; in phys_pmd_init()
952 p = vmemmap_alloc_block_buf(PMD_SIZE, node); in vmemmap_populate()
970 addr_end = addr + PMD_SIZE; in vmemmap_populate()
971 p_end = p + PMD_SIZE; in vmemmap_populate()
pgtable_32.c:74 if (vaddr & (PMD_SIZE-1)) { /* vaddr is misaligned */ in set_pmd_pfn()
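The x86/mm hits above repeat one idiom in several forms: rounding an address up (or down) to a PMD boundary before deciding how many large-page mappings or page tables are needed. A short hedged sketch of that arithmetic, with hypothetical shift values and helper names of my own (they are not kernel functions):

#include <stdio.h>

/* Hypothetical values for illustration only. */
#define PMD_SHIFT 21
#define PMD_SIZE  (1UL << PMD_SHIFT)
#define PMD_MASK  (~(PMD_SIZE - 1))

/* Round down to the PMD boundary containing addr. */
static unsigned long pmd_align_down(unsigned long addr)
{
	return addr & PMD_MASK;
}

/* Round up to the next PMD boundary, the same arithmetic as
   "(pos + (PMD_SIZE - 1)) >> PMD_SHIFT" in the hits above. */
static unsigned long pmd_align_up(unsigned long addr)
{
	return (addr + PMD_SIZE - 1) & PMD_MASK;
}

int main(void)
{
	unsigned long pos = 0x345678UL;

	printf("down: %#lx\n", pmd_align_down(pos));  /* 0x200000 with a 2 MiB PMD */
	printf("up:   %#lx\n", pmd_align_up(pos));    /* 0x400000 with a 2 MiB PMD */
	return 0;
}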
/arch/parisc/kernel/
pci-dma.c:91 if (end > PMD_SIZE) in map_pte_uncached()
92 end = PMD_SIZE; in map_pte_uncached()
126 vaddr = (vaddr + PMD_SIZE) & PMD_MASK; in map_pmd_uncached()
127 orig_vaddr += PMD_SIZE; in map_pmd_uncached()
171 if (end > PMD_SIZE) in unmap_uncached_pte()
172 end = PMD_SIZE; in unmap_uncached_pte()
211 vaddr = (vaddr + PMD_SIZE) & PMD_MASK; in unmap_uncached_pmd()
212 orig_vaddr += PMD_SIZE; in unmap_uncached_pmd()
/arch/arm/include/asm/
pgtable-2level.h:86 #define PMD_SIZE (1UL << PMD_SHIFT) macro
87 #define PMD_MASK (~(PMD_SIZE-1))
pgtable-3level.h:50 #define PMD_SIZE (1UL << PMD_SHIFT) macro
51 #define PMD_MASK (~(PMD_SIZE-1))
highmem.h:6 #define PKMAP_BASE (PAGE_OFFSET - PMD_SIZE)
/arch/m68k/include/asm/
pgtable_mm.h:37 #define PMD_SIZE (1UL << PMD_SHIFT) macro
38 #define PMD_MASK (~(PMD_SIZE-1))
/arch/mips/mm/
ioremap.c:29 if (end > PMD_SIZE) in remap_area_pte()
30 end = PMD_SIZE; in remap_area_pte()
61 address = (address + PMD_SIZE) & PMD_MASK; in remap_area_pmd()
/arch/tile/include/asm/
pgtable_64.h:31 #define PMD_SIZE HV_PAGE_SIZE_LARGE macro
32 #define PMD_MASK (~(PMD_SIZE-1))
/arch/um/include/asm/
pgtable-3level.h:27 #define PMD_SIZE (1UL << PMD_SHIFT) macro
28 #define PMD_MASK (~(PMD_SIZE-1))
/arch/s390/mm/
pgtable.c:284 if ((to | len) & (PMD_SIZE - 1)) in gmap_unmap_segment()
292 for (off = 0; off < len; off += PMD_SIZE) { in gmap_unmap_segment()
336 if ((from | to | len) & (PMD_SIZE - 1)) in gmap_map_segment()
345 for (off = 0; off < len; off += PMD_SIZE) { in gmap_map_segment()
481 address = (address + PMD_SIZE) & PMD_MASK; in gmap_discard()
487 address = (address + PMD_SIZE) & PMD_MASK; in gmap_discard()
493 address = (address + PMD_SIZE) & PMD_MASK; in gmap_discard()
499 address = (address + PMD_SIZE) & PMD_MASK; in gmap_discard()
505 size = min(to - address, PMD_SIZE - (address & ~PMD_MASK)); in gmap_discard()
508 address = (address + PMD_SIZE) & PMD_MASK; in gmap_discard()
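Two further idioms recur in the s390 gmap hits above: checking that a start address and a length are both PMD-aligned by OR-ing them together before masking, and advancing an address to the start of the next PMD. A small sketch under the same hypothetical shift value; the helper names are mine, not the kernel's:

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical shift value for illustration only. */
#define PMD_SHIFT 21
#define PMD_SIZE  (1UL << PMD_SHIFT)
#define PMD_MASK  (~(PMD_SIZE - 1))

/* True when both the start and the length sit on a PMD boundary,
   mirroring the "(to | len) & (PMD_SIZE - 1)" checks above. */
static bool pmd_aligned(unsigned long start, unsigned long len)
{
	return ((start | len) & (PMD_SIZE - 1)) == 0;
}

/* Step to the start of the next PMD, as in
   "address = (address + PMD_SIZE) & PMD_MASK". */
static unsigned long next_pmd(unsigned long address)
{
	return (address + PMD_SIZE) & PMD_MASK;
}

int main(void)
{
	printf("%d\n", pmd_aligned(0x400000UL, 0x200000UL));  /* 1: both 2 MiB aligned */
	printf("%d\n", pmd_aligned(0x400000UL, 0x123000UL));  /* 0: length not aligned */
	printf("%#lx\n", next_pmd(0x345678UL));               /* 0x400000 */
	return 0;
}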
/arch/arm/mm/
ioremap.c:106 addr += PMD_SIZE; in unmap_area_sections()
145 addr += PMD_SIZE; in remap_area_sections()
182 addr += PMD_SIZE; in remap_area_supersections()
rodata.c:88 pmd_end = min(ALIGN(virt + 1, PMD_SIZE), end); in set_page_attributes()
mmu.c:873 next = (addr + PMD_SIZE - 1) & PMD_MASK; in fill_pmd_gaps()
1033 for (addr = 0; addr < MODULES_VADDR; addr += PMD_SIZE) in prepare_page_table()
1038 addr = ((unsigned long)_etext + PMD_SIZE - 1) & PMD_MASK; in prepare_page_table()
1040 for ( ; addr < PAGE_OFFSET; addr += PMD_SIZE) in prepare_page_table()
1055 addr < VMALLOC_START; addr += PMD_SIZE) in prepare_page_table()
1107 for (addr = VMALLOC_START; addr; addr += PMD_SIZE) in devicemaps_init()
1222 end = ALIGN(__pa(__end_rodata), PMD_SIZE); in map_lowmem()
/arch/mips/include/asm/
pgtable-64.h:52 #define PMD_SIZE (1UL << PMD_SHIFT) macro
53 #define PMD_MASK (~(PMD_SIZE-1))
/arch/mn10300/mm/
pgtable.c:41 if (vaddr & (PMD_SIZE-1)) { /* vaddr is misaligned */ in set_pmd_pfn()
/arch/x86/power/
hibernate_64.c:60 for (j = 0; j < PTRS_PER_PMD; pmd++, j++, paddr += PMD_SIZE) { in res_phys_pud_init()
/arch/tile/mm/
hugetlbpage.c:39 BUG_ON(sz != PMD_SIZE); in huge_pte_alloc()
329 if (ps == PMD_SIZE) { in setup_hugepagesz()
