
Searched for refs:PMD_SIZE (results 1 – 25 of 87), sorted by relevance

/arch/sh/include/asm/
pgtable-3level.h:24 #define PMD_SIZE (1UL << PMD_SHIFT) [macro definition]
25 #define PMD_MASK (~(PMD_SIZE-1))
27 #define PTRS_PER_PMD ((1 << PGDIR_SHIFT) / PMD_SIZE)
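
These three lines are the pattern most architectures repeat: PMD_SIZE is the span of virtual memory covered by a single PMD entry, PMD_MASK clears the offset within that span, and PTRS_PER_PMD is how many PMD entries fit under one PGD entry. A minimal userspace sketch of the arithmetic (the shift values here are illustrative, not taken from any real configuration):

    #include <stdio.h>

    /* Illustrative shifts; real values depend on the architecture/config. */
    #define PMD_SHIFT   21
    #define PGDIR_SHIFT 30

    #define PMD_SIZE     (1UL << PMD_SHIFT)    /* bytes covered by one PMD entry */
    #define PMD_MASK     (~(PMD_SIZE - 1))     /* clears the in-PMD offset       */
    #define PTRS_PER_PMD ((1UL << PGDIR_SHIFT) / PMD_SIZE)

    int main(void)
    {
        unsigned long addr = 0x40123456UL;

        printf("PMD_SIZE = 0x%lx, PTRS_PER_PMD = %lu\n", PMD_SIZE, PTRS_PER_PMD);
        /* Round an address down to the start of its PMD region: */
        printf("0x%lx -> 0x%lx\n", addr, addr & PMD_MASK);
        return 0;
    }
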
/arch/x86/mm/
init.c:280 unsigned long start = round_down(mr[i].start, PMD_SIZE); in adjust_range_page_size_mask()
281 unsigned long end = round_up(mr[i].end, PMD_SIZE); in adjust_range_page_size_mask()
347 end_pfn = PFN_DOWN(PMD_SIZE); in split_mem_range()
349 end_pfn = round_up(pfn, PFN_DOWN(PMD_SIZE)); in split_mem_range()
351 end_pfn = round_up(pfn, PFN_DOWN(PMD_SIZE)); in split_mem_range()
361 start_pfn = round_up(pfn, PFN_DOWN(PMD_SIZE)); in split_mem_range()
363 end_pfn = round_down(limit_pfn, PFN_DOWN(PMD_SIZE)); in split_mem_range()
366 if (end_pfn > round_down(limit_pfn, PFN_DOWN(PMD_SIZE))) in split_mem_range()
367 end_pfn = round_down(limit_pfn, PFN_DOWN(PMD_SIZE)); in split_mem_range()
388 start_pfn = round_up(pfn, PFN_DOWN(PMD_SIZE)); in split_mem_range()
[all …]
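
The adjust_range_page_size_mask() and split_mem_range() hits above all follow one idiom: trim a physical range inward to PMD_SIZE boundaries so the interior can be mapped with large (2 MiB) pages while the unaligned head and tail fall back to 4 KiB pages. A hedged userspace sketch of that splitting (round_up/round_down are reimplemented here to keep the example self-contained):

    #include <stdio.h>

    #define PMD_SIZE 0x200000UL    /* assumed 2 MiB large-page size */

    /* Power-of-two rounding helpers, as in the kernel's math headers. */
    #define round_down(x, a) ((x) & ~((a) - 1))
    #define round_up(x, a)   (((x) + (a) - 1) & ~((a) - 1))

    int main(void)
    {
        unsigned long start = 0x00345000UL, end = 0x0a7ff000UL;
        unsigned long big_start = round_up(start, PMD_SIZE);
        unsigned long big_end   = round_down(end, PMD_SIZE);

        if (big_start < big_end) {
            printf("4K head: [0x%lx, 0x%lx)\n", start, big_start);
            printf("2M body: [0x%lx, 0x%lx)\n", big_start, big_end);
            printf("4K tail: [0x%lx, 0x%lx)\n", big_end, end);
        } else {
            printf("range too small for any large pages\n");
        }
        return 0;
    }
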
init_64.c:330 for (; size; phys += PMD_SIZE, size -= PMD_SIZE) { in __init_extra_mapping()
382 unsigned long end = roundup((unsigned long)_brk_end, PMD_SIZE) - 1; in cleanup_highmap()
393 for (; vaddr + PMD_SIZE - 1 < vaddr_end; pmd++, vaddr += PMD_SIZE) { in cleanup_highmap()
473 paddr_next = (paddr & PMD_MASK) + PMD_SIZE; in phys_pmd_init()
965 if (IS_ALIGNED(addr, PMD_SIZE) && in remove_pmd_table()
966 IS_ALIGNED(next, PMD_SIZE)) { in remove_pmd_table()
969 get_order(PMD_SIZE)); in remove_pmd_table()
981 PMD_SIZE)) { in remove_pmd_table()
983 get_order(PMD_SIZE)); in remove_pmd_table()
1247 all_end = roundup((unsigned long)_brk_end, PMD_SIZE); in mark_rodata_ro()
[all …]
kasan_init_64.c:43 ((end - addr) == PMD_SIZE) && in kasan_populate_pmd()
44 IS_ALIGNED(addr, PMD_SIZE)) { in kasan_populate_pmd()
45 p = early_alloc(PMD_SIZE, nid, false); in kasan_populate_pmd()
49 memblock_free(__pa(p), PMD_SIZE); in kasan_populate_pmd()
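
kasan_populate_pmd() only backs a region with one large PMD_SIZE allocation when the region is exactly one PMD long and PMD-aligned; anything else has to be filled page by page. A sketch of just that predicate (IS_ALIGNED is reimplemented; all the actual KASAN plumbing is omitted):

    #include <stdbool.h>
    #include <stdio.h>

    #define PMD_SIZE 0x200000UL    /* assumed 2 MiB */
    #define IS_ALIGNED(x, a) (((x) & ((a) - 1)) == 0)

    /* True when [addr, end) can be covered by a single large PMD mapping. */
    static bool can_use_large_pmd(unsigned long addr, unsigned long end)
    {
        return (end - addr) == PMD_SIZE && IS_ALIGNED(addr, PMD_SIZE);
    }

    int main(void)
    {
        printf("%d\n", can_use_large_pmd(0x200000UL, 0x400000UL)); /* 1: aligned, full PMD */
        printf("%d\n", can_use_large_pmd(0x201000UL, 0x401000UL)); /* 0: misaligned */
        return 0;
    }
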
/arch/x86/include/asm/
pgtable_32_types.h:12 # define PMD_SIZE (1UL << PMD_SHIFT) [macro definition]
13 # define PMD_MASK (~(PMD_SIZE - 1))
pgtable_64_types.h:71 #define PMD_SIZE (_AC(1, UL) << PMD_SHIFT) [macro definition]
72 #define PMD_MASK (~(PMD_SIZE - 1))
/arch/powerpc/include/asm/nohash/64/
pgtable-64k.h:38 #define PMD_SIZE (1UL << PMD_SHIFT) [macro definition]
39 #define PMD_MASK (~(PMD_SIZE-1))
pgtable-4k.h:31 #define PMD_SIZE (1UL << PMD_SHIFT) [macro definition]
32 #define PMD_MASK (~(PMD_SIZE-1))
/arch/arm64/mm/
hugetlbpage.c:65 *pgsize = PMD_SIZE; in find_num_contig()
81 case PMD_SIZE: in num_contig_ptes()
85 *pgsize = PMD_SIZE; in num_contig_ptes()
233 } else if (sz == PMD_SIZE) { in huge_pte_alloc()
239 } else if (sz == (PMD_SIZE * CONT_PMDS)) { in huge_pte_alloc()
276 if (!(sz == PMD_SIZE || sz == CONT_PMD_SIZE) && in huge_pte_offset()
299 } else if (pagesize != PUD_SIZE && pagesize != PMD_SIZE) { in arch_make_huge_pte()
417 case PMD_SIZE * CONT_PMDS: in setup_hugepagesz()
418 case PMD_SIZE: in setup_hugepagesz()
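
On arm64, hugetlb pages come either as single block mappings (PMD_SIZE, PUD_SIZE) or as "contiguous" runs of entries hinted to the TLB, so num_contig_ptes() translates a huge page size into an entry count plus a per-entry granule. A simplified sketch of that dispatch (values assume a 4 KiB granule and the contiguous-PTE case is omitted; this is not the kernel source):

    #include <stdio.h>

    /* Illustrative arm64 4K-granule values. */
    #define PMD_SIZE      0x200000UL
    #define PUD_SIZE      0x40000000UL
    #define CONT_PMDS     16
    #define CONT_PMD_SIZE (CONT_PMDS * PMD_SIZE)

    /* Map a huge page size to (number of entries, size each entry covers). */
    static int num_contig_ptes(unsigned long sz, unsigned long *pgsize)
    {
        switch (sz) {
        case PUD_SIZE:
            *pgsize = PUD_SIZE;
            return 1;
        case CONT_PMD_SIZE:
            *pgsize = PMD_SIZE;    /* 16 contiguous PMD entries */
            return CONT_PMDS;
        case PMD_SIZE:
            *pgsize = PMD_SIZE;    /* one block mapping */
            return 1;
        default:
            *pgsize = sz;
            return 1;
        }
    }

    int main(void)
    {
        unsigned long pgsize;
        int n = num_contig_ptes(CONT_PMD_SIZE, &pgsize);
        printf("%d entries of 0x%lx bytes each\n", n, pgsize);
        return 0;
    }
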
/arch/tile/mm/
hugetlbpage.c:73 if (sz >= PMD_SIZE) { in huge_pte_alloc()
74 BUG_ON(sz != PMD_SIZE && in huge_pte_alloc()
75 sz != (PMD_SIZE << huge_shift[HUGE_SHIFT_PMD])); in huge_pte_alloc()
85 BUG_ON(sz != PMD_SIZE); in huge_pte_alloc()
276 } else if (ps >= PMD_SIZE) { in __setup_hugepagesz()
336 BUILD_BUG_ON(ADDITIONAL_HUGE_SIZE >= PMD_SIZE || in add_default_hugepagesz()
/arch/s390/mm/
vmem.c:124 !(address & ~PMD_MASK) && (address + PMD_SIZE <= end) && in vmem_add_mem()
127 address += PMD_SIZE; in vmem_add_mem()
191 address += PMD_SIZE; in vmem_remove_range()
196 address += PMD_SIZE; in vmem_remove_range()
267 new_page = vmemmap_alloc_block(PMD_SIZE, node); in vmemmap_populate()
271 address = (address + PMD_SIZE) & PMD_MASK; in vmemmap_populate()
279 address = (address + PMD_SIZE) & PMD_MASK; in vmemmap_populate()
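
vmem_add_mem() only installs a large mapping when the address is PMD-aligned and a whole PMD_SIZE still fits before end; the vmemmap_populate() hits show the complementary idiom, (address + PMD_SIZE) & PMD_MASK, which jumps from an arbitrary address to the start of the next PMD region. A sketch of both together (mapping calls are stubbed out as prints; the 1 MiB segment size is an assumption in the spirit of s390):

    #include <stdio.h>

    #define PMD_SIZE 0x100000UL    /* assumed 1 MiB segment size */
    #define PMD_MASK (~(PMD_SIZE - 1))

    int main(void)
    {
        unsigned long address = 0x00080000UL, end = 0x00500000UL;

        while (address < end) {
            if (!(address & ~PMD_MASK) && address + PMD_SIZE <= end) {
                printf("large mapping at 0x%lx\n", address);
                address += PMD_SIZE;
            } else {
                printf("small pages from 0x%lx to the next boundary\n", address);
                address = (address + PMD_SIZE) & PMD_MASK;
            }
        }
        return 0;
    }
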
/arch/x86/boot/compressed/
pagetable.c:134 start = round_down(start, PMD_SIZE); in add_identity_map()
135 end = round_up(end, PMD_SIZE); in add_identity_map()
/arch/nios2/mm/
ioremap.c:33 if (end > PMD_SIZE) in remap_area_pte()
34 end = PMD_SIZE; in remap_area_pte()
70 address = (address + PMD_SIZE) & PMD_MASK; in remap_area_pmd()
/arch/parisc/kernel/
pci-dma.c:92 if (end > PMD_SIZE) in map_pte_uncached()
93 end = PMD_SIZE; in map_pte_uncached()
127 vaddr = (vaddr + PMD_SIZE) & PMD_MASK; in map_pmd_uncached()
128 orig_vaddr += PMD_SIZE; in map_pmd_uncached()
172 if (end > PMD_SIZE) in unmap_uncached_pte()
173 end = PMD_SIZE; in unmap_uncached_pte()
212 vaddr = (vaddr + PMD_SIZE) & PMD_MASK; in unmap_uncached_pmd()
213 orig_vaddr += PMD_SIZE; in unmap_uncached_pmd()
/arch/m68k/include/asm/
pgtable_mm.h:38 #define PMD_SIZE (1UL << PMD_SHIFT) [macro definition]
39 #define PMD_MASK (~(PMD_SIZE-1))
/arch/mips/mm/
ioremap.c:31 if (end > PMD_SIZE) in remap_area_pte()
32 end = PMD_SIZE; in remap_area_pte()
63 address = (address + PMD_SIZE) & PMD_MASK; in remap_area_pmd()
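
The nios2, parisc, and mips hits are all instances of the same classic two-level walker: the PTE-level helper clamps its end to PMD_SIZE because it works on offsets within a single PMD, and the PMD-level caller advances with (address + PMD_SIZE) & PMD_MASK. A userspace sketch of the control flow (the actual PTE writes are replaced with prints; the sizes are assumptions for a two-level layout):

    #include <stdio.h>

    #define PAGE_SIZE 0x1000UL
    #define PMD_SIZE  0x400000UL    /* assumed 4 MiB per PMD entry */
    #define PMD_MASK  (~(PMD_SIZE - 1))

    /* Map [address, address+size) within one PMD; address is an in-PMD offset. */
    static void remap_area_pte(unsigned long address, unsigned long size)
    {
        unsigned long end = address + size;

        if (end > PMD_SIZE)    /* never walk past this PMD's span */
            end = PMD_SIZE;
        for (; address < end; address += PAGE_SIZE)
            printf("  pte at offset 0x%lx\n", address);
    }

    static void remap_area_pmd(unsigned long address, unsigned long size)
    {
        unsigned long end = address + size;

        while (address < end) {
            printf("pmd covering 0x%lx\n", address & PMD_MASK);
            remap_area_pte(address & ~PMD_MASK, end - address);
            address = (address + PMD_SIZE) & PMD_MASK;
        }
    }

    int main(void)
    {
        remap_area_pmd(0x003ff000UL, 0x3000UL);  /* straddles a PMD boundary */
        return 0;
    }
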
/arch/um/include/asm/
pgtable-3level.h:28 #define PMD_SIZE (1UL << PMD_SHIFT) [macro definition]
29 #define PMD_MASK (~(PMD_SIZE-1))
/arch/arm/include/asm/
pgtable-2level.h:88 #define PMD_SIZE (1UL << PMD_SHIFT) [macro definition]
89 #define PMD_MASK (~(PMD_SIZE-1))
highmem.h:7 #define PKMAP_BASE (PAGE_OFFSET - PMD_SIZE)
/arch/tile/include/asm/
pgtable_64.h:32 #define PMD_SIZE HPAGE_SIZE [macro definition]
33 #define PMD_MASK (~(PMD_SIZE-1))
/arch/powerpc/mm/
pgtable-radix.c:84 if (map_page_size == PMD_SIZE) { in radix__map_kernel_page()
109 if (map_page_size == PMD_SIZE) { in radix__map_kernel_page()
238 else if (IS_ALIGNED(addr, PMD_SIZE) && gap >= PMD_SIZE && in create_physical_mapping()
240 mapping_size = PMD_SIZE; in create_physical_mapping()
247 max_mapping_size = PMD_SIZE; in create_physical_mapping()
251 if (split_text_mapping && (mapping_size == PMD_SIZE) && in create_physical_mapping()
786 split_kernel_mapping(addr, end, PMD_SIZE, (pte_t *)pmd); in remove_pmd_table()
/arch/metag/include/asm/
highmem.h:27 #define PKMAP_BASE (FIXADDR_START - PMD_SIZE)
/arch/arm64/include/asm/
pgtable-hwdef.h:59 #define PMD_SIZE (_AC(1, UL) << PMD_SHIFT) [macro definition]
60 #define PMD_MASK (~(PMD_SIZE-1))
108 #define CONT_PMD_SIZE (CONT_PMDS * PMD_SIZE)
/arch/sparc/mm/
hugetlbpage.c:280 if (sz >= PMD_SIZE) in huge_pte_alloc()
320 else if (size >= PMD_SIZE) in set_huge_pte_at()
357 else if (size >= PMD_SIZE) in huge_ptep_get_and_clear()
486 addr += PMD_SIZE; in hugetlb_free_pgd_range()
496 end -= PMD_SIZE; in hugetlb_free_pgd_range()
/arch/arm/mm/
mmu.c:1092 next = (addr + PMD_SIZE - 1) & PMD_MASK; in fill_pmd_gaps()
1184 if (!IS_ALIGNED(reg->base, PMD_SIZE)) { in adjust_lowmem_bounds()
1187 len = round_up(reg->base, PMD_SIZE) - reg->base; in adjust_lowmem_bounds()
1227 if (!IS_ALIGNED(block_start, PMD_SIZE)) in adjust_lowmem_bounds()
1229 else if (!IS_ALIGNED(block_end, PMD_SIZE)) in adjust_lowmem_bounds()
1248 memblock_limit = round_down(memblock_limit, PMD_SIZE); in adjust_lowmem_bounds()
1273 for (addr = 0; addr < MODULES_VADDR; addr += PMD_SIZE) in prepare_page_table()
1278 addr = ((unsigned long)_exiprom + PMD_SIZE - 1) & PMD_MASK; in prepare_page_table()
1280 for ( ; addr < PAGE_OFFSET; addr += PMD_SIZE) in prepare_page_table()
1295 addr < VMALLOC_START; addr += PMD_SIZE) in prepare_page_table()
[all …]
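
adjust_lowmem_bounds() has to cope with memblock regions whose base is not section aligned: the head up to round_up(base, PMD_SIZE) cannot be section-mapped, and the final memblock_limit is rounded down so early allocations never land above what the section mappings actually cover. A sketch of just that bounds arithmetic, simplified to a single region (not the kernel code):

    #include <stdio.h>

    #define PMD_SIZE 0x200000UL    /* assumed 2 MiB section size */
    #define IS_ALIGNED(x, a) (((x) & ((a) - 1)) == 0)
    #define round_up(x, a)   (((x) + (a) - 1) & ~((a) - 1))
    #define round_down(x, a) ((x) & ~((a) - 1))

    int main(void)
    {
        unsigned long base = 0x60100000UL, size = 0x10000000UL;
        unsigned long memblock_limit = base + size;

        if (!IS_ALIGNED(base, PMD_SIZE)) {
            /* the unaligned head cannot be covered by a section mapping */
            unsigned long len = round_up(base, PMD_SIZE) - base;
            printf("unaligned head of 0x%lx bytes\n", len);
        }
        /* keep early allocations below a section-aligned limit */
        memblock_limit = round_down(memblock_limit, PMD_SIZE);
        printf("memblock_limit = 0x%lx\n", memblock_limit);
        return 0;
    }
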
