Searched refs:PMD_SIZE (Results 1 – 25 of 41) sorted by relevance
32  if (end > PMD_SIZE)  in io_remap_pte_range()
33  end = PMD_SIZE;  in io_remap_pte_range()
57  address = (address + PMD_SIZE) & PMD_MASK;  in io_remap_pmd_range()
36  if (end > PMD_SIZE)  in io_remap_pte_range()
37  end = PMD_SIZE;  in io_remap_pte_range()
100  address = (address + PMD_SIZE) & PMD_MASK;  in io_remap_pmd_range()
37  #define PMD_SIZE (1UL << PMD_SHIFT)  macro
38  #define PMD_MASK (~(PMD_SIZE-1))
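The pair of macros above is the pattern repeated across architectures in the hits below: PMD_SIZE is the span of virtual address space covered by a single PMD entry, and PMD_MASK clears the offset bits within that span. A minimal standalone sketch of the arithmetic, assuming PMD_SHIFT = 21 (the x86-64 value with 4 KiB base pages, giving 2 MiB PMDs; the value is illustrative, not taken from these results):

#include <stdio.h>

/* Assumed geometry for illustration only: x86-64, 4 KiB base pages. */
#define PMD_SHIFT 21
#define PMD_SIZE  (1UL << PMD_SHIFT)   /* 0x200000: 2 MiB per PMD entry */
#define PMD_MASK  (~(PMD_SIZE - 1))    /* clears the low 21 offset bits */

int main(void)
{
        unsigned long addr = 0x123456789UL;

        /* Round down to the start of the PMD covering addr. */
        unsigned long down = addr & PMD_MASK;

        /* Round up to the next PMD boundary, the idiom seen in the
         * find_early_table_space() and dvma_map_cpu() hits below. */
        unsigned long up = (addr + PMD_SIZE - 1) & PMD_MASK;

        /* Non-zero low bits mean addr is not PMD-aligned, the test
         * the set_pmd_pfn() hits make. */
        int misaligned = (addr & (PMD_SIZE - 1)) != 0;

        printf("down=%#lx up=%#lx misaligned=%d\n", down, up, misaligned);
        return 0;
}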
90  if (end > PMD_SIZE)  in map_pte_uncached()
91  end = PMD_SIZE;  in map_pte_uncached()
123  vaddr = (vaddr + PMD_SIZE) & PMD_MASK;  in map_pmd_uncached()
124  orig_vaddr += PMD_SIZE;  in map_pmd_uncached()
168  if (end > PMD_SIZE)  in unmap_uncached_pte()
169  end = PMD_SIZE;  in unmap_uncached_pte()
206  vaddr = (vaddr + PMD_SIZE) & PMD_MASK;  in unmap_uncached_pmd()
207  orig_vaddr += PMD_SIZE;  in unmap_uncached_pmd()
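The map_pte_uncached()/map_pmd_uncached() hits, like the io_remap_* and remap_area_* ones, share one walk pattern: the PTE-level helper is clamped so it never crosses a PMD boundary, and the PMD-level loop then advances to the next PMD-aligned address. A sketch of that shape under the same assumed geometry, with pte_level_op and pmd_level_walk as hypothetical stand-ins for the kernel helpers:

#define PMD_SHIFT 21                    /* assumed, as in the sketch above */
#define PMD_SIZE  (1UL << PMD_SHIFT)
#define PMD_MASK  (~(PMD_SIZE - 1))

/* Operates on PTEs; address here is the offset within one PMD. */
static void pte_level_op(unsigned long address, unsigned long size)
{
        unsigned long end = address + size;

        /* Clamp: never walk past the PMD this helper was called for. */
        if (end > PMD_SIZE)
                end = PMD_SIZE;
        /* ... touch PTEs in [address, end) ... */
}

static void pmd_level_walk(unsigned long address, unsigned long size)
{
        unsigned long end = address + size;

        while (address < end) {
                pte_level_op(address & ~PMD_MASK, end - address);
                /* Advance to the start of the next PMD, exactly the
                 * "(address + PMD_SIZE) & PMD_MASK" line in the hits. */
                address = (address + PMD_SIZE) & PMD_MASK;
        }
}

int main(void)
{
        pmd_level_walk(0x1ff000UL, 0x3000UL);   /* crosses one PMD boundary */
        return 0;
}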
237  for (; size; phys += PMD_SIZE, size -= PMD_SIZE) {  in __init_extra_mapping()
282  unsigned long end = roundup((unsigned long)_end, PMD_SIZE) - 1;  in cleanup_highmap()
286  for (; pmd < last_pmd; pmd++, vaddr += PMD_SIZE) {  in cleanup_highmap()
389  for (; i < PTRS_PER_PMD; i++, address += PMD_SIZE) {  in phys_pmd_init()
437  last_map_addr = (address & PMD_MASK) + PMD_SIZE;  in phys_pmd_init()
560  pmds = (extra + PMD_SIZE - 1) >> PMD_SHIFT;  in find_early_table_space()
562  pmds = (end + PMD_SIZE - 1) >> PMD_SHIFT;  in find_early_table_space()
715  end_pfn = ((pos + (PMD_SIZE - 1)) >> PMD_SHIFT)  in init_memory_mapping()
725  start_pfn = ((pos + (PMD_SIZE - 1))>>PMD_SHIFT)  in init_memory_mapping()
749  start_pfn = ((pos + (PMD_SIZE - 1))>>PMD_SHIFT)  in init_memory_mapping()
[all …]
213  vaddr += PMD_SIZE;  in page_table_range_init()
555  end = (FIXADDR_TOP + PMD_SIZE - 1) & PMD_MASK;  in early_ioremap_page_table_range_init()
831  pmds = (end + PMD_SIZE - 1) >> PMD_SHIFT;  in find_early_table_space()
838  extra += PMD_SIZE;  in find_early_table_space()
913  big_page_start = PMD_SIZE;  in init_memory_mapping()
921  end_pfn = ((start + (PMD_SIZE - 1))>>PMD_SHIFT)  in init_memory_mapping()
928  start_pfn = ((start + (PMD_SIZE - 1))>>PMD_SHIFT)  in init_memory_mapping()
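The init_memory_mapping() hits above are cut off at the source line break; in the corresponding kernel code the rounding expression continues with << (PMD_SHIFT - PAGE_SHIFT), converting a byte address rounded up to a PMD boundary into a page-frame number. A worked example of that conversion, again assuming 4 KiB pages and 2 MiB PMDs:

#include <stdio.h>

/* Assumed geometry: 4 KiB pages (PAGE_SHIFT = 12), 2 MiB PMDs. */
#define PAGE_SHIFT 12
#define PMD_SHIFT  21
#define PMD_SIZE   (1UL << PMD_SHIFT)

int main(void)
{
        unsigned long pos = 0x1f0000UL;  /* arbitrary, not PMD-aligned */

        /* Count whole PMDs after rounding up, then scale the PMD count
         * back to 4 KiB page frames. */
        unsigned long pfn = ((pos + (PMD_SIZE - 1)) >> PMD_SHIFT)
                            << (PMD_SHIFT - PAGE_SHIFT);

        printf("pos=%#lx -> pfn=%#lx (byte address %#lx)\n",
               pos, pfn, pfn << PAGE_SHIFT);
        return 0;
}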
74 if (vaddr & (PMD_SIZE-1)) { /* vaddr is misaligned */ in set_pmd_pfn()
140  BUG_ON(sz != PMD_SIZE);  in huge_pte_alloc()
432  if (ps == PMD_SIZE) {  in setup_hugepagesz()
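In the hugetlb hits, PMD_SIZE doubles as the PMD-level huge page size: setup_hugepagesz() accepts a requested page size only if it matches a level the hardware can map, and huge_pte_alloc() asserts it was handed exactly that size. A minimal sketch of the size check, with is_supported_hugepagesz() as a hypothetical stand-in:

#include <stdbool.h>

#define PMD_SHIFT 21                    /* assumed 2 MiB PMDs, as above */
#define PMD_SIZE  (1UL << PMD_SHIFT)

/* Hypothetical stand-in for the kind of test setup_hugepagesz() makes:
 * only a PMD-sized request can be served by a PMD-level huge mapping. */
static bool is_supported_hugepagesz(unsigned long ps)
{
        return ps == PMD_SIZE;          /* 2 MiB with 4 KiB base pages */
}

int main(void)
{
        return is_supported_hugepagesz(2UL << 20) ? 0 : 1;
}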
28  if (end > PMD_SIZE)  in remap_area_pte()
29  end = PMD_SIZE;  in remap_area_pte()
62  address = (address + PMD_SIZE) & PMD_MASK;  in remap_area_pmd()
27  #define PMD_SIZE (1UL << PMD_SHIFT)  macro
28  #define PMD_MASK (~(PMD_SIZE-1))
64  #define PMD_SIZE (1UL << PMD_SHIFT)  macro
65  #define PMD_MASK (~(PMD_SIZE-1))
43  # define PMD_SIZE (1UL << PMD_SHIFT)  macro
44  # define PMD_MASK (~(PMD_SIZE - 1))
141  #define PMD_SIZE (_AC(1, UL) << PMD_SHIFT)  macro
142  #define PMD_MASK (~(PMD_SIZE - 1))
50 end = (end + PMD_SIZE - 1) & PMD_MASK; in early_mapping_set_exec()
42  #define PMD_SIZE (1UL << PMD_SHIFT)  macro
43  #define PMD_MASK (~(PMD_SIZE-1))
113  end = (end + PMD_SIZE - 1) & PMD_MASK;  in page_table_range_init()
129  vaddr += PMD_SIZE;  in page_table_range_init()
42 if (vaddr & (PMD_SIZE-1)) { /* vaddr is misaligned */ in set_pmd_pfn()
87 end = (start + PMD_SIZE - 1) & PMD_MASK; in vdso_addr()
58 for (j = 0; j < PTRS_PER_PMD; pmd++, j++, paddr += PMD_SIZE) { in res_phys_pud_init()
124  #define PMD_SIZE (1UL << PMD_SHIFT)  macro
125  #define PMD_MASK (~(PMD_SIZE-1))
125 end3 = (vaddr + (PMD_SIZE-1)) & PMD_MASK; in dvma_map_cpu()
31  #define PMD_SIZE (1UL << PMD_SHIFT)  macro
32  #define PMD_MASK (~(PMD_SIZE-1))
49  #define PMD_SIZE (1UL << PMD_SHIFT)  macro
50  #define PMD_MASK (~(PMD_SIZE-1))