/arch/hexagon/include/asm/

  page.h:
     16  #define PAGE_SHIFT 12  [macro]
     21  #define PAGE_SHIFT 14  [macro]
     26  #define PAGE_SHIFT 16  [macro]
     31  #define PAGE_SHIFT 18  [macro]
     36  #define PAGE_SHIFT 20  [macro]
     49  #define HUGETLB_PAGE_ORDER (HPAGE_SHIFT-PAGE_SHIFT)
     53  #define PAGE_SIZE (1UL << PAGE_SHIFT)
     54  #define PAGE_MASK (~((1 << PAGE_SHIFT) - 1))
    100  #define virt_addr_valid(kaddr) pfn_valid(__pa(kaddr) >> PAGE_SHIFT)
    128  #define page_to_phys(page) (page_to_pfn(page) << PAGE_SHIFT)
    [all …]

  mem-layout.h:
     38  #define PHYS_PFN_OFFSET (PHYS_OFFSET >> PAGE_SHIFT)
     76  #define FIXADDR_SIZE (__end_of_fixed_addresses << PAGE_SHIFT)
     86  #define PKMAP_NR(virt) ((virt - PKMAP_BASE) >> PAGE_SHIFT)
     87  #define PKMAP_ADDR(nr) (PKMAP_BASE + ((nr) << PAGE_SHIFT))

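Every page.h in these listings derives the same family of constants from PAGE_SHIFT: PAGE_SIZE is 1 << PAGE_SHIFT, PAGE_MASK clears the in-page offset, and a physical address and a page frame number differ by a shift of PAGE_SHIFT. A minimal user-space sketch of that arithmetic (stand-alone illustrative C with hypothetical DEMO_* names, fixed at a 4 KB page; not kernel code):

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-ins for the kernel's PAGE_* macros, fixed at 4 KB. */
    #define DEMO_PAGE_SHIFT 12
    #define DEMO_PAGE_SIZE  (1UL << DEMO_PAGE_SHIFT)   /* 4096 */
    #define DEMO_PAGE_MASK  (~(DEMO_PAGE_SIZE - 1))    /* clears the low 12 bits */

    int main(void)
    {
        uint64_t phys = 0x12345678;

        uint64_t pfn    = phys >> DEMO_PAGE_SHIFT;  /* page frame number: 0x12345    */
        uint64_t base   = phys &  DEMO_PAGE_MASK;   /* page-aligned base: 0x12345000 */
        uint64_t offset = phys & ~DEMO_PAGE_MASK;   /* offset in page:    0x678      */

        printf("pfn=%#llx base=%#llx offset=%#llx\n",
               (unsigned long long)pfn, (unsigned long long)base,
               (unsigned long long)offset);
        return 0;
    }

The page_to_phys()/virt_to_page() style macros in the listings apply the same shift to struct page pointers and kernel virtual addresses via page_to_pfn() and __pa().
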
/arch/sparc/mm/

  init_32.c:
     66  unsigned long start_pfn = sp_banks[i].base_addr >> PAGE_SHIFT;  [in calc_highpages()]
     67  unsigned long end_pfn = (sp_banks[i].base_addr + sp_banks[i].num_bytes) >> PAGE_SHIFT;  [in calc_highpages()]
     84  unsigned long tmp = pfn_base + (SRMMU_MAXMEM >> PAGE_SHIFT);  [in calc_max_low_pfn()]
     87  last_pfn = (sp_banks[0].base_addr + sp_banks[0].num_bytes) >> PAGE_SHIFT;  [in calc_max_low_pfn()]
     89  curr_pfn = sp_banks[i].base_addr >> PAGE_SHIFT;  [in calc_max_low_pfn()]
     97  last_pfn = (sp_banks[i].base_addr + sp_banks[i].num_bytes) >> PAGE_SHIFT;  [in calc_max_low_pfn()]
    176  start_pfn >>= PAGE_SHIFT;  [in bootmem_init()]
    178  max_pfn = end_of_phys_memory >> PAGE_SHIFT;  [in bootmem_init()]
    183  if (max_low_pfn > pfn_base + (SRMMU_MAXMEM >> PAGE_SHIFT)) {  [in bootmem_init()]
    184  highstart_pfn = pfn_base + (SRMMU_MAXMEM >> PAGE_SHIFT);  [in bootmem_init()]
    [all …]

/arch/xtensa/include/asm/

  page.h:
     23  #define PAGE_SHIFT 12  [macro]
     24  #define PAGE_SIZE (__XTENSA_UL_CONST(1) << PAGE_SHIFT)
     67  # define DCACHE_ALIAS_ORDER (DCACHE_WAY_SHIFT - PAGE_SHIFT)
     69  # define DCACHE_ALIAS(a) (((a) & DCACHE_ALIAS_MASK) >> PAGE_SHIFT)
     78  # define ICACHE_ALIAS_ORDER (ICACHE_WAY_SHIFT - PAGE_SHIFT)
     80  # define ICACHE_ALIAS(a) (((a) & ICACHE_ALIAS_MASK) >> PAGE_SHIFT)
    120  asm ("nsau %0, %1" : "=r" (lz) : "r" ((size - 1) >> PAGE_SHIFT));  [in get_order()]
    162  #define ARCH_PFN_OFFSET (PHYS_OFFSET >> PAGE_SHIFT)
    184  #define virt_to_page(kaddr) pfn_to_page(__pa(kaddr) >> PAGE_SHIFT)
    185  #define page_to_virt(page) __va(page_to_pfn(page) << PAGE_SHIFT)
    [all …]

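The get_order() snippet on line 120 uses the xtensa nsau instruction (count of leading zeros) to find the allocation order, i.e. the smallest order such that (1 << order) pages cover size bytes. A portable sketch of the same computation (illustrative demo_ helper, assumes size > 0; not the kernel's implementation):

    /* Smallest order such that (1UL << order) pages of (1UL << page_shift)
     * bytes are enough to hold size bytes. */
    static inline unsigned int demo_get_order(unsigned long size,
                                              unsigned int page_shift)
    {
        unsigned long pages = (size - 1) >> page_shift;
        unsigned int order = 0;

        while (pages) {        /* equivalent to fls(pages) */
            pages >>= 1;
            order++;
        }
        return order;
    }

    /* demo_get_order(4096, 12)  == 0   (one 4 KB page)
     * demo_get_order(8192, 12)  == 1   (two pages)
     * demo_get_order(12288, 12) == 2   (rounded up to four pages) */
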
/arch/mips/include/asm/

  page.h:
     21  #define PAGE_SHIFT 12  [macro]
     24  #define PAGE_SHIFT 13  [macro]
     27  #define PAGE_SHIFT 14  [macro]
     30  #define PAGE_SHIFT 15  [macro]
     33  #define PAGE_SHIFT 16  [macro]
     35  #define PAGE_SIZE (_AC(1,UL) << PAGE_SHIFT)
     36  #define PAGE_MASK (~((1 << PAGE_SHIFT) - 1))
     54  return (PAGE_SHIFT - 10) / 2;  [in page_size_ftlb()]
     62  #define HPAGE_SHIFT (PAGE_SHIFT + PAGE_SHIFT - 3)
     65  #define HUGETLB_PAGE_ORDER (HPAGE_SHIFT - PAGE_SHIFT)
    [all …]

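The MIPS definition on line 62 ties the huge-page size to the base page size: HPAGE_SHIFT = PAGE_SHIFT + PAGE_SHIFT - 3, so HUGETLB_PAGE_ORDER works out to PAGE_SHIFT - 3. Worked out for the PAGE_SHIFT values listed above (arithmetic only, derived from the macros shown):

    PAGE_SHIFT 12 (4 KB pages)   ->  HPAGE_SHIFT 21  ->    2 MB huge pages  (order  9, 512 base pages)
    PAGE_SHIFT 13 (8 KB pages)   ->  HPAGE_SHIFT 23  ->    8 MB huge pages  (order 10)
    PAGE_SHIFT 14 (16 KB pages)  ->  HPAGE_SHIFT 25  ->   32 MB huge pages  (order 11)
    PAGE_SHIFT 15 (32 KB pages)  ->  HPAGE_SHIFT 27  ->  128 MB huge pages  (order 12)
    PAGE_SHIFT 16 (64 KB pages)  ->  HPAGE_SHIFT 29  ->  512 MB huge pages  (order 13)
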
/arch/openrisc/include/asm/

  page.h:
     21  #define PAGE_SHIFT 13  [macro]
     23  #define PAGE_SIZE (1 << PAGE_SHIFT)
     25  #define PAGE_SIZE (1UL << PAGE_SHIFT)
     75  #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)
     76  #define pfn_to_virt(pfn) __va((pfn) << PAGE_SHIFT)
     79  (mem_map + (((unsigned long)(addr)-PAGE_OFFSET) >> PAGE_SHIFT))
     81  #define page_to_phys(page) ((dma_addr_t)page_to_pfn(page) << PAGE_SHIFT)

/arch/arc/include/uapi/asm/

  page.h:
     17  #define PAGE_SHIFT 14  [macro]
     19  #define PAGE_SHIFT 12  [macro]
     28  #define PAGE_SHIFT 13  [macro]
     31  #define PAGE_SIZE _BITUL(PAGE_SHIFT) /* Default 8K */

/arch/alpha/include/asm/

  mmzone.h:
     39  (((p) >> PAGE_SHIFT) - PLAT_NODE_DATA(n)->gendata.node_start_pfn)
     45  temp = p >> PAGE_SHIFT;  [in PLAT_NODE_DATA_LOCALNR()]
     68  << PAGE_SHIFT))
     73  #define virt_to_page(kaddr) pfn_to_page(__pa(kaddr) >> PAGE_SHIFT)
     95  kvirt = (unsigned long)__va(pte_val(x) >> (32-PAGE_SHIFT)); \
    102  (page_to_pfn(page) << PAGE_SHIFT)
    104  #define pfn_to_nid(pfn) pa_to_nid(((u64)(pfn) << PAGE_SHIFT))
    109  #define virt_addr_valid(kaddr) pfn_valid((__pa(kaddr) >> PAGE_SHIFT))

  pgtable.h:
     32  #define PMD_SHIFT (PAGE_SHIFT + (PAGE_SHIFT-3))
     37  #define PGDIR_SHIFT (PAGE_SHIFT + 2*(PAGE_SHIFT-3))
     45  #define PTRS_PER_PTE (1UL << (PAGE_SHIFT-3))
     46  #define PTRS_PER_PMD (1UL << (PAGE_SHIFT-3))
     47  #define PTRS_PER_PGD (1UL << (PAGE_SHIFT-3))
     52  #define PTRS_PER_PAGE (1UL << (PAGE_SHIFT-3))
    171  ((unsigned long)(address)>>(PAGE_SHIFT-SIZEOF_PTR_LOG2)&PTR_MASK&~PAGE_MASK)
    194  #define KSEG_PFN (0xc0000000000UL >> PAGE_SHIFT)
    196  ((((pfn) & KSEG_PFN) == (0x40000000000UL >> PAGE_SHIFT)) \
    207  #define page_to_pa(page) (((page) - mem_map) << PAGE_SHIFT)
    [all …]

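In the Alpha pgtable.h entries the whole three-level table geometry falls out of PAGE_SHIFT: Alpha uses 8 KB pages (PAGE_SHIFT = 13) and 8-byte page-table entries, which is where the recurring (PAGE_SHIFT - 3) comes from. Worked out from the macros above (arithmetic only):

    PTRS_PER_PTE = 1 << (13 - 3)  = 1024 entries per table (8 KB page / 8-byte PTE)
    PMD_SHIFT    = 13 + 10        = 23   -> one PMD entry maps 2^23 bytes = 8 MB
    PGDIR_SHIFT  = 13 + 2 * 10    = 33   -> one PGD entry maps 2^33 bytes = 8 GB
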
/arch/sh/include/asm/

  page.h:
     13  # define PAGE_SHIFT 12  [macro]
     15  # define PAGE_SHIFT 13  [macro]
     17  # define PAGE_SHIFT 14  [macro]
     19  # define PAGE_SHIFT 16  [macro]
     24  #define PAGE_SIZE (_AC(1, UL) << PAGE_SHIFT)
     45  #define HUGETLB_PAGE_ORDER (HPAGE_SHIFT-PAGE_SHIFT)
    168  #define pfn_to_kaddr(pfn) __va((pfn) << PAGE_SHIFT)
    169  #define page_to_phys(page) (page_to_pfn(page) << PAGE_SHIFT)
    177  #define PFN_START (__MEMORY_START >> PAGE_SHIFT)
    179  #define virt_to_page(kaddr) pfn_to_page(__pa(kaddr) >> PAGE_SHIFT)
    [all …]

/arch/ia64/include/asm/

  page.h:
     31  # define PAGE_SHIFT 12  [macro]
     33  # define PAGE_SHIFT 13  [macro]
     35  # define PAGE_SHIFT 14  [macro]
     37  # define PAGE_SHIFT 16  [macro]
     42  #define PAGE_SIZE (__IA64_UL_CONST(1) << PAGE_SHIFT)
     96  #define virt_addr_valid(kaddr) pfn_valid(__pa(kaddr) >> PAGE_SHIFT)
    125  #define page_to_phys(page) (page_to_pfn(page) << PAGE_SHIFT)
    126  #define virt_to_page(kaddr) pfn_to_page(__pa(kaddr) >> PAGE_SHIFT)
    127  #define pfn_to_kaddr(pfn) __va((pfn) << PAGE_SHIFT)
    152  | (REGION_OFFSET(x) >> (HPAGE_SHIFT-PAGE_SHIFT)))
    [all …]

/arch/arc/include/asm/

  highmem.h:
     17  #define KM_TYPE_NR ((FIXMAP_SIZE >> PAGE_SHIFT)/NR_CPUS)
     18  #define FIXMAP_ADDR(nr) (FIXMAP_BASE + ((nr) << PAGE_SHIFT))
     23  #define LAST_PKMAP (PKMAP_SIZE >> PAGE_SHIFT)
     25  #define PKMAP_ADDR(nr) (PKMAP_BASE + ((nr) << PAGE_SHIFT))
     26  #define PKMAP_NR(virt) (((virt) - PKMAP_BASE) >> PAGE_SHIFT)

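FIXMAP_ADDR()/PKMAP_ADDR() and PKMAP_NR() above are inverse mappings between a slot index and a virtual address, one page per slot. A small round-trip sketch (stand-alone illustrative C; the DEMO_PKMAP_BASE value is made up, not ARC's real layout):

    #include <assert.h>

    #define DEMO_PAGE_SHIFT 12
    #define DEMO_PKMAP_BASE 0xfe000000UL             /* illustrative base address */

    #define DEMO_PKMAP_ADDR(nr)  (DEMO_PKMAP_BASE + ((nr) << DEMO_PAGE_SHIFT))
    #define DEMO_PKMAP_NR(virt)  (((virt) - DEMO_PKMAP_BASE) >> DEMO_PAGE_SHIFT)

    int main(void)
    {
        unsigned long slot = 5;
        unsigned long virt = DEMO_PKMAP_ADDR(slot);  /* 0xfe005000, slot 5's page */

        assert(DEMO_PKMAP_NR(virt) == slot);         /* index -> address -> index */
        return 0;
    }

KM_TYPE_NR and LAST_PKMAP size those windows the same way: a window of FIXMAP_SIZE or PKMAP_SIZE bytes holds size >> PAGE_SHIFT such slots.
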
/arch/x86/mm/

  init.c:
    102  order = get_order((unsigned long)num << PAGE_SHIFT);  [in alloc_low_pages()]
    111  min_pfn_mapped << PAGE_SHIFT,  [in alloc_low_pages()]
    112  max_pfn_mapped << PAGE_SHIFT,  [in alloc_low_pages()]
    123  pfn = ret >> PAGE_SHIFT;  [in alloc_low_pages()]
    132  adr = __va((pfn + i) << PAGE_SHIFT);  [in alloc_low_pages()]
    136  return __va(pfn << PAGE_SHIFT);  [in alloc_low_pages()]
    159  pgt_buf_start = base >> PAGE_SHIFT;  [in early_alloc_pgt_buf()]
    161  pgt_buf_top = pgt_buf_start + (tables >> PAGE_SHIFT);  [in early_alloc_pgt_buf()]
    293  mr[nr_range].start = start_pfn<<PAGE_SHIFT;  [in save_mr()]
    294  mr[nr_range].end = end_pfn<<PAGE_SHIFT;  [in save_mr()]
    [all …]

  init_32.c:
     76  paravirt_alloc_pmd(&init_mm, __pa(pmd_table) >> PAGE_SHIFT);  [in one_md_table_init()]
    101  paravirt_alloc_pte(&init_mm, __pa(page_table) >> PAGE_SHIFT);  [in one_page_table_init()]
    183  paravirt_alloc_pte(&init_mm, __pa(newpte) >> PAGE_SHIFT);  [in page_table_kmap_check()]
    188  paravirt_release_pte(__pa(pte) >> PAGE_SHIFT);  [in page_table_kmap_check()]
    269  start_pfn = start >> PAGE_SHIFT;  [in kernel_physical_mapping_init()]
    270  end_pfn = end >> PAGE_SHIFT;  [in kernel_physical_mapping_init()]
    294  pgd_idx = pgd_index((pfn<<PAGE_SHIFT) + PAGE_OFFSET);  [in kernel_physical_mapping_init()]
    302  pmd_idx = pmd_index((pfn<<PAGE_SHIFT) + PAGE_OFFSET);  [in kernel_physical_mapping_init()]
    326  pfn &= PMD_MASK >> PAGE_SHIFT;  [in kernel_physical_mapping_init()]
    345  pte_ofs = pte_index((pfn<<PAGE_SHIFT) + PAGE_OFFSET);  [in kernel_physical_mapping_init()]
    [all …]

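Line 326 of init_32.c rounds a pfn down to a large-page boundary by shifting PMD_MASK into pfn units. A sketch of that step with illustrative values (4 KB pages, 2 MB PMD mappings; the demo_ names are hypothetical, not the kernel's):

    #define DEMO_PAGE_SHIFT 12
    #define DEMO_PMD_SHIFT  21                               /* 2 MB large pages */
    #define DEMO_PMD_MASK   (~((1UL << DEMO_PMD_SHIFT) - 1))

    /* pfn &= PMD_MASK >> PAGE_SHIFT: clear the low PMD_SHIFT - PAGE_SHIFT = 9
     * bits of the pfn, i.e. round down to the first page of its 2 MB region. */
    unsigned long demo_pmd_align_pfn(unsigned long pfn)
    {
        return pfn & (DEMO_PMD_MASK >> DEMO_PAGE_SHIFT);
    }

    /* demo_pmd_align_pfn(0x12345) == 0x12200: phys 0x12345000 lies in the
     * 2 MB region that starts at phys 0x12200000, i.e. at pfn 0x12200. */
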
/arch/arm/mm/

  tlb-v7.S:
     36  mov r0, r0, lsr #PAGE_SHIFT @ align address
     37  mov r1, r1, lsr #PAGE_SHIFT
     43  orr r0, r3, r0, lsl #PAGE_SHIFT @ Create initial MVA
     44  mov r1, r1, lsl #PAGE_SHIFT
     70  mov r0, r0, lsr #PAGE_SHIFT @ align address
     71  mov r1, r1, lsr #PAGE_SHIFT
     72  mov r0, r0, lsl #PAGE_SHIFT
     73  mov r1, r1, lsl #PAGE_SHIFT

  tlb-v6.S:
     38  mov r0, r0, lsr #PAGE_SHIFT @ align address
     39  mov r1, r1, lsr #PAGE_SHIFT
     41  orr r0, r3, r0, lsl #PAGE_SHIFT @ Create initial MVA
     42  mov r1, r1, lsl #PAGE_SHIFT
     69  mov r0, r0, lsr #PAGE_SHIFT @ align address
     70  mov r1, r1, lsr #PAGE_SHIFT
     71  mov r0, r0, lsl #PAGE_SHIFT
     72  mov r1, r1, lsl #PAGE_SHIFT

  mmap.c:
     18  (((pgoff)<<PAGE_SHIFT) & (SHMLBA-1)))
     51  (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1))  [in arch_get_unmapped_area()]
     76  info.align_offset = pgoff << PAGE_SHIFT;  [in arch_get_unmapped_area()]
    105  (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1))  [in arch_get_unmapped_area_topdown()]
    127  info.align_offset = pgoff << PAGE_SHIFT;  [in arch_get_unmapped_area_topdown()]
    166  return (pfn + (size >> PAGE_SHIFT)) <= (1 + (PHYS_MASK >> PAGE_SHIFT));  [in valid_mmap_phys_addr_range()]
    182  if (iomem_is_exclusive(pfn << PAGE_SHIFT))  [in devmem_is_allowed()]

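In both TLB flush routines the lsr/lsl pair by #PAGE_SHIFT rounds the start and end addresses down to a page boundary before the per-page invalidate loop; it is the shift form of masking with PAGE_MASK. An equivalence sketch in C (illustrative demo_ helpers, 4 KB pages assumed):

    #define DEMO_PAGE_SHIFT 12
    #define DEMO_PAGE_MASK  (~((1UL << DEMO_PAGE_SHIFT) - 1))

    unsigned long demo_page_align_shift(unsigned long addr)
    {
        return (addr >> DEMO_PAGE_SHIFT) << DEMO_PAGE_SHIFT;  /* lsr then lsl */
    }

    unsigned long demo_page_align_mask(unsigned long addr)
    {
        return addr & DEMO_PAGE_MASK;                         /* same result */
    }

mmap.c runs the same conversion the other way: pgoff << PAGE_SHIFT turns a file page offset back into a byte offset so shared mappings can keep the same cache colour modulo SHMLBA.
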
/arch/sparc/include/asm/

  io-unit.h:
     45  unsigned long bmap[(IOUNIT_DMA_SIZE >> (PAGE_SHIFT + 3)) / sizeof(unsigned long)];
     53  #define IOUNIT_BMAP1_END (IOUNIT_DMA_SIZE >> (PAGE_SHIFT + 1))
     55  #define IOUNIT_BMAP2_END IOUNIT_BMAP2_START + (IOUNIT_DMA_SIZE >> (PAGE_SHIFT + 2))
     57  #define IOUNIT_BMAPM_END ((IOUNIT_DMA_SIZE - IOUNIT_DVMA_SIZE) >> PAGE_SHIFT)

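The bmap[] array on line 45 is an allocation bitmap with one bit per DVMA page: IOUNIT_DMA_SIZE >> PAGE_SHIFT bits, where the extra +3 in the shift divides by 8 bits per byte and the sizeof(unsigned long) turns bytes into array elements. A worked instance (the 32 MB window and 4 KB page are assumed purely for illustration):

    bits     = 32 MB >> 12        = 8192   (one bit per 4 KB DVMA page)
    bytes    = 32 MB >> (12 + 3)  = 1024   bytes of bitmap
    elements = 1024 / sizeof(unsigned long) = 256 longs on 32-bit sparc
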
/arch/microblaze/include/asm/

  page.h:
     27  #define PAGE_SHIFT 16  [macro]
     29  #define PAGE_SHIFT 14  [macro]
     31  #define PAGE_SHIFT 12  [macro]
     33  #define PAGE_SIZE (ASM_CONST(1) << PAGE_SHIFT)
     38  #define PTE_SHIFT (PAGE_SHIFT - 2) /* 1024 ptes per page */
    153  # define virt_to_page(kaddr) (pfn_to_page(__pa(kaddr) >> PAGE_SHIFT))
    154  # define page_to_virt(page) __va(page_to_pfn(page) << PAGE_SHIFT)
    155  # define page_to_phys(page) (page_to_pfn(page) << PAGE_SHIFT)
    168  # define ARCH_PFN_OFFSET (PAGE_OFFSET >> PAGE_SHIFT)
    170  # define ARCH_PFN_OFFSET (memory_start >> PAGE_SHIFT)

/arch/x86/kernel/cpu/mtrr/

  amd.c:
     20  *base = (low & 0xFFFE0000) >> PAGE_SHIFT;  [in amd_get_mtrr()]
     46  *size = (low + 4) << (15 - PAGE_SHIFT);  [in amd_get_mtrr()]
     83  regs[reg] = (-size >> (15 - PAGE_SHIFT) & 0x0001FFFC)  [in amd_set_mtrr()]
     84  | (base << PAGE_SHIFT) | (type + 1);  [in amd_set_mtrr()]
    106  if (type > MTRR_TYPE_WRCOMB || size < (1 << (17 - PAGE_SHIFT))  [in amd_validate_add_page()]

  centaur.c:
     61  *base = centaur_mcr[reg].high >> PAGE_SHIFT;  [in centaur_get_mcr()]
     62  *size = -(centaur_mcr[reg].low & 0xfffff000) >> PAGE_SHIFT;  [in centaur_get_mcr()]
     83  high = base << PAGE_SHIFT;  [in centaur_set_mcr()]
     86  low = -size << PAGE_SHIFT | 0x1f;  [in centaur_set_mcr()]
     89  low = -size << PAGE_SHIFT | 0x02; /* NC */  [in centaur_set_mcr()]
     91  low = -size << PAGE_SHIFT | 0x09; /* WWO, WC */  [in centaur_set_mcr()]

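The check on line 106 of amd.c expresses the hardware's minimum region size in pages: with 4 KB pages, 1 << (17 - PAGE_SHIFT) is 32 pages, i.e. 128 KB, consistent with the 17-bit granularity implied by the 0xFFFE0000 base mask on line 20. Worked out (arithmetic from the lines above only):

    1 << (17 - 12) = 1 << 5 = 32 pages
    32 pages * 4 KB/page    = 128 KB  ->  regions smaller than 128 KB are rejected
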
/arch/powerpc/include/asm/

  page.h:
     23  #define PAGE_SHIFT CONFIG_PPC_PAGE_SHIFT  [macro]
     24  #define PAGE_SIZE (ASM_CONST(1) << PAGE_SHIFT)
     28  #define HPAGE_SHIFT PAGE_SHIFT
     39  #define HUGETLB_PAGE_ORDER (HPAGE_SHIFT - PAGE_SHIFT)
     48  #define PAGE_MASK (~((1 << PAGE_SHIFT) - 1))
    119  #define ARCH_PFN_OFFSET ((unsigned long)(MEMORY_START >> PAGE_SHIFT))
    131  #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)
    133  #define pfn_to_kaddr(pfn) __va((pfn) << PAGE_SHIFT)

/arch/powerpc/mm/book3s64/

  iommu_api.c:
     80  mem->pageshift = __ffs(dev_hpa | (entries << PAGE_SHIFT));  [in mm_iommu_do_alloc()]
     91  mem->pageshift = __ffs(ua | (entries << PAGE_SHIFT));  [in mm_iommu_do_alloc()]
    100  chunk = (1UL << (PAGE_SHIFT + MAX_ORDER - 1)) /  [in mm_iommu_do_alloc()]
    106  ret = get_user_pages(ua + (entry << PAGE_SHIFT), n,  [in mm_iommu_do_alloc()]
    134  if ((mem2->ua < (ua + (entries << PAGE_SHIFT))) &&  [in mm_iommu_do_alloc()]
    136  (mem2->entries << PAGE_SHIFT)))) {  [in mm_iommu_do_alloc()]
    149  pageshift = PAGE_SHIFT;  [in mm_iommu_do_alloc()]
    153  if ((mem->pageshift > PAGE_SHIFT) && PageHuge(page))  [in mm_iommu_do_alloc()]
    160  mem->hpas[i] = page_to_pfn(page) << PAGE_SHIFT;  [in mm_iommu_do_alloc()]
    214  page = pfn_to_page(mem->hpas[i] >> PAGE_SHIFT);  [in mm_iommu_unpin()]
    [all …]

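The two mem->pageshift assignments (lines 80 and 91) pick the largest page shift that both the base address and the total pinned length (entries << PAGE_SHIFT) are aligned to: __ffs of their bitwise OR is the position of the lowest set bit, i.e. the common power-of-two alignment. A user-space sketch of the same idea (hypothetical demo_ helper; __builtin_ctzl stands in for the kernel's __ffs):

    /* Largest shift S such that both addr and len are multiples of 1UL << S.
     * Assumes (addr | len) != 0, as the kernel's __ffs() does. */
    static inline unsigned int demo_common_alignment_shift(unsigned long addr,
                                                           unsigned long len)
    {
        return __builtin_ctzl(addr | len);   /* lowest set bit of either value */
    }

    /* Example: addr = 0x10200000 (2 MB aligned), len = 0x400000 (4 MB):
     * addr | len = 0x10600000, lowest set bit = 21, so 2 MB is the largest
     * page size this pinned region could be mapped with. */
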
/arch/csky/include/asm/

  page.h:
     13  #define PAGE_SHIFT 12  [macro]
     14  #define PAGE_SIZE (_AC(1, UL) << PAGE_SHIFT)
     18  #define THREAD_SHIFT (PAGE_SHIFT + 1)
     37  #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)
     38  #define pfn_to_virt(pfn) __va((pfn) << PAGE_SHIFT)
     50  #define page_to_phys(page) (page_to_pfn(page) << PAGE_SHIFT)

/arch/parisc/include/asm/

  page.h:
      8  # define PAGE_SHIFT 12  [macro]
     10  # define PAGE_SHIFT 14  [macro]
     12  # define PAGE_SHIFT 16  [macro]
     16  #define PAGE_SIZE (_AC(1,UL) << PAGE_SHIFT)
    158  #define HUGETLB_PAGE_ORDER (HPAGE_SHIFT - PAGE_SHIFT)
    172  #define virt_addr_valid(kaddr) pfn_valid(__pa(kaddr) >> PAGE_SHIFT)
    174  #define page_to_phys(page) (page_to_pfn(page) << PAGE_SHIFT)
    175  #define virt_to_page(kaddr) pfn_to_page(__pa(kaddr) >> PAGE_SHIFT)