/arch/unicore32/include/asm/ |
D | tlbflush.h |
     94  : : "r" (uaddr & PAGE_MASK) : "cc");  in local_flush_tlb_page()
     97  : : "r" (uaddr & PAGE_MASK) : "cc");  in local_flush_tlb_page()
    101  : : "r" (uaddr & PAGE_MASK) : "cc");  in local_flush_tlb_page()
    111  : : "r" (kaddr & PAGE_MASK) : "cc");  in local_flush_tlb_kernel_page()
    114  : : "r" (kaddr & PAGE_MASK) : "cc");  in local_flush_tlb_kernel_page()
    118  : : "r" (kaddr & PAGE_MASK) : "cc");  in local_flush_tlb_kernel_page()
|
/arch/sh/mm/ |
D | tlbflush_64.c |
     72  page &= PAGE_MASK;  in local_flush_tlb_page()
     94  start &= PAGE_MASK;  in local_flush_tlb_range()
     95  end &= PAGE_MASK;  in local_flush_tlb_range()
    105  pteh_epn = pteh & PAGE_MASK;  in local_flush_tlb_range()
    106  pteh_low = pteh & ~PAGE_MASK;  in local_flush_tlb_range()
    118  pteh_epn = pteh & PAGE_MASK;  in local_flush_tlb_range()
    119  pteh_low = pteh & ~PAGE_MASK;  in local_flush_tlb_range()
|
D | tlbflush_32.c |
     25  page &= PAGE_MASK;  in local_flush_tlb_page()
     60  start &= PAGE_MASK;  in local_flush_tlb_range()
     62  end &= PAGE_MASK;  in local_flush_tlb_range()
     93  start &= PAGE_MASK;  in local_flush_tlb_kernel_range()
     95  end &= PAGE_MASK;  in local_flush_tlb_kernel_range()
|
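The local_flush_tlb_page()/local_flush_tlb_range() hits above all trim their arguments with PAGE_MASK before touching the TLB. Below is a minimal userspace sketch of that idiom, assuming 4 KiB pages and a hypothetical flush_one_page() in place of the arch-specific flush; the real implementations differ in how they round the end address.

#include <stdio.h>

#define PAGE_SHIFT 12                              /* assumed 4 KiB pages */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define PAGE_MASK  (~(PAGE_SIZE - 1))              /* mirrors the usual kernel definition */

/* Hypothetical stand-in for the arch-specific single-page TLB flush. */
static void flush_one_page(unsigned long va)
{
        printf("flush page at %#lx\n", va);
}

/* Page-align [start, end) and flush it one page at a time, the same
 * shape as the local_flush_tlb_range() hits above. */
static void flush_range_sketch(unsigned long start, unsigned long end)
{
        start &= PAGE_MASK;                        /* round start down */
        end = (end + PAGE_SIZE - 1) & PAGE_MASK;   /* round end up */
        for (unsigned long va = start; va < end; va += PAGE_SIZE)
                flush_one_page(va);
}

int main(void)
{
        flush_range_sketch(0x12345, 0x15001);      /* 0x12000, 0x13000, 0x14000, 0x15000 */
        return 0;
}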
D | ioremap.c |
     67  offset = phys_addr & ~PAGE_MASK;  in __ioremap_caller()
     68  phys_addr &= PAGE_MASK;  in __ioremap_caller()
    129  p = remove_vm_area((void *)(vaddr & PAGE_MASK));  in __iounmap()
|
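The __ioremap_caller()/__iounmap() hits here, and the many __ioremap()/iounmap() hits further down (ia64, xtensa, cris, metag, parisc, m32r, arc, openrisc, microblaze), share one idiom: split the physical address into a page-aligned base plus an in-page offset, map whole pages, return the mapping plus the offset, and mask the offset back off before unmapping. A self-contained sketch under assumed 4 KiB pages, with aligned_alloc() standing in for the real page-granular remap primitive:

#include <stdint.h>
#include <stdlib.h>

#define PAGE_SHIFT 12                              /* assumed 4 KiB pages */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define PAGE_MASK  (~(PAGE_SIZE - 1))

/* Hypothetical stand-in for the arch's page-granular remap primitive;
 * it only hands back ordinary page-aligned memory so the sketch links. */
static void *map_pages(unsigned long phys_base, size_t size)
{
        (void)phys_base;
        return aligned_alloc(PAGE_SIZE, size);
}

/* Same shape as the __ioremap() hits: keep the in-page offset, map from
 * the page-aligned base, and give the caller back the mapping + offset. */
void *ioremap_sketch(unsigned long phys_addr, size_t size)
{
        unsigned long offset = phys_addr & ~PAGE_MASK;   /* offset inside the page */
        unsigned long base   = phys_addr & PAGE_MASK;    /* page-aligned start */
        size_t map_size      = (offset + size + PAGE_SIZE - 1) & PAGE_MASK;

        char *vaddr = map_pages(base, map_size);
        return vaddr ? vaddr + offset : NULL;
}

/* iounmap() side: mask the offset off again before freeing, exactly what
 * the vfree((void *)(PAGE_MASK & addr)) hits below do. */
void iounmap_sketch(void *addr)
{
        free((void *)((uintptr_t)addr & PAGE_MASK));
}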
/arch/m68k/mm/ |
D | cache.c |
     50  return (mmusr & PAGE_MASK) | (vaddr & ~PAGE_MASK);  in virt_to_phys_slow()
     68  return (*descaddr & PAGE_MASK) | (vaddr & ~PAGE_MASK);  in virt_to_phys_slow()
     88  address &= PAGE_MASK;  in flush_icache_range()
|
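virt_to_phys_slow() above composes the physical address from two halves: the page-frame bits come from the MMU status register or page descriptor, and the low bits come straight from the virtual address. The same `(frame & PAGE_MASK) | (vaddr & ~PAGE_MASK)` composition shows up later in microblaze's iopa(). A one-function sketch, assuming 4 KiB pages:

#define PAGE_SHIFT 12                              /* assumed 4 KiB pages */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define PAGE_MASK  (~(PAGE_SIZE - 1))

/* Physical address = frame bits taken from the descriptor, plus the
 * untranslated in-page offset copied through from the virtual address. */
unsigned long compose_phys(unsigned long descriptor, unsigned long vaddr)
{
        return (descriptor & PAGE_MASK) | (vaddr & ~PAGE_MASK);
}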
D | memory.c |
     41  unsigned long page = ptable & PAGE_MASK;  in init_pointer_table()
    103  unsigned long page = (unsigned long)ptable & PAGE_MASK;  in free_pointer_table()
    215  pushcl040(paddr & PAGE_MASK);  in cache_clear()
    221  paddr &= PAGE_MASK;  in cache_clear()
    269  paddr &= PAGE_MASK;  in cache_push()
|
/arch/microblaze/mm/ |
D | pgtable.c |
     64  p = addr & PAGE_MASK;  in __ioremap()
    121  return (void __iomem *) (v + ((unsigned long)addr & ~PAGE_MASK));  in __ioremap()
    134  vfree((void *) (PAGE_MASK & (unsigned long) addr));  in iounmap()
    199  pgd = pgd_offset(mm, addr & PAGE_MASK);  in get_pteptr()
    201  pmd = pmd_offset(pgd, addr & PAGE_MASK);  in get_pteptr()
    203  pte = pte_offset_kernel(pmd, addr & PAGE_MASK);  in get_pteptr()
    233  pa = (pte_val(*pte) & PAGE_MASK) | (addr & ~PAGE_MASK);  in iopa()
|
/arch/score/mm/ |
D | tlb-score.c |
     95  start &= PAGE_MASK;  in local_flush_tlb_range()
     97  end &= PAGE_MASK;  in local_flush_tlb_range()
    133  start &= PAGE_MASK;  in local_flush_tlb_kernel_range()
    135  end &= PAGE_MASK;  in local_flush_tlb_kernel_range()
    167  page &= PAGE_MASK;  in local_flush_tlb_page()
    197  page &= (PAGE_MASK << 1);  in local_flush_tlb_one()
    227  address &= PAGE_MASK;  in __update_tlb()
|
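Most hits above are the usual PAGE_MASK trims, but line 197 masks with `PAGE_MASK << 1`. The sketch below assumes that, as on MIPS, each score TLB entry covers an even/odd pair of pages, so the address written for a single-entry flush has to be aligned to twice the page size; that pairing is an inference, not something stated by the hit itself.

#define PAGE_SHIFT 12                              /* assumed 4 KiB pages */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define PAGE_MASK  (~(PAGE_SIZE - 1))

/* PAGE_MASK << 1 clears one extra low bit, i.e. it aligns to a
 * 2 * PAGE_SIZE boundary - assumed here to be the granularity of a
 * paired (even/odd) TLB entry. */
unsigned long tlb_pair_base(unsigned long vaddr)
{
        return vaddr & (PAGE_MASK << 1);
}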
/arch/mips/lib/ |
D | r3k_dump_tlb.c |
     49  if ((entryhi & PAGE_MASK) != KSEG0 &&  in dump_tlb()
     59  entryhi & PAGE_MASK,  in dump_tlb()
     61  entrylo0 & PAGE_MASK,  in dump_tlb()
|
/arch/m68k/kernel/ |
D | sys_m68k.c |
     63  _paddr = (_mmusr & MMU_R_040) ? (_mmusr & PAGE_MASK) : 0; \
    104  paddr += addr & ~(PAGE_MASK | 15);  in cache_flush_040()
    107  unsigned long tmp = PAGE_SIZE - (addr & ~PAGE_MASK);  in cache_flush_040()
    125  i = (PAGE_SIZE - (paddr & ~PAGE_MASK)) >> 4;  in cache_flush_040()
    180  len += (addr & ~PAGE_MASK) + (PAGE_SIZE - 1);  in cache_flush_040()
    268  unsigned long tmp = PAGE_SIZE - (addr & ~PAGE_MASK);  in cache_flush_060()
    286  i = (PAGE_SIZE - (paddr & ~PAGE_MASK)) >> 4;  in cache_flush_060()
    319  addr &= PAGE_MASK;  in cache_flush_060()
    341  len += (addr & ~PAGE_MASK) + (PAGE_SIZE - 1);  in cache_flush_060()
    342  addr &= PAGE_MASK; /* Workaround for bug in some  in cache_flush_060()
|
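The `len += (addr & ~PAGE_MASK) + (PAGE_SIZE - 1)` lines in cache_flush_040()/cache_flush_060() are the usual way of turning an arbitrary byte range into a count of pages it touches; the shift by PAGE_SHIFT that presumably follows is outside the excerpt. A small sketch, assuming 4 KiB pages:

#include <stdio.h>

#define PAGE_SHIFT 12                              /* assumed 4 KiB pages */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define PAGE_MASK  (~(PAGE_SIZE - 1))

/* Number of whole pages touched by [addr, addr + len). */
static unsigned long pages_spanned(unsigned long addr, unsigned long len)
{
        len += (addr & ~PAGE_MASK)                 /* bytes from the page start up to addr */
             + (PAGE_SIZE - 1);                    /* round the total up to full pages */
        return len >> PAGE_SHIFT;
}

int main(void)
{
        /* 1 byte at the end of a page: 1 page; 2 bytes crossing into the next: 2 pages */
        printf("%lu %lu\n", pages_spanned(0x1fff, 1), pages_spanned(0x1fff, 2));
        return 0;
}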
/arch/ia64/mm/ |
D | ioremap.c |
     71  page_base = phys_addr & PAGE_MASK;  in ioremap()
     79  offset = phys_addr & ~PAGE_MASK;  in ioremap()
     80  phys_addr &= PAGE_MASK;  in ioremap()
    123  vunmap((void *) ((unsigned long) addr & PAGE_MASK));  in iounmap()
|
/arch/sparc/mm/ |
D | iommu.c |
    159  start &= PAGE_MASK;  in iommu_flush_iotlb()
    215  off = (unsigned long)vaddr & ~PAGE_MASK;  in iommu_get_scsi_one()
    217  page = virt_to_page((unsigned long)vaddr & PAGE_MASK);  in iommu_get_scsi_one()
    230  unsigned long page = ((unsigned long) vaddr) & PAGE_MASK;  in iommu_get_scsi_one_pflush()
    305  off = vaddr & ~PAGE_MASK;  in iommu_release_scsi_one()
    307  iommu_release_one(dev, vaddr & PAGE_MASK, npages);  in iommu_release_scsi_one()
    318  iommu_release_one(dev, sg->dma_address & PAGE_MASK, n);  in iommu_release_scsi_sgl()
    334  BUG_ON((va & ~PAGE_MASK) != 0);  in iommu_map_dma_area()
    335  BUG_ON((addr & ~PAGE_MASK) != 0);  in iommu_map_dma_area()
    336  BUG_ON((len & ~PAGE_MASK) != 0);  in iommu_map_dma_area()
    [all …]
|
/arch/mips/mm/ |
D | tlb-r8k.c |
     87  start &= PAGE_MASK;  in local_flush_tlb_range()
     89  end &= PAGE_MASK;  in local_flush_tlb_range()
    127  start &= PAGE_MASK;  in local_flush_tlb_kernel_range()
    129  end &= PAGE_MASK;  in local_flush_tlb_kernel_range()
    159  page &= PAGE_MASK;  in local_flush_tlb_page()
    200  address &= PAGE_MASK;  in __update_tlb()
|
/arch/xtensa/mm/ |
D | ioremap.c |
     20  unsigned long offset = paddr & ~PAGE_MASK;  in xtensa_ioremap()
     26  paddr &= PAGE_MASK;  in xtensa_ioremap()
     64  void *addr = (void *)(PAGE_MASK & (unsigned long)io_addr);  in xtensa_iounmap()
|
/arch/cris/mm/ |
D | ioremap.c |
     45  offset = phys_addr & ~PAGE_MASK;  in __ioremap_prot()
     46  phys_addr &= PAGE_MASK;  in __ioremap_prot()
     89  return vfree((void *) (PAGE_MASK & (unsigned long) addr));  in iounmap()
|
/arch/powerpc/include/asm/nohash/32/ |
D | pte-fsl-booke.h |
     35  #define _PMD_PRESENT_MASK (PAGE_MASK)
     36  #define _PMD_BAD (~PAGE_MASK)
|
D | pte-44x.h |
     90  #define _PMD_PRESENT_MASK (PAGE_MASK)
     91  #define _PMD_BAD (~PAGE_MASK)
|
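pte-fsl-booke.h and pte-44x.h (and hash.h below) define the PMD sanity masks directly in terms of PAGE_MASK: on these 32-bit powerpc layouts a pmd entry is essentially a page-aligned pointer to the pte page. The helpers below are guessed consumers of those masks, not the actual powerpc pmd_present()/pmd_bad() definitions:

#include <stdbool.h>

#define PAGE_SHIFT 12                              /* assumed 4 KiB pages */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define PAGE_MASK  (~(PAGE_SIZE - 1))

#define _PMD_PRESENT_MASK (PAGE_MASK)              /* frame bits of the pte-page pointer */
#define _PMD_BAD          (~PAGE_MASK)             /* stray bits below the page boundary */

/* Guessed shape of the checks built on these masks: "present" means the
 * entry carries some frame bits, "bad" means low bits leaked into it. */
static inline bool sketch_pmd_present(unsigned long pmd_val)
{
        return (pmd_val & _PMD_PRESENT_MASK) != 0;
}

static inline bool sketch_pmd_bad(unsigned long pmd_val)
{
        return (pmd_val & _PMD_BAD) != 0;
}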
/arch/microblaze/pci/ |
D | indirect_pci.c |
    153  resource_size_t base = cfg_addr & PAGE_MASK;  in setup_indirect_pci()
    157  hose->cfg_addr = mbase + (cfg_addr & ~PAGE_MASK);  in setup_indirect_pci()
    158  if ((cfg_data & PAGE_MASK) != base)  in setup_indirect_pci()
    159  mbase = ioremap(cfg_data & PAGE_MASK, PAGE_SIZE);  in setup_indirect_pci()
    160  hose->cfg_data = mbase + (cfg_data & ~PAGE_MASK);  in setup_indirect_pci()
|
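setup_indirect_pci() above maps the page holding the config-address register and reuses that mapping for the config-data register whenever the two live in the same physical page, mapping a second page only otherwise. A self-contained sketch of the same logic, with a stub standing in for ioremap():

#include <stdlib.h>

#define PAGE_SHIFT 12                              /* assumed 4 KiB pages */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define PAGE_MASK  (~(PAGE_SIZE - 1))

/* Hypothetical stand-in for ioremap(addr, PAGE_SIZE); ordinary memory here. */
static void *map_one_page(unsigned long phys_page)
{
        (void)phys_page;
        return aligned_alloc(PAGE_SIZE, PAGE_SIZE);
}

/* Map the page containing cfg_addr; map cfg_data's page only if it is a
 * different page, otherwise point into the mapping we already have. */
void map_cfg_regs(unsigned long cfg_addr, unsigned long cfg_data,
                  void **addr_reg, void **data_reg)
{
        unsigned long base = cfg_addr & PAGE_MASK;
        char *mbase = map_one_page(base);

        *addr_reg = mbase + (cfg_addr & ~PAGE_MASK);
        if ((cfg_data & PAGE_MASK) != base)
                mbase = map_one_page(cfg_data & PAGE_MASK);
        *data_reg = mbase + (cfg_data & ~PAGE_MASK);
}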
/arch/powerpc/include/asm/book3s/32/ |
D | hash.h |
     40  #define _PMD_PRESENT_MASK (PAGE_MASK)
     41  #define _PMD_BAD (~PAGE_MASK)
|
/arch/metag/mm/ |
D | ioremap.c |
     50  offset = phys_addr & ~PAGE_MASK;  in __ioremap()
     51  phys_addr &= PAGE_MASK;  in __ioremap()
     82  p = remove_vm_area((void *)(PAGE_MASK & (unsigned long __force)addr));  in __iounmap()
|
/arch/s390/pci/ |
D | pci_mmio.c |
     47  if (length <= 0 || PAGE_SIZE - (mmio_addr & ~PAGE_MASK) < length)  in SYSCALL_DEFINE3()
     59  io_addr = (void __iomem *)((pfn << PAGE_SHIFT) | (mmio_addr & ~PAGE_MASK));  in SYSCALL_DEFINE3()
     87  if (length <= 0 || PAGE_SIZE - (mmio_addr & ~PAGE_MASK) < length)  in SYSCALL_DEFINE3()
     99  io_addr = (void __iomem *)((pfn << PAGE_SHIFT) | (mmio_addr & ~PAGE_MASK));  in SYSCALL_DEFINE3()
|
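The SYSCALL_DEFINE3() hits resolve a single pfn for mmio_addr, so they refuse any request that would spill past that page: the room left in the page is PAGE_SIZE minus the in-page offset. A sketch of just that check, assuming 4 KiB pages:

#include <stdbool.h>

#define PAGE_SHIFT 12                              /* assumed 4 KiB pages */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define PAGE_MASK  (~(PAGE_SIZE - 1))

/* True when [addr, addr + length) fits entirely inside one page, the
 * condition the pci_mmio read/write syscalls insist on above. */
bool fits_in_one_page(unsigned long addr, long length)
{
        if (length <= 0)
                return false;
        return PAGE_SIZE - (addr & ~PAGE_MASK) >= (unsigned long)length;
}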
/arch/parisc/mm/ |
D | ioremap.c |
     73  offset = phys_addr & ~PAGE_MASK;  in __ioremap()
     74  phys_addr &= PAGE_MASK;  in __ioremap()
     97  unsigned long addr = (unsigned long)io_addr & PAGE_MASK;  in iounmap()
|
/arch/m32r/mm/ |
D | ioremap.c |
     83  offset = phys_addr & ~PAGE_MASK;  in __ioremap()
     84  phys_addr &= PAGE_MASK;  in __ioremap()
    109  vfree((void *) (PAGE_MASK & (unsigned long) addr));  in iounmap()
|
/arch/arc/mm/ |
D | ioremap.c |
     77  off = paddr & ~PAGE_MASK;  in ioremap_prot()
     78  paddr &= PAGE_MASK;  in ioremap_prot()
    104  vfree((void *)(PAGE_MASK & (unsigned long __force)addr));  in iounmap()
|
/arch/openrisc/mm/ |
D | ioremap.c |
     57  offset = addr & ~PAGE_MASK;  in __ioremap()
     58  p = addr & PAGE_MASK;  in __ioremap()
    108  return vfree((void *)(PAGE_MASK & (unsigned long)addr));  in iounmap()
|