
Searched refs:vaddr (Results 1 – 25 of 287) sorted by relevance


/arch/m68k/include/asm/
bitops.h
31 static inline void bset_reg_set_bit(int nr, volatile unsigned long *vaddr) in bset_reg_set_bit() argument
33 char *p = (char *)vaddr + (nr ^ 31) / 8; in bset_reg_set_bit()
41 static inline void bset_mem_set_bit(int nr, volatile unsigned long *vaddr) in bset_mem_set_bit() argument
43 char *p = (char *)vaddr + (nr ^ 31) / 8; in bset_mem_set_bit()
50 static inline void bfset_mem_set_bit(int nr, volatile unsigned long *vaddr) in bfset_mem_set_bit() argument
54 : "d" (nr ^ 31), "o" (*vaddr) in bfset_mem_set_bit()
59 #define set_bit(nr, vaddr) bset_reg_set_bit(nr, vaddr) argument
61 #define set_bit(nr, vaddr) bset_mem_set_bit(nr, vaddr) argument
63 #define set_bit(nr, vaddr) (__builtin_constant_p(nr) ? \ argument
64 bset_mem_set_bit(nr, vaddr) : \
[all …]
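The variants above differ only in which m68k instruction form they use (register BSET, memory BSET, or BFSET); set_bit() picks one per CPU family, and by whether nr is a compile-time constant. What they share is the (nr ^ 31) / 8 byte-addressing trick: bit nr of a 32-bit big-endian word lives in byte (nr ^ 31) / 8, at position nr & 7 within that byte. A minimal userspace sketch of just that arithmetic (plain, non-atomic C standing in for the BSET asm):

#include <assert.h>

/* non-atomic stand-in for the real inline asm; arithmetic only */
static void sketch_set_bit(int nr, volatile unsigned long *vaddr)
{
	char *p = (char *)vaddr + ((nr ^ 31) / 8);	/* byte holding bit nr */
	*p |= 1 << (nr & 7);				/* bit position inside that byte */
}

int main(void)
{
	unsigned char bytes[4] = { 0, 0, 0, 0 };

	sketch_set_bit(0, (volatile unsigned long *)bytes);
	assert(bytes[3] == 0x01);	/* bit 0 lands in the big-endian LSB byte */

	sketch_set_bit(8, (volatile unsigned long *)bytes);
	assert(bytes[2] == 0x01);	/* bit 8 lands one byte up */
	return 0;
}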
/arch/csky/mm/
highmem.c
24 unsigned long vaddr; in kmap_atomic_high_prot() local
29 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); in kmap_atomic_high_prot()
34 flush_tlb_one((unsigned long)vaddr); in kmap_atomic_high_prot()
36 return (void *)vaddr; in kmap_atomic_high_prot()
42 unsigned long vaddr = (unsigned long) kvaddr & PAGE_MASK; in kunmap_atomic_high() local
45 if (vaddr < FIXADDR_START) in kunmap_atomic_high()
51 BUG_ON(vaddr != __fix_to_virt(FIX_KMAP_BEGIN + idx)); in kunmap_atomic_high()
53 pte_clear(&init_mm, vaddr, kmap_pte - idx); in kunmap_atomic_high()
54 flush_tlb_one(vaddr); in kunmap_atomic_high()
68 unsigned long vaddr; in kmap_atomic_pfn() local
[all …]
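This kmap_atomic_high_prot()/kunmap_atomic_high() pair recurs almost verbatim under arm, mips, sparc, microblaze, powerpc and x86 below; the common core is the fixmap slot arithmetic, where each index maps to a fixed kernel virtual page counted downwards from FIXADDR_TOP. A small sketch of that arithmetic and its inverse, using the usual __fix_to_virt()/__virt_to_fix() formulas with illustrative constants rather than csky's real ones:

#include <stdio.h>

#define PAGE_SHIFT	12
#define FIXADDR_TOP	0xfffff000UL	/* hypothetical top of the fixmap area */

/* mirrors __fix_to_virt(): slot 0 sits at the top, later slots one page below */
static unsigned long fix_to_virt(unsigned long idx)
{
	return FIXADDR_TOP - (idx << PAGE_SHIFT);
}

/* mirrors __virt_to_fix(): used on the kunmap side to recover the slot */
static unsigned long virt_to_fix(unsigned long vaddr)
{
	return (FIXADDR_TOP - vaddr) >> PAGE_SHIFT;
}

int main(void)
{
	for (unsigned long idx = 0; idx < 3; idx++) {
		unsigned long va = fix_to_virt(idx);
		printf("slot %lu -> %#lx -> slot %lu\n", idx, va, virt_to_fix(va));
	}
	return 0;
}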
/arch/arm/mm/
cache-xsc3l2.c
88 unsigned long vaddr; in xsc3_l2_inv_range() local
95 vaddr = -1; /* to force the first mapping */ in xsc3_l2_inv_range()
101 vaddr = l2_map_va(start & ~(CACHE_LINE_SIZE - 1), vaddr); in xsc3_l2_inv_range()
102 xsc3_l2_clean_mva(vaddr); in xsc3_l2_inv_range()
103 xsc3_l2_inv_mva(vaddr); in xsc3_l2_inv_range()
111 vaddr = l2_map_va(start, vaddr); in xsc3_l2_inv_range()
112 xsc3_l2_inv_mva(vaddr); in xsc3_l2_inv_range()
120 vaddr = l2_map_va(start, vaddr); in xsc3_l2_inv_range()
121 xsc3_l2_clean_mva(vaddr); in xsc3_l2_inv_range()
122 xsc3_l2_inv_mva(vaddr); in xsc3_l2_inv_range()
[all …]
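xsc3_l2_inv_range() treats the two ends of the range specially: a cache line only partially covered by [start, end) may still hold live data from a neighbouring buffer, so it is cleaned (written back) before being invalidated, while fully covered lines are invalidated outright. A sketch of just that range arithmetic, with printf standing in for l2_map_va() and the real cache operations:

#include <stdio.h>

#define CACHE_LINE_SIZE 32UL

static void inv_range_sketch(unsigned long start, unsigned long end)
{
	if (start & (CACHE_LINE_SIZE - 1)) {	/* partial line at the head */
		printf("clean+inv %#lx\n", start & ~(CACHE_LINE_SIZE - 1));
		start = (start | (CACHE_LINE_SIZE - 1)) + 1;
	}
	while (start < (end & ~(CACHE_LINE_SIZE - 1))) {	/* whole lines */
		printf("inv       %#lx\n", start);
		start += CACHE_LINE_SIZE;
	}
	if (start < end)			/* partial line at the tail */
		printf("clean+inv %#lx\n", start);
}

int main(void)
{
	inv_range_sketch(0x1010, 0x10f0);
	return 0;
}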
highmem.c
20 unsigned long vaddr = __fix_to_virt(idx); in set_fixmap_pte() local
21 pte_t *ptep = virt_to_kpte(vaddr); in set_fixmap_pte()
24 local_flush_tlb_kernel_page(vaddr); in set_fixmap_pte()
27 static inline pte_t get_fixmap_pte(unsigned long vaddr) in get_fixmap_pte() argument
29 pte_t *ptep = virt_to_kpte(vaddr); in get_fixmap_pte()
37 unsigned long vaddr; in kmap_atomic_high_prot() local
57 vaddr = __fix_to_virt(idx); in kmap_atomic_high_prot()
63 BUG_ON(!pte_none(get_fixmap_pte(vaddr))); in kmap_atomic_high_prot()
72 return (void *)vaddr; in kmap_atomic_high_prot()
78 unsigned long vaddr = (unsigned long) kvaddr & PAGE_MASK; in kunmap_atomic_high() local
[all …]
/arch/parisc/kernel/
pci-dma.c
77 unsigned long vaddr, in map_pte_uncached() argument
81 unsigned long orig_vaddr = vaddr; in map_pte_uncached()
83 vaddr &= ~PMD_MASK; in map_pte_uncached()
84 end = vaddr + size; in map_pte_uncached()
96 vaddr += PAGE_SIZE; in map_pte_uncached()
100 } while (vaddr < end); in map_pte_uncached()
104 static inline int map_pmd_uncached(pmd_t * pmd, unsigned long vaddr, in map_pmd_uncached() argument
108 unsigned long orig_vaddr = vaddr; in map_pmd_uncached()
110 vaddr &= ~PGDIR_MASK; in map_pmd_uncached()
111 end = vaddr + size; in map_pmd_uncached()
[all …]
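map_pte_uncached() is the innermost level of a classic page-table walk: the address is first reduced to its offset within the current PMD, the end is clamped so the loop never runs past the PMD boundary, and pages are visited one PAGE_SIZE step at a time. A sketch of that clamped walk with illustrative shift values (parisc's real geometry differs):

#include <stdio.h>

#define PAGE_SIZE	4096UL
#define PMD_SHIFT	22			/* illustrative */
#define PMD_SIZE	(1UL << PMD_SHIFT)
#define PMD_MASK	(~(PMD_SIZE - 1))

static void walk_one_pmd(unsigned long vaddr, unsigned long size)
{
	unsigned long end;

	vaddr &= ~PMD_MASK;		/* offset within this PMD */
	end = vaddr + size;
	if (end > PMD_SIZE)		/* never walk past the PMD boundary */
		end = PMD_SIZE;
	while (vaddr < end) {
		printf("visit pte at offset %#lx\n", vaddr);
		vaddr += PAGE_SIZE;
	}
}

int main(void)
{
	walk_one_pmd(0x3fe000UL, 4 * PAGE_SIZE);	/* clamped after two pages */
	return 0;
}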
/arch/parisc/mm/
fixmap.c
15 unsigned long vaddr = __fix_to_virt(idx); in set_fixmap() local
16 pgd_t *pgd = pgd_offset_k(vaddr); in set_fixmap()
17 p4d_t *p4d = p4d_offset(pgd, vaddr); in set_fixmap()
18 pud_t *pud = pud_offset(p4d, vaddr); in set_fixmap()
19 pmd_t *pmd = pmd_offset(pud, vaddr); in set_fixmap()
23 pte = pte_alloc_kernel(pmd, vaddr); in set_fixmap()
25 pte = pte_offset_kernel(pmd, vaddr); in set_fixmap()
26 set_pte_at(&init_mm, vaddr, pte, __mk_pte(phys, PAGE_KERNEL_RWX)); in set_fixmap()
27 flush_tlb_kernel_range(vaddr, vaddr + PAGE_SIZE); in set_fixmap()
32 unsigned long vaddr = __fix_to_virt(idx); in clear_fixmap() local
[all …]
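set_fixmap() spells out the full five-level descent (pgd, p4d, pud, pmd, pte), allocating the PTE page on first use. Each *_offset() step just extracts one bit-field of the virtual address as an index into the current table. A sketch of that decomposition, assuming a uniform 9-bits-per-level layout over 4 KiB pages (illustrative, not parisc's real geometry):

#include <stdio.h>

#define PAGE_SHIFT	12
#define BITS_PER_LEVEL	9

/* level 0 = pte index, 1 = pmd, 2 = pud, 3 = p4d, 4 = pgd */
static unsigned int level_index(unsigned long vaddr, int level)
{
	return (vaddr >> (PAGE_SHIFT + level * BITS_PER_LEVEL))
		& ((1u << BITS_PER_LEVEL) - 1);
}

int main(void)
{
	unsigned long vaddr = 0x7f1234567000UL;
	const char *name[] = { "pte", "pmd", "pud", "p4d", "pgd" };

	for (int level = 4; level >= 0; level--)
		printf("%s index = %u\n", name[level], level_index(vaddr, level));
	return 0;
}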
/arch/m68k/sun3/
dvma.c
23 static unsigned long dvma_page(unsigned long kaddr, unsigned long vaddr) in dvma_page() argument
35 if(ptelist[(vaddr & 0xff000) >> PAGE_SHIFT] != pte) { in dvma_page()
36 sun3_put_pte(vaddr, pte); in dvma_page()
37 ptelist[(vaddr & 0xff000) >> PAGE_SHIFT] = pte; in dvma_page()
40 return (vaddr + (kaddr & ~PAGE_MASK)); in dvma_page()
49 unsigned long vaddr; in dvma_map_iommu() local
51 vaddr = dvma_btov(baddr); in dvma_map_iommu()
53 end = vaddr + len; in dvma_map_iommu()
55 while(vaddr < end) { in dvma_map_iommu()
56 dvma_page(kaddr, vaddr); in dvma_map_iommu()
[all …]
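dvma_page() keeps a small shadow array of the PTEs it has installed, rewriting an entry only when it changes, and its return value shows the usual translation idiom: swap in the DVMA virtual page while carrying the byte offset of the original kernel address over unchanged. A sketch of that last step:

#include <assert.h>

#define PAGE_SHIFT	12
#define PAGE_SIZE	(1UL << PAGE_SHIFT)
#define PAGE_MASK	(~(PAGE_SIZE - 1))

/* mirrors "return vaddr + (kaddr & ~PAGE_MASK)" at the end of dvma_page();
 * masking vaddr here just makes the page/offset split explicit */
static unsigned long dvma_translate(unsigned long kaddr, unsigned long vaddr)
{
	return (vaddr & PAGE_MASK) + (kaddr & ~PAGE_MASK);
}

int main(void)
{
	/* an address 0x123 bytes into its page keeps that offset in the window */
	assert(dvma_translate(0x00345123UL, 0x0fe00000UL) == 0x0fe00123UL);
	return 0;
}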
/arch/nds32/mm/
highmem.c
16 unsigned long vaddr, pte; in kmap_atomic_high_prot() local
23 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); in kmap_atomic_high_prot()
25 ptep = pte_offset_kernel(pmd_off_k(vaddr), vaddr); in kmap_atomic_high_prot()
28 __nds32__tlbop_inv(vaddr); in kmap_atomic_high_prot()
29 __nds32__mtsr_dsb(vaddr, NDS32_SR_TLB_VPN); in kmap_atomic_high_prot()
32 return (void *)vaddr; in kmap_atomic_high_prot()
39 unsigned long vaddr = (unsigned long)kvaddr; in kunmap_atomic_high() local
42 __nds32__tlbop_inv(vaddr); in kunmap_atomic_high()
44 ptep = pte_offset_kernel(pmd_off_k(vaddr), vaddr); in kunmap_atomic_high()
cacheflush.c
177 void copy_user_page(void *vto, void *vfrom, unsigned long vaddr, in copy_user_page() argument
180 cpu_dcache_wbinval_page((unsigned long)vaddr); in copy_user_page()
181 cpu_icache_inval_page((unsigned long)vaddr); in copy_user_page()
187 void clear_user_page(void *addr, unsigned long vaddr, struct page *page) in clear_user_page() argument
189 cpu_dcache_wbinval_page((unsigned long)vaddr); in clear_user_page()
190 cpu_icache_inval_page((unsigned long)vaddr); in clear_user_page()
197 unsigned long vaddr, struct vm_area_struct *vma) in copy_user_highpage() argument
206 if (aliasing(vaddr, (unsigned long)kfrom)) in copy_user_highpage()
208 vto = kremap0(vaddr, pto); in copy_user_highpage()
209 vfrom = kremap1(vaddr, pfrom); in copy_user_highpage()
[all …]
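copy_user_highpage() only goes through the kremap0()/kremap1() detour when the user address and the kernel mapping could collide in the virtually indexed cache. One common formulation of such an aliasing test is to compare the "colour" bits just above PAGE_SHIFT; the sketch below uses a hypothetical 3-bit colour mask and a made-up may_alias() name, since nds32 hides the real test behind its aliasing() helper:

#include <stdio.h>

#define PAGE_SHIFT	12
#define COLOUR_MASK	(0x7UL << PAGE_SHIFT)	/* hypothetical: 3 colour bits */

/* two mappings of the same page can alias when their colour bits disagree */
static int may_alias(unsigned long va1, unsigned long va2)
{
	return ((va1 ^ va2) & COLOUR_MASK) != 0;
}

int main(void)
{
	printf("%d\n", may_alias(0x10001000UL, 0x20001000UL));	/* same colour: 0 */
	printf("%d\n", may_alias(0x10001000UL, 0x20003000UL));	/* different:  1 */
	return 0;
}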
/arch/sh/mm/
kmap.c
21 unsigned long vaddr; in kmap_coherent_init() local
24 vaddr = __fix_to_virt(FIX_CMAP_BEGIN); in kmap_coherent_init()
25 kmap_coherent_pte = virt_to_kpte(vaddr); in kmap_coherent_init()
31 unsigned long vaddr; in kmap_coherent() local
42 vaddr = __fix_to_virt(idx); in kmap_coherent()
47 return (void *)vaddr; in kmap_coherent()
53 unsigned long vaddr = (unsigned long)kvaddr & PAGE_MASK; in kunmap_coherent() local
54 enum fixed_addresses idx = __virt_to_fix(vaddr); in kunmap_coherent()
57 __flush_purge_region((void *)vaddr, PAGE_SIZE); in kunmap_coherent()
59 pte_clear(&init_mm, vaddr, kmap_coherent_pte - idx); in kunmap_coherent()
[all …]
/arch/mips/mm/
highmem.c
23 unsigned long vaddr; in kmap_atomic_high_prot() local
28 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); in kmap_atomic_high_prot()
33 local_flush_tlb_one((unsigned long)vaddr); in kmap_atomic_high_prot()
35 return (void*) vaddr; in kmap_atomic_high_prot()
41 unsigned long vaddr = (unsigned long) kvaddr & PAGE_MASK; in kunmap_atomic_high() local
44 if (vaddr < FIXADDR_START) in kunmap_atomic_high()
52 BUG_ON(vaddr != __fix_to_virt(FIX_KMAP_BEGIN + idx)); in kunmap_atomic_high()
58 pte_clear(&init_mm, vaddr, kmap_pte-idx); in kunmap_atomic_high()
59 local_flush_tlb_one(vaddr); in kunmap_atomic_high()
72 unsigned long vaddr; in kmap_atomic_pfn() local
[all …]
pgtable-32.c
54 unsigned long vaddr; in pagetable_init() local
74 vaddr = __fix_to_virt(__end_of_fixed_addresses - 1); in pagetable_init()
75 fixrange_init(vaddr & PMD_MASK, vaddr + FIXADDR_SIZE, pgd_base); in pagetable_init()
81 vaddr = PKMAP_BASE; in pagetable_init()
82 fixrange_init(vaddr & PMD_MASK, vaddr + PAGE_SIZE*LAST_PKMAP, pgd_base); in pagetable_init()
84 pgd = swapper_pg_dir + pgd_index(vaddr); in pagetable_init()
85 p4d = p4d_offset(pgd, vaddr); in pagetable_init()
86 pud = pud_offset(p4d, vaddr); in pagetable_init()
87 pmd = pmd_offset(pud, vaddr); in pagetable_init()
88 pte = pte_offset_kernel(pmd, vaddr); in pagetable_init()
/arch/sparc/mm/
highmem.c
46 unsigned long vaddr; in kmap_atomic_high_prot() local
51 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); in kmap_atomic_high_prot()
55 __flush_cache_one(vaddr); in kmap_atomic_high_prot()
66 __flush_tlb_one(vaddr); in kmap_atomic_high_prot()
71 return (void*) vaddr; in kmap_atomic_high_prot()
77 unsigned long vaddr = (unsigned long) kvaddr & PAGE_MASK; in kunmap_atomic_high() local
80 if (vaddr < FIXADDR_START) in kunmap_atomic_high()
90 BUG_ON(vaddr != __fix_to_virt(FIX_KMAP_BEGIN+idx)); in kunmap_atomic_high()
94 __flush_cache_one(vaddr); in kunmap_atomic_high()
103 pte_clear(&init_mm, vaddr, kmap_pte-idx); in kunmap_atomic_high()
[all …]
io-unit.c
96 static unsigned long iounit_get_area(struct iounit_struct *iounit, unsigned long vaddr, int size) in iounit_get_area() argument
102 npages = ((vaddr & ~PAGE_MASK) + size + (PAGE_SIZE-1)) >> PAGE_SHIFT; in iounit_get_area()
111 IOD(("iounit_get_area(%08lx,%d[%d])=", vaddr, size, npages)); in iounit_get_area()
126 panic("iounit_get_area: Couldn't find free iopte slots for (%08lx,%d)\n", vaddr, size); in iounit_get_area()
134 iopte = MKIOPTE(__pa(vaddr & PAGE_MASK)); in iounit_get_area()
135 vaddr = IOUNIT_DMA_BASE + (scan << PAGE_SHIFT) + (vaddr & ~PAGE_MASK); in iounit_get_area()
140 IOD(("%08lx\n", vaddr)); in iounit_get_area()
141 return vaddr; in iounit_get_area()
148 void *vaddr = page_address(page) + offset; in iounit_map_page() local
157 ret = iounit_get_area(iounit, (unsigned long)vaddr, len); in iounit_map_page()
[all …]
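The npages computation in iounit_get_area() is worth pausing on: the number of IOMMU pages a buffer occupies depends on its offset within its first page, so the offset is added back in before rounding up. The sketch below reproduces just that line:

#include <stdio.h>

#define PAGE_SHIFT	12
#define PAGE_SIZE	(1UL << PAGE_SHIFT)
#define PAGE_MASK	(~(PAGE_SIZE - 1))

/* mirrors the npages line in iounit_get_area() */
static unsigned long npages(unsigned long vaddr, unsigned long size)
{
	return ((vaddr & ~PAGE_MASK) + size + (PAGE_SIZE - 1)) >> PAGE_SHIFT;
}

int main(void)
{
	/* 8 KiB starting 8 bytes before a page boundary spans 3 pages, not 2 */
	printf("%lu\n", npages(0x1ff8UL, 8192));
	return 0;
}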
/arch/m68k/sun3x/
dvma.c
79 unsigned long vaddr, int len) in dvma_map_cpu() argument
88 vaddr &= PAGE_MASK; in dvma_map_cpu()
90 end = PAGE_ALIGN(vaddr + len); in dvma_map_cpu()
92 pr_debug("dvma: mapping kern %08lx to virt %08lx\n", kaddr, vaddr); in dvma_map_cpu()
93 pgd = pgd_offset_k(vaddr); in dvma_map_cpu()
94 p4d = p4d_offset(pgd, vaddr); in dvma_map_cpu()
95 pud = pud_offset(p4d, vaddr); in dvma_map_cpu()
101 if((pmd = pmd_alloc(&init_mm, pud, vaddr)) == NULL) { in dvma_map_cpu()
106 if((end & PGDIR_MASK) > (vaddr & PGDIR_MASK)) in dvma_map_cpu()
107 end2 = (vaddr + (PGDIR_SIZE-1)) & PGDIR_MASK; in dvma_map_cpu()
[all …]
/arch/microblaze/mm/
highmem.c
38 unsigned long vaddr; in kmap_atomic_high_prot() local
43 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); in kmap_atomic_high_prot()
47 set_pte_at(&init_mm, vaddr, kmap_pte-idx, mk_pte(page, prot)); in kmap_atomic_high_prot()
48 local_flush_tlb_page(NULL, vaddr); in kmap_atomic_high_prot()
50 return (void *) vaddr; in kmap_atomic_high_prot()
56 unsigned long vaddr = (unsigned long) kvaddr & PAGE_MASK; in kunmap_atomic_high() local
60 if (vaddr < __fix_to_virt(FIX_KMAP_END)) in kunmap_atomic_high()
67 BUG_ON(vaddr != __fix_to_virt(FIX_KMAP_BEGIN + idx)); in kunmap_atomic_high()
73 pte_clear(&init_mm, vaddr, kmap_pte-idx); in kunmap_atomic_high()
74 local_flush_tlb_page(NULL, vaddr); in kunmap_atomic_high()
/arch/powerpc/mm/
highmem.c
29 unsigned long vaddr; in kmap_atomic_high_prot() local
34 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); in kmap_atomic_high_prot()
36 __set_pte_at(&init_mm, vaddr, kmap_pte-idx, mk_pte(page, prot), 1); in kmap_atomic_high_prot()
37 local_flush_tlb_page(NULL, vaddr); in kmap_atomic_high_prot()
39 return (void*) vaddr; in kmap_atomic_high_prot()
45 unsigned long vaddr = (unsigned long) kvaddr & PAGE_MASK; in kunmap_atomic_high() local
47 if (vaddr < __fix_to_virt(FIX_KMAP_END)) in kunmap_atomic_high()
55 WARN_ON(vaddr != __fix_to_virt(FIX_KMAP_BEGIN + idx)); in kunmap_atomic_high()
61 pte_clear(&init_mm, vaddr, kmap_pte-idx); in kunmap_atomic_high()
62 local_flush_tlb_page(NULL, vaddr); in kunmap_atomic_high()
/arch/sparc/include/asm/
viking.h
214 static inline unsigned long viking_hwprobe(unsigned long vaddr) in viking_hwprobe() argument
218 vaddr &= PAGE_MASK; in viking_hwprobe()
222 : "r" (vaddr | 0x400), "i" (ASI_M_FLUSH_PROBE)); in viking_hwprobe()
229 : "r" (vaddr | 0x200), "i" (ASI_M_FLUSH_PROBE)); in viking_hwprobe()
231 vaddr &= ~PGDIR_MASK; in viking_hwprobe()
232 vaddr >>= PAGE_SHIFT; in viking_hwprobe()
233 return val | (vaddr << 8); in viking_hwprobe()
239 : "r" (vaddr | 0x100), "i" (ASI_M_FLUSH_PROBE)); in viking_hwprobe()
241 vaddr &= ~PMD_MASK; in viking_hwprobe()
242 vaddr >>= PAGE_SHIFT; in viking_hwprobe()
[all …]
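viking_hwprobe() probes the SRMMU at region, segment and page granularity (the 0x400/0x200/0x100 modifiers on the probe address). When the match is a whole-region entry, the tail of the function synthesizes a page-granular PTE by inserting the page index within the region into the PPN field, which starts at bit 8 on SRMMU. A sketch of that fixup, with a 16 MiB region size as an illustrative assumption:

#include <stdio.h>

#define PAGE_SHIFT	12
#define PGDIR_MASK	(~((1UL << 24) - 1))	/* assuming 16 MiB regions */

static unsigned long fixup_region_pte(unsigned long val, unsigned long vaddr)
{
	vaddr &= ~PGDIR_MASK;		/* offset within the region */
	vaddr >>= PAGE_SHIFT;		/* page index within the region */
	return val | (vaddr << 8);	/* splice the index into the PPN field */
}

int main(void)
{
	/* page 3 of its region shows up as 3 << 8 in the synthesized PTE */
	printf("%#lx\n", fixup_region_pte(0x100000feUL, 0x40003000UL));
	return 0;
}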
/arch/x86/mm/
mem_encrypt.c
122 static void __init __sme_early_map_unmap_mem(void *vaddr, unsigned long size, in __sme_early_map_unmap_mem() argument
125 unsigned long paddr = (unsigned long)vaddr - __PAGE_OFFSET; in __sme_early_map_unmap_mem()
133 __early_make_pgtable((unsigned long)vaddr, pmd); in __sme_early_map_unmap_mem()
135 vaddr += PMD_SIZE; in __sme_early_map_unmap_mem()
256 static int __init early_set_memory_enc_dec(unsigned long vaddr, in early_set_memory_enc_dec() argument
265 vaddr_next = vaddr; in early_set_memory_enc_dec()
266 vaddr_end = vaddr + size; in early_set_memory_enc_dec()
268 for (; vaddr < vaddr_end; vaddr = vaddr_next) { in early_set_memory_enc_dec()
269 kpte = lookup_address(vaddr, &level); in early_set_memory_enc_dec()
277 vaddr_next = (vaddr & PAGE_MASK) + PAGE_SIZE; in early_set_memory_enc_dec()
[all …]
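early_set_memory_enc_dec() cannot assume a uniform page size: lookup_address() may return a PTE-, PMD- or PUD-level mapping, and the walk must advance by whatever that mapping covers. A sketch of the level-aware stepping for the two smaller cases, with stand-in level names:

#include <stdio.h>

#define PAGE_SIZE	(1UL << 12)
#define PAGE_MASK	(~(PAGE_SIZE - 1))
#define PMD_SIZE	(1UL << 21)		/* one 2 MiB mapping */
#define PMD_MASK	(~(PMD_SIZE - 1))

enum level { LVL_4K, LVL_2M };			/* stand-ins for PG_LEVEL_* */

/* advance past whatever the current mapping covers */
static unsigned long next_vaddr(unsigned long vaddr, enum level lvl)
{
	if (lvl == LVL_2M)
		return (vaddr & PMD_MASK) + PMD_SIZE;
	return (vaddr & PAGE_MASK) + PAGE_SIZE;
}

int main(void)
{
	printf("%#lx\n", next_vaddr(0x1234567UL, LVL_4K));	/* 0x1235000 */
	printf("%#lx\n", next_vaddr(0x1234567UL, LVL_2M));	/* 0x1400000 */
	return 0;
}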
pgtable_32.c
27 void set_pte_vaddr(unsigned long vaddr, pte_t pteval) in set_pte_vaddr() argument
35 pgd = swapper_pg_dir + pgd_index(vaddr); in set_pte_vaddr()
40 p4d = p4d_offset(pgd, vaddr); in set_pte_vaddr()
45 pud = pud_offset(p4d, vaddr); in set_pte_vaddr()
50 pmd = pmd_offset(pud, vaddr); in set_pte_vaddr()
55 pte = pte_offset_kernel(pmd, vaddr); in set_pte_vaddr()
57 set_pte_at(&init_mm, vaddr, pte, pteval); in set_pte_vaddr()
59 pte_clear(&init_mm, vaddr, pte); in set_pte_vaddr()
65 flush_tlb_one_kernel(vaddr); in set_pte_vaddr()
highmem_32.c
9 unsigned long vaddr; in kmap_atomic_high_prot() local
14 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); in kmap_atomic_high_prot()
19 return (void *)vaddr; in kmap_atomic_high_prot()
35 unsigned long vaddr = (unsigned long) kvaddr & PAGE_MASK; in kunmap_atomic_high() local
37 if (vaddr >= __fix_to_virt(FIX_KMAP_END) && in kunmap_atomic_high()
38 vaddr <= __fix_to_virt(FIX_KMAP_BEGIN)) { in kunmap_atomic_high()
45 WARN_ON_ONCE(vaddr != __fix_to_virt(FIX_KMAP_BEGIN + idx)); in kunmap_atomic_high()
53 kpte_clear_flush(kmap_pte-idx, vaddr); in kunmap_atomic_high()
59 BUG_ON(vaddr < PAGE_OFFSET); in kunmap_atomic_high()
60 BUG_ON(vaddr >= (unsigned long)high_memory); in kunmap_atomic_high()
/arch/arm/kernel/
crash_dump.c
34 void *vaddr; in copy_oldmem_page() local
39 vaddr = ioremap(__pfn_to_phys(pfn), PAGE_SIZE); in copy_oldmem_page()
40 if (!vaddr) in copy_oldmem_page()
44 if (copy_to_user(buf, vaddr + offset, csize)) { in copy_oldmem_page()
45 iounmap(vaddr); in copy_oldmem_page()
49 memcpy(buf, vaddr + offset, csize); in copy_oldmem_page()
52 iounmap(vaddr); in copy_oldmem_page()
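copy_oldmem_page() is the classic map-copy-unmap shape: map one page of the crashed kernel by pfn, copy csize bytes from offset within it (to user or kernel space as requested), and unmap on every path out. A userspace sketch of the shape only, with hypothetical fake_map_pfn()/fake_unmap() helpers standing in for ioremap()/iounmap():

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define PAGE_SIZE 4096UL

static void *fake_map_pfn(unsigned long pfn) { (void)pfn; return calloc(1, PAGE_SIZE); }
static void fake_unmap(void *vaddr) { free(vaddr); }

static long copy_oldmem_sketch(unsigned long pfn, char *buf, size_t csize,
			       unsigned long offset)
{
	void *vaddr;

	if (!csize)
		return 0;
	vaddr = fake_map_pfn(pfn);	/* map exactly one old-kernel page */
	if (!vaddr)
		return -1;
	memcpy(buf, (char *)vaddr + offset, csize);
	fake_unmap(vaddr);		/* unmapped on every exit path */
	return (long)csize;
}

int main(void)
{
	char buf[16];

	printf("%ld\n", copy_oldmem_sketch(0x1234UL, buf, sizeof(buf), 128));
	return 0;
}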
/arch/nios2/mm/
dma-mapping.c
24 void *vaddr = phys_to_virt(paddr); in arch_sync_dma_for_device() local
28 invalidate_dcache_range((unsigned long)vaddr, in arch_sync_dma_for_device()
29 (unsigned long)(vaddr + size)); in arch_sync_dma_for_device()
37 flush_dcache_range((unsigned long)vaddr, in arch_sync_dma_for_device()
38 (unsigned long)(vaddr + size)); in arch_sync_dma_for_device()
48 void *vaddr = phys_to_virt(paddr); in arch_sync_dma_for_cpu() local
53 invalidate_dcache_range((unsigned long)vaddr, in arch_sync_dma_for_cpu()
54 (unsigned long)(vaddr + size)); in arch_sync_dma_for_cpu()
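The nios2 sync hooks encode the cache rule for each DMA direction: before the device reads a buffer, dirty CPU lines must be written back (flushed); before the device writes into it, stale CPU lines must be dropped (invalidated). A sketch of that dispatch with stand-in direction names and printf in place of the cache calls:

#include <stdio.h>

enum dma_dir { TO_DEVICE, FROM_DEVICE, BIDIRECTIONAL };	/* stand-ins */

static void sync_for_device_sketch(enum dma_dir dir)
{
	switch (dir) {
	case FROM_DEVICE:
		printf("invalidate_dcache_range()\n");	/* device will write */
		break;
	case TO_DEVICE:
	case BIDIRECTIONAL:
		printf("flush_dcache_range()\n");	/* device will read */
		break;
	}
}

int main(void)
{
	sync_for_device_sketch(TO_DEVICE);
	sync_for_device_sketch(FROM_DEVICE);
	return 0;
}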
/arch/arm/mach-ixp4xx/include/mach/
io.h
97 const u8 *vaddr = p; in __indirect_writesb() local
100 writeb(*vaddr++, bus_addr); in __indirect_writesb()
122 const u16 *vaddr = p; in __indirect_writesw() local
125 writew(*vaddr++, bus_addr); in __indirect_writesw()
143 const u32 *vaddr = p; in __indirect_writesl() local
145 writel(*vaddr++, bus_addr); in __indirect_writesl()
167 u8 *vaddr = p; in __indirect_readsb() local
170 *vaddr++ = readb(bus_addr); in __indirect_readsb()
192 u16 *vaddr = p; in __indirect_readsw() local
195 *vaddr++ = readw(bus_addr); in __indirect_readsw()
[all …]
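The __indirect_writes*/__indirect_reads* family implements "string" I/O: unlike memcpy, every element is sent to the same device register, and only the buffer pointer advances. A sketch of the write-byte case with a fake_writeb() stand-in that just logs the access:

#include <stdio.h>

typedef unsigned char u8;

static void fake_writeb(u8 val, volatile u8 *bus_addr)	/* stand-in for writeb() */
{
	(void)bus_addr;
	printf("writeb %#x\n", val);
}

/* every byte goes to the SAME bus address; only the source pointer moves */
static void writesb_sketch(volatile u8 *bus_addr, const void *p, int count)
{
	const u8 *vaddr = p;

	while (count--)
		fake_writeb(*vaddr++, bus_addr);
}

int main(void)
{
	const u8 buf[] = { 0xde, 0xad, 0xbe, 0xef };

	writesb_sketch((volatile u8 *)0, buf, 4);	/* fake ignores the address */
	return 0;
}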
/arch/um/kernel/
mem.c
103 unsigned long vaddr; in fixrange_init() local
105 vaddr = start; in fixrange_init()
106 i = pgd_index(vaddr); in fixrange_init()
107 j = pmd_index(vaddr); in fixrange_init()
110 for ( ; (i < PTRS_PER_PGD) && (vaddr < end); pgd++, i++) { in fixrange_init()
111 p4d = p4d_offset(pgd, vaddr); in fixrange_init()
112 pud = pud_offset(p4d, vaddr); in fixrange_init()
115 pmd = pmd_offset(pud, vaddr); in fixrange_init()
116 for (; (j < PTRS_PER_PMD) && (vaddr < end); pmd++, j++) { in fixrange_init()
118 vaddr += PMD_SIZE; in fixrange_init()
[all …]
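fixrange_init() drives a two-level walk with explicit indices: i tracks the pgd slot and j the pmd slot, with j resuming at 0 for every pgd after the first so that only the starting address enters mid-table. A sketch of that loop structure with illustrative geometry:

#include <stdio.h>

#define PMD_SHIFT	21			/* illustrative geometry */
#define PMD_SIZE	(1UL << PMD_SHIFT)
#define PGDIR_SHIFT	30
#define PTRS_PER_PGD	512
#define PTRS_PER_PMD	512

static void fixrange_sketch(unsigned long start, unsigned long end)
{
	unsigned long vaddr = start;
	unsigned int i = (vaddr >> PGDIR_SHIFT) & (PTRS_PER_PGD - 1);
	unsigned int j = (vaddr >> PMD_SHIFT) & (PTRS_PER_PMD - 1);

	for (; i < PTRS_PER_PGD && vaddr < end; i++) {
		for (; j < PTRS_PER_PMD && vaddr < end; j++) {
			printf("pgd[%u] pmd[%u] covers %#lx\n", i, j, vaddr);
			vaddr += PMD_SIZE;
		}
		j = 0;		/* later pgds start at their first pmd */
	}
}

int main(void)
{
	fixrange_sketch(0x13fe00000UL, 0x140400000UL);	/* crosses a pgd boundary */
	return 0;
}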
