Home
last modified time | relevance | path

Searched refs:va (Results 1 – 25 of 116) sorted by relevance

Pages: 1 2 3 4 5

/arch/powerpc/mm/
Dhash_native_64.c42 static inline void __tlbie(unsigned long va, int psize, int ssize) in __tlbie() argument
47 va &= ~(0xffffULL << 48); in __tlbie()
51 va &= ~0xffful; in __tlbie()
52 va |= ssize << 8; in __tlbie()
54 : : "r" (va), "r"(0), "i" (CPU_FTR_ARCH_206) in __tlbie()
59 va &= ~((1ul << mmu_psize_defs[psize].shift) - 1); in __tlbie()
60 va |= penc << 12; in __tlbie()
61 va |= ssize << 8; in __tlbie()
62 va |= 1; /* L */ in __tlbie()
64 : : "r" (va), "r"(0), "i" (CPU_FTR_ARCH_206) in __tlbie()
[all …]
Dhugetlbpage-hash64.c22 unsigned long va, rflags, pa, sz; in __hash_page_huge() local
28 va = hpt_va(ea, vsid, ssize); in __hash_page_huge()
72 hash = hpt_hash(va, shift, ssize); in __hash_page_huge()
78 if (ppc_md.hpte_updatepp(slot, rflags, va, mmu_psize, in __hash_page_huge()
84 unsigned long hash = hpt_hash(va, shift, ssize); in __hash_page_huge()
104 slot = ppc_md.hpte_insert(hpte_group, va, pa, rflags, 0, in __hash_page_huge()
111 slot = ppc_md.hpte_insert(hpte_group, va, pa, rflags, in __hash_page_huge()
/arch/frv/mm/
Ddma-alloc.c50 static int map_page(unsigned long va, unsigned long pa, pgprot_t prot) in map_page() argument
59 pge = pgd_offset_k(va); in map_page()
60 pue = pud_offset(pge, va); in map_page()
61 pme = pmd_offset(pue, va); in map_page()
64 pte = pte_alloc_kernel(pme, va); in map_page()
84 unsigned long page, va, pa; in consistent_alloc() local
107 va = VMALLOC_VMADDR(area->addr); in consistent_alloc()
108 ret = (void *) va; in consistent_alloc()
123 err = map_page(va + i, pa + i, PAGE_KERNEL_NOCACHE); in consistent_alloc()
126 vfree((void *) va); in consistent_alloc()
[all …]
/arch/ia64/kvm/
Dvtlb.c36 static int __is_tr_translated(struct thash_data *trp, u64 rid, u64 va) in __is_tr_translated() argument
39 && ((va-trp->vadr) < PSIZE(trp->ps))); in __is_tr_translated()
62 void machine_tlb_purge(u64 va, u64 ps) in machine_tlb_purge() argument
64 ia64_ptcl(va, ps << 2); in machine_tlb_purge()
116 struct thash_data *vsa_thash(union ia64_pta vpta, u64 va, u64 vrr, u64 *tag) in vsa_thash() argument
121 pfn = REGION_OFFSET(va) >> _REGION_PAGE_SIZE(vrr); in vsa_thash()
130 struct thash_data *__vtr_lookup(struct kvm_vcpu *vcpu, u64 va, int type) in __vtr_lookup() argument
137 rid = vcpu_get_rr(vcpu, va); in __vtr_lookup()
140 if (vcpu_quick_region_check(vcpu->arch.dtr_regions, va)) { in __vtr_lookup()
143 if (__is_tr_translated(trp, rid, va)) in __vtr_lookup()
[all …]
/arch/powerpc/platforms/cell/
Dbeat_htab.c91 unsigned long va, unsigned long pa, in beat_lpar_hpte_insert() argument
104 hpte_group, va, pa, rflags, vflags, psize); in beat_lpar_hpte_insert()
106 hpte_v = hpte_encode_v(va, psize, MMU_SEGSIZE_256M) | in beat_lpar_hpte_insert()
187 unsigned long va, in beat_lpar_hpte_updatepp() argument
194 want_v = hpte_encode_v(va, psize, MMU_SEGSIZE_256M); in beat_lpar_hpte_updatepp()
223 static long beat_lpar_hpte_find(unsigned long va, int psize) in beat_lpar_hpte_find() argument
230 hash = hpt_hash(va, mmu_psize_defs[psize].shift, MMU_SEGSIZE_256M); in beat_lpar_hpte_find()
231 want_v = hpte_encode_v(va, psize, MMU_SEGSIZE_256M); in beat_lpar_hpte_find()
258 unsigned long lpar_rc, slot, vsid, va; in beat_lpar_hpte_updateboltedpp() local
262 va = (vsid << 28) | (ea & 0x0fffffff); in beat_lpar_hpte_updateboltedpp()
[all …]
/arch/arm/mach-bcmring/
Dmm.c23 #define IO_DESC(va, sz) { .virtual = va, \ argument
24 .pfn = __phys_to_pfn(HW_IO_VIRT_TO_PHYS(va)), \
28 #define MEM_DESC(va, sz) { .virtual = va, \ argument
29 .pfn = __phys_to_pfn(HW_IO_VIRT_TO_PHYS(va)), \
/arch/arm/mm/
Dmm.h19 static inline void set_top_pte(unsigned long va, pte_t pte) in set_top_pte() argument
21 pte_t *ptep = pte_offset_kernel(top_pmd, va); in set_top_pte()
23 local_flush_tlb_kernel_page(va); in set_top_pte()
26 static inline pte_t get_top_pte(unsigned long va) in get_top_pte() argument
28 pte_t *ptep = pte_offset_kernel(top_pmd, va); in get_top_pte()
Dcache-xsc3l2.c70 static inline void l2_unmap_va(unsigned long va) in l2_unmap_va() argument
73 if (va != -1) in l2_unmap_va()
74 kunmap_atomic((void *)va); in l2_unmap_va()
81 unsigned long va = prev_va & PAGE_MASK; in l2_map_va() local
90 va = (unsigned long)kmap_atomic_pfn(pa >> PAGE_SHIFT); in l2_map_va()
92 return va + (pa_offset >> (32 - PAGE_SHIFT)); in l2_map_va()
/arch/powerpc/platforms/pseries/
Dlpar.c111 unsigned long va, unsigned long pa, in pSeries_lpar_hpte_insert() argument
123 hpte_group, va, pa, rflags, vflags, psize); in pSeries_lpar_hpte_insert()
125 hpte_v = hpte_encode_v(va, psize, ssize) | vflags | HPTE_V_VALID; in pSeries_lpar_hpte_insert()
240 static inline unsigned long hpte_encode_avpn(unsigned long va, int psize, in hpte_encode_avpn() argument
245 v = (va >> 23) & ~(mmu_psize_defs[psize].avpnm); in hpte_encode_avpn()
259 unsigned long va, in pSeries_lpar_hpte_updatepp() argument
266 want_v = hpte_encode_avpn(va, psize, ssize); in pSeries_lpar_hpte_updatepp()
304 static long pSeries_lpar_hpte_find(unsigned long va, int psize, int ssize) in pSeries_lpar_hpte_find() argument
311 hash = hpt_hash(va, mmu_psize_defs[psize].shift, ssize); in pSeries_lpar_hpte_find()
312 want_v = hpte_encode_avpn(va, psize, ssize); in pSeries_lpar_hpte_find()
[all …]
/arch/alpha/kernel/
Dtraps.c446 unsigned long count, va, pc; member
455 do_entUna(void * va, unsigned long opcode, unsigned long reg, in do_entUna() argument
464 unaligned[0].va = (unsigned long) va; in do_entUna()
486 : "r"(va), "0"(0)); in do_entUna()
506 : "r"(va), "0"(0)); in do_entUna()
526 : "r"(va), "0"(0)); in do_entUna()
560 : "r"(va), "r"(una_reg(reg)), "0"(0)); in do_entUna()
590 : "r"(va), "r"(una_reg(reg)), "0"(0)); in do_entUna()
620 : "r"(va), "r"(una_reg(reg)), "0"(0)); in do_entUna()
627 pc, va, opcode, reg); in do_entUna()
[all …]
/arch/powerpc/platforms/ps3/
Dhtab.c46 static long ps3_hpte_insert(unsigned long hpte_group, unsigned long va, in ps3_hpte_insert() argument
64 hpte_v = hpte_encode_v(va, psize, ssize) | vflags | HPTE_V_VALID; in ps3_hpte_insert()
79 __func__, result, va, pa, hpte_group, hpte_v, hpte_r); in ps3_hpte_insert()
110 unsigned long va, int psize, int ssize, int local) in ps3_hpte_updatepp() argument
118 want_v = hpte_encode_v(va, psize, ssize); in ps3_hpte_updatepp()
129 __func__, result, va, slot, psize); in ps3_hpte_updatepp()
162 static void ps3_hpte_invalidate(unsigned long slot, unsigned long va, in ps3_hpte_invalidate() argument
174 __func__, result, va, slot, psize); in ps3_hpte_invalidate()
/arch/powerpc/math-emu/
Dmath_efp.c185 union dw_union vc, va, vb; in do_spe_mathemu() local
204 va.wp[0] = current->thread.evr[fa]; in do_spe_mathemu()
205 va.wp[1] = regs->gpr[fa]; in do_spe_mathemu()
213 pr_debug("va: %08x %08x\n", va.wp[0], va.wp[1]); in do_spe_mathemu()
223 FP_UNPACK_SP(SA, va.wp + 1); in do_spe_mathemu()
228 FP_UNPACK_SP(SA, va.wp + 1); in do_spe_mathemu()
237 vc.wp[1] = va.wp[1] & ~SIGN_BIT_S; in do_spe_mathemu()
241 vc.wp[1] = va.wp[1] | SIGN_BIT_S; in do_spe_mathemu()
245 vc.wp[1] = va.wp[1] ^ SIGN_BIT_S; in do_spe_mathemu()
350 FP_UNPACK_DP(DA, va.dp); in do_spe_mathemu()
[all …]
/arch/mn10300/mm/
Dtlb-smp.c53 unsigned long va);
95 unsigned long va) in flush_tlb_others() argument
120 flush_va = va; in flush_tlb_others()
182 void flush_tlb_page(struct vm_area_struct *vma, unsigned long va) in flush_tlb_page() argument
191 local_flush_tlb_page(mm, va); in flush_tlb_page()
193 flush_tlb_others(cpu_mask, mm, va); in flush_tlb_page()
/arch/openrisc/kernel/
Ddma.c74 unsigned long va; in or1k_dma_alloc_coherent() local
88 va = (unsigned long)page; in or1k_dma_alloc_coherent()
94 if (walk_page_range(va, va + size, &walk)) { in or1k_dma_alloc_coherent()
99 return (void *)va; in or1k_dma_alloc_coherent()
105 unsigned long va = (unsigned long)vaddr; in or1k_dma_free_coherent() local
112 WARN_ON(walk_page_range(va, va + size, &walk)); in or1k_dma_free_coherent()
/arch/x86/mm/
Dtlb.c173 struct mm_struct *mm, unsigned long va) in flush_tlb_others_ipi() argument
186 f->flush_va = va; in flush_tlb_others_ipi()
206 struct mm_struct *mm, unsigned long va) in native_flush_tlb_others() argument
212 cpumask = uv_flush_tlb_others(cpumask, mm, va, cpu); in native_flush_tlb_others()
214 flush_tlb_others_ipi(cpumask, mm, va); in native_flush_tlb_others()
217 flush_tlb_others_ipi(cpumask, mm, va); in native_flush_tlb_others()
303 void flush_tlb_page(struct vm_area_struct *vma, unsigned long va) in flush_tlb_page() argument
311 __flush_tlb_one(va); in flush_tlb_page()
317 flush_tlb_others(mm_cpumask(mm), mm, va); in flush_tlb_page()
/arch/microblaze/mm/
Dpgtable.c42 #define flush_HPTE(X, va, pg) _tlbie(va) argument
139 int map_page(unsigned long va, phys_addr_t pa, int flags) in map_page() argument
145 pd = pmd_offset(pgd_offset_k(va), va); in map_page()
147 pg = pte_alloc_kernel(pd, va); /* from powerpc - pgtable.c */ in map_page()
152 set_pte_at(&init_mm, va, pg, pfn_pte(pa >> PAGE_SHIFT, in map_page()
155 flush_HPTE(0, va, pmd_val(*pd)); in map_page()
/arch/powerpc/include/asm/
Dpte-hash64-64k.h61 #define pte_iterate_hashed_subpages(rpte, psize, va, index, shift) \ argument
63 unsigned long __end = va + PAGE_SIZE; \
67 for (index = 0; va < __end; index++, va += (1L << shift)) { \
/arch/mips/math-emu/
Dcp1emu.c279 u64 __user *va = (u64 __user *) (xcp->regs[MIPSInst_RS(ir)] + in cop1Emulate() local
285 if (!access_ok(VERIFY_READ, va, sizeof(u64))) { in cop1Emulate()
287 *fault_addr = va; in cop1Emulate()
290 if (__get_user(val, va)) { in cop1Emulate()
292 *fault_addr = va; in cop1Emulate()
300 u64 __user *va = (u64 __user *) (xcp->regs[MIPSInst_RS(ir)] + in cop1Emulate() local
306 if (!access_ok(VERIFY_WRITE, va, sizeof(u64))) { in cop1Emulate()
308 *fault_addr = va; in cop1Emulate()
311 if (__put_user(val, va)) { in cop1Emulate()
313 *fault_addr = va; in cop1Emulate()
[all …]
/arch/tile/mm/
Dhighmem.c67 unsigned long va; member
97 unsigned long va, pte_t *ptep, pte_t pteval) in kmap_atomic_register() argument
109 amp->va = va; in kmap_atomic_register()
130 static void kmap_atomic_unregister(struct page *page, unsigned long va) in kmap_atomic_unregister() argument
137 if (amp->page == page && amp->cpu == cpu && amp->va == va) in kmap_atomic_unregister()
149 pte_t *ptep = kmap_get_pte(amp->va); in kmap_atomic_fix_one_kpte()
152 flush_remote(0, 0, NULL, amp->va, PAGE_SIZE, PAGE_SIZE, in kmap_atomic_fix_one_kpte()
/arch/m32r/kernel/
Dsmp.c315 void smp_flush_tlb_page(struct vm_area_struct *vma, unsigned long va) in smp_flush_tlb_page() argument
336 va &= PAGE_MASK; in smp_flush_tlb_page()
337 va |= (*mmc & MMU_CONTEXT_ASID_MASK); in smp_flush_tlb_page()
338 __flush_tlb_page(va); in smp_flush_tlb_page()
342 flush_tlb_others(cpu_mask, mm, vma, va); in smp_flush_tlb_page()
371 struct vm_area_struct *vma, unsigned long va) in flush_tlb_others() argument
408 flush_va = va; in flush_tlb_others()
463 unsigned long va = flush_va; in smp_invalidate_interrupt() local
466 va &= PAGE_MASK; in smp_invalidate_interrupt()
467 va |= (*mmc & MMU_CONTEXT_ASID_MASK); in smp_invalidate_interrupt()
[all …]
/arch/parisc/kernel/
Dentry.S190 va = r8 /* virtual address for which the trap occurred */ define
203 mfctl %pcoq, va
220 mfctl %pcoq, va
234 mfctl %ior,va
252 mfctl %ior,va
266 mfctl %ior, va
284 mfctl %ior, va
296 mfctl %ior,va
312 mfctl %ior,va
326 mfctl %ior,va
[all …]
/arch/tile/include/hv/
Dhypervisor.h569 int hv_store_mapping(HV_VirtAddr va, unsigned int len, HV_PhysAddr pa);
1257 void hv_bzero_page(HV_VirtAddr va, unsigned int size);
1514 int hv_dev_pread(int devhdl, __hv32 flags, HV_VirtAddr va, __hv32 len,
1557 int hv_dev_pwrite(int devhdl, __hv32 flags, HV_VirtAddr va, __hv32 len,
2348 #define HV_L0_INDEX(va) \
2349 (((va) >> HV_LOG2_L1_SPAN) & (HV_L0_ENTRIES - 1))
2354 #define HV_L0_INDEX(va) \ argument
2355 (((HV_VirtAddr)(va) >> HV_LOG2_L1_SPAN) & (HV_L0_ENTRIES - 1))
2390 #define HV_L1_INDEX(va) \
2391 (((va) >> HV_LOG2_PAGE_SIZE_LARGE) & (HV_L1_ENTRIES - 1))
[all …]
/arch/x86/include/asm/
Dedac.h6 static inline void atomic_scrub(void *va, u32 size) in atomic_scrub() argument
8 u32 i, *virt_addr = va; in atomic_scrub()
Dtlbflush.h118 unsigned long va) in native_flush_tlb_others() argument
146 struct mm_struct *mm, unsigned long va);
166 #define flush_tlb_others(mask, mm, va) native_flush_tlb_others(mask, mm, va) argument
/arch/mips/include/asm/
Dedac.h6 static inline void atomic_scrub(void *va, u32 size) in atomic_scrub() argument
8 unsigned long *virt_addr = va; in atomic_scrub()

Pages: 1 2 3 4 5