
Searched refs:ua (Results 1 – 6 of 6) sorted by relevance

/arch/powerpc/mm/book3s64/iommu_api.c

  33  u64 ua; /* userspace address */  [member]
  56  static long mm_iommu_do_alloc(struct mm_struct *mm, unsigned long ua,  [argument, in mm_iommu_do_alloc()]
  91  mem->pageshift = __ffs(ua | (entries << PAGE_SHIFT));  [in mm_iommu_do_alloc()]
 106  ret = get_user_pages(ua + (entry << PAGE_SHIFT), n,  [in mm_iommu_do_alloc()]
 127  mem->ua = ua;  [in mm_iommu_do_alloc()]
 134  if ((mem2->ua < (ua + (entries << PAGE_SHIFT))) &&  [in mm_iommu_do_alloc()]
 135      (ua < (mem2->ua +  [in mm_iommu_do_alloc()]
 186  long mm_iommu_new(struct mm_struct *mm, unsigned long ua, unsigned long entries,  [argument, in mm_iommu_new()]
 189  return mm_iommu_do_alloc(mm, ua, entries, MM_IOMMU_TABLE_INVALID_HPA,  [in mm_iommu_new()]
 194  long mm_iommu_newdev(struct mm_struct *mm, unsigned long ua,  [argument, in mm_iommu_newdev()]
[all …]
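
The matches at iommu_api.c:91 and 134-135 carry the two interesting invariants of this allocator: the region's page shift is bounded by the lowest set bit of (ua | (entries << PAGE_SHIFT)), and a new registration must not overlap an existing one. A minimal standalone sketch of both checks (plain C; PAGE_SHIFT fixed at 12 for illustration, and the function names are ours, not the kernel's):

#include <stdbool.h>

#define PAGE_SHIFT 12   /* assumed 4K pages for the sketch */

/* Largest usable page shift: the lowest set bit of the base address
 * ORed with the byte length bounds both alignment and size, mirroring
 * iommu_api.c:91 (undefined if both are zero, like the kernel's __ffs()). */
static unsigned int max_pageshift(unsigned long ua, unsigned long entries)
{
        return __builtin_ctzl(ua | (entries << PAGE_SHIFT));
}

/* Half-open ranges [ua1, ua1+len1) and [ua2, ua2+len2) overlap iff each
 * starts before the other ends, mirroring the test at lines 134-135,
 * where len is entries << PAGE_SHIFT. */
static bool ranges_overlap(unsigned long ua1, unsigned long len1,
                           unsigned long ua2, unsigned long len2)
{
        return (ua1 < ua2 + len2) && (ua2 < ua1 + len1);
}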

/arch/powerpc/kvm/book3s_64_vio_hv.c

  79  unsigned long *ua, unsigned long **prmap)  [argument, in kvmppc_rm_tce_to_ua()]
  88  *ua = __gfn_to_hva_memslot(memslot, gfn) |  [in kvmppc_rm_tce_to_ua()]
 111  unsigned long ua = 0;  [local, in kvmppc_rm_tce_validate()]
 120  if (kvmppc_rm_tce_to_ua(stt->kvm, tce, &ua, NULL))  [in kvmppc_rm_tce_validate()]
 128  mem = mm_iommu_lookup_rm(stt->kvm->mm, ua, 1ULL << shift);  [in kvmppc_rm_tce_validate()]
 132  if (mm_iommu_ua_to_hpa_rm(mem, ua, shift, &hpa))  [in kvmppc_rm_tce_validate()]
 335  unsigned long entry, unsigned long ua,  [argument, in kvmppc_rm_tce_iommu_do_map()]
 347  mem = mm_iommu_lookup_rm(kvm->mm, ua, 1ULL << tbl->it_page_shift);  [in kvmppc_rm_tce_iommu_do_map()]
 351  if (WARN_ON_ONCE_RM(mm_iommu_ua_to_hpa_rm(mem, ua, tbl->it_page_shift,  [in kvmppc_rm_tce_iommu_do_map()]
 371  *pua = cpu_to_be64(ua);  [in kvmppc_rm_tce_iommu_do_map()]
[all …]
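
Line 88 (and its virtual-mode twin at book3s_64_vio.c:352) turns a TCE, a guest-physical DMA address, into a host userspace address: the guest frame number indexes a memslot, and the in-page offset is ORed back in. A hedged sketch of that pattern (gfn_to_hva_base() is a hypothetical stand-in for the __gfn_to_hva_memslot() lookup, and the offset mask is an assumption, since the result line is truncated after the OR):

#define PAGE_SHIFT 12                           /* assumed 4K pages */
#define PAGE_MASK  (~((1UL << PAGE_SHIFT) - 1))

/* Hypothetical memslot lookup: returns the page-aligned host virtual
 * address backing a guest frame number. */
extern unsigned long gfn_to_hva_base(unsigned long gfn);

static unsigned long tce_to_ua_sketch(unsigned long tce)
{
        unsigned long gfn = tce >> PAGE_SHIFT;  /* guest frame number */

        /* Page-aligned host VA plus the offset within the page. */
        return gfn_to_hva_base(gfn) | (tce & ~PAGE_MASK);
}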

/arch/powerpc/kvm/book3s_64_vio.c

 343  unsigned long *ua)  [argument, in kvmppc_tce_to_ua()]
 352  *ua = __gfn_to_hva_memslot(memslot, gfn) |  [in kvmppc_tce_to_ua()]
 364  unsigned long ua = 0;  [local, in kvmppc_tce_validate()]
 373  if (kvmppc_tce_to_ua(stt->kvm, tce, &ua))  [in kvmppc_tce_validate()]
 382  mem = mm_iommu_lookup(stt->kvm->mm, ua, 1ULL << shift);  [in kvmppc_tce_validate()]
 383  if (!mem || mm_iommu_ua_to_hpa(mem, ua, shift, &hpa)) {  [in kvmppc_tce_validate()]
 500  unsigned long entry, unsigned long ua,  [argument, in kvmppc_tce_iommu_do_map()]
 512  mem = mm_iommu_lookup(kvm->mm, ua, 1ULL << tbl->it_page_shift);  [in kvmppc_tce_iommu_do_map()]
 517  if (WARN_ON_ONCE(mm_iommu_ua_to_hpa(mem, ua, tbl->it_page_shift, &hpa)))  [in kvmppc_tce_iommu_do_map()]
 532  *pua = cpu_to_be64(ua);  [in kvmppc_tce_iommu_do_map()]
[all …]
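
Lines 364-383 are the heart of kvmppc_tce_validate(): a TCE is accepted only if it maps to a userspace address backed by a memslot and that address lies inside a region preregistered with the IOMMU. A condensed restatement of that flow (kernel context assumed; kvmppc_tce_to_ua() is the file's own static helper, the mem handle type is struct mm_iommu_table_group_mem_t in the upstream headers, and the error codes here are illustrative):

#include <linux/kvm_host.h>
#include <asm/mmu_context.h>

static long tce_validate_sketch(struct kvm *kvm, unsigned long tce,
                                unsigned int shift)
{
        unsigned long ua = 0, hpa = 0;
        struct mm_iommu_table_group_mem_t *mem;

        /* TCE -> userspace address; fails if no memslot covers the gfn. */
        if (kvmppc_tce_to_ua(kvm, tce, &ua))
                return -EINVAL;

        /* The address must fall inside a preregistered, pinned region
         * large enough for one IOMMU page of this shift, and must
         * translate to a host physical address at that granularity
         * (cf. lines 382-383). */
        mem = mm_iommu_lookup(kvm->mm, ua, 1ULL << shift);
        if (!mem || mm_iommu_ua_to_hpa(mem, ua, shift, &hpa))
                return -EPERM;

        return 0;
}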

/arch/powerpc/include/asm/mmu_context.h

  25  unsigned long ua, unsigned long entries,
  27  extern long mm_iommu_newdev(struct mm_struct *mm, unsigned long ua,
  35  unsigned long ua, unsigned long size);
  37  struct mm_struct *mm, unsigned long ua, unsigned long size);
  39  unsigned long ua, unsigned long entries);
  41  unsigned long ua, unsigned int pageshift, unsigned long *hpa);
  43  unsigned long ua, unsigned int pageshift, unsigned long *hpa);
  44  extern void mm_iommu_ua_mark_dirty_rm(struct mm_struct *mm, unsigned long ua);
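
Taken together, these declarations form the memory-preregistration API: mm_iommu_new()/mm_iommu_newdev() pin and register a userspace range, mm_iommu_lookup() finds the registration covering an address, and mm_iommu_ua_to_hpa() translates within it (the _rm variants are the real-mode counterparts). A hedged lifecycle sketch, assuming the upstream fourth parameter of mm_iommu_new(), a struct mm_iommu_table_group_mem_t ** output handle that the truncated lines above do not show:

#include <linux/errno.h>
#include <asm/mmu_context.h>

static long preregister_sketch(struct mm_struct *mm, unsigned long ua,
                               unsigned long entries)
{
        struct mm_iommu_table_group_mem_t *mem;
        unsigned long hpa = 0;
        long ret;

        /* Pin `entries` pages starting at ua and record the region
         * (the &mem output parameter is an assumption, per above). */
        ret = mm_iommu_new(mm, ua, entries, &mem);
        if (ret)
                return ret;

        /* Any ua inside the region can now be translated to a host
         * physical address at a page shift the region supports. */
        if (mm_iommu_ua_to_hpa(mem, ua, PAGE_SHIFT, &hpa))
                return -EFAULT;

        return 0;
}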

/arch/powerpc/include/asm/vio.h

  31  #define h_vio_signal(ua, mode) \  [macro argument]
  32          plpar_hcall_norets(H_VIO_SIGNAL, ua, mode)
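
h_vio_signal() is a thin convenience wrapper: it issues the H_VIO_SIGNAL hypercall with a device's unit address (the ua here) and an interrupt-signalling mode. A usage sketch (vdev->unit_address comes from struct vio_dev; the VIO_IRQ_ENABLE constant and the surrounding driver context are assumptions, modeled on the ibmveth driver):

/* Ask the hypervisor to start signalling interrupts for this device.
 * VIO_IRQ_ENABLE is an assumed mode constant (1 = enable, 0 = disable
 * in the ibmveth driver); check the surrounding tree before reuse. */
long rc = h_vio_signal(vdev->unit_address, VIO_IRQ_ENABLE);
if (rc != H_SUCCESS)
        dev_err(&vdev->dev, "H_VIO_SIGNAL(enable) failed: %ld\n", rc);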

/arch/unicore32/include/asm/assembler.h

  78  .else; .ifc \cond, ua