Lines matching refs: ua (userspace address handling in the KVM PPC TCE code)
334 unsigned long *ua) in kvmppc_tce_to_ua() argument
343 *ua = __gfn_to_hva_memslot(memslot, gfn) | in kvmppc_tce_to_ua()
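
Lines 334-343 come from the helper that resolves a guest TCE (a translation-control entry carrying a guest physical address plus read/write permission bits) to a host userspace address through the backing memslot. A minimal sketch of the whole function, reconstructed around the visible fragments; search_memslots(), PAGE_SHIFT/PAGE_MASK and the TCE_PCI_READ/TCE_PCI_WRITE masking are assumptions modelled on the upstream KVM PPC code, not shown in this listing:

static long kvmppc_tce_to_ua(struct kvm *kvm, unsigned long tce,
		unsigned long *ua)
{
	/* High bits of the TCE are the guest frame number (assumption). */
	unsigned long gfn = tce >> PAGE_SHIFT;
	struct kvm_memory_slot *memslot;

	memslot = search_memslots(kvm_memslots(kvm), gfn);
	if (!memslot)
		return -EINVAL;

	/* Host VA of the page, OR-ed with the TCE's in-page offset;
	 * the low permission bits are masked out. */
	*ua = __gfn_to_hva_memslot(memslot, gfn) |
		(tce & ~(PAGE_MASK | TCE_PCI_READ | TCE_PCI_WRITE));

	return 0;
}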
355 unsigned long ua = 0; in kvmppc_tce_validate() local
364 if (kvmppc_tce_to_ua(stt->kvm, tce, &ua)) in kvmppc_tce_validate()
372 mem = mm_iommu_lookup(stt->kvm->mm, ua, 1ULL << shift); in kvmppc_tce_validate()
376 if (mm_iommu_ua_to_hpa(mem, ua, shift, &hpa)) in kvmppc_tce_validate()
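
kvmppc_tce_validate() (lines 355-376) uses kvmppc_tce_to_ua() as an up-front sanity check: a TCE is only accepted if every hardware IOMMU table attached to this TCE table can translate the resulting userspace address through memory that userspace preregistered via the mm_iommu API. A hedged sketch of that loop, built around the visible lines; the H_TOO_HARD/H_SUCCESS returns, the iommu_tables list and the stit iterator are assumptions:

	struct kvmppc_spapr_tce_iommu_table *stit;
	unsigned long ua = 0;

	if (kvmppc_tce_to_ua(stt->kvm, tce, &ua))
		return H_TOO_HARD;

	list_for_each_entry_rcu(stit, &stt->iommu_tables, next) {
		unsigned long hpa = 0;
		struct mm_iommu_table_group_mem_t *mem;
		long shift = stit->tbl->it_page_shift;

		/* The target must lie inside a preregistered region... */
		mem = mm_iommu_lookup(stt->kvm->mm, ua, 1ULL << shift);
		if (!mem)
			return H_TOO_HARD;

		/* ...and be translatable to a host physical address now. */
		if (mm_iommu_ua_to_hpa(mem, ua, shift, &hpa))
			return H_TOO_HARD;
	}

	return H_SUCCESS;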
482 unsigned long entry, unsigned long ua, in kvmppc_tce_iommu_do_map() argument
494 mem = mm_iommu_lookup(kvm->mm, ua, 1ULL << tbl->it_page_shift); in kvmppc_tce_iommu_do_map()
499 if (WARN_ON_ONCE(mm_iommu_ua_to_hpa(mem, ua, tbl->it_page_shift, &hpa))) in kvmppc_tce_iommu_do_map()
514 *pua = cpu_to_be64(ua); in kvmppc_tce_iommu_do_map()
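
Lines 482-514 belong to the single-entry mapping path: look the userspace address up in the preregistered-memory list, translate it to a host physical address at the table's page size, take a mapping reference, program the hardware entry, and record the ua in the table's userspace-view array (pua) so a later unmap can find and unpin the right region. A sketch under those assumptions; IOMMU_TABLE_USERSPACE_ENTRY(), mm_iommu_mapped_inc()/mm_iommu_mapped_dec(), iommu_tce_xchg() (a _no_kill variant in later kernels) and kvmppc_tce_iommu_mapped_dec() are names assumed from upstream, not shown in this listing:

static long kvmppc_tce_iommu_do_map(struct kvm *kvm, struct iommu_table *tbl,
		unsigned long entry, unsigned long ua,
		enum dma_data_direction dir)
{
	long ret;
	unsigned long hpa;
	__be64 *pua = IOMMU_TABLE_USERSPACE_ENTRY(tbl, entry);
	struct mm_iommu_table_group_mem_t *mem;

	if (!pua)
		/* The userspace-view array may not be allocated yet. */
		return H_TOO_HARD;

	mem = mm_iommu_lookup(kvm->mm, ua, 1ULL << tbl->it_page_shift);
	if (!mem)
		return H_TOO_HARD;

	if (WARN_ON_ONCE(mm_iommu_ua_to_hpa(mem, ua, tbl->it_page_shift, &hpa)))
		return H_TOO_HARD;

	if (mm_iommu_mapped_inc(mem))
		return H_TOO_HARD;

	/* Exchange the hardware entry; dir now holds the old direction. */
	ret = iommu_tce_xchg(tbl, entry, &hpa, &dir);
	if (WARN_ON_ONCE(ret)) {
		mm_iommu_mapped_dec(mem);
		return H_TOO_HARD;
	}

	/* If the entry was previously mapped, drop the old pin. */
	if (dir != DMA_NONE)
		kvmppc_tce_iommu_mapped_dec(kvm, tbl, entry);

	/* Remember the userspace address so unmap can unpin it later. */
	*pua = cpu_to_be64(ua);

	return 0;
}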
521 unsigned long entry, unsigned long ua, in kvmppc_tce_iommu_map() argument
532 io_entry + i, ua + pgoff, dir); in kvmppc_tce_iommu_map()
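
kvmppc_tce_iommu_map() (lines 521-532) is the wrapper that fans one guest-visible TCE out over several hardware entries when the guest's IOMMU page size (stt->page_shift) is larger than the host table's (tbl->it_page_shift). A minimal sketch; the subpages arithmetic and IOMMU_PAGE_SIZE() are assumptions consistent with the visible io_entry + i / ua + pgoff stepping:

static long kvmppc_tce_iommu_map(struct kvm *kvm,
		struct kvmppc_spapr_tce_table *stt, struct iommu_table *tbl,
		unsigned long entry, unsigned long ua,
		enum dma_data_direction dir)
{
	unsigned long i, pgoff, ret = H_SUCCESS;
	unsigned long subpages = 1ULL << (stt->page_shift - tbl->it_page_shift);
	unsigned long io_entry = entry * subpages;

	/* One guest TCE page may cover several hardware IOMMU pages. */
	for (i = 0, pgoff = 0; i < subpages;
			++i, pgoff += IOMMU_PAGE_SIZE(tbl)) {

		ret = kvmppc_tce_iommu_do_map(kvm, tbl,
				io_entry + i, ua + pgoff, dir);
		if (ret != H_SUCCESS)
			break;
	}

	return ret;
}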
546 unsigned long entry, ua = 0; in kvmppc_h_put_tce() local
568 if ((dir != DMA_NONE) && kvmppc_tce_to_ua(vcpu->kvm, tce, &ua)) { in kvmppc_h_put_tce()
581 entry, ua, dir); in kvmppc_h_put_tce()
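
In the H_PUT_TCE handler (lines 546-581), the TCE is resolved to a ua only when the request actually maps something (dir != DMA_NONE); an unmap never needs a userspace address. A hedged sketch of the handler's core after table lookup and validation; iommu_tce_direction(), kvmppc_tce_iommu_unmap(), kvmppc_clear_tce(), kvmppc_tce_put() and the H_* return codes are assumptions modelled on upstream:

	dir = iommu_tce_direction(tce);

	if ((dir != DMA_NONE) && kvmppc_tce_to_ua(vcpu->kvm, tce, &ua)) {
		ret = H_PARAMETER;
		goto unlock_exit;
	}

	entry = ioba >> stt->page_shift;

	list_for_each_entry_lockless(stit, &stt->iommu_tables, next) {
		if (dir == DMA_NONE)
			ret = kvmppc_tce_iommu_unmap(vcpu->kvm, stt,
					stit->tbl, entry);
		else
			ret = kvmppc_tce_iommu_map(vcpu->kvm, stt,
					stit->tbl, entry, ua, dir);

		if (ret != H_SUCCESS) {
			kvmppc_clear_tce(stit->tbl, entry);
			goto unlock_exit;
		}
	}

	/* Finally mirror the TCE into the KVM-side shadow table. */
	kvmppc_tce_put(stt, entry, tce);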
606 unsigned long entry, ua = 0; in kvmppc_h_put_tce_indirect() local
631 if (kvmppc_tce_to_ua(vcpu->kvm, tce_list, &ua)) { in kvmppc_h_put_tce_indirect()
635 tces = (u64 __user *) ua; in kvmppc_h_put_tce_indirect()
666 if (kvmppc_tce_to_ua(vcpu->kvm, tce, &ua)) { in kvmppc_h_put_tce_indirect()
673 stit->tbl, entry + i, ua, in kvmppc_h_put_tce_indirect()
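
H_PUT_TCE_INDIRECT (lines 606-673) reuses the same helper twice: once at line 631 to turn the guest physical address of the TCE list itself into a host user pointer that get_user() can read through, and once per entry at line 666 for each TCE fetched from that list. A sketch of the per-entry loop; npages, the get_user()/be64_to_cpu() handling and the error paths are assumptions, and kernel versions differ in how they validate and re-read entries:

	if (kvmppc_tce_to_ua(vcpu->kvm, tce_list, &ua)) {
		ret = H_TOO_HARD;
		goto unlock_exit;
	}
	tces = (u64 __user *) ua;	/* guest's TCE list via its host VA */

	for (i = 0; i < npages; ++i) {
		if (get_user(tce, tces + i)) {
			ret = H_TOO_HARD;
			goto unlock_exit;
		}
		tce = be64_to_cpu(tce);

		ret = kvmppc_tce_validate(stt, tce);
		if (ret != H_SUCCESS)
			goto unlock_exit;

		/* Same GPA-to-host-VA conversion, now per TCE. */
		if (kvmppc_tce_to_ua(vcpu->kvm, tce, &ua)) {
			ret = H_PARAMETER;
			goto unlock_exit;
		}

		list_for_each_entry_lockless(stit, &stt->iommu_tables, next) {
			ret = kvmppc_tce_iommu_map(vcpu->kvm, stt,
					stit->tbl, entry + i, ua,
					iommu_tce_direction(tce));

			if (ret != H_SUCCESS) {
				kvmppc_clear_tce(stit->tbl, entry + i);
				goto unlock_exit;
			}
		}

		kvmppc_tce_put(stt, entry + i, tce);
	}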