Searched refs:gpa (Results 1 – 25 of 65) sorted by relevance


/arch/powerpc/kvm/
book3s_64_mmu_radix.c
138 u64 pte, base, gpa; in kvmppc_mmu_walk_radix_tree() local
192 gpa = pte & 0x01fffffffffff000ul; in kvmppc_mmu_walk_radix_tree()
193 if (gpa & ((1ul << offset) - 1)) in kvmppc_mmu_walk_radix_tree()
195 gpa |= eaddr & ((1ul << offset) - 1); in kvmppc_mmu_walk_radix_tree()
203 gpte->raddr = gpa; in kvmppc_mmu_walk_radix_tree()
411 void kvmppc_unmap_pte(struct kvm *kvm, pte_t *pte, unsigned long gpa, in kvmppc_unmap_pte() argument
418 unsigned long gfn = gpa >> PAGE_SHIFT; in kvmppc_unmap_pte()
422 old = kvmppc_radix_update_pte(kvm, pte, ~0UL, 0, gpa, shift); in kvmppc_unmap_pte()
423 kvmppc_radix_tlbie_page(kvm, gpa, shift, lpid); in kvmppc_unmap_pte()
442 gpa &= ~(page_size - 1); in kvmppc_unmap_pte()
[all …]
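
The hits at 192–195 above show the tail of the radix walk: the guest real address is the page base taken from the leaf PTE, alignment-checked against the mapping size, with the low bits of the effective address spliced back in. A minimal user-space sketch of that step (the PTE mask is copied from the hit at line 192; the kernel's error return is reduced to an assert here):

    #include <assert.h>
    #include <stdint.h>

    /* Final step of kvmppc_mmu_walk_radix_tree(), simplified: combine
     * the page base from the leaf PTE with the in-page offset of the
     * effective address. `offset` is the mapping size in bits. */
    static uint64_t pte_to_gpa(uint64_t pte, uint64_t eaddr, unsigned int offset)
    {
        uint64_t gpa = pte & 0x01fffffffffff000ULL;     /* page base */
        /* A base not aligned to the mapping size is a bad PTE. */
        assert((gpa & ((1ULL << offset) - 1)) == 0);
        return gpa | (eaddr & ((1ULL << offset) - 1));  /* add offset */
    }
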
book3s_hv_uvmem.c
233 unsigned long gpa; member
510 struct kvm *kvm, unsigned long gpa) in __kvmppc_svm_page_out() argument
529 if (!kvmppc_gfn_is_uvmem_pfn(gpa >> page_shift, kvm, NULL)) in __kvmppc_svm_page_out()
562 gpa, 0, page_shift); in __kvmppc_svm_page_out()
582 struct kvm *kvm, unsigned long gpa) in kvmppc_svm_page_out() argument
587 ret = __kvmppc_svm_page_out(vma, start, end, page_shift, kvm, gpa); in kvmppc_svm_page_out()
636 PAGE_SHIFT, kvm, pvt->gpa)) in kvmppc_uvmem_drop_pages()
638 pvt->gpa, addr); in kvmppc_uvmem_drop_pages()
686 static struct page *kvmppc_uvmem_get_page(unsigned long gpa, struct kvm *kvm) in kvmppc_uvmem_get_page() argument
710 kvmppc_gfn_secure_uvmem_pfn(gpa >> PAGE_SHIFT, uvmem_pfn, kvm); in kvmppc_uvmem_get_page()
[all …]
book3s_hv_nested.c
911 unsigned long gpa; in kvmhv_update_nest_rmap_rc() local
915 gpa = n_rmap & RMAP_NESTED_GPA_MASK; in kvmhv_update_nest_rmap_rc()
919 ptep = find_kvm_nested_guest_pte(kvm, lpid, gpa, &shift); in kvmhv_update_nest_rmap_rc()
928 kvmppc_radix_tlbie_page(kvm, gpa, shift, lpid); in kvmhv_update_nest_rmap_rc()
958 unsigned long gpa; in kvmhv_remove_nest_rmap() local
962 gpa = n_rmap & RMAP_NESTED_GPA_MASK; in kvmhv_remove_nest_rmap()
969 ptep = find_kvm_nested_guest_pte(kvm, lpid, gpa, &shift); in kvmhv_remove_nest_rmap()
972 kvmppc_unmap_pte(kvm, ptep, gpa, shift, NULL, gp->shadow_lpid); in kvmhv_remove_nest_rmap()
991 unsigned long gpa, unsigned long hpa, in kvmhv_remove_nest_rmap_range() argument
999 gfn = (gpa >> PAGE_SHIFT) - memslot->base_gfn; in kvmhv_remove_nest_rmap_range()
[all …]
e500.h
255 gpa_t gpa; in tlbe_is_host_safe() local
267 gpa = get_tlb_raddr(tlbe); in tlbe_is_host_safe()
268 if (!gfn_to_memslot(vcpu->kvm, gpa >> PAGE_SHIFT)) in tlbe_is_host_safe()
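
Almost every hit above converts between a guest physical address (gpa) and a guest frame number (gfn) by shifting the page offset out, i.e. gfn = gpa >> PAGE_SHIFT. A standalone illustration, assuming 4 KiB pages (gpa_to_gfn()/gfn_to_gpa() here are local stand-ins for the kernel helpers of the same names):

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12                    /* assume 4 KiB pages */
    #define PAGE_SIZE  (1ULL << PAGE_SHIFT)
    #define PAGE_MASK  (~(PAGE_SIZE - 1))

    typedef uint64_t gpa_t;
    typedef uint64_t gfn_t;

    /* Local stand-ins for the kernel's gpa_to_gfn()/gfn_to_gpa(). */
    static gfn_t gpa_to_gfn(gpa_t gpa) { return gpa >> PAGE_SHIFT; }
    static gpa_t gfn_to_gpa(gfn_t gfn) { return gfn << PAGE_SHIFT; }

    int main(void)
    {
        gpa_t gpa = 0x12345678;
        /* The frame number indexes memslots; the low bits are the
         * byte offset within the page. */
        printf("gfn=%#llx page_base=%#llx offset=%#llx\n",
               (unsigned long long)gpa_to_gfn(gpa),
               (unsigned long long)gfn_to_gpa(gpa_to_gfn(gpa)),
               (unsigned long long)(gpa & ~PAGE_MASK));
        return 0;
    }
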
/arch/s390/kvm/
gaccess.h
152 unsigned long gpa = gra + kvm_s390_get_prefix(vcpu); in write_guest_lc() local
154 return kvm_write_guest(vcpu->kvm, gpa, data, len); in write_guest_lc()
178 unsigned long gpa = gra + kvm_s390_get_prefix(vcpu); in read_guest_lc() local
180 return kvm_read_guest(vcpu->kvm, gpa, data, len); in read_guest_lc()
190 u8 ar, unsigned long *gpa, enum gacc_mode mode);
310 int write_guest_abs(struct kvm_vcpu *vcpu, unsigned long gpa, void *data, in write_guest_abs() argument
313 return kvm_write_guest(vcpu->kvm, gpa, data, len); in write_guest_abs()
333 int read_guest_abs(struct kvm_vcpu *vcpu, unsigned long gpa, void *data, in read_guest_abs() argument
336 return kvm_read_guest(vcpu->kvm, gpa, data, len); in read_guest_abs()
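
read_guest_lc() and write_guest_lc() above turn a guest real address inside the lowcore into an absolute one by adding the vcpu's prefix before the generic guest accessors run. A sketch of just that additive step (full s390 prefixing also maps the prefix area back to zero, which these hits do not show):

    #include <stdint.h>

    /* As in write_guest_lc(): a guest-real lowcore address becomes a
     * guest-absolute address (a gpa) by adding the vcpu's prefix. */
    static uint64_t lowcore_gra_to_gpa(uint64_t gra, uint64_t prefix)
    {
        return gra + prefix;
    }
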
vsie.c
646 static int pin_guest_page(struct kvm *kvm, gpa_t gpa, hpa_t *hpa) in pin_guest_page() argument
650 page = gfn_to_page(kvm, gpa_to_gfn(gpa)); in pin_guest_page()
653 *hpa = (hpa_t) page_to_virt(page) + (gpa & ~PAGE_MASK); in pin_guest_page()
658 static void unpin_guest_page(struct kvm *kvm, gpa_t gpa, hpa_t hpa) in unpin_guest_page() argument
662 mark_page_dirty(kvm, gpa_to_gfn(gpa)); in unpin_guest_page()
727 gpa_t gpa; in pin_blocks() local
730 gpa = READ_ONCE(scb_o->scaol) & ~0xfUL; in pin_blocks()
732 gpa |= (u64) READ_ONCE(scb_o->scaoh) << 32; in pin_blocks()
733 if (gpa) { in pin_blocks()
734 if (gpa < 2 * PAGE_SIZE) in pin_blocks()
[all …]
gaccess.c
588 static int deref_table(struct kvm *kvm, unsigned long gpa, unsigned long *val) in deref_table() argument
590 return kvm_read_guest(kvm, gpa, val, sizeof(*val)); in deref_table()
615 unsigned long *gpa, const union asce asce, in guest_translate() argument
774 *gpa = raddr.addr; in guest_translate()
834 unsigned long _len, nr_pages, gpa, idx; in access_guest() local
858 gpa = *(pages + idx) + (ga & ~PAGE_MASK); in access_guest()
859 _len = min(PAGE_SIZE - (gpa & ~PAGE_MASK), len); in access_guest()
861 rc = kvm_write_guest(vcpu->kvm, gpa, data, _len); in access_guest()
863 rc = kvm_read_guest(vcpu->kvm, gpa, data, _len); in access_guest()
878 unsigned long _len, gpa; in access_guest_real() local
[all …]
/arch/x86/include/asm/uv/
uv_hub.h
460 uv_gpa_in_mmr_space(unsigned long gpa) in uv_gpa_in_mmr_space() argument
462 return (gpa >> 62) == 0x3UL; in uv_gpa_in_mmr_space()
466 static inline unsigned long uv_gpa_to_soc_phys_ram(unsigned long gpa) in uv_gpa_to_soc_phys_ram() argument
474 gpa = ((gpa << uv_hub_info->m_shift) >> uv_hub_info->m_shift) | in uv_gpa_to_soc_phys_ram()
475 ((gpa >> uv_hub_info->n_lshift) << uv_hub_info->m_val); in uv_gpa_to_soc_phys_ram()
477 paddr = gpa & uv_hub_info->gpa_mask; in uv_gpa_to_soc_phys_ram()
484 static inline unsigned long uv_gpa_to_gnode(unsigned long gpa) in uv_gpa_to_gnode() argument
489 return gpa >> n_lshift; in uv_gpa_to_gnode()
491 return uv_gam_range(gpa)->nasid >> 1; in uv_gpa_to_gnode()
495 static inline int uv_gpa_to_pnode(unsigned long gpa) in uv_gpa_to_pnode() argument
[all …]
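
The UV hub helpers above decode "global" physical addresses by bit position; uv_gpa_in_mmr_space(), for instance, treats an address whose top two bits are both set as MMR space. A sketch of that one test, taken from the hit at line 462 (the surrounding hub-specific shifts are not modeled):

    #include <stdbool.h>
    #include <stdint.h>

    /* Mirrors uv_gpa_in_mmr_space(): the top two bits of a UV global
     * address select MMR (memory-mapped register) space when both set. */
    static bool gpa_in_mmr_space(uint64_t gpa)
    {
        return (gpa >> 62) == 0x3ULL;
    }
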
/arch/x86/kvm/
cpuid.h
41 static inline bool kvm_vcpu_is_legal_gpa(struct kvm_vcpu *vcpu, gpa_t gpa) in kvm_vcpu_is_legal_gpa() argument
43 return !(gpa & vcpu->arch.reserved_gpa_bits); in kvm_vcpu_is_legal_gpa()
46 static inline bool kvm_vcpu_is_illegal_gpa(struct kvm_vcpu *vcpu, gpa_t gpa) in kvm_vcpu_is_illegal_gpa() argument
48 return !kvm_vcpu_is_legal_gpa(vcpu, gpa); in kvm_vcpu_is_illegal_gpa()
52 gpa_t gpa, gpa_t alignment) in kvm_vcpu_is_legal_aligned_gpa() argument
54 return IS_ALIGNED(gpa, alignment) && kvm_vcpu_is_legal_gpa(vcpu, gpa); in kvm_vcpu_is_legal_aligned_gpa()
57 static inline bool page_address_valid(struct kvm_vcpu *vcpu, gpa_t gpa) in page_address_valid() argument
59 return kvm_vcpu_is_legal_aligned_gpa(vcpu, gpa, PAGE_SIZE); in page_address_valid()
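
kvm_vcpu_is_legal_gpa() above rejects any gpa with a bit set above the guest's physical address width. A standalone sketch of the same reserved-bits idea (the explicit maxphyaddr parameter is an illustrative substitute for the precomputed vcpu->arch.reserved_gpa_bits):

    #include <stdbool.h>
    #include <stdint.h>

    /* Bits at or above the guest's physical-address width are reserved.
     * Assumes maxphyaddr < 64, e.g. 48 -> reserved = ~((1ULL<<48)-1). */
    static uint64_t reserved_gpa_bits(unsigned int maxphyaddr)
    {
        return ~((1ULL << maxphyaddr) - 1);
    }

    static bool gpa_is_legal(uint64_t gpa, unsigned int maxphyaddr)
    {
        /* Same shape as kvm_vcpu_is_legal_gpa(): legal iff no
         * reserved bit is set. */
        return !(gpa & reserved_gpa_bits(maxphyaddr));
    }
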
xen.c
26 gpa_t gpa = gfn_to_gpa(gfn); in kvm_xen_shared_info_init() local
57 kvm_write_wall_clock(kvm, gpa + wc_ofs, sec_hi_ofs - wc_ofs); in kvm_xen_shared_info_init()
109 kvm_gfn_to_hva_cache_init(v->kvm, ghc, ghc->gpa, ghc->len)) in kvm_xen_update_runstate_guest()
204 mark_page_dirty_in_slot(v->kvm, ghc->memslot, ghc->gpa >> PAGE_SHIFT); in kvm_xen_update_runstate_guest()
353 if (data->u.gpa == GPA_INVALID) { in kvm_xen_vcpu_set_attr()
360 if ((data->u.gpa & ~PAGE_MASK) + sizeof(struct vcpu_info) > PAGE_SIZE) { in kvm_xen_vcpu_set_attr()
367 data->u.gpa, in kvm_xen_vcpu_set_attr()
376 if (data->u.gpa == GPA_INVALID) { in kvm_xen_vcpu_set_attr()
383 if ((data->u.gpa & ~PAGE_MASK) + sizeof(struct pvclock_vcpu_time_info) > PAGE_SIZE) { in kvm_xen_vcpu_set_attr()
390 data->u.gpa, in kvm_xen_vcpu_set_attr()
[all …]
x86.c
3175 gpa_t gpa = data & ~0x3f; in kvm_pv_enable_async_pf() local
3200 if (kvm_gfn_to_hva_cache_init(vcpu->kvm, &vcpu->arch.apf.data, gpa, in kvm_pv_enable_async_pf()
3287 gpa_t gpa = vcpu->arch.st.msr_val & KVM_STEAL_VALID_BITS; in record_steal_time() local
3305 gpa != ghc->gpa || in record_steal_time()
3310 if (kvm_gfn_to_hva_cache_init(vcpu->kvm, ghc, gpa, sizeof(*st)) || in record_steal_time()
3377 mark_page_dirty_in_slot(vcpu->kvm, ghc->memslot, gpa_to_gfn(ghc->gpa)); in record_steal_time()
4428 gpa_t gpa = vcpu->arch.st.msr_val & KVM_STEAL_VALID_BITS; in kvm_steal_time_set_preempted() local
4456 gpa != ghc->gpa || in kvm_steal_time_set_preempted()
4466 mark_page_dirty_in_slot(vcpu->kvm, ghc->memslot, gpa_to_gfn(ghc->gpa)); in kvm_steal_time_set_preempted()
6571 gpa_t translate_nested_gpa(struct kvm_vcpu *vcpu, gpa_t gpa, u32 access, in translate_nested_gpa() argument
[all …]
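
record_steal_time() and kvm_steal_time_set_preempted() above both keep a gfn-to-hva cache and rebuild it when the cached gpa or memslot generation no longer matches. A hedged sketch of that check-then-refill shape (the struct fields and slow_translate() are illustrative stand-ins, not the kernel's struct gfn_to_hva_cache API):

    #include <stdint.h>

    /* Simplified stand-in for struct gfn_to_hva_cache. */
    struct gpa_hva_cache {
        uint64_t gpa;        /* gpa the cached translation covers */
        uint64_t hva;        /* cached host virtual address */
        uint64_t generation; /* memslot generation it was built for */
    };

    static uint64_t slots_generation;  /* bumped on memslot changes */

    /* Toy translation standing in for the real (slow) lookup. */
    static uint64_t slow_translate(uint64_t gpa)
    {
        return 0x7f0000000000ULL + gpa;
    }

    static uint64_t cached_gpa_to_hva(struct gpa_hva_cache *c, uint64_t gpa)
    {
        /* Same shape as record_steal_time(): refill on a gpa or
         * generation mismatch, otherwise reuse the cached hva. */
        if (c->gpa != gpa || c->generation != slots_generation) {
            c->gpa = gpa;
            c->hva = slow_translate(gpa);
            c->generation = slots_generation;
        }
        return c->hva;
    }
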
/arch/arm64/kvm/
hypercalls.c
66 gpa_t gpa; in kvm_hvc_call_handler() local
131 gpa = kvm_init_stolen_time(vcpu); in kvm_hvc_call_handler()
132 if (gpa != GPA_INVALID) in kvm_hvc_call_handler()
133 val[0] = gpa; in kvm_hvc_call_handler()
/arch/mips/kvm/
mmu.c
450 gpa_t gpa = range->start << PAGE_SHIFT; in kvm_set_spte_gfn() local
452 pte_t *gpa_pte = kvm_mips_pte_for_gpa(kvm, NULL, gpa); in kvm_set_spte_gfn()
485 gpa_t gpa = range->start << PAGE_SHIFT; in kvm_test_age_gfn() local
486 pte_t *gpa_pte = kvm_mips_pte_for_gpa(kvm, NULL, gpa); in kvm_test_age_gfn()
511 static int _kvm_mips_map_page_fast(struct kvm_vcpu *vcpu, unsigned long gpa, in _kvm_mips_map_page_fast() argument
516 gfn_t gfn = gpa >> PAGE_SHIFT; in _kvm_mips_map_page_fast()
525 ptep = kvm_mips_pte_for_gpa(kvm, NULL, gpa); in _kvm_mips_map_page_fast()
587 static int kvm_mips_map_page(struct kvm_vcpu *vcpu, unsigned long gpa, in kvm_mips_map_page() argument
593 gfn_t gfn = gpa >> PAGE_SHIFT; in kvm_mips_map_page()
603 err = _kvm_mips_map_page_fast(vcpu, gpa, write_fault, out_entry, in kvm_mips_map_page()
[all …]
/arch/x86/xen/
mmu_hvm.c
53 a.gpa = __pa(mm->pgd); in xen_hvm_exit_mmap()
64 a.gpa = 0x00; in is_pagetable_dying_supported()
/arch/x86/kvm/vmx/
sgx.c
75 gpa_t *gpa) in sgx_gva_to_gpa() argument
80 *gpa = kvm_mmu_gva_to_gpa_write(vcpu, gva, &ex); in sgx_gva_to_gpa()
82 *gpa = kvm_mmu_gva_to_gpa_read(vcpu, gva, &ex); in sgx_gva_to_gpa()
84 if (*gpa == UNMAPPED_GVA) { in sgx_gva_to_gpa()
92 static int sgx_gpa_to_hva(struct kvm_vcpu *vcpu, gpa_t gpa, unsigned long *hva) in sgx_gpa_to_hva() argument
94 *hva = kvm_vcpu_gfn_to_hva(vcpu, PFN_DOWN(gpa)); in sgx_gpa_to_hva()
96 sgx_handle_emulation_failure(vcpu, gpa, 1); in sgx_gpa_to_hva()
100 *hva |= gpa & ~PAGE_MASK; in sgx_gpa_to_hva()
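
sgx_gpa_to_hva() above builds a host virtual address in two parts: the hva of the page backing the guest frame, then the intra-page offset carried over from the gpa. A standalone sketch (lookup_page_hva() is a toy stand-in for kvm_vcpu_gfn_to_hva()):

    #include <stdint.h>

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1ULL << PAGE_SHIFT)
    #define PAGE_MASK  (~(PAGE_SIZE - 1))

    /* Toy stand-in for kvm_vcpu_gfn_to_hva(): identity-map guest
     * frames at a fixed host base (purely illustrative). */
    static uint64_t lookup_page_hva(uint64_t gfn)
    {
        return 0x7f0000000000ULL + (gfn << PAGE_SHIFT);
    }

    static uint64_t gpa_to_hva(uint64_t gpa)
    {
        uint64_t hva = lookup_page_hva(gpa >> PAGE_SHIFT);
        /* Re-attach the intra-page offset, as sgx_gpa_to_hva() does. */
        return hva | (gpa & ~PAGE_MASK);
    }
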
vmx_ops.h
20 void invept_error(unsigned long ext, u64 eptp, gpa_t gpa);
268 static inline void __invept(unsigned long ext, u64 eptp, gpa_t gpa) in __invept() argument
271 u64 eptp, gpa; in __invept() member
272 } operand = {eptp, gpa}; in __invept()
274 vmx_asm2(invept, "r"(ext), "m"(operand), ext, eptp, gpa); in __invept()
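
__invept() above builds its operand on the stack as two quadwords passed by reference to the INVEPT instruction. Restated as a named struct for readability (the name invept_desc is ours; per the Intel SDM the descriptor's second quadword is reserved and must be zero):

    #include <stdint.h>

    /* In-memory INVEPT descriptor, matching the anonymous struct in
     * __invept(): 128 bits, EPT pointer first, then the gpa slot,
     * which current invalidation types leave as zero. */
    struct invept_desc {
        uint64_t eptp;
        uint64_t gpa;
    };
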
/arch/x86/include/asm/
kvm_page_track.h
35 void (*track_write)(struct kvm_vcpu *vcpu, gpa_t gpa, const u8 *new,
71 void kvm_page_track_write(struct kvm_vcpu *vcpu, gpa_t gpa, const u8 *new,
/arch/arm64/include/asm/
kvm_mmu.h
239 gpa_t gpa, void *data, unsigned long len) in kvm_read_guest_lock() argument
242 int ret = kvm_read_guest(kvm, gpa, data, len); in kvm_read_guest_lock()
249 static inline int kvm_write_guest_lock(struct kvm *kvm, gpa_t gpa, in kvm_write_guest_lock() argument
253 int ret = kvm_write_guest(kvm, gpa, data, len); in kvm_write_guest_lock()
/arch/powerpc/include/asm/
ultravisor.h
75 static inline int uv_page_inval(u64 lpid, u64 gpa, u64 page_shift) in uv_page_inval() argument
77 return ucall_norets(UV_PAGE_INVAL, lpid, gpa, page_shift); in uv_page_inval()
kvm_book3s.h
163 unsigned long gpa, gva_t ea, int is_store);
195 extern void kvmppc_unmap_pte(struct kvm *kvm, pte_t *pte, unsigned long gpa,
200 bool writing, unsigned long gpa,
203 unsigned long gpa,
236 extern kvm_pfn_t kvmppc_gpa_to_pfn(struct kvm_vcpu *vcpu, gpa_t gpa,
249 unsigned long gpa, bool dirty);
iommu.h
301 unsigned long gpa);
307 #define iommu_tce_put_param_check(tbl, ioba, gpa) \ argument
311 iommu_tce_check_gpa((tbl)->it_page_shift, (gpa)))
/arch/ia64/include/asm/uv/
uv_hub.h
166 static inline void *uv_va(unsigned long gpa) in uv_va() argument
168 return __va(gpa & uv_hub_info->gpa_mask); in uv_va()
/arch/arm/boot/dts/
s3c64xx-pinctrl.dtsi
19 gpa: gpa-gpio-bank { label
135 samsung,pins = "gpa-0", "gpa-1";
141 samsung,pins = "gpa-2", "gpa-3";
147 samsung,pins = "gpa-4", "gpa-5";
153 samsung,pins = "gpa-6", "gpa-7";
/arch/x86/kvm/mmu/
mmu.c
306 u64 gpa = spte & shadow_nonpresent_or_rsvd_lower_gfn_mask; in get_mmio_spte_gfn() local
308 gpa |= (spte >> SHADOW_NONPRESENT_OR_RSVD_MASK_LEN) in get_mmio_spte_gfn()
311 return gpa >> PAGE_SHIFT; in get_mmio_spte_gfn()
334 static gpa_t translate_gpa(struct kvm_vcpu *vcpu, gpa_t gpa, u32 access, in translate_gpa() argument
337 return gpa; in translate_gpa()
2558 gpa_t gpa; in kvm_mmu_unprotect_page_virt() local
2564 gpa = kvm_mmu_gva_to_gpa_read(vcpu, gva, NULL); in kvm_mmu_unprotect_page_virt()
2566 r = kvm_mmu_unprotect_page(vcpu->kvm, gpa >> PAGE_SHIFT); in kvm_mmu_unprotect_page_virt()
2978 static int __direct_map(struct kvm_vcpu *vcpu, gpa_t gpa, u32 error_code, in __direct_map() argument
2989 gfn_t gfn = gpa >> PAGE_SHIFT; in __direct_map()
[all …]
spte.c
58 u64 gpa = gfn << PAGE_SHIFT; in make_mmio_spte() local
64 spte |= gpa | shadow_nonpresent_or_rsvd_mask; in make_mmio_spte()
65 spte |= (gpa & shadow_nonpresent_or_rsvd_mask) in make_mmio_spte()
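
make_mmio_spte() here and get_mmio_spte_gfn() (mmu.c hits 306–311 above) are inverses: the gpa is stored in a not-present SPTE, the gpa bits that would collide with the reserved-bit mask are stashed SHADOW_NONPRESENT_OR_RSVD_MASK_LEN bits higher, and decode reassembles them. A simplified round trip (the 5-bit mask width and its position at bits 52–56 are illustrative assumptions, not the live kernel values):

    #include <assert.h>
    #include <stdint.h>

    #define PAGE_SHIFT 12
    #define MASK_LEN 5   /* assumed SHADOW_NONPRESENT_OR_RSVD_MASK_LEN */
    /* Assume the reserved-bit mask occupies bits 52..56 (illustrative),
     * so gpas must fit below bit 57 for this sketch. */
    static const uint64_t rsvd_mask = 0x1FULL << 52;
    static const uint64_t lower_gfn_mask = (1ULL << 52) - 1;

    static uint64_t make_mmio_spte(uint64_t gfn)
    {
        uint64_t gpa = gfn << PAGE_SHIFT;
        uint64_t spte = gpa | rsvd_mask;        /* mark as MMIO */
        /* gpa bits colliding with the mask move MASK_LEN bits up. */
        spte |= (gpa & rsvd_mask) << MASK_LEN;
        return spte;
    }

    static uint64_t get_mmio_spte_gfn(uint64_t spte)
    {
        uint64_t gpa = spte & lower_gfn_mask;
        gpa |= (spte >> MASK_LEN) & rsvd_mask;  /* restore moved bits */
        return gpa >> PAGE_SHIFT;
    }

    int main(void)
    {
        uint64_t gfn = (1ULL << 41) | 0x1234;   /* exercises the stash */
        assert(get_mmio_spte_gfn(make_mmio_spte(gfn)) == gfn);
        return 0;
    }
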
