Lines matching refs: get_cpl (call sites of the kvm_x86_ops->get_cpl callback)
642 if (kvm_x86_ops->get_cpl(vcpu) <= required_cpl) in kvm_require_cpl()
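
For context, kvm_require_cpl() is the generic privilege gate used when emulating privileged operations: the check passes only if the guest's current privilege level is numerically no higher (i.e. at least as privileged) than required_cpl. A minimal sketch of the surrounding function, reconstructed from memory rather than quoted verbatim; the exact exception-queueing helper and return convention vary between kernel versions:

    /* Sketch (reconstructed, not verbatim kernel source). */
    int kvm_require_cpl(struct kvm_vcpu *vcpu, int required_cpl)
    {
    	/* CPL 0 is most privileged, so "<=" means "at least as privileged". */
    	if (kvm_x86_ops->get_cpl(vcpu) <= required_cpl)
    		return 0;
    	/* Not privileged enough: inject #GP(0) into the guest.  The queueing
    	 * variant and the failure return value are assumptions here. */
    	kvm_queue_exception_e(vcpu, GP_VECTOR, 0);
    	return 1;
    }
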
905 if (kvm_x86_ops->get_cpl(vcpu) != 0 || in kvm_set_xcr()
3608 vcpu->arch.preempted_in_kernel = !kvm_x86_ops->get_cpl(vcpu); in kvm_arch_vcpu_put()
5420 u32 access = (kvm_x86_ops->get_cpl(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_mmu_gva_to_gpa_read()
5427 u32 access = (kvm_x86_ops->get_cpl(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_mmu_gva_to_gpa_fetch()
5435 u32 access = (kvm_x86_ops->get_cpl(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_mmu_gva_to_gpa_write()
5484 u32 access = (kvm_x86_ops->get_cpl(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_fetch_guest_virt()
5509 u32 access = (kvm_x86_ops->get_cpl(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_read_guest_virt()
5530 if (!system && kvm_x86_ops->get_cpl(vcpu) == 3) in emulator_read_std()
5583 if (!system && kvm_x86_ops->get_cpl(vcpu) == 3) in emulator_write_std()
5646 u32 access = ((kvm_x86_ops->get_cpl(vcpu) == 3) ? PFERR_USER_MASK : 0) in vcpu_mmio_gva_to_gpa()
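
The run of matches from kvm_mmu_gva_to_gpa_read() down to vcpu_mmio_gva_to_gpa() all use the same idiom: if the guest is executing at CPL 3, the translation is tagged with PFERR_USER_MASK so the software page walk applies user/supervisor protection checks; at CPL 0-2 the access is treated as a supervisor access. A representative sketch modeled on kvm_mmu_gva_to_gpa_read() (the walk_mmu->gva_to_gpa call is reconstructed from memory and may differ slightly across versions):

    gpa_t kvm_mmu_gva_to_gpa_read(struct kvm_vcpu *vcpu, gva_t gva,
    			      struct x86_exception *exception)
    {
    	/* Tag the access as a user-mode access when the vCPU runs at CPL 3. */
    	u32 access = (kvm_x86_ops->get_cpl(vcpu) == 3) ? PFERR_USER_MASK : 0;

    	/* Walk the guest page tables with the computed access rights;
    	 * a fault is reported back through *exception. */
    	return vcpu->arch.walk_mmu->gva_to_gpa(vcpu, gva, access, exception);
    }

The fetch and write variants follow the same shape but additionally OR PFERR_FETCH_MASK or PFERR_WRITE_MASK into the access mask.
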
6152 return kvm_x86_ops->get_cpl(emul_to_vcpu(ctxt)); in emulator_get_cpl()
6489 if (!is_guest_mode(vcpu) && kvm_x86_ops->get_cpl(vcpu) == 0) { in handle_emulation_failure()
7233 user_mode = kvm_x86_ops->get_cpl(__this_cpu_read(current_vcpu)); in kvm_is_user_mode()
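
The kvm_is_user_mode() match belongs to the perf guest callbacks, which use the same hook to classify PMU samples taken while a guest was running. A sketch reconstructed from memory; current_vcpu is the per-CPU pointer that x86.c sets around guest entry/exit:

    static int kvm_is_user_mode(void)
    {
    	int user_mode = 3;

    	/* If a vCPU is loaded on this CPU, ask the vendor module for its CPL. */
    	if (__this_cpu_read(current_vcpu))
    		user_mode = kvm_x86_ops->get_cpl(__this_cpu_read(current_vcpu));

    	/* Anything other than CPL 0 counts as guest user mode. */
    	return user_mode != 0;
    }
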
7543 if (kvm_x86_ops->get_cpl(vcpu) != 0) { in kvm_emulate_hypercall()
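
kvm_emulate_hypercall() uses the hook to enforce that hypercalls are issued from guest kernel mode. The relevant fragment, as reconstructed (further down the function the -KVM_EPERM value ends up in the guest's RAX as the hypercall return code):

    	/* Reject hypercalls issued from guest user mode (CPL != 0). */
    	if (kvm_x86_ops->get_cpl(vcpu) != 0) {
    		ret = -KVM_EPERM;
    		goto out;
    	}
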
10248 kvm_x86_ops->get_cpl(vcpu) == 0)) in kvm_can_deliver_async_pf()
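
All of these call sites go through the get_cpl member of kvm_x86_ops, which the vendor modules (VMX and SVM) implement. Its declaration, with SVM's implementation as a concrete example, looks roughly like the sketch below (from memory, not quoted verbatim; VMX's version is more involved because it must handle real and virtual-8086 modes):

    /* arch/x86/include/asm/kvm_host.h (excerpt, sketched) */
    struct kvm_x86_ops {
    	/* ... */
    	int (*get_cpl)(struct kvm_vcpu *vcpu);	/* current privilege level, 0-3 */
    	/* ... */
    };

    /* SVM sketch: the CPL is cached in the VMCB save area by hardware. */
    static int svm_get_cpl(struct kvm_vcpu *vcpu)
    {
    	struct vmcb_save_area *save = &to_svm(vcpu)->vmcb->save;

    	return save->cpl;
    }
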