Searched refs:X86_CR0_PG (Results 1 – 19 of 19) sorted by relevance
/arch/x86/platform/pvh/
  head.S
     87: mov $(X86_CR0_PG | X86_CR0_PE), %eax
    118: or $(X86_CR0_PG | X86_CR0_PE), %eax
    130: and $~X86_CR0_PG, %eax
/arch/x86/include/uapi/asm/
  processor-flags.h
     72: #define X86_CR0_PG _BITUL(X86_CR0_PG_BIT)   (macro definition)
    164: X86_CR0_PG)
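The processor-flags.h hit above is the definition site: X86_CR0_PG is the architectural paging-enable bit (bit 31) of CR0. A minimal sketch of what the macro resolves to, assuming the usual _BITUL() helper (an unsigned-long shift of 1 by the bit index):

    /* Sketch of the expansion; X86_CR0_PG_BIT is 31 in the same header. */
    #define X86_CR0_PG_BIT  31                         /* Paging enable bit in CR0 */
    #define X86_CR0_PG      (1UL << X86_CR0_PG_BIT)    /* what _BITUL(X86_CR0_PG_BIT) yields */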
/arch/x86/realmode/rm/
  reboot.S
     32: andl $~X86_CR0_PG, %eax
  trampoline_64.S
    152: movl $(X86_CR0_PG | X86_CR0_WP | X86_CR0_PE), %eax
/arch/x86/kernel/
  relocate_kernel_32.S
    117: andl $~(X86_CR0_PG | X86_CR0_AM | X86_CR0_WP | X86_CR0_TS | X86_CR0_EM), %eax
    194: orl $X86_CR0_PG, %eax
  relocate_kernel_64.S
    131: orl $(X86_CR0_PG | X86_CR0_PE), %eax
  head_32.S
    180: movl $(CR0_STATE & ~X86_CR0_PG),%eax
/arch/x86/boot/compressed/
  head_64.S
    292: movl $(X86_CR0_PG | X86_CR0_PE), %eax   /* Enable Paging and Protected mode */
    682: movl $(X86_CR0_PG | X86_CR0_PE), %eax
    951: movl $(X86_CR0_PG | X86_CR0_PE), %ecx   /* Enable Paging and Protected mode */
/arch/x86/kvm/
  mmu.h
     51: #define KVM_MMU_CR0_ROLE_BITS (X86_CR0_PG | X86_CR0_WP)
  x86.h
    201: return likely(kvm_read_cr0_bits(vcpu, X86_CR0_PG));  in is_paging()
  x86.c
    889: if ((cr0 & X86_CR0_PG) && !(cr0 & X86_CR0_PE))  in kvm_is_valid_cr0()
    897: if ((cr0 ^ old_cr0) & X86_CR0_PG) {  in kvm_post_set_cr0()
    915: unsigned long pdptr_bits = X86_CR0_CD | X86_CR0_NW | X86_CR0_PG;  in kvm_set_cr0()
    927: (cr0 & X86_CR0_PG)) {  in kvm_set_cr0()
    937: if (!(vcpu->arch.efer & EFER_LME) && (cr0 & X86_CR0_PG) &&  in kvm_set_cr0()
    942: if (!(cr0 & X86_CR0_PG) && kvm_read_cr4_bits(vcpu, X86_CR4_PCIDE))  in kvm_set_cr0()
   9448: cr0 = vcpu->arch.cr0 & ~(X86_CR0_PE | X86_CR0_EM | X86_CR0_TS | X86_CR0_PG);  in enter_smm()
  10596: if ((sregs->efer & EFER_LME) && (sregs->cr0 & X86_CR0_PG)) {  in kvm_is_valid_sregs()
  10719: bool pae = (sregs2->cr0 & X86_CR0_PG) && (sregs2->cr4 & X86_CR4_PAE) &&  in __set_sregs2()
  11255: if (old_cr0 & X86_CR0_PG)  in kvm_vcpu_reset()
  emulate.c
   2610: ctxt->ops->set_cr(ctxt, 0, cr0 & ~(X86_CR0_PG | X86_CR0_PE));  in em_rsm()
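The x86.c hit at line 889 reflects the architectural rule that paging cannot be enabled without protected mode: CR0.PG = 1 with CR0.PE = 0 is invalid, so such a CR0 value is rejected. A condensed sketch of that check (the helper name is illustrative, not KVM's):

    #include <asm/processor-flags.h>   /* X86_CR0_PG, X86_CR0_PE */
    #include <linux/types.h>           /* bool */

    /* True when the proposed CR0 sets PG without PE, which is architecturally invalid. */
    static inline bool cr0_sets_pg_without_pe(unsigned long cr0)
    {
            return (cr0 & X86_CR0_PG) && !(cr0 & X86_CR0_PE);
    }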
/arch/x86/kvm/vmx/
  nested.h
    265: fixed0 &= ~(X86_CR0_PE | X86_CR0_PG);  in nested_guest_cr0_valid()
  nested.c
   3020: if (CC((vmcs12->guest_cr0 & (X86_CR0_PG | X86_CR0_PE)) == X86_CR0_PG))  in nested_vmx_check_guest_state()
   3024: CC(ia32e && !(vmcs12->guest_cr0 & X86_CR0_PG)))  in nested_vmx_check_guest_state()
   3040: CC(((vmcs12->guest_cr0 & X86_CR0_PG) &&  in nested_vmx_check_guest_state()
   6786: #define VMXON_CR0_ALWAYSON (X86_CR0_PE | X86_CR0_PG | X86_CR0_NE)  in nested_vmx_setup_ctls_msrs()
  vmx.c
    139: (KVM_VM_CR0_ALWAYS_ON_UNRESTRICTED_GUEST | X86_CR0_PG | X86_CR0_PE)
   3113: old_cr0_pg = kvm_read_cr0_bits(vcpu, X86_CR0_PG);  in vmx_set_cr0()
   3137: if (!old_cr0_pg && (cr0 & X86_CR0_PG))  in vmx_set_cr0()
   3139: else if (old_cr0_pg && !(cr0 & X86_CR0_PG))  in vmx_set_cr0()
   3169: if (!(cr0 & X86_CR0_PG)) {  in vmx_set_cr0()
   3181: if ((old_cr0_pg ^ cr0) & X86_CR0_PG)  in vmx_set_cr0()
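The VMXON_CR0_ALWAYSON hit (nested.c line 6786) captures the VMX fixed-bit requirement that CR0.PE, CR0.PG and CR0.NE stay set while in VMX operation. A hypothetical mask check in the same spirit (the helper name is illustrative):

    #include <asm/processor-flags.h>   /* X86_CR0_PE, X86_CR0_PG, X86_CR0_NE */
    #include <linux/types.h>           /* bool */

    #define VMXON_CR0_ALWAYSON (X86_CR0_PE | X86_CR0_PG | X86_CR0_NE)

    /* Every "always on" bit must be set for CR0 to be acceptable under VMXON. */
    static inline bool cr0_valid_for_vmxon(unsigned long cr0)
    {
            return (cr0 & VMXON_CR0_ALWAYSON) == VMXON_CR0_ALWAYSON;
    }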
/arch/x86/kvm/svm/
  nested.c
    107: kvm_init_shadow_npt_mmu(vcpu, X86_CR0_PG, svm->vmcb01.ptr->save.cr4,  in nested_svm_init_mmu_context()
    271: if ((save->efer & EFER_LME) && (save->cr0 & X86_CR0_PG)) {  in nested_vmcb_check_cr3_cr4()
   1352: if (!(save->cr0 & X86_CR0_PG) ||  in svm_set_nested_state()
  svm.c
   1754: if (!is_paging(vcpu) && (cr0 & X86_CR0_PG)) {  in svm_set_cr0()
   1760: if (is_paging(vcpu) && !(cr0 & X86_CR0_PG)) {  in svm_set_cr0()
   1770: hcr0 |= X86_CR0_PG | X86_CR0_WP;  in svm_set_cr0()
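The svm.c hits at lines 1754 and 1760 detect CR0.PG transitions by comparing the vCPU's current paging state against the CR0 value being installed. A stripped-down sketch of that pattern (helper and parameter names are illustrative, not KVM's):

    #include <asm/processor-flags.h>   /* X86_CR0_PG */
    #include <linux/types.h>           /* bool */

    /* True when the new CR0 flips the guest between paged and unpaged mode. */
    static inline bool cr0_toggles_paging(bool guest_is_paging, unsigned long new_cr0)
    {
            return guest_is_paging != !!(new_cr0 & X86_CR0_PG);
    }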
/arch/x86/include/asm/
  kvm_host.h
    110: | X86_CR0_NW | X86_CR0_CD | X86_CR0_PG))
/arch/x86/kvm/mmu/
  mmu.c
    211: BUILD_MMU_ROLE_REGS_ACCESSOR(cr0, pg, X86_CR0_PG);