
Searched refs: X86_CR0_PG (Results 1 – 16 of 16), sorted by relevance

/arch/x86/platform/pvh/
head.S:88     mov $(X86_CR0_PG | X86_CR0_PE), %eax
head.S:129    or $(X86_CR0_PG | X86_CR0_PE), %eax
head.S:141    and $~X86_CR0_PG, %eax
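
The head.S hits above show both halves of the pattern that also recurs in the realmode, relocate_kernel and boot/compressed hits further down: paging is turned on by setting X86_CR0_PG (together with X86_CR0_PE) in CR0, and turned off by masking X86_CR0_PG back out. A hedged C sketch of the same read-modify-write, purely illustrative and not the pvh code itself (the kernel does this in assembly, or via its native_read_cr0()/native_write_cr0() helpers):

    #define X86_CR0_PE (1UL << 0)   /* protection (protected mode) enable */
    #define X86_CR0_PG (1UL << 31)  /* paging enable */

    static inline unsigned long read_cr0(void)
    {
            unsigned long cr0;
            asm volatile("mov %%cr0, %0" : "=r" (cr0));
            return cr0;
    }

    static inline void write_cr0(unsigned long cr0)
    {
            asm volatile("mov %0, %%cr0" : : "r" (cr0) : "memory");
    }

    static void enable_paging(void)
    {
            /* CR3 must already point at valid page tables, and EFER.LME
             * must be set first if the goal is long mode. */
            write_cr0(read_cr0() | X86_CR0_PG | X86_CR0_PE);
    }

    static void disable_paging(void)
    {
            /* Clearing CR0.PG is only legal outside long mode. */
            write_cr0(read_cr0() & ~X86_CR0_PG);
    }
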
/arch/x86/include/uapi/asm/
processor-flags.h:72    #define X86_CR0_PG _BITUL(X86_CR0_PG_BIT)   (macro definition)
processor-flags.h:164   X86_CR0_PG)
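
The uapi header names the bit position first and derives the mask from it with _BITUL(). A hedged sketch of roughly how that expands (CR0.PG is architecturally bit 31, so the mask is 0x80000000; the real uapi plumbing uses _UL()/_AC() so the constant stays assembler-safe):

    #define X86_CR0_PG_BIT  31                      /* Paging */
    #define _BITUL(x)       (1UL << (x))            /* simplified */
    #define X86_CR0_PG      _BITUL(X86_CR0_PG_BIT)  /* 0x80000000 */
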
/arch/x86/realmode/rm/
reboot.S:32           andl $~X86_CR0_PG, %eax
trampoline_64.S:130   movl $(X86_CR0_PG | X86_CR0_WP | X86_CR0_PE), %eax
/arch/x86/kernel/
relocate_kernel_32.S:113   andl $~(X86_CR0_PG | X86_CR0_AM | X86_CR0_WP | X86_CR0_TS | X86_CR0_EM), %eax
relocate_kernel_32.S:187   orl $X86_CR0_PG, %eax
relocate_kernel_64.S:124   orl $(X86_CR0_PG | X86_CR0_PE), %eax
head_32.S:216              movl $(CR0_STATE & ~X86_CR0_PG),%eax
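
head_32.S takes a different approach from the read-modify-write sequences above: it loads a fully specified CR0 value, CR0_STATE with the paging bit stripped, while relocate_kernel_32.S clears a whole group of bits with one mask before jumping into the identity-mapped relocation code. A hedged sketch of the CR0_STATE idea (the authoritative definition lives in processor-flags.h; the bit positions used here are the architectural ones):

    #define X86_CR0_PE (1UL << 0)
    #define X86_CR0_MP (1UL << 1)
    #define X86_CR0_ET (1UL << 4)
    #define X86_CR0_NE (1UL << 5)
    #define X86_CR0_WP (1UL << 16)
    #define X86_CR0_AM (1UL << 18)
    #define X86_CR0_PG (1UL << 31)

    /* A known-good protected-mode CR0 with paging included ... */
    #define CR0_STATE (X86_CR0_PE | X86_CR0_MP | X86_CR0_ET | X86_CR0_NE | \
                       X86_CR0_WP | X86_CR0_AM | X86_CR0_PG)

    /* ... and the value loaded while the page tables are not yet ready. */
    static const unsigned long early_cr0 = CR0_STATE & ~X86_CR0_PG;
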
/arch/x86/boot/compressed/
head_64.S:221   movl $(X86_CR0_PG | X86_CR0_PE), %eax /* Enable Paging and Protected mode */
head_64.S:655   movl $(X86_CR0_PG | X86_CR0_PE), %eax
/arch/x86/kvm/vmx/
nested.h:269    fixed0 &= ~(X86_CR0_PE | X86_CR0_PG);  in nested_guest_cr0_valid()
nested.c:2799   if (CC((vmcs12->guest_cr0 & (X86_CR0_PG | X86_CR0_PE)) == X86_CR0_PG))  in nested_vmx_check_guest_state()
nested.c:2803   CC(ia32e && !(vmcs12->guest_cr0 & X86_CR0_PG)))  in nested_vmx_check_guest_state()
nested.c:2819   CC(((vmcs12->guest_cr0 & X86_CR0_PG) &&  in nested_vmx_check_guest_state()
nested.c:6030   #define VMXON_CR0_ALWAYSON (X86_CR0_PE | X86_CR0_PG | X86_CR0_NE)  in nested_vmx_setup_ctls_msrs()
vmx.c:134       X86_CR0_WP | X86_CR0_PG | X86_CR0_PE)
vmx.c:2994      if (!(cr0 & X86_CR0_PG)) {  in ept_update_paging_mode_cr0()
vmx.c:3032      if (!is_paging(vcpu) && (cr0 & X86_CR0_PG))  in vmx_set_cr0()
vmx.c:3034      if (is_paging(vcpu) && !(cr0 & X86_CR0_PG))  in vmx_set_cr0()
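
The nested-VMX hits encode two architectural rules: CR0.PG set while CR0.PE is clear is never a valid combination, and an IA-32e (64-bit) guest must run with paging enabled. A hedged sketch of such a plausibility check; this is not KVM code, and the real nested_vmx_check_guest_state() additionally consults the IA32_VMX_CR0_FIXED0/FIXED1 MSRs and the unrestricted-guest relaxation shown in the nested.h hit:

    #include <stdbool.h>

    #define X86_CR0_PE (1UL << 0)
    #define X86_CR0_PG (1UL << 31)

    /* Hypothetical helper, not a KVM function. */
    static bool guest_cr0_plausible(unsigned long cr0, bool ia32e_guest)
    {
            /* PG without PE is architecturally invalid. */
            if ((cr0 & (X86_CR0_PG | X86_CR0_PE)) == X86_CR0_PG)
                    return false;

            /* A long-mode guest must have paging enabled. */
            if (ia32e_guest && !(cr0 & X86_CR0_PG))
                    return false;

            return true;
    }
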
/arch/x86/kvm/
x86.h:139    return likely(kvm_read_cr0_bits(vcpu, X86_CR0_PG));  in is_paging()
x86.c:765    unsigned long update_bits = X86_CR0_PG | X86_CR0_WP;  in kvm_set_cr0()
x86.c:779    if ((cr0 & X86_CR0_PG) && !(cr0 & X86_CR0_PE))  in kvm_set_cr0()
x86.c:782    if (!is_paging(vcpu) && (cr0 & X86_CR0_PG)) {  in kvm_set_cr0()
x86.c:799    if (!(cr0 & X86_CR0_PG) && kvm_read_cr4_bits(vcpu, X86_CR4_PCIDE))  in kvm_set_cr0()
x86.c:804    if ((cr0 ^ old_cr0) & X86_CR0_PG) {  in kvm_set_cr0()
x86.c:7965   cr0 = vcpu->arch.cr0 & ~(X86_CR0_PE | X86_CR0_EM | X86_CR0_TS | X86_CR0_PG);  in enter_smm()
x86.c:8927   if ((sregs->efer & EFER_LME) && (sregs->cr0 & X86_CR0_PG)) {  in kvm_valid_sregs()
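
kvm_set_cr0() vets a guest write to CR0 against the architectural rules before committing it, and the line-765/804 hits show it watching the PG and WP bits so paging state can be rebuilt when they change. A hedged sketch of the same class of checks; this is not KVM's code, which also enforces the CS.L/PAE constraints when long mode is being enabled and resets the MMU on a PG flip:

    #include <stdbool.h>

    #define X86_CR0_PE    (1UL << 0)
    #define X86_CR0_PG    (1UL << 31)
    #define X86_CR4_PAE   (1UL << 5)
    #define X86_CR4_PCIDE (1UL << 17)
    #define EFER_LME      (1UL << 8)

    /* Hypothetical validator mirroring the checks listed above. */
    static bool cr0_write_allowed(unsigned long old_cr0, unsigned long new_cr0,
                                  unsigned long cr4, unsigned long efer)
    {
            /* Paging requires protected mode. */
            if ((new_cr0 & X86_CR0_PG) && !(new_cr0 & X86_CR0_PE))
                    return false;

            /* Turning paging on with EFER.LME set (i.e. entering long mode)
             * requires PAE paging to be enabled first. */
            if (!(old_cr0 & X86_CR0_PG) && (new_cr0 & X86_CR0_PG) &&
                (efer & EFER_LME) && !(cr4 & X86_CR4_PAE))
                    return false;

            /* Paging cannot be switched off while PCIDs are enabled. */
            if (!(new_cr0 & X86_CR0_PG) && (cr4 & X86_CR4_PCIDE))
                    return false;

            return true;
    }
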
emulate.c:2688   ctxt->ops->set_cr(ctxt, 0, cr0 & ~(X86_CR0_PG | X86_CR0_PE));  in em_rsm()
emulate.c:4308   if (((new_val & X86_CR0_PG) && !(new_val & X86_CR0_PE)) ||  in check_cr_write()
emulate.c:4315   if ((new_val & X86_CR0_PG) && (efer & EFER_LME) &&  in check_cr_write()
svm.c:2618       if (!is_paging(vcpu) && (cr0 & X86_CR0_PG)) {  in svm_set_cr0()
svm.c:2623       if (is_paging(vcpu) && !(cr0 & X86_CR0_PG)) {  in svm_set_cr0()
svm.c:2632       cr0 |= X86_CR0_PG | X86_CR0_WP;  in svm_set_cr0()
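
The svm_set_cr0() hit at line 2632 is the shadow-paging case: when nested paging (NPT) is not in use, the CR0 actually programmed into the VMCB always keeps PG and WP set, because the host's shadow page tables must stay active even while the guest believes paging is off. A hedged sketch of that split between guest-visible and hardware CR0 (struct and function names here are made up, not KVM's):

    #define X86_CR0_WP (1UL << 16)
    #define X86_CR0_PG (1UL << 31)

    struct vcpu_paging_state {
            unsigned long guest_cr0;   /* value the guest reads back */
            unsigned long hw_cr0;      /* value the hardware really runs with */
            int npt_enabled;
    };

    /* Hypothetical helper, not KVM's svm_set_cr0(). */
    static void set_guest_cr0(struct vcpu_paging_state *v, unsigned long cr0)
    {
            v->guest_cr0 = cr0;
            v->hw_cr0 = cr0;

            /* With shadow paging, hardware paging must stay on even if the
             * guest clears CR0.PG; WP keeps the shadow tables consistent. */
            if (!v->npt_enabled)
                    v->hw_cr0 |= X86_CR0_PG | X86_CR0_WP;
    }
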
/arch/x86/include/asm/
kvm_host.h:85   | X86_CR0_NW | X86_CR0_CD | X86_CR0_PG))
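
The kvm_host.h hit is the tail of a multi-line mask that enumerates every defined CR0 bit (most likely CR0_RESERVED_BITS, expressed as the complement of that set); guest writes that touch anything outside it are rejected. A hedged sketch of that reserved-bits test, with the architectural bit positions spelled out:

    #include <stdbool.h>

    #define X86_CR0_PE (1UL << 0)
    #define X86_CR0_MP (1UL << 1)
    #define X86_CR0_EM (1UL << 2)
    #define X86_CR0_TS (1UL << 3)
    #define X86_CR0_ET (1UL << 4)
    #define X86_CR0_NE (1UL << 5)
    #define X86_CR0_WP (1UL << 16)
    #define X86_CR0_AM (1UL << 18)
    #define X86_CR0_NW (1UL << 29)
    #define X86_CR0_CD (1UL << 30)
    #define X86_CR0_PG (1UL << 31)

    #define CR0_DEFINED_BITS                                          \
            (X86_CR0_PE | X86_CR0_MP | X86_CR0_EM | X86_CR0_TS |      \
             X86_CR0_ET | X86_CR0_NE | X86_CR0_WP | X86_CR0_AM |      \
             X86_CR0_NW | X86_CR0_CD | X86_CR0_PG)

    /* Hypothetical check: true if the value sets any reserved CR0 bit. */
    static bool cr0_sets_reserved_bits(unsigned long cr0)
    {
            return (cr0 & ~CR0_DEFINED_BITS) != 0;
    }
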