Searched refs:cr4 (Results 1 – 7 of 7) sorted by relevance

/tools/testing/selftests/kvm/x86_64/
cr4_cpuid_sync_test.c
30 uint64_t cr4; in cr4_cpuid_is_sync() local
38 cr4 = get_cr4(); in cr4_cpuid_is_sync()
40 return (!!(ecx & X86_FEATURE_OSXSAVE)) == (!!(cr4 & X86_CR4_OSXSAVE)); in cr4_cpuid_is_sync()
45 uint64_t cr4; in guest_code() local
48 cr4 = get_cr4(); in guest_code()
49 cr4 |= X86_CR4_OSXSAVE; in guest_code()
50 set_cr4(cr4); in guest_code()
100 sregs.cr4 &= ~X86_CR4_OSXSAVE; in main()
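The hits above are the core of the test: guest_code() sets CR4.OSXSAVE, and cr4_cpuid_is_sync() verifies that CPUID.1:ECX.OSXSAVE follows it. A minimal guest-side sketch of that check, assuming ring-0 execution and the architectural bit positions (CR4.OSXSAVE is bit 18, CPUID.1:ECX bit 27); the helper bodies and the CPUID_ECX_OSXSAVE name are reconstructions, not copies of the test:

    #include <stdint.h>
    #include <stdbool.h>

    #define X86_CR4_OSXSAVE   (1UL << 18)  /* CR4.OSXSAVE */
    #define CPUID_ECX_OSXSAVE (1U << 27)   /* CPUID.1:ECX.OSXSAVE */

    static inline uint64_t get_cr4(void)
    {
            uint64_t cr4;

            /* mov from %cr4 is privileged; this runs at CPL0 in the guest. */
            __asm__ __volatile__("mov %%cr4, %0" : "=r"(cr4));
            return cr4;
    }

    static inline void set_cr4(uint64_t cr4)
    {
            __asm__ __volatile__("mov %0, %%cr4" : : "r"(cr4) : "memory");
    }

    static bool cr4_cpuid_is_sync(void)
    {
            uint32_t eax = 1, ecx;

            /* CPUID leaf 1; ebx/edx are clobbered, ecx holds the feature bits. */
            __asm__ __volatile__("cpuid"
                                 : "+a"(eax), "=c"(ecx)
                                 : : "ebx", "edx");
            return !!(ecx & CPUID_ECX_OSXSAVE) == !!(get_cr4() & X86_CR4_OSXSAVE);
    }

    static void guest_code(void)
    {
            set_cr4(get_cr4() | X86_CR4_OSXSAVE);
            if (!cr4_cpuid_is_sync())
                    __asm__ __volatile__("ud2"); /* trap on mismatch; the real
                                                    test reports via ucall */
    }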
/tools/testing/selftests/kvm/include/x86_64/
processor.h
216 uint64_t cr4; in get_cr4() local
219 : /* output */ [cr4]"=r"(cr4)); in get_cr4()
220 return cr4; in get_cr4()
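Pieced together from these three fragments, the get_cr4() helper in processor.h plausibly reads as follows; the asm mnemonic itself is not in the results, so the mov template is an assumption consistent with the named [cr4] operand:

    static inline uint64_t get_cr4(void)
    {
            uint64_t cr4;

            __asm__ __volatile__("mov %%cr4, %[cr4]"
                                 : /* output */ [cr4]"=r"(cr4));
            return cr4;
    }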
svm.h
180 u64 cr4; member
/tools/testing/selftests/kvm/lib/x86_64/
vmx.c
138 unsigned long cr4; in prepare_for_vmx_operation() local
150 __asm__ __volatile__("mov %%cr4, %0" : "=r"(cr4) : : "memory"); in prepare_for_vmx_operation()
151 cr4 &= rdmsr(MSR_IA32_VMX_CR4_FIXED1); in prepare_for_vmx_operation()
152 cr4 |= rdmsr(MSR_IA32_VMX_CR4_FIXED0); in prepare_for_vmx_operation()
154 cr4 |= X86_CR4_VMXE; in prepare_for_vmx_operation()
155 __asm__ __volatile__("mov %0, %%cr4" : : "r"(cr4) : "memory"); in prepare_for_vmx_operation()
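Read together, the vmx.c hits spell out the CR4 conditioning that VMXON requires: bits reported as 0 in IA32_VMX_CR4_FIXED1 must be cleared, bits reported as 1 in IA32_VMX_CR4_FIXED0 must be set, and CR4.VMXE must be on. A self-contained sketch under the SDM-documented constants (MSRs 0x488/0x489, VMXE = bit 13); condition_cr4_for_vmx is an illustrative name, and the local rdmsr() stands in for the helper the listing calls:

    #include <stdint.h>

    #define MSR_IA32_VMX_CR4_FIXED0 0x00000488 /* bits that must be 1 in CR4 */
    #define MSR_IA32_VMX_CR4_FIXED1 0x00000489 /* bits that may be 1 in CR4 */
    #define X86_CR4_VMXE            (1UL << 13)

    static inline uint64_t rdmsr(uint32_t msr)
    {
            uint32_t lo, hi;

            __asm__ __volatile__("rdmsr" : "=a"(lo), "=d"(hi) : "c"(msr));
            return ((uint64_t)hi << 32) | lo;
    }

    static void condition_cr4_for_vmx(void)
    {
            unsigned long cr4;

            __asm__ __volatile__("mov %%cr4, %0" : "=r"(cr4) : : "memory");
            cr4 &= rdmsr(MSR_IA32_VMX_CR4_FIXED1); /* drop must-be-0 bits */
            cr4 |= rdmsr(MSR_IA32_VMX_CR4_FIXED0); /* force must-be-1 bits */
            cr4 |= X86_CR4_VMXE;                   /* required before VMXON */
            __asm__ __volatile__("mov %0, %%cr4" : : "r"(cr4) : "memory");
    }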
svm.c
100 asm volatile ("mov %%cr4, %0" : "=r"(save->cr4) : : "memory"); in generic_svm_setup()
processor.c
199 sregs->cr0, sregs->cr2, sregs->cr3, sregs->cr4); in sregs_dump()
565 sregs.cr4 |= X86_CR4_PAE | X86_CR4_OSFXSR; in vcpu_setup()
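Unlike the guest-side snippets, these processor.c hits (and the sregs.cr4 line in the test's main()) touch CR4 from the host through KVM's sregs interface. A hedged sketch of that read-modify-write pattern using the raw KVM_GET_SREGS/KVM_SET_SREGS ioctls; the selftests wrap these behind their own vcpu helpers, and set_guest_cr4_bits/vcpu_fd are illustrative names:

    #include <linux/kvm.h>
    #include <sys/ioctl.h>

    #define X86_CR4_PAE    (1UL << 5) /* the bits vcpu_setup() ORs in */
    #define X86_CR4_OSFXSR (1UL << 9)

    /* Read-modify-write the guest's CR4 via the vCPU fd. */
    static int set_guest_cr4_bits(int vcpu_fd, __u64 set, __u64 clear)
    {
            struct kvm_sregs sregs;

            if (ioctl(vcpu_fd, KVM_GET_SREGS, &sregs) < 0)
                    return -1;
            sregs.cr4 |= set;
            sregs.cr4 &= ~clear;
            return ioctl(vcpu_fd, KVM_SET_SREGS, &sregs);
    }

In these terms, vcpu_setup() amounts to set_guest_cr4_bits(vcpu_fd, X86_CR4_PAE | X86_CR4_OSFXSR, 0), and the OSXSAVE clearing in the test's main() to set_guest_cr4_bits(vcpu_fd, 0, X86_CR4_OSXSAVE).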
/tools/arch/x86/include/uapi/asm/
kvm.h
154 __u64 cr0, cr2, cr3, cr4, cr8; member