/kernel/linux/linux-5.10/tools/testing/selftests/kvm/x86_64/ |
D | cr4_cpuid_sync_test.c |
      30  uint64_t cr4;  in cr4_cpuid_is_sync() local
      38  cr4 = get_cr4();  in cr4_cpuid_is_sync()
      40  return (!!(ecx & X86_FEATURE_OSXSAVE)) == (!!(cr4 & X86_CR4_OSXSAVE));  in cr4_cpuid_is_sync()
      45  uint64_t cr4;  in guest_code() local
      48  cr4 = get_cr4();  in guest_code()
      49  cr4 |= X86_CR4_OSXSAVE;  in guest_code()
      50  set_cr4(cr4);  in guest_code()
     100  sregs.cr4 &= ~X86_CR4_OSXSAVE;  in main()
|
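The guest code listed above sets CR4.OSXSAVE and then checks that CPUID.01H:ECX bit 27 (OSXSAVE) tracks it; architecturally that CPUID bit simply mirrors CR4.OSXSAVE. A minimal user-space sketch of the CPUID side of that check (CR4 itself is not readable at CPL 3, and this program is illustrative, not taken from the selftest):

/* Build with: cc -o osxsave osxsave.c (x86 only). */
#include <cpuid.h>
#include <stdio.h>

#define CPUID_1_ECX_OSXSAVE (1u << 27)  /* mirrors CR4.OSXSAVE */

int main(void)
{
        unsigned int eax, ebx, ecx, edx;

        if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
                return 1;

        printf("CR4.OSXSAVE as reported through CPUID: %s\n",
               (ecx & CPUID_1_ECX_OSXSAVE) ? "set" : "clear");
        return 0;
}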
/kernel/linux/linux-5.10/drivers/misc/lkdtm/ |
D | bugs.c |
     352  unsigned long cr4;  in lkdtm_UNSET_SMEP() local
     355  cr4 = native_read_cr4();  in lkdtm_UNSET_SMEP()
     357  if ((cr4 & X86_CR4_SMEP) != X86_CR4_SMEP) {  in lkdtm_UNSET_SMEP()
     361  cr4 &= ~(X86_CR4_SMEP);  in lkdtm_UNSET_SMEP()
     364  native_write_cr4(cr4);  in lkdtm_UNSET_SMEP()
     365  if (cr4 == native_read_cr4()) {  in lkdtm_UNSET_SMEP()
     367  cr4 |= X86_CR4_SMEP;  in lkdtm_UNSET_SMEP()
     369  native_write_cr4(cr4);  in lkdtm_UNSET_SMEP()
     398  direct_write_cr4(cr4);  in lkdtm_UNSET_SMEP()
     403  cr4 |= X86_CR4_SMEP;  in lkdtm_UNSET_SMEP()
     [all …]
|
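lkdtm_UNSET_SMEP() reads CR4, clears the SMEP bit and writes it back, expecting the kernel's CR4 pinning in native_write_cr4() to restore the bit, and then pokes the raw direct_write_cr4() path, which bypasses that pinning. A user-space model of the pinning behaviour, assuming stand-in names (model_write_cr4 and model_cr4 are not kernel symbols; only the SMEP bit position is architectural):

#include <stdio.h>

#define X86_CR4_SMEP    (1UL << 20)
#define CR4_PINNED_BITS X86_CR4_SMEP            /* small subset, for the model */

static unsigned long model_cr4 = X86_CR4_SMEP;  /* pretend SMEP is enabled */

/* Mirrors the idea of native_write_cr4(): pinned bits cannot be cleared. */
static void model_write_cr4(unsigned long val)
{
        val |= CR4_PINNED_BITS;
        model_cr4 = val;
}

int main(void)
{
        unsigned long cr4 = model_cr4;

        cr4 &= ~X86_CR4_SMEP;           /* attempt to drop SMEP, as the test does */
        model_write_cr4(cr4);

        printf("SMEP after the write attempt: %s\n",
               (model_cr4 & X86_CR4_SMEP) ? "still set (pinned)" : "cleared");
        return 0;
}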
/kernel/linux/linux-5.10/arch/x86/kernel/ |
D | sev_verify_cbit.S |
      35  movq %cr4, %rsi
      40  movq %rdx, %cr4
      71  movq %rsi, %cr4
|
D | process_32.c |
      62  unsigned long cr0 = 0L, cr2 = 0L, cr3 = 0L, cr4 = 0L;  in __show_regs() local
      86  cr4 = __read_cr4();  in __show_regs()
      88  log_lvl, cr0, cr2, cr3, cr4);  in __show_regs()
|
D | relocate_kernel_64.S |
      69  movq %cr4, %rax
     144  movq %rax, %cr4
     227  movq %rax, %cr4
|
D | process.c |
     599  unsigned long newval, cr4 = this_cpu_read(cpu_tlbstate.cr4);  in cr4_toggle_bits_irqsoff() local
     601  newval = cr4 ^ mask;  in cr4_toggle_bits_irqsoff()
     602  if (newval != cr4) {  in cr4_toggle_bits_irqsoff()
     603  this_cpu_write(cpu_tlbstate.cr4, newval);  in cr4_toggle_bits_irqsoff()
|
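cr4_toggle_bits_irqsoff() XORs a mask into the per-CPU software copy of CR4 (cpu_tlbstate.cr4) and only touches the register when the value actually changes, since a CR4 write is serializing and relatively expensive. A small user-space model of that bookkeeping with stand-in names (cr4_shadow and hw_write_cr4 are not kernel symbols; CR4.TSD is one of the bits toggled this way on context switch):

#include <stdio.h>

#define X86_CR4_TSD     (1UL << 2)      /* RDTSC restricted to CPL 0 when set */

static unsigned long cr4_shadow;        /* models this CPU's cpu_tlbstate.cr4 */

static void hw_write_cr4(unsigned long val)     /* stand-in for __write_cr4() */
{
        printf("CR4 <- %#lx\n", val);
}

static void cr4_toggle_bits_model(unsigned long mask)
{
        unsigned long newval, cr4 = cr4_shadow;

        newval = cr4 ^ mask;
        if (newval != cr4) {            /* skip the serializing write for a no-op mask */
                cr4_shadow = newval;
                hw_write_cr4(newval);
        }
}

int main(void)
{
        cr4_toggle_bits_model(X86_CR4_TSD);     /* disable user RDTSC */
        cr4_toggle_bits_model(X86_CR4_TSD);     /* re-enable it */
        return 0;
}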
D | relocate_kernel_32.S |
      56  movl %cr4, %eax
     129  movl %eax, %cr4
     207  movl %eax, %cr4
|
/kernel/linux/linux-5.10/arch/x86/power/ |
D | hibernate_asm_32.S |
      55  jecxz 1f  # cr4 Pentium and higher, skip if zero
      57  movl %ecx, %cr4;  # turn off PGE
      89  jecxz 1f  # cr4 Pentium and higher, skip if zero
      90  movl %ecx, %cr4;  # turn PGE back on
|
D | hibernate_asm_64.S |
      79  movq %rcx, %cr4;  # turn off PGE
      82  movq %rbx, %cr4;  # turn PGE back on
     113  movq %rdx, %cr4;  # turn off PGE
     116  movq %rax, %cr4;  # turn PGE back on
|
D | cpu.c |
     125  ctxt->cr4 = __read_cr4();  in __save_processor_state()
     205  if (ctxt->cr4)  in __restore_processor_state()
     206  __write_cr4(ctxt->cr4);  in __restore_processor_state()
     210  __write_cr4(ctxt->cr4);  in __restore_processor_state()
|
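__save_processor_state() captures CR4 into the saved context before suspend/hibernation and __restore_processor_state() writes it back on resume; on 32-bit the write is guarded by the if (ctxt->cr4) check because CPUs that predate CR4 would fault on the write. A sketch of that save/restore pattern against a fake register, assuming hypothetical names (saved_regs, fake_cr4 and the *_hw helpers are not the kernel's saved_context, __read_cr4 or __write_cr4):

#include <stdio.h>

struct saved_regs {
        unsigned long cr0, cr2, cr3, cr4;       /* mirrors the fields listed above */
};

static unsigned long fake_cr4 = 0x3406b0;       /* arbitrary stand-in value */

static unsigned long read_cr4_hw(void)    { return fake_cr4; }
static void write_cr4_hw(unsigned long v) { fake_cr4 = v; }

static void save_processor_state_model(struct saved_regs *ctxt)
{
        ctxt->cr4 = read_cr4_hw();
}

static void restore_processor_state_model(const struct saved_regs *ctxt)
{
        /* A zero value means the CPU has no CR4 at all: skip the write. */
        if (ctxt->cr4)
                write_cr4_hw(ctxt->cr4);
}

int main(void)
{
        struct saved_regs ctxt = { 0 };

        save_processor_state_model(&ctxt);
        fake_cr4 = 0;                           /* pretend resume scrambled it */
        restore_processor_state_model(&ctxt);
        printf("CR4 restored to %#lx\n", fake_cr4);
        return 0;
}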
/kernel/linux/linux-5.10/arch/x86/platform/pvh/ |
D | head.S |
      74  mov %cr4, %eax
      76  mov %eax, %cr4
     143  mov %cr4, %eax
     145  mov %eax, %cr4
|
/kernel/linux/linux-5.10/arch/x86/kernel/cpu/mtrr/ |
D | cyrix.c |
     135  static u32 cr4, ccr3;  variable
     143  cr4 = __read_cr4();  in prepare_set()
     144  __write_cr4(cr4 & ~X86_CR4_PGE);  in prepare_set()
     176  __write_cr4(cr4);  in post_set()
|
/kernel/linux/linux-5.10/arch/x86/mm/ |
D | mem_encrypt_boot.S |
     102  mov %cr4, %rdx
     104  mov %rdx, %cr4
     106  mov %rdx, %cr4
|
D | tlb.c |
    1048  unsigned long cr4, flags;  in native_flush_tlb_global() local
    1068  cr4 = this_cpu_read(cpu_tlbstate.cr4);  in native_flush_tlb_global()
    1070  native_write_cr4(cr4 ^ X86_CR4_PGE);  in native_flush_tlb_global()
    1072  native_write_cr4(cr4);  in native_flush_tlb_global()
|
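native_flush_tlb_global() reads the cached CR4 value and writes the register twice, first with PGE flipped and then with the original value: toggling CR4.PGE invalidates all TLB entries, including global ones, which ordinary CR3 reloads leave in place. The same property is why cyrix.c's prepare_set() above clears PGE while memory-type ranges are reprogrammed. A compact model of the sequence, with hw_write_cr4() as a stand-in for native_write_cr4() (the real function runs with interrupts disabled and prefers INVPCID when the CPU has it):

#include <stdio.h>

#define X86_CR4_PGE     (1UL << 7)

static unsigned long cr4_cached = 0xb0 | X86_CR4_PGE;   /* arbitrary value with PGE set */

static void hw_write_cr4(unsigned long val)     /* privileged in real life */
{
        printf("CR4 <- %#lx\n", val);
}

static void flush_tlb_global_model(void)
{
        unsigned long cr4 = cr4_cached;

        hw_write_cr4(cr4 ^ X86_CR4_PGE);        /* PGE off: global entries are dropped */
        hw_write_cr4(cr4);                      /* restore the original value */
}

int main(void)
{
        flush_tlb_global_model();
        return 0;
}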
/kernel/linux/linux-5.10/include/xen/interface/hvm/ |
D | hvm_vcpu.h |
      42  uint32_t cr4;  member
     105  uint64_t cr4;  member
|
/kernel/linux/linux-5.10/arch/x86/include/asm/ |
D | tlbflush.h |
     131  unsigned long cr4;  member
     162  this_cpu_write(cpu_tlbstate.cr4, __read_cr4());  in cr4_init_shadow()
|
D | suspend_32.h | 22 unsigned long cr0, cr2, cr3, cr4; member
|
D | suspend_64.h | 37 unsigned long cr0, cr2, cr3, cr4; member
|
/kernel/linux/linux-5.10/tools/testing/selftests/kvm/include/x86_64/ |
D | processor.h |
     216  uint64_t cr4;  in get_cr4() local
     219  : /* output */ [cr4]"=r"(cr4));  in get_cr4()
     220  return cr4;  in get_cr4()
|
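The selftest's get_cr4()/set_cr4() accessors are plain GCC extended-asm wrappers around the mov-from/to-CR4 instructions; the fragments above show the output constraint used for the read. A reconstruction of that pattern (set_cr4() is included for symmetry and is a sketch rather than a verbatim copy; both only execute at CPL 0, e.g. as guest code inside a KVM selftest, and fault in ordinary user space):

#include <stdint.h>

static inline uint64_t get_cr4(void)
{
        uint64_t cr4;

        __asm__ __volatile__("mov %%cr4, %[cr4]"
                             : /* output */ [cr4]"=r"(cr4));
        return cr4;
}

static inline void set_cr4(uint64_t cr4)
{
        __asm__ __volatile__("mov %[cr4], %%cr4"
                             : /* no outputs */
                             : [cr4]"r"(cr4)
                             : "memory");
}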
/kernel/linux/linux-5.10/tools/testing/selftests/kvm/lib/x86_64/ |
D | vmx.c |
     138  unsigned long cr4;  in prepare_for_vmx_operation() local
     150  __asm__ __volatile__("mov %%cr4, %0" : "=r"(cr4) : : "memory");  in prepare_for_vmx_operation()
     151  cr4 &= rdmsr(MSR_IA32_VMX_CR4_FIXED1);  in prepare_for_vmx_operation()
     152  cr4 |= rdmsr(MSR_IA32_VMX_CR4_FIXED0);  in prepare_for_vmx_operation()
     154  cr4 |= X86_CR4_VMXE;  in prepare_for_vmx_operation()
     155  __asm__ __volatile__("mov %0, %%cr4" : : "r"(cr4) : "memory");  in prepare_for_vmx_operation()
|
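Before VMXON, CR4 has to satisfy the IA32_VMX_CR4_FIXED0/FIXED1 MSRs: every bit that is 1 in FIXED0 must be set, every bit that is 0 in FIXED1 must be clear, and CR4.VMXE must be 1. The snippet above applies exactly that by ANDing with FIXED1, ORing with FIXED0, then setting VMXE. A small helper expressing the same conditioning as a pure function (the MSR values are parameters here; the selftest reads them with rdmsr()):

#include <stdint.h>

#define X86_CR4_VMXE    (1ULL << 13)

/*
 * Condition a CR4 value for VMX operation from the two fixed-bit MSRs:
 * fixed0 = IA32_VMX_CR4_FIXED0, fixed1 = IA32_VMX_CR4_FIXED1.
 */
uint64_t cr4_for_vmxon(uint64_t cr4, uint64_t fixed0, uint64_t fixed1)
{
        cr4 &= fixed1;          /* clear bits that must be 0 */
        cr4 |= fixed0;          /* set bits that must be 1 */
        cr4 |= X86_CR4_VMXE;    /* usually already covered by fixed0 */
        return cr4;
}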
/kernel/linux/linux-5.10/arch/x86/kernel/cpu/ |
D | common.c |
     402  unsigned long newval, cr4 = this_cpu_read(cpu_tlbstate.cr4);  in cr4_update_irqsoff() local
     406  newval = (cr4 & ~clear) | set;  in cr4_update_irqsoff()
     407  if (newval != cr4) {  in cr4_update_irqsoff()
     408  this_cpu_write(cpu_tlbstate.cr4, newval);  in cr4_update_irqsoff()
     417  return this_cpu_read(cpu_tlbstate.cr4);  in cr4_read_shadow()
     423  unsigned long cr4 = __read_cr4();  in cr4_init() local
     426  cr4 |= X86_CR4_PCIDE;  in cr4_init()
     428  cr4 = (cr4 & ~cr4_pinned_mask) | cr4_pinned_bits;  in cr4_init()
     430  __write_cr4(cr4);  in cr4_init()
     433  this_cpu_write(cpu_tlbstate.cr4, cr4);  in cr4_init()
     [all …]
|
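cr4_init() composes the CPU's CR4 value at bring-up: it enables PCIDE when PCIDs are usable, forces the pinned security bits (the cr4_pinned_mask/cr4_pinned_bits pair visible above), writes the register once, and seeds the per-CPU shadow that cr4_update_irqsoff() and cr4_read_shadow() use later so redundant CR4 writes can be skipped. A sketch of that sequence with stand-in names (everything ending in _model is hypothetical; only the bit positions are architectural, and the real pinned set is larger):

#include <stdio.h>

#define X86_CR4_PCIDE   (1UL << 17)
#define X86_CR4_SMEP    (1UL << 20)
#define X86_CR4_SMAP    (1UL << 21)

/* Pinned-bit bookkeeping: which bits are pinned, and their locked-in values. */
static const unsigned long cr4_pinned_mask_model = X86_CR4_SMEP | X86_CR4_SMAP;
static const unsigned long cr4_pinned_bits_model = X86_CR4_SMEP | X86_CR4_SMAP;

static unsigned long cr4_shadow_model;                  /* cpu_tlbstate.cr4 */

static int cpu_has_pcid_model(void) { return 1; }       /* pretend PCID exists */
static void write_cr4_model(unsigned long v) { printf("CR4 <- %#lx\n", v); }

static void cr4_init_model(unsigned long cr4)
{
        if (cpu_has_pcid_model())
                cr4 |= X86_CR4_PCIDE;
        cr4 = (cr4 & ~cr4_pinned_mask_model) | cr4_pinned_bits_model;

        write_cr4_model(cr4);
        cr4_shadow_model = cr4;                         /* seed the software copy */
}

int main(void)
{
        cr4_init_model(0);
        return 0;
}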
/kernel/linux/linux-5.10/arch/x86/platform/olpc/ |
D | xo1-wakeup.S |
      30  movl %eax, %cr4
      64  movl %cr4, %edx
|
/kernel/linux/linux-5.10/arch/x86/boot/compressed/ |
D | efi_thunk_64.S |
     137  movl %cr4, %eax
     139  movl %eax, %cr4
|
D | head_64.S |
     161  movl %cr4, %eax
     163  movl %eax, %cr4
     612  movl %cr4, %eax
     618  movl %cr4, %eax
     642  movl %eax, %cr4
|
/kernel/linux/linux-5.10/arch/powerpc/kernel/ |
D | cpu_setup_6xx.S |
     337  cmplwi cr4,r3,0x8002  /* 7457 */
     345  cror 4*cr0+eq,4*cr0+eq,4*cr4+eq
     408  cmplwi cr4,r3,0x8002  /* 7457 */
     416  cror 4*cr0+eq,4*cr0+eq,4*cr4+eq
|