
Searched refs:cr4 (Results 1 – 25 of 58) sorted by relevance


/arch/x86/kernel/
sev_verify_cbit.S
35 movq %cr4, %rsi
40 movq %rdx, %cr4
71 movq %rsi, %cr4
relocate_kernel_64.S
71 movq %cr4, %rax
126 movq %cr4, %rax
128 movq %rax, %cr4
154 movq %rax, %cr4
238 movq %rax, %cr4
process_32.c
62 unsigned long cr0 = 0L, cr2 = 0L, cr3 = 0L, cr4 = 0L; in __show_regs() local
83 cr4 = __read_cr4(); in __show_regs()
85 log_lvl, cr0, cr2, cr3, cr4); in __show_regs()
head_64.S
177 movq %cr4, %rcx
191 movq %rcx, %cr4
219 movq %cr4, %rcx
222 movq %rcx, %cr4
223 movq %rax, %cr4
relocate_kernel_32.S
56 movl %cr4, %eax
129 movl %eax, %cr4
207 movl %eax, %cr4
asm-offsets_64.c
52 ENTRY(cr4); in main()
process.c
684 unsigned long newval, cr4 = this_cpu_read(cpu_tlbstate.cr4); in cr4_toggle_bits_irqsoff() local
686 newval = cr4 ^ mask; in cr4_toggle_bits_irqsoff()
687 if (newval != cr4) { in cr4_toggle_bits_irqsoff()
688 this_cpu_write(cpu_tlbstate.cr4, newval); in cr4_toggle_bits_irqsoff()
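
Note: the process.c hits above show the kernel's shadow-CR4 idiom: CR4 updates go through a per-CPU software copy (cpu_tlbstate.cr4), and the real register is only written when a bit actually changes. Below is a minimal user-space model of that pattern; shadow_cr4 and write_cr4_hw() are stand-ins for the per-CPU variable and the privileged MOV-to-CR4, not kernel APIs.

#include <stdio.h>

static unsigned long shadow_cr4;            /* models this-CPU cpu_tlbstate.cr4 */

static void write_cr4_hw(unsigned long val) /* models __write_cr4() */
{
        printf("mov %#lx, %%cr4\n", val);
}

static void cr4_toggle_bits(unsigned long mask)
{
        unsigned long newval = shadow_cr4 ^ mask; /* flip the requested bits */

        if (newval != shadow_cr4) {         /* skip redundant CR4 writes */
                shadow_cr4 = newval;        /* update the shadow copy first */
                write_cr4_hw(newval);       /* then the real register */
        }
}

int main(void)
{
        shadow_cr4 = 0x7426e0;              /* arbitrary example value */
        cr4_toggle_bits(1UL << 2);          /* toggle CR4.TSD (bit 2): one write */
        cr4_toggle_bits(0);                 /* empty mask: no write at all */
        return 0;
}

The real helper runs with interrupts off (hence the _irqsoff suffix) so the shadow copy and the hardware register cannot drift apart.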
/arch/x86/kvm/
smm.c
30 CHECK_SMRAM32_OFFSET(cr4, 0xFF14); in check_smram_offsets()
96 CHECK_SMRAM64_OFFSET(cr4, 0xFF48); in check_smram_offsets()
222 smram->cr4 = kvm_read_cr4(vcpu); in enter_smm_save_state_32()
251 smram->cr4 = kvm_read_cr4(vcpu); in enter_smm_save_state_64()
423 u64 cr0, u64 cr3, u64 cr4) in rsm_enter_protected_mode() argument
430 if (cr4 & X86_CR4_PCIDE) { in rsm_enter_protected_mode()
444 bad = kvm_set_cr4(vcpu, cr4 & ~X86_CR4_PCIDE); in rsm_enter_protected_mode()
452 if (cr4 & X86_CR4_PCIDE) { in rsm_enter_protected_mode()
453 bad = kvm_set_cr4(vcpu, cr4); in rsm_enter_protected_mode()
507 smstate->cr3, smstate->cr4); in rsm_load_state_32()
[all …]
smm.h
31 u32 cr4; member
127 u64 cr4; member
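
Note: the rsm_enter_protected_mode() hits show why the CR4 restore on RSM is split in two: architecturally, CR4.PCIDE may only be set while CR3[11:0] is zero (and only in long mode), so KVM loads CR3 and a PCIDE-less CR4 first and sets PCIDE last. A simplified model of that ordering follows; the set_cr*() helpers are stubs standing in for kvm_set_cr0()/kvm_set_cr3()/kvm_set_cr4().

#include <stdbool.h>

#define X86_CR4_PAE   (1UL << 5)
#define X86_CR4_PCIDE (1UL << 17)

/* Stubs standing in for the real kvm_set_cr*() calls. */
static bool set_cr0(unsigned long v) { (void)v; return true; }
static bool set_cr3(unsigned long v) { (void)v; return true; }
static bool set_cr4(unsigned long v) { (void)v; return true; }

static bool rsm_enter_protected_mode(unsigned long cr0, unsigned long cr3,
                                     unsigned long cr4)
{
        unsigned long pcid = 0;

        /* CR4.PCIDE can only be set while CR3[11:0] == 0, so park the PCID. */
        if (cr4 & X86_CR4_PCIDE) {
                pcid = cr3 & 0xfff;
                cr3 &= ~0xfffUL;
        }

        if (!set_cr3(cr3))
                return false;

        /* Load everything except PCIDE ... */
        if (!set_cr4(cr4 & ~X86_CR4_PCIDE) || !set_cr0(cr0))
                return false;

        /* ... then set PCIDE last, and put the PCID back into CR3. */
        if (cr4 & X86_CR4_PCIDE) {
                if (!set_cr4(cr4))
                        return false;
                if (pcid && !set_cr3(cr3 | pcid))
                        return false;
        }
        return true;
}

int main(void)
{
        /* Example: long-mode state with PCID 3 in CR3 and PCIDE set. */
        return rsm_enter_protected_mode(0x80000011, 0x1000003,
                                        X86_CR4_PAE | X86_CR4_PCIDE) ? 0 : 1;
}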
/arch/x86/power/
hibernate_asm_32.S
55 jecxz 1f # cr4 Pentium and higher, skip if zero
57 movl %ecx, %cr4; # turn off PGE
89 jecxz 1f # cr4 Pentium and higher, skip if zero
90 movl %ecx, %cr4; # turn PGE back on
hibernate_asm_64.S
36 movq %rdx, %cr4; # turn off PGE
39 movq %rax, %cr4; # turn PGE back on
127 movq %rcx, %cr4; # turn off PGE
130 movq %rbx, %cr4; # turn PGE back on
cpu.c
126 ctxt->cr4 = __read_cr4(); in __save_processor_state()
206 if (ctxt->cr4) in __restore_processor_state()
207 __write_cr4(ctxt->cr4); in __restore_processor_state()
211 __write_cr4(ctxt->cr4); in __restore_processor_state()
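
Note: the power/cpu.c hits show the suspend/resume handling of CR4: __save_processor_state() snapshots it, and the 32-bit restore path writes it back only if the saved value is non-zero, because 486-class CPUs have no CR4 at all (the 64-bit path at line 211 writes unconditionally). A compact model, with saved_context trimmed to the control registers and read_cr4()/write_cr4() as stand-ins:

struct saved_context {
        unsigned long cr0, cr2, cr3, cr4;       /* trimmed-down model */
};

static unsigned long read_cr4(void)    { return 0; }  /* stand-in accessor */
static void write_cr4(unsigned long v) { (void)v; }   /* stand-in accessor */

static void save_processor_state(struct saved_context *ctxt)
{
        ctxt->cr4 = read_cr4();  /* stays 0 on CPUs that lack CR4 */
}

static void restore_processor_state(struct saved_context *ctxt)
{
        /* 32-bit path: a 486 has no CR4, so only restore a non-zero value. */
        if (ctxt->cr4)
                write_cr4(ctxt->cr4);
}

int main(void)
{
        struct saved_context ctxt;

        save_processor_state(&ctxt);
        restore_processor_state(&ctxt);
        return 0;
}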
/arch/x86/platform/pvh/
head.S
74 mov %cr4, %eax
76 mov %eax, %cr4
133 mov %cr4, %eax
135 mov %eax, %cr4
/arch/x86/kernel/cpu/mtrr/
cyrix.c
135 static u32 cr4, ccr3; variable
143 cr4 = __read_cr4(); in prepare_set()
144 __write_cr4(cr4 & ~X86_CR4_PGE); in prepare_set()
176 __write_cr4(cr4); in post_set()
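
Note: the Cyrix MTRR code uses a save/clear/restore idiom around range-register updates: prepare_set() stashes CR4 in a file-scope static and clears CR4.PGE so global pages are disabled while the ARRs are reprogrammed, and post_set() writes the saved value back. Schematically, with a fake register in place of the real CR4 accessors:

#define X86_CR4_PGE (1UL << 7)

static unsigned long fake_cr4 = X86_CR4_PGE;   /* stand-in for the register */
static unsigned long read_cr4(void)    { return fake_cr4; }
static void write_cr4(unsigned long v) { fake_cr4 = v; }

static unsigned long saved_cr4;  /* models the file-scope static in cyrix.c */

static void prepare_set(void)
{
        saved_cr4 = read_cr4();                 /* remember the current value */
        write_cr4(saved_cr4 & ~X86_CR4_PGE);    /* clear PGE: no global pages */
}

static void post_set(void)
{
        write_cr4(saved_cr4);                   /* put PGE back if it was set */
}

int main(void)
{
        prepare_set();
        /* ... reprogram the Cyrix ARRs here ... */
        post_set();
        return 0;
}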
/arch/x86/include/asm/
tlbflush.h
129 unsigned long cr4; member
180 this_cpu_write(cpu_tlbstate.cr4, __read_cr4()); in cr4_init_shadow()
422 static inline void __native_tlb_flush_global(unsigned long cr4) in __native_tlb_flush_global() argument
424 native_write_cr4(cr4 ^ X86_CR4_PGE); in __native_tlb_flush_global()
425 native_write_cr4(cr4); in __native_tlb_flush_global()
suspend_32.h
15 unsigned long cr0, cr2, cr3, cr4; member
suspend_64.h
41 unsigned long cr0, cr2, cr3, cr4; member
realmode.h
53 u32 cr4;
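
Note: __native_tlb_flush_global() in tlbflush.h relies on the architectural rule that any CR4 write which changes the PGE bit flushes the entire TLB, global entries included. Writing the value with PGE flipped and then writing the original value back is therefore a full global flush. As a stand-alone sketch, with native_write_cr4() replaced by a stub:

#define X86_CR4_PAE (1UL << 5)
#define X86_CR4_PGE (1UL << 7)

static void native_write_cr4(unsigned long val) { (void)val; }  /* stub */

static void tlb_flush_global(unsigned long cr4)
{
        /* Changing PGE flushes all TLB entries, global ones included... */
        native_write_cr4(cr4 ^ X86_CR4_PGE);
        /* ...and the second write restores the original CR4 value. */
        native_write_cr4(cr4);
}

int main(void)
{
        tlb_flush_global(X86_CR4_PGE | X86_CR4_PAE);
        return 0;
}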
/arch/x86/mm/
mem_encrypt_boot.S
102 mov %cr4, %rdx
104 mov %rdx, %cr4
106 mov %rdx, %cr4
/arch/x86/boot/compressed/
head_64.S
169 movl %cr4, %eax
171 movl %eax, %cr4
402 movq %cr4, %rax
404 movq %rax, %cr4
558 movl %cr4, %eax
560 movl %eax, %cr4
/arch/x86/platform/olpc/
xo1-wakeup.S
30 movl %eax, %cr4
64 movl %cr4, %edx
/arch/x86/kernel/cpu/
common.c
452 unsigned long newval, cr4 = this_cpu_read(cpu_tlbstate.cr4); in cr4_update_irqsoff() local
456 newval = (cr4 & ~clear) | set; in cr4_update_irqsoff()
457 if (newval != cr4) { in cr4_update_irqsoff()
458 this_cpu_write(cpu_tlbstate.cr4, newval); in cr4_update_irqsoff()
467 return this_cpu_read(cpu_tlbstate.cr4); in cr4_read_shadow()
473 unsigned long cr4 = __read_cr4(); in cr4_init() local
476 cr4 |= X86_CR4_PCIDE; in cr4_init()
478 cr4 = (cr4 & ~cr4_pinned_mask) | cr4_pinned_bits; in cr4_init()
480 __write_cr4(cr4); in cr4_init()
483 this_cpu_write(cpu_tlbstate.cr4, cr4); in cr4_init()
[all …]
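
Note: the common.c hits also show CR4 pinning: cr4_init() (and every update through cr4_update_irqsoff()) forces a set of security-critical bits, captured at boot in cr4_pinned_bits, to stay set under cr4_pinned_mask. The exact pinned set depends on the kernel version and CPU features; SMEP/SMAP/UMIP are typical members on recent kernels. A schematic model of the masking step:

#define X86_CR4_UMIP (1UL << 11)
#define X86_CR4_SMEP (1UL << 20)
#define X86_CR4_SMAP (1UL << 21)

/* Which bits are pinned, and the value they were pinned to at boot. */
static const unsigned long cr4_pinned_mask =
        X86_CR4_SMEP | X86_CR4_SMAP | X86_CR4_UMIP;
static unsigned long cr4_pinned_bits;

static unsigned long apply_pinning(unsigned long cr4)
{
        /* Whatever the caller asked for, the pinned bits win. */
        return (cr4 & ~cr4_pinned_mask) | cr4_pinned_bits;
}

int main(void)
{
        unsigned long attack;

        cr4_pinned_bits = X86_CR4_SMEP | X86_CR4_SMAP; /* snapshot "at boot" */
        attack = 0;                          /* attempt to clear SMEP/SMAP */
        return apply_pinning(attack) == cr4_pinned_bits ? 0 : 1;
}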
/arch/powerpc/boot/
ppc_asm.h
17 #define cr4 4 macro
/arch/powerpc/kernel/
cpu_setup_6xx.S
366 cmplwi cr4,r3,0x8002 /* 7457 */
374 cror 4*cr0+eq,4*cr0+eq,4*cr4+eq
437 cmplwi cr4,r3,0x8002 /* 7457 */
445 cror 4*cr0+eq,4*cr0+eq,4*cr4+eq
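
Note: the /arch/powerpc/ hits are unrelated to the x86 control register. On PowerPC, cr4 names field 4 of the condition register (ppc_asm.h simply defines it as the constant 4), which is why it appears as an operand of the cmplwi and cror instructions in cpu_setup_6xx.S.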
/arch/x86/kvm/svm/
nested.c
93 kvm_init_shadow_npt_mmu(vcpu, X86_CR0_PG, svm->vmcb01.ptr->save.cr4, in nested_svm_init_mmu_context()
297 if (CC(!(save->cr4 & X86_CR4_PAE)) || in __nested_vmcb_check_save()
304 if (CC(!__kvm_is_valid_cr4(vcpu, save->cr4))) in __nested_vmcb_check_save()
390 to->cr4 = from->cr4; in __nested_copy_vmcb_save_to_cache()
571 svm_set_cr4(vcpu, svm->nested.save.cr4); in nested_vmcb02_prepare_save()
895 vmcb01->save.cr4 = vcpu->arch.cr4; in nested_svm_vmrun()
942 to_save->cr4 = from_save->cr4; in svm_copy_vmrun_state()
1005 vmcb12->save.cr4 = svm->vcpu.arch.cr4; in nested_svm_vmexit()
1112 svm_set_cr4(vcpu, vmcb01->save.cr4); in nested_svm_vmexit()
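
Note: the svm/nested.c hits show the consistency checks applied to the guest-supplied CR4 on VMRUN: __nested_vmcb_check_save() rejects a vmcb12 whose save area sets reserved or unsupported CR4 bits (via __kvm_is_valid_cr4()) and requires CR4.PAE whenever the nested guest claims long mode (EFER.LME together with CR0.PG). A simplified model of just the CR4 portion of those checks; is_valid_cr4() and its supported-bit set are illustrative stand-ins, not the real KVM logic.

#include <stdbool.h>

#define X86_CR4_PAE (1UL << 5)
#define X86_CR4_PGE (1UL << 7)
#define EFER_LME    (1UL << 8)
#define X86_CR0_PG  (1UL << 31)

static bool is_valid_cr4(unsigned long cr4) /* stand-in for __kvm_is_valid_cr4() */
{
        const unsigned long supported = X86_CR4_PAE | X86_CR4_PGE; /* toy set */

        return (cr4 & ~supported) == 0;     /* no reserved/unsupported bits */
}

static bool check_save_cr4(unsigned long efer, unsigned long cr0,
                           unsigned long cr4)
{
        /* Long-mode paging requires PAE... */
        if ((efer & EFER_LME) && (cr0 & X86_CR0_PG) && !(cr4 & X86_CR4_PAE))
                return false;
        /* ...and CR4 must not set bits the vCPU does not support. */
        return is_valid_cr4(cr4);
}

int main(void)
{
        return check_save_cr4(EFER_LME, X86_CR0_PG, X86_CR4_PAE) ? 0 : 1;
}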
