
Searched refs:cr4 (Results 1 – 25 of 36) sorted by relevance


/arch/x86/power/
hibernate_asm_32.S
    35  jecxz 1f # cr4 Pentium and higher, skip if zero
    37  movl %ecx, %cr4; # turn off PGE
    65  jecxz 1f # cr4 Pentium and higher, skip if zero
    66  movl %ecx, %cr4; # turn PGE back on
cpu.c
   105  ctxt->cr4 = read_cr4_safe();  in __save_processor_state()
   108  ctxt->cr4 = read_cr4();  in __save_processor_state()
   168  if (ctxt->cr4)  in __restore_processor_state()
   169  write_cr4(ctxt->cr4);  in __restore_processor_state()
   174  write_cr4(ctxt->cr4);  in __restore_processor_state()
hibernate_asm_64.S
    65  movq %rdx, %cr4; # turn off PGE
    68  movq %rax, %cr4; # turn PGE back on
   117  movq %rdx, %cr4; # turn off PGE
   120  movq %rax, %cr4; # turn PGE back on
/arch/x86/include/asm/
tlbflush.h
    26  unsigned long cr4;  in __native_flush_tlb_global() (local)
    35  cr4 = native_read_cr4();  in __native_flush_tlb_global()
    37  native_write_cr4(cr4 & ~X86_CR4_PGE);  in __native_flush_tlb_global()
    39  native_write_cr4(cr4);  in __native_flush_tlb_global()
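
The __native_flush_tlb_global() hits above all follow one pattern: clear CR4.PGE, then write the old value back, which discards every TLB entry including global pages. A minimal, self-contained sketch of that pattern follows; the sketch_ accessors and the open-coded PGE bit are illustrative stand-ins (assumptions), not the kernel's native_read_cr4()/native_write_cr4(), and ring-0 execution on x86 is assumed.

/*
 * Sketch only: toggle CR4.PGE off and back on to force a flush of all
 * TLB entries, global mappings included.
 */
#define X86_CR4_PGE (1UL << 7)    /* Page Global Enable (open-coded here) */

static inline unsigned long sketch_read_cr4(void)
{
        unsigned long cr4;

        asm volatile("mov %%cr4, %0" : "=r" (cr4));
        return cr4;
}

static inline void sketch_write_cr4(unsigned long cr4)
{
        asm volatile("mov %0, %%cr4" : : "r" (cr4) : "memory");
}

static inline void sketch_flush_tlb_global(void)
{
        unsigned long cr4 = sketch_read_cr4();

        sketch_write_cr4(cr4 & ~X86_CR4_PGE);   /* PGE off: full flush */
        sketch_write_cr4(cr4);                  /* PGE back on */
}

The hibernate and MTRR hits elsewhere in this listing toggle the same bit for the same reason.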
suspend_32.h
    15  unsigned long cr0, cr2, cr3, cr4;  (member)
processor.h
   551  unsigned long cr4;  in set_in_cr4() (local)
   554  cr4 = read_cr4();  in set_in_cr4()
   555  cr4 |= mask;  in set_in_cr4()
   556  write_cr4(cr4);  in set_in_cr4()
   561  unsigned long cr4;  in clear_in_cr4() (local)
   564  cr4 = read_cr4();  in clear_in_cr4()
   565  cr4 &= ~mask;  in clear_in_cr4()
   566  write_cr4(cr4);  in clear_in_cr4()
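
The set_in_cr4()/clear_in_cr4() hits are plain read-modify-write helpers over the same register. A hedged reconstruction of that pattern, reusing the sketch_ accessors from the previous example rather than the kernel's API, looks like this:

/* Sketch only: set or clear a mask of CR4 feature bits in place. */
static inline void sketch_set_in_cr4(unsigned long mask)
{
        unsigned long cr4 = sketch_read_cr4();

        cr4 |= mask;                /* e.g. X86_CR4_PGE, X86_CR4_PAE */
        sketch_write_cr4(cr4);
}

static inline void sketch_clear_in_cr4(unsigned long mask)
{
        unsigned long cr4 = sketch_read_cr4();

        cr4 &= ~mask;
        sketch_write_cr4(cr4);
}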
suspend_64.h
    24  unsigned long cr0, cr2, cr3, cr4, cr8;  (member)
/arch/x86/kernel/cpu/mtrr/
cyrix.c
   133  static u32 cr4, ccr3;  (variable)
   141  cr4 = read_cr4();  in prepare_set()
   142  write_cr4(cr4 & ~X86_CR4_PGE);  in prepare_set()
   174  write_cr4(cr4);  in post_set()
generic.c
   654  static unsigned long cr4;  (variable)
   684  cr4 = read_cr4();  in prepare_set()
   685  write_cr4(cr4 & ~X86_CR4_PGE);  in prepare_set()
   712  write_cr4(cr4);  in post_set()
/arch/powerpc/boot/
ppc_asm.h
    21  #define cr4 4  (macro)
/arch/x86/platform/olpc/
xo1-wakeup.S
    29  movl %eax, %cr4
    63  movl %cr4, %edx
/arch/x86/kernel/
process_32.c
    71  unsigned long cr0 = 0L, cr2 = 0L, cr3 = 0L, cr4 = 0L;  in __show_regs() (local)
   106  cr4 = read_cr4_safe();  in __show_regs()
   108  cr0, cr2, cr3, cr4);  in __show_regs()
relocate_kernel_64.S
    67  movq %cr4, %rax
   127  movq %rax, %cr4
   196  movq %rax, %cr4
relocate_kernel_32.S
    57  movl %cr4, %eax
   127  movl %eax, %cr4
   199  movl %eax, %cr4
asm-offsets_64.c
    74  ENTRY(cr4);  in main()
process_64.c
    60  unsigned long cr0 = 0L, cr2 = 0L, cr3 = 0L, cr4 = 0L, fs, gs, shadowgs;  in __show_regs() (local)
    97  cr4 = read_cr4();  in __show_regs()
   104  cr4);  in __show_regs()
/arch/score/kernel/
head.S
    55  mtcr r30, cr4
/arch/sh/include/cpu-sh5/cpu/
registers.h
    29  #define INTEVT cr4
/arch/powerpc/kernel/
cpu_setup_6xx.S
   339  cmplwi cr4,r3,0x8002  /* 7457 */
   347  cror 4*cr0+eq,4*cr0+eq,4*cr4+eq
   410  cmplwi cr4,r3,0x8002  /* 7457 */
   418  cror 4*cr0+eq,4*cr0+eq,4*cr4+eq
/arch/x86/kvm/
kvm_cache_regs.h
    66  return vcpu->arch.cr4 & mask;  in kvm_read_cr4_bits()
svm.c
  1154  save->cr4 = X86_CR4_PAE;  in init_vmcb()
  1166  save->cr4 = 0;  in init_vmcb()
  1592  static int svm_set_cr4(struct kvm_vcpu *vcpu, unsigned long cr4)  (argument)
  1595  unsigned long old_cr4 = to_svm(vcpu)->vmcb->save.cr4;  in svm_set_cr4()
  1597  if (cr4 & X86_CR4_VMXE)  in svm_set_cr4()
  1600  if (npt_enabled && ((old_cr4 ^ cr4) & X86_CR4_PGE))  in svm_set_cr4()
  1603  vcpu->arch.cr4 = cr4;  in svm_set_cr4()
  1605  cr4 |= X86_CR4_PAE;  in svm_set_cr4()
  1606  cr4 |= host_cr4_mce;  in svm_set_cr4()
  1607  to_svm(vcpu)->vmcb->save.cr4 = cr4;  in svm_set_cr4()
[all …]
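
Read together, the svm_set_cr4() hits outline how a guest CR4 write is filtered: CR4.VMXE is rejected, a change of CR4.PGE forces a TLB flush when nested paging is enabled, and PAE plus the host's MCE bit are folded in before the value reaches the VMCB. The sketch below guesses at the surrounding control flow from those lines only; struct sketch_vcpu, sketch_flush_tlb(), the return values, and the !npt_enabled guard on the PAE bit are assumptions for illustration, not KVM's real definitions.

/* Sketch only: the quoted statements come from the hits, the rest is inferred. */
#define X86_CR4_PAE  (1UL << 5)
#define X86_CR4_PGE  (1UL << 7)
#define X86_CR4_VMXE (1UL << 13)

struct sketch_vcpu {
        unsigned long arch_cr4;    /* CR4 as the guest sees it */
        unsigned long vmcb_cr4;    /* CR4 actually loaded via the VMCB */
};

static int npt_enabled = 1;        /* nested paging available? (assumption) */
static unsigned long host_cr4_mce; /* host's CR4.MCE setting (from the hits) */

static void sketch_flush_tlb(struct sketch_vcpu *vcpu)
{
        (void)vcpu;                /* placeholder for a guest TLB flush */
}

static int sketch_svm_set_cr4(struct sketch_vcpu *vcpu, unsigned long cr4)
{
        unsigned long old_cr4 = vcpu->vmcb_cr4;

        if (cr4 & X86_CR4_VMXE)    /* VMX is not offered to SVM guests */
                return 1;

        if (npt_enabled && ((old_cr4 ^ cr4) & X86_CR4_PGE))
                sketch_flush_tlb(vcpu);  /* PGE changed: global entries are stale */

        vcpu->arch_cr4 = cr4;
        if (!npt_enabled)
                cr4 |= X86_CR4_PAE;      /* shadow paging runs the guest with PAE */
        cr4 |= host_cr4_mce;             /* keep the host's machine-check enable */
        vcpu->vmcb_cr4 = cr4;
        return 0;
}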
/arch/x86/kernel/acpi/realmode/
wakeup.S
   117  movl %ecx, %cr4
/arch/x86/kernel/acpi/
wakeup_64.S
    83  movq %rbx, %cr4
/arch/s390/kernel/
head31.S
    52  .long 0  # cr4: instruction authorization
head64.S
    53  .quad 0  # cr4: instruction authorization
