/arch/x86/power/

    hibernate_asm_64.S
         30  movq %r9, %cr3
         37  movq %cr3, %rcx;  # flush TLB
         38  movq %rcx, %cr3
         93  movq %cr3, %rax
        123  movq %rax, %cr3
        128  movq %cr3, %rcx;  # flush TLB
        129  movq %rcx, %cr3;

    hibernate_asm_32.S
         29  movl %cr3, %eax
         53  movl %eax, %cr3
         58  movl %cr3, %eax;  # flush TLB
         59  movl %eax, %cr3
         87  movl %ebp, %cr3

    hibernate.c
         60  unsigned long cr3;                              member
        118  rdr->cr3 = restore_cr3 & ~CR3_PCID_MASK;        in arch_hibernation_header_save()
        140  restore_cr3 = rdr->cr3;                         in arch_hibernation_header_restore()

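Note on the hibernate.c hits: arch_hibernation_header_save() stores restore_cr3 with the PCID field cleared, while the assembly above reloads CR3 around the page-table switch (the read-then-write pairs marked "# flush TLB"). A minimal user-space sketch of just the masking step, in C, assuming the PCID occupies CR3 bits 0-11; the sample value and the 0xFFF mask are illustrative, only the expression shape comes from the hit at line 118:

    #include <stdint.h>
    #include <stdio.h>

    /* Assumption: the PCID field is CR3 bits 0-11. */
    #define CR3_PCID_MASK 0xFFFull

    int main(void)
    {
        /* Hypothetical CR3 image: page-table base 0x1234000 with PCID 5. */
        uint64_t restore_cr3 = 0x1234000ull | 0x5;

        /* What the header-save hit stores: the base only, PCID cleared. */
        uint64_t saved = restore_cr3 & ~CR3_PCID_MASK;

        printf("cr3 with pcid : %#llx\n", (unsigned long long)restore_cr3);
        printf("cr3 as saved  : %#llx\n", (unsigned long long)saved);
        return 0;
    }
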
/arch/x86/kernel/

    sev_verify_cbit.S
         56  movq %cr3, %rcx
         59  movq %rdi, %cr3
         68  movq %rcx, %cr3

    relocate_kernel_64.S
         69  movq %cr3, %rax
        102  movq %r9, %cr3
        160  movq %r9, %cr3
        181  movq %cr3, %rax
        182  movq %rax, %cr3
        223  movq %rax, %cr3
        241  movq %rax, %cr3

    relocate_kernel_32.S
         54  movl %cr3, %eax
         87  movl %eax, %cr3
        136  movl %eax, %cr3
        150  movl %eax, %cr3
        192  movl %eax, %cr3
        209  movl %eax, %cr3

    process_32.c
         62  unsigned long cr0 = 0L, cr2 = 0L, cr3 = 0L, cr4 = 0L;    in __show_regs() local
         82  cr3 = __read_cr3();                                      in __show_regs()
         85  log_lvl, cr0, cr2, cr3, cr4);                            in __show_regs()

    asm-offsets_64.c
         51  ENTRY(cr3);                                              in main()

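Note on the asm-offsets_64.c hit: ENTRY(cr3) expands to an OFFSET() definition so that assembly code can refer to the cr3 slot of a saved-register structure by a named constant. The sketch below shows the offsetof() idea behind that; the real file emits the constants through the compiler's assembly output at build time rather than by running a program, and the structure here is a stand-in, not the kernel's layout:

    #include <stddef.h>
    #include <stdio.h>

    /* Stand-in for a saved-register structure with a cr3 slot. */
    struct saved_context {
        unsigned long cr0, cr2, cr3, cr4;
    };

    /* asm-offsets-style helpers: emit a symbolic constant for assembly. */
    #define OFFSET(sym, str, mem) \
        printf("#define %-20s %zu\n", #sym, offsetof(struct str, mem))
    #define ENTRY(entry) OFFSET(saved_context_##entry, saved_context, entry)

    int main(void)
    {
        ENTRY(cr0);
        ENTRY(cr2);
        ENTRY(cr3);    /* the shape of the hit at asm-offsets_64.c line 51 */
        ENTRY(cr4);
        return 0;
    }
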
/arch/x86/include/asm/xen/

    interface_32.h
        101  #define xen_cr3_to_pfn(cr3) (((unsigned)(cr3) >> 12) | ((unsigned)(cr3) << 20))    argument

    interface_64.h
        134  #define xen_cr3_to_pfn(cr3) ((unsigned long)(cr3) >> 12)                           argument

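Worked example for the two macros above: the 64-bit interface encodes the frame number as a plain shift (cr3 = pfn << 12), while the 32-bit PAE interface rotates the value by 12 bits so that a machine frame number whose shifted form would not fit in 32 bits still round-trips. A small demonstration in C; the macros are copied from the hits (renamed so both can coexist), the rotate-left inverse is written here as the obvious counterpart of the 32-bit form rather than copied from the header, and the 64-bit part assumes an LP64 build:

    #include <stdio.h>

    /* Copied from the hits above, renamed _32/_64 so both can coexist. */
    #define xen_cr3_to_pfn_32(cr3) (((unsigned)(cr3) >> 12) | ((unsigned)(cr3) << 20))
    #define xen_cr3_to_pfn_64(cr3) ((unsigned long)(cr3) >> 12)

    /* Illustrative inverse of the 32-bit form: rotate left by 12. */
    #define xen_pfn_to_cr3_32(pfn) (((unsigned)(pfn) << 12) | ((unsigned)(pfn) >> 20))

    int main(void)
    {
        unsigned pfn = 0x123456;               /* a frame number above 4 GiB */
        unsigned cr3_32 = xen_pfn_to_cr3_32(pfn);

        /* 32-bit ABI: the PFN is stored rotated, and the rotate undoes it. */
        printf("pfn %#x -> xen cr3 %#x -> pfn %#x\n",
               pfn, cr3_32, xen_cr3_to_pfn_32(cr3_32));

        /* 64-bit ABI: cr3 is simply pfn << 12 (unsigned long is 64-bit here). */
        unsigned long cr3_64 = 0x123456000ul;
        printf("xen cr3 %#lx -> pfn %#lx\n", cr3_64, xen_cr3_to_pfn_64(cr3_64));
        return 0;
    }
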
/arch/x86/platform/olpc/

    xo1-wakeup.S
         25  # Set up %cr3
         27  movl %eax, %cr3
         51  movl %cr3, %eax
         52  movl %eax, %cr3

/arch/x86/entry/

    calling.h
        169  mov %cr3, \scratch_reg
        171  mov \scratch_reg, %cr3
        180  mov %cr3, \scratch_reg
        208  mov \scratch_reg, %cr3
        220  movq %cr3, \scratch_reg
        231  movq \scratch_reg, %cr3
        268  movq \save_reg, %cr3

    entry_32.S
         60  movl %cr3, \scratch_reg
         62  movl \scratch_reg, %cr3
         75  movl %cr3, %eax
         90  movl %cr3, \scratch_reg
         95  movl \scratch_reg, %cr3
        296  movl \cr3_reg, %cr3

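Note on the calling.h hits: these are the PTI (page-table isolation) CR3 switches. SWITCH_TO_USER_CR3 reads CR3 into a scratch register, sets the bit that selects the user half of the kernel/user page-table pair plus the user-PCID bit, and writes it back; SWITCH_TO_KERNEL_CR3 clears the same bits. A user-space sketch of just that bit twiddling; the bit positions (12 for the page-table select, 11 for the user PCID) are quoted from memory, and the noflush/TLB-flush handling of the real macros is omitted:

    #include <stdint.h>
    #include <stdio.h>

    /* Assumed bit positions; illustrative, not copied from the headers. */
    #define PTI_USER_PGTABLE_MASK  (1ull << 12)   /* user half of the PGD pair */
    #define PTI_USER_PCID_MASK     (1ull << 11)   /* user PCID space */

    static uint64_t switch_to_user_cr3(uint64_t kernel_cr3)
    {
        /* What SWITCH_TO_USER_CR3 does to the scratch register. */
        return kernel_cr3 | PTI_USER_PGTABLE_MASK | PTI_USER_PCID_MASK;
    }

    static uint64_t switch_to_kernel_cr3(uint64_t any_cr3)
    {
        /* SWITCH_TO_KERNEL_CR3 clears the same two bits. */
        return any_cr3 & ~(PTI_USER_PGTABLE_MASK | PTI_USER_PCID_MASK);
    }

    int main(void)
    {
        uint64_t kcr3 = 0x2000000ull | 0x1;    /* hypothetical base + kernel PCID */
        uint64_t ucr3 = switch_to_user_cr3(kcr3);

        printf("kernel %#llx -> user %#llx -> kernel %#llx\n",
               (unsigned long long)kcr3, (unsigned long long)ucr3,
               (unsigned long long)switch_to_kernel_cr3(ucr3));
        return 0;
    }
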
/arch/x86/kvm/

    smm.c
         57  CHECK_SMRAM32_OFFSET(cr3, 0xFFF8);          in check_smram_offsets()
         97  CHECK_SMRAM64_OFFSET(cr3, 0xFF50);          in check_smram_offsets()
        191  smram->cr3 = kvm_read_cr3(vcpu);            in enter_smm_save_state_32()
        250  smram->cr3 = kvm_read_cr3(vcpu);            in enter_smm_save_state_64()
        423  u64 cr0, u64 cr3, u64 cr4)                  in rsm_enter_protected_mode() argument
        431  pcid = cr3 & 0xfff;                         in rsm_enter_protected_mode()
        432  cr3 &= ~0xfff;                              in rsm_enter_protected_mode()
        435  bad = kvm_set_cr3(vcpu, cr3);               in rsm_enter_protected_mode()
        457  bad = kvm_set_cr3(vcpu, cr3 | pcid);        in rsm_enter_protected_mode()
        507  smstate->cr3, smstate->cr4);                in rsm_load_state_32()
        [all …]

    smm.h
         64  u32 cr3;                                    member
        128  u64 cr3;                                    member

    tss.h
         13  u32 cr3;                                    member

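Note on the rsm_enter_protected_mode() hits: the CR3 value taken from SMRAM is split into its PCID (low 12 bits) and base, the base is loaded first with a zero PCID, and the combined value is only reloaded afterwards, since CR4.PCIDE can only be enabled while CR3[11:0] is zero (that ordering rationale is my reading of the architecture, not stated in the listing). A small C walk-through of the split; the CR3 value is made up and kvm_set_cr3() is stood in for by printf():

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        /* Hypothetical CR3 image restored from SMRAM on RSM. */
        uint64_t cr3  = 0x5000000ull | 0x7;    /* base + PCID 7 */
        uint64_t pcid = cr3 & 0xfff;           /* as in the hit at line 431 */

        cr3 &= ~0xfffull;                      /* line 432 */

        /* First load: base with PCID 0, legal while CR4.PCIDE is still clear. */
        printf("first  kvm_set_cr3: %#llx\n", (unsigned long long)cr3);

        /* After CR4 (and PCIDE) has been restored, re-apply the saved PCID. */
        printf("second kvm_set_cr3: %#llx\n", (unsigned long long)(cr3 | pcid));
        return 0;
    }
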
/arch/x86/boot/compressed/

    efi_mixed.S
        188  movl %cr3, %eax
        189  movl %eax, %cr3
        220  movl %cr3, %eax
        221  movl %eax, %cr3

/arch/x86/kernel/acpi/

    wakeup_32.S
         27  movl %cr3, %eax
         28  movl %eax, %cr3

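Note on the wakeup_32.S hits: reading CR3 and writing the same value back is the classic way to flush non-global TLB entries; the efi_mixed.S, xo1-wakeup.S and hibernate assembly hits elsewhere in this listing use the same idiom. A C rendering of the idea is sketched below: it compiles for x86, but MOV to/from CR3 is privileged, so it can only execute in ring 0, and real kernel code layers PGE and PCID handling on top of this:

    /* Reload CR3 with its current value: drops non-global TLB entries. */
    static inline void flush_tlb_by_cr3_reload(void)
    {
        unsigned long cr3;

        asm volatile("mov %%cr3, %0" : "=r" (cr3));              /* movl %cr3, %eax */
        asm volatile("mov %0, %%cr3" : : "r" (cr3) : "memory");  /* movl %eax, %cr3 */
    }

    int main(void)
    {
        /* Not called here: executing it outside ring 0 raises a fault. */
        (void)flush_tlb_by_cr3_reload;
        return 0;
    }
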
/arch/x86/mm/

    tlb.c
        160  unsigned long cr3 = __sme_pa(pgd) | lam;                in build_cr3() local
        164  cr3 |= kern_pcid(asid);                                 in build_cr3()
        169  return cr3;                                             in build_cr3()
        704  unsigned long cr3 = __read_cr3();                       in initialize_tlbstate_and_flush() local
        707  WARN_ON((cr3 & CR3_ADDR_MASK) != __pa(mm->pgd));        in initialize_tlbstate_and_flush()
        710  WARN_ON(cr3 & (X86_CR3_LAM_U48 | X86_CR3_LAM_U57));     in initialize_tlbstate_and_flush()
       1095  unsigned long cr3 =                                     in __get_current_cr3_fast() local
       1103  VM_BUG_ON(cr3 != __read_cr3());                         in __get_current_cr3_fast()
       1104  return cr3;                                             in __get_current_cr3_fast()

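Note on the tlb.c hits: build_cr3() composes the value that eventually reaches the hardware register from the PGD's physical address (__sme_pa() also folds in the SME C-bit when memory encryption is active), the LAM enable bits, and a kernel PCID derived from the software ASID; initialize_tlbstate_and_flush() then sanity-checks the live CR3 against CR3_ADDR_MASK and the LAM bits. A user-space sketch of the same composition; kern_pcid() returning asid + 1 and the CR3_ADDR_MASK value are assumptions for illustration, not copied from the kernel headers:

    #include <stdint.h>
    #include <stdio.h>

    #define CR3_ADDR_MASK  0x7FFFFFFFFFFFF000ull   /* assumed: page-table base bits */

    static uint64_t kern_pcid(uint16_t asid)
    {
        /* Assumption: kernel ASIDs map onto PCIDs starting at 1. */
        return (uint64_t)asid + 1;
    }

    static uint64_t build_cr3_sketch(uint64_t pgd_pa, uint64_t lam, uint16_t asid)
    {
        uint64_t cr3 = pgd_pa | lam;   /* physical PGD address plus LAM bits */

        cr3 |= kern_pcid(asid);        /* then the PCID, as in the hit at line 164 */
        return cr3;
    }

    int main(void)
    {
        uint64_t cr3 = build_cr3_sketch(0x3456000ull, 0, 0);

        printf("cr3 %#llx: base %#llx, pcid %llu\n",
               (unsigned long long)cr3,
               (unsigned long long)(cr3 & CR3_ADDR_MASK),
               (unsigned long long)(cr3 & 0xfff));
        return 0;
    }
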
/arch/x86/include/asm/

    suspend_32.h
         15  unsigned long cr0, cr2, cr3, cr4;           member

    suspend_64.h
         41  unsigned long cr0, cr2, cr3, cr4;           member

/arch/x86/kvm/svm/

    nested.c
         62  u64 cr3 = svm->nested.ctl.nested_cr3;                          in nested_svm_get_tdp_pdptr() local
         66  ret = kvm_vcpu_read_guest_page(vcpu, gpa_to_gfn(cr3), &pdpte,  in nested_svm_get_tdp_pdptr()
         67  offset_in_page(cr3) + index * 8, 8);                           in nested_svm_get_tdp_pdptr()
        299  CC(kvm_vcpu_is_illegal_gpa(vcpu, save->cr3)))                  in __nested_vmcb_check_save()
        389  to->cr3 = from->cr3;                                           in __nested_copy_vmcb_save_to_cache()
        505  static int nested_svm_load_cr3(struct kvm_vcpu *vcpu, unsigned long cr3,    in nested_svm_load_cr3() argument
        508  if (CC(kvm_vcpu_is_illegal_gpa(vcpu, cr3)))                    in nested_svm_load_cr3()
        512  CC(!load_pdptrs(vcpu, cr3)))                                   in nested_svm_load_cr3()
        515  vcpu->arch.cr3 = cr3;                                          in nested_svm_load_cr3()
        521  kvm_mmu_new_pgd(vcpu, cr3);                                    in nested_svm_load_cr3()
        [all …]

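Note on the nested_svm_get_tdp_pdptr() hits: the gfn/offset pair passed to kvm_vcpu_read_guest_page() addresses PAE page-directory-pointer entry 'index' at guest-physical address nested_cr3 + index * 8. The sketch below reproduces only that address arithmetic; gpa_to_gfn() and offset_in_page() are re-implemented locally for illustration and the CR3 value is made up:

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1ull << PAGE_SHIFT)

    static uint64_t gpa_to_gfn(uint64_t gpa)      { return gpa >> PAGE_SHIFT; }
    static uint64_t offset_in_page(uint64_t addr) { return addr & (PAGE_SIZE - 1); }

    /* Guest-physical address of PDPTE 'index' below a PAE nested_cr3. */
    static uint64_t pdpte_gpa(uint64_t nested_cr3, unsigned int index)
    {
        /* Mirrors the arguments in the hits at lines 66-67. */
        return (gpa_to_gfn(nested_cr3) << PAGE_SHIFT)
               + offset_in_page(nested_cr3) + index * 8;
    }

    int main(void)
    {
        uint64_t cr3 = 0x12345020ull;   /* hypothetical 32-byte-aligned PDPT base */

        for (unsigned int i = 0; i < 4; i++)
            printf("PDPTE[%u] at gpa %#llx\n", i, (unsigned long long)pdpte_gpa(cr3, i));
        return 0;
    }
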
/arch/x86/platform/pvh/

    head.S
         87  mov %eax, %cr3
        116  mov %eax, %cr3

/arch/powerpc/boot/

    ppc_asm.h
         16  #define cr3 3                               macro

/arch/powerpc/kernel/

    cpu_setup_6xx.S
        365  cmplwi cr3,r3,0x8001 /* 7455 */
        373  cror 4*cr0+eq,4*cr0+eq,4*cr3+eq
        436  cmplwi cr3,r3,0x8001 /* 7455 */
        444  cror 4*cr0+eq,4*cr0+eq,4*cr3+eq