Searched refs:pv_mmu_ops (Results 1 – 9 of 9) sorted by relevance
/arch/x86/include/asm/

paravirt.h
     65  return PVOP_CALL0(unsigned long, pv_mmu_ops.read_cr2);    in read_cr2()
     70  PVOP_VCALL1(pv_mmu_ops.write_cr2, x);                     in write_cr2()
     75  return PVOP_CALL0(unsigned long, pv_mmu_ops.read_cr3);    in read_cr3()
     80  PVOP_VCALL1(pv_mmu_ops.write_cr3, x);                     in write_cr3()
    330  PVOP_VCALL2(pv_mmu_ops.activate_mm, prev, next);          in paravirt_activate_mm()
    336  PVOP_VCALL2(pv_mmu_ops.dup_mmap, oldmm, mm);              in arch_dup_mmap()
    341  PVOP_VCALL1(pv_mmu_ops.exit_mmap, mm);                    in arch_exit_mmap()
    346  PVOP_VCALL0(pv_mmu_ops.flush_tlb_user);                   in __flush_tlb()
    350  PVOP_VCALL0(pv_mmu_ops.flush_tlb_kernel);                 in __flush_tlb_global()
    354  PVOP_VCALL1(pv_mmu_ops.flush_tlb_single, addr);           in __flush_tlb_single()
    [all …]
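The paravirt.h hits are the inline wrappers that funnel every MMU operation through the pv_mmu_ops table: the PVOP_CALL*/PVOP_VCALL* macros expand to, roughly, an indirect call through the named slot plus the bookkeeping that lets the call site be patched later. A minimal, self-contained sketch of that dispatch pattern follows; all *_demo names are made up for illustration and are not kernel symbols.

    #include <stdio.h>

    /* Illustrative stand-in for the kernel's ops table: a struct of
     * function pointers that every call site dispatches through. */
    struct mmu_ops_demo {
            unsigned long (*read_cr3)(void);
            void (*flush_tlb_single)(unsigned long addr);
    };

    /* "Native" baseline implementations (stubs standing in for real
     * register access and TLB maintenance). */
    static unsigned long native_read_cr3_demo(void)
    {
            return 0x1000;                  /* pretend CR3 value */
    }

    static void native_flush_tlb_single_demo(unsigned long addr)
    {
            printf("flush TLB entry for %#lx\n", addr);
    }

    /* The table starts out pointing at the native implementations,
     * the way pv_mmu_ops does. */
    static struct mmu_ops_demo mmu_ops = {
            .read_cr3         = native_read_cr3_demo,
            .flush_tlb_single = native_flush_tlb_single_demo,
    };

    /* Wrappers in the spirit of read_cr3() / __flush_tlb_single() above:
     * callers name an ops slot, never a concrete implementation. */
    static inline unsigned long read_cr3_demo(void)
    {
            return mmu_ops.read_cr3();
    }

    static inline void flush_tlb_single_demo(unsigned long addr)
    {
            mmu_ops.flush_tlb_single(addr);
    }

    int main(void)
    {
            printf("cr3 = %#lx\n", read_cr3_demo());
            flush_tlb_single_demo(0xffff8000UL);
            return 0;
    }

Because callers only ever name a slot, a hypervisor port can change behaviour by reassigning pointers, and the patching machinery below can later remove the indirection entirely for the hot paths.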
paravirt_types.h
    228  struct pv_mmu_ops {                       struct
    350  struct pv_mmu_ops pv_mmu_ops;             member
    360  extern struct pv_mmu_ops pv_mmu_ops;
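Line 228 of paravirt_types.h is where the table itself is declared. The declaration-only sketch below is an abridged paraphrase built from the slots visible in this search; the signatures are reconstructed from the call sites above and should be treated as assumptions rather than a copy of the header.

    /* Placeholder types so this declaration-only sketch compiles on its
     * own; the kernel uses its real pte_t/pmd_t and struct mm_struct. */
    typedef struct { unsigned long val; } pte_t;
    typedef struct { unsigned long val; } pmd_t;
    struct mm_struct;

    /* Abridged paraphrase of struct pv_mmu_ops: only slots that appear
     * in the hits above are listed, with simplified argument types. */
    struct pv_mmu_ops {
            /* control-register access */
            unsigned long (*read_cr2)(void);
            void (*write_cr2)(unsigned long);
            unsigned long (*read_cr3)(void);
            void (*write_cr3)(unsigned long);

            /* TLB maintenance */
            void (*flush_tlb_user)(void);
            void (*flush_tlb_kernel)(void);
            void (*flush_tlb_single)(unsigned long addr);

            /* page-table updates */
            void (*set_pte)(pte_t *ptep, pte_t pteval);
            void (*set_pmd)(pmd_t *pmdp, pmd_t pmdval);
            void (*pte_clear)(struct mm_struct *mm, unsigned long addr,
                              pte_t *ptep);

            /* address-space lifecycle */
            void (*activate_mm)(struct mm_struct *prev,
                                struct mm_struct *next);
            void (*dup_mmap)(struct mm_struct *oldmm, struct mm_struct *mm);
            void (*exit_mmap)(struct mm_struct *mm);
    };

    /* One copy is embedded in the patch template (the "member" hit at
     * line 350) and the shared global instance is declared extern here
     * (line 360) and defined in arch/x86/kernel/paravirt.c. */
    extern struct pv_mmu_ops pv_mmu_ops;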
/arch/x86/kernel/

paravirt_patch_64.c
      9  DEF_NATIVE(pv_mmu_ops, read_cr2, "movq %cr2, %rax");
     10  DEF_NATIVE(pv_mmu_ops, read_cr3, "movq %cr3, %rax");
     11  DEF_NATIVE(pv_mmu_ops, write_cr3, "movq %rdi, %cr3");
     12  DEF_NATIVE(pv_mmu_ops, flush_tlb_single, "invlpg (%rdi)");
     56  PATCH_SITE(pv_mmu_ops, read_cr2);             in native_patch()
     57  PATCH_SITE(pv_mmu_ops, read_cr3);             in native_patch()
     58  PATCH_SITE(pv_mmu_ops, write_cr3);            in native_patch()
     60  PATCH_SITE(pv_mmu_ops, flush_tlb_single);     in native_patch()

paravirt_patch_32.c
      9  DEF_NATIVE(pv_mmu_ops, read_cr2, "mov %cr2, %eax");
     10  DEF_NATIVE(pv_mmu_ops, write_cr3, "mov %eax, %cr3");
     11  DEF_NATIVE(pv_mmu_ops, read_cr3, "mov %cr3, %eax");
     45  PATCH_SITE(pv_mmu_ops, read_cr2);             in native_patch()
     46  PATCH_SITE(pv_mmu_ops, read_cr3);             in native_patch()
     47  PATCH_SITE(pv_mmu_ops, write_cr3);            in native_patch()
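The two patch files (32- and 64-bit variants) supply native replacement code for the hottest slots: DEF_NATIVE appears to emit the given instruction sequence between start_/end_ labels, and the PATCH_SITE cases in native_patch() copy those bytes over the patchable call site when they fit, so a bare-metal boot ends up with a plain mov or invlpg instead of an indirect call. Below is a toy, userspace-only model of that copy-if-it-fits step; byte values and names are illustrative, and the real code patches live kernel text rather than a buffer.

    #include <stdio.h>
    #include <string.h>

    /* Toy model of DEF_NATIVE + PATCH_SITE: each native snippet is just
     * a byte string here, whereas the kernel emits real machine code
     * between start_ and end_ labels via inline asm. */
    struct native_snippet_demo {
            const char *slot;           /* which pv_mmu_ops member it inlines */
            const unsigned char *code;  /* native instruction bytes */
            size_t len;
    };

    /* 0x0f 0x20 0xd8 encodes "movq %cr3, %rax" on x86-64. */
    static const unsigned char read_cr3_native_demo[] = { 0x0f, 0x20, 0xd8 };

    static const struct native_snippet_demo snippets_demo[] = {
            { "pv_mmu_ops.read_cr3", read_cr3_native_demo,
              sizeof(read_cr3_native_demo) },
    };

    /*
     * Rough shape of what a PATCH_SITE case does at boot on bare metal:
     * if the native bytes fit in the room reserved at the call site,
     * copy them over the indirect call; otherwise leave the call alone.
     * Returns the number of bytes written.
     */
    static size_t patch_site_demo(unsigned char *site, size_t room,
                                  const struct native_snippet_demo *s)
    {
            if (s->len > room)
                    return 0;               /* keep the indirect call */
            memcpy(site, s->code, s->len);  /* inline the native instruction */
            return s->len;
    }

    int main(void)
    {
            unsigned char call_site[8] = { 0 };     /* pretend patch area */
            size_t n = patch_site_demo(call_site, sizeof(call_site),
                                       &snippets_demo[0]);

            printf("patched %zu byte(s) for %s\n", n, snippets_demo[0].slot);
            return 0;
    }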
paravirt.c
    137  .pv_mmu_ops = pv_mmu_ops,                 in get_call_destination()
    420  struct pv_mmu_ops pv_mmu_ops = {          variable
    494  EXPORT_SYMBOL (pv_mmu_ops)
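paravirt.c defines the single global instance (line 420), which starts out filled with the native implementations and is exported (line 494) so modules using the paravirt wrappers can link against it; get_call_destination() (line 137) snapshots all the ops groups into a template so a patch site's identifier can be turned back into the function pointer currently installed in that slot. A rough, self-contained analogue is sketched below: the kernel indexes the template in pointer-sized units, plain byte offsets are used here, and every *_demo name is made up.

    #include <stdio.h>
    #include <string.h>
    #include <stddef.h>

    struct mmu_ops_demo {
            unsigned long (*read_cr3)(void);
            void (*write_cr3)(unsigned long cr3);
    };

    /* The real paravirt_patch_template embeds every ops group
     * (cpu, irq, mmu, ...); only an MMU group is modelled here. */
    struct patch_template_demo {
            struct mmu_ops_demo mmu;
    };

    static unsigned long native_read_cr3_demo(void) { return 0; }
    static void native_write_cr3_demo(unsigned long cr3) { (void)cr3; }

    /* Default table: every slot starts out native, mirroring the
     * definition at paravirt.c line 420. */
    static struct mmu_ops_demo mmu_ops = {
            .read_cr3  = native_read_cr3_demo,
            .write_cr3 = native_write_cr3_demo,
    };

    static void *call_destination_demo(size_t offset)
    {
            /* Snapshot, like ".pv_mmu_ops = pv_mmu_ops" at line 137. */
            struct patch_template_demo tmpl = { .mmu = mmu_ops };
            void *dest;

            /* Assumes function and data pointers have the same size,
             * which holds on the platforms this models. */
            memcpy(&dest, (const char *)&tmpl + offset, sizeof(dest));
            return dest;
    }

    int main(void)
    {
            size_t off = offsetof(struct patch_template_demo, mmu) +
                         offsetof(struct mmu_ops_demo, write_cr3);

            printf("write_cr3 slot resolves to its native default: %s\n",
                   call_destination_demo(off) == (void *)native_write_cr3_demo ?
                   "yes" : "no");
            return 0;
    }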
asm-offsets.c
     54  OFFSET(PV_MMU_read_cr2, pv_mmu_ops, read_cr2);    in common()
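asm-offsets.c turns the C-level layout into assembler constants at build time: the OFFSET() line records the byte offset of read_cr2 inside struct pv_mmu_ops under the symbol PV_MMU_read_cr2, presumably so low-level assembly can locate that slot relative to the pv_mmu_ops symbol without hard-coding the struct layout. The same kind of number can be computed with plain offsetof(); the struct below is a stand-in, not the real layout.

    #include <stdio.h>
    #include <stddef.h>

    /* Stand-in layout; the build records the offset into the real
     * struct pv_mmu_ops, not this demo struct. */
    struct pv_mmu_ops_demo {
            unsigned long (*read_cr2)(void);
            void (*write_cr2)(unsigned long);
    };

    int main(void)
    {
            /* asm-offsets.c line 54 captures the analogous constant for
             * the real structure (as the assembler symbol
             * PV_MMU_read_cr2) by compiling that file to assembly and
             * harvesting the value at build time. */
            printf("offsetof(read_cr2) = %zu\n",
                   offsetof(struct pv_mmu_ops_demo, read_cr2));
            return 0;
    }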
/arch/x86/lguest/

boot.c
   1326  pv_mmu_ops.write_cr3 = lguest_write_cr3;                  in lguest_init()
   1327  pv_mmu_ops.flush_tlb_user = lguest_flush_tlb_user;        in lguest_init()
   1328  pv_mmu_ops.flush_tlb_single = lguest_flush_tlb_single;    in lguest_init()
   1329  pv_mmu_ops.flush_tlb_kernel = lguest_flush_tlb_kernel;    in lguest_init()
   1330  pv_mmu_ops.set_pte = lguest_set_pte;                      in lguest_init()
   1331  pv_mmu_ops.set_pte_at = lguest_set_pte_at;                in lguest_init()
   1332  pv_mmu_ops.set_pmd = lguest_set_pmd;                      in lguest_init()
   1334  pv_mmu_ops.set_pte_atomic = lguest_set_pte_atomic;        in lguest_init()
   1335  pv_mmu_ops.pte_clear = lguest_pte_clear;                  in lguest_init()
   1336  pv_mmu_ops.pmd_clear = lguest_pmd_clear;                  in lguest_init()
   [all …]
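The lguest hits show the consumer side of the interface: at guest init the port simply reassigns individual slots to its own implementations and leaves everything it does not care about at the native default. A compact model of that pattern follows; the guest_*/native_*_demo functions are stand-ins, not the real lguest_* helpers, and the printf calls merely mark where hypercalls or hardware access would happen.

    #include <stdio.h>

    struct mmu_ops_demo {
            void (*write_cr3)(unsigned long cr3);
            void (*flush_tlb_user)(void);
    };

    static void native_write_cr3_demo(unsigned long cr3)
    {
            printf("native: load cr3 with %#lx\n", cr3);
    }

    static void native_flush_tlb_user_demo(void)
    {
            printf("native: flush user TLB entries\n");
    }

    static struct mmu_ops_demo mmu_ops = {
            .write_cr3      = native_write_cr3_demo,
            .flush_tlb_user = native_flush_tlb_user_demo,
    };

    static void guest_write_cr3_demo(unsigned long cr3)
    {
            printf("guest: hypercall to switch page tables to %#lx\n", cr3);
    }

    static void guest_flush_tlb_user_demo(void)
    {
            printf("guest: hypercall to flush the TLB\n");
    }

    /* Same shape as "pv_mmu_ops.write_cr3 = lguest_write_cr3;" and
     * friends in lguest_init(): plain assignments at guest boot. */
    static void guest_init_demo(void)
    {
            mmu_ops.write_cr3 = guest_write_cr3_demo;
            mmu_ops.flush_tlb_user = guest_flush_tlb_user_demo;
    }

    int main(void)
    {
            mmu_ops.write_cr3(0x2000);      /* still the native default */
            guest_init_demo();
            mmu_ops.write_cr3(0x2000);      /* now routed to the hypervisor */
            return 0;
    }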
/arch/x86/xen/

mmu.c
   1970  pv_mmu_ops.write_cr3 = &xen_write_cr3;       in xen_write_cr3_init()
   2078  pv_mmu_ops.set_pte = xen_set_pte;            in xen_post_allocator_init()
   2079  pv_mmu_ops.set_pmd = xen_set_pmd;            in xen_post_allocator_init()
   2080  pv_mmu_ops.set_pud = xen_set_pud;            in xen_post_allocator_init()
   2082  pv_mmu_ops.set_pgd = xen_set_pgd;            in xen_post_allocator_init()
   2087  pv_mmu_ops.alloc_pte = xen_alloc_pte;        in xen_post_allocator_init()
   2088  pv_mmu_ops.alloc_pmd = xen_alloc_pmd;        in xen_post_allocator_init()
   2089  pv_mmu_ops.release_pte = xen_release_pte;    in xen_post_allocator_init()
   2090  pv_mmu_ops.release_pmd = xen_release_pmd;    in xen_post_allocator_init()
   2092  pv_mmu_ops.alloc_pud = xen_alloc_pud;        in xen_post_allocator_init()
   [all …]

enlighten.c
   1197  pv_mmu_ops.read_cr2 = xen_read_cr2_direct;                          in xen_setup_vcpu_info_placement()
   1642  pv_mmu_ops.ptep_modify_prot_start = xen_ptep_modify_prot_start;     in xen_start_kernel()
   1643  pv_mmu_ops.ptep_modify_prot_commit = xen_ptep_modify_prot_commit;   in xen_start_kernel()
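The Xen hits illustrate staged overrides: write_cr3 is retargeted from xen_write_cr3_init(), the set_*/alloc_*/release_* slots from xen_post_allocator_init(), and read_cr2 from xen_setup_vcpu_info_placement(); judging by those function names, the switches are deferred until the relevant machinery is ready. The sketch below captures just that late-switch idea under made-up names: start with a conservative implementation, repoint the slot once the prerequisite is in place, and every later caller takes the faster path automatically.

    #include <stdio.h>

    /* Minimal model of a staged override: a slot starts out at a
     * conservative implementation and is repointed at a faster one
     * only after its prerequisite is set up. */
    struct mmu_ops_demo {
            unsigned long (*read_cr2)(void);
    };

    static unsigned long read_cr2_early_demo(void)
    {
            return 0x1111;                  /* safe path, always available */
    }

    static unsigned long read_cr2_direct_demo(void)
    {
            return 0x2222;                  /* fast path, valid only later */
    }

    static struct mmu_ops_demo mmu_ops = {
            .read_cr2 = read_cr2_early_demo,
    };

    /* Analogue of xen_setup_vcpu_info_placement(): once the
     * prerequisite exists, retarget the slot; later callers pick up
     * the faster implementation without being notified. */
    static void setup_fast_path_demo(void)
    {
            mmu_ops.read_cr2 = read_cr2_direct_demo;
    }

    int main(void)
    {
            printf("early: cr2 = %#lx\n", mmu_ops.read_cr2());
            setup_fast_path_demo();
            printf("later: cr2 = %#lx\n", mmu_ops.read_cr2());
            return 0;
    }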