Searched defs:mmu (Results 1 – 25 of 36) sorted by relevance

/arch/arm64/kvm/hyp/nvhe/
tlb.c 14 struct kvm_s2_mmu *mmu; member
19 static void enter_vmid_context(struct kvm_s2_mmu *mmu, in enter_vmid_context()
94 struct kvm_s2_mmu *mmu = cxt->mmu; in exit_vmid_context() local
124 void __kvm_tlb_flush_vmid_ipa(struct kvm_s2_mmu *mmu, in __kvm_tlb_flush_vmid_ipa()
178 void __kvm_tlb_flush_vmid(struct kvm_s2_mmu *mmu) in __kvm_tlb_flush_vmid()
194 void __kvm_flush_cpu_context(struct kvm_s2_mmu *mmu) in __kvm_flush_cpu_context()
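
The nvhe/tlb.c hits above share one shape: each flush helper enters the target guest's VMID context, performs the TLB maintenance, then restores the previous context. A minimal sketch of that enter/exit pattern, assuming kernel context (u64, struct kvm_s2_mmu, the arm64 read_sysreg()/write_sysreg()/isb() accessors); the _sketch names are hypothetical and the real enter_vmid_context() handles considerably more state:

    struct tlb_ctx_sketch {
        u64 saved_vttbr;        /* translation context live before the flush */
    };

    static void enter_vmid_context_sketch(struct kvm_s2_mmu *mmu,
                                          struct tlb_ctx_sketch *cxt)
    {
        cxt->saved_vttbr = read_sysreg(vttbr_el2);   /* save current VMID/base */
        write_sysreg(kvm_get_vttbr(mmu), vttbr_el2); /* switch to target guest */
        isb();                                       /* synchronize before TLBI */
    }

    static void exit_vmid_context_sketch(struct tlb_ctx_sketch *cxt)
    {
        write_sysreg(cxt->saved_vttbr, vttbr_el2);   /* back to saved context */
        isb();
    }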
switch.c 282 struct kvm_s2_mmu *mmu; in __kvm_vcpu_run() local
mem_protect.c 166 struct kvm_s2_mmu *mmu = &host_mmu.arch.mmu; in kvm_host_prepare_stage2() local
273 struct kvm_s2_mmu *mmu = &vm->kvm.arch.mmu; in kvm_guest_prepare_stage2() local
380 struct kvm_s2_mmu *mmu = &host_mmu.arch.mmu; in __pkvm_prot_finalize() local
pkvm.c 618 struct kvm_s2_mmu *mmu = &hyp_vm->kvm.arch.mmu; in insert_vm_table_entry() local
/arch/arm64/kvm/hyp/vhe/
tlb.c 19 static void __tlb_switch_to_guest(struct kvm_s2_mmu *mmu, in __tlb_switch_to_guest()
82 void __kvm_tlb_flush_vmid_ipa(struct kvm_s2_mmu *mmu, in __kvm_tlb_flush_vmid_ipa()
114 void __kvm_tlb_flush_vmid(struct kvm_s2_mmu *mmu) in __kvm_tlb_flush_vmid()
130 void __kvm_flush_cpu_context(struct kvm_s2_mmu *mmu) in __kvm_flush_cpu_context()
/arch/arm64/include/asm/
kvm_mmu.h 272 static __always_inline u64 kvm_get_vttbr(struct kvm_s2_mmu *mmu) in kvm_get_vttbr()
288 static __always_inline void __load_stage2(struct kvm_s2_mmu *mmu, in __load_stage2()
302 static inline struct kvm *kvm_s2_mmu_to_kvm(struct kvm_s2_mmu *mmu) in kvm_s2_mmu_to_kvm()
kvm_pgtable.h 280 struct kvm_s2_mmu *mmu; member
420 #define kvm_pgtable_stage2_init(pgt, mmu, mm_ops, pte_ops) \ argument
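
kvm_get_vttbr() at kvm_mmu.h:272 builds the VTTBR_EL2 value that __load_stage2() programs: the stage-2 table base address in the low bits, the VMID at bit 48. A reduced sketch of the packing, assuming kernel context (VTTBR_VMID_SHIFT matches the kernel's value; the CnP bit and the atomic VMID read of the real helper are omitted):

    #define VTTBR_VMID_SHIFT    48

    static inline u64 make_vttbr_sketch(u64 pgd_phys, u16 vmid)
    {
        /* base address | VMID field; the real helper also ORs in CnP */
        return pgd_phys | ((u64)vmid << VTTBR_VMID_SHIFT);
    }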
/arch/x86/kvm/mmu/
tdp_mmu.h 94 static inline bool is_tdp_mmu(struct kvm_mmu *mmu) in is_tdp_mmu()
115 static inline bool is_tdp_mmu(struct kvm_mmu *mmu) { return false; } in is_tdp_mmu()
paging_tmpl.h 34 #define PT_HAVE_ACCESSED_DIRTY(mmu) true argument
53 #define PT_HAVE_ACCESSED_DIRTY(mmu) true argument
66 #define PT_HAVE_ACCESSED_DIRTY(mmu) ((mmu)->ept_ad) argument
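
The three PT_HAVE_ACCESSED_DIRTY hits are the same macro redefined per paging mode: paging_tmpl.h is included several times, once per PTTYPE, and only the EPT instantiation makes accessed/dirty support conditional on (mmu)->ept_ad, since EPT A/D bits are an optional VMX feature. A self-contained miniature of that include-as-template idiom; FNAME is the real prefix macro, everything else here is a hypothetical reduction:

    #include <stdbool.h>
    #include <stdio.h>

    struct mmu { bool ept_ad; };

    /* First "instantiation": legacy paging, A/D bits always present. */
    #define PT_HAVE_ACCESSED_DIRTY(mmu) true
    #define FNAME(name) paging64_##name
    static bool FNAME(has_ad)(struct mmu *m) { return PT_HAVE_ACCESSED_DIRTY(m); }
    #undef FNAME
    #undef PT_HAVE_ACCESSED_DIRTY

    /* Second "instantiation": EPT, A/D bits gated on hardware support. */
    #define PT_HAVE_ACCESSED_DIRTY(mmu) ((mmu)->ept_ad)
    #define FNAME(name) ept_##name
    static bool FNAME(has_ad)(struct mmu *m) { return PT_HAVE_ACCESSED_DIRTY(m); }
    #undef FNAME
    #undef PT_HAVE_ACCESSED_DIRTY

    int main(void)
    {
        struct mmu m = { .ept_ad = false };
        printf("%d %d\n", paging64_has_ad(&m), ept_has_ad(&m)); /* prints: 1 0 */
        return 0;
    }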
mmu.c 3330 void kvm_mmu_free_roots(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, in kvm_mmu_free_roots()
3381 void kvm_mmu_free_guest_mode_roots(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu) in kvm_mmu_free_guest_mode_roots()
3433 struct kvm_mmu *mmu = vcpu->arch.mmu; in mmu_alloc_direct_roots() local
3480 struct kvm_mmu *mmu = vcpu->arch.mmu; in mmu_alloc_shadow_roots() local
3590 struct kvm_mmu *mmu = vcpu->arch.mmu; in mmu_alloc_special_roots() local
4136 struct kvm_mmu *mmu = vcpu->arch.mmu; in cached_root_available() local
4160 struct kvm_mmu *mmu = vcpu->arch.mmu; in fast_pgd_switch() local
4514 static void update_permission_bitmask(struct kvm_mmu *mmu, bool ept) in update_permission_bitmask()
4610 static void update_pkru_bitmask(struct kvm_mmu *mmu) in update_pkru_bitmask()
4655 struct kvm_mmu *mmu) in reset_guest_paging_metadata()
[all …]
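
update_permission_bitmask() (mmu.c:4514) is the slow half of a precompute/lookup split: when the guest's paging state changes, KVM rebuilds a table of allowed/denied outcomes so that the hot path (see permission_fault() in mmu.h below) reduces to an index operation. A self-contained miniature of the idea; the access-bit encoding and names are hypothetical, not KVM's actual layout:

    #include <stdbool.h>
    #include <stdint.h>

    #define ACC_WRITE   1u      /* hypothetical access-request bits */
    #define ACC_USER    2u
    #define ACC_EXEC    4u

    static uint8_t access_allowed[8];   /* indexed by requested-access mask */

    /* Rebuilt whenever protection state changes (the "bitmask update"). */
    static void update_permissions(bool writable, bool user_ok, bool no_exec)
    {
        for (unsigned int acc = 0; acc < 8; acc++) {
            bool ok = true;

            if ((acc & ACC_WRITE) && !writable)
                ok = false;
            if ((acc & ACC_USER) && !user_ok)
                ok = false;
            if ((acc & ACC_EXEC) && no_exec)
                ok = false;
            access_allowed[acc] = ok;
        }
    }

    /* Hot path: a single table lookup instead of re-deriving the rules. */
    static bool permission_fault_sketch(unsigned int acc)
    {
        return !access_allowed[acc & 7];
    }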
tdp_mmu.c 1027 struct kvm_mmu *mmu = vcpu->arch.mmu; in kvm_tdp_mmu_map() local
1567 struct kvm_mmu *mmu = vcpu->arch.mmu; in kvm_tdp_mmu_get_walk() local
1596 struct kvm_mmu *mmu = vcpu->arch.mmu; in kvm_tdp_mmu_fast_pf_get_last_sptep() local
/arch/um/kernel/skas/
mmu.c 62 struct mm_context *mmu = &mm->context; in destroy_context() local
/arch/arc/mm/
tlb.c 134 struct cpuinfo_arc_mmu *mmu = &cpuinfo_arc700[smp_processor_id()].mmu; in local_flush_tlb_all() local
565 struct cpuinfo_arc_mmu *mmu = &cpuinfo_arc700[smp_processor_id()].mmu; in read_decode_mmu_bcr() local
640 struct cpuinfo_arc_mmu *mmu = &cpuinfo_arc700[smp_processor_id()].mmu; in arc_mmu_init() local
707 #define SET_WAY_TO_IDX(mmu, set, way) ((set) * mmu->ways + (way)) argument
722 struct cpuinfo_arc_mmu *mmu = &cpuinfo_arc700[smp_processor_id()].mmu; in do_tlb_overlap_fault() local
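
SET_WAY_TO_IDX() at tlb.c:707 flattens a (set, way) coordinate into a linear TLB-entry index, way-major within each set; it sits alongside do_tlb_overlap_fault(), which scans every entry. The arithmetic restated as a function, with an illustrative geometry:

    struct mmu_geometry { unsigned int sets, ways; }; /* e.g. 128 sets x 4 ways */

    static unsigned int set_way_to_idx(const struct mmu_geometry *mmu,
                                       unsigned int set, unsigned int way)
    {
        return set * mmu->ways + way;   /* SET_WAY_TO_IDX(), as a function */
    }
    /* With 4 ways: (set 0, way 3) -> 3, (set 1, way 0) -> 4, ... */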
/arch/riscv/kernel/
cpu.c 109 const char *compat, *isa, *mmu; in c_show() local
/arch/m68k/kernel/
setup_no.c 178 char *cpu, *mmu, *fpu; in show_cpuinfo() local
setup_mm.c 386 const char *cpu, *mmu, *fpu; in show_cpuinfo() local
/arch/nios2/include/asm/
cpuinfo.h 15 bool mmu; member
/arch/arm64/kvm/
mmu.c 211 static void __unmap_stage2_range(struct kvm_s2_mmu *mmu, phys_addr_t start, u64 size, in __unmap_stage2_range()
223 static void unmap_stage2_range(struct kvm_s2_mmu *mmu, phys_addr_t start, u64 size) in unmap_stage2_range()
742 int kvm_init_stage2_mmu(struct kvm *kvm, struct kvm_s2_mmu *mmu, unsigned long type) in kvm_init_stage2_mmu()
878 void kvm_free_stage2_pgd(struct kvm_s2_mmu *mmu) in kvm_free_stage2_pgd()
1042 static void stage2_wp_range(struct kvm_s2_mmu *mmu, phys_addr_t addr, phys_addr_t end) in stage2_wp_range()
1657 struct kvm_s2_mmu *mmu; in handle_access_fault() local
/arch/powerpc/platforms/powernv/
opal-call.c 101 bool mmu = (msr & (MSR_IR|MSR_DR)); in opal_call() local
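
The opal_call() local records whether the caller is running with address translation on: MSR_IR and MSR_DR are the PowerPC MSR's instruction- and data-relocation bits, and OPAL firmware is entered in real mode, so a set bit means the call path must drop translation first. The test spelled out, with bit values matching the powerpc definitions:

    #include <stdbool.h>

    #define MSR_IR  0x20UL  /* instruction address translation enabled */
    #define MSR_DR  0x10UL  /* data address translation enabled */

    static inline bool mmu_on_sketch(unsigned long msr)
    {
        return (msr & (MSR_IR | MSR_DR)) != 0;  /* either bit => MMU in use */
    }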
/arch/arm64/kvm/hyp/
pgtable.c 518 struct kvm_s2_mmu *mmu; member
659 static void stage2_clear_pte(kvm_pte_t *ptep, struct kvm_s2_mmu *mmu, u64 addr, in stage2_clear_pte()
669 static void stage2_put_pte(kvm_pte_t *ptep, struct kvm_s2_mmu *mmu, u64 addr, in stage2_put_pte()
1027 struct kvm_s2_mmu *mmu = pgt->mmu; in stage2_unmap_walker() local
1273 int __kvm_pgtable_stage2_init(struct kvm_pgtable *pgt, struct kvm_s2_mmu *mmu, in __kvm_pgtable_stage2_init()
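
hyp/pgtable.c structures stage-2 operations as one generic page-table walk plus per-PTE visitor callbacks; stage2_unmap_walker() above is such a visitor, and stage2_clear_pte()/stage2_put_pte() are primitives it applies. A much-reduced, single-level sketch of the walker shape; the real walker recurses through table levels and passes a richer context, and all names here are hypothetical:

    #include <stdint.h>

    #define SKETCH_PAGE_SIZE    4096UL  /* illustrative granule */

    typedef int (*pte_visitor_t)(uint64_t addr, uint64_t *ptep, void *arg);

    static int walk_range_sketch(uint64_t *table, uint64_t start, uint64_t end,
                                 pte_visitor_t fn, void *arg)
    {
        for (uint64_t addr = start; addr < end; addr += SKETCH_PAGE_SIZE) {
            uint64_t *ptep = &table[(addr - start) / SKETCH_PAGE_SIZE];
            int ret = fn(addr, ptep, arg);  /* visitor may modify the PTE */

            if (ret)
                return ret;     /* abort the walk on first error */
        }
        return 0;
    }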
/arch/x86/kvm/
mmu.h 177 static inline u8 permission_fault(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, in permission_fault()
/arch/microblaze/include/asm/
cpuinfo.h 37 u32 mmu; member
/arch/powerpc/kvm/
book3s_32_mmu.c 399 struct kvmppc_mmu *mmu = &vcpu->arch.mmu; in kvmppc_mmu_book3s_32_init() local
book3s_64_mmu.c 653 struct kvmppc_mmu *mmu = &vcpu->arch.mmu; in kvmppc_mmu_book3s_64_init() local
/arch/arc/include/asm/
arcregs.h 274 unsigned int pad3:5, mmu:3, pad2:4, ic:3, dc:3, pad1:6, ver:8; member
329 struct cpuinfo_arc_mmu mmu; member
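
The arcregs.h hits show how ARC decodes its build configuration registers: a packed bitfield struct is overlaid on the raw 32-bit BCR value, so fields such as mmu (the MMU version) fall out as struct members. A sketch of the idiom in little-endian field order (ver in bits 7:0, mmu in bits 26:24); the real header flips the declaration order under CONFIG_CPU_BIG_ENDIAN, and the _sketch names are hypothetical:

    #include <stdint.h>

    struct bcr_sketch {         /* mirrors the arcregs.h:274 layout */
        uint32_t ver:8, pad1:6, dc:3, ic:3, pad2:4, mmu:3, pad3:5;
    };

    static struct bcr_sketch decode_bcr(uint32_t raw)
    {
        union { uint32_t word; struct bcr_sketch bcr; } u = { .word = raw };
        return u.bcr;           /* e.g. u.bcr.mmu holds bits 26:24 */
    }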
