/kernel/linux/linux-5.10/arch/arm64/kvm/ |
D | mmu.c | 61 cond_resched_lock(&kvm->mmu_lock); in stage2_apply_range() 133 assert_spin_locked(&kvm->mmu_lock); in __unmap_stage2_range() 167 spin_lock(&kvm->mmu_lock); in stage2_flush_vm() 173 spin_unlock(&kvm->mmu_lock); in stage2_flush_vm() 459 spin_lock(&kvm->mmu_lock); in stage2_unmap_vm() 465 spin_unlock(&kvm->mmu_lock); in stage2_unmap_vm() 475 spin_lock(&kvm->mmu_lock); in kvm_free_stage2_pgd() 482 spin_unlock(&kvm->mmu_lock); in kvm_free_stage2_pgd() 519 spin_lock(&kvm->mmu_lock); in kvm_phys_addr_ioremap() 522 spin_unlock(&kvm->mmu_lock); in kvm_phys_addr_ioremap() [all …]
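
The arm64 stage-2 walkers above hold kvm->mmu_lock across whole guest-physical ranges, so they assert the lock on entry (assert_spin_locked) and periodically yield it with cond_resched_lock() to keep latency down. Below is a minimal sketch of that idiom, assuming a hypothetical walk_one_block() helper and a simple page-granular loop rather than the real stage2_apply_range() machinery.

#include <linux/spinlock.h>
#include <linux/sched.h>
#include <linux/mm.h>
#include <linux/kvm_host.h>

/* Hypothetical per-block work done while the lock is held. */
static void walk_one_block(struct kvm *kvm, phys_addr_t addr) { }

/* Sketch of the stage2_apply_range() idiom: caller enters with kvm->mmu_lock held. */
static void walk_range_sketch(struct kvm *kvm, phys_addr_t addr, phys_addr_t end)
{
        assert_spin_locked(&kvm->mmu_lock);     /* lock must already be held */

        for (; addr < end; addr += PAGE_SIZE) {
                walk_one_block(kvm, addr);

                /*
                 * Drop and retake the lock when rescheduling is due or the
                 * lock is contended, so long unmaps do not hog the CPU.
                 */
                cond_resched_lock(&kvm->mmu_lock);
        }
}
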
|
/kernel/linux/linux-5.10/arch/powerpc/kvm/ |
D | book3s_hv_nested.c | 652 spin_lock(&kvm->mmu_lock); in kvmhv_remove_nested() 663 spin_unlock(&kvm->mmu_lock); in kvmhv_remove_nested() 682 spin_lock(&kvm->mmu_lock); in kvmhv_release_all_nested() 694 spin_unlock(&kvm->mmu_lock); in kvmhv_release_all_nested() 711 spin_lock(&kvm->mmu_lock); in kvmhv_flush_nested() 713 spin_unlock(&kvm->mmu_lock); in kvmhv_flush_nested() 729 spin_lock(&kvm->mmu_lock); in kvmhv_get_nested() 733 spin_unlock(&kvm->mmu_lock); in kvmhv_get_nested() 741 spin_lock(&kvm->mmu_lock); in kvmhv_get_nested() 754 spin_unlock(&kvm->mmu_lock); in kvmhv_get_nested() [all …]
|
D | book3s_mmu_hpte.c | 63 spin_lock(&vcpu3s->mmu_lock); in kvmppc_mmu_hpte_cache_map() 92 spin_unlock(&vcpu3s->mmu_lock); in kvmppc_mmu_hpte_cache_map() 110 spin_lock(&vcpu3s->mmu_lock); in invalidate_pte() 114 spin_unlock(&vcpu3s->mmu_lock); in invalidate_pte() 127 spin_unlock(&vcpu3s->mmu_lock); in invalidate_pte() 369 spin_lock_init(&vcpu3s->mmu_lock); in kvmppc_mmu_hpte_init()
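
book3s_mmu_hpte.c uses a per-vCPU mmu_lock, initialized once in kvmppc_mmu_hpte_init() and then taken around every insert into and removal from the shadow-HPTE cache. The sketch below shows the same init-then-serialize shape with simplified stand-in structures instead of the real kvmppc types.

#include <linux/spinlock.h>
#include <linux/list.h>

/* Simplified stand-ins for the real kvmppc hpte_cache bookkeeping. */
struct hpte_entry_sketch {
        struct hlist_node list;
        unsigned long va;
};

struct hpte_cache_sketch {
        spinlock_t mmu_lock;            /* protects the hash list below */
        struct hlist_head hash;
};

static void hpte_cache_init_sketch(struct hpte_cache_sketch *c)
{
        spin_lock_init(&c->mmu_lock);   /* as kvmppc_mmu_hpte_init() does */
        INIT_HLIST_HEAD(&c->hash);
}

static void hpte_cache_add_sketch(struct hpte_cache_sketch *c,
                                  struct hpte_entry_sketch *e)
{
        spin_lock(&c->mmu_lock);
        hlist_add_head(&e->list, &c->hash);     /* insert while serialized */
        spin_unlock(&c->mmu_lock);
}
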
|
D | book3s_64_mmu_radix.c | 618 spin_lock(&kvm->mmu_lock); in kvmppc_create_pte() 754 spin_unlock(&kvm->mmu_lock); in kvmppc_create_pte() 842 spin_lock(&kvm->mmu_lock); in kvmppc_book3s_instantiate_page() 847 spin_unlock(&kvm->mmu_lock); in kvmppc_book3s_instantiate_page() 977 spin_lock(&kvm->mmu_lock); in kvmppc_book3s_radix_page_fault() 981 spin_unlock(&kvm->mmu_lock); in kvmppc_book3s_radix_page_fault() 1087 spin_lock(&kvm->mmu_lock); in kvm_radix_test_clear_dirty() 1099 spin_unlock(&kvm->mmu_lock); in kvm_radix_test_clear_dirty() 1114 spin_unlock(&kvm->mmu_lock); in kvm_radix_test_clear_dirty() 1159 spin_lock(&kvm->mmu_lock); in kvmppc_radix_flush_memslot() [all …]
|
D | book3s_hv_rm_mmu.c | 252 arch_spin_lock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_enter() 267 arch_spin_unlock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_enter() 281 arch_spin_unlock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_enter() 936 arch_spin_lock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_page_init_zero() 948 arch_spin_unlock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_page_init_zero() 964 arch_spin_lock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_page_init_copy() 979 arch_spin_unlock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_do_h_page_init_copy()
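
The real-mode hcall handlers in book3s_hv_rm_mmu.c (and the TCE path in book3s_64_vio_hv.c below) do not go through the ordinary spin_lock() wrapper, which may pull in lockdep/tracing code that is unsafe to run in real mode; instead they lock the raw architecture lock embedded inside kvm->mmu_lock, exactly as the matches show. A hedged sketch of that pattern, with the HPT manipulation reduced to a placeholder comment:

#include <linux/spinlock.h>
#include <linux/kvm_host.h>

/*
 * Real-mode style locking: operate directly on the arch_spinlock_t inside
 * kvm->mmu_lock.  This bypasses debugging and instrumentation layers, so it
 * is only appropriate where the normal spin_lock() path cannot be used.
 */
static void rm_lock_sketch(struct kvm *kvm)
{
        arch_spin_lock(&kvm->mmu_lock.rlock.raw_lock);

        /* ... update HPT / rmap entries while holding the raw lock ... */

        arch_spin_unlock(&kvm->mmu_lock.rlock.raw_lock);
}
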
|
D | book3s_64_mmu_host.c | 151 spin_lock(&kvm->mmu_lock); in kvmppc_mmu_map_page() 203 spin_unlock(&kvm->mmu_lock); in kvmppc_mmu_map_page()
|
D | book3s_64_vio_hv.c | 551 arch_spin_lock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_rm_h_put_tce_indirect() 596 arch_spin_unlock(&kvm->mmu_lock.rlock.raw_lock); in kvmppc_rm_h_put_tce_indirect()
|
D | book3s_64_mmu_hv.c | 608 spin_lock(&kvm->mmu_lock); in kvmppc_book3s_hv_page_fault() 613 spin_unlock(&kvm->mmu_lock); in kvmppc_book3s_hv_page_fault() 742 spin_lock(&kvm->mmu_lock); in kvmppc_rmap_reset() 749 spin_unlock(&kvm->mmu_lock); in kvmppc_rmap_reset() 1408 spin_lock(&kvm->mmu_lock); in resize_hpt_pivot() 1415 spin_unlock(&kvm->mmu_lock); in resize_hpt_pivot()
|
D | e500_mmu_host.c | 462 spin_lock(&kvm->mmu_lock); in kvmppc_e500_shadow_map() 502 spin_unlock(&kvm->mmu_lock); in kvmppc_e500_shadow_map()
|
/kernel/linux/linux-5.10/arch/x86/kvm/mmu/ |
D | page_track.c | 187 spin_lock(&kvm->mmu_lock); in kvm_page_track_register_notifier() 189 spin_unlock(&kvm->mmu_lock); in kvm_page_track_register_notifier() 205 spin_lock(&kvm->mmu_lock); in kvm_page_track_unregister_notifier() 207 spin_unlock(&kvm->mmu_lock); in kvm_page_track_unregister_notifier()
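
kvm_page_track_register_notifier() and its unregister counterpart take kvm->mmu_lock only around the list update, so concurrent writers are serialized while readers walk the notifier list under (S)RCU. A minimal sketch of that add-under-lock pattern, using a plain hlist instead of the real kvm_page_track_notifier_head:

#include <linux/spinlock.h>
#include <linux/rculist.h>
#include <linux/kvm_host.h>

/* Hypothetical notifier node; the real one is kvm_page_track_notifier_node. */
struct track_node_sketch {
        struct hlist_node node;
};

static void track_register_sketch(struct kvm *kvm, struct hlist_head *list,
                                  struct track_node_sketch *n)
{
        spin_lock(&kvm->mmu_lock);
        hlist_add_head_rcu(&n->node, list);     /* writers serialized by mmu_lock */
        spin_unlock(&kvm->mmu_lock);
}
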
|
D | mmu.c | 2000 if (need_resched() || spin_needbreak(&vcpu->kvm->mmu_lock)) { in mmu_sync_children() 2002 cond_resched_lock(&vcpu->kvm->mmu_lock); in mmu_sync_children() 2454 spin_lock(&kvm->mmu_lock); in kvm_mmu_change_mmu_pages() 2465 spin_unlock(&kvm->mmu_lock); in kvm_mmu_change_mmu_pages() 2476 spin_lock(&kvm->mmu_lock); in kvm_mmu_unprotect_page() 2484 spin_unlock(&kvm->mmu_lock); in kvm_mmu_unprotect_page() 3176 spin_lock(&kvm->mmu_lock); in kvm_mmu_free_roots() 3199 spin_unlock(&kvm->mmu_lock); in kvm_mmu_free_roots() 3220 spin_lock(&vcpu->kvm->mmu_lock); in mmu_alloc_root() 3223 spin_unlock(&vcpu->kvm->mmu_lock); in mmu_alloc_root() [all …]
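
mmu_sync_children() above (and tdp_mmu_iter_cond_resched() in tdp_mmu.c further down) do not yield unconditionally: they first check need_resched() and spin_needbreak(), and flush remote TLBs before dropping the lock if the walk has pending changes. Roughly the shape sketched below; the flush_needed flag and the return convention are illustrative, though kvm_flush_remote_tlbs() is the real KVM helper.

#include <linux/spinlock.h>
#include <linux/sched.h>
#include <linux/kvm_host.h>

/*
 * Yield kvm->mmu_lock only when rescheduling is due or the lock is
 * contended.  Flush remote TLBs first if this walk has pending changes
 * that other CPUs must not observe as stale.  Returns true when a yield
 * was attempted, so the caller can restart its iterator.
 */
static bool yield_mmu_lock_sketch(struct kvm *kvm, bool flush_needed)
{
        if (need_resched() || spin_needbreak(&kvm->mmu_lock)) {
                if (flush_needed)
                        kvm_flush_remote_tlbs(kvm);

                cond_resched_lock(&kvm->mmu_lock);
                return true;
        }

        return false;
}
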
|
D | mmu_internal.h | 101 lockdep_assert_held(&kvm->mmu_lock); in kvm_mmu_get_root() 108 lockdep_assert_held(&kvm->mmu_lock); in kvm_mmu_put_root()
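
kvm_mmu_get_root()/kvm_mmu_put_root() never take the lock themselves; they just enforce the caller's locking contract with lockdep_assert_held(), which warns (when lockdep is enabled) if kvm->mmu_lock is not held. A sketch of that helper style; the refcount field and names are illustrative rather than the real struct kvm_mmu_page layout.

#include <linux/lockdep.h>
#include <linux/kvm_host.h>

/* Illustrative root descriptor; the real type is struct kvm_mmu_page. */
struct root_sketch {
        int root_count;
};

static void get_root_sketch(struct kvm *kvm, struct root_sketch *root)
{
        /* Compiles away without lockdep; warns if the lock is not held with it. */
        lockdep_assert_held(&kvm->mmu_lock);

        ++root->root_count;
}
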
|
D | tdp_mmu.c | 54 lockdep_assert_held(&kvm->mmu_lock); in tdp_mmu_next_root_valid() 112 lockdep_assert_held(&kvm->mmu_lock); in kvm_tdp_mmu_free_root() 163 spin_lock(&kvm->mmu_lock); in get_tdp_mmu_vcpu_root() 169 spin_unlock(&kvm->mmu_lock); in get_tdp_mmu_vcpu_root() 179 spin_unlock(&kvm->mmu_lock); in get_tdp_mmu_vcpu_root() 429 if (need_resched() || spin_needbreak(&kvm->mmu_lock)) { in tdp_mmu_iter_cond_resched() 433 cond_resched_lock(&kvm->mmu_lock); in tdp_mmu_iter_cond_resched() 1003 lockdep_assert_held(&kvm->mmu_lock); in kvm_tdp_mmu_clear_dirty_pt_masked() 1158 lockdep_assert_held(&kvm->mmu_lock); in kvm_tdp_mmu_write_protect_gfn()
|
D | paging_tmpl.h | 871 spin_lock(&vcpu->kvm->mmu_lock); in FNAME() 884 spin_unlock(&vcpu->kvm->mmu_lock); in FNAME() 922 spin_lock(&vcpu->kvm->mmu_lock); in FNAME() 957 spin_unlock(&vcpu->kvm->mmu_lock); in FNAME()
|
/kernel/linux/linux-5.10/drivers/misc/habanalabs/common/ |
D | command_buffer.c | 66 mutex_lock(&ctx->mmu_lock); in cb_map_mem() 86 mutex_unlock(&ctx->mmu_lock); in cb_map_mem() 103 mutex_unlock(&ctx->mmu_lock); in cb_map_mem() 120 mutex_lock(&ctx->mmu_lock); in cb_unmap_mem() 132 mutex_unlock(&ctx->mmu_lock); in cb_unmap_mem()
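
In the habanalabs driver, ctx->mmu_lock is a mutex rather than a spinlock: the map/unmap paths it protects can sleep. cb_map_mem() follows the usual lock, map page by page, unlock shape, unwinding partial mappings on failure. A rough sketch, with hypothetical hl_map_one()/hl_unmap_one() helpers standing in for the driver's real MMU calls:

#include <linux/types.h>
#include <linux/mutex.h>
#include <linux/mm.h>

/* Illustrative context; the real one is struct hl_ctx with its mmu_lock mutex. */
struct cb_ctx_sketch {
        struct mutex mmu_lock;
};

/* Hypothetical helpers standing in for the driver's map/unmap primitives. */
static int hl_map_one(struct cb_ctx_sketch *ctx, u64 va) { return 0; }
static void hl_unmap_one(struct cb_ctx_sketch *ctx, u64 va) { }

static int cb_map_sketch(struct cb_ctx_sketch *ctx, u64 va, u32 npages)
{
        u32 i;
        int rc;

        mutex_lock(&ctx->mmu_lock);

        for (i = 0; i < npages; i++) {
                rc = hl_map_one(ctx, va + (u64)i * PAGE_SIZE);
                if (rc)
                        goto unwind;
        }

        mutex_unlock(&ctx->mmu_lock);
        return 0;

unwind:
        /* Undo the mappings created so far before dropping the lock. */
        while (i--)
                hl_unmap_one(ctx, va + (u64)i * PAGE_SIZE);

        mutex_unlock(&ctx->mmu_lock);
        return rc;
}
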
|
D | memory.c | 963 mutex_lock(&ctx->mmu_lock); in map_device_va() 967 mutex_unlock(&ctx->mmu_lock); in map_device_va() 975 mutex_unlock(&ctx->mmu_lock); in map_device_va() 1099 mutex_lock(&ctx->mmu_lock); in unmap_device_va() 1112 mutex_unlock(&ctx->mmu_lock); in unmap_device_va()
|
D | debugfs.c | 411 mutex_lock(&ctx->mmu_lock); in mmu_show() 528 mutex_unlock(&ctx->mmu_lock); in mmu_show() 638 mutex_lock(&ctx->mmu_lock); in device_va_to_pa() 690 mutex_unlock(&ctx->mmu_lock); in device_va_to_pa()
|
D | mmu_v1.c | 485 mutex_init(&ctx->mmu_lock); in hl_mmu_v1_ctx_init() 521 mutex_destroy(&ctx->mmu_lock); in hl_mmu_v1_ctx_fini()
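
The mutex lifetime is tied to the context: hl_mmu_v1_ctx_init() calls mutex_init() before any mapping can happen, and hl_mmu_v1_ctx_fini() calls mutex_destroy() once the context has no more users. A minimal, illustrative sketch:

#include <linux/mutex.h>

/* Illustrative context, mirroring hl_ctx's embedded mmu_lock mutex. */
struct mmu_ctx_sketch {
        struct mutex mmu_lock;
};

static void mmu_ctx_init_sketch(struct mmu_ctx_sketch *ctx)
{
        mutex_init(&ctx->mmu_lock);     /* before the first map/unmap */
}

static void mmu_ctx_fini_sketch(struct mmu_ctx_sketch *ctx)
{
        /* Only legal once no other thread can still acquire the mutex. */
        mutex_destroy(&ctx->mmu_lock);
}
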
|
/kernel/linux/linux-5.10/arch/mips/kvm/ |
D | mmu.c | 596 spin_lock(&kvm->mmu_lock); in _kvm_mips_map_page_fast() 631 spin_unlock(&kvm->mmu_lock); in _kvm_mips_map_page_fast() 713 spin_lock(&kvm->mmu_lock); in kvm_mips_map_page() 721 spin_unlock(&kvm->mmu_lock); in kvm_mips_map_page() 751 spin_unlock(&kvm->mmu_lock); in kvm_mips_map_page() 1044 spin_lock(&kvm->mmu_lock); in kvm_mips_handle_mapped_seg_tlb_fault() 1049 spin_unlock(&kvm->mmu_lock); in kvm_mips_handle_mapped_seg_tlb_fault()
|
D | mips.c | 220 spin_lock(&kvm->mmu_lock); in kvm_arch_flush_shadow_memslot() 226 spin_unlock(&kvm->mmu_lock); in kvm_arch_flush_shadow_memslot() 261 spin_lock(&kvm->mmu_lock); in kvm_arch_commit_memory_region() 268 spin_unlock(&kvm->mmu_lock); in kvm_arch_commit_memory_region()
|
/kernel/linux/linux-5.10/virt/kvm/ |
D | kvm_main.c | 459 spin_lock(&kvm->mmu_lock); in kvm_mmu_notifier_change_pte() 465 spin_unlock(&kvm->mmu_lock); in kvm_mmu_notifier_change_pte() 476 spin_lock(&kvm->mmu_lock); in kvm_mmu_notifier_invalidate_range_start() 489 spin_unlock(&kvm->mmu_lock); in kvm_mmu_notifier_invalidate_range_start() 500 spin_lock(&kvm->mmu_lock); in kvm_mmu_notifier_invalidate_range_end() 514 spin_unlock(&kvm->mmu_lock); in kvm_mmu_notifier_invalidate_range_end() 528 spin_lock(&kvm->mmu_lock); in kvm_mmu_notifier_clear_flush_young() 534 spin_unlock(&kvm->mmu_lock); in kvm_mmu_notifier_clear_flush_young() 549 spin_lock(&kvm->mmu_lock); in kvm_mmu_notifier_clear_young() 564 spin_unlock(&kvm->mmu_lock); in kvm_mmu_notifier_clear_young() [all …]
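
The generic MMU-notifier callbacks in kvm_main.c all follow the same bracket: take kvm->mmu_lock, record that an invalidation is in progress, unmap or age the affected range, flush remote TLBs if anything changed, then drop the lock. The sketch below shows that bracket in isolation; the notifier-state structure and the unmap helper are illustrative stand-ins, not the exact struct kvm members, though kvm_flush_remote_tlbs() is the real helper.

#include <linux/spinlock.h>
#include <linux/kvm_host.h>

/* Illustrative per-VM notifier state; struct kvm keeps similar counters. */
struct notifier_state_sketch {
        int in_progress;        /* non-zero while an invalidation is running */
};

/* Hypothetical range-unmap helper standing in for the arch unmap hook. */
static bool unmap_range_sketch(struct kvm *kvm, unsigned long start, unsigned long end)
{
        return false;           /* "nothing needed flushing" in this stub */
}

static void invalidate_start_sketch(struct kvm *kvm, struct notifier_state_sketch *st,
                                    unsigned long start, unsigned long end)
{
        spin_lock(&kvm->mmu_lock);

        st->in_progress++;      /* page-fault paths back off while this is set */
        if (unmap_range_sketch(kvm, start, end))
                kvm_flush_remote_tlbs(kvm);

        spin_unlock(&kvm->mmu_lock);
}
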
|
/kernel/linux/linux-5.10/arch/powerpc/include/asm/ |
D | kvm_book3s_64.h | 653 VM_WARN(!spin_is_locked(&kvm->mmu_lock), in find_kvm_secondary_pte() 665 VM_WARN(!spin_is_locked(&kvm->mmu_lock), in find_kvm_host_pte()
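
find_kvm_secondary_pte() and find_kvm_host_pte() assert their locking contract with VM_WARN(!spin_is_locked(...)) rather than lockdep_assert_held(), so the check only depends on CONFIG_DEBUG_VM. Note that spin_is_locked() merely says somebody holds the lock, not necessarily the current context, making this a weaker but cheaper-to-enable assertion. A sketch:

#include <linux/spinlock.h>
#include <linux/mmdebug.h>
#include <linux/kvm_host.h>

/*
 * Assertion in the style of find_kvm_secondary_pte(): warn under
 * CONFIG_DEBUG_VM if kvm->mmu_lock does not appear to be held.
 */
static void assert_mmu_lock_sketch(struct kvm *kvm)
{
        VM_WARN(!spin_is_locked(&kvm->mmu_lock),
                "%s called without kvm->mmu_lock held\n", __func__);
}
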
|
D | kvm_book3s.h | 134 spinlock_t mmu_lock; member
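
The per-vCPU lock used by book3s_mmu_hpte.c earlier in this listing is just a spinlock_t embedded in the book3s vCPU state, declared roughly like the illustrative structure below (field names other than mmu_lock are placeholders).

#include <linux/spinlock.h>

/* Illustrative owner structure; the real one is struct kvmppc_vcpu_book3s. */
struct vcpu_book3s_sketch {
        /* ... other per-vCPU MMU state ... */
        spinlock_t mmu_lock;    /* protects the shadow-HPTE cache lists */
};
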
|
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/gvt/ |
D | kvmgt.c | 1706 spin_lock(&kvm->mmu_lock); in kvmgt_page_track_add() 1715 spin_unlock(&kvm->mmu_lock); in kvmgt_page_track_add() 1740 spin_lock(&kvm->mmu_lock); in kvmgt_page_track_remove() 1749 spin_unlock(&kvm->mmu_lock); in kvmgt_page_track_remove() 1775 spin_lock(&kvm->mmu_lock); in kvmgt_page_track_flush_slot() 1784 spin_unlock(&kvm->mmu_lock); in kvmgt_page_track_flush_slot()
|
/kernel/linux/linux-5.10/Documentation/virt/kvm/ |
D | locking.rst | 215 :Name: kvm->mmu_lock
|