Lines Matching defs:kvm in virt/kvm/kvm_main.c (Linux kernel)

162 __weak void kvm_arch_mmu_notifier_invalidate_range(struct kvm *kvm,  in kvm_arch_mmu_notifier_invalidate_range()
167 __weak void kvm_arch_guest_memory_reclaimed(struct kvm *kvm) in kvm_arch_guest_memory_reclaimed()
262 static void kvm_make_vcpu_request(struct kvm *kvm, struct kvm_vcpu *vcpu, in kvm_make_vcpu_request()
298 bool kvm_make_vcpus_request_mask(struct kvm *kvm, unsigned int req, in kvm_make_vcpus_request_mask()
321 bool kvm_make_all_cpus_request_except(struct kvm *kvm, unsigned int req, in kvm_make_all_cpus_request_except()
346 bool kvm_make_all_cpus_request(struct kvm *kvm, unsigned int req) in kvm_make_all_cpus_request()
353 void kvm_flush_remote_tlbs(struct kvm *kvm) in kvm_flush_remote_tlbs()
375 void kvm_reload_remote_mmus(struct kvm *kvm) in kvm_reload_remote_mmus()
380 static void kvm_flush_shadow_all(struct kvm *kvm) in kvm_flush_shadow_all()
441 static void kvm_vcpu_init(struct kvm_vcpu *vcpu, struct kvm *kvm, unsigned id) in kvm_vcpu_init()
489 struct kvm *kvm = mmu_notifier_to_kvm(mn); in kvm_mmu_notifier_invalidate_range() local
528 static __always_inline int __kvm_handle_hva_range(struct kvm *kvm, in __kvm_handle_hva_range()
605 struct kvm *kvm = mmu_notifier_to_kvm(mn); in kvm_handle_hva_range() local
625 struct kvm *kvm = mmu_notifier_to_kvm(mn); in kvm_handle_hva_range_no_flush() local
640 static bool kvm_change_spte_gfn(struct kvm *kvm, struct kvm_gfn_range *range) in kvm_change_spte_gfn()
662 struct kvm *kvm = mmu_notifier_to_kvm(mn); in kvm_mmu_notifier_change_pte() local
680 void kvm_inc_notifier_count(struct kvm *kvm, unsigned long start, in kvm_inc_notifier_count()
712 struct kvm *kvm = mmu_notifier_to_kvm(mn); in kvm_mmu_notifier_invalidate_range_start() local
743 void kvm_dec_notifier_count(struct kvm *kvm, unsigned long start, in kvm_dec_notifier_count()
764 struct kvm *kvm = mmu_notifier_to_kvm(mn); in kvm_mmu_notifier_invalidate_range_end() local
840 struct kvm *kvm = mmu_notifier_to_kvm(mn); in kvm_mmu_notifier_release() local
859 static int kvm_init_mmu_notifier(struct kvm *kvm) in kvm_init_mmu_notifier()
867 static int kvm_init_mmu_notifier(struct kvm *kvm) in kvm_init_mmu_notifier()
879 struct kvm *kvm = container_of(bl, struct kvm, pm_notifier); in kvm_pm_notifier_call() local
884 static void kvm_init_pm_notifier(struct kvm *kvm) in kvm_init_pm_notifier()
892 static void kvm_destroy_pm_notifier(struct kvm *kvm) in kvm_destroy_pm_notifier()
897 static void kvm_init_pm_notifier(struct kvm *kvm) in kvm_init_pm_notifier()
901 static void kvm_destroy_pm_notifier(struct kvm *kvm) in kvm_destroy_pm_notifier()
930 static void kvm_free_memslot(struct kvm *kvm, struct kvm_memory_slot *slot) in kvm_free_memslot()
940 static void kvm_free_memslots(struct kvm *kvm, struct kvm_memslots *slots) in kvm_free_memslots()
966 static void kvm_destroy_vm_debugfs(struct kvm *kvm) in kvm_destroy_vm_debugfs()
984 static int kvm_create_vm_debugfs(struct kvm *kvm, int fd) in kvm_create_vm_debugfs()
1062 int __weak kvm_arch_post_init_vm(struct kvm *kvm) in kvm_arch_post_init_vm()
1071 void __weak kvm_arch_pre_destroy_vm(struct kvm *kvm) in kvm_arch_pre_destroy_vm()
1081 int __weak kvm_arch_create_vm_debugfs(struct kvm *kvm) in kvm_arch_create_vm_debugfs()
1088 struct kvm *kvm = kvm_arch_alloc_vm(); in kvm_create_vm() local
1198 static void kvm_destroy_devices(struct kvm *kvm) in kvm_destroy_devices()
1213 static void kvm_destroy_vm(struct kvm *kvm) in kvm_destroy_vm()
1264 void kvm_get_kvm(struct kvm *kvm) in kvm_get_kvm()
1274 bool kvm_get_kvm_safe(struct kvm *kvm) in kvm_get_kvm_safe()
1280 void kvm_put_kvm(struct kvm *kvm) in kvm_put_kvm()
1294 void kvm_put_kvm_no_destroy(struct kvm *kvm) in kvm_put_kvm_no_destroy()
1302 struct kvm *kvm = filp->private_data; in kvm_vm_release() local
1501 static struct kvm_memslots *install_new_memslots(struct kvm *kvm, in install_new_memslots()
1595 static int kvm_set_memslot(struct kvm *kvm, in kvm_set_memslot()
1711 static int kvm_delete_memslot(struct kvm *kvm, in kvm_delete_memslot()
1739 int __kvm_set_memory_region(struct kvm *kvm, in __kvm_set_memory_region()
1858 int kvm_set_memory_region(struct kvm *kvm, in kvm_set_memory_region()
1870 static int kvm_vm_ioctl_set_memory_region(struct kvm *kvm, in kvm_vm_ioctl_set_memory_region()
1887 int kvm_get_dirty_log(struct kvm *kvm, struct kvm_dirty_log *log, in kvm_get_dirty_log()
1950 static int kvm_get_dirty_log_protect(struct kvm *kvm, struct kvm_dirty_log *log) in kvm_get_dirty_log_protect()
2041 static int kvm_vm_ioctl_get_dirty_log(struct kvm *kvm, in kvm_vm_ioctl_get_dirty_log()
2060 static int kvm_clear_dirty_log_protect(struct kvm *kvm, in kvm_clear_dirty_log_protect()
2136 static int kvm_vm_ioctl_clear_dirty_log(struct kvm *kvm, in kvm_vm_ioctl_clear_dirty_log()
2150 struct kvm_memory_slot *gfn_to_memslot(struct kvm *kvm, gfn_t gfn) in gfn_to_memslot()
2180 bool kvm_is_visible_gfn(struct kvm *kvm, gfn_t gfn) in kvm_is_visible_gfn()
2253 unsigned long gfn_to_hva(struct kvm *kvm, gfn_t gfn) in gfn_to_hva()
2284 unsigned long gfn_to_hva_prot(struct kvm *kvm, gfn_t gfn, bool *writable) in gfn_to_hva_prot()
2552 kvm_pfn_t gfn_to_pfn_prot(struct kvm *kvm, gfn_t gfn, bool write_fault, in gfn_to_pfn_prot()
2578 kvm_pfn_t gfn_to_pfn(struct kvm *kvm, gfn_t gfn) in gfn_to_pfn()
2620 struct page *gfn_to_page(struct kvm *kvm, gfn_t gfn) in gfn_to_page()
2725 static void __kvm_unmap_gfn(struct kvm *kvm, in __kvm_unmap_gfn()
2867 int kvm_read_guest_page(struct kvm *kvm, gfn_t gfn, void *data, int offset, in kvm_read_guest_page()
2885 int kvm_read_guest(struct kvm *kvm, gpa_t gpa, void *data, unsigned long len) in kvm_read_guest()
2953 static int __kvm_write_guest_page(struct kvm *kvm, in __kvm_write_guest_page()
2970 int kvm_write_guest_page(struct kvm *kvm, gfn_t gfn, in kvm_write_guest_page()
2988 int kvm_write_guest(struct kvm *kvm, gpa_t gpa, const void *data, in kvm_write_guest()
3071 int kvm_gfn_to_hva_cache_init(struct kvm *kvm, struct gfn_to_hva_cache *ghc, in kvm_gfn_to_hva_cache_init()
3079 int kvm_write_guest_offset_cached(struct kvm *kvm, struct gfn_to_hva_cache *ghc, in kvm_write_guest_offset_cached()
3110 int kvm_write_guest_cached(struct kvm *kvm, struct gfn_to_hva_cache *ghc, in kvm_write_guest_cached()
3117 int kvm_read_guest_offset_cached(struct kvm *kvm, struct gfn_to_hva_cache *ghc, in kvm_read_guest_offset_cached()
3147 int kvm_read_guest_cached(struct kvm *kvm, struct gfn_to_hva_cache *ghc, in kvm_read_guest_cached()
3154 int kvm_clear_guest(struct kvm *kvm, gpa_t gpa, unsigned long len) in kvm_clear_guest()
3174 void mark_page_dirty_in_slot(struct kvm *kvm, in mark_page_dirty_in_slot()
3191 void mark_page_dirty(struct kvm *kvm, gfn_t gfn) in mark_page_dirty()
3529 struct kvm *kvm = me->kvm; in kvm_vcpu_on_spin() local
3584 static bool kvm_page_in_dirty_ring(struct kvm *kvm, unsigned long pgoff) in kvm_page_in_dirty_ring()
3686 static int kvm_vm_ioctl_create_vcpu(struct kvm *kvm, u32 id) in kvm_vm_ioctl_create_vcpu()
4153 struct kvm *kvm = dev->kvm; in kvm_device_release() local
4206 static int kvm_ioctl_create_device(struct kvm *kvm, in kvm_ioctl_create_device()
4264 static long kvm_vm_ioctl_check_extension_generic(struct kvm *kvm, long arg) in kvm_vm_ioctl_check_extension_generic()
4317 static int kvm_vm_ioctl_enable_dirty_log_ring(struct kvm *kvm, u32 size) in kvm_vm_ioctl_enable_dirty_log_ring()
4355 static int kvm_vm_ioctl_reset_dirty_pages(struct kvm *kvm) in kvm_vm_ioctl_reset_dirty_pages()
4377 int __attribute__((weak)) kvm_vm_ioctl_enable_cap(struct kvm *kvm, in kvm_vm_ioctl_enable_cap()
4383 static int kvm_vm_ioctl_enable_cap_generic(struct kvm *kvm, in kvm_vm_ioctl_enable_cap_generic()
4417 struct kvm *kvm = file->private_data; in kvm_vm_stats_read() local
4426 struct kvm *kvm = file->private_data; in kvm_vm_stats_release() local
4438 static int kvm_vm_ioctl_get_stats_fd(struct kvm *kvm) in kvm_vm_ioctl_get_stats_fd()
4465 struct kvm *kvm = filp->private_data; in kvm_vm_ioctl() local
4683 struct kvm *kvm = filp->private_data; in kvm_vm_compat_ioctl() local
4750 struct kvm *kvm; in kvm_dev_ioctl_create_vm() local
5131 int kvm_io_bus_register_dev(struct kvm *kvm, enum kvm_bus bus_idx, gpa_t addr, in kvm_io_bus_register_dev()
5173 int kvm_io_bus_unregister_dev(struct kvm *kvm, enum kvm_bus bus_idx, in kvm_io_bus_unregister_dev()
5220 struct kvm_io_device *kvm_io_bus_get_dev(struct kvm *kvm, enum kvm_bus bus_idx, in kvm_io_bus_get_dev()
5283 static int kvm_get_stat_per_vm(struct kvm *kvm, size_t offset, u64 *val) in kvm_get_stat_per_vm()
5290 static int kvm_clear_stat_per_vm(struct kvm *kvm, size_t offset) in kvm_clear_stat_per_vm()
5297 static int kvm_get_stat_per_vcpu(struct kvm *kvm, size_t offset, u64 *val) in kvm_get_stat_per_vcpu()
5310 static int kvm_clear_stat_per_vcpu(struct kvm *kvm, size_t offset) in kvm_clear_stat_per_vcpu()
5381 struct kvm *kvm; in vm_stat_get() local
5397 struct kvm *kvm; in vm_stat_clear() local
5417 struct kvm *kvm; in vcpu_stat_get() local
5433 struct kvm *kvm; in vcpu_stat_clear() local
5451 static void kvm_uevent_notify_change(unsigned int type, struct kvm *kvm) in kvm_uevent_notify_change()
5776 struct kvm *kvm; member
5791 struct kvm *kvm = init_context->kvm; in kvm_vm_worker_thread() local
5828 int kvm_vm_create_worker_thread(struct kvm *kvm, kvm_vm_thread_fn_t thread_fn, in kvm_vm_create_worker_thread()
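
The request machinery in the listing (kvm_make_vcpu_request, kvm_make_vcpus_request_mask, kvm_make_all_cpus_request, kvm_flush_remote_tlbs) implements a post-and-kick pattern: a request bit is set on each target vCPU, and vCPUs currently in guest mode are sent an IPI so they service the bit before the next guest entry. A minimal caller sketch using the generic KVM_REQ_TLB_FLUSH request; the function name is made up for illustration, only the KVM call is real:

#include <linux/kvm_host.h>

/* Hypothetical caller: make every vCPU flush its TLB before re-entering
 * the guest, the same request kvm_flush_remote_tlbs() posts internally. */
static bool demo_flush_all_guest_tlbs(struct kvm *kvm)
{
        /*
         * Sets KVM_REQ_TLB_FLUSH on every vCPU and IPIs those currently in
         * guest mode; each vCPU services the bit via kvm_check_request()
         * before its next guest entry.  Returns true if at least one vCPU
         * had to be kicked.
         */
        return kvm_make_all_cpus_request(kvm, KVM_REQ_TLB_FLUSH);
}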
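
The reference-counting helpers (kvm_get_kvm, kvm_get_kvm_safe, kvm_put_kvm) pin the VM so it cannot be destroyed while asynchronous work still uses it; kvm_get_kvm_safe fails instead of resurrecting a VM whose last reference is already gone. A sketch of that pattern, with a made-up worker function:

#include <linux/kvm_host.h>

/* Hypothetical deferred work that may run after the VM fd is closed. */
static void demo_async_work(struct kvm *kvm)
{
        /*
         * Only proceed if the VM is still alive; kvm_get_kvm_safe()
         * returns false once the user count has already hit zero.
         */
        if (!kvm_get_kvm_safe(kvm))
                return;

        /* ... safe to dereference kvm here ... */

        kvm_put_kvm(kvm);       /* dropping the last reference destroys the VM */
}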
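
The guest-memory accessors in the listing (kvm_read_guest, kvm_write_guest, kvm_gfn_to_hva_cache_init, kvm_write_guest_cached) are the usual way arch and device-emulation code touches guest physical memory. A minimal usage sketch, assuming a made-up shared structure and guest physical address; only the KVM calls are real:

#include <linux/kvm_host.h>

/* Hypothetical guest/host shared record; layout and GPA are illustrative. */
struct demo_shared_rec {
        u64 sequence;
        u64 payload;
};

static int demo_bump_sequence(struct kvm *kvm, gpa_t gpa)
{
        struct demo_shared_rec rec;
        int ret;

        /* Copy the record out of guest physical memory (may span pages). */
        ret = kvm_read_guest(kvm, gpa, &rec, sizeof(rec));
        if (ret)
                return ret;

        rec.sequence++;

        /* Write it back; the memslot's dirty bitmap is updated for us. */
        return kvm_write_guest(kvm, gpa, &rec, sizeof(rec));
}

/*
 * For data touched on every exit, a gfn_to_hva_cache skips the repeated
 * gfn->hva lookup.  The cache must be (re)initialized whenever the GPA or
 * the memslots change.
 */
static int demo_cached_store(struct kvm *kvm, struct gfn_to_hva_cache *ghc,
                             gpa_t gpa, u64 value)
{
        int ret;

        ret = kvm_gfn_to_hva_cache_init(kvm, ghc, gpa, sizeof(value));
        if (ret)
                return ret;

        return kvm_write_guest_cached(kvm, ghc, &value, sizeof(value));
}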