Lines Matching refs:vgpu

54 bool intel_gvt_ggtt_validate_range(struct intel_vgpu *vgpu, u64 addr, u32 size)  in intel_gvt_ggtt_validate_range()  argument
57 return vgpu_gmadr_is_valid(vgpu, addr); in intel_gvt_ggtt_validate_range()
59 if (vgpu_gmadr_is_aperture(vgpu, addr) && in intel_gvt_ggtt_validate_range()
60 vgpu_gmadr_is_aperture(vgpu, addr + size - 1)) in intel_gvt_ggtt_validate_range()
62 else if (vgpu_gmadr_is_hidden(vgpu, addr) && in intel_gvt_ggtt_validate_range()
63 vgpu_gmadr_is_hidden(vgpu, addr + size - 1)) in intel_gvt_ggtt_validate_range()
72 int intel_gvt_ggtt_gmadr_g2h(struct intel_vgpu *vgpu, u64 g_addr, u64 *h_addr) in intel_gvt_ggtt_gmadr_g2h() argument
74 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_gvt_ggtt_gmadr_g2h()
76 if (drm_WARN(&i915->drm, !vgpu_gmadr_is_valid(vgpu, g_addr), in intel_gvt_ggtt_gmadr_g2h()
80 if (vgpu_gmadr_is_aperture(vgpu, g_addr)) in intel_gvt_ggtt_gmadr_g2h()
81 *h_addr = vgpu_aperture_gmadr_base(vgpu) in intel_gvt_ggtt_gmadr_g2h()
82 + (g_addr - vgpu_aperture_offset(vgpu)); in intel_gvt_ggtt_gmadr_g2h()
84 *h_addr = vgpu_hidden_gmadr_base(vgpu) in intel_gvt_ggtt_gmadr_g2h()
85 + (g_addr - vgpu_hidden_offset(vgpu)); in intel_gvt_ggtt_gmadr_g2h()
90 int intel_gvt_ggtt_gmadr_h2g(struct intel_vgpu *vgpu, u64 h_addr, u64 *g_addr) in intel_gvt_ggtt_gmadr_h2g() argument
92 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_gvt_ggtt_gmadr_h2g()
94 if (drm_WARN(&i915->drm, !gvt_gmadr_is_valid(vgpu->gvt, h_addr), in intel_gvt_ggtt_gmadr_h2g()
98 if (gvt_gmadr_is_aperture(vgpu->gvt, h_addr)) in intel_gvt_ggtt_gmadr_h2g()
99 *g_addr = vgpu_aperture_gmadr_base(vgpu) in intel_gvt_ggtt_gmadr_h2g()
100 + (h_addr - gvt_aperture_gmadr_base(vgpu->gvt)); in intel_gvt_ggtt_gmadr_h2g()
102 *g_addr = vgpu_hidden_gmadr_base(vgpu) in intel_gvt_ggtt_gmadr_h2g()
103 + (h_addr - gvt_hidden_gmadr_base(vgpu->gvt)); in intel_gvt_ggtt_gmadr_h2g()
107 int intel_gvt_ggtt_index_g2h(struct intel_vgpu *vgpu, unsigned long g_index, in intel_gvt_ggtt_index_g2h() argument
113 ret = intel_gvt_ggtt_gmadr_g2h(vgpu, g_index << I915_GTT_PAGE_SHIFT, in intel_gvt_ggtt_index_g2h()
122 int intel_gvt_ggtt_h2g_index(struct intel_vgpu *vgpu, unsigned long h_index, in intel_gvt_ggtt_h2g_index() argument
128 ret = intel_gvt_ggtt_gmadr_h2g(vgpu, h_index << I915_GTT_PAGE_SHIFT, in intel_gvt_ggtt_h2g_index()
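
Note: the four helpers above implement one piece of offset arithmetic in both directions. A guest graphics memory address falls in either the mappable "aperture" range or the non-mappable "hidden" range, and translation just rebases it between the guest view and the host GGTT view. A minimal userspace sketch of that arithmetic, assuming invented range values in place of the real vgpu_aperture_*/vgpu_hidden_* accessors:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for one of the vGPU's two guest memory ranges,
 * each with a guest-visible base and a base inside the host GGTT. */
struct gmadr_range { uint64_t guest_base, host_base, size; };

struct fake_vgpu {
	struct gmadr_range aperture; /* mappable */
	struct gmadr_range hidden;   /* non-mappable */
};

static bool in_range(const struct gmadr_range *r, uint64_t addr)
{
	return addr >= r->guest_base && addr - r->guest_base < r->size;
}

/* Mirrors the g2h path: pick the range containing g_addr and rebase it
 * from the guest view onto the host GGTT view; h2g is the inverse. */
static int gmadr_g2h(const struct fake_vgpu *v, uint64_t g_addr, uint64_t *h_addr)
{
	if (in_range(&v->aperture, g_addr))
		*h_addr = v->aperture.host_base + (g_addr - v->aperture.guest_base);
	else if (in_range(&v->hidden, g_addr))
		*h_addr = v->hidden.host_base + (g_addr - v->hidden.guest_base);
	else
		return -1; /* the real helper drm_WARNs on invalid addresses */
	return 0;
}

int main(void)
{
	struct fake_vgpu v = {
		.aperture = { .guest_base = 0x0000000, .host_base = 0x10000000, .size = 0x8000000 },
		.hidden   = { .guest_base = 0x8000000, .host_base = 0x90000000, .size = 0x8000000 },
	};
	uint64_t h;

	if (!gmadr_g2h(&v, 0x8001000, &h))
		printf("guest 0x8001000 -> host 0x%llx\n", (unsigned long long)h);
	return 0;
}

The index variants (intel_gvt_ggtt_index_g2h()/intel_gvt_ggtt_h2g_index()) are the same translation applied to page indices, shifting by I915_GTT_PAGE_SHIFT before and after.
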
306 struct intel_vgpu *vgpu) in gtt_get_entry64() argument
308 const struct intel_gvt_device_info *info = &vgpu->gvt->device_info; in gtt_get_entry64()
315 ret = intel_gvt_hypervisor_read_gpa(vgpu, gpa + in gtt_get_entry64()
321 e->val64 = read_pte64(vgpu->gvt->gt->ggtt, index); in gtt_get_entry64()
331 struct intel_vgpu *vgpu) in gtt_set_entry64() argument
333 const struct intel_gvt_device_info *info = &vgpu->gvt->device_info; in gtt_set_entry64()
340 ret = intel_gvt_hypervisor_write_gpa(vgpu, gpa + in gtt_set_entry64()
346 write_pte64(vgpu->gvt->gt->ggtt, index, e->val64); in gtt_set_entry64()
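
Note: gtt_get_entry64()/gtt_set_entry64() above dispatch between two backing stores for a 64-bit entry: guest page-table memory reached through the GPA read/write hypercalls, and the host GGTT reached through read_pte64()/write_pte64(). A hedged sketch of that dispatch, with both stores modeled as plain arrays (sizes and values invented):

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Two hypothetical backing stores for 64-bit GTT entries. */
static uint64_t guest_mem[512]; /* stands in for guest pages reached via GPA */
static uint64_t host_ggtt[512]; /* stands in for the host GGTT MMIO window   */

static uint64_t gtt_get_entry64(const uint64_t *pt, size_t index)
{
	/* pt == NULL means "the entry lives in the host GGTT", mirroring
	 * how the real helper falls back to read_pte64() when no guest
	 * page-table pointer is supplied. */
	return pt ? pt[index] : host_ggtt[index];
}

static void gtt_set_entry64(uint64_t *pt, size_t index, uint64_t val)
{
	if (pt)
		pt[index] = val;        /* hypervisor_write_gpa() path */
	else
		host_ggtt[index] = val; /* write_pte64() path */
}

int main(void)
{
	gtt_set_entry64(guest_mem, 3, 0xdeadbeefULL);
	gtt_set_entry64(NULL, 3, 0xcafef00dULL);
	printf("guest[3]=%llx host[3]=%llx\n",
	       (unsigned long long)gtt_get_entry64(guest_mem, 3),
	       (unsigned long long)gtt_get_entry64(NULL, 3));
	return 0;
}
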
556 struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops; in _ppgtt_get_root_entry()
563 entry, index, false, 0, mm->vgpu); in _ppgtt_get_root_entry()
583 struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops; in _ppgtt_set_root_entry()
587 entry, index, false, 0, mm->vgpu); in _ppgtt_set_root_entry()
605 struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops; in ggtt_get_guest_entry()
611 false, 0, mm->vgpu); in ggtt_get_guest_entry()
617 struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops; in ggtt_set_guest_entry()
622 false, 0, mm->vgpu); in ggtt_set_guest_entry()
628 struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops; in ggtt_get_host_entry()
632 pte_ops->get_entry(NULL, entry, index, false, 0, mm->vgpu); in ggtt_get_host_entry()
638 struct intel_gvt_gtt_pte_ops *pte_ops = mm->vgpu->gvt->gtt.pte_ops; in ggtt_set_host_entry()
643 if (vgpu_gmadr_is_aperture(mm->vgpu, index << I915_GTT_PAGE_SHIFT)) { in ggtt_set_host_entry()
644 offset -= (vgpu_aperture_gmadr_base(mm->vgpu) >> PAGE_SHIFT); in ggtt_set_host_entry()
646 } else if (vgpu_gmadr_is_hidden(mm->vgpu, index << I915_GTT_PAGE_SHIFT)) { in ggtt_set_host_entry()
647 offset -= (vgpu_hidden_gmadr_base(mm->vgpu) >> PAGE_SHIFT); in ggtt_set_host_entry()
651 pte_ops->set_entry(NULL, entry, index, false, 0, mm->vgpu); in ggtt_set_host_entry()
663 struct intel_gvt *gvt = spt->vgpu->gvt; in ppgtt_spt_get_entry()
674 spt->vgpu); in ppgtt_spt_get_entry()
692 struct intel_gvt *gvt = spt->vgpu->gvt; in ppgtt_spt_set_entry()
703 spt->vgpu); in ppgtt_spt_set_entry()
744 static int detach_oos_page(struct intel_vgpu *vgpu,
749 struct device *kdev = &spt->vgpu->gvt->gt->i915->drm.pdev->dev; in ppgtt_free_spt()
751 trace_spt_free(spt->vgpu->id, spt, spt->guest_page.type); in ppgtt_free_spt()
756 radix_tree_delete(&spt->vgpu->gtt.spt_tree, spt->shadow_page.mfn); in ppgtt_free_spt()
760 detach_oos_page(spt->vgpu, spt->guest_page.oos_page); in ppgtt_free_spt()
762 intel_vgpu_unregister_page_track(spt->vgpu, spt->guest_page.gfn); in ppgtt_free_spt()
769 static void ppgtt_free_all_spt(struct intel_vgpu *vgpu) in ppgtt_free_all_spt() argument
777 radix_tree_for_each_slot(slot, &vgpu->gtt.spt_tree, &iter, 0) { in ppgtt_free_all_spt()
810 struct intel_vgpu *vgpu, unsigned long gfn) in intel_vgpu_find_spt_by_gfn() argument
814 track = intel_vgpu_find_page_track(vgpu, gfn); in intel_vgpu_find_spt_by_gfn()
823 struct intel_vgpu *vgpu, unsigned long mfn) in intel_vgpu_find_spt_by_mfn() argument
825 return radix_tree_lookup(&vgpu->gtt.spt_tree, mfn); in intel_vgpu_find_spt_by_mfn()
832 struct intel_vgpu *vgpu, enum intel_gvt_gtt_type type) in ppgtt_alloc_spt() argument
834 struct device *kdev = &vgpu->gvt->gt->i915->drm.pdev->dev; in ppgtt_alloc_spt()
842 if (reclaim_one_ppgtt_mm(vgpu->gvt)) in ppgtt_alloc_spt()
849 spt->vgpu = vgpu; in ppgtt_alloc_spt()
867 ret = radix_tree_insert(&vgpu->gtt.spt_tree, spt->shadow_page.mfn, spt); in ppgtt_alloc_spt()
882 struct intel_vgpu *vgpu, enum intel_gvt_gtt_type type, in ppgtt_alloc_spt_gfn() argument
888 spt = ppgtt_alloc_spt(vgpu, type); in ppgtt_alloc_spt_gfn()
895 ret = intel_vgpu_register_page_track(vgpu, gfn, in ppgtt_alloc_spt_gfn()
906 trace_spt_alloc(vgpu->id, spt, type, spt->shadow_page.mfn, gfn); in ppgtt_alloc_spt_gfn()
912 ((spt)->vgpu->gvt->device_info.gtt_entry_size_shift)
921 spt->vgpu->gvt->gtt.pte_ops->test_present(e))
927 spt->vgpu->gvt->gtt.pte_ops->test_present(e))
938 trace_spt_refcount(spt->vgpu->id, "inc", spt, v, (v + 1)); in ppgtt_get_spt()
946 trace_spt_refcount(spt->vgpu->id, "dec", spt, v, (v - 1)); in ppgtt_put_spt()
952 static int ppgtt_invalidate_spt_by_shadow_entry(struct intel_vgpu *vgpu, in ppgtt_invalidate_spt_by_shadow_entry() argument
955 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in ppgtt_invalidate_spt_by_shadow_entry()
956 struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in ppgtt_invalidate_spt_by_shadow_entry()
977 vgpu->gtt.scratch_pt[cur_pt_type].page_mfn) in ppgtt_invalidate_spt_by_shadow_entry()
980 s = intel_vgpu_find_spt_by_mfn(vgpu, ops->get_pfn(e)); in ppgtt_invalidate_spt_by_shadow_entry()
992 struct intel_vgpu *vgpu = spt->vgpu; in ppgtt_invalidate_pte() local
993 struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in ppgtt_invalidate_pte()
1001 if (!pfn || pfn == vgpu->gtt.scratch_pt[type].page_mfn) in ppgtt_invalidate_pte()
1004 intel_gvt_hypervisor_dma_unmap_guest_page(vgpu, pfn << PAGE_SHIFT); in ppgtt_invalidate_pte()
1009 struct intel_vgpu *vgpu = spt->vgpu; in ppgtt_invalidate_spt() local
1014 trace_spt_change(spt->vgpu->id, "die", spt, in ppgtt_invalidate_spt()
1041 spt->vgpu, &e); in ppgtt_invalidate_spt()
1050 trace_spt_change(spt->vgpu->id, "release", spt, in ppgtt_invalidate_spt()
1060 static bool vgpu_ips_enabled(struct intel_vgpu *vgpu) in vgpu_ips_enabled() argument
1062 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in vgpu_ips_enabled()
1065 u32 ips = vgpu_vreg_t(vgpu, GEN8_GAMW_ECO_DEV_RW_IA) & in vgpu_ips_enabled()
1079 struct intel_vgpu *vgpu, struct intel_gvt_gtt_entry *we) in ppgtt_populate_spt_by_guest_entry() argument
1081 struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in ppgtt_populate_spt_by_guest_entry()
1089 ips = vgpu_ips_enabled(vgpu) && ops->test_ips(we); in ppgtt_populate_spt_by_guest_entry()
1091 spt = intel_vgpu_find_spt_by_gfn(vgpu, ops->get_pfn(we)); in ppgtt_populate_spt_by_guest_entry()
1114 spt = ppgtt_alloc_spt_gfn(vgpu, type, ops->get_pfn(we), ips); in ppgtt_populate_spt_by_guest_entry()
1120 ret = intel_vgpu_enable_page_track(vgpu, spt->guest_page.gfn); in ppgtt_populate_spt_by_guest_entry()
1128 trace_spt_change(vgpu->id, "new", spt, spt->guest_page.gfn, in ppgtt_populate_spt_by_guest_entry()
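
Note: ppgtt_populate_spt_by_guest_entry() is essentially find-or-create. It looks the guest page frame up in the per-vGPU spt_tree, takes a reference on a hit, and otherwise allocates a fresh shadow page table and enables write-protection tracking on the guest page. A toy model of that discipline, using a linear table where the real code uses a radix tree (all names here are invented):

#include <stdint.h>
#include <stdio.h>

#define MAX_SPT 8

struct spt { uint64_t gfn; int refcount; int tracked; };
static struct spt table[MAX_SPT];

static struct spt *find_spt(uint64_t gfn)
{
	for (int i = 0; i < MAX_SPT; i++)
		if (table[i].refcount && table[i].gfn == gfn)
			return &table[i];
	return NULL;
}

static struct spt *get_or_create_spt(uint64_t gfn)
{
	struct spt *s = find_spt(gfn);

	if (s) {           /* hit: just take another reference */
		s->refcount++;
		return s;
	}
	for (int i = 0; i < MAX_SPT; i++) {
		if (!table[i].refcount) {
			table[i] = (struct spt){ .gfn = gfn, .refcount = 1,
						 .tracked = 1 /* write-protect guest page */ };
			return &table[i];
		}
	}
	return NULL;       /* the real code reclaims an LRU ppgtt_mm here */
}

int main(void)
{
	get_or_create_spt(0x1000);
	struct spt *s = get_or_create_spt(0x1000); /* second user, same gfn */
	printf("gfn=%llx refcount=%d\n", (unsigned long long)s->gfn, s->refcount);
	return 0;
}
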
1145 struct intel_gvt_gtt_pte_ops *ops = s->vgpu->gvt->gtt.pte_ops; in ppgtt_generate_shadow_entry()
1165 static int is_2MB_gtt_possible(struct intel_vgpu *vgpu, in is_2MB_gtt_possible() argument
1168 struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in is_2MB_gtt_possible()
1171 if (!HAS_PAGE_SIZES(vgpu->gvt->gt->i915, I915_GTT_PAGE_SIZE_2M)) in is_2MB_gtt_possible()
1174 pfn = intel_gvt_hypervisor_gfn_to_mfn(vgpu, ops->get_pfn(entry)); in is_2MB_gtt_possible()
1181 static int split_2MB_gtt_entry(struct intel_vgpu *vgpu, in split_2MB_gtt_entry() argument
1185 struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in split_2MB_gtt_entry()
1197 sub_spt = ppgtt_alloc_spt(vgpu, GTT_TYPE_PPGTT_PTE_PT); in split_2MB_gtt_entry()
1202 ret = intel_gvt_hypervisor_dma_map_guest_page(vgpu, in split_2MB_gtt_entry()
1231 trace_spt_change(sub_spt->vgpu->id, "release", sub_spt, in split_2MB_gtt_entry()
1237 static int split_64KB_gtt_entry(struct intel_vgpu *vgpu, in split_64KB_gtt_entry() argument
1241 struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in split_64KB_gtt_entry()
1257 ret = intel_gvt_hypervisor_dma_map_guest_page(vgpu, in split_64KB_gtt_entry()
1268 static int ppgtt_populate_shadow_entry(struct intel_vgpu *vgpu, in ppgtt_populate_shadow_entry() argument
1272 struct intel_gvt_gtt_pte_ops *pte_ops = vgpu->gvt->gtt.pte_ops; in ppgtt_populate_shadow_entry()
1294 return split_64KB_gtt_entry(vgpu, spt, index, &se); in ppgtt_populate_shadow_entry()
1297 ret = is_2MB_gtt_possible(vgpu, ge); in ppgtt_populate_shadow_entry()
1299 return split_2MB_gtt_entry(vgpu, spt, index, &se); in ppgtt_populate_shadow_entry()
1312 ret = intel_gvt_hypervisor_dma_map_guest_page(vgpu, gfn, page_size, in ppgtt_populate_shadow_entry()
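
Note: before mapping a guest page, ppgtt_populate_shadow_entry() settles the page size. 64K guest entries are always split into sixteen 4K shadow entries, and 2M entries are split into a full 4K sub-table only when is_2MB_gtt_possible() says the host cannot back them directly. A sketch of that decision under an invented alignment rule:

#include <stdbool.h>
#include <stdio.h>

enum gtt_pte_size { PTE_4K, PTE_64K, PTE_2M };

static bool host_has_2mb_page(unsigned long gfn)
{
	/* Stands in for is_2MB_gtt_possible(): HAS_PAGE_SIZES() plus a
	 * check that the host backing for gfn is a usable huge page. */
	return (gfn & 0x1ff) == 0;
}

static int shadow_entries_needed(enum gtt_pte_size sz, unsigned long gfn)
{
	switch (sz) {
	case PTE_64K:
		return 16;  /* split_64KB_gtt_entry(): 16 x 4K entries */
	case PTE_2M:
		/* split_2MB_gtt_entry() builds a 512-entry sub-table */
		return host_has_2mb_page(gfn) ? 1 : 512;
	default:
		return 1;
	}
}

int main(void)
{
	printf("64K entry      -> %d shadow entries\n", shadow_entries_needed(PTE_64K, 0));
	printf("2M, unbackable -> %d shadow entries\n", shadow_entries_needed(PTE_2M, 5));
	return 0;
}
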
1324 struct intel_vgpu *vgpu = spt->vgpu; in ppgtt_populate_spt() local
1325 struct intel_gvt *gvt = vgpu->gvt; in ppgtt_populate_spt()
1332 trace_spt_change(spt->vgpu->id, "born", spt, in ppgtt_populate_spt()
1337 s = ppgtt_populate_spt_by_guest_entry(vgpu, &ge); in ppgtt_populate_spt()
1347 if (!intel_gvt_hypervisor_is_valid_gfn(vgpu, gfn)) { in ppgtt_populate_spt()
1353 ret = ppgtt_populate_shadow_entry(vgpu, spt, i, &ge); in ppgtt_populate_spt()
1368 struct intel_vgpu *vgpu = spt->vgpu; in ppgtt_handle_guest_entry_removal() local
1369 struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in ppgtt_handle_guest_entry_removal()
1372 trace_spt_guest_change(spt->vgpu->id, "remove", spt, in ppgtt_handle_guest_entry_removal()
1382 vgpu->gtt.scratch_pt[spt->shadow_page.type].page_mfn) in ppgtt_handle_guest_entry_removal()
1387 intel_vgpu_find_spt_by_mfn(vgpu, ops->get_pfn(se)); in ppgtt_handle_guest_entry_removal()
1413 struct intel_vgpu *vgpu = spt->vgpu; in ppgtt_handle_guest_entry_add() local
1418 trace_spt_guest_change(spt->vgpu->id, "add", spt, spt->shadow_page.type, in ppgtt_handle_guest_entry_add()
1425 s = ppgtt_populate_spt_by_guest_entry(vgpu, we); in ppgtt_handle_guest_entry_add()
1434 ret = ppgtt_populate_shadow_entry(vgpu, spt, index, we); in ppgtt_handle_guest_entry_add()
1445 static int sync_oos_page(struct intel_vgpu *vgpu, in sync_oos_page() argument
1448 const struct intel_gvt_device_info *info = &vgpu->gvt->device_info; in sync_oos_page()
1449 struct intel_gvt *gvt = vgpu->gvt; in sync_oos_page()
1456 trace_oos_change(vgpu->id, "sync", oos_page->id, in sync_oos_page()
1464 ops->get_entry(oos_page->mem, &old, index, false, 0, vgpu); in sync_oos_page()
1466 spt->guest_page.gfn << PAGE_SHIFT, vgpu); in sync_oos_page()
1472 trace_oos_sync(vgpu->id, oos_page->id, in sync_oos_page()
1476 ret = ppgtt_populate_shadow_entry(vgpu, spt, index, &new); in sync_oos_page()
1480 ops->set_entry(oos_page->mem, &new, index, false, 0, vgpu); in sync_oos_page()
1488 static int detach_oos_page(struct intel_vgpu *vgpu, in detach_oos_page() argument
1491 struct intel_gvt *gvt = vgpu->gvt; in detach_oos_page()
1494 trace_oos_change(vgpu->id, "detach", oos_page->id, in detach_oos_page()
1510 struct intel_gvt *gvt = spt->vgpu->gvt; in attach_oos_page()
1513 ret = intel_gvt_hypervisor_read_gpa(spt->vgpu, in attach_oos_page()
1524 trace_oos_change(spt->vgpu->id, "attach", oos_page->id, in attach_oos_page()
1534 ret = intel_vgpu_enable_page_track(spt->vgpu, spt->guest_page.gfn); in ppgtt_set_guest_page_sync()
1538 trace_oos_change(spt->vgpu->id, "set page sync", oos_page->id, in ppgtt_set_guest_page_sync()
1542 return sync_oos_page(spt->vgpu, oos_page); in ppgtt_set_guest_page_sync()
1547 struct intel_gvt *gvt = spt->vgpu->gvt; in ppgtt_allocate_oos_page()
1560 ret = detach_oos_page(spt->vgpu, oos_page); in ppgtt_allocate_oos_page()
1576 trace_oos_change(spt->vgpu->id, "set page out of sync", oos_page->id, in ppgtt_set_guest_page_oos()
1579 list_add_tail(&oos_page->vm_list, &spt->vgpu->gtt.oos_page_list_head); in ppgtt_set_guest_page_oos()
1580 return intel_vgpu_disable_page_track(spt->vgpu, spt->guest_page.gfn); in ppgtt_set_guest_page_oos()
1593 int intel_vgpu_sync_oos_pages(struct intel_vgpu *vgpu) in intel_vgpu_sync_oos_pages() argument
1602 list_for_each_safe(pos, n, &vgpu->gtt.oos_page_list_head) { in intel_vgpu_sync_oos_pages()
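
Note: together, the oos_page helpers above implement a small state machine for "out of sync" guest page tables: attach_oos_page() snapshots the guest table, ppgtt_set_guest_page_oos() drops write protection so the guest can update PTEs without trapping, and ppgtt_set_guest_page_sync()/sync_oos_page() re-protect the page and replay the entries that changed; intel_vgpu_sync_oos_pages() does that for every page still on the list. A compact model of those transitions (names and the dirty counter are invented):

#include <stdio.h>

/* States a guest page table moves through in the OOS scheme. */
enum oos_state { SYNCED_PROTECTED, OOS_UNPROTECTED };

struct guest_pt {
	enum oos_state state;
	int dirty_entries;  /* writes that landed while unprotected */
};

/* ppgtt_set_guest_page_oos(): drop write protection, keep a snapshot. */
static void set_oos(struct guest_pt *pt)
{
	pt->state = OOS_UNPROTECTED;
}

/* ppgtt_set_guest_page_sync(): re-protect, then replay every entry that
 * differs from the snapshot into the shadow table (sync_oos_page()). */
static void set_sync(struct guest_pt *pt)
{
	if (pt->state == OOS_UNPROTECTED) {
		printf("replaying %d dirty entries into shadow\n",
		       pt->dirty_entries);
		pt->dirty_entries = 0;
		pt->state = SYNCED_PROTECTED;
	}
}

int main(void)
{
	struct guest_pt pt = { SYNCED_PROTECTED, 0 };

	set_oos(&pt);
	pt.dirty_entries = 3;  /* guest wrote PTEs without trapping */
	set_sync(&pt);         /* before the shadow is used again   */
	return 0;
}
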
1619 struct intel_vgpu *vgpu = spt->vgpu; in ppgtt_handle_guest_write_page_table() local
1621 struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in ppgtt_handle_guest_write_page_table()
1653 vgpu->gtt.scratch_pt[type].page_mfn); in ppgtt_handle_guest_write_page_table()
1660 vgpu->gtt.scratch_pt[type].page_mfn); in ppgtt_handle_guest_write_page_table()
1664 vgpu->gtt.scratch_pt[type].page_mfn); in ppgtt_handle_guest_write_page_table()
1693 &spt->vgpu->gtt.post_shadow_list_head); in ppgtt_set_post_shadow()
1706 int intel_vgpu_flush_post_shadow(struct intel_vgpu *vgpu) in intel_vgpu_flush_post_shadow() argument
1714 list_for_each_safe(pos, n, &vgpu->gtt.post_shadow_list_head) { in intel_vgpu_flush_post_shadow()
1737 struct intel_vgpu *vgpu = spt->vgpu; in ppgtt_handle_guest_write_page_table_bytes() local
1738 struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in ppgtt_handle_guest_write_page_table_bytes()
1739 const struct intel_gvt_device_info *info = &vgpu->gvt->device_info; in ppgtt_handle_guest_write_page_table_bytes()
1772 ops->set_pfn(&se, vgpu->gtt.scratch_pt[type].page_mfn); in ppgtt_handle_guest_write_page_table_bytes()
1785 false, 0, vgpu); in ppgtt_handle_guest_write_page_table_bytes()
1800 struct intel_vgpu *vgpu = mm->vgpu; in invalidate_ppgtt_mm() local
1801 struct intel_gvt *gvt = vgpu->gvt; in invalidate_ppgtt_mm()
1816 ppgtt_invalidate_spt_by_shadow_entry(vgpu, &se); in invalidate_ppgtt_mm()
1820 trace_spt_guest_change(vgpu->id, "destroy root pointer", in invalidate_ppgtt_mm()
1830 struct intel_vgpu *vgpu = mm->vgpu; in shadow_ppgtt_mm() local
1831 struct intel_gvt *gvt = vgpu->gvt; in shadow_ppgtt_mm()
1849 trace_spt_guest_change(vgpu->id, __func__, NULL, in shadow_ppgtt_mm()
1852 spt = ppgtt_populate_spt_by_guest_entry(vgpu, &ge); in shadow_ppgtt_mm()
1861 trace_spt_guest_change(vgpu->id, "populate root pointer", in shadow_ppgtt_mm()
1871 static struct intel_vgpu_mm *vgpu_alloc_mm(struct intel_vgpu *vgpu) in vgpu_alloc_mm() argument
1879 mm->vgpu = vgpu; in vgpu_alloc_mm()
1902 struct intel_vgpu_mm *intel_vgpu_create_ppgtt_mm(struct intel_vgpu *vgpu, in intel_vgpu_create_ppgtt_mm() argument
1905 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_create_ppgtt_mm()
1909 mm = vgpu_alloc_mm(vgpu); in intel_vgpu_create_ppgtt_mm()
1936 list_add_tail(&mm->ppgtt_mm.list, &vgpu->gtt.ppgtt_mm_list_head); in intel_vgpu_create_ppgtt_mm()
1945 static struct intel_vgpu_mm *intel_vgpu_create_ggtt_mm(struct intel_vgpu *vgpu) in intel_vgpu_create_ggtt_mm() argument
1950 mm = vgpu_alloc_mm(vgpu); in intel_vgpu_create_ggtt_mm()
1956 nr_entries = gvt_ggtt_gm_sz(vgpu->gvt) >> I915_GTT_PAGE_SHIFT; in intel_vgpu_create_ggtt_mm()
1959 vgpu->gvt->device_info.gtt_entry_size)); in intel_vgpu_create_ggtt_mm()
1965 mm->ggtt_mm.host_ggtt_aperture = vzalloc((vgpu_aperture_sz(vgpu) >> PAGE_SHIFT) * sizeof(u64)); in intel_vgpu_create_ggtt_mm()
1972 mm->ggtt_mm.host_ggtt_hidden = vzalloc((vgpu_hidden_sz(vgpu) >> PAGE_SHIFT) * sizeof(u64)); in intel_vgpu_create_ggtt_mm()
2000 mutex_lock(&mm->vgpu->gvt->gtt.ppgtt_mm_lock); in _intel_vgpu_mm_release()
2002 mutex_unlock(&mm->vgpu->gvt->gtt.ppgtt_mm_lock); in _intel_vgpu_mm_release()
2047 mutex_lock(&mm->vgpu->gvt->gtt.ppgtt_mm_lock); in intel_vgpu_pin_mm()
2049 &mm->vgpu->gvt->gtt.ppgtt_mm_lru_list_head); in intel_vgpu_pin_mm()
2050 mutex_unlock(&mm->vgpu->gvt->gtt.ppgtt_mm_lock); in intel_vgpu_pin_mm()
2084 struct intel_vgpu *vgpu = mm->vgpu; in ppgtt_get_next_level_entry() local
2085 struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in ppgtt_get_next_level_entry()
2088 s = intel_vgpu_find_spt_by_mfn(vgpu, ops->get_pfn(e)); in ppgtt_get_next_level_entry()
2112 struct intel_vgpu *vgpu = mm->vgpu; in intel_vgpu_gma_to_gpa() local
2113 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_gma_to_gpa()
2126 if (!vgpu_gmadr_is_valid(vgpu, gma)) in intel_vgpu_gma_to_gpa()
2135 trace_gma_translate(vgpu->id, "ggtt", 0, 0, gma, gpa); in intel_vgpu_gma_to_gpa()
2174 trace_gma_translate(vgpu->id, "ppgtt", 0, in intel_vgpu_gma_to_gpa()
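
Note: intel_vgpu_gma_to_gpa() above is the address walk itself. A GGTT address is a single lookup, while a 4-level PPGTT address is decoded one 9-bit slice per level, each slice indexing the table found at the previous level. A sketch of only the index math, with the helpers invented (the real code hides them behind per-mm gma_ops callbacks):

#include <stdint.h>
#include <stdio.h>

#define GTT_PAGE_SHIFT 12

static unsigned int gma_index(uint64_t gma, int level)
{
	/* level 0 = PTE, 1 = PDE, 2 = PDPE, 3 = PML4E */
	return (gma >> (GTT_PAGE_SHIFT + 9 * level)) & 0x1ff;
}

int main(void)
{
	uint64_t gma = 0x0000004321abc123ULL;

	for (int level = 3; level >= 0; level--)
		printf("level %d index = %u\n", level, gma_index(gma, level));
	printf("page offset = 0x%llx\n",
	       (unsigned long long)(gma & ((1 << GTT_PAGE_SHIFT) - 1)));
	return 0;
}
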
2184 static int emulate_ggtt_mmio_read(struct intel_vgpu *vgpu, in emulate_ggtt_mmio_read() argument
2187 struct intel_vgpu_mm *ggtt_mm = vgpu->gtt.ggtt_mm; in emulate_ggtt_mmio_read()
2188 const struct intel_gvt_device_info *info = &vgpu->gvt->device_info; in emulate_ggtt_mmio_read()
2197 if (!intel_gvt_ggtt_validate_range(vgpu, in emulate_ggtt_mmio_read()
2222 int intel_vgpu_emulate_ggtt_mmio_read(struct intel_vgpu *vgpu, unsigned int off, in intel_vgpu_emulate_ggtt_mmio_read() argument
2225 const struct intel_gvt_device_info *info = &vgpu->gvt->device_info; in intel_vgpu_emulate_ggtt_mmio_read()
2232 ret = emulate_ggtt_mmio_read(vgpu, off, p_data, bytes); in intel_vgpu_emulate_ggtt_mmio_read()
2236 static void ggtt_invalidate_pte(struct intel_vgpu *vgpu, in ggtt_invalidate_pte() argument
2239 struct intel_gvt_gtt_pte_ops *pte_ops = vgpu->gvt->gtt.pte_ops; in ggtt_invalidate_pte()
2243 if (pfn != vgpu->gvt->gtt.scratch_mfn) in ggtt_invalidate_pte()
2244 intel_gvt_hypervisor_dma_unmap_guest_page(vgpu, in ggtt_invalidate_pte()
2248 static int emulate_ggtt_mmio_write(struct intel_vgpu *vgpu, unsigned int off, in emulate_ggtt_mmio_write() argument
2251 struct intel_gvt *gvt = vgpu->gvt; in emulate_ggtt_mmio_write()
2253 struct intel_vgpu_mm *ggtt_mm = vgpu->gtt.ggtt_mm; in emulate_ggtt_mmio_write()
2270 if (!vgpu_gmadr_is_valid(vgpu, gma)) in emulate_ggtt_mmio_write()
2331 if (!intel_gvt_hypervisor_is_valid_gfn(vgpu, gfn)) { in emulate_ggtt_mmio_write()
2336 ret = intel_gvt_hypervisor_dma_map_guest_page(vgpu, gfn, in emulate_ggtt_mmio_write()
2356 ggtt_invalidate_pte(vgpu, &e); in emulate_ggtt_mmio_write()
2375 int intel_vgpu_emulate_ggtt_mmio_write(struct intel_vgpu *vgpu, in intel_vgpu_emulate_ggtt_mmio_write() argument
2378 const struct intel_gvt_device_info *info = &vgpu->gvt->device_info; in intel_vgpu_emulate_ggtt_mmio_write()
2380 struct intel_vgpu_submission *s = &vgpu->submission; in intel_vgpu_emulate_ggtt_mmio_write()
2388 ret = emulate_ggtt_mmio_write(vgpu, off, p_data, bytes); in intel_vgpu_emulate_ggtt_mmio_write()
2394 for_each_engine(engine, vgpu->gvt->gt, i) { in intel_vgpu_emulate_ggtt_mmio_write()
2404 static int alloc_scratch_pages(struct intel_vgpu *vgpu, in alloc_scratch_pages() argument
2407 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in alloc_scratch_pages()
2408 struct intel_vgpu_gtt *gtt = &vgpu->gtt; in alloc_scratch_pages()
2409 struct intel_gvt_gtt_pte_ops *ops = vgpu->gvt->gtt.pte_ops; in alloc_scratch_pages()
2411 vgpu->gvt->device_info.gtt_entry_size_shift; in alloc_scratch_pages()
2414 struct device *dev = &vgpu->gvt->gt->i915->drm.pdev->dev; in alloc_scratch_pages()
2438 vgpu->id, type, gtt->scratch_pt[type].page_mfn); in alloc_scratch_pages()
2463 ops->set_entry(scratch_pt, &se, i, false, 0, vgpu); in alloc_scratch_pages()
2469 static int release_scratch_page_tree(struct intel_vgpu *vgpu) in release_scratch_page_tree() argument
2472 struct device *dev = &vgpu->gvt->gt->i915->drm.pdev->dev; in release_scratch_page_tree()
2476 if (vgpu->gtt.scratch_pt[i].page != NULL) { in release_scratch_page_tree()
2477 daddr = (dma_addr_t)(vgpu->gtt.scratch_pt[i].page_mfn << in release_scratch_page_tree()
2480 __free_page(vgpu->gtt.scratch_pt[i].page); in release_scratch_page_tree()
2481 vgpu->gtt.scratch_pt[i].page = NULL; in release_scratch_page_tree()
2482 vgpu->gtt.scratch_pt[i].page_mfn = 0; in release_scratch_page_tree()
2489 static int create_scratch_page_tree(struct intel_vgpu *vgpu) in create_scratch_page_tree() argument
2494 ret = alloc_scratch_pages(vgpu, i); in create_scratch_page_tree()
2502 release_scratch_page_tree(vgpu); in create_scratch_page_tree()
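
Note: create_scratch_page_tree()/alloc_scratch_pages() build one scratch page per table level, filling every entry of level N with the scratch page of level N-1, so any unused or removed mapping resolves to a harmless page instead of faulting. A minimal model of that chain (sizes shrunk and frame numbers invented; in the real code the lowest-level entries point at a global scratch data page):

#include <stdio.h>

#define LEVELS  4  /* PT, PD, PDP, PML4 */
#define ENTRIES 8  /* 512 on real hardware; shrunk for the demo */

static unsigned long scratch_mfn[LEVELS];
static unsigned long scratch_pt[LEVELS][ENTRIES];

static void create_scratch_tree(void)
{
	for (int lvl = 0; lvl < LEVELS; lvl++) {
		scratch_mfn[lvl] = 0x1000 + lvl;  /* pretend page frame  */
		for (int i = 0; i < ENTRIES; i++) /* chain to level N-1  */
			scratch_pt[lvl][i] = lvl ? scratch_mfn[lvl - 1] : 0;
	}
}

int main(void)
{
	create_scratch_tree();
	printf("PML4 entry 0 -> PDP scratch mfn 0x%lx\n", scratch_pt[3][0]);
	return 0;
}
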
2516 int intel_vgpu_init_gtt(struct intel_vgpu *vgpu) in intel_vgpu_init_gtt() argument
2518 struct intel_vgpu_gtt *gtt = &vgpu->gtt; in intel_vgpu_init_gtt()
2526 gtt->ggtt_mm = intel_vgpu_create_ggtt_mm(vgpu); in intel_vgpu_init_gtt()
2532 intel_vgpu_reset_ggtt(vgpu, false); in intel_vgpu_init_gtt()
2536 return create_scratch_page_tree(vgpu); in intel_vgpu_init_gtt()
2539 void intel_vgpu_destroy_all_ppgtt_mm(struct intel_vgpu *vgpu) in intel_vgpu_destroy_all_ppgtt_mm() argument
2544 list_for_each_safe(pos, n, &vgpu->gtt.ppgtt_mm_list_head) { in intel_vgpu_destroy_all_ppgtt_mm()
2549 if (GEM_WARN_ON(!list_empty(&vgpu->gtt.ppgtt_mm_list_head))) in intel_vgpu_destroy_all_ppgtt_mm()
2552 if (GEM_WARN_ON(!radix_tree_empty(&vgpu->gtt.spt_tree))) { in intel_vgpu_destroy_all_ppgtt_mm()
2554 ppgtt_free_all_spt(vgpu); in intel_vgpu_destroy_all_ppgtt_mm()
2558 static void intel_vgpu_destroy_ggtt_mm(struct intel_vgpu *vgpu) in intel_vgpu_destroy_ggtt_mm() argument
2563 &vgpu->gtt.ggtt_mm->ggtt_mm.partial_pte_list, in intel_vgpu_destroy_ggtt_mm()
2569 intel_vgpu_destroy_mm(vgpu->gtt.ggtt_mm); in intel_vgpu_destroy_ggtt_mm()
2570 vgpu->gtt.ggtt_mm = NULL; in intel_vgpu_destroy_ggtt_mm()
2583 void intel_vgpu_clean_gtt(struct intel_vgpu *vgpu) in intel_vgpu_clean_gtt() argument
2585 intel_vgpu_destroy_all_ppgtt_mm(vgpu); in intel_vgpu_clean_gtt()
2586 intel_vgpu_destroy_ggtt_mm(vgpu); in intel_vgpu_clean_gtt()
2587 release_scratch_page_tree(vgpu); in intel_vgpu_clean_gtt()
2654 struct intel_vgpu_mm *intel_vgpu_find_ppgtt_mm(struct intel_vgpu *vgpu, in intel_vgpu_find_ppgtt_mm() argument
2660 list_for_each(pos, &vgpu->gtt.ppgtt_mm_list_head) { in intel_vgpu_find_ppgtt_mm()
2691 struct intel_vgpu_mm *intel_vgpu_get_ppgtt_mm(struct intel_vgpu *vgpu, in intel_vgpu_get_ppgtt_mm() argument
2696 mm = intel_vgpu_find_ppgtt_mm(vgpu, pdps); in intel_vgpu_get_ppgtt_mm()
2700 mm = intel_vgpu_create_ppgtt_mm(vgpu, root_entry_type, pdps); in intel_vgpu_get_ppgtt_mm()
2717 int intel_vgpu_put_ppgtt_mm(struct intel_vgpu *vgpu, u64 pdps[]) in intel_vgpu_put_ppgtt_mm() argument
2721 mm = intel_vgpu_find_ppgtt_mm(vgpu, pdps); in intel_vgpu_put_ppgtt_mm()
2812 void intel_vgpu_invalidate_ppgtt(struct intel_vgpu *vgpu) in intel_vgpu_invalidate_ppgtt() argument
2817 list_for_each_safe(pos, n, &vgpu->gtt.ppgtt_mm_list_head) { in intel_vgpu_invalidate_ppgtt()
2820 mutex_lock(&vgpu->gvt->gtt.ppgtt_mm_lock); in intel_vgpu_invalidate_ppgtt()
2822 mutex_unlock(&vgpu->gvt->gtt.ppgtt_mm_lock); in intel_vgpu_invalidate_ppgtt()
2838 void intel_vgpu_reset_ggtt(struct intel_vgpu *vgpu, bool invalidate_old) in intel_vgpu_reset_ggtt() argument
2840 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_reset_ggtt()
2841 struct intel_gvt_gtt_pte_ops *pte_ops = vgpu->gvt->gtt.pte_ops; in intel_vgpu_reset_ggtt()
2850 index = vgpu_aperture_gmadr_base(vgpu) >> PAGE_SHIFT; in intel_vgpu_reset_ggtt()
2851 num_entries = vgpu_aperture_sz(vgpu) >> PAGE_SHIFT; in intel_vgpu_reset_ggtt()
2854 ggtt_get_host_entry(vgpu->gtt.ggtt_mm, &old_entry, index); in intel_vgpu_reset_ggtt()
2855 ggtt_invalidate_pte(vgpu, &old_entry); in intel_vgpu_reset_ggtt()
2857 ggtt_set_host_entry(vgpu->gtt.ggtt_mm, &entry, index++); in intel_vgpu_reset_ggtt()
2860 index = vgpu_hidden_gmadr_base(vgpu) >> PAGE_SHIFT; in intel_vgpu_reset_ggtt()
2861 num_entries = vgpu_hidden_sz(vgpu) >> PAGE_SHIFT; in intel_vgpu_reset_ggtt()
2864 ggtt_get_host_entry(vgpu->gtt.ggtt_mm, &old_entry, index); in intel_vgpu_reset_ggtt()
2865 ggtt_invalidate_pte(vgpu, &old_entry); in intel_vgpu_reset_ggtt()
2867 ggtt_set_host_entry(vgpu->gtt.ggtt_mm, &entry, index++); in intel_vgpu_reset_ggtt()
2883 struct intel_vgpu *vgpu; in intel_gvt_restore_ggtt() local
2890 idr_for_each_entry(&(gvt)->vgpu_idr, vgpu, id) { in intel_gvt_restore_ggtt()
2891 mm = vgpu->gtt.ggtt_mm; in intel_gvt_restore_ggtt()
2893 num_low = vgpu_aperture_sz(vgpu) >> PAGE_SHIFT; in intel_gvt_restore_ggtt()
2894 offset = vgpu_aperture_gmadr_base(vgpu) >> PAGE_SHIFT; in intel_gvt_restore_ggtt()
2898 write_pte64(vgpu->gvt->gt->ggtt, offset + idx, pte); in intel_gvt_restore_ggtt()
2901 num_hi = vgpu_hidden_sz(vgpu) >> PAGE_SHIFT; in intel_gvt_restore_ggtt()
2902 offset = vgpu_hidden_gmadr_base(vgpu) >> PAGE_SHIFT; in intel_gvt_restore_ggtt()
2906 write_pte64(vgpu->gvt->gt->ggtt, offset + idx, pte); in intel_gvt_restore_ggtt()
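
Note: the final cluster, intel_gvt_restore_ggtt(), replays the host PTEs that ggtt_set_host_entry() cached into host_ggtt_aperture/host_ggtt_hidden back into the hardware GGTT, walking each vGPU's aperture and hidden ranges separately; this is how vGPU mappings survive events that clobber the physical GGTT, such as suspend/resume. A sketch of the replay loop with everything modeled as flat arrays (sizes and the present-bit encoding are invented):

#include <stdint.h>
#include <stdio.h>

#define APERTURE_SZ 4
#define HIDDEN_SZ   4

/* Per-vGPU caches written on every host-entry update and replayed here;
 * the hardware GGTT is modeled as one flat array. */
static uint64_t host_ggtt_aperture[APERTURE_SZ];
static uint64_t host_ggtt_hidden[HIDDEN_SZ];
static uint64_t hw_ggtt[16];

static void restore_range(const uint64_t *cache, int num, int offset)
{
	for (int idx = 0; idx < num; idx++) {
		uint64_t pte = cache[idx];

		if (pte & 1)                         /* present bit set  */
			hw_ggtt[offset + idx] = pte; /* write_pte64()    */
	}
}

int main(void)
{
	host_ggtt_aperture[2] = 0xabc001;  /* pretend cached PTE, present */
	restore_range(host_ggtt_aperture, APERTURE_SZ, 0); /* low/mappable */
	restore_range(host_ggtt_hidden, HIDDEN_SZ, 8);     /* high/hidden  */
	printf("hw_ggtt[2] = 0x%llx\n", (unsigned long long)hw_ggtt[2]);
	return 0;
}
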