Lines matching references to: e (Intel GVT-g GTT code, gtt.c)

135 #define gtt_init_entry(e, t, p, v) do { \  argument
136 (e)->type = t; \
137 (e)->pdev = p; \
138 memcpy(&(e)->val64, &v, sizeof(v)); \
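The gtt_init_entry() fragment above stamps an entry descriptor with its type and owning device and copies the caller's raw value into the 64-bit val64 field. A minimal self-contained sketch of the same idea follows; the struct is a simplified stand-in, not the kernel's struct intel_gvt_gtt_entry, and only the three listed fields are kept.

/*
 * Sketch of the gtt_init_entry() pattern: memcpy() copies sizeof(v)
 * bytes, so a 4-byte or an 8-byte raw entry value both work without
 * an explicit cast.
 */
#include <stdint.h>
#include <string.h>

struct gtt_entry_sketch {
        int type;               /* GTT_TYPE_* in the real code */
        void *pdev;             /* struct pci_dev * in the real code */
        uint64_t val64;         /* raw entry bits */
};

#define gtt_init_entry_sketch(e, t, p, v) do {          \
        (e)->type = (t);                                 \
        (e)->pdev = (p);                                 \
        memcpy(&(e)->val64, &(v), sizeof(v));            \
} while (0)
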
263 struct intel_gvt_gtt_entry *e, in gtt_get_entry64() argument
276 &e->val64, 8); in gtt_get_entry64()
280 e->val64 = read_pte64(vgpu->gvt->dev_priv, index); in gtt_get_entry64()
282 e->val64 = *((u64 *)pt + index); in gtt_get_entry64()
288 struct intel_gvt_gtt_entry *e, in gtt_set_entry64() argument
301 &e->val64, 8); in gtt_set_entry64()
305 write_pte64(vgpu->gvt->dev_priv, index, e->val64); in gtt_set_entry64()
307 *((u64 *)pt + index) = e->val64; in gtt_set_entry64()
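The gtt_get_entry64()/gtt_set_entry64() fragments show three different backing stores for an entry: guest page-table memory read through the hypervisor 8 bytes at a time, the MMIO-mapped GGTT accessed through read_pte64()/write_pte64(), and shadow page tables kept in host memory and indexed as a plain u64 array. The sketch below only illustrates that dispatch; the selector arguments and their meaning are assumptions, not the real function's signature.

/*
 * Illustrative dispatch between the three backing stores seen in
 * gtt_get_entry64(): guest memory, the hardware GGTT, or an in-memory
 * shadow page table.  Names and parameters here are stand-ins.
 */
#include <stdint.h>
#include <string.h>

static void sketch_get_entry64(uint64_t *val64, const void *pt,
                               const volatile uint64_t *ggtt_mmio,
                               const uint8_t *guest_mem, unsigned long index,
                               int read_from_guest)
{
        if (read_from_guest)            /* hypervisor read of guest memory */
                memcpy(val64, guest_mem + index * 8, 8);
        else if (!pt)                   /* stands in for read_pte64() */
                *val64 = ggtt_mmio[index];
        else                            /* plain load from the shadow table */
                *val64 = *((const uint64_t *)pt + index);
}
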
318 static unsigned long gen8_gtt_get_pfn(struct intel_gvt_gtt_entry *e) in gen8_gtt_get_pfn() argument
322 if (e->type == GTT_TYPE_PPGTT_PTE_1G_ENTRY) in gen8_gtt_get_pfn()
323 pfn = (e->val64 & ADDR_1G_MASK) >> 12; in gen8_gtt_get_pfn()
324 else if (e->type == GTT_TYPE_PPGTT_PTE_2M_ENTRY) in gen8_gtt_get_pfn()
325 pfn = (e->val64 & ADDR_2M_MASK) >> 12; in gen8_gtt_get_pfn()
327 pfn = (e->val64 & ADDR_4K_MASK) >> 12; in gen8_gtt_get_pfn()
331 static void gen8_gtt_set_pfn(struct intel_gvt_gtt_entry *e, unsigned long pfn) in gen8_gtt_set_pfn() argument
333 if (e->type == GTT_TYPE_PPGTT_PTE_1G_ENTRY) { in gen8_gtt_set_pfn()
334 e->val64 &= ~ADDR_1G_MASK; in gen8_gtt_set_pfn()
336 } else if (e->type == GTT_TYPE_PPGTT_PTE_2M_ENTRY) { in gen8_gtt_set_pfn()
337 e->val64 &= ~ADDR_2M_MASK; in gen8_gtt_set_pfn()
340 e->val64 &= ~ADDR_4K_MASK; in gen8_gtt_set_pfn()
344 e->val64 |= (pfn << 12); in gen8_gtt_set_pfn()
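gen8_gtt_get_pfn() and gen8_gtt_set_pfn() above select an address mask by entry type (1G, 2M or 4K page) and always shift by 12, so callers work in 4K page-frame numbers regardless of page size. A sketch with stand-in masks follows; the 46-bit physical address width used below is an assumption, the real masks are derived from the platform's address width.

/*
 * Stand-in for the pfn accessors: the address field starts at bit 30
 * (1G), bit 21 (2M) or bit 12 (4K); get_pfn always shifts down by 12.
 */
#include <stdint.h>

#define SK_ADDR_1G_MASK  (((1ULL << 46) - 1) & ~((1ULL << 30) - 1))
#define SK_ADDR_2M_MASK  (((1ULL << 46) - 1) & ~((1ULL << 21) - 1))
#define SK_ADDR_4K_MASK  (((1ULL << 46) - 1) & ~((1ULL << 12) - 1))

static unsigned long sketch_get_pfn(uint64_t val64, int is_1g, int is_2m)
{
        if (is_1g)
                return (val64 & SK_ADDR_1G_MASK) >> 12;
        if (is_2m)
                return (val64 & SK_ADDR_2M_MASK) >> 12;
        return (val64 & SK_ADDR_4K_MASK) >> 12;
}

static void sketch_set_pfn(uint64_t *val64, unsigned long pfn,
                           int is_1g, int is_2m)
{
        uint64_t mask = is_1g ? SK_ADDR_1G_MASK :
                        is_2m ? SK_ADDR_2M_MASK : SK_ADDR_4K_MASK;

        *val64 &= ~mask;
        *val64 |= ((uint64_t)pfn << 12) & mask;
}

Note the explicit cast before the shift in the sketch; the listed line 344 shifts an unsigned long directly, which is only safe where unsigned long is 64 bits wide.
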
347 static bool gen8_gtt_test_pse(struct intel_gvt_gtt_entry *e) in gen8_gtt_test_pse() argument
350 if (get_pse_type(e->type) == GTT_TYPE_INVALID) in gen8_gtt_test_pse()
353 e->type = get_entry_type(e->type); in gen8_gtt_test_pse()
354 if (!(e->val64 & (1 << 7))) in gen8_gtt_test_pse()
357 e->type = get_pse_type(e->type); in gen8_gtt_test_pse()
361 static bool gen8_gtt_test_present(struct intel_gvt_gtt_entry *e) in gen8_gtt_test_present() argument
368 if (e->type == GTT_TYPE_PPGTT_ROOT_L3_ENTRY in gen8_gtt_test_present()
369 || e->type == GTT_TYPE_PPGTT_ROOT_L4_ENTRY) in gen8_gtt_test_present()
370 return (e->val64 != 0); in gen8_gtt_test_present()
372 return (e->val64 & (1 << 0)); in gen8_gtt_test_present()
375 static void gtt_entry_clear_present(struct intel_gvt_gtt_entry *e) in gtt_entry_clear_present() argument
377 e->val64 &= ~(1 << 0); in gtt_entry_clear_present()
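gen8_gtt_test_pse(), gen8_gtt_test_present() and gtt_entry_clear_present() above boil down to two bits: bit 0 is the present bit and bit 7 the PSE (large page) bit, while the PPGTT L3/L4 root entries count as present whenever they are non-zero (root pointers are typically written without a present bit). A compact sketch:

#include <stdbool.h>
#include <stdint.h>

#define SK_PTE_PRESENT  (1ULL << 0)     /* bit 0, as in lines 372 and 377 */
#define SK_PTE_PSE      (1ULL << 7)     /* bit 7, as in line 354 */

static bool sketch_test_present(uint64_t val64, bool is_root_entry)
{
        if (is_root_entry)              /* L3/L4 root: non-zero == present */
                return val64 != 0;
        return val64 & SK_PTE_PRESENT;
}

static bool sketch_test_pse(uint64_t val64)
{
        return val64 & SK_PTE_PSE;
}

static void sketch_clear_present(uint64_t *val64)
{
        *val64 &= ~SK_PTE_PRESENT;
}
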
451 void *page_table, struct intel_gvt_gtt_entry *e, in intel_vgpu_mm_get_entry() argument
458 e->type = mm->page_table_entry_type; in intel_vgpu_mm_get_entry()
460 ret = ops->get_entry(page_table, e, index, false, 0, mm->vgpu); in intel_vgpu_mm_get_entry()
464 ops->test_pse(e); in intel_vgpu_mm_get_entry()
469 void *page_table, struct intel_gvt_gtt_entry *e, in intel_vgpu_mm_set_entry() argument
475 return ops->set_entry(page_table, e, index, false, 0, mm->vgpu); in intel_vgpu_mm_set_entry()
484 struct intel_gvt_gtt_entry *e, unsigned long index, in ppgtt_spt_get_entry() argument
491 e->type = get_entry_type(type); in ppgtt_spt_get_entry()
493 if (WARN(!gtt_type_is_entry(e->type), "invalid entry type\n")) in ppgtt_spt_get_entry()
496 ret = ops->get_entry(page_table, e, index, guest, in ppgtt_spt_get_entry()
502 ops->test_pse(e); in ppgtt_spt_get_entry()
509 struct intel_gvt_gtt_entry *e, unsigned long index, in ppgtt_spt_set_entry() argument
515 if (WARN(!gtt_type_is_entry(e->type), "invalid entry type\n")) in ppgtt_spt_set_entry()
518 return ops->set_entry(page_table, e, index, guest, in ppgtt_spt_set_entry()
523 #define ppgtt_get_guest_entry(spt, e, index) \ argument
525 spt->guest_page_type, e, index, true)
527 #define ppgtt_set_guest_entry(spt, e, index) \ argument
529 spt->guest_page_type, e, index, true)
531 #define ppgtt_get_shadow_entry(spt, e, index) \ argument
533 spt->shadow_page.type, e, index, false)
535 #define ppgtt_set_shadow_entry(spt, e, index) \ argument
537 spt->shadow_page.type, e, index, false)
803 #define for_each_present_guest_entry(spt, e, i) \ argument
805 if (!ppgtt_get_guest_entry(spt, e, i) && \
806 spt->vgpu->gvt->gtt.pte_ops->test_present(e))
808 #define for_each_present_shadow_entry(spt, e, i) \ argument
810 if (!ppgtt_get_shadow_entry(spt, e, i) && \
811 spt->vgpu->gvt->gtt.pte_ops->test_present(e))
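Only the filtering half of for_each_present_guest_entry() and for_each_present_shadow_entry() references e, so the loop header on the preceding (unlisted) line of each macro is not shown. The usual shape of such a macro, a for loop immediately followed by an if, is sketched below with stand-in helpers.

/*
 * Sketch of a "for_each_present_*" style macro: the for/if pairing
 * lets the macro take a single statement or block as its body while
 * silently skipping entries that fail to read or are not present.
 * All names here are stand-ins.
 */
#include <stdbool.h>
#include <stdint.h>

struct sketch_entry { uint64_t val64; };

static int sketch_get_entry(const uint64_t *tbl, struct sketch_entry *e,
                            unsigned long i)
{
        e->val64 = tbl[i];
        return 0;                       /* 0 == success, kernel style */
}

static bool sketch_entry_present(const struct sketch_entry *e)
{
        return e->val64 & 1;
}

#define sketch_for_each_present_entry(tbl, e, i, nr)            \
        for ((i) = 0; (i) < (nr); (i)++)                        \
                if (!sketch_get_entry((tbl), (e), (i)) &&       \
                    sketch_entry_present(e))

Because the if carries no else, callers are expected to use a braced block as the body, which is how the real macro is used at line 869 below.
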
825 struct intel_gvt_gtt_entry *e) in ppgtt_invalidate_shadow_page_by_shadow_entry() argument
831 if (WARN_ON(!gtt_type_is_pt(get_next_pt_type(e->type)))) in ppgtt_invalidate_shadow_page_by_shadow_entry()
834 if (e->type != GTT_TYPE_PPGTT_ROOT_L3_ENTRY in ppgtt_invalidate_shadow_page_by_shadow_entry()
835 && e->type != GTT_TYPE_PPGTT_ROOT_L4_ENTRY) { in ppgtt_invalidate_shadow_page_by_shadow_entry()
836 cur_pt_type = get_next_pt_type(e->type) + 1; in ppgtt_invalidate_shadow_page_by_shadow_entry()
837 if (ops->get_pfn(e) == in ppgtt_invalidate_shadow_page_by_shadow_entry()
841 s = ppgtt_find_shadow_page(vgpu, ops->get_pfn(e)); in ppgtt_invalidate_shadow_page_by_shadow_entry()
844 ops->get_pfn(e)); in ppgtt_invalidate_shadow_page_by_shadow_entry()
853 struct intel_gvt_gtt_entry e; in ppgtt_invalidate_shadow_page() local
869 for_each_present_shadow_entry(spt, &e, index) { in ppgtt_invalidate_shadow_page()
870 if (!gtt_type_is_pt(get_next_pt_type(e.type))) { in ppgtt_invalidate_shadow_page()
875 spt->vgpu, &e); in ppgtt_invalidate_shadow_page()
886 spt, e.val64, e.type); in ppgtt_invalidate_shadow_page()
1690 struct intel_gvt_gtt_entry *e, unsigned long index, bool guest) in ppgtt_get_next_level_entry() argument
1699 s = ppgtt_find_shadow_page(vgpu, ops->get_pfn(e)); in ppgtt_get_next_level_entry()
1704 ppgtt_get_shadow_entry(s, e, index); in ppgtt_get_next_level_entry()
1706 ppgtt_get_guest_entry(s, e, index); in ppgtt_get_next_level_entry()
1729 struct intel_gvt_gtt_entry e; in intel_vgpu_gma_to_gpa() local
1740 ret = ggtt_get_guest_entry(mm, &e, in intel_vgpu_gma_to_gpa()
1744 gpa = (pte_ops->get_pfn(&e) << GTT_PAGE_SHIFT) in intel_vgpu_gma_to_gpa()
1753 ret = ppgtt_get_shadow_root_entry(mm, &e, 0); in intel_vgpu_gma_to_gpa()
1763 ret = ppgtt_get_shadow_root_entry(mm, &e, in intel_vgpu_gma_to_gpa()
1772 ret = ppgtt_get_shadow_root_entry(mm, &e, in intel_vgpu_gma_to_gpa()
1786 ret = ppgtt_get_next_level_entry(mm, &e, gma_index[i], in intel_vgpu_gma_to_gpa()
1791 if (!pte_ops->test_present(&e)) { in intel_vgpu_gma_to_gpa()
1797 gpa = (pte_ops->get_pfn(&e) << GTT_PAGE_SHIFT) in intel_vgpu_gma_to_gpa()
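intel_vgpu_gma_to_gpa() above splits a graphics memory address into per-level indices, walks the (shadow) page tables one level at a time via ppgtt_get_next_level_entry(), bails out on a non-present entry, and forms the GPA from the leaf pfn plus the in-page offset. The sketch below shows only that walk for a 4-level table; huge pages, the GGTT path and the 3-level layout are omitted, and the caller pre-resolves each level's table to keep it self-contained.

/*
 * Simplified 4-level GMA -> GPA walk.  In the real code the next
 * table is looked up from the previous entry's pfn; here the caller
 * supplies the already-resolved table for each level.
 */
#include <stdint.h>

#define SK_PAGE_SHIFT   12
#define SK_PTE_PRESENT  (1ULL << 0)
#define SK_ADDR_MASK    (((1ULL << 46) - 1) & ~((1ULL << SK_PAGE_SHIFT) - 1))
#define SK_LEVELS       4               /* PML4, PDP, PD, PT */

static int sketch_gma_to_gpa(uint64_t *const tables[SK_LEVELS],
                             uint64_t gma, uint64_t *gpa)
{
        int shift = SK_PAGE_SHIFT + 9 * (SK_LEVELS - 1);
        uint64_t pte = 0;

        for (int level = 0; level < SK_LEVELS; level++, shift -= 9) {
                unsigned long index = (gma >> shift) & 0x1ff;

                pte = tables[level][index];
                if (!(pte & SK_PTE_PRESENT))
                        return -1;      /* real code jumps to its error path */
        }

        *gpa = (pte & SK_ADDR_MASK) + (gma & ((1ULL << SK_PAGE_SHIFT) - 1));
        return 0;
}
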
1814 struct intel_gvt_gtt_entry e; in emulate_gtt_mmio_read() local
1819 ggtt_get_guest_entry(ggtt_mm, &e, index); in emulate_gtt_mmio_read()
1820 memcpy(p_data, (void *)&e.val64 + (off & (info->gtt_entry_size - 1)), in emulate_gtt_mmio_read()
1860 struct intel_gvt_gtt_entry e, m; in emulate_gtt_mmio_write() local
1872 ggtt_get_guest_entry(ggtt_mm, &e, g_gtt_index); in emulate_gtt_mmio_write()
1874 memcpy((void *)&e.val64 + (off & (info->gtt_entry_size - 1)), p_data, in emulate_gtt_mmio_write()
1877 if (ops->test_present(&e)) { in emulate_gtt_mmio_write()
1878 ret = gtt_entry_p2m(vgpu, &e, &m); in emulate_gtt_mmio_write()
1888 m = e; in emulate_gtt_mmio_write()
1894 ggtt_set_guest_entry(ggtt_mm, &e, g_gtt_index); in emulate_gtt_mmio_write()
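The emulate_gtt_mmio_write() fragment above handles guests that update an 8-byte GGTT entry in pieces (for example two 4-byte MMIO writes): the bytes just written are merged into the cached guest entry at their offset within the entry, and only then is the entry either translated to a machine-address entry (gtt_entry_p2m) when present or left pointing at a scratch mapping when not. A sketch of the merge step, with the surrounding bookkeeping omitted:

/*
 * Merge a partial MMIO write into a cached 8-byte GTT entry value.
 * The entry size matches the listing; everything else is illustrative.
 */
#include <stdint.h>
#include <string.h>

#define SK_GTT_ENTRY_SIZE 8u

static void sketch_merge_pte_write(uint64_t *guest_val64, unsigned int off,
                                   const void *p_data, unsigned int bytes)
{
        unsigned int entry_off = off & (SK_GTT_ENTRY_SIZE - 1);

        /* bytes written by the guest land at their offset inside val64 */
        memcpy((uint8_t *)guest_val64 + entry_off, p_data, bytes);
}
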
2346 struct intel_gvt_gtt_entry e; in intel_vgpu_reset_ggtt() local
2348 memset(&e, 0, sizeof(struct intel_gvt_gtt_entry)); in intel_vgpu_reset_ggtt()
2349 e.type = GTT_TYPE_GGTT_PTE; in intel_vgpu_reset_ggtt()
2350 ops->set_pfn(&e, gvt->gtt.scratch_ggtt_mfn); in intel_vgpu_reset_ggtt()
2351 e.val64 |= _PAGE_PRESENT; in intel_vgpu_reset_ggtt()
2356 ops->set_entry(NULL, &e, index + offset, false, 0, vgpu); in intel_vgpu_reset_ggtt()
2361 ops->set_entry(NULL, &e, index + offset, false, 0, vgpu); in intel_vgpu_reset_ggtt()
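intel_vgpu_reset_ggtt() above builds a single scratch GGTT PTE (scratch mfn plus the present bit) and writes it to every index of the vGPU's GM ranges so that no stale guest mapping survives a reset. A sketch of that fill loop follows; the present-bit value and the range handling are simplified assumptions.

/*
 * Fill a GGTT index range with one scratch entry, as the reset path
 * does via ops->set_entry() for the aperture and hidden GM ranges.
 */
#include <stdint.h>

#define SK_PAGE_PRESENT (1ULL << 0)

static void sketch_reset_ggtt_range(uint64_t *ggtt, unsigned long scratch_mfn,
                                    unsigned long first, unsigned long count)
{
        uint64_t scratch_pte = ((uint64_t)scratch_mfn << 12) | SK_PAGE_PRESENT;

        for (unsigned long i = 0; i < count; i++)
                ggtt[first + i] = scratch_pte;
}
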