
Searched refs:vgpu (Results 1 – 25 of 42) sorted by relevance


/drivers/gpu/drm/i915/gvt/
display.c
38 static int get_edp_pipe(struct intel_vgpu *vgpu) in get_edp_pipe() argument
40 u32 data = vgpu_vreg(vgpu, _TRANS_DDI_FUNC_CTL_EDP); in get_edp_pipe()
58 static int edp_pipe_is_enabled(struct intel_vgpu *vgpu) in edp_pipe_is_enabled() argument
60 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in edp_pipe_is_enabled()
62 if (!(vgpu_vreg_t(vgpu, PIPECONF(_PIPE_EDP)) & PIPECONF_ENABLE)) in edp_pipe_is_enabled()
65 if (!(vgpu_vreg(vgpu, _TRANS_DDI_FUNC_CTL_EDP) & TRANS_DDI_FUNC_ENABLE)) in edp_pipe_is_enabled()
70 int pipe_is_enabled(struct intel_vgpu *vgpu, int pipe) in pipe_is_enabled() argument
72 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in pipe_is_enabled()
78 if (vgpu_vreg_t(vgpu, PIPECONF(pipe)) & PIPECONF_ENABLE) in pipe_is_enabled()
81 if (edp_pipe_is_enabled(vgpu) && in pipe_is_enabled()
[all …]
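
For context, pipe_is_enabled() and edp_pipe_is_enabled() answer purely from the vGPU's virtual register file (vreg), not from hardware. A minimal sketch of a consumer in the style of get_active_pipe() in fb_decoder.c; first_enabled_pipe() is a hypothetical helper, not part of the gvt code:

/* Illustrative sketch: return the first pipe the guest believes is enabled,
 * or -ENODEV if none is.  first_enabled_pipe() is not a real gvt helper. */
static int first_enabled_pipe(struct intel_vgpu *vgpu)
{
	int i;

	for (i = 0; i < I915_MAX_PIPES; i++)
		if (pipe_is_enabled(vgpu, i))
			return i;

	return -ENODEV;
}
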
vgpu.c
38 void populate_pvinfo_page(struct intel_vgpu *vgpu) in populate_pvinfo_page() argument
40 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in populate_pvinfo_page()
42 vgpu_vreg64_t(vgpu, vgtif_reg(magic)) = VGT_MAGIC; in populate_pvinfo_page()
43 vgpu_vreg_t(vgpu, vgtif_reg(version_major)) = 1; in populate_pvinfo_page()
44 vgpu_vreg_t(vgpu, vgtif_reg(version_minor)) = 0; in populate_pvinfo_page()
45 vgpu_vreg_t(vgpu, vgtif_reg(display_ready)) = 0; in populate_pvinfo_page()
46 vgpu_vreg_t(vgpu, vgtif_reg(vgt_id)) = vgpu->id; in populate_pvinfo_page()
48 vgpu_vreg_t(vgpu, vgtif_reg(vgt_caps)) = VGT_CAPS_FULL_PPGTT; in populate_pvinfo_page()
49 vgpu_vreg_t(vgpu, vgtif_reg(vgt_caps)) |= VGT_CAPS_HWSP_EMULATION; in populate_pvinfo_page()
50 vgpu_vreg_t(vgpu, vgtif_reg(vgt_caps)) |= VGT_CAPS_HUGE_GTT; in populate_pvinfo_page()
[all …]
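
populate_pvinfo_page() fills the PVINFO (vgt_if) MMIO page that a paravirtualized guest reads to detect GVT-g and its capabilities. A minimal sketch of querying one of those capability bits back out of the vreg; vgpu_has_huge_gtt() is a hypothetical helper, not part of vgpu.c:

static bool vgpu_has_huge_gtt(struct intel_vgpu *vgpu)
{
	/* vgt_caps is one of the fields populate_pvinfo_page() writes above */
	return vgpu_vreg_t(vgpu, vgtif_reg(vgt_caps)) & VGT_CAPS_HUGE_GTT;
}
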
cfg_space.c
68 static void vgpu_pci_cfg_mem_write(struct intel_vgpu *vgpu, unsigned int off, in vgpu_pci_cfg_mem_write() argument
71 u8 *cfg_base = vgpu_cfg_space(vgpu); in vgpu_pci_cfg_mem_write()
96 if (off == vgpu->cfg_space.pmcsr_off && vgpu->cfg_space.pmcsr_off) { in vgpu_pci_cfg_mem_write()
97 pwr = (pci_power_t __force)(*(u16*)(&vgpu_cfg_space(vgpu)[off]) in vgpu_pci_cfg_mem_write()
100 vgpu->d3_entered = true; in vgpu_pci_cfg_mem_write()
102 vgpu->id, pwr); in vgpu_pci_cfg_mem_write()
116 int intel_vgpu_emulate_cfg_read(struct intel_vgpu *vgpu, unsigned int offset, in intel_vgpu_emulate_cfg_read() argument
119 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_vgpu_emulate_cfg_read()
125 offset + bytes > vgpu->gvt->device_info.cfg_space_size)) in intel_vgpu_emulate_cfg_read()
128 memcpy(p_data, vgpu_cfg_space(vgpu) + offset, bytes); in intel_vgpu_emulate_cfg_read()
[all …]
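
intel_vgpu_emulate_cfg_read() services guest config-space reads from the per-vGPU virtual_cfg_space buffer. A minimal sketch of pulling the 16-bit vendor ID through that entry point; read_virtual_vendor_id() is a hypothetical helper and PCI_VENDOR_ID comes from <linux/pci_regs.h>:

static int read_virtual_vendor_id(struct intel_vgpu *vgpu, u16 *vendor)
{
	/* 2-byte, naturally aligned read at offset 0 of the emulated config space */
	return intel_vgpu_emulate_cfg_read(vgpu, PCI_VENDOR_ID,
					   vendor, sizeof(*vendor));
}
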
gvt.h
112 #define vgpu_cfg_space(vgpu) ((vgpu)->cfg_space.virtual_cfg_space) argument
126 #define vgpu_opregion(vgpu) (&(vgpu->opregion)) argument
145 int (*init)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
146 void (*clean)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
147 void (*reset)(struct intel_vgpu *vgpu, intel_engine_mask_t engine_mask);
217 static inline void *intel_vgpu_vdev(struct intel_vgpu *vgpu) in intel_vgpu_vdev() argument
219 return vgpu->vdev; in intel_vgpu_vdev()
391 #define vgpu_aperture_offset(vgpu) ((vgpu)->gm.low_gm_node.start) argument
392 #define vgpu_hidden_offset(vgpu) ((vgpu)->gm.high_gm_node.start) argument
393 #define vgpu_aperture_sz(vgpu) ((vgpu)->gm.aperture_sz) argument
[all …]
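
The gvt.h accessors above are plain macros over intel_vgpu fields, so derived quantities stay one-liners. A minimal sketch using the aperture macros; vgpu_aperture_end() is a hypothetical helper, not part of gvt.h:

static inline u64 vgpu_aperture_end(struct intel_vgpu *vgpu)
{
	/* the guest-visible aperture carve-out is [offset, offset + size) */
	return vgpu_aperture_offset(vgpu) + vgpu_aperture_sz(vgpu);
}
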
edid.c
49 static unsigned char edid_get_byte(struct intel_vgpu *vgpu) in edid_get_byte() argument
51 struct intel_vgpu_i2c_edid *edid = &vgpu->display.i2c_edid; in edid_get_byte()
68 if (intel_vgpu_has_monitor_on_port(vgpu, edid->port)) { in edid_get_byte()
70 intel_vgpu_port(vgpu, edid->port)->edid; in edid_get_byte()
126 static void reset_gmbus_controller(struct intel_vgpu *vgpu) in reset_gmbus_controller() argument
128 vgpu_vreg_t(vgpu, PCH_GMBUS2) = GMBUS_HW_RDY; in reset_gmbus_controller()
129 if (!vgpu->display.i2c_edid.edid_available) in reset_gmbus_controller()
130 vgpu_vreg_t(vgpu, PCH_GMBUS2) |= GMBUS_SATOER; in reset_gmbus_controller()
131 vgpu->display.i2c_edid.gmbus.phase = GMBUS_IDLE_PHASE; in reset_gmbus_controller()
135 static int gmbus0_mmio_write(struct intel_vgpu *vgpu, in gmbus0_mmio_write() argument
[all …]
mmio.c
47 int intel_vgpu_gpa_to_mmio_offset(struct intel_vgpu *vgpu, u64 gpa) in intel_vgpu_gpa_to_mmio_offset() argument
49 u64 gttmmio_gpa = intel_vgpu_get_bar_gpa(vgpu, PCI_BASE_ADDRESS_0); in intel_vgpu_gpa_to_mmio_offset()
60 static void failsafe_emulate_mmio_rw(struct intel_vgpu *vgpu, u64 pa, in failsafe_emulate_mmio_rw() argument
67 if (!vgpu || !p_data) in failsafe_emulate_mmio_rw()
70 gvt = vgpu->gvt; in failsafe_emulate_mmio_rw()
71 mutex_lock(&vgpu->vgpu_lock); in failsafe_emulate_mmio_rw()
72 offset = intel_vgpu_gpa_to_mmio_offset(vgpu, pa); in failsafe_emulate_mmio_rw()
75 intel_vgpu_default_mmio_read(vgpu, offset, p_data, in failsafe_emulate_mmio_rw()
78 intel_vgpu_default_mmio_write(vgpu, offset, p_data, in failsafe_emulate_mmio_rw()
82 pt = vgpu->gtt.ggtt_mm->ggtt_mm.virtual_ggtt + offset; in failsafe_emulate_mmio_rw()
[all …]
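
intel_vgpu_gpa_to_mmio_offset() turns a guest physical address into an offset relative to the vGPU's emulated GTTMMIO BAR. A minimal classification check built on it; gpa_is_mmio() is a hypothetical helper and it assumes device_info.mmio_size describes the MMIO region length:

static bool gpa_is_mmio(struct intel_vgpu *vgpu, u64 gpa)
{
	int offset = intel_vgpu_gpa_to_mmio_offset(vgpu, gpa);

	/* a negative offset means the gpa lies below the BAR base */
	return offset >= 0 &&
	       (u32)offset < vgpu->gvt->device_info.mmio_size;
}
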
aperture_gm.c
41 static int alloc_gm(struct intel_vgpu *vgpu, bool high_gm) in alloc_gm() argument
43 struct intel_gvt *gvt = vgpu->gvt; in alloc_gm()
51 node = &vgpu->gm.high_gm_node; in alloc_gm()
52 size = vgpu_hidden_sz(vgpu); in alloc_gm()
57 node = &vgpu->gm.low_gm_node; in alloc_gm()
58 size = vgpu_aperture_sz(vgpu); in alloc_gm()
79 static int alloc_vgpu_gm(struct intel_vgpu *vgpu) in alloc_vgpu_gm() argument
81 struct intel_gvt *gvt = vgpu->gvt; in alloc_vgpu_gm()
85 ret = alloc_gm(vgpu, false); in alloc_vgpu_gm()
89 ret = alloc_gm(vgpu, true); in alloc_vgpu_gm()
[all …]
mpt.h
80 static inline int intel_gvt_hypervisor_attach_vgpu(struct intel_vgpu *vgpu) in intel_gvt_hypervisor_attach_vgpu() argument
86 return intel_gvt_host.mpt->attach_vgpu(vgpu, &vgpu->handle); in intel_gvt_hypervisor_attach_vgpu()
96 static inline void intel_gvt_hypervisor_detach_vgpu(struct intel_vgpu *vgpu) in intel_gvt_hypervisor_detach_vgpu() argument
102 intel_gvt_host.mpt->detach_vgpu(vgpu); in intel_gvt_hypervisor_detach_vgpu()
116 static inline int intel_gvt_hypervisor_inject_msi(struct intel_vgpu *vgpu) in intel_gvt_hypervisor_inject_msi() argument
118 unsigned long offset = vgpu->gvt->device_info.msi_cap_offset; in intel_gvt_hypervisor_inject_msi()
123 control = *(u16 *)(vgpu_cfg_space(vgpu) + MSI_CAP_CONTROL(offset)); in intel_gvt_hypervisor_inject_msi()
124 addr = *(u32 *)(vgpu_cfg_space(vgpu) + MSI_CAP_ADDRESS(offset)); in intel_gvt_hypervisor_inject_msi()
125 data = *(u16 *)(vgpu_cfg_space(vgpu) + MSI_CAP_DATA(offset)); in intel_gvt_hypervisor_inject_msi()
134 trace_inject_msi(vgpu->id, addr, data); in intel_gvt_hypervisor_inject_msi()
[all …]
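
Every intel_gvt_hypervisor_*() wrapper in mpt.h has the same shape: check that the hypervisor backend implements the hook, then forward through intel_gvt_host.mpt. A minimal sketch of that pattern; the ->example_op() callback is invented for illustration and is not a real MPT operation:

static inline int intel_gvt_hypervisor_example_op(struct intel_vgpu *vgpu)
{
	if (!intel_gvt_host.mpt->example_op)	/* optional hook */
		return 0;

	return intel_gvt_host.mpt->example_op(vgpu);
}
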
sched_policy.c
37 static bool vgpu_has_pending_workload(struct intel_vgpu *vgpu) in vgpu_has_pending_workload() argument
42 for_each_engine(engine, vgpu->gvt->gt, i) { in vgpu_has_pending_workload()
43 if (!list_empty(workload_q_head(vgpu, engine))) in vgpu_has_pending_workload()
55 struct intel_vgpu *vgpu; member
75 static void vgpu_update_timeslice(struct intel_vgpu *vgpu, ktime_t cur_time) in vgpu_update_timeslice() argument
80 if (!vgpu || vgpu == vgpu->gvt->idle_vgpu) in vgpu_update_timeslice()
83 vgpu_data = vgpu->sched_data; in vgpu_update_timeslice()
179 struct intel_vgpu *vgpu = NULL; in find_busy_vgpu() local
187 if (!vgpu_has_pending_workload(vgpu_data->vgpu)) in find_busy_vgpu()
192 vgpu = vgpu_data->vgpu; in find_busy_vgpu()
[all …]
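
The scheduler decides whether a vGPU is runnable by walking its per-engine workload queues, as vgpu_has_pending_workload() does. A minimal sketch of a counting variant; count_pending_workloads() is hypothetical and ignores the locking the real scheduler takes:

static int count_pending_workloads(struct intel_vgpu *vgpu)
{
	struct intel_engine_cs *engine;
	struct intel_vgpu_workload *w;
	enum intel_engine_id i;
	int n = 0;

	for_each_engine(engine, vgpu->gvt->gt, i)
		list_for_each_entry(w, workload_q_head(vgpu, engine), list)
			n++;

	return n;
}
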
gtt.c
54 bool intel_gvt_ggtt_validate_range(struct intel_vgpu *vgpu, u64 addr, u32 size) in intel_gvt_ggtt_validate_range() argument
57 return vgpu_gmadr_is_valid(vgpu, addr); in intel_gvt_ggtt_validate_range()
59 if (vgpu_gmadr_is_aperture(vgpu, addr) && in intel_gvt_ggtt_validate_range()
60 vgpu_gmadr_is_aperture(vgpu, addr + size - 1)) in intel_gvt_ggtt_validate_range()
62 else if (vgpu_gmadr_is_hidden(vgpu, addr) && in intel_gvt_ggtt_validate_range()
63 vgpu_gmadr_is_hidden(vgpu, addr + size - 1)) in intel_gvt_ggtt_validate_range()
72 int intel_gvt_ggtt_gmadr_g2h(struct intel_vgpu *vgpu, u64 g_addr, u64 *h_addr) in intel_gvt_ggtt_gmadr_g2h() argument
74 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_gvt_ggtt_gmadr_g2h()
76 if (drm_WARN(&i915->drm, !vgpu_gmadr_is_valid(vgpu, g_addr), in intel_gvt_ggtt_gmadr_g2h()
80 if (vgpu_gmadr_is_aperture(vgpu, g_addr)) in intel_gvt_ggtt_gmadr_g2h()
[all …]
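
intel_gvt_ggtt_validate_range() checks that a guest graphics address range falls inside the vGPU's aperture or hidden GM partition, and intel_gvt_ggtt_gmadr_g2h() rebases it to the host view. A minimal validate-then-translate sketch; g2h_checked() is a hypothetical helper:

static int g2h_checked(struct intel_vgpu *vgpu, u64 g_addr, u64 *h_addr)
{
	if (!intel_gvt_ggtt_validate_range(vgpu, g_addr, 1))
		return -EINVAL;

	return intel_gvt_ggtt_gmadr_g2h(vgpu, g_addr, h_addr);
}
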
kvmgt.c
66 size_t (*rw)(struct intel_vgpu *vgpu, char *buf,
68 void (*release)(struct intel_vgpu *vgpu,
93 struct intel_vgpu *vgpu; member
102 struct intel_vgpu *vgpu; member
112 struct intel_vgpu *vgpu; member
137 static inline struct kvmgt_vdev *kvmgt_vdev(struct intel_vgpu *vgpu) in kvmgt_vdev() argument
139 return intel_vgpu_vdev(vgpu); in kvmgt_vdev()
151 static void gvt_unpin_guest_page(struct intel_vgpu *vgpu, unsigned long gfn, in gvt_unpin_guest_page() argument
154 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in gvt_unpin_guest_page()
155 struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu); in gvt_unpin_guest_page()
[all …]
dmabuf.c
39 static int vgpu_pin_dma_address(struct intel_vgpu *vgpu, in vgpu_pin_dma_address() argument
45 if (intel_gvt_hypervisor_dma_pin_guest_page(vgpu, dma_addr)) in vgpu_pin_dma_address()
51 static void vgpu_unpin_dma_address(struct intel_vgpu *vgpu, in vgpu_unpin_dma_address() argument
54 intel_gvt_hypervisor_dma_unmap_guest_page(vgpu, dma_addr); in vgpu_unpin_dma_address()
61 struct intel_vgpu *vgpu; in vgpu_gem_get_pages() local
73 vgpu = fb_info->obj->vgpu; in vgpu_gem_get_pages()
74 if (drm_WARN_ON(&dev_priv->drm, !vgpu)) in vgpu_gem_get_pages()
92 if (vgpu_pin_dma_address(vgpu, PAGE_SIZE, dma_addr)) { in vgpu_gem_get_pages()
111 vgpu_unpin_dma_address(vgpu, dma_addr); in vgpu_gem_get_pages()
129 struct intel_vgpu *vgpu = obj->vgpu; in vgpu_gem_put_pages() local
[all …]
interrupt.c
51 static void update_upstream_irq(struct intel_vgpu *vgpu,
175 int intel_vgpu_reg_imr_handler(struct intel_vgpu *vgpu, in intel_vgpu_reg_imr_handler() argument
178 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_reg_imr_handler()
182 trace_write_ir(vgpu->id, "IMR", reg, imr, vgpu_vreg(vgpu, reg), in intel_vgpu_reg_imr_handler()
183 (vgpu_vreg(vgpu, reg) ^ imr)); in intel_vgpu_reg_imr_handler()
185 vgpu_vreg(vgpu, reg) = imr; in intel_vgpu_reg_imr_handler()
187 ops->check_pending_irq(vgpu); in intel_vgpu_reg_imr_handler()
205 int intel_vgpu_reg_master_irq_handler(struct intel_vgpu *vgpu, in intel_vgpu_reg_master_irq_handler() argument
208 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_reg_master_irq_handler()
211 u32 virtual_ier = vgpu_vreg(vgpu, reg); in intel_vgpu_reg_master_irq_handler()
[all …]
execlist.c
93 struct intel_vgpu *vgpu = execlist->vgpu; in emulate_execlist_status() local
98 status.ldw = vgpu_vreg(vgpu, status_reg); in emulate_execlist_status()
99 status.udw = vgpu_vreg(vgpu, status_reg + 4); in emulate_execlist_status()
117 vgpu_vreg(vgpu, status_reg) = status.ldw; in emulate_execlist_status()
118 vgpu_vreg(vgpu, status_reg + 4) = status.udw; in emulate_execlist_status()
121 vgpu->id, status_reg, status.ldw, status.udw); in emulate_execlist_status()
128 struct intel_vgpu *vgpu = execlist->vgpu; in emulate_csb_update() local
139 ctx_status_ptr.dw = vgpu_vreg(vgpu, ctx_status_ptr_reg); in emulate_csb_update()
152 vgpu_vreg(vgpu, offset) = status->ldw; in emulate_csb_update()
153 vgpu_vreg(vgpu, offset + 4) = status->udw; in emulate_csb_update()
[all …]
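
emulate_execlist_status() treats the 64-bit execlist status as two adjacent 32-bit vregs (ldw/udw). A minimal sketch of composing such a pair into one value; vgpu_vreg_read64() is a hypothetical convenience wrapper:

static u64 vgpu_vreg_read64(struct intel_vgpu *vgpu, unsigned int reg)
{
	/* low dword at reg, high dword at reg + 4, as in emulate_execlist_status() */
	return (u64)vgpu_vreg(vgpu, reg) |
	       ((u64)vgpu_vreg(vgpu, reg + 4) << 32);
}
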
handlers.c
74 static void read_vreg(struct intel_vgpu *vgpu, unsigned int offset, in read_vreg() argument
77 memcpy(p_data, &vgpu_vreg(vgpu, offset), bytes); in read_vreg()
80 static void write_vreg(struct intel_vgpu *vgpu, unsigned int offset, in write_vreg() argument
83 memcpy(&vgpu_vreg(vgpu, offset), p_data, bytes); in write_vreg()
175 void enter_failsafe_mode(struct intel_vgpu *vgpu, int reason) in enter_failsafe_mode() argument
190 pr_err("Now vgpu %d will enter failsafe mode.\n", vgpu->id); in enter_failsafe_mode()
191 vgpu->failsafe = true; in enter_failsafe_mode()
194 static int sanitize_fence_mmio_access(struct intel_vgpu *vgpu, in sanitize_fence_mmio_access() argument
197 unsigned int max_fence = vgpu_fence_sz(vgpu); in sanitize_fence_mmio_access()
207 if (!vgpu->pv_notified) in sanitize_fence_mmio_access()
[all …]
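
Most MMIO handlers in handlers.c follow one shape: mirror the guest access into the vreg with read_vreg()/write_vreg(), then emulate any register-specific side effects. A minimal sketch of a write handler in that shape; sample_mmio_write() is hypothetical and is not one of the registered handlers:

static int sample_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
			     void *p_data, unsigned int bytes)
{
	write_vreg(vgpu, offset, p_data, bytes);

	/* register-specific side-effect emulation would go here */

	return 0;
}
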
page_track.c
35 struct intel_vgpu *vgpu, unsigned long gfn) in intel_vgpu_find_page_track() argument
37 return radix_tree_lookup(&vgpu->page_track_tree, gfn); in intel_vgpu_find_page_track()
50 int intel_vgpu_register_page_track(struct intel_vgpu *vgpu, unsigned long gfn, in intel_vgpu_register_page_track() argument
56 track = intel_vgpu_find_page_track(vgpu, gfn); in intel_vgpu_register_page_track()
67 ret = radix_tree_insert(&vgpu->page_track_tree, gfn, track); in intel_vgpu_register_page_track()
82 void intel_vgpu_unregister_page_track(struct intel_vgpu *vgpu, in intel_vgpu_unregister_page_track() argument
87 track = radix_tree_delete(&vgpu->page_track_tree, gfn); in intel_vgpu_unregister_page_track()
90 intel_gvt_hypervisor_disable_page_track(vgpu, gfn); in intel_vgpu_unregister_page_track()
103 int intel_vgpu_enable_page_track(struct intel_vgpu *vgpu, unsigned long gfn) in intel_vgpu_enable_page_track() argument
108 track = intel_vgpu_find_page_track(vgpu, gfn); in intel_vgpu_enable_page_track()
[all …]
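
The usual page-track flow is to register a tracker for a guest page frame and then ask the hypervisor to write-protect it, so guest writes trap back into the handler. A minimal sketch assuming the gvt_page_track_handler_t callback type from page_track.h; track_guest_page() is a hypothetical helper (the PPGTT shadow code in gtt.c is the real user of this API):

static int track_guest_page(struct intel_vgpu *vgpu, unsigned long gfn,
			    gvt_page_track_handler_t handler, void *priv)
{
	int ret;

	ret = intel_vgpu_register_page_track(vgpu, gfn, handler, priv);
	if (ret)
		return ret;

	return intel_vgpu_enable_page_track(vgpu, gfn);
}
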
scheduler.c
83 struct drm_i915_private *dev_priv = workload->vgpu->gvt->gt->i915; in sr_oa_regs()
125 struct intel_vgpu *vgpu = workload->vgpu; in populate_shadow_context() local
126 struct intel_gvt *gvt = vgpu->gvt; in populate_shadow_context()
134 struct intel_vgpu_submission *s = &vgpu->submission; in populate_shadow_context()
148 intel_gvt_hypervisor_read_gpa(vgpu, workload->ring_context_gpa \ in populate_shadow_context()
151 intel_gvt_hypervisor_read_gpa(vgpu, workload->ring_context_gpa \ in populate_shadow_context()
168 intel_gvt_hypervisor_read_gpa(vgpu, in populate_shadow_context()
213 context_gpa = intel_vgpu_gma_to_gpa(vgpu->gtt.ggtt_mm, in populate_shadow_context()
235 intel_gvt_hypervisor_read_gpa(vgpu, gpa_base, dst, gpa_size); in populate_shadow_context()
249 static void save_ring_hw_state(struct intel_vgpu *vgpu, in save_ring_hw_state() argument
[all …]
debugfs.c
29 struct intel_vgpu *vgpu; member
66 vreg = vgpu_vreg(param->vgpu, offset); in mmio_diff_handler()
86 struct intel_vgpu *vgpu = s->private; in vgpu_mmio_diff_show() local
87 struct intel_gvt *gvt = vgpu->gvt; in vgpu_mmio_diff_show()
89 .vgpu = vgpu, in vgpu_mmio_diff_show()
129 struct intel_vgpu *vgpu = (struct intel_vgpu *)data; in vgpu_scan_nonprivbb_get() local
131 *val = vgpu->scan_nonprivbb; in vgpu_scan_nonprivbb_get()
144 struct intel_vgpu *vgpu = (struct intel_vgpu *)data; in vgpu_scan_nonprivbb_set() local
146 vgpu->scan_nonprivbb = val; in vgpu_scan_nonprivbb_set()
158 void intel_gvt_debugfs_add_vgpu(struct intel_vgpu *vgpu) in intel_gvt_debugfs_add_vgpu() argument
[all …]
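
vgpu_scan_nonprivbb_get()/_set() form a standard debugfs simple-attribute pair. A minimal sketch of how such a pair is typically exposed, assuming DEFINE_SIMPLE_ATTRIBUTE from <linux/fs.h> and a per-vGPU debugfs directory; the fops symbol and file name here are assumptions for illustration:

/* Illustrative sketch: bind the get/set pair to a file operations object ... */
DEFINE_SIMPLE_ATTRIBUTE(vgpu_scan_nonprivbb_fops,
			vgpu_scan_nonprivbb_get,
			vgpu_scan_nonprivbb_set, "0x%llx\n");

/* ... then expose it, e.g. from intel_gvt_debugfs_add_vgpu(): */
debugfs_create_file("scan_nonprivbb", 0644, vgpu->debugfs, vgpu,
		    &vgpu_scan_nonprivbb_fops);
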
fb_decoder.c
146 static u32 intel_vgpu_get_stride(struct intel_vgpu *vgpu, int pipe, in intel_vgpu_get_stride() argument
149 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in intel_vgpu_get_stride()
151 u32 stride_reg = vgpu_vreg_t(vgpu, DSPSTRIDE(pipe)) & stride_mask; in intel_vgpu_get_stride()
182 static int get_active_pipe(struct intel_vgpu *vgpu) in get_active_pipe() argument
187 if (pipe_is_enabled(vgpu, i)) in get_active_pipe()
202 int intel_vgpu_decode_primary_plane(struct intel_vgpu *vgpu, in intel_vgpu_decode_primary_plane() argument
205 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in intel_vgpu_decode_primary_plane()
209 pipe = get_active_pipe(vgpu); in intel_vgpu_decode_primary_plane()
213 val = vgpu_vreg_t(vgpu, DSPCNTR(pipe)); in intel_vgpu_decode_primary_plane()
247 plane->base = vgpu_vreg_t(vgpu, DSPSURF(pipe)) & I915_GTT_PAGE_MASK; in intel_vgpu_decode_primary_plane()
[all …]
opregion.c
220 int intel_vgpu_init_opregion(struct intel_vgpu *vgpu) in intel_vgpu_init_opregion() argument
227 gvt_dbg_core("init vgpu%d opregion\n", vgpu->id); in intel_vgpu_init_opregion()
228 vgpu_opregion(vgpu)->va = (void *)__get_free_pages(GFP_KERNEL | in intel_vgpu_init_opregion()
231 if (!vgpu_opregion(vgpu)->va) { in intel_vgpu_init_opregion()
237 buf = (u8 *)vgpu_opregion(vgpu)->va; in intel_vgpu_init_opregion()
258 static int map_vgpu_opregion(struct intel_vgpu *vgpu, bool map) in map_vgpu_opregion() argument
264 mfn = intel_gvt_hypervisor_virt_to_mfn(vgpu_opregion(vgpu)->va in map_vgpu_opregion()
270 ret = intel_gvt_hypervisor_map_gfn_to_mfn(vgpu, in map_vgpu_opregion()
271 vgpu_opregion(vgpu)->gfn[i], in map_vgpu_opregion()
280 vgpu_opregion(vgpu)->mapped = map; in map_vgpu_opregion()
[all …]
display.h
46 #define intel_vgpu_port(vgpu, port) \ argument
47 (&(vgpu->display.ports[port]))
49 #define intel_vgpu_has_monitor_on_port(vgpu, port) \ argument
50 (intel_vgpu_port(vgpu, port)->edid && \
51 intel_vgpu_port(vgpu, port)->edid->data_valid)
53 #define intel_vgpu_port_is_dp(vgpu, port) \ argument
54 ((intel_vgpu_port(vgpu, port)->type == GVT_DP_A) || \
55 (intel_vgpu_port(vgpu, port)->type == GVT_DP_B) || \
56 (intel_vgpu_port(vgpu, port)->type == GVT_DP_C) || \
57 (intel_vgpu_port(vgpu, port)->type == GVT_DP_D))
[all …]
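
The display.h macros keep per-port checks to one-liners over the vGPU's emulated port array. A minimal sketch combining two of them; port_needs_aux_emulation() is a hypothetical predicate:

static inline bool port_needs_aux_emulation(struct intel_vgpu *vgpu, int port)
{
	/* a DP port with valid EDID behind it, in the guest's view */
	return intel_vgpu_has_monitor_on_port(vgpu, port) &&
	       intel_vgpu_port_is_dp(vgpu, port);
}
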
gtt.h
54 struct intel_vgpu *vgpu);
60 struct intel_vgpu *vgpu);
144 struct intel_vgpu *vgpu; member
175 struct intel_vgpu_mm *intel_vgpu_create_ppgtt_mm(struct intel_vgpu *vgpu,
212 int intel_vgpu_init_gtt(struct intel_vgpu *vgpu);
213 void intel_vgpu_clean_gtt(struct intel_vgpu *vgpu);
214 void intel_vgpu_reset_ggtt(struct intel_vgpu *vgpu, bool invalidate_old);
215 void intel_vgpu_invalidate_ppgtt(struct intel_vgpu *vgpu);
220 struct intel_vgpu_mm *intel_gvt_find_ppgtt_mm(struct intel_vgpu *vgpu,
237 struct intel_vgpu *vgpu; member
[all …]
sched_policy.h
43 int (*init_vgpu)(struct intel_vgpu *vgpu);
44 void (*clean_vgpu)(struct intel_vgpu *vgpu);
45 void (*start_schedule)(struct intel_vgpu *vgpu);
46 void (*stop_schedule)(struct intel_vgpu *vgpu);
55 int intel_vgpu_init_sched_policy(struct intel_vgpu *vgpu);
57 void intel_vgpu_clean_sched_policy(struct intel_vgpu *vgpu);
59 void intel_vgpu_start_schedule(struct intel_vgpu *vgpu);
61 void intel_vgpu_stop_schedule(struct intel_vgpu *vgpu);
mmio.h
83 int intel_vgpu_init_mmio(struct intel_vgpu *vgpu);
84 void intel_vgpu_reset_mmio(struct intel_vgpu *vgpu, bool dmlr);
85 void intel_vgpu_clean_mmio(struct intel_vgpu *vgpu);
87 int intel_vgpu_gpa_to_mmio_offset(struct intel_vgpu *vgpu, u64 gpa);
89 int intel_vgpu_emulate_mmio_read(struct intel_vgpu *vgpu, u64 pa,
91 int intel_vgpu_emulate_mmio_write(struct intel_vgpu *vgpu, u64 pa,
94 int intel_vgpu_default_mmio_read(struct intel_vgpu *vgpu, unsigned int offset,
96 int intel_vgpu_default_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
102 int intel_vgpu_mmio_reg_rw(struct intel_vgpu *vgpu, unsigned int offset,
105 int intel_vgpu_mask_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
scheduler.h
81 struct intel_vgpu *vgpu; member
131 #define workload_q_head(vgpu, e) \ argument
132 (&(vgpu)->submission.workload_q_head[(e)->id])
140 void intel_gvt_wait_vgpu_idle(struct intel_vgpu *vgpu);
142 int intel_vgpu_setup_submission(struct intel_vgpu *vgpu);
144 void intel_vgpu_reset_submission(struct intel_vgpu *vgpu,
147 void intel_vgpu_clean_submission(struct intel_vgpu *vgpu);
149 int intel_vgpu_select_submission_ops(struct intel_vgpu *vgpu,
157 intel_vgpu_create_workload(struct intel_vgpu *vgpu,
163 void intel_vgpu_clean_workloads(struct intel_vgpu *vgpu,
