Searched refs:ring_id (Results 1 – 25 of 49) sorted by relevance


/drivers/gpu/drm/i915/gvt/
scheduler.c
59 int ring_id = workload->ring_id; in populate_shadow_context() local
62 shadow_ctx->engine[ring_id].state->obj; in populate_shadow_context()
69 gvt_dbg_sched("ring id %d workload lrca %x", ring_id, in populate_shadow_context()
72 context_page_num = gvt->dev_priv->engine[ring_id]->context_size; in populate_shadow_context()
76 if (IS_BROADWELL(gvt->dev_priv) && ring_id == RCS) in populate_shadow_context()
108 if (ring_id == RCS) { in populate_shadow_context()
141 enum intel_engine_id ring_id = req->engine->id; in shadow_context_status_change() local
147 scheduler->engine_owner[ring_id]) { in shadow_context_status_change()
149 intel_gvt_switch_mmio(scheduler->engine_owner[ring_id], in shadow_context_status_change()
150 NULL, ring_id); in shadow_context_status_change()
[all …]
render.c
41 int ring_id; member
147 static void handle_tlb_pending_event(struct intel_vgpu *vgpu, int ring_id) in handle_tlb_pending_event() argument
160 if (WARN_ON(ring_id >= ARRAY_SIZE(regs))) in handle_tlb_pending_event()
163 if (!test_and_clear_bit(ring_id, (void *)vgpu->tlb_handle_pending)) in handle_tlb_pending_event()
166 reg = _MMIO(regs[ring_id]); in handle_tlb_pending_event()
175 if (ring_id == RCS && (IS_SKYLAKE(dev_priv) || IS_KABYLAKE(dev_priv))) in handle_tlb_pending_event()
183 gvt_vgpu_err("timeout in invalidate ring (%d) tlb\n", ring_id); in handle_tlb_pending_event()
185 vgpu_vreg(vgpu, regs[ring_id]) = 0; in handle_tlb_pending_event()
189 gvt_dbg_core("invalidate TLB for ring %d\n", ring_id); in handle_tlb_pending_event()
192 static void load_mocs(struct intel_vgpu *vgpu, int ring_id) in load_mocs() argument
[all …]
execlist.c
42 #define execlist_ring_mmio(gvt, ring_id, offset) \ argument
43 (gvt->dev_priv->engine[ring_id]->mmio_base + (offset))
59 static int ring_id_to_context_switch_event(int ring_id) in ring_id_to_context_switch_event() argument
61 if (WARN_ON(ring_id < RCS || in ring_id_to_context_switch_event()
62 ring_id >= ARRAY_SIZE(context_switch_events))) in ring_id_to_context_switch_event()
65 return context_switch_events[ring_id]; in ring_id_to_context_switch_event()
99 int ring_id = execlist->ring_id; in emulate_execlist_status() local
101 ring_id, _EL_OFFSET_STATUS); in emulate_execlist_status()
134 int ring_id = execlist->ring_id; in emulate_csb_update() local
139 ctx_status_ptr_reg = execlist_ring_mmio(vgpu->gvt, ring_id, in emulate_csb_update()
[all …]
scheduler.h
82 int ring_id; member
124 #define workload_q_head(vgpu, ring_id) \ argument
125 (&(vgpu->workload_q_head[ring_id]))
129 workload_q_head(workload->vgpu, workload->ring_id)); \
131 scheduler.waitq[workload->ring_id]); \
trace.h
116 TP_PROTO(int id, char *type, int ring_id, int pt_level,
119 TP_ARGS(id, type, ring_id, pt_level, gma, gpa),
128 id, type, ring_id, pt_level, gma, gpa);
228 TP_PROTO(u8 vgpu_id, u8 ring_id, u32 ip_gma, u32 *cmd_va, u32 cmd_len,
231 TP_ARGS(vgpu_id, ring_id, ip_gma, cmd_va, cmd_len, buf_type),
235 __field(u8, ring_id)
244 __entry->ring_id = ring_id;
254 __entry->ring_id,
sched_policy.c
379 int ring_id; in intel_vgpu_stop_schedule() local
395 for (ring_id = 0; ring_id < I915_NUM_ENGINES; ring_id++) { in intel_vgpu_stop_schedule()
396 if (scheduler->engine_owner[ring_id] == vgpu) { in intel_vgpu_stop_schedule()
397 intel_gvt_switch_mmio(vgpu, NULL, ring_id); in intel_vgpu_stop_schedule()
398 scheduler->engine_owner[ring_id] = NULL; in intel_vgpu_stop_schedule()
cmd_parser.c
443 int ring_id; member
615 static inline u32 get_opcode(u32 cmd, int ring_id) in get_opcode() argument
619 d_info = ring_decode_info[ring_id][CMD_TYPE(cmd)]; in get_opcode()
627 unsigned int opcode, int ring_id) in find_cmd_entry() argument
633 (e->info->rings & (1 << ring_id))) in find_cmd_entry()
640 u32 cmd, int ring_id) in get_cmd_info() argument
644 opcode = get_opcode(cmd, ring_id); in get_cmd_info()
648 return find_cmd_entry(gvt, opcode, ring_id); in get_cmd_info()
656 static inline void print_opcode(u32 cmd, int ring_id) in print_opcode() argument
661 d_info = ring_decode_info[ring_id][CMD_TYPE(cmd)]; in print_opcode()
[all …]
interrupt.h
229 int gvt_ring_id_to_pipe_control_notify_event(int ring_id);
230 int gvt_ring_id_to_mi_flush_dw_event(int ring_id);
231 int gvt_ring_id_to_mi_user_interrupt_event(int ring_id);
execlist.h
174 int ring_id; member
183 int intel_vgpu_submit_execlist(struct intel_vgpu *vgpu, int ring_id);
render.h
40 struct intel_vgpu *next, int ring_id);
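
A note on the pattern: every hit in this directory uses ring_id as a direct index into per-engine state (engine[ring_id], workload_q_head[ring_id], engine_owner[ring_id]). Below is a minimal, self-contained sketch of the execlist_ring_mmio() idea from execlist.c: per-engine MMIO base plus register offset. The base addresses and the offset are placeholders, not the real i915 values.

#include <stdio.h>
#include <stdint.h>

#define NUM_RINGS		4
#define _EL_OFFSET_STATUS	0x234	/* placeholder register offset */

/* placeholder per-engine MMIO bases */
static const uint32_t mmio_base[NUM_RINGS] = { 0x2000, 0x12000, 0x1a000, 0x22000 };

/* mirrors execlist_ring_mmio(gvt, ring_id, offset): engine base + offset */
static uint32_t execlist_ring_mmio(int ring_id, uint32_t offset)
{
	return mmio_base[ring_id] + offset;
}

int main(void)
{
	for (int ring_id = 0; ring_id < NUM_RINGS; ring_id++)
		printf("ring %d: status reg at 0x%05x\n",
		       ring_id, execlist_ring_mmio(ring_id, _EL_OFFSET_STATUS));
	return 0;
}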
/drivers/crypto/inside-secure/
safexcel_ring.c
95 int ring_id, in safexcel_add_cdesc() argument
103 cdesc = safexcel_ring_next_wptr(priv, &priv->ring[ring_id].cdr); in safexcel_add_cdesc()
138 int ring_id, in safexcel_add_rdesc() argument
144 rdesc = safexcel_ring_next_wptr(priv, &priv->ring[ring_id].rdr); in safexcel_add_rdesc()
safexcel.h
555 int ring_id,
561 int ring_id,
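
Both safexcel hits use ring_id to select the per-ring command (cdr) or result (rdr) descriptor ring before advancing its write pointer. A skeleton of that selection, with an invented struct layout:

#include <stdio.h>

struct desc_ring { int wptr; };

struct ring_pair {
	struct desc_ring cdr;	/* command descriptor ring */
	struct desc_ring rdr;	/* result descriptor ring */
};

static struct ring_pair rings[4];

/* return the current command-ring write pointer for ring_id, then advance */
static int cdr_next_wptr(int ring_id)
{
	return rings[ring_id].cdr.wptr++;
}

int main(void)
{
	printf("ring 2 cdesc slot %d\n", cdr_next_wptr(2));	/* 0 */
	printf("ring 2 cdesc slot %d\n", cdr_next_wptr(2));	/* 1 */
	return 0;
}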
/drivers/gpu/drm/amd/amdkfd/
cik_event_interrupt.c
34 pasid = (ihre->ring_id & 0xffff0000) >> 16; in cik_event_interrupt_isr()
50 pasid = (ihre->ring_id & 0xffff0000) >> 16; in cik_event_interrupt_wq()
cik_int.h
31 uint32_t ring_id; member
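
The cik_event_interrupt.c hits recover the PASID from the top 16 bits of the interrupt ring entry's ring_id word. A standalone sketch of that decode; the struct layout mirrors the cik_int.h hit but should be treated as illustrative:

#include <stdio.h>
#include <stdint.h>

struct cik_ih_ring_entry {
	uint32_t source_id;
	uint32_t data;
	uint32_t ring_id;	/* PASID in bits 31:16 */
	uint32_t reserved;
};

/* same mask-and-shift as cik_event_interrupt_isr()/..._wq() above */
static uint16_t ihre_pasid(const struct cik_ih_ring_entry *ihre)
{
	return (ihre->ring_id & 0xffff0000) >> 16;
}

int main(void)
{
	struct cik_ih_ring_entry e = { .ring_id = 0x002a0000 };

	printf("pasid = %u\n", ihre_pasid(&e));	/* prints 42 */
	return 0;
}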
/drivers/net/ethernet/apm/xgene/
xgene_enet_ring2.c
132 u32 ring_id; in xgene_enet_clr_desc_ring_id() local
134 ring_id = ring->id | OVERWRITE; in xgene_enet_clr_desc_ring_id()
135 xgene_enet_ring_wr32(ring, CSR_RING_ID, ring_id); in xgene_enet_clr_desc_ring_id()
xgene_enet_main.c
1223 enum xgene_enet_ring_cfgsize cfgsize, u32 ring_id) in xgene_enet_create_desc_ring() argument
1243 ring->id = ring_id; in xgene_enet_create_desc_ring()
1324 u16 ring_id, slots; in xgene_enet_create_desc_rings() local
1334 ring_id = xgene_enet_get_ring_id(RING_OWNER_CPU, cpu_bufnum++); in xgene_enet_create_desc_rings()
1337 ring_id); in xgene_enet_create_desc_rings()
1345 ring_id = xgene_enet_get_ring_id(owner, bp_bufnum++); in xgene_enet_create_desc_rings()
1348 ring_id); in xgene_enet_create_desc_rings()
1376 ring_id = xgene_enet_get_ring_id(owner, bp_bufnum++); in xgene_enet_create_desc_rings()
1379 ring_id); in xgene_enet_create_desc_rings()
1409 ring_id = xgene_enet_get_ring_id(owner, eth_bufnum++); in xgene_enet_create_desc_rings()
[all …]
xgene_enet_hw.c
139 u32 ring_id; in xgene_enet_clr_desc_ring_id() local
141 ring_id = ring->id | OVERWRITE; in xgene_enet_clr_desc_ring_id()
142 xgene_enet_ring_wr32(ring, CSR_RING_ID, ring_id); in xgene_enet_clr_desc_ring_id()
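
xgene_enet_clr_desc_ring_id() appears twice (ring2.c and hw.c) with the same body: OR the ring's id with an OVERWRITE flag and write the result to the CSR_RING_ID register. A sketch of just that value computation, with a placeholder flag bit:

#include <stdio.h>
#include <stdint.h>

#define OVERWRITE 0x80000000u	/* placeholder flag bit */

int main(void)
{
	uint32_t id = 0x01c0;			/* ring->id */
	uint32_t ring_id = id | OVERWRITE;	/* value written to CSR_RING_ID */

	printf("CSR_RING_ID <- 0x%08x\n", ring_id);
	return 0;
}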
/drivers/crypto/qat/qat_common/
adf_transport_debug.c
212 int ring_id = *((int *)v) - 1; in adf_bank_show() local
213 struct adf_etr_ring_data *ring = &bank->rings[ring_id]; in adf_bank_show()
217 if (!(bank->ring_mask & 1 << ring_id)) in adf_bank_show()
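
adf_bank_show() derives ring_id from the seq_file iterator (hence the "- 1") and skips any ring whose bit is clear in the bank's ring_mask. The bit test in isolation:

#include <stdio.h>
#include <stdint.h>

/* one bit per ring: nonzero when ring_id is populated in this bank */
static int ring_present(uint32_t ring_mask, int ring_id)
{
	return ring_mask & (1u << ring_id);
}

int main(void)
{
	uint32_t mask = 0x05;	/* rings 0 and 2 enabled */

	for (int ring_id = 0; ring_id < 4; ring_id++)
		printf("ring %d: %s\n", ring_id,
		       ring_present(mask, ring_id) ? "present" : "skipped");
	return 0;
}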
/drivers/net/ethernet/intel/i40e/
i40e_debugfs.c
546 static void i40e_dbg_dump_desc(int cnt, int vsi_seid, int ring_id, int desc_n, in i40e_dbg_dump_desc() argument
560 if (ring_id >= vsi->num_queue_pairs || ring_id < 0) { in i40e_dbg_dump_desc()
561 dev_info(&pf->pdev->dev, "ring %d not found\n", ring_id); in i40e_dbg_dump_desc()
572 ? vsi->rx_rings[ring_id] : vsi->tx_rings[ring_id], in i40e_dbg_dump_desc()
579 vsi_seid, is_rx_ring ? "rx" : "tx", ring_id); in i40e_dbg_dump_desc()
606 vsi_seid, ring_id, desc_n, in i40e_dbg_dump_desc()
612 vsi_seid, ring_id, desc_n, in i40e_dbg_dump_desc()
971 int ring_id, desc_n; in i40e_dbg_command_write() local
974 &vsi_seid, &ring_id, &desc_n); in i40e_dbg_command_write()
975 i40e_dbg_dump_desc(cnt, vsi_seid, ring_id, in i40e_dbg_command_write()
[all …]
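
i40e_dbg_dump_desc() bound-checks ring_id against the VSI's queue-pair count before indexing rx_rings[]/tx_rings[]. A minimal version of that validation (the function name is mine, not the driver's):

#include <stdio.h>

/* reject an out-of-range ring id before touching the ring arrays */
static int ring_id_valid(int ring_id, int num_queue_pairs)
{
	if (ring_id >= num_queue_pairs || ring_id < 0) {
		fprintf(stderr, "ring %d not found\n", ring_id);
		return 0;
	}
	return 1;
}

int main(void)
{
	printf("%d\n", ring_id_valid(3, 8));	/* 1: in range */
	printf("%d\n", ring_id_valid(9, 8));	/* 0: rejected */
	return 0;
}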
/drivers/gpu/drm/amd/amdgpu/
amdgpu_trace.h
118 __field(unsigned, ring_id)
129 __entry->ring_id = iv->ring_id;
142 __entry->ring_id, __entry->vm_id,
amdgpu_ih.h
97 unsigned ring_id; member
gfx_v9_0.c
1206 static int gfx_v9_0_compute_ring_init(struct amdgpu_device *adev, int ring_id, in gfx_v9_0_compute_ring_init() argument
1211 struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id]; in gfx_v9_0_compute_ring_init()
1213 ring = &adev->gfx.compute_ring[ring_id]; in gfx_v9_0_compute_ring_init()
1222 ring->doorbell_index = (AMDGPU_DOORBELL_MEC_RING0 + ring_id) << 1; in gfx_v9_0_compute_ring_init()
1224 + (ring_id * GFX9_MEC_HPD_SIZE); in gfx_v9_0_compute_ring_init()
1243 int i, j, k, r, ring_id; in gfx_v9_0_sw_init() local
1319 ring_id = 0; in gfx_v9_0_sw_init()
1327 ring_id, in gfx_v9_0_sw_init()
1332 ring_id++; in gfx_v9_0_sw_init()
3990 me_id = (entry->ring_id & 0x0c) >> 2; in gfx_v9_0_eop_irq()
[all …]
gfx_v7_0.c
3064 static int gfx_v7_0_compute_queue_init(struct amdgpu_device *adev, int ring_id) in gfx_v7_0_compute_queue_init() argument
3069 struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id]; in gfx_v7_0_compute_queue_init()
4485 static int gfx_v7_0_compute_ring_init(struct amdgpu_device *adev, int ring_id, in gfx_v7_0_compute_ring_init() argument
4490 struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id]; in gfx_v7_0_compute_ring_init()
4499 ring->doorbell_index = AMDGPU_DOORBELL_MEC_RING0 + ring_id; in gfx_v7_0_compute_ring_init()
4520 int i, j, k, r, ring_id; in gfx_v7_0_sw_init() local
4586 ring_id = 0; in gfx_v7_0_sw_init()
4594 ring_id, in gfx_v7_0_sw_init()
4599 ring_id++; in gfx_v7_0_sw_init()
4974 me_id = (entry->ring_id & 0x0c) >> 2; in gfx_v7_0_eop_irq()
[all …]
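
Two recurring decodes in these amdgpu hits: the EOP IRQ handlers in gfx_v7_0.c and gfx_v9_0.c unpack the micro-engine id from bits 3:2 of the interrupt entry's ring_id, and gfx_v9_0_compute_ring_init() turns ring_id into a doorbell index by shifting. Both as plain bit arithmetic (MEC_RING0_DOORBELL is a placeholder name and value):

#include <stdio.h>
#include <stdint.h>

#define MEC_RING0_DOORBELL 0x10	/* placeholder base doorbell index */

/* me_id = (entry->ring_id & 0x0c) >> 2, as in gfx_v{7,9}_0_eop_irq() */
static unsigned int me_id(uint32_t ring_id)
{
	return (ring_id & 0x0c) >> 2;
}

/* gfx_v9 doorbells are two slots apart: (base + ring_id) << 1 */
static unsigned int doorbell_index(int ring_id)
{
	return (MEC_RING0_DOORBELL + ring_id) << 1;
}

int main(void)
{
	printf("ring_id 0x06 -> me %u\n", me_id(0x06));			/* me 1 */
	printf("compute ring 2 -> doorbell 0x%x\n", doorbell_index(2));	/* 0x24 */
	return 0;
}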
/drivers/infiniband/hw/bnxt_re/
main.c
235 req.ring_id = cpu_to_le16(fw_ring_id); in bnxt_re_net_ring_free()
241 "Failed to free HW ring:%d :%#x", req.ring_id, rc); in bnxt_re_net_ring_free()
280 *fw_ring_id = le16_to_cpu(resp.ring_id); in bnxt_re_net_ring_alloc()
699 bnxt_re_net_ring_free(rdev, rdev->nq[i].ring_id, lock_wait); in bnxt_re_free_nq_res()
755 &rdev->nq[i].ring_id); in bnxt_re_alloc_res()
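
These bnxt_re hits pass ring_id to firmware in little-endian form (cpu_to_le16() on the request, le16_to_cpu() on the response). A userspace analogue with the endian.h helpers, assuming a glibc-style environment where they are available:

#include <endian.h>
#include <stdio.h>
#include <stdint.h>

struct fw_req {
	uint16_t ring_id;	/* wire format: little-endian */
};

int main(void)
{
	uint16_t fw_ring_id = 0x01c2;
	struct fw_req req;

	req.ring_id = htole16(fw_ring_id);	/* ~ cpu_to_le16() */
	printf("host 0x%04x -> wire 0x%04x -> host 0x%04x\n",
	       fw_ring_id, req.ring_id, le16toh(req.ring_id));
	return 0;
}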
/drivers/net/wireless/broadcom/brcm80211/brcmfmac/
pcie.c
993 brcmf_pcie_alloc_dma_and_ring(struct brcmf_pciedev_info *devinfo, u32 ring_id, in brcmf_pcie_alloc_dma_and_ring() argument
1002 size = brcmf_ring_max_item[ring_id] * brcmf_ring_itemsize[ring_id]; in brcmf_pcie_alloc_dma_and_ring()
1010 brcmf_pcie_write_tcm16(devinfo, addr, brcmf_ring_max_item[ring_id]); in brcmf_pcie_alloc_dma_and_ring()
1012 brcmf_pcie_write_tcm16(devinfo, addr, brcmf_ring_itemsize[ring_id]); in brcmf_pcie_alloc_dma_and_ring()
1020 brcmf_commonring_config(&ring->commonring, brcmf_ring_max_item[ring_id], in brcmf_pcie_alloc_dma_and_ring()
1021 brcmf_ring_itemsize[ring_id], dma_buf); in brcmf_pcie_alloc_dma_and_ring()
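
brcmf_pcie_alloc_dma_and_ring() sizes each ring's DMA buffer from two lookup tables indexed by ring_id, max_item and itemsize, then programs both values into device TCM. The sizing step in isolation (table contents invented for the example):

#include <stdio.h>
#include <stdint.h>

/* per-ring geometry, indexed by ring_id (values invented) */
static const uint16_t ring_max_item[] = { 64, 64, 1024, 512, 512 };
static const uint16_t ring_itemsize[] = { 40, 40, 24, 16, 32 };

int main(void)
{
	for (uint32_t ring_id = 0; ring_id < 5; ring_id++) {
		uint32_t size = ring_max_item[ring_id] * ring_itemsize[ring_id];

		printf("ring %u: dma buffer %u bytes\n", ring_id, size);
	}
	return 0;
}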
