Searched refs:ring_id (Results 1 – 25 of 75) sorted by relevance

/drivers/net/wireless/ath/ath11k/
dp_tx.c
120 ti.ring_id = 0; in ath11k_dp_tx()
122 ti.ring_id = ring_selector % DP_TCL_NUM_RING_MAX; in ath11k_dp_tx()
124 ring_map |= BIT(ti.ring_id); in ath11k_dp_tx()
126 tx_ring = &dp->tx_ring[ti.ring_id]; in ath11k_dp_tx()
220 hal_ring_id = tx_ring->tcl_data_ring.ring_id; in ath11k_dp_tx()
233 ab->soc_stats.tx_err.desc_na[ti.ring_id]++; in ath11k_dp_tx()
256 ath11k_dp_shadow_start_timer(ab, tcl_ring, &dp->tx_ring_timer[ti.ring_id]); in ath11k_dp_tx()
527 void ath11k_dp_tx_completion_handler(struct ath11k_base *ab, int ring_id) in ath11k_dp_tx_completion_handler() argument
531 int hal_ring_id = dp->tx_ring[ring_id].tcl_comp_ring.ring_id; in ath11k_dp_tx_completion_handler()
535 struct dp_tx_ring *tx_ring = &dp->tx_ring[ring_id]; in ath11k_dp_tx_completion_handler()
[all …]
hal.c
356 val = FIELD_PREP(HAL_REO1_RING_ID_RING_ID, srng->ring_id) | in ath11k_hal_srng_dst_hw_init()
463 if (srng->ring_id != HAL_SRNG_RING_ID_WBM_IDLE_LINK) { in ath11k_hal_srng_src_hw_init()
512 int ring_id; in ath11k_hal_srng_get_ring_id() local
519 ring_id = srng_config->start_ring_id + ring_num; in ath11k_hal_srng_get_ring_id()
521 ring_id += mac_id * HAL_SRNG_RINGS_PER_LMAC; in ath11k_hal_srng_get_ring_id()
523 if (WARN_ON(ring_id >= HAL_SRNG_RING_ID_MAX)) in ath11k_hal_srng_get_ring_id()
526 return ring_id; in ath11k_hal_srng_get_ring_id()
968 int ring_id; in ath11k_hal_srng_setup() local
973 ring_id = ath11k_hal_srng_get_ring_id(ab, type, ring_num, mac_id); in ath11k_hal_srng_setup()
974 if (ring_id < 0) in ath11k_hal_srng_setup()
[all …]
dp_rx.c
294 srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id]; in ath11k_dp_rxbufs_replenish()
792 srng = &ab->hal.srng_list[dp->wbm_desc_rel_ring.ring_id]; in ath11k_dp_rx_link_desc_return()
1601 u8 pdev_id, ring_type, ring_id, pdev_idx; in ath11k_htt_backpressure_event_handler() local
1608 ring_id = FIELD_GET(HTT_BACKPRESSURE_EVENT_RING_ID_M, *data); in ath11k_htt_backpressure_event_handler()
1618 pdev_id, ring_type, ring_id, hp, tp, backpressure_time); in ath11k_htt_backpressure_event_handler()
1621 if (ring_id >= HTT_SW_UMAC_RING_IDX_MAX) in ath11k_htt_backpressure_event_handler()
1624 bp_stats = &ab->soc_stats.bp_stats.umac_ring_bp_stats[ring_id]; in ath11k_htt_backpressure_event_handler()
1628 if (ring_id >= HTT_SW_LMAC_RING_IDX_MAX || pdev_idx >= MAX_RADIOS) in ath11k_htt_backpressure_event_handler()
1631 bp_stats = &ab->soc_stats.bp_stats.lmac_ring_bp_stats[ring_id][pdev_idx]; in ath11k_htt_backpressure_event_handler()
2493 int *quota, int ring_id) in ath11k_dp_rx_process_received_packets() argument
[all …]
dp_tx.h
21 void ath11k_dp_tx_completion_handler(struct ath11k_base *ab, int ring_id);
35 int ath11k_dp_tx_htt_rx_filter_setup(struct ath11k_base *ab, u32 ring_id,
dp.c
302 ring->ring_id = ret; in ath11k_dp_srng_setup()
389 srng = &ab->hal.srng_list[dp->tx_ring[i].tcl_data_ring.ring_id]; in ath11k_dp_srng_common_setup()
394 dp->tx_ring[i].tcl_data_ring.ring_id); in ath11k_dp_srng_common_setup()
427 srng = &ab->hal.srng_list[dp->reo_cmd_ring.ring_id]; in ath11k_dp_srng_common_setup()
432 dp->reo_cmd_ring.ring_id); in ath11k_dp_srng_common_setup()
1046 srng = &ab->hal.srng_list[dp->wbm_idle_ring.ring_id]; in ath11k_dp_alloc()
1097 struct hal_srng *srng = &ab->hal.srng_list[update_timer->ring_id]; in ath11k_dp_shadow_timer_handler()
1152 u32 interval, u32 ring_id) in ath11k_dp_shadow_init_timer() argument
1160 update_timer->ring_id = ring_id; in ath11k_dp_shadow_init_timer()
dbring.c
21 srng = &ab->hal.srng_list[ring->refill_srng.ring_id]; in ath11k_dbring_bufs_replenish()
82 srng = &ar->ab->hal.srng_list[ring->refill_srng.ring_id]; in ath11k_dbring_fill_bufs()
167 srng = &ab->hal.srng_list[ring->refill_srng.ring_id]; in ath11k_dbring_buf_setup()
283 srng = &ab->hal.srng_list[ring->refill_srng.ring_id]; in ath11k_dbring_buffer_release_event()
debugfs.c
606 u32 enable, rx_filter = 0, ring_id; in ath11k_write_extd_rx_stats() local
651 ring_id = ar->dp.rx_mon_status_refill_ring[i].refill_buf_ring.ring_id; in ath11k_write_extd_rx_stats()
652 ret = ath11k_dp_tx_htt_rx_filter_setup(ar->ab, ring_id, ar->dp.mac_id, in ath11k_write_extd_rx_stats()
934 u32 rx_filter = 0, ring_id, filter, mode; in ath11k_write_pktlog_filter() local
1018 ring_id = ar->dp.rx_mon_status_refill_ring[i].refill_buf_ring.ring_id; in ath11k_write_pktlog_filter()
1019 ret = ath11k_dp_tx_htt_rx_filter_setup(ab, ring_id, in ath11k_write_pktlog_filter()
/drivers/gpu/drm/amd/amdkfd/
cik_event_interrupt.c
56 tmp_ihre->ring_id &= 0x000000ff; in cik_event_interrupt_isr()
57 tmp_ihre->ring_id |= vmid << 8; in cik_event_interrupt_isr()
58 tmp_ihre->ring_id |= pasid << 16; in cik_event_interrupt_isr()
66 vmid = (ihre->ring_id & 0x0000ff00) >> 8; in cik_event_interrupt_isr()
72 pasid = (ihre->ring_id & 0xffff0000) >> 16; in cik_event_interrupt_isr()
93 unsigned int vmid = (ihre->ring_id & 0x0000ff00) >> 8; in cik_event_interrupt_wq()
94 u32 pasid = (ihre->ring_id & 0xffff0000) >> 16; in cik_event_interrupt_wq()
kfd_int_process_v9.c
111 uint16_t ring_id = SOC15_RING_ID_FROM_IH_ENTRY(ih_ring_entry); in event_interrupt_wq_v9() local
117 info.prot_valid = ring_id & 0x08; in event_interrupt_wq_v9()
118 info.prot_read = ring_id & 0x10; in event_interrupt_wq_v9()
119 info.prot_write = ring_id & 0x20; in event_interrupt_wq_v9()
cik_int.h
31 uint32_t ring_id; member
/drivers/net/wireless/ath/wil6210/
txrx_edma.c
108 int ring_id = wil_find_free_sring(wil); in wil_tx_init_edma() local
120 status_ring_size, ring_id); in wil_tx_init_edma()
122 if (ring_id < 0) in wil_tx_init_edma()
123 return ring_id; in wil_tx_init_edma()
128 sring = &wil->srings[ring_id]; in wil_tx_init_edma()
137 rc = wil_wmi_tx_sring_cfg(wil, ring_id); in wil_tx_init_edma()
142 wil->tx_sring_idx = ring_id; in wil_tx_init_edma()
344 u16 ring_id) in wil_init_rx_sring() argument
346 struct wil_status_ring *sring = &wil->srings[ring_id]; in wil_init_rx_sring()
350 status_ring_size, ring_id); in wil_init_rx_sring()
[all …]
trace.h
260 __field(u8, ring_id)
268 __entry->ring_id = msg->ring_id;
274 __entry->ring_id, __entry->index, __entry->len,
wmi.c
2793 .ring_id = ringid, in wmi_addba()
2808 .ring_id = ringid, in wmi_delba_tx()
3717 int wil_wmi_tx_sring_cfg(struct wil6210_priv *wil, int ring_id) in wil_wmi_tx_sring_cfg() argument
3721 struct wil_status_ring *sring = &wil->srings[ring_id]; in wil_wmi_tx_sring_cfg()
3735 cmd.ring_cfg.ring_id = ring_id; in wil_wmi_tx_sring_cfg()
3793 int wil_wmi_rx_sring_add(struct wil6210_priv *wil, u16 ring_id) in wil_wmi_rx_sring_add() argument
3797 struct wil_status_ring *sring = &wil->srings[ring_id]; in wil_wmi_rx_sring_add()
3802 .ring_id = ring_id, in wil_wmi_rx_sring_add()
3845 .ring_id = WIL_RX_DESC_RING_ID, in wil_wmi_rx_desc_ring_add()
3878 int wil_wmi_tx_desc_ring_add(struct wil6210_vif *vif, int ring_id, int cid, in wil_wmi_tx_desc_ring_add() argument
[all …]
txrx.c
1236 static int wil_tx_vring_modify(struct wil6210_vif *vif, int ring_id, int cid, in wil_tx_vring_modify() argument
1249 .ringid = ring_id, in wil_tx_vring_modify()
1267 struct wil_ring *vring = &wil->ring_tx[ring_id]; in wil_tx_vring_modify()
1268 struct wil_ring_tx_data *txdata = &wil->ring_tx_data[ring_id]; in wil_tx_vring_modify()
1270 wil_dbg_misc(wil, "vring_modify: ring %d cid %d tid %d\n", ring_id, in wil_tx_vring_modify()
1275 wil_err(wil, "Tx ring [%d] not allocated\n", ring_id); in wil_tx_vring_modify()
1279 if (wil->ring2cid_tid[ring_id][0] != cid || in wil_tx_vring_modify()
1280 wil->ring2cid_tid[ring_id][1] != tid) { in wil_tx_vring_modify()
1282 wil->ring2cid_tid[ring_id][0], in wil_tx_vring_modify()
1283 wil->ring2cid_tid[ring_id][1]); in wil_tx_vring_modify()
[all …]
/drivers/gpu/drm/i915/gvt/
trace.h
116 TP_PROTO(int id, char *type, int ring_id, int root_entry_type,
119 TP_ARGS(id, type, ring_id, root_entry_type, gma, gpa),
128 id, type, ring_id, root_entry_type, gma, gpa);
229 TP_PROTO(u8 vgpu_id, u8 ring_id, u32 ip_gma, u32 *cmd_va,
233 TP_ARGS(vgpu_id, ring_id, ip_gma, cmd_va, cmd_len, buf_type,
238 __field(u8, ring_id)
250 __entry->ring_id = ring_id;
263 __entry->ring_id,
mmio_context.c
167 int ring_id, i; in load_render_mocs() local
173 for (ring_id = 0; ring_id < cnt; ring_id++) { in load_render_mocs()
174 if (!HAS_ENGINE(engine->gt, ring_id)) in load_render_mocs()
177 offset.reg = regs[ring_id]; in load_render_mocs()
179 gen9_render_mocs.control_table[ring_id][i] = in load_render_mocs()
202 int ring_id = req->engine->id; in restore_context_mmio_for_inhibit() local
203 int count = gvt->engine_mmio_list.ctx_mmio_count[ring_id]; in restore_context_mmio_for_inhibit()
219 if (mmio->id != ring_id || !mmio->in_context) in restore_context_mmio_for_inhibit()
225 *(cs-2), *(cs-1), vgpu->id, ring_id); in restore_context_mmio_for_inhibit()
scheduler.c
137 int ring_id = workload->engine->id; in populate_shadow_context() local
188 if (s->last_ctx[ring_id].valid && in populate_shadow_context()
189 (s->last_ctx[ring_id].lrca == in populate_shadow_context()
191 (s->last_ctx[ring_id].ring_context_gpa == in populate_shadow_context()
195 s->last_ctx[ring_id].lrca = workload->ctx_desc.lrca; in populate_shadow_context()
196 s->last_ctx[ring_id].ring_context_gpa = workload->ring_context_gpa; in populate_shadow_context()
201 s->last_ctx[ring_id].valid = false; in populate_shadow_context()
240 s->last_ctx[ring_id].valid = true; in populate_shadow_context()
275 enum intel_engine_id ring_id = rq->engine->id; in shadow_context_status_change() local
282 scheduler->engine_owner[ring_id]) { in shadow_context_status_change()
[all …]
interrupt.h
232 int gvt_ring_id_to_pipe_control_notify_event(int ring_id);
233 int gvt_ring_id_to_mi_flush_dw_event(int ring_id);
234 int gvt_ring_id_to_mi_user_interrupt_event(int ring_id);
/drivers/soc/ti/
k3-ringacc.c
159 u32 ring_id; member
268 dev_dbg(dev, "dump ring: %d\n", ring->ring_id); in k3_ringacc_ring_dump()
375 ring->ring_id, in k3_ringacc_ring_reset_sci()
384 ret, ring->ring_id); in k3_ringacc_ring_reset_sci()
408 ring->ring_id, in k3_ringacc_ring_reconfig_qmode_sci()
417 ret, ring->ring_id); in k3_ringacc_ring_reconfig_qmode_sci()
435 ring->ring_id, occ); in k3_ringacc_ring_reset_dma()
488 ring->ring_id, in k3_ringacc_ring_free_sci()
497 ret, ring->ring_id); in k3_ringacc_ring_free_sci()
511 if (!test_bit(ring->ring_id, ringacc->rings_inuse)) in k3_ringacc_ring_free()
[all …]
/drivers/crypto/inside-secure/
safexcel_ring.c
180 int ring_id, in safexcel_add_cdesc() argument
189 cdesc = safexcel_ring_next_cwptr(priv, &priv->ring[ring_id].cdr, in safexcel_add_cdesc()
225 int ring_id, in safexcel_add_rdesc() argument
232 rdesc = safexcel_ring_next_rwptr(priv, &priv->ring[ring_id].rdr, in safexcel_add_rdesc()
/drivers/net/ethernet/intel/i40e/
i40e_debugfs.c
541 static void i40e_dbg_dump_desc(int cnt, int vsi_seid, int ring_id, int desc_n, in i40e_dbg_dump_desc() argument
568 if (ring_id >= vsi->num_queue_pairs || ring_id < 0) { in i40e_dbg_dump_desc()
569 dev_info(&pf->pdev->dev, "ring %d not found\n", ring_id); in i40e_dbg_dump_desc()
581 ring = kmemdup(vsi->rx_rings[ring_id], sizeof(*ring), GFP_KERNEL); in i40e_dbg_dump_desc()
584 ring = kmemdup(vsi->tx_rings[ring_id], sizeof(*ring), GFP_KERNEL); in i40e_dbg_dump_desc()
587 ring = kmemdup(vsi->xdp_rings[ring_id], sizeof(*ring), GFP_KERNEL); in i40e_dbg_dump_desc()
599 dev_info(&pf->pdev->dev, "VSI = %02i Rx ring = %02i\n", vsi_seid, ring_id); in i40e_dbg_dump_desc()
602 dev_info(&pf->pdev->dev, "VSI = %02i Tx ring = %02i\n", vsi_seid, ring_id); in i40e_dbg_dump_desc()
605 dev_info(&pf->pdev->dev, "VSI = %02i XDP ring = %02i\n", vsi_seid, ring_id); in i40e_dbg_dump_desc()
633 vsi_seid, ring_id, desc_n, in i40e_dbg_dump_desc()
[all …]
/drivers/crypto/qat/qat_common/
adf_transport_debug.c
146 int ring_id = *((int *)v) - 1; in adf_bank_show() local
147 struct adf_etr_ring_data *ring = &bank->rings[ring_id]; in adf_bank_show()
151 if (!(bank->ring_mask & 1 << ring_id)) in adf_bank_show()
/drivers/net/ethernet/apm/xgene/
xgene_enet_ring2.c
120 u32 ring_id; in xgene_enet_clr_desc_ring_id() local
122 ring_id = ring->id | OVERWRITE; in xgene_enet_clr_desc_ring_id()
123 xgene_enet_ring_wr32(ring, CSR_RING_ID, ring_id); in xgene_enet_clr_desc_ring_id()
xgene_enet_main.c
1216 enum xgene_enet_ring_cfgsize cfgsize, u32 ring_id) in xgene_enet_create_desc_ring() argument
1236 ring->id = ring_id; in xgene_enet_create_desc_ring()
1317 u16 ring_id, slots; in xgene_enet_create_desc_rings() local
1327 ring_id = xgene_enet_get_ring_id(RING_OWNER_CPU, cpu_bufnum++); in xgene_enet_create_desc_rings()
1330 ring_id); in xgene_enet_create_desc_rings()
1338 ring_id = xgene_enet_get_ring_id(owner, bp_bufnum++); in xgene_enet_create_desc_rings()
1341 ring_id); in xgene_enet_create_desc_rings()
1369 ring_id = xgene_enet_get_ring_id(owner, bp_bufnum++); in xgene_enet_create_desc_rings()
1372 ring_id); in xgene_enet_create_desc_rings()
1402 ring_id = xgene_enet_get_ring_id(owner, eth_bufnum++); in xgene_enet_create_desc_rings()
[all …]
/drivers/infiniband/hw/bnxt_re/
main.c
441 req.ring_id = cpu_to_le16(fw_ring_id); in bnxt_re_net_ring_free()
447 req.ring_id, rc); in bnxt_re_net_ring_free()
483 *fw_ring_id = le16_to_cpu(resp.ring_id); in bnxt_re_net_ring_alloc()
997 bnxt_re_net_ring_free(rdev, rdev->nq[i].ring_id, type); in bnxt_re_free_nq_res()
1061 rc = bnxt_re_net_ring_alloc(rdev, &rattr, &nq->ring_id); in bnxt_re_alloc_res()
1075 bnxt_re_net_ring_free(rdev, rdev->nq[i].ring_id, type); in bnxt_re_alloc_res()
1371 bnxt_re_net_ring_free(rdev, rdev->rcfw.creq.ring_id, type); in bnxt_re_dev_uninit()
1459 rc = bnxt_re_net_ring_alloc(rdev, &rattr, &creq->ring_id); in bnxt_re_dev_init()
1544 bnxt_re_net_ring_free(rdev, rdev->rcfw.creq.ring_id, type); in bnxt_re_dev_init()
