
Searched refs:tx_rings (Results 1 – 25 of 39) sorted by relevance

/kernel/linux/linux-5.10/drivers/net/ethernet/aquantia/atlantic/
aq_vec.c
23 unsigned int tx_rings; member
47 self->tx_rings > i; ++i, ring = self->ring[i]) { in aq_vec_poll()
124 self->tx_rings = 0; in aq_vec_alloc()
152 ++self->tx_rings; in aq_vec_ring_alloc()
186 self->tx_rings > i; ++i, ring = self->ring[i]) { in aq_vec_init()
228 self->tx_rings > i; ++i, ring = self->ring[i]) { in aq_vec_start()
252 self->tx_rings > i; ++i, ring = self->ring[i]) { in aq_vec_stop()
272 self->tx_rings > i; ++i, ring = self->ring[i]) { in aq_vec_deinit()
301 self->tx_rings > i; ++i, ring = self->ring[i]) { in aq_vec_ring_free()
307 self->tx_rings = 0; in aq_vec_ring_free()
[all …]
aq_hw.h
66 u8 tx_rings; member
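
Every aq_vec.c hit above uses the same counting idiom: tx_rings records how many entries of the vector's ring storage are active Tx rings, and each loop walks exactly that many ("self->tx_rings > i; ++i, ring = self->ring[i]"). The minimal sketch below only illustrates that idiom; the struct layout, names and the printf body are assumptions for illustration, not the driver's real definitions.

#include <stdio.h>

#define MAX_RINGS 8

/* Hypothetical stand-ins for the driver's vector/ring structures; the real
 * aq_vec_s and aq_ring_s layouts are not reproduced here. */
struct ring {
	unsigned int idx;
};

struct vec {
	unsigned int tx_rings;          /* how many Tx rings are populated */
	struct ring ring[MAX_RINGS];    /* ring storage, filled from index 0 */
};

/* Same loop shape as the aq_vec_poll()/aq_vec_init()/aq_vec_stop() hits:
 * iterate while "self->tx_rings > i", refreshing the ring cursor each pass. */
static void vec_for_each_tx(struct vec *self)
{
	struct ring *ring;
	unsigned int i;

	for (i = 0U, ring = &self->ring[0]; self->tx_rings > i;
	     ++i, ring = &self->ring[i])
		printf("servicing tx ring %u\n", ring->idx);
}

int main(void)
{
	struct vec v = { .tx_rings = 2, .ring = { { .idx = 0 }, { .idx = 1 } } };

	vec_for_each_tx(&v);            /* prints one line per active ring */
	return 0;
}
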
/kernel/linux/linux-5.10/drivers/net/wireless/realtek/rtw88/
pci.c
173 tx_ring = &rtwpci->tx_rings[i]; in rtw_pci_free_trx_ring()
339 tx_ring = &rtwpci->tx_rings[i]; in rtw_pci_init_trx_ring()
361 tx_ring = &rtwpci->tx_rings[i]; in rtw_pci_init_trx_ring()
415 dma = rtwpci->tx_rings[RTW_TX_QUEUE_BCN].r.dma; in rtw_pci_reset_buf_desc()
419 len = rtwpci->tx_rings[RTW_TX_QUEUE_H2C].r.len; in rtw_pci_reset_buf_desc()
420 dma = rtwpci->tx_rings[RTW_TX_QUEUE_H2C].r.dma; in rtw_pci_reset_buf_desc()
421 rtwpci->tx_rings[RTW_TX_QUEUE_H2C].r.rp = 0; in rtw_pci_reset_buf_desc()
422 rtwpci->tx_rings[RTW_TX_QUEUE_H2C].r.wp = 0; in rtw_pci_reset_buf_desc()
427 len = rtwpci->tx_rings[RTW_TX_QUEUE_BK].r.len; in rtw_pci_reset_buf_desc()
428 dma = rtwpci->tx_rings[RTW_TX_QUEUE_BK].r.dma; in rtw_pci_reset_buf_desc()
[all …]
pci.h
208 struct rtw_pci_tx_ring tx_rings[RTK_MAX_TX_QUEUE_NUM]; member
/kernel/linux/linux-5.10/drivers/net/ethernet/intel/ice/
ice_ethtool.c
697 tx_ring = test_vsi->tx_rings[0]; in ice_loopback_test()
1370 ring = READ_ONCE(vsi->tx_rings[j]); in ice_get_ethtool_stats()
2690 ring->tx_pending = vsi->tx_rings[0]->count; in ice_get_ringparam()
2702 struct ice_ring *tx_rings = NULL, *rx_rings = NULL; in ice_set_ringparam() local
2731 if (new_tx_cnt == vsi->tx_rings[0]->count && in ice_set_ringparam()
2754 vsi->tx_rings[i]->count = new_tx_cnt; in ice_set_ringparam()
2766 if (new_tx_cnt == vsi->tx_rings[0]->count) in ice_set_ringparam()
2771 vsi->tx_rings[0]->count, new_tx_cnt); in ice_set_ringparam()
2773 tx_rings = kcalloc(vsi->num_txq, sizeof(*tx_rings), GFP_KERNEL); in ice_set_ringparam()
2774 if (!tx_rings) { in ice_set_ringparam()
[all …]
ice_lib.c
76 vsi->tx_rings = devm_kcalloc(dev, vsi->alloc_txq, in ice_vsi_alloc_arrays()
77 sizeof(*vsi->tx_rings), GFP_KERNEL); in ice_vsi_alloc_arrays()
78 if (!vsi->tx_rings) in ice_vsi_alloc_arrays()
122 devm_kfree(dev, vsi->tx_rings); in ice_vsi_alloc_arrays()
301 if (vsi->tx_rings) { in ice_vsi_free_arrays()
302 devm_kfree(dev, vsi->tx_rings); in ice_vsi_free_arrays()
303 vsi->tx_rings = NULL; in ice_vsi_free_arrays()
1223 if (vsi->tx_rings) { in ice_vsi_clear_rings()
1225 if (vsi->tx_rings[i]) { in ice_vsi_clear_rings()
1226 kfree_rcu(vsi->tx_rings[i], rcu); in ice_vsi_clear_rings()
[all …]
ice_xsk.c
24 memset(&vsi->tx_rings[q_idx]->stats, 0, in ice_qp_reset_stats()
25 sizeof(vsi->tx_rings[q_idx]->stats)); in ice_qp_reset_stats()
38 ice_clean_tx_ring(vsi->tx_rings[q_idx]); in ice_qp_clean_rings()
158 tx_ring = vsi->tx_rings[q_idx]; in ice_qp_dis()
222 tx_ring = vsi->tx_rings[q_idx]; in ice_qp_ena()
ice_dcb_lib.c
188 return vsi->tx_rings[queue_index]->dcb_tc; in ice_dcb_get_tc()
204 tx_ring = vsi->tx_rings[i]; in ice_vsi_cfg_dcb_rings()
221 tx_ring = vsi->tx_rings[i]; in ice_vsi_cfg_dcb_rings()
ice_main.c
91 struct ice_ring *tx_ring = vsi->tx_rings[i]; in ice_check_for_hang_subtask()
5016 q_handle = vsi->tx_rings[queue_index]->q_handle; in ice_set_tx_maxrate()
5373 ice_update_vsi_tx_ring_stats(vsi, vsi->tx_rings, vsi->num_txq); in ice_update_vsi_ring_stats()
5692 ice_clean_tx_ring(vsi->tx_rings[i]); in ice_down()
5723 struct ice_ring *ring = vsi->tx_rings[i]; in ice_vsi_setup_tx_rings()
6597 if (vsi->tx_rings[i] && vsi->tx_rings[i]->desc) in ice_tx_timeout()
6598 if (txqueue == vsi->tx_rings[i]->q_index) { in ice_tx_timeout()
6599 tx_ring = vsi->tx_rings[i]; in ice_tx_timeout()
ice.h
247 struct ice_ring **tx_rings; /* Tx ring array */ member
/kernel/linux/linux-5.10/drivers/net/ethernet/intel/i40e/
i40e_ethtool.c
1930 ring->tx_pending = vsi->tx_rings[0]->count; in i40e_get_ringparam()
1949 struct i40e_ring *tx_rings = NULL, *rx_rings = NULL; in i40e_set_ringparam() local
1977 if ((new_tx_count == vsi->tx_rings[0]->count) && in i40e_set_ringparam()
1998 vsi->tx_rings[i]->count = new_tx_count; in i40e_set_ringparam()
2016 if (new_tx_count != vsi->tx_rings[0]->count) { in i40e_set_ringparam()
2019 vsi->tx_rings[0]->count, new_tx_count); in i40e_set_ringparam()
2020 tx_rings = kcalloc(tx_alloc_queue_pairs, in i40e_set_ringparam()
2022 if (!tx_rings) { in i40e_set_ringparam()
2031 tx_rings[i] = *vsi->tx_rings[i]; in i40e_set_ringparam()
2032 tx_rings[i].count = new_tx_count; in i40e_set_ringparam()
[all …]
i40e_main.c
335 if (vsi->tx_rings[i] && vsi->tx_rings[i]->desc) { in i40e_tx_timeout()
337 vsi->tx_rings[i]->queue_index) { in i40e_tx_timeout()
338 tx_ring = vsi->tx_rings[i]; in i40e_tx_timeout()
446 if (!vsi->tx_rings) in i40e_get_netdev_stats_struct()
454 ring = READ_ONCE(vsi->tx_rings[i]); in i40e_get_netdev_stats_struct()
514 memset(&vsi->tx_rings[i]->stats, 0, in i40e_vsi_reset_stats()
515 sizeof(vsi->tx_rings[i]->stats)); in i40e_vsi_reset_stats()
516 memset(&vsi->tx_rings[i]->tx_stats, 0, in i40e_vsi_reset_stats()
517 sizeof(vsi->tx_rings[i]->tx_stats)); in i40e_vsi_reset_stats()
811 p = READ_ONCE(vsi->tx_rings[q]); in i40e_update_vsi_stats()
[all …]
i40e_debugfs.c
291 struct i40e_ring *tx_ring = READ_ONCE(vsi->tx_rings[i]); in i40e_dbg_dump_vsi_seid()
572 if (!vsi->tx_rings || !vsi->tx_rings[0]->desc) { in i40e_dbg_dump_desc()
584 ring = kmemdup(vsi->tx_rings[ring_id], sizeof(*ring), GFP_KERNEL); in i40e_dbg_dump_desc()
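
The ice_set_ringparam() and i40e_set_ringparam() hits above share one resize pattern: when the requested descriptor count differs from tx_rings[0]->count, a temporary tx_rings array is kcalloc'd, each live ring is copied into it with the new count, new descriptor memory is prepared, and only then are the copies swapped in. The sketch below shows that clone-then-swap shape in plain user-space C; the ring type, sizes and helper name are invented for illustration, and the drivers' real error handling is far more involved.

#include <stdlib.h>

/* Hypothetical ring descriptor; the real i40e_ring/ice_ring carry much more. */
struct ring {
	unsigned int count;     /* number of descriptors */
	void *desc;             /* descriptor memory */
};

/* Clone every ring with a new descriptor count, then swap on success,
 * mirroring the temporary "tx_rings" array in the set_ringparam() hits. */
static int rings_resize(struct ring **live, unsigned int num,
			unsigned int new_count)
{
	struct ring *tmp;
	unsigned int i;

	if (new_count == live[0]->count)
		return 0;                       /* nothing to change */

	tmp = calloc(num, sizeof(*tmp));
	if (!tmp)
		return -1;

	for (i = 0; i < num; i++) {
		tmp[i] = *live[i];              /* copy existing ring state */
		tmp[i].count = new_count;
		tmp[i].desc = calloc(new_count, 16);    /* stand-in descriptor alloc */
		if (!tmp[i].desc)
			goto err_unwind;
	}

	/* Commit: point the live rings at the newly sized resources. */
	for (i = 0; i < num; i++) {
		free(live[i]->desc);
		*live[i] = tmp[i];
	}
	free(tmp);
	return 0;

err_unwind:
	while (i--)
		free(tmp[i].desc);
	free(tmp);
	return -1;
}

int main(void)
{
	struct ring r0 = { .count = 512, .desc = calloc(512, 16) };
	struct ring *live[] = { &r0 };

	rings_resize(live, 1, 1024);    /* grow from 512 to 1024 descriptors */
	free(r0.desc);
	return 0;
}
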
/kernel/linux/linux-5.10/drivers/thunderbolt/
nhi.c
454 if (!nhi->tx_rings[i]) { in nhi_alloc_hop()
472 if (ring->is_tx && nhi->tx_rings[ring->hop]) { in nhi_alloc_hop()
485 nhi->tx_rings[ring->hop] = ring; in nhi_alloc_hop()
706 ring->nhi->tx_rings[ring->hop] = NULL; in tb_ring_free()
828 ring = nhi->tx_rings[hop]; in nhi_interrupt_work()
1026 if (nhi->tx_rings[i]) in nhi_shutdown()
1197 nhi->tx_rings = devm_kcalloc(&pdev->dev, nhi->hop_count, in nhi_probe()
1198 sizeof(*nhi->tx_rings), GFP_KERNEL); in nhi_probe()
1201 if (!nhi->tx_rings || !nhi->rx_rings) in nhi_probe()
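
The nhi.c hits show tx_rings used as a slot table indexed by hop: nhi_probe() allocates hop_count pointer slots, nhi_alloc_hop() refuses to install a ring into an occupied slot, and tb_ring_free() clears the slot again. A small sketch of that slot-table idiom follows; the names are invented and plain calloc/free stand in for the devm_* helpers.

#include <stdlib.h>

struct ring {
	unsigned int hop;
};

/* Hypothetical controller state: one Tx ring slot per hop,
 * NULL meaning the hop is free. */
struct ctrl {
	unsigned int hop_count;
	struct ring **tx_rings;
};

static int ctrl_init(struct ctrl *c, unsigned int hops)
{
	c->hop_count = hops;
	c->tx_rings = calloc(hops, sizeof(*c->tx_rings));
	return c->tx_rings ? 0 : -1;
}

/* Claim a hop, failing if it is already taken, like the
 * "ring->is_tx && nhi->tx_rings[ring->hop]" check in nhi_alloc_hop(). */
static int ctrl_install_ring(struct ctrl *c, struct ring *r)
{
	if (r->hop >= c->hop_count || c->tx_rings[r->hop])
		return -1;
	c->tx_rings[r->hop] = r;
	return 0;
}

/* Release the slot again, as tb_ring_free() does. */
static void ctrl_remove_ring(struct ctrl *c, struct ring *r)
{
	c->tx_rings[r->hop] = NULL;
}

int main(void)
{
	struct ctrl c;
	struct ring r = { .hop = 3 };

	if (ctrl_init(&c, 12) == 0 && ctrl_install_ring(&c, &r) == 0)
		ctrl_remove_ring(&c, &r);
	free(c.tx_rings);
	return 0;
}
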
/kernel/linux/linux-5.10/drivers/net/ethernet/broadcom/genet/
bcmgenet.c
982 tx_rings[num].packets), \
984 tx_rings[num].bytes), \
1921 bcmgenet_tx_reclaim(dev, &priv->tx_rings[i]); in bcmgenet_tx_reclaim_all()
1924 bcmgenet_tx_reclaim(dev, &priv->tx_rings[DESC_INDEX]); in bcmgenet_tx_reclaim_all()
2018 ring = &priv->tx_rings[index]; in bcmgenet_xmit()
2566 struct bcmgenet_tx_ring *ring = &priv->tx_rings[index]; in bcmgenet_init_tx_ring()
2686 ring = &priv->tx_rings[i]; in bcmgenet_enable_tx_napi()
2691 ring = &priv->tx_rings[DESC_INDEX]; in bcmgenet_enable_tx_napi()
2702 ring = &priv->tx_rings[i]; in bcmgenet_disable_tx_napi()
2706 ring = &priv->tx_rings[DESC_INDEX]; in bcmgenet_disable_tx_napi()
[all …]
bcmgenet.h
638 struct bcmgenet_tx_ring tx_rings[DESC_INDEX + 1]; member
/kernel/linux/linux-5.10/drivers/net/ethernet/broadcom/
bcmsysport.c
457 ring = &priv->tx_rings[q]; in bcm_sysport_update_tx_stats()
523 ring = &priv->tx_rings[i]; in bcm_sysport_get_stats()
651 bcm_sysport_set_tx_coalesce(&priv->tx_rings[i], ec); in bcm_sysport_set_coalesce()
1007 bcm_sysport_tx_reclaim(priv, &priv->tx_rings[q]); in bcm_sysport_tx_reclaim_all()
1161 txr = &priv->tx_rings[ring]; in bcm_sysport_rx_isr()
1193 txr = &priv->tx_rings[ring]; in bcm_sysport_tx_isr()
1318 ring = &priv->tx_rings[queue]; in bcm_sysport_xmit()
1499 struct bcm_sysport_tx_ring *ring = &priv->tx_rings[index]; in bcm_sysport_init_tx_ring()
1590 struct bcm_sysport_tx_ring *ring = &priv->tx_rings[index]; in bcm_sysport_fini_tx_ring()
2364 ring = &priv->tx_rings[q]; in bcm_sysport_map_queues()
[all …]
bcmsysport.h
746 struct bcm_sysport_tx_ring *tx_rings; member
/kernel/linux/linux-5.10/drivers/net/ethernet/netronome/nfp/
nfp_net_common.c
995 tx_ring = &dp->tx_rings[qidx]; in nfp_net_tx()
2453 dp->tx_rings = kcalloc(dp->num_tx_rings, sizeof(*dp->tx_rings), in nfp_net_tx_rings_prepare()
2455 if (!dp->tx_rings) in nfp_net_tx_rings_prepare()
2464 nfp_net_tx_ring_init(&dp->tx_rings[r], &nn->r_vecs[r - bias], in nfp_net_tx_rings_prepare()
2467 if (nfp_net_tx_ring_alloc(dp, &dp->tx_rings[r])) in nfp_net_tx_rings_prepare()
2470 if (nfp_net_tx_ring_bufs_alloc(dp, &dp->tx_rings[r])) in nfp_net_tx_rings_prepare()
2478 nfp_net_tx_ring_bufs_free(dp, &dp->tx_rings[r]); in nfp_net_tx_rings_prepare()
2480 nfp_net_tx_ring_free(&dp->tx_rings[r]); in nfp_net_tx_rings_prepare()
2482 kfree(dp->tx_rings); in nfp_net_tx_rings_prepare()
2491 nfp_net_tx_ring_bufs_free(dp, &dp->tx_rings[r]); in nfp_net_tx_rings_free()
[all …]
/kernel/linux/linux-5.10/drivers/net/ethernet/intel/iavf/
iavf_main.c
345 struct iavf_ring *tx_ring = &adapter->tx_rings[t_idx]; in iavf_map_vector_to_txq()
574 adapter->tx_rings[i].tail = hw->hw_addr + IAVF_QTX_TAIL1(i); in iavf_configure_tx()
1095 kfree(adapter->tx_rings); in iavf_free_queues()
1096 adapter->tx_rings = NULL; in iavf_free_queues()
1129 adapter->tx_rings = kcalloc(num_active_queues, in iavf_alloc_queues()
1131 if (!adapter->tx_rings) in iavf_alloc_queues()
1142 tx_ring = &adapter->tx_rings[i]; in iavf_alloc_queues()
2427 if (!adapter->tx_rings) in iavf_free_all_tx_resources()
2431 if (adapter->tx_rings[i].desc) in iavf_free_all_tx_resources()
2432 iavf_free_tx_resources(&adapter->tx_rings[i]); in iavf_free_all_tx_resources()
[all …]
iavf_ethtool.c
365 &adapter->tx_rings[i] : NULL); in iavf_get_ethtool_stats()
691 tx_ring = &adapter->tx_rings[queue]; in __iavf_get_coalesce()
747 struct iavf_ring *tx_ring = &adapter->tx_rings[queue]; in iavf_set_itr_per_queue()
iavf.h
244 struct iavf_ring *tx_rings; member
/kernel/linux/linux-5.10/drivers/net/ethernet/sun/
niu.c
3591 index = (rp - np->tx_rings); in niu_tx_work()
3738 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_poll_core()
4090 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_slowpath_interrupt()
4168 struct tx_ring_info *rp = &np->tx_rings[i]; in __niu_fastpath_interrupt()
4308 if (np->tx_rings) { in niu_free_channels()
4310 struct tx_ring_info *rp = &np->tx_rings[i]; in niu_free_channels()
4314 kfree(np->tx_rings); in niu_free_channels()
4315 np->tx_rings = NULL; in niu_free_channels()
4456 struct tx_ring_info *tx_rings; in niu_alloc_channels() local
4509 tx_rings = kcalloc(num_tx_rings, sizeof(struct tx_ring_info), in niu_alloc_channels()
[all …]
/kernel/linux/linux-5.10/include/linux/
thunderbolt.h
448 struct tb_ring **tx_rings; member
/kernel/linux/linux-5.10/drivers/net/ethernet/broadcom/bnxt/
bnxt.c
6047 int __bnxt_hwrm_get_tx_rings(struct bnxt *bp, u16 fid, int *tx_rings) in __bnxt_hwrm_get_tx_rings() argument
6060 *tx_rings = le16_to_cpu(resp->alloc_tx_rings); in __bnxt_hwrm_get_tx_rings()
6069 int tx_rings, int rx_rings, int ring_grps, in __bnxt_hwrm_reserve_pf_rings() argument
6076 enables |= tx_rings ? FUNC_CFG_REQ_ENABLES_NUM_TX_RINGS : 0; in __bnxt_hwrm_reserve_pf_rings()
6077 req->num_tx_rings = cpu_to_le16(tx_rings); in __bnxt_hwrm_reserve_pf_rings()
6083 enables |= tx_rings + ring_grps ? in __bnxt_hwrm_reserve_pf_rings()
6098 req->num_cmpl_rings = cpu_to_le16(tx_rings + ring_grps); in __bnxt_hwrm_reserve_pf_rings()
6119 struct hwrm_func_vf_cfg_input *req, int tx_rings, in __bnxt_hwrm_reserve_vf_rings() argument
6126 enables |= tx_rings ? FUNC_VF_CFG_REQ_ENABLES_NUM_TX_RINGS : 0; in __bnxt_hwrm_reserve_vf_rings()
6131 enables |= tx_rings + ring_grps ? in __bnxt_hwrm_reserve_vf_rings()
[all …]
