/drivers/net/ethernet/intel/i40e/ |
D | i40e_xsk.c |
    245  u16 ntu = rx_ring->next_to_use;  in i40e_alloc_rx_buffers_zc() local
    251  rx_desc = I40E_RX_DESC(rx_ring, ntu);  in i40e_alloc_rx_buffers_zc()
    252  bi = i40e_rx_bi(rx_ring, ntu);  in i40e_alloc_rx_buffers_zc()
    266  ntu++;  in i40e_alloc_rx_buffers_zc()
    268  if (unlikely(ntu == rx_ring->count)) {  in i40e_alloc_rx_buffers_zc()
    271  ntu = 0;  in i40e_alloc_rx_buffers_zc()
    276  if (rx_ring->next_to_use != ntu) {  in i40e_alloc_rx_buffers_zc()
    279  i40e_release_rx_desc(rx_ring, ntu);  in i40e_alloc_rx_buffers_zc()
    475  u16 ntu = xdp_ring->next_to_use;  in i40e_xmit_pkt_batch() local
    484  tx_desc = I40E_TX_DESC(xdp_ring, ntu++);  in i40e_xmit_pkt_batch()
    [all …]
|
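The i40e_alloc_rx_buffers_zc() hits above show the refill idiom these drivers share: advance a local copy of next_to_use, wrap it manually when it reaches ring->count, and publish the new value (and the hardware tail) once at the end. Below is a minimal standalone sketch of that index arithmetic; the struct and function names are illustrative, not the driver's own:

    #include <stdio.h>

    /* Illustrative ring with the same next_to_use bookkeeping. */
    struct ring {
        unsigned short next_to_use;
        unsigned short count;
    };

    /* Fill 'n' slots starting at next_to_use, wrapping to 0 at count,
     * and publish the index once at the end, mirroring how
     * i40e_alloc_rx_buffers_zc() batches its tail update. */
    static void refill(struct ring *r, unsigned short n)
    {
        unsigned short ntu = r->next_to_use;

        while (n--) {
            /* ... post one descriptor at index ntu ... */
            ntu++;
            if (ntu == r->count)
                ntu = 0;  /* wrap to the start of the ring */
        }
        if (r->next_to_use != ntu)
            r->next_to_use = ntu;  /* driver would also write the HW tail here */
    }

    int main(void)
    {
        struct ring r = { .next_to_use = 510, .count = 512 };

        refill(&r, 4);
        printf("next_to_use = %u\n", r.next_to_use);  /* 510 -> 511 -> 0 -> 1 -> 2 */
        return 0;
    }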
D | i40e_adminq.c |
    1007  u16 ntu;  in i40e_clean_arq_element() local
    1023  ntu = rd32(hw, hw->aq.arq.head) & I40E_PF_ARQH_ARQH_MASK;  in i40e_clean_arq_element()
    1024  if (ntu == ntc) {  in i40e_clean_arq_element()
    1077  hw->aq.arq.next_to_use = ntu;  in i40e_clean_arq_element()
    1083  *pending = (ntc > ntu ? hw->aq.arq.count : 0) + (ntu - ntc);  in i40e_clean_arq_element()
|
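Line 1083 above is the pending-count expression: how far the hardware head (ntu) has moved past software's next_to_clean (ntc), with the ring size added back when the region wraps. A small self-contained check of that arithmetic; the helper name and the 32-entry ring are illustrative:

    #include <assert.h>

    /* Descriptors written by hardware but not yet cleaned by software,
     * on a ring of 'count' entries: the same expression used for
     * *pending in i40e_clean_arq_element(). */
    static unsigned short arq_pending(unsigned short ntc, unsigned short ntu,
                                      unsigned short count)
    {
        return (ntc > ntu ? count : 0) + (ntu - ntc);
    }

    int main(void)
    {
        assert(arq_pending(3, 7, 32) == 4);   /* no wrap: 7 - 3 */
        assert(arq_pending(30, 2, 32) == 4);  /* wrapped: 32 + (2 - 30) */
        assert(arq_pending(5, 5, 32) == 0);   /* caught up */
        return 0;
    }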
D | i40e_txrx.c |
    1697  u16 ntu = rx_ring->next_to_use;  in i40e_alloc_rx_buffers() local
    1705  rx_desc = I40E_RX_DESC(rx_ring, ntu);  in i40e_alloc_rx_buffers()
    1706  bi = i40e_rx_bi(rx_ring, ntu);  in i40e_alloc_rx_buffers()
    1725  ntu++;  in i40e_alloc_rx_buffers()
    1726  if (unlikely(ntu == rx_ring->count)) {  in i40e_alloc_rx_buffers()
    1729  ntu = 0;  in i40e_alloc_rx_buffers()
    1738  if (rx_ring->next_to_use != ntu)  in i40e_alloc_rx_buffers()
    1739  i40e_release_rx_desc(rx_ring, ntu);  in i40e_alloc_rx_buffers()
    1744  if (rx_ring->next_to_use != ntu)  in i40e_alloc_rx_buffers()
    1745  i40e_release_rx_desc(rx_ring, ntu);  in i40e_alloc_rx_buffers()
|
/drivers/net/ethernet/hisilicon/hns3/ |
D | hns3_trace.h |
     74  __field(int, ntu)
     83  __entry->ntu = ring->next_to_use;
     93  __get_str(devname), __entry->index, __entry->ntu,
    105  __field(int, ntu)
    115  __entry->ntu = ring->next_to_use;
    126  __get_str(devname), __entry->index, __entry->ntu,
|
D | hns3_enet.c |
     965  u32 ntc, ntu;  in hns3_tx_spare_space() local
     971  ntu = tx_spare->next_to_use;  in hns3_tx_spare_space()
     973  if (ntc > ntu)  in hns3_tx_spare_space()
     974  return ntc - ntu - 1;  in hns3_tx_spare_space()
     979  return max(ntc, tx_spare->len - ntu) - 1;  in hns3_tx_spare_space()
    1085  u32 ntu = tx_spare->next_to_use;  in hns3_tx_spare_alloc() local
    1093  if (ntu + size > tx_spare->len) {  in hns3_tx_spare_alloc()
    1094  *cb_len += (tx_spare->len - ntu);  in hns3_tx_spare_alloc()
    1095  ntu = 0;  in hns3_tx_spare_alloc()
    1098  tx_spare->next_to_use = ntu + size;  in hns3_tx_spare_alloc()
    [all …]
|
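The hns3_tx_spare_space() hits show a subtler calculation: the spare buffer holds bounce copies, so the caller needs the largest contiguous free chunk, not the total free space. When next_to_use is at or ahead of next_to_clean, the writer can use either the tail of the buffer (len - ntu) or, after wrapping, the head (ntc), whichever is larger; one unit stays reserved so a full buffer is distinguishable from an empty one. A hedged sketch of that logic with illustrative names and sizes:

    #include <stdio.h>

    static unsigned int max_u(unsigned int a, unsigned int b)
    {
        return a > b ? a : b;
    }

    /* Largest contiguous free chunk in a circular copy buffer of 'len'
     * bytes, in the spirit of hns3_tx_spare_space(). */
    static unsigned int spare_space(unsigned int ntc, unsigned int ntu,
                                    unsigned int len)
    {
        if (ntc > ntu)
            return ntc - ntu - 1;          /* single gap between the indices */
        return max_u(ntc, len - ntu) - 1;  /* tail region vs. wrapped head */
    }

    int main(void)
    {
        printf("%u\n", spare_space(100, 40, 4096));  /* 59: gap between indices */
        printf("%u\n", spare_space(40, 100, 4096));  /* 3995: tail region wins */
        return 0;
    }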
/drivers/net/ethernet/intel/ice/ |
D | ice_xsk.c |
    382  u16 ntu = rx_ring->next_to_use;  in ice_alloc_rx_bufs_zc() local
    390  rx_desc = ICE_RX_DESC(rx_ring, ntu);  in ice_alloc_rx_bufs_zc()
    391  xdp = &rx_ring->xdp_buf[ntu];  in ice_alloc_rx_bufs_zc()
    406  ntu++;  in ice_alloc_rx_bufs_zc()
    408  if (unlikely(ntu == rx_ring->count)) {  in ice_alloc_rx_bufs_zc()
    411  ntu = 0;  in ice_alloc_rx_bufs_zc()
    415  if (rx_ring->next_to_use != ntu) {  in ice_alloc_rx_bufs_zc()
    418  ice_release_rx_desc(rx_ring, ntu);  in ice_alloc_rx_bufs_zc()
    831  u16 ntu = rx_ring->next_to_use;  in ice_xsk_clean_rx_ring() local
    833  for ( ; ntc != ntu; ntc = (ntc + 1) & count_mask) {  in ice_xsk_clean_rx_ring()
    [all …]
|
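Note the two wrap styles within ice_xsk.c: the refill path compares against rx_ring->count and resets to 0, while line 833 advances with (ntc + 1) & count_mask, which only works when the ring size is a power of two. A tiny sketch of the mask idiom, with illustrative values:

    #include <assert.h>

    int main(void)
    {
        const unsigned short count = 512;  /* must be a power of two */
        const unsigned short count_mask = count - 1;
        unsigned short ntc = 511;

        ntc = (ntc + 1) & count_mask;  /* wraps 511 -> 0 without a branch */
        assert(ntc == 0);
        return 0;
    }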
D | ice_controlq.c |
    1169  u16 ntu;  in ice_clean_rq_elem() local
    1184  ntu = (u16)(rd32(hw, cq->rq.head) & cq->rq.head_mask);  in ice_clean_rq_elem()
    1186  if (ntu == ntc) {  in ice_clean_rq_elem()
    1233  cq->rq.next_to_use = ntu;  in ice_clean_rq_elem()
    1239  ntu = (u16)(rd32(hw, cq->rq.head) & cq->rq.head_mask);  in ice_clean_rq_elem()
    1240  *pending = (u16)((ntc > ntu ? cq->rq.count : 0) + (ntu - ntc));  in ice_clean_rq_elem()
|
D | ice_txrx.c |
    671  u16 ntu = rx_ring->next_to_use;  in ice_alloc_rx_bufs() local
    680  rx_desc = ICE_RX_DESC(rx_ring, ntu);  in ice_alloc_rx_bufs()
    681  bi = &rx_ring->rx_buf[ntu];  in ice_alloc_rx_bufs()
    701  ntu++;  in ice_alloc_rx_bufs()
    702  if (unlikely(ntu == rx_ring->count)) {  in ice_alloc_rx_bufs()
    705  ntu = 0;  in ice_alloc_rx_bufs()
    714  if (rx_ring->next_to_use != ntu)  in ice_alloc_rx_bufs()
    715  ice_release_rx_desc(rx_ring, ntu);  in ice_alloc_rx_bufs()
|
D | ice_main.c |
    1375  u16 ntu;  in ice_ctrlq_pending() local
    1377  ntu = (u16)(rd32(hw, cq->rq.head) & cq->rq.head_mask);  in ice_ctrlq_pending()
    1378  return cq->rq.next_to_clean != ntu;  in ice_ctrlq_pending()
|
/drivers/net/ethernet/hisilicon/hns3/hns3vf/ |
D | hclgevf_cmd.c |
    19  int ntu = ring->next_to_use;  in hclgevf_ring_space() local
    22  used = (ntu - ntc + ring->desc_num) % ring->desc_num;  in hclgevf_ring_space()
    30  int ntu = ring->next_to_use;  in hclgevf_is_valid_csq_clean_head() local
    33  if (ntu > ntc)  in hclgevf_is_valid_csq_clean_head()
    34  return head >= ntc && head <= ntu;  in hclgevf_is_valid_csq_clean_head()
    36  return head >= ntc || head <= ntu;  in hclgevf_is_valid_csq_clean_head()
|
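hclgevf_is_valid_csq_clean_head() sanity-checks a head index reported by firmware: it must lie between next_to_clean and next_to_use in ring order, and the comparison flips when the pending region wraps past the end of the ring. A standalone sketch of the same predicate; the function name and the 32-entry ring in the examples are illustrative:

    #include <assert.h>
    #include <stdbool.h>

    /* True if 'head' lies in the pending region [ntc, ntu] in ring
     * order, matching the logic of hclgevf_is_valid_csq_clean_head(). */
    static bool valid_clean_head(int head, int ntc, int ntu)
    {
        if (ntu > ntc)
            return head >= ntc && head <= ntu;
        return head >= ntc || head <= ntu;  /* region wraps past the end */
    }

    int main(void)
    {
        /* no wrap: pending region is [4, 9] */
        assert(valid_clean_head(6, 4, 9));
        assert(!valid_clean_head(2, 4, 9));
        /* wrapped on a 32-entry ring: region is [30, 31] then [0, 2] */
        assert(valid_clean_head(31, 30, 2));
        assert(valid_clean_head(1, 30, 2));
        assert(!valid_clean_head(10, 30, 2));
        return 0;
    }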
/drivers/net/ethernet/hisilicon/hns3/hns3pf/ |
D | hclge_cmd.c |
    18  int ntu = ring->next_to_use;  in hclge_ring_space() local
    20  int used = (ntu - ntc + ring->desc_num) % ring->desc_num;  in hclge_ring_space()
    27  int ntu = ring->next_to_use;  in is_valid_csq_clean_head() local
    30  if (ntu > ntc)  in is_valid_csq_clean_head()
    31  return head >= ntc && head <= ntu;  in is_valid_csq_clean_head()
    33  return head >= ntc || head <= ntu;  in is_valid_csq_clean_head()
|
/drivers/net/ethernet/intel/ixgbe/ |
D | ixgbe_xsk.c |
    453  u16 ntc = tx_ring->next_to_clean, ntu = tx_ring->next_to_use;  in ixgbe_clean_xdp_tx_irq() local
    463  while (ntc != ntu) {  in ixgbe_clean_xdp_tx_irq()
    541  u16 ntc = tx_ring->next_to_clean, ntu = tx_ring->next_to_use;  in ixgbe_xsk_clean_tx_ring() local
    546  while (ntc != ntu) {  in ixgbe_xsk_clean_tx_ring()
|
D | ixgbe.h |
    512  u16 ntu = ring->next_to_use;  in ixgbe_desc_unused() local
    514  return ((ntc > ntu) ? 0 : ring->count) + ntc - ntu - 1;  in ixgbe_desc_unused()
|
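ixgbe_desc_unused() is the classic free-descriptor count, repeated almost verbatim in ixgbevf.h and igc.h below: the distance from next_to_use back around to next_to_clean, minus one slot that stays empty so a full ring and an empty ring produce different index pairs. (The older hns driver's hns_desc_unused(), at the end of this listing, uses ">=" and omits the "- 1", so it does not reserve that slot.) A self-contained check of the formula with illustrative values:

    #include <assert.h>

    /* Free descriptors on a 'count'-entry ring, as in ixgbe_desc_unused():
     * one slot is kept unused to distinguish full from empty. */
    static unsigned short desc_unused(unsigned short ntc, unsigned short ntu,
                                      unsigned short count)
    {
        return ((ntc > ntu) ? 0 : count) + ntc - ntu - 1;
    }

    int main(void)
    {
        assert(desc_unused(0, 0, 512) == 511);   /* empty ring: all but one */
        assert(desc_unused(10, 7, 512) == 2);    /* ntc ahead of ntu */
        assert(desc_unused(7, 10, 512) == 508);  /* ntu ahead, wrapped math */
        return 0;
    }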
/drivers/net/ethernet/intel/iavf/ |
D | iavf_adminq.c |
    867  u16 ntu;  in iavf_clean_arq_element() local
    883  ntu = rd32(hw, hw->aq.arq.head) & IAVF_VF_ARQH1_ARQH_MASK;  in iavf_clean_arq_element()
    884  if (ntu == ntc) {  in iavf_clean_arq_element()
    937  hw->aq.arq.next_to_use = ntu;  in iavf_clean_arq_element()
    942  *pending = (ntc > ntu ? hw->aq.arq.count : 0) + (ntu - ntc);  in iavf_clean_arq_element()
|
D | iavf_txrx.c |
    884  u16 ntu = rx_ring->next_to_use;  in iavf_alloc_rx_buffers() local
    892  rx_desc = IAVF_RX_DESC(rx_ring, ntu);  in iavf_alloc_rx_buffers()
    893  bi = &rx_ring->rx_bi[ntu];  in iavf_alloc_rx_buffers()
    912  ntu++;  in iavf_alloc_rx_buffers()
    913  if (unlikely(ntu == rx_ring->count)) {  in iavf_alloc_rx_buffers()
    916  ntu = 0;  in iavf_alloc_rx_buffers()
    925  if (rx_ring->next_to_use != ntu)  in iavf_alloc_rx_buffers()
    926  iavf_release_rx_desc(rx_ring, ntu);  in iavf_alloc_rx_buffers()
    931  if (rx_ring->next_to_use != ntu)  in iavf_alloc_rx_buffers()
    932  iavf_release_rx_desc(rx_ring, ntu);  in iavf_alloc_rx_buffers()
|
/drivers/net/ethernet/intel/ixgbevf/ |
D | ixgbevf.h |
    289  u16 ntu = ring->next_to_use;  in ixgbevf_desc_unused() local
    291  return ((ntc > ntu) ? 0 : ring->count) + ntc - ntu - 1;  in ixgbevf_desc_unused()
|
/drivers/net/ethernet/intel/igc/ |
D | igc.h |
    580  u16 ntu = ring->next_to_use;  in igc_desc_unused() local
    582  return ((ntc > ntu) ? 0 : ring->count) + ntc - ntu - 1;  in igc_desc_unused()
|
D | igc_main.c |
    2782  u16 budget, ntu;  in igc_xdp_xmit_zc() local
    2789  ntu = ring->next_to_use;  in igc_xdp_xmit_zc()
    2805  tx_desc = IGC_TX_DESC(ring, ntu);  in igc_xdp_xmit_zc()
    2810  bi = &ring->tx_buffer_info[ntu];  in igc_xdp_xmit_zc()
    2820  ntu++;  in igc_xdp_xmit_zc()
    2821  if (ntu == ring->count)  in igc_xdp_xmit_zc()
    2822  ntu = 0;  in igc_xdp_xmit_zc()
    2825  ring->next_to_use = ntu;  in igc_xdp_xmit_zc()
|
/drivers/net/ethernet/hisilicon/hns/ |
D | hns_enet.c |
    706  int ntu = ring->next_to_use;  in hns_desc_unused() local
    708  return ((ntc >= ntu) ? 0 : ring->desc_num) + ntc - ntu;  in hns_desc_unused()
|