Lines Matching refs:txq

140 struct mana_txq *txq; in mana_start_xmit() local
150 txq = &apc->tx_qp[txq_idx].txq; in mana_start_xmit()
151 gdma_sq = txq->gdma_sq; in mana_start_xmit()
155 pkg.tx_oob.s_oob.vsq_frame = txq->vsq_frame; in mana_start_xmit()
157 if (txq->vp_offset > MANA_SHORT_VPORT_OFFSET_MAX) { in mana_start_xmit()
158 pkg.tx_oob.l_oob.long_vp_offset = txq->vp_offset; in mana_start_xmit()
161 pkg.tx_oob.s_oob.short_vp_offset = txq->vp_offset; in mana_start_xmit()
244 skb_queue_tail(&txq->pending_skbs, skb); in mana_start_xmit()
257 (void)skb_dequeue_tail(&txq->pending_skbs); in mana_start_xmit()
264 atomic_inc(&txq->pending_sends); in mana_start_xmit()
271 tx_stats = &txq->stats; in mana_start_xmit()
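Taken together, the mana_start_xmit() matches above trace the transmit fast path: the skb's queue mapping selects a txq, the send OOB is stamped with the queue's vsq_frame and vport offset (long or short form depending on MANA_SHORT_VPORT_OFFSET_MAX), and the skb is parked on pending_skbs until its completion is polled. A condensed sketch of just that flow follows; how txq_idx is derived, the descriptor build, and the error unwinding are not in the matches and are assumptions here.

	/* Sketch of the txq usage in mana_start_xmit(); calls and fields
	 * not shown in the matched lines are assumptions.
	 */
	u16 txq_idx = skb_get_queue_mapping(skb);  /* assumed source of txq_idx */
	struct mana_txq *txq = &apc->tx_qp[txq_idx].txq;
	struct gdma_queue *gdma_sq = txq->gdma_sq;

	pkg.tx_oob.s_oob.vsq_frame = txq->vsq_frame;

	if (txq->vp_offset > MANA_SHORT_VPORT_OFFSET_MAX)
		pkg.tx_oob.l_oob.long_vp_offset = txq->vp_offset;  /* wide form */
	else
		pkg.tx_oob.s_oob.short_vp_offset = txq->vp_offset; /* fits short OOB */

	/* Park the skb before posting so the completion path can find it. */
	skb_queue_tail(&txq->pending_skbs, skb);

	/* ... post the WQE; if posting fails the skb is pulled back:
	 *     (void)skb_dequeue_tail(&txq->pending_skbs);
	 */

	atomic_inc(&txq->pending_sends);   /* mana_poll_tx_cq() decrements */
	tx_stats = &txq->stats;            /* per-queue counters updated next */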
324 stats = &apc->tx_qp[q].txq.stats; in mana_get_stats64()
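The lone mana_get_stats64() match shows where the per-queue TX counters live. Reading them safely usually goes through the u64_stats_sync seqcount that line 1206 initializes; below is a minimal sketch of that aggregation. The stats type name, the packets/bytes fields, and the num_queues bound are assumptions (only tx_qp[q].txq.stats and the syncp appear in the matches), and the fetch_begin/fetch_retry spelling varies across kernel versions.

	for (q = 0; q < apc->num_queues; q++) {
		struct mana_stats *stats = &apc->tx_qp[q].txq.stats;
		unsigned int start;
		u64 packets, bytes;

		/* Retry loop guards against torn 64-bit reads on 32-bit hosts. */
		do {
			start = u64_stats_fetch_begin(&stats->syncp);
			packets = stats->packets;
			bytes = stats->bytes;
		} while (u64_stats_fetch_retry(&stats->syncp, start));

		st->tx_packets += packets;
		st->tx_bytes += bytes;
	}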
343 int txq; in mana_get_tx_queue() local
345 txq = apc->indir_table[hash & MANA_INDIRECT_TABLE_MASK]; in mana_get_tx_queue()
347 if (txq != old_q && sk && sk_fullsock(sk) && in mana_get_tx_queue()
349 sk_tx_queue_set(sk, txq); in mana_get_tx_queue()
351 return txq; in mana_get_tx_queue()
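The mana_get_tx_queue() matches show an RSS-style indirection: a flow hash indexes indir_table to pick the queue, and for full sockets the result is cached with sk_tx_queue_set() so later packets skip the lookup. A reconstruction around those lines; the hash source and the truncated condition on line 347 are filled in as assumptions:

	static int mana_get_tx_queue(struct net_device *ndev, struct sk_buff *skb,
				     int old_q)
	{
		struct mana_port_context *apc = netdev_priv(ndev);
		u32 hash = skb_get_hash(skb);   /* assumed source of the hash */
		struct sock *sk = skb->sk;
		int txq;

		txq = apc->indir_table[hash & MANA_INDIRECT_TABLE_MASK];

		/* Cache the pick on connected full sockets; the dst-cache
		 * check completes the truncated condition and is an assumption.
		 */
		if (txq != old_q && sk && sk_fullsock(sk) &&
		    rcu_access_pointer(sk->sk_dst_cache))
			sk_tx_queue_set(sk, txq);

		return txq;
	}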
357 int txq; in mana_select_queue() local
362 txq = sk_tx_queue_get(skb->sk); in mana_select_queue()
364 if (txq < 0 || skb->ooo_okay || txq >= ndev->real_num_tx_queues) { in mana_select_queue()
366 txq = skb_get_rx_queue(skb); in mana_select_queue()
368 txq = mana_get_tx_queue(ndev, skb, txq); in mana_select_queue()
371 return txq; in mana_select_queue()
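mana_select_queue() leans on that cache: sk_tx_queue_get() is tried first, and only an invalid, out-of-range, or ooo_okay result forces a fresh lookup seeded by the recorded RX queue. A sketch using the standard ndo_select_queue signature; the skb_rx_queue_recorded() guard and the zero fallback are assumptions around the matched lines:

	static u16 mana_select_queue(struct net_device *ndev, struct sk_buff *skb,
				     struct net_device *sb_dev)
	{
		int txq;

		/* Fast path: reuse the queue cached on the socket. */
		txq = sk_tx_queue_get(skb->sk);

		if (txq < 0 || skb->ooo_okay || txq >= ndev->real_num_tx_queues) {
			if (skb_rx_queue_recorded(skb))
				txq = skb_get_rx_queue(skb);  /* keep RX/TX affinity */
			else
				txq = 0;

			txq = mana_get_tx_queue(ndev, skb, txq);
		}

		return txq;
	}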
787 struct mana_txq *txq = cq->txq; in mana_poll_tx_cq() local
798 ndev = txq->ndev; in mana_poll_tx_cq()
844 if (WARN_ON_ONCE(txq->gdma_txq_id != completions[i].wq_num)) in mana_poll_tx_cq()
847 skb = skb_dequeue(&txq->pending_skbs); in mana_poll_tx_cq()
864 mana_move_wq_tail(txq->gdma_sq, wqe_unit_cnt); in mana_poll_tx_cq()
866 gdma_wq = txq->gdma_sq; in mana_poll_tx_cq()
872 net_txq = txq->net_txq; in mana_poll_tx_cq()
883 if (atomic_sub_return(pkt_transmitted, &txq->pending_sends) < 0) in mana_poll_tx_cq()
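mana_poll_tx_cq() is the mirror of the xmit path: completions are checked against gdma_txq_id, each one dequeues the oldest skb from pending_skbs, the WQ tail advances by the reclaimed WQE units, the stack's queue can be rewoken, and pending_sends drops by the number of packets finished. A skeleton of that loop; comp_read, the per-completion bookkeeping, and the wake condition are assumptions:

	/* Skeleton of the completion loop around the matched lines. */
	struct mana_txq *txq = cq->txq;
	struct net_device *ndev = txq->ndev;
	struct netdev_queue *net_txq;
	struct gdma_queue *gdma_wq;
	unsigned int pkt_transmitted = 0;
	u32 wqe_unit_cnt = 0;
	int i;

	for (i = 0; i < comp_read; i++) {               /* comp_read assumed */
		struct sk_buff *skb;

		/* Completions must belong to this txq's work queue. */
		if (WARN_ON_ONCE(txq->gdma_txq_id != completions[i].wq_num))
			break;

		skb = skb_dequeue(&txq->pending_skbs);  /* FIFO pairs with xmit */
		if (!skb)
			break;

		/* ... unmap buffers, free the skb, accumulate into
		 * wqe_unit_cnt the WQE units this send consumed
		 * (accounting assumed) ...
		 */
		pkt_transmitted++;
	}

	mana_move_wq_tail(txq->gdma_sq, wqe_unit_cnt);  /* reclaim SQ space */

	gdma_wq = txq->gdma_sq;
	net_txq = txq->net_txq;

	/* If xmit stopped the queue for lack of SQ space, wake it now
	 * that space is back (exact threshold assumed).
	 */
	if (netif_tx_queue_stopped(net_txq) /* && enough room in gdma_wq */)
		netif_tx_wake_queue(net_txq);

	/* Going negative would mean more completions than posted sends. */
	if (atomic_sub_return(pkt_transmitted, &txq->pending_sends) < 0)
		WARN_ON_ONCE(1);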
1131 static void mana_deinit_txq(struct mana_port_context *apc, struct mana_txq *txq) in mana_deinit_txq() argument
1135 if (!txq->gdma_sq) in mana_deinit_txq()
1138 mana_gd_destroy_queue(gd->gdma_context, txq->gdma_sq); in mana_deinit_txq()
1159 mana_deinit_txq(apc, &apc->tx_qp[i].txq); in mana_destroy_txq()
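Teardown, per the mana_deinit_txq() and mana_destroy_txq() matches, is guarded by the gdma_sq pointer, so destroying a queue that never finished creation is a no-op. A sketch; the route from apc to the gdma_context is an assumption:

	static void mana_deinit_txq(struct mana_port_context *apc,
				    struct mana_txq *txq)
	{
		struct gdma_dev *gd = apc->ac->gdma_dev;   /* path assumed */

		/* A txq that never reached SQ creation has nothing to free. */
		if (!txq->gdma_sq)
			return;

		mana_gd_destroy_queue(gd->gdma_context, txq->gdma_sq);
	}

mana_destroy_txq() then just walks apc->tx_qp[] and calls this once per queue, as the line 1159 match shows.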
1175 struct mana_txq *txq; in mana_create_txq() local
1204 txq = &apc->tx_qp[i].txq; in mana_create_txq()
1206 u64_stats_init(&txq->stats.syncp); in mana_create_txq()
1207 txq->ndev = net; in mana_create_txq()
1208 txq->net_txq = netdev_get_tx_queue(net, i); in mana_create_txq()
1209 txq->vp_offset = apc->tx_vp_offset; in mana_create_txq()
1210 skb_queue_head_init(&txq->pending_skbs); in mana_create_txq()
1216 err = mana_gd_create_mana_wq_cq(gd, &spec, &txq->gdma_sq); in mana_create_txq()
1224 cq->txq = txq; in mana_create_txq()
1240 wq_spec.gdma_region = txq->gdma_sq->mem_info.gdma_region; in mana_create_txq()
1241 wq_spec.queue_size = txq->gdma_sq->queue_size; in mana_create_txq()
1255 txq->gdma_sq->id = wq_spec.queue_index; in mana_create_txq()
1258 txq->gdma_sq->mem_info.gdma_region = GDMA_INVALID_DMA_REGION; in mana_create_txq()
1261 txq->gdma_txq_id = txq->gdma_sq->id; in mana_create_txq()
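The mana_create_txq() matches cover most of a queue's construction: the stats seqcount is initialized, the txq is tied to its netdev queue, the vport offset consumed by mana_start_xmit() is copied in, pending_skbs is set up, and the GDMA SQ is created and registered. Notably, once the work-queue region is handed to the hardware object, the local handle is reset to GDMA_INVALID_DMA_REGION so mana_deinit_txq() cannot free it twice. A distilled fragment; the spec/cq/wq_spec setup, the hardware registration call, and the error unwinding are assumptions:

	txq = &apc->tx_qp[i].txq;

	u64_stats_init(&txq->stats.syncp);   /* readable via fetch_begin/retry */
	txq->ndev = net;
	txq->net_txq = netdev_get_tx_queue(net, i);
	txq->vp_offset = apc->tx_vp_offset;  /* consumed in mana_start_xmit() */
	skb_queue_head_init(&txq->pending_skbs);

	err = mana_gd_create_mana_wq_cq(gd, &spec, &txq->gdma_sq);
	if (err)
		goto out;                    /* unwind label assumed */

	cq->txq = txq;                       /* lets mana_poll_tx_cq() find it */

	wq_spec.gdma_region = txq->gdma_sq->mem_info.gdma_region;
	wq_spec.queue_size = txq->gdma_sq->queue_size;

	/* ... register the WQ with the hardware (call assumed), which
	 * returns the final queue index ...
	 */
	txq->gdma_sq->id = wq_spec.queue_index;

	/* The HW object owns the region now; forget it locally so
	 * teardown cannot free it twice.
	 */
	txq->gdma_sq->mem_info.gdma_region = GDMA_INVALID_DMA_REGION;

	txq->gdma_txq_id = txq->gdma_sq->id; /* checked against completions */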
1704 struct mana_txq *txq; in mana_dealloc_queues() local
1719 txq = &apc->tx_qp[i].txq; in mana_dealloc_queues()
1721 while (atomic_read(&txq->pending_sends) > 0) in mana_dealloc_queues()
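Finally, mana_dealloc_queues() waits out in-flight traffic before teardown: every skb that mana_start_xmit() queued bumped pending_sends, so spinning until the counter hits zero guarantees the completion path has drained pending_skbs. A sketch; the loop bound and the sleep between polls are assumptions (the matches show only the wait condition):

	for (i = 0; i < apc->num_queues; i++) {   /* bound assumed */
		txq = &apc->tx_qp[i].txq;

		/* Wait for mana_poll_tx_cq() to retire every posted send
		 * before the queues underneath are destroyed.
		 */
		while (atomic_read(&txq->pending_sends) > 0)
			usleep_range(1000, 2000);  /* backoff assumed */
	}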