Lines matching refs: tcb (each entry shows the source line number, the matching code, and the enclosing function)
161 bnad_txq_cleanup(struct bnad *bnad, struct bna_tcb *tcb) in bnad_txq_cleanup() argument
163 struct bnad_tx_unmap *unmap_q = tcb->unmap_q; in bnad_txq_cleanup()
167 for (i = 0; i < tcb->q_depth; i++) { in bnad_txq_cleanup()
171 bnad_tx_buff_unmap(bnad, unmap_q, tcb->q_depth, i); in bnad_txq_cleanup()
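
The bnad_txq_cleanup() matches above walk every slot of the TX ring at teardown and unmap whatever is still attached. A minimal user-space model of that loop is sketched below; the unmap_entry type and ring_slot_release() helper are hypothetical stand-ins, not the driver's real bnad_tx_unmap layout or bnad_tx_buff_unmap().

    #include <stdlib.h>

    /* Hypothetical slot type; the real bnad_tx_unmap also tracks DMA
     * mappings for each fragment of the buffer. */
    struct unmap_entry {
        void *buf;
    };

    void ring_slot_release(struct unmap_entry *q, unsigned int q_depth,
                           unsigned int index)
    {
        (void)q_depth;              /* a real unmap helper also walks fragments */
        free(q[index].buf);
        q[index].buf = NULL;
    }

    /* Model of the teardown pass: visit every slot, skip empty ones,
     * release anything still queued. */
    void txq_cleanup_model(struct unmap_entry *unmap_q, unsigned int q_depth)
    {
        unsigned int i;

        for (i = 0; i < q_depth; i++) {
            if (!unmap_q[i].buf)
                continue;
            ring_slot_release(unmap_q, q_depth, i);
        }
    }
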
183 bnad_txcmpl_process(struct bnad *bnad, struct bna_tcb *tcb) in bnad_txcmpl_process() argument
187 struct bnad_tx_unmap *unmap_q = tcb->unmap_q; in bnad_txcmpl_process()
192 if (!test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)) in bnad_txcmpl_process()
195 hw_cons = *(tcb->hw_consumer_index); in bnad_txcmpl_process()
196 cons = tcb->consumer_index; in bnad_txcmpl_process()
197 q_depth = tcb->q_depth; in bnad_txcmpl_process()
200 BUG_ON(!(wis <= BNA_QE_IN_USE_CNT(tcb, tcb->q_depth))); in bnad_txcmpl_process()
218 tcb->consumer_index = hw_cons; in bnad_txcmpl_process()
220 tcb->txq->tx_packets += sent_packets; in bnad_txcmpl_process()
221 tcb->txq->tx_bytes += sent_bytes; in bnad_txcmpl_process()
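
The bnad_txcmpl_process() matches show the reclaim side of the ring: read the hardware-written consumer index, figure out how many entries completed since the cached consumer index, release them, adopt the new index, and credit tx_packets/tx_bytes. The sketch below models that index arithmetic, assuming q_depth is a power of two so the in-use count reduces to a mask (which is what the BNA_QE_IN_USE_CNT macro at line 200 appears to encapsulate; its exact body is not shown in this listing).

    #include <assert.h>
    #include <stdint.h>

    /* Model of completion processing on a power-of-two ring: the NIC reports
     * the consumer index it has reached (hw_cons); software reclaims every
     * entry between its cached index and hw_cons, then adopts hw_cons. */
    unsigned int txcmpl_process_model(uint32_t *cached_cons, uint32_t hw_cons,
                                      uint32_t q_depth)
    {
        uint32_t in_use = (hw_cons - *cached_cons) & (q_depth - 1);
        uint32_t i;

        /* The driver asserts a similar invariant with BUG_ON(): the hardware
         * can never report more completions than entries outstanding. */
        assert(in_use <= q_depth - 1);

        for (i = 0; i < in_use; i++) {
            uint32_t slot = (*cached_cons + i) & (q_depth - 1);
            (void)slot;             /* unmap and free the buffer at this slot */
        }

        *cached_cons = hw_cons;
        return in_use;              /* number of completions reclaimed */
    }
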
227 bnad_tx_complete(struct bnad *bnad, struct bna_tcb *tcb) in bnad_tx_complete() argument
232 if (test_and_set_bit(BNAD_TXQ_FREE_SENT, &tcb->flags)) in bnad_tx_complete()
235 sent = bnad_txcmpl_process(bnad, tcb); in bnad_tx_complete()
239 BNA_QE_FREE_CNT(tcb, tcb->q_depth) >= in bnad_tx_complete()
241 if (test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)) { in bnad_tx_complete()
248 if (likely(test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))) in bnad_tx_complete()
249 bna_ib_ack(tcb->i_dbell, sent); in bnad_tx_complete()
252 clear_bit(BNAD_TXQ_FREE_SENT, &tcb->flags); in bnad_tx_complete()
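
bnad_tx_complete() takes the BNAD_TXQ_FREE_SENT bit with test_and_set_bit() before reclaiming and clears it when done; the same bit reappears in bnad_tx_cleanup() and bnad_start_xmit() further down, so it effectively serializes which context is allowed to free sent buffers at any moment. A small C11 model of that "one reclaimer at a time" convention, using atomic_flag in place of the kernel's bitops:

    #include <stdatomic.h>
    #include <stdbool.h>

    /* Whoever wins the flag owns this reclaim pass; everyone else backs off
     * and lets the owner do the work (or retries later, as the cleanup work
     * item in this listing does). */
    atomic_flag free_sent = ATOMIC_FLAG_INIT;

    bool try_reclaim(void (*reclaim)(void))
    {
        if (atomic_flag_test_and_set(&free_sent))
            return false;           /* another context is already reclaiming */

        reclaim();                  /* process completions, free sent buffers */

        atomic_flag_clear(&free_sent);
        return true;
    }
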
261 struct bna_tcb *tcb = (struct bna_tcb *)data; in bnad_msix_tx() local
262 struct bnad *bnad = tcb->bnad; in bnad_msix_tx()
264 bnad_tx_complete(bnad, tcb); in bnad_msix_tx()
699 struct bna_tcb *tcb = NULL; in bnad_isr() local
726 tcb = bnad->tx_info[i].tcb[j]; in bnad_isr()
727 if (tcb && test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)) in bnad_isr()
728 bnad_tx_complete(bnad, bnad->tx_info[i].tcb[j]); in bnad_isr()
853 struct bna_tcb *tcb = in bnad_cb_ethport_link_status() local
854 bnad->tx_info[tx_id].tcb[tcb_id]; in bnad_cb_ethport_link_status()
856 if (!tcb) in bnad_cb_ethport_link_status()
859 txq_id = tcb->id; in bnad_cb_ethport_link_status()
862 &tcb->flags)) { in bnad_cb_ethport_link_status()
904 bnad_cb_tcb_setup(struct bnad *bnad, struct bna_tcb *tcb) in bnad_cb_tcb_setup() argument
907 (struct bnad_tx_info *)tcb->txq->tx->priv; in bnad_cb_tcb_setup()
909 tcb->priv = tcb; in bnad_cb_tcb_setup()
910 tx_info->tcb[tcb->id] = tcb; in bnad_cb_tcb_setup()
914 bnad_cb_tcb_destroy(struct bnad *bnad, struct bna_tcb *tcb) in bnad_cb_tcb_destroy() argument
917 (struct bnad_tx_info *)tcb->txq->tx->priv; in bnad_cb_tcb_destroy()
919 tx_info->tcb[tcb->id] = NULL; in bnad_cb_tcb_destroy()
920 tcb->priv = NULL; in bnad_cb_tcb_destroy()
947 struct bna_tcb *tcb; in bnad_cb_tx_stall() local
952 tcb = tx_info->tcb[i]; in bnad_cb_tx_stall()
953 if (!tcb) in bnad_cb_tx_stall()
955 txq_id = tcb->id; in bnad_cb_tx_stall()
956 clear_bit(BNAD_TXQ_TX_STARTED, &tcb->flags); in bnad_cb_tx_stall()
967 struct bna_tcb *tcb; in bnad_cb_tx_resume() local
972 tcb = tx_info->tcb[i]; in bnad_cb_tx_resume()
973 if (!tcb) in bnad_cb_tx_resume()
975 txq_id = tcb->id; in bnad_cb_tx_resume()
977 BUG_ON(test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)); in bnad_cb_tx_resume()
978 set_bit(BNAD_TXQ_TX_STARTED, &tcb->flags); in bnad_cb_tx_resume()
979 BUG_ON(*(tcb->hw_consumer_index) != 0); in bnad_cb_tx_resume()
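
bnad_cb_tx_stall() clears BNAD_TXQ_TX_STARTED and bnad_cb_tx_resume() sets it again, after checking via BUG_ON() that the hardware consumer index has been reset; the transmit and completion paths elsewhere in this listing test the same bit before touching the queue. A sketch of that gate with C11 atomics (the bit position here is arbitrary, not the driver's flag layout):

    #include <stdatomic.h>
    #include <stdbool.h>

    #define TXQ_TX_STARTED  (1u << 0)   /* arbitrary bit for the model */

    atomic_uint txq_flags;

    void txq_stall(void)  { atomic_fetch_and(&txq_flags, ~TXQ_TX_STARTED); }
    void txq_resume(void) { atomic_fetch_or(&txq_flags, TXQ_TX_STARTED);   }

    /* Transmit and completion paths consult this before touching the ring. */
    bool txq_may_transmit(void)
    {
        return atomic_load(&txq_flags) & TXQ_TX_STARTED;
    }
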
1009 struct bna_tcb *tcb; in bnad_tx_cleanup() local
1014 tcb = tx_info->tcb[i]; in bnad_tx_cleanup()
1015 if (!tcb) in bnad_tx_cleanup()
1018 bnad = tcb->bnad; in bnad_tx_cleanup()
1020 if (test_and_set_bit(BNAD_TXQ_FREE_SENT, &tcb->flags)) { in bnad_tx_cleanup()
1025 bnad_txq_cleanup(bnad, tcb); in bnad_tx_cleanup()
1028 clear_bit(BNAD_TXQ_FREE_SENT, &tcb->flags); in bnad_tx_cleanup()
1046 struct bna_tcb *tcb; in bnad_cb_tx_cleanup() local
1050 tcb = tx_info->tcb[i]; in bnad_cb_tx_cleanup()
1051 if (!tcb) in bnad_cb_tx_cleanup()
1426 if (tx_info->tcb[i] == NULL) in bnad_tx_msix_unregister()
1429 vector_num = tx_info->tcb[i]->intr_vector; in bnad_tx_msix_unregister()
1430 free_irq(bnad->msix_table[vector_num].vector, tx_info->tcb[i]); in bnad_tx_msix_unregister()
1446 vector_num = tx_info->tcb[i]->intr_vector; in bnad_tx_msix_register()
1447 sprintf(tx_info->tcb[i]->name, "%s TXQ %d", bnad->netdev->name, in bnad_tx_msix_register()
1448 tx_id + tx_info->tcb[i]->id); in bnad_tx_msix_register()
1451 tx_info->tcb[i]->name, in bnad_tx_msix_register()
1452 tx_info->tcb[i]); in bnad_tx_msix_register()
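
The bnad_tx_msix_register()/bnad_tx_msix_unregister() matches show each TX queue getting its own MSI-X vector, with the tcb pointer passed as the dev_id cookie that bnad_msix_tx() (line 261 above) recovers in its handler. The kernel-style sketch below only mirrors what the listing shows; it is not self-contained (it assumes the driver's own struct definitions), the num_txqs parameter and zero IRQ flags are assumptions, and the unwind path is illustrative rather than the driver's exact error handling.

    #include <linux/interrupt.h>
    /* Assumes the driver's bnad/bna structures as they appear in the listing. */

    static irqreturn_t model_msix_tx(int irq, void *data)
    {
        struct bna_tcb *tcb = data;     /* dev_id is the per-queue tcb */

        bnad_tx_complete(tcb->bnad, tcb);
        return IRQ_HANDLED;
    }

    static int model_tx_msix_register(struct bnad *bnad,
                                      struct bnad_tx_info *tx_info,
                                      int tx_id, int num_txqs)
    {
        int i, err;

        for (i = 0; i < num_txqs; i++) {
            int vec = tx_info->tcb[i]->intr_vector;

            sprintf(tx_info->tcb[i]->name, "%s TXQ %d", bnad->netdev->name,
                    tx_id + tx_info->tcb[i]->id);
            err = request_irq(bnad->msix_table[vec].vector, model_msix_tx, 0,
                              tx_info->tcb[i]->name, tx_info->tcb[i]);
            if (err)
                goto err_unwind;
        }
        return 0;

    err_unwind:
        while (--i >= 0)
            free_irq(bnad->msix_table[tx_info->tcb[i]->intr_vector].vector,
                     tx_info->tcb[i]);
        return err;
    }

Using the tcb as dev_id keeps the handler lock-free on the fast path: the vector maps straight to its queue with no lookup.
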
1838 if (tx_info->tcb[0]->intr_type == BNA_INTR_T_MSIX) in bnad_destroy_tx()
2235 if (bnad->tx_info[i].tcb[j]) { in bnad_netdev_qstats_fill()
2237 bnad->tx_info[i].tcb[j]->txq->tx_packets; in bnad_netdev_qstats_fill()
2239 bnad->tx_info[i].tcb[j]->txq->tx_bytes; in bnad_netdev_qstats_fill()
2638 bnad_txq_wi_prepare(struct bnad *bnad, struct bna_tcb *tcb, in bnad_txq_wi_prepare() argument
2650 vlan_tag = ((tcb->priority & 0x7) << VLAN_PRIO_SHIFT) in bnad_txq_wi_prepare()
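
bnad_txq_wi_prepare() (line 2650) folds the queue's 3-bit priority into the VLAN tag by shifting it into the PCP field. In the 802.1Q TCI layout the priority occupies bits 15:13 and the VLAN ID bits 11:0, which is what VLAN_PRIO_SHIFT (13 in linux/if_vlan.h) expresses. A standalone illustration of that bit packing; the VID handling here is illustrative, not the driver's:

    #include <stdint.h>
    #include <stdio.h>

    #define MODEL_VLAN_PRIO_SHIFT 13          /* PCP lives in TCI bits 15:13 */
    #define MODEL_VLAN_VID_MASK   0x0fff      /* VID lives in TCI bits 11:0 */

    /* Build an 802.1Q tag control word from a 3-bit priority and a 12-bit VID,
     * mirroring the (priority & 0x7) << VLAN_PRIO_SHIFT expression above. */
    uint16_t build_vlan_tci(unsigned int prio, unsigned int vid)
    {
        return (uint16_t)(((prio & 0x7) << MODEL_VLAN_PRIO_SHIFT) |
                          (vid & MODEL_VLAN_VID_MASK));
    }

    int main(void)
    {
        printf("0x%04x\n", build_vlan_tci(5, 100));   /* prints 0xa064 */
        return 0;
    }
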
2755 struct bna_tcb *tcb = NULL; in bnad_start_xmit() local
2783 tcb = bnad->tx_info[0].tcb[txq_id]; in bnad_start_xmit()
2784 q_depth = tcb->q_depth; in bnad_start_xmit()
2785 prod = tcb->producer_index; in bnad_start_xmit()
2787 unmap_q = tcb->unmap_q; in bnad_start_xmit()
2793 if (unlikely(!test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))) { in bnad_start_xmit()
2809 if (unlikely(wis > BNA_QE_FREE_CNT(tcb, q_depth))) { in bnad_start_xmit()
2810 if ((*tcb->hw_consumer_index != tcb->consumer_index) && in bnad_start_xmit()
2811 !test_and_set_bit(BNAD_TXQ_FREE_SENT, &tcb->flags)) { in bnad_start_xmit()
2813 sent = bnad_txcmpl_process(bnad, tcb); in bnad_start_xmit()
2814 if (likely(test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))) in bnad_start_xmit()
2815 bna_ib_ack(tcb->i_dbell, sent); in bnad_start_xmit()
2817 clear_bit(BNAD_TXQ_FREE_SENT, &tcb->flags); in bnad_start_xmit()
2829 if (likely(wis > BNA_QE_FREE_CNT(tcb, q_depth))) { in bnad_start_xmit()
2838 txqent = &((struct bna_txq_entry *)tcb->sw_q)[prod]; in bnad_start_xmit()
2842 if (bnad_txq_wi_prepare(bnad, tcb, skb, txqent)) { in bnad_start_xmit()
2868 tcb->producer_index); in bnad_start_xmit()
2880 txqent = &((struct bna_txq_entry *)tcb->sw_q)[prod]; in bnad_start_xmit()
2897 bnad_tx_buff_unmap(bnad, unmap_q, q_depth, tcb->producer_index); in bnad_start_xmit()
2904 tcb->producer_index = prod; in bnad_start_xmit()
2908 if (unlikely(!test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))) in bnad_start_xmit()
2911 bna_txq_prod_indx_doorbell(tcb); in bnad_start_xmit()
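
The bnad_start_xmit() matches trace the producer side end to end: read the producer index, check free space against the ring depth, attempt an opportunistic reclaim (guarded by the same FREE_SENT bit modelled earlier) when space is short, fill entries in sw_q, publish the new producer index, and ring the doorbell only if the queue is still started. The sketch below models that ordering in plain C on a power-of-two ring; the doorbell is stubbed out, the FREE_SENT handshake is omitted, and the space thresholds are simplified relative to the driver.

    #include <stdbool.h>
    #include <stdint.h>

    struct tx_ring_model {
        uint32_t q_depth;           /* power of two */
        uint32_t prod;              /* software producer index */
        uint32_t cons;              /* software consumer index */
        uint32_t hw_cons;           /* consumer index reported by the NIC */
        bool     started;           /* models BNAD_TXQ_TX_STARTED */
        void   **sw_q;              /* one slot per descriptor */
    };

    static uint32_t ring_free(const struct tx_ring_model *r)
    {
        return (r->q_depth - 1) - ((r->prod - r->cons) & (r->q_depth - 1));
    }

    static void doorbell(struct tx_ring_model *r) { (void)r; /* MMIO write */ }

    /* Returns false when the packet cannot be queued (stalled or full). */
    bool xmit_model(struct tx_ring_model *r, void *pkt, uint32_t wis_needed)
    {
        uint32_t i;

        if (!r->started)
            return false;           /* queue stalled: drop, as the driver does */

        if (wis_needed > ring_free(r) && r->hw_cons != r->cons) {
            /* Opportunistic reclaim of already-completed entries. */
            r->cons = r->hw_cons;
        }
        if (wis_needed > ring_free(r))
            return false;           /* still full: stop the netdev queue */

        for (i = 0; i < wis_needed; i++)
            r->sw_q[(r->prod + i) & (r->q_depth - 1)] = pkt;

        r->prod = (r->prod + wis_needed) & (r->q_depth - 1);

        if (r->started)
            doorbell(r);            /* publish the new producer index */
        return true;
    }
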