Lines Matching refs:rcb

270 bnad_rxq_alloc_uninit(struct bnad *bnad, struct bna_rcb *rcb)  in bnad_rxq_alloc_uninit()  argument
272 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_alloc_uninit()
282 bnad_rxq_alloc_init(struct bnad *bnad, struct bna_rcb *rcb) in bnad_rxq_alloc_init() argument
284 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_alloc_init()
287 bnad_rxq_alloc_uninit(bnad, rcb); in bnad_rxq_alloc_init()
292 if (bna_is_small_rxq(rcb->id)) { in bnad_rxq_alloc_init()
294 unmap_q->map_size = rcb->rxq->buffer_size; in bnad_rxq_alloc_init()
298 (rcb->rxq->buffer_size > 2048) ? in bnad_rxq_alloc_init()
340 bnad_rxq_cleanup(struct bnad *bnad, struct bna_rcb *rcb) in bnad_rxq_cleanup() argument
342 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_cleanup()
345 for (i = 0; i < rcb->q_depth; i++) { in bnad_rxq_cleanup()
353 bnad_rxq_alloc_uninit(bnad, rcb); in bnad_rxq_cleanup()
357 bnad_rxq_refill_page(struct bnad *bnad, struct bna_rcb *rcb, u32 nalloc) in bnad_rxq_refill_page() argument
360 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_refill_page()
367 prod = rcb->producer_index; in bnad_rxq_refill_page()
368 q_depth = rcb->q_depth; in bnad_rxq_refill_page()
389 rcb->rxq->rxbuf_alloc_failed++; in bnad_rxq_refill_page()
407 rxent = &((struct bna_rxq_entry *)rcb->sw_q)[prod]; in bnad_rxq_refill_page()
415 rcb->producer_index = prod; in bnad_rxq_refill_page()
417 if (likely(test_bit(BNAD_RXQ_POST_OK, &rcb->flags))) in bnad_rxq_refill_page()
418 bna_rxq_prod_indx_doorbell(rcb); in bnad_rxq_refill_page()
425 bnad_rxq_refill_skb(struct bnad *bnad, struct bna_rcb *rcb, u32 nalloc) in bnad_rxq_refill_skb() argument
428 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_refill_skb()
434 buff_sz = rcb->rxq->buffer_size; in bnad_rxq_refill_skb()
435 prod = rcb->producer_index; in bnad_rxq_refill_skb()
436 q_depth = rcb->q_depth; in bnad_rxq_refill_skb()
446 rcb->rxq->rxbuf_alloc_failed++; in bnad_rxq_refill_skb()
456 rxent = &((struct bna_rxq_entry *)rcb->sw_q)[prod]; in bnad_rxq_refill_skb()
464 rcb->producer_index = prod; in bnad_rxq_refill_skb()
466 if (likely(test_bit(BNAD_RXQ_POST_OK, &rcb->flags))) in bnad_rxq_refill_skb()
467 bna_rxq_prod_indx_doorbell(rcb); in bnad_rxq_refill_skb()
474 bnad_rxq_post(struct bnad *bnad, struct bna_rcb *rcb) in bnad_rxq_post() argument
476 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_post()
479 to_alloc = BNA_QE_FREE_CNT(rcb, rcb->q_depth); in bnad_rxq_post()
484 bnad_rxq_refill_page(bnad, rcb, to_alloc); in bnad_rxq_post()
486 bnad_rxq_refill_skb(bnad, rcb, to_alloc); in bnad_rxq_post()
552 struct bna_rcb *rcb = NULL; in bnad_cq_process() local
572 rcb = ccb->rcb[1]; in bnad_cq_process()
574 rcb = ccb->rcb[0]; in bnad_cq_process()
576 unmap_q = rcb->unmap_q; in bnad_cq_process()
577 unmap = &unmap_q->unmap[rcb->consumer_index]; in bnad_cq_process()
587 rcb->rxq->rx_packets_with_error++; in bnad_cq_process()
609 rcb->rxq->rx_packets++; in bnad_cq_process()
610 rcb->rxq->rx_bytes += length; in bnad_cq_process()
622 BNA_QE_INDX_INC(rcb->consumer_index, rcb->q_depth); in bnad_cq_process()
628 if (likely(test_bit(BNAD_RXQ_STARTED, &ccb->rcb[0]->flags))) in bnad_cq_process()
631 bnad_rxq_post(bnad, ccb->rcb[0]); in bnad_cq_process()
632 if (ccb->rcb[1]) in bnad_cq_process()
633 bnad_rxq_post(bnad, ccb->rcb[1]); in bnad_cq_process()
1072 clear_bit(BNAD_RXQ_POST_OK, &ccb->rcb[0]->flags); in bnad_cb_rx_stall()
1074 if (ccb->rcb[1]) in bnad_cb_rx_stall()
1075 clear_bit(BNAD_RXQ_POST_OK, &ccb->rcb[1]->flags); in bnad_cb_rx_stall()
1107 bnad_rxq_cleanup(bnad, rx_ctrl->ccb->rcb[0]); in bnad_rx_cleanup()
1108 if (rx_ctrl->ccb->rcb[1]) in bnad_rx_cleanup()
1109 bnad_rxq_cleanup(bnad, rx_ctrl->ccb->rcb[1]); in bnad_rx_cleanup()
1131 clear_bit(BNAD_RXQ_STARTED, &ccb->rcb[0]->flags); in bnad_cb_rx_cleanup()
1133 if (ccb->rcb[1]) in bnad_cb_rx_cleanup()
1134 clear_bit(BNAD_RXQ_STARTED, &ccb->rcb[1]->flags); in bnad_cb_rx_cleanup()
1145 struct bna_rcb *rcb; in bnad_cb_rx_post() local
1158 rcb = ccb->rcb[j]; in bnad_cb_rx_post()
1159 if (!rcb) in bnad_cb_rx_post()
1162 bnad_rxq_alloc_init(bnad, rcb); in bnad_cb_rx_post()
1163 set_bit(BNAD_RXQ_STARTED, &rcb->flags); in bnad_cb_rx_post()
1164 set_bit(BNAD_RXQ_POST_OK, &rcb->flags); in bnad_cb_rx_post()
1165 bnad_rxq_post(bnad, rcb); in bnad_cb_rx_post()
2217 rx_ctrl[j].ccb->rcb[0]->rxq->rx_packets; in bnad_netdev_qstats_fill()
2219 rx_ctrl[j].ccb->rcb[0]->rxq->rx_bytes; in bnad_netdev_qstats_fill()
2220 if (bnad->rx_info[i].rx_ctrl[j].ccb->rcb[1] && in bnad_netdev_qstats_fill()
2222 rcb[1]->rxq) { in bnad_netdev_qstats_fill()
2225 ccb->rcb[1]->rxq->rx_packets; in bnad_netdev_qstats_fill()
2228 ccb->rcb[1]->rxq->rx_bytes; in bnad_netdev_qstats_fill()
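
Read together, the references above trace the RX buffer refill path: bnad_rxq_post() computes how many ring entries are free (line 479), refills them through either the page path or the skb path (lines 484/486), and each refill routine advances rcb->producer_index and rings the doorbell only while BNAD_RXQ_POST_OK is set (lines 415-418 and 464-467). The sketch below is a hedged reconstruction of that control flow from the listed fragments only; the refill-threshold check and the buffer-type selector are assumptions, not verbatim driver code.

    /*
     * Hedged sketch of the refill path traced by the refs above.
     * Only BNA_QE_FREE_CNT(), the two refill calls, and the
     * producer-index/doorbell behaviour are taken from the listing;
     * the threshold and selector below are assumed for illustration.
     */
    static void
    bnad_rxq_post(struct bnad *bnad, struct bna_rcb *rcb)
    {
            struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q;
            u32 to_alloc;

            /* line 479: number of free entries in the receive ring */
            to_alloc = BNA_QE_FREE_CNT(rcb, rcb->q_depth);

            /* assumed: skip the refill when only a few entries are free */
            if (to_alloc < BNAD_RXQ_REFILL_THRESHOLD)   /* assumed threshold */
                    return;

            /* lines 484/486: large buffers are refilled page-wise,
             * smaller ones with pre-allocated skbs; the exact selector
             * lives in unmap_q and is assumed here.
             */
            if (unmap_q->alloc_order)                   /* assumed selector */
                    bnad_rxq_refill_page(bnad, rcb, to_alloc);
            else
                    bnad_rxq_refill_skb(bnad, rcb, to_alloc);

            /* Both refill routines then store the new rcb->producer_index
             * and call bna_rxq_prod_indx_doorbell(rcb) only if
             * BNAD_RXQ_POST_OK is still set (lines 415-418, 464-467).
             */
    }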