Lines Matching refs:fp (references to the per-queue fastpath pointer fp in the bnx2x driver, bnx2x_cmn.c)

47 struct bnx2x_fastpath *from_fp = &bp->fp[from]; in bnx2x_move_fp()
48 struct bnx2x_fastpath *to_fp = &bp->fp[to]; in bnx2x_move_fp()
131 struct bnx2x_fastpath *fp = &bp->fp[i]; in bnx2x_shrink_eth_fp() local
134 memcpy(&bp->bnx2x_txq[new_idx], fp->txdata_ptr[cos], in bnx2x_shrink_eth_fp()
136 fp->txdata_ptr[cos] = &bp->bnx2x_txq[new_idx]; in bnx2x_shrink_eth_fp()
284 static inline void bnx2x_update_last_max_sge(struct bnx2x_fastpath *fp, in bnx2x_update_last_max_sge() argument
287 u16 last_max = fp->last_max_sge; in bnx2x_update_last_max_sge()
290 fp->last_max_sge = idx; in bnx2x_update_last_max_sge()
293 static inline void bnx2x_update_sge_prod(struct bnx2x_fastpath *fp, in bnx2x_update_sge_prod() argument
297 struct bnx2x *bp = fp->bp; in bnx2x_update_sge_prod()
307 BIT_VEC64_CLEAR_BIT(fp->sge_mask, in bnx2x_update_sge_prod()
314 prefetch((void *)(fp->sge_mask)); in bnx2x_update_sge_prod()
315 bnx2x_update_last_max_sge(fp, in bnx2x_update_sge_prod()
318 last_max = RX_SGE(fp->last_max_sge); in bnx2x_update_sge_prod()
320 first_elem = RX_SGE(fp->rx_sge_prod) >> BIT_VEC64_ELEM_SHIFT; in bnx2x_update_sge_prod()
328 if (likely(fp->sge_mask[i])) in bnx2x_update_sge_prod()
331 fp->sge_mask[i] = BIT_VEC64_ELEM_ONE_MASK; in bnx2x_update_sge_prod()
336 fp->rx_sge_prod += delta; in bnx2x_update_sge_prod()
338 bnx2x_clear_sge_mask_next_elems(fp); in bnx2x_update_sge_prod()
343 fp->last_max_sge, fp->rx_sge_prod); in bnx2x_update_sge_prod()
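
The bnx2x_update_sge_prod() lines above manipulate fp->sge_mask, a bit vector stored as an array of 64-bit elements: each bit tracks one SGE slot, completed slots are cleared (BIT_VEC64_CLEAR_BIT), and a fully cleared element lets rx_sge_prod advance 64 slots at once (the loop around file line 328). A minimal userspace C sketch of that layout follows; the names and sizes are illustrative, not the driver's actual BIT_VEC64 macros.

    #include <stdint.h>
    #include <stdio.h>

    #define ELEM_SHIFT 6                      /* 64 bits per element: idx >> 6 */
    #define ELEM_MASK  ((uint64_t)63)         /* bit position within an element */
    #define ELEM_ONE   (~(uint64_t)0)         /* all ones: no slot completed yet */

    static inline void vec64_clear_bit(uint64_t *vec, unsigned int idx)
    {
            vec[idx >> ELEM_SHIFT] &= ~((uint64_t)1 << (idx & ELEM_MASK));
    }

    static inline int vec64_test_bit(const uint64_t *vec, unsigned int idx)
    {
            return (vec[idx >> ELEM_SHIFT] >> (idx & ELEM_MASK)) & 1;
    }

    int main(void)
    {
            uint64_t mask[4] = { ELEM_ONE, ELEM_ONE, ELEM_ONE, ELEM_ONE };

            vec64_clear_bit(mask, 70);        /* mark SGE #70 consumed */
            printf("bit 70: %d, bit 71: %d\n",
                   vec64_test_bit(mask, 70), vec64_test_bit(mask, 71));
            return 0;
    }
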
367 static void bnx2x_tpa_start(struct bnx2x_fastpath *fp, u16 queue, in bnx2x_tpa_start() argument
371 struct bnx2x *bp = fp->bp; in bnx2x_tpa_start()
372 struct sw_rx_bd *cons_rx_buf = &fp->rx_buf_ring[cons]; in bnx2x_tpa_start()
373 struct sw_rx_bd *prod_rx_buf = &fp->rx_buf_ring[prod]; in bnx2x_tpa_start()
374 struct eth_rx_bd *prod_bd = &fp->rx_desc_ring[prod]; in bnx2x_tpa_start()
376 struct bnx2x_agg_info *tpa_info = &fp->tpa_info[queue]; in bnx2x_tpa_start()
386 fp->rx_buf_size, DMA_FROM_DEVICE); in bnx2x_tpa_start()
395 bnx2x_reuse_rx_data(fp, cons, prod); in bnx2x_tpa_start()
418 if (fp->mode == TPA_MODE_GRO) { in bnx2x_tpa_start()
425 fp->tpa_queue_used |= (1 << queue); in bnx2x_tpa_start()
431 fp->tpa_queue_used); in bnx2x_tpa_start()
488 struct bnx2x_fastpath *fp, u16 index) in bnx2x_alloc_rx_sge() argument
491 struct sw_rx_page *sw_buf = &fp->rx_page_ring[index]; in bnx2x_alloc_rx_sge()
492 struct eth_rx_sge *sge = &fp->rx_sge_ring[index]; in bnx2x_alloc_rx_sge()
517 static int bnx2x_fill_frag_skb(struct bnx2x *bp, struct bnx2x_fastpath *fp, in bnx2x_fill_frag_skb() argument
532 if (fp->mode == TPA_MODE_GRO) { in bnx2x_fill_frag_skb()
559 if (fp->mode == TPA_MODE_GRO) in bnx2x_fill_frag_skb()
564 rx_pg = &fp->rx_page_ring[sge_idx]; in bnx2x_fill_frag_skb()
569 err = bnx2x_alloc_rx_sge(bp, fp, sge_idx); in bnx2x_fill_frag_skb()
571 bnx2x_fp_qstats(bp, fp)->rx_skb_alloc_failed++; in bnx2x_fill_frag_skb()
580 if (fp->mode == TPA_MODE_LRO) in bnx2x_fill_frag_skb()
605 static void bnx2x_frag_free(const struct bnx2x_fastpath *fp, void *data) in bnx2x_frag_free() argument
607 if (fp->rx_frag_size) in bnx2x_frag_free()
613 static void *bnx2x_frag_alloc(const struct bnx2x_fastpath *fp) in bnx2x_frag_alloc() argument
615 if (fp->rx_frag_size) in bnx2x_frag_alloc()
616 return netdev_alloc_frag(fp->rx_frag_size); in bnx2x_frag_alloc()
618 return kmalloc(fp->rx_buf_size + NET_SKB_PAD, GFP_ATOMIC); in bnx2x_frag_alloc()
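
bnx2x_frag_alloc() and bnx2x_frag_free() above pair two allocators: the page-frag allocator when the receive buffer fits in a page (fp->rx_frag_size non-zero) and plain kmalloc() otherwise; the sizing test around file lines 1928-1931 sets rx_frag_size to rx_buf_size + NET_SKB_PAD when that fits in PAGE_SIZE, else 0. A hedged kernel-C sketch of the pairing, with hypothetical demo_* names (this is not the driver's exact free path):

    #include <linux/netdevice.h>
    #include <linux/slab.h>
    #include <linux/mm.h>

    struct demo_ring {                       /* hypothetical stand-in for bnx2x_fastpath */
            unsigned int rx_buf_size;
            unsigned int rx_frag_size;       /* 0 => buffer too big for a page frag */
    };

    static void *demo_buf_alloc(const struct demo_ring *r)
    {
            if (r->rx_frag_size)
                    return netdev_alloc_frag(r->rx_frag_size);
            return kmalloc(r->rx_buf_size + NET_SKB_PAD, GFP_ATOMIC);
    }

    static void demo_buf_free(const struct demo_ring *r, void *data)
    {
            if (r->rx_frag_size)
                    put_page(virt_to_head_page(data));   /* frag path */
            else
                    kfree(data);
    }

Either way the buffer is laid out so build_skb() (file lines 705 and 968) can wrap it into an skb without copying the payload.
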
655 static void bnx2x_gro_receive(struct bnx2x *bp, struct bnx2x_fastpath *fp, in bnx2x_gro_receive() argument
673 napi_gro_receive(&fp->napi, skb); in bnx2x_gro_receive()
676 static void bnx2x_tpa_stop(struct bnx2x *bp, struct bnx2x_fastpath *fp, in bnx2x_tpa_stop() argument
698 new_data = bnx2x_frag_alloc(fp); in bnx2x_tpa_stop()
703 fp->rx_buf_size, DMA_FROM_DEVICE); in bnx2x_tpa_stop()
705 skb = build_skb(data, fp->rx_frag_size); in bnx2x_tpa_stop()
709 if (pad + len > fp->rx_buf_size) { in bnx2x_tpa_stop()
711 pad, len, fp->rx_buf_size); in bnx2x_tpa_stop()
725 if (!bnx2x_fill_frag_skb(bp, fp, tpa_info, pages, in bnx2x_tpa_stop()
729 bnx2x_gro_receive(bp, fp, skb); in bnx2x_tpa_stop()
742 bnx2x_frag_free(fp, new_data); in bnx2x_tpa_stop()
747 bnx2x_fp_stats(bp, fp)->eth_q_stats.rx_skb_alloc_failed++; in bnx2x_tpa_stop()
751 struct bnx2x_fastpath *fp, u16 index) in bnx2x_alloc_rx_data() argument
754 struct sw_rx_bd *rx_buf = &fp->rx_buf_ring[index]; in bnx2x_alloc_rx_data()
755 struct eth_rx_bd *rx_bd = &fp->rx_desc_ring[index]; in bnx2x_alloc_rx_data()
758 data = bnx2x_frag_alloc(fp); in bnx2x_alloc_rx_data()
763 fp->rx_buf_size, in bnx2x_alloc_rx_data()
766 bnx2x_frag_free(fp, data); in bnx2x_alloc_rx_data()
782 struct bnx2x_fastpath *fp, in bnx2x_csum_validate() argument
804 int bnx2x_rx_int(struct bnx2x_fastpath *fp, int budget) in bnx2x_rx_int() argument
806 struct bnx2x *bp = fp->bp; in bnx2x_rx_int()
818 hw_comp_cons = le16_to_cpu(*fp->rx_cons_sb); in bnx2x_rx_int()
822 bd_cons = fp->rx_bd_cons; in bnx2x_rx_int()
823 bd_prod = fp->rx_bd_prod; in bnx2x_rx_int()
825 sw_comp_cons = fp->rx_comp_cons; in bnx2x_rx_int()
826 sw_comp_prod = fp->rx_comp_prod; in bnx2x_rx_int()
835 fp->index, hw_comp_cons, sw_comp_cons); in bnx2x_rx_int()
857 cqe = &fp->rx_comp_ring[comp_ring_cons]; in bnx2x_rx_int()
872 bnx2x_sp_event(fp, cqe); in bnx2x_rx_int()
876 rx_buf = &fp->rx_buf_ring[bd_cons]; in bnx2x_rx_int()
884 if (fp->disable_tpa && in bnx2x_rx_int()
897 bnx2x_tpa_start(fp, queue, in bnx2x_rx_int()
905 tpa_info = &fp->tpa_info[queue]; in bnx2x_rx_int()
913 if (fp->mode == TPA_MODE_GRO) in bnx2x_rx_int()
920 bnx2x_tpa_stop(bp, fp, tpa_info, pages, in bnx2x_rx_int()
927 bnx2x_update_sge_prod(fp, pages, &cqe->end_agg_cqe); in bnx2x_rx_int()
944 bnx2x_fp_qstats(bp, fp)->rx_err_discard_pkt++; in bnx2x_rx_int()
957 bnx2x_fp_qstats(bp, fp)->rx_skb_alloc_failed++; in bnx2x_rx_int()
961 bnx2x_reuse_rx_data(fp, bd_cons, bd_prod); in bnx2x_rx_int()
963 if (likely(bnx2x_alloc_rx_data(bp, fp, bd_prod) == 0)) { in bnx2x_rx_int()
966 fp->rx_buf_size, in bnx2x_rx_int()
968 skb = build_skb(data, fp->rx_frag_size); in bnx2x_rx_int()
970 bnx2x_frag_free(fp, data); in bnx2x_rx_int()
971 bnx2x_fp_qstats(bp, fp)-> in bnx2x_rx_int()
979 bnx2x_fp_qstats(bp, fp)->rx_skb_alloc_failed++; in bnx2x_rx_int()
981 bnx2x_reuse_rx_data(fp, bd_cons, bd_prod); in bnx2x_rx_int()
996 bnx2x_csum_validate(skb, cqe, fp, in bnx2x_rx_int()
997 bnx2x_fp_qstats(bp, fp)); in bnx2x_rx_int()
999 skb_record_rx_queue(skb, fp->rx_queue); in bnx2x_rx_int()
1005 napi_gro_receive(&fp->napi, skb); in bnx2x_rx_int()
1023 fp->rx_bd_cons = bd_cons; in bnx2x_rx_int()
1024 fp->rx_bd_prod = bd_prod_fw; in bnx2x_rx_int()
1025 fp->rx_comp_cons = sw_comp_cons; in bnx2x_rx_int()
1026 fp->rx_comp_prod = sw_comp_prod; in bnx2x_rx_int()
1029 bnx2x_update_rx_prod(bp, fp, bd_prod_fw, sw_comp_prod, in bnx2x_rx_int()
1030 fp->rx_sge_prod); in bnx2x_rx_int()
1032 fp->rx_pkt += rx_pkt; in bnx2x_rx_int()
1033 fp->rx_calls++; in bnx2x_rx_int()
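
bnx2x_rx_int() is the completion-ring consumer: it snapshots the completion index that firmware DMAs into the status block (*fp->rx_cons_sb), walks the software consumer up to it within the NAPI budget, and publishes the updated indices back to hardware. A condensed sketch of that skeleton; the demo_* helpers and trimmed struct are hypothetical stand-ins, and the real function additionally handles slow-path CQEs (bnx2x_sp_event, file line 872) and the TPA start/stop aggregation cases:

    #include <linux/netdevice.h>

    struct demo_fastpath {                   /* hypothetical, heavily trimmed */
            struct napi_struct napi;
            __le16 *rx_cons_sb;              /* hw-updated completion index */
            u16 rx_comp_cons;
    };

    struct sk_buff *demo_build_one_skb(struct demo_fastpath *fp, u16 cons);
    u16 demo_next_cqe(u16 cons);
    void demo_update_rx_prod(struct demo_fastpath *fp);

    static int demo_rx_int(struct demo_fastpath *fp, int budget)
    {
            u16 hw_cons = le16_to_cpu(*fp->rx_cons_sb);
            u16 sw_cons = fp->rx_comp_cons;
            int rx_pkt = 0;

            while (sw_cons != hw_cons && rx_pkt < budget) {
                    struct sk_buff *skb = demo_build_one_skb(fp, sw_cons);

                    if (skb)
                            napi_gro_receive(&fp->napi, skb);
                    sw_cons = demo_next_cqe(sw_cons);
                    rx_pkt++;
            }

            fp->rx_comp_cons = sw_cons;
            demo_update_rx_prod(fp);         /* hand freed entries back to hw */
            return rx_pkt;
    }
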
1040 struct bnx2x_fastpath *fp = fp_cookie; in bnx2x_msix_fp_int() local
1041 struct bnx2x *bp = fp->bp; in bnx2x_msix_fp_int()
1046 fp->index, fp->fw_sb_id, fp->igu_sb_id); in bnx2x_msix_fp_int()
1048 bnx2x_ack_sb(bp, fp->igu_sb_id, USTORM_ID, 0, IGU_INT_DISABLE, 0); in bnx2x_msix_fp_int()
1056 prefetch(fp->rx_cons_sb); in bnx2x_msix_fp_int()
1058 for_each_cos_in_tx_queue(fp, cos) in bnx2x_msix_fp_int()
1059 prefetch(fp->txdata_ptr[cos]->tx_cons_sb); in bnx2x_msix_fp_int()
1061 prefetch(&fp->sb_running_index[SM_RX_ID]); in bnx2x_msix_fp_int()
1062 napi_schedule(&bnx2x_fp(bp, fp->index, napi)); in bnx2x_msix_fp_int()
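
bnx2x_msix_fp_int() is the standard per-queue MSI-X pattern: the hard-irq handler masks the queue's status-block interrupt (bnx2x_ack_sb with IGU_INT_DISABLE, file line 1048), prefetches the consumer indices NAPI will touch, and schedules NAPI; all real work runs in softirq context. A minimal sketch with the ack/prefetch steps reduced to a comment:

    #include <linux/interrupt.h>
    #include <linux/netdevice.h>

    struct demo_queue {                      /* hypothetical per-queue context */
            struct napi_struct napi;
    };

    static irqreturn_t demo_msix_handler(int irq, void *cookie)
    {
            struct demo_queue *q = cookie;   /* dev_id passed to request_irq() */

            /* real driver: mask this queue's status-block interrupt and
             * prefetch the rx/tx consumer indices before scheduling */
            napi_schedule(&q->napi);
            return IRQ_HANDLED;
    }

Registration follows the loop around file lines 1663-1668: one vector per fastpath, with the fastpath pointer itself as the request_irq() cookie.
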
1233 static void bnx2x_set_next_page_sgl(struct bnx2x_fastpath *fp) in bnx2x_set_next_page_sgl() argument
1240 sge = &fp->rx_sge_ring[RX_SGE_CNT * i - 2]; in bnx2x_set_next_page_sgl()
1242 cpu_to_le32(U64_HI(fp->rx_sge_mapping + in bnx2x_set_next_page_sgl()
1246 cpu_to_le32(U64_LO(fp->rx_sge_mapping + in bnx2x_set_next_page_sgl()
1252 struct bnx2x_fastpath *fp, int last) in bnx2x_free_tpa_pool() argument
1257 struct bnx2x_agg_info *tpa_info = &fp->tpa_info[i]; in bnx2x_free_tpa_pool()
1268 fp->rx_buf_size, DMA_FROM_DEVICE); in bnx2x_free_tpa_pool()
1269 bnx2x_frag_free(fp, data); in bnx2x_free_tpa_pool()
1279 struct bnx2x_fastpath *fp = &bp->fp[j]; in bnx2x_init_rx_rings_cnic() local
1281 fp->rx_bd_cons = 0; in bnx2x_init_rx_rings_cnic()
1288 bnx2x_update_rx_prod(bp, fp, fp->rx_bd_prod, fp->rx_comp_prod, in bnx2x_init_rx_rings_cnic()
1289 fp->rx_sge_prod); in bnx2x_init_rx_rings_cnic()
1301 struct bnx2x_fastpath *fp = &bp->fp[j]; in bnx2x_init_rx_rings() local
1304 "mtu %d rx_buf_size %d\n", bp->dev->mtu, fp->rx_buf_size); in bnx2x_init_rx_rings()
1306 if (!fp->disable_tpa) { in bnx2x_init_rx_rings()
1310 &fp->tpa_info[i]; in bnx2x_init_rx_rings()
1314 first_buf->data = bnx2x_frag_alloc(fp); in bnx2x_init_rx_rings()
1318 bnx2x_free_tpa_pool(bp, fp, i); in bnx2x_init_rx_rings()
1319 fp->disable_tpa = 1; in bnx2x_init_rx_rings()
1327 bnx2x_set_next_page_sgl(fp); in bnx2x_init_rx_rings()
1330 bnx2x_init_sge_ring_bit_mask(fp); in bnx2x_init_rx_rings()
1336 if (bnx2x_alloc_rx_sge(bp, fp, ring_prod) < 0) { in bnx2x_init_rx_rings()
1342 bnx2x_free_rx_sge_range(bp, fp, in bnx2x_init_rx_rings()
1344 bnx2x_free_tpa_pool(bp, fp, in bnx2x_init_rx_rings()
1346 fp->disable_tpa = 1; in bnx2x_init_rx_rings()
1353 fp->rx_sge_prod = ring_prod; in bnx2x_init_rx_rings()
1358 struct bnx2x_fastpath *fp = &bp->fp[j]; in bnx2x_init_rx_rings() local
1360 fp->rx_bd_cons = 0; in bnx2x_init_rx_rings()
1367 bnx2x_update_rx_prod(bp, fp, fp->rx_bd_prod, fp->rx_comp_prod, in bnx2x_init_rx_rings()
1368 fp->rx_sge_prod); in bnx2x_init_rx_rings()
1376 U64_LO(fp->rx_comp_mapping)); in bnx2x_init_rx_rings()
1379 U64_HI(fp->rx_comp_mapping)); in bnx2x_init_rx_rings()
1384 static void bnx2x_free_tx_skbs_queue(struct bnx2x_fastpath *fp) in bnx2x_free_tx_skbs_queue() argument
1387 struct bnx2x *bp = fp->bp; in bnx2x_free_tx_skbs_queue()
1389 for_each_cos_in_tx_queue(fp, cos) { in bnx2x_free_tx_skbs_queue()
1390 struct bnx2x_fp_txdata *txdata = fp->txdata_ptr[cos]; in bnx2x_free_tx_skbs_queue()
1413 bnx2x_free_tx_skbs_queue(&bp->fp[i]); in bnx2x_free_tx_skbs_cnic()
1422 bnx2x_free_tx_skbs_queue(&bp->fp[i]); in bnx2x_free_tx_skbs()
1426 static void bnx2x_free_rx_bds(struct bnx2x_fastpath *fp) in bnx2x_free_rx_bds() argument
1428 struct bnx2x *bp = fp->bp; in bnx2x_free_rx_bds()
1432 if (fp->rx_buf_ring == NULL) in bnx2x_free_rx_bds()
1436 struct sw_rx_bd *rx_buf = &fp->rx_buf_ring[i]; in bnx2x_free_rx_bds()
1443 fp->rx_buf_size, DMA_FROM_DEVICE); in bnx2x_free_rx_bds()
1446 bnx2x_frag_free(fp, data); in bnx2x_free_rx_bds()
1455 bnx2x_free_rx_bds(&bp->fp[j]); in bnx2x_free_rx_skbs_cnic()
1464 struct bnx2x_fastpath *fp = &bp->fp[j]; in bnx2x_free_rx_skbs() local
1466 bnx2x_free_rx_bds(fp); in bnx2x_free_rx_skbs()
1468 if (!fp->disable_tpa) in bnx2x_free_rx_skbs()
1469 bnx2x_free_tpa_pool(bp, fp, MAX_AGG_QS(bp)); in bnx2x_free_rx_skbs()
1535 free_irq(bp->msix_table[offset++].vector, &bp->fp[i]); in bnx2x_free_msix_irqs()
1663 struct bnx2x_fastpath *fp = &bp->fp[i]; in bnx2x_req_msix_irqs() local
1664 snprintf(fp->name, sizeof(fp->name), "%s-fp-%d", in bnx2x_req_msix_irqs()
1668 bnx2x_msix_fp_int, 0, fp->name, fp); in bnx2x_req_msix_irqs()
1908 struct bnx2x_fastpath *fp = &bp->fp[i]; in bnx2x_set_rx_buf_size() local
1922 fp->rx_buf_size = BNX2X_FW_RX_ALIGN_START + in bnx2x_set_rx_buf_size()
1928 if (fp->rx_buf_size + NET_SKB_PAD <= PAGE_SIZE) in bnx2x_set_rx_buf_size()
1929 fp->rx_frag_size = fp->rx_buf_size + NET_SKB_PAD; in bnx2x_set_rx_buf_size()
1931 fp->rx_frag_size = 0; in bnx2x_set_rx_buf_size()
1945 bp->fp->cl_id + in bnx2x_init_rss_pf()
2318 struct bnx2x_fastpath *fp = &bp->fp[index]; in bnx2x_bz_fp() local
2321 struct napi_struct orig_napi = fp->napi; in bnx2x_bz_fp()
2322 struct bnx2x_agg_info *orig_tpa_info = fp->tpa_info; in bnx2x_bz_fp()
2324 if (fp->tpa_info) in bnx2x_bz_fp()
2325 memset(fp->tpa_info, 0, ETH_MAX_AGGREGATION_QUEUES_E1H_E2 * in bnx2x_bz_fp()
2327 memset(fp, 0, sizeof(*fp)); in bnx2x_bz_fp()
2330 fp->napi = orig_napi; in bnx2x_bz_fp()
2331 fp->tpa_info = orig_tpa_info; in bnx2x_bz_fp()
2332 fp->bp = bp; in bnx2x_bz_fp()
2333 fp->index = index; in bnx2x_bz_fp()
2334 if (IS_ETH_FP(fp)) in bnx2x_bz_fp()
2335 fp->max_cos = bp->max_cos; in bnx2x_bz_fp()
2338 fp->max_cos = 1; in bnx2x_bz_fp()
2341 if (IS_FCOE_FP(fp)) in bnx2x_bz_fp()
2342 fp->txdata_ptr[0] = &bp->bnx2x_txq[FCOE_TXQ_IDX(bp)]; in bnx2x_bz_fp()
2343 if (IS_ETH_FP(fp)) in bnx2x_bz_fp()
2344 for_each_cos_in_tx_queue(fp, cos) in bnx2x_bz_fp()
2345 fp->txdata_ptr[cos] = &bp->bnx2x_txq[cos * in bnx2x_bz_fp()
2352 fp->disable_tpa = !(bp->flags & TPA_ENABLE_FLAG || in bnx2x_bz_fp()
2356 fp->mode = TPA_MODE_LRO; in bnx2x_bz_fp()
2358 fp->mode = TPA_MODE_GRO; in bnx2x_bz_fp()
2361 if (IS_FCOE_FP(fp)) in bnx2x_bz_fp()
2362 fp->disable_tpa = 1; in bnx2x_bz_fp()
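
bnx2x_bz_fp() uses a "zero but preserve" idiom: memset() the whole fastpath structure, but first save the members that must survive, i.e. the napi struct already registered with the network stack and the separately allocated tpa_info array, then restore them and re-derive the rest. A simplified sketch (struct layout hypothetical; the real function also re-derives max_cos, txdata_ptr, and the TPA mode):

    #include <linux/netdevice.h>
    #include <linux/string.h>

    struct demo_agg_info;                    /* opaque, separately allocated */
    struct demo_bp;

    struct demo_fastpath {
            struct demo_bp *bp;
            int index;
            struct napi_struct napi;
            struct demo_agg_info *tpa_info;
    };

    struct demo_bp {
            struct demo_fastpath *fp;
    };

    static void demo_bz_fp(struct demo_bp *bp, int index)
    {
            struct demo_fastpath *fp = &bp->fp[index];
            struct napi_struct orig_napi = fp->napi;
            struct demo_agg_info *orig_tpa = fp->tpa_info;

            memset(fp, 0, sizeof(*fp));      /* wipe everything... */

            fp->napi = orig_napi;            /* ...then restore what must survive */
            fp->tpa_info = orig_tpa;
            fp->bp = bp;
            fp->index = index;
    }
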
2411 rc = bnx2x_setup_queue(bp, &bp->fp[i], 0); in bnx2x_load_cnic()
2639 rc = bnx2x_setup_queue(bp, &bp->fp[i], 0); in bnx2x_nic_load()
2670 rc = bnx2x_vfpf_config_mac(bp, bp->dev->dev_addr, bp->fp->index, in bnx2x_nic_load()
2764 bnx2x_free_rx_sge_range(bp, bp->fp + i, NUM_RX_SGE); in bnx2x_nic_load()
2797 struct bnx2x_fastpath *fp = &bp->fp[i]; in bnx2x_drain_tx_queues() local
2799 for_each_cos_in_tx_queue(fp, cos) in bnx2x_drain_tx_queues()
2800 rc = bnx2x_clean_tx_queue(bp, fp->txdata_ptr[cos]); in bnx2x_drain_tx_queues()
2933 bnx2x_free_rx_sge_range(bp, bp->fp + i, NUM_RX_SGE); in bnx2x_nic_unload()
3032 struct bnx2x_fastpath *fp = container_of(napi, struct bnx2x_fastpath, in bnx2x_poll() local
3034 struct bnx2x *bp = fp->bp; in bnx2x_poll()
3044 for_each_cos_in_tx_queue(fp, cos) in bnx2x_poll()
3045 if (bnx2x_tx_queue_has_work(fp->txdata_ptr[cos])) in bnx2x_poll()
3046 bnx2x_tx_int(bp, fp->txdata_ptr[cos]); in bnx2x_poll()
3048 if (bnx2x_has_rx_work(fp)) { in bnx2x_poll()
3049 work_done += bnx2x_rx_int(fp, budget - work_done); in bnx2x_poll()
3057 if (!(bnx2x_has_rx_work(fp) || bnx2x_has_tx_work(fp))) { in bnx2x_poll()
3063 if (IS_FCOE_FP(fp)) { in bnx2x_poll()
3067 bnx2x_update_fpsb_idx(fp); in bnx2x_poll()
3083 if (!(bnx2x_has_rx_work(fp) || bnx2x_has_tx_work(fp))) { in bnx2x_poll()
3087 "Update index to %d\n", fp->fp_hc_idx); in bnx2x_poll()
3088 bnx2x_ack_sb(bp, fp->igu_sb_id, USTORM_ID, in bnx2x_poll()
3089 le16_to_cpu(fp->fp_hc_idx), in bnx2x_poll()
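
bnx2x_poll() follows the canonical NAPI contract: service tx completions for every CoS, consume rx up to the budget, and only complete NAPI and re-arm the interrupt once both directions are verifiably idle; note the has_rx_work/has_tx_work test appears twice (file lines 3057 and 3083), with the status-block index updated in between, to close the race against a fresh hardware update. A skeleton with hypothetical demo_* helpers:

    #include <linux/kernel.h>
    #include <linux/netdevice.h>

    struct demo_queue {
            struct napi_struct napi;
    };

    bool demo_has_work(struct demo_queue *q);        /* rx or tx pending? */
    int  demo_rx_int(struct demo_queue *q, int budget);
    void demo_tx_int(struct demo_queue *q);
    void demo_ack_and_unmask(struct demo_queue *q);  /* bnx2x_ack_sb analogue */

    static int demo_poll(struct napi_struct *napi, int budget)
    {
            struct demo_queue *q = container_of(napi, struct demo_queue, napi);
            int work_done = 0;

            demo_tx_int(q);                  /* tx completions don't count vs budget */

            if (demo_has_work(q))
                    work_done += demo_rx_int(q, budget - work_done);

            /* re-check after the status-block index update; only then is
             * it safe to stop polling and re-enable the interrupt */
            if (work_done < budget && !demo_has_work(q)) {
                    napi_complete(napi);
                    demo_ack_and_unmask(q);
            }
            return work_done;
    }
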
4057 struct bnx2x_fastpath *fp = &bp->fp[fp_index]; in bnx2x_free_fp_mem_at() local
4064 fp->status_blk_mapping = 0; in bnx2x_free_fp_mem_at()
4081 bnx2x_free_rx_bds(fp); in bnx2x_free_fp_mem_at()
4104 for_each_cos_in_tx_queue(fp, cos) { in bnx2x_free_fp_mem_at()
4105 struct bnx2x_fp_txdata *txdata = fp->txdata_ptr[cos]; in bnx2x_free_fp_mem_at()
4151 static int bnx2x_alloc_rx_bds(struct bnx2x_fastpath *fp, in bnx2x_alloc_rx_bds() argument
4154 struct bnx2x *bp = fp->bp; in bnx2x_alloc_rx_bds()
4158 fp->rx_comp_cons = 0; in bnx2x_alloc_rx_bds()
4165 if (bnx2x_alloc_rx_data(bp, fp, ring_prod) < 0) { in bnx2x_alloc_rx_bds()
4176 i - failure_cnt, fp->index); in bnx2x_alloc_rx_bds()
4178 fp->rx_bd_prod = ring_prod; in bnx2x_alloc_rx_bds()
4180 fp->rx_comp_prod = min_t(u16, NUM_RCQ_RINGS*RCQ_DESC_CNT, in bnx2x_alloc_rx_bds()
4182 fp->rx_pkt = fp->rx_calls = 0; in bnx2x_alloc_rx_bds()
4184 bnx2x_fp_stats(bp, fp)->eth_q_stats.rx_skb_alloc_failed += failure_cnt; in bnx2x_alloc_rx_bds()
4189 static void bnx2x_set_next_page_rx_cq(struct bnx2x_fastpath *fp) in bnx2x_set_next_page_rx_cq() argument
4197 &fp->rx_comp_ring[RCQ_DESC_CNT * i - 1]; in bnx2x_set_next_page_rx_cq()
4199 cpu_to_le32(U64_HI(fp->rx_comp_mapping + in bnx2x_set_next_page_rx_cq()
4202 cpu_to_le32(U64_LO(fp->rx_comp_mapping + in bnx2x_set_next_page_rx_cq()
4210 struct bnx2x_fastpath *fp = &bp->fp[index]; in bnx2x_alloc_fp_mem_at() local
4267 for_each_cos_in_tx_queue(fp, cos) { in bnx2x_alloc_fp_mem_at()
4268 struct bnx2x_fp_txdata *txdata = fp->txdata_ptr[cos]; in bnx2x_alloc_fp_mem_at()
4303 bnx2x_set_next_page_rx_bd(fp); in bnx2x_alloc_fp_mem_at()
4306 bnx2x_set_next_page_rx_cq(fp); in bnx2x_alloc_fp_mem_at()
4309 ring_size = bnx2x_alloc_rx_bds(fp, rx_ring_size); in bnx2x_alloc_fp_mem_at()
4324 if (ring_size < (fp->disable_tpa ? in bnx2x_alloc_fp_mem_at()
4392 kfree(bp->fp[i].tpa_info); in bnx2x_free_mem_bp()
4393 kfree(bp->fp); in bnx2x_free_mem_bp()
4403 struct bnx2x_fastpath *fp; in bnx2x_alloc_mem_bp() local
4424 fp = kcalloc(bp->fp_array_size, sizeof(*fp), GFP_KERNEL); in bnx2x_alloc_mem_bp()
4425 if (!fp) in bnx2x_alloc_mem_bp()
4428 fp[i].tpa_info = in bnx2x_alloc_mem_bp()
4431 if (!(fp[i].tpa_info)) in bnx2x_alloc_mem_bp()
4435 bp->fp = fp; in bnx2x_alloc_mem_bp()
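
bnx2x_alloc_mem_bp()/bnx2x_free_mem_bp() close the listing with a two-level allocation: kcalloc() the fastpath array, then a tpa_info array per fastpath, unwinding everything already allocated if any step fails. A self-contained sketch of that pattern (DEMO_MAX_AGG stands in for ETH_MAX_AGGREGATION_QUEUES_E1H_E2; all demo_* names are hypothetical):

    #include <linux/slab.h>
    #include <linux/errno.h>

    #define DEMO_MAX_AGG 16                  /* illustrative aggregation-queue count */

    struct demo_agg_info { int state; };
    struct demo_fastpath { struct demo_agg_info *tpa_info; };
    struct demo_bp { struct demo_fastpath *fp; };

    static int demo_alloc_fp_array(struct demo_bp *bp, unsigned int n)
    {
            unsigned int i;

            bp->fp = kcalloc(n, sizeof(*bp->fp), GFP_KERNEL);
            if (!bp->fp)
                    return -ENOMEM;

            for (i = 0; i < n; i++) {
                    bp->fp[i].tpa_info = kcalloc(DEMO_MAX_AGG,
                                                 sizeof(*bp->fp[i].tpa_info),
                                                 GFP_KERNEL);
                    if (!bp->fp[i].tpa_info)
                            goto unwind;
            }
            return 0;

    unwind:
            while (i--)                      /* free only what was allocated */
                    kfree(bp->fp[i].tpa_info);
            kfree(bp->fp);
            bp->fp = NULL;
            return -ENOMEM;
    }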