
Searched refs:rx_desc (Results 1 – 25 of 85) sorted by relevance


/drivers/net/ethernet/intel/fm10k/
fm10k_main.c
131 union fm10k_rx_desc *rx_desc; in fm10k_alloc_rx_buffers() local
139 rx_desc = FM10K_RX_DESC(rx_ring, i); in fm10k_alloc_rx_buffers()
150 rx_desc->q.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset); in fm10k_alloc_rx_buffers()
152 rx_desc++; in fm10k_alloc_rx_buffers()
156 rx_desc = FM10K_RX_DESC(rx_ring, 0); in fm10k_alloc_rx_buffers()
162 rx_desc->d.staterr = 0; in fm10k_alloc_rx_buffers()
270 union fm10k_rx_desc *rx_desc, in fm10k_add_rx_frag() argument
317 union fm10k_rx_desc *rx_desc, in fm10k_fetch_rx_buffer() argument
320 unsigned int size = le16_to_cpu(rx_desc->w.length); in fm10k_fetch_rx_buffer()
361 if (fm10k_add_rx_frag(rx_buffer, size, rx_desc, skb)) { in fm10k_fetch_rx_buffer()
[all …]
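
The fm10k hits above trace the usual RX ring refill loop: point each descriptor at a fresh buffer, clear its status word so the hardware owns it, and wrap the index at the end of the ring. A minimal plain-C sketch of that pattern, with all names invented for illustration (the real driver works with DMA addresses and le64 conversions, omitted here):

/* Simplified model of the refill loop seen in fm10k_alloc_rx_buffers().
 * Hypothetical types and sizes; not the driver's actual code.
 */
#include <stdint.h>
#include <stdlib.h>

#define RING_SIZE 256

struct demo_rx_desc {
	uint64_t pkt_addr;   /* buffer address the NIC will DMA into */
	uint32_t staterr;    /* status written back by the NIC; 0 = not done */
};

struct demo_rx_ring {
	struct demo_rx_desc desc[RING_SIZE];
	void *buf[RING_SIZE];
	unsigned int next_to_use;
};

static int demo_alloc_rx_buffers(struct demo_rx_ring *ring, unsigned int count)
{
	unsigned int i = ring->next_to_use;

	while (count--) {
		void *buf = malloc(2048);

		if (!buf)
			return -1;

		ring->buf[i] = buf;
		ring->desc[i].pkt_addr = (uint64_t)(uintptr_t)buf;
		ring->desc[i].staterr = 0;     /* hand the descriptor back to the NIC */

		if (++i == RING_SIZE)          /* wrap to the start of the ring */
			i = 0;
	}

	ring->next_to_use = i;
	return 0;
}
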
/drivers/net/ethernet/seeq/
sgiseeq.c
58 (unsigned long)((sp)->rx_desc)))
94 struct sgiseeq_rx_desc *rx_desc; member
197 if (!sp->rx_desc[i].skb) { in seeq_init_ring()
207 sp->rx_desc[i].skb = skb; in seeq_init_ring()
208 sp->rx_desc[i].rdma.pbuf = dma_addr; in seeq_init_ring()
210 sp->rx_desc[i].rdma.cntinfo = RCNTINFO_INIT; in seeq_init_ring()
211 dma_sync_desc_dev(dev, &sp->rx_desc[i]); in seeq_init_ring()
213 sp->rx_desc[i - 1].rdma.cntinfo |= HPCDMA_EOR; in seeq_init_ring()
214 dma_sync_desc_dev(dev, &sp->rx_desc[i - 1]); in seeq_init_ring()
233 if (sp->rx_desc[i].skb) { in seeq_purge_ring()
[all …]
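
The sgiseeq hits show the classic ring-init step of flagging the last descriptor as end-of-ring so the DMA engine wraps back to the first one. A hedged plain-C sketch with invented names and flag values (stand-ins for RCNTINFO_INIT and HPCDMA_EOR; the real driver also syncs each descriptor for the device):

/* Simplified model of the loop in seeq_init_ring(); illustrative only. */
#include <stdint.h>

#define DEMO_RING_SIZE    16
#define DEMO_CNTINFO_INIT 0x00000400u   /* stand-in for RCNTINFO_INIT */
#define DEMO_EOR          0x80000000u   /* stand-in for HPCDMA_EOR */

struct demo_seeq_rx_desc {
	uint32_t pbuf;      /* buffer address the DMA engine uses */
	uint32_t cntinfo;   /* byte count and control flags */
};

static void demo_init_rx_ring(struct demo_seeq_rx_desc *ring,
			      const uint32_t *buf_addrs)
{
	int i;

	for (i = 0; i < DEMO_RING_SIZE; i++) {
		ring[i].pbuf = buf_addrs[i];
		ring[i].cntinfo = DEMO_CNTINFO_INIT;
	}

	/* mark the last descriptor so the engine wraps to the first one */
	ring[DEMO_RING_SIZE - 1].cntinfo |= DEMO_EOR;
}
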
/drivers/infiniband/ulp/isert/
ib_isert.c
179 struct iser_rx_desc *rx_desc; in isert_alloc_rx_descriptors() local
189 rx_desc = isert_conn->rx_descs; in isert_alloc_rx_descriptors()
191 for (i = 0; i < ISERT_QP_MAX_RECV_DTOS; i++, rx_desc++) { in isert_alloc_rx_descriptors()
192 dma_addr = ib_dma_map_single(ib_dev, (void *)rx_desc, in isert_alloc_rx_descriptors()
197 rx_desc->dma_addr = dma_addr; in isert_alloc_rx_descriptors()
199 rx_sg = &rx_desc->rx_sg; in isert_alloc_rx_descriptors()
200 rx_sg->addr = rx_desc->dma_addr; in isert_alloc_rx_descriptors()
203 rx_desc->rx_cqe.done = isert_recv_done; in isert_alloc_rx_descriptors()
209 rx_desc = isert_conn->rx_descs; in isert_alloc_rx_descriptors()
210 for (j = 0; j < i; j++, rx_desc++) { in isert_alloc_rx_descriptors()
[all …]
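
The ib_isert hits above illustrate the map-then-unwind idiom: map each RX descriptor in order and, if mapping number i fails, undo only the first i mappings before reporting the error. A simplified sketch assuming hypothetical demo_map()/demo_unmap() helpers in place of ib_dma_map_single()/ib_dma_unmap_single():

/* Illustrative only; the helpers below are placeholders, not real APIs. */
#include <stddef.h>

struct demo_desc {
	void *cpu_addr;
	unsigned long dma_addr;   /* 0 means "not mapped" in this sketch */
};

static unsigned long demo_map(void *addr, size_t len)
{
	(void)len;
	return (unsigned long)addr;   /* pretend the mapping always succeeds */
}

static void demo_unmap(unsigned long dma, size_t len)
{
	(void)dma;
	(void)len;
}

static int demo_map_all(struct demo_desc *descs, int n, size_t len)
{
	int i, j;

	for (i = 0; i < n; i++) {
		descs[i].dma_addr = demo_map(descs[i].cpu_addr, len);
		if (!descs[i].dma_addr)
			goto unwind;
	}
	return 0;

unwind:
	/* undo only the mappings that succeeded (indices 0 .. i-1) */
	for (j = 0; j < i; j++) {
		demo_unmap(descs[j].dma_addr, len);
		descs[j].dma_addr = 0;
	}
	return -1;
}
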
/drivers/spi/
spi-pxa2xx-dma.c
149 struct dma_async_tx_descriptor *tx_desc, *rx_desc; in pxa2xx_spi_dma_prepare() local
160 rx_desc = pxa2xx_spi_dma_prepare_one(drv_data, DMA_DEV_TO_MEM); in pxa2xx_spi_dma_prepare()
161 if (!rx_desc) { in pxa2xx_spi_dma_prepare()
169 rx_desc->callback = pxa2xx_spi_dma_callback; in pxa2xx_spi_dma_prepare()
170 rx_desc->callback_param = drv_data; in pxa2xx_spi_dma_prepare()
172 dmaengine_submit(rx_desc); in pxa2xx_spi_dma_prepare()
/drivers/infiniband/ulp/iser/
iser_initiator.c
245 struct iser_rx_desc *rx_desc; in iser_alloc_rx_descriptors() local
267 rx_desc = iser_conn->rx_descs; in iser_alloc_rx_descriptors()
269 for (i = 0; i < iser_conn->qp_max_recv_dtos; i++, rx_desc++) { in iser_alloc_rx_descriptors()
270 dma_addr = ib_dma_map_single(device->ib_device, (void *)rx_desc, in iser_alloc_rx_descriptors()
275 rx_desc->dma_addr = dma_addr; in iser_alloc_rx_descriptors()
276 rx_desc->cqe.done = iser_task_rsp; in iser_alloc_rx_descriptors()
277 rx_sg = &rx_desc->rx_sg; in iser_alloc_rx_descriptors()
278 rx_sg->addr = rx_desc->dma_addr; in iser_alloc_rx_descriptors()
287 rx_desc = iser_conn->rx_descs; in iser_alloc_rx_descriptors()
288 for (j = 0; j < i; j++, rx_desc++) in iser_alloc_rx_descriptors()
[all …]
/drivers/net/ethernet/intel/i40e/
i40e_txrx.c
562 union i40e_rx_desc *rx_desc, u8 prog_id) in i40e_fd_handle_status() argument
570 qw = le64_to_cpu(rx_desc->wb.qword1.status_error_len); in i40e_fd_handle_status()
575 pf->fd_inv = le32_to_cpu(rx_desc->wb.qword0.hi_dword.fd_id); in i40e_fd_handle_status()
576 if ((rx_desc->wb.qword0.hi_dword.fd_id != 0) || in i40e_fd_handle_status()
594 if ((rx_desc->wb.qword0.hi_dword.fd_id == 0) && in i40e_fd_handle_status()
618 rx_desc->wb.qword0.hi_dword.fd_id); in i40e_fd_handle_status()
1097 union i40e_rx_desc *rx_desc, in i40e_clean_programming_status() argument
1122 i40e_fd_handle_status(rx_ring, rx_desc, id); in i40e_clean_programming_status()
1407 union i40e_rx_desc *rx_desc; in i40e_alloc_rx_buffers() local
1414 rx_desc = I40E_RX_DESC(rx_ring, ntu); in i40e_alloc_rx_buffers()
[all …]
/drivers/net/ethernet/intel/i40evf/
i40e_txrx.c
711 union i40e_rx_desc *rx_desc; in i40evf_alloc_rx_buffers() local
718 rx_desc = I40E_RX_DESC(rx_ring, ntu); in i40evf_alloc_rx_buffers()
734 rx_desc->read.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset); in i40evf_alloc_rx_buffers()
736 rx_desc++; in i40evf_alloc_rx_buffers()
740 rx_desc = I40E_RX_DESC(rx_ring, 0); in i40evf_alloc_rx_buffers()
746 rx_desc->wb.qword1.status_error_len = 0; in i40evf_alloc_rx_buffers()
774 union i40e_rx_desc *rx_desc) in i40e_rx_checksum() argument
782 qword = le64_to_cpu(rx_desc->wb.qword1.status_error_len); in i40e_rx_checksum()
879 union i40e_rx_desc *rx_desc, in i40e_rx_hash() argument
891 if ((rx_desc->wb.qword1.status_error_len & rss_mask) == rss_mask) { in i40e_rx_hash()
[all …]
/drivers/net/ethernet/alteon/
acenic.h
477 #define RX_STD_RING_SIZE (RX_STD_RING_ENTRIES * sizeof(struct rx_desc))
480 #define RX_JUMBO_RING_SIZE (RX_JUMBO_RING_ENTRIES *sizeof(struct rx_desc))
483 #define RX_MINI_RING_SIZE (RX_MINI_RING_ENTRIES *sizeof(struct rx_desc))
487 sizeof(struct rx_desc))
489 struct rx_desc{ struct
664 struct rx_desc *rx_std_ring;
665 struct rx_desc *rx_jumbo_ring;
666 struct rx_desc *rx_mini_ring;
667 struct rx_desc *rx_return_ring;
/drivers/net/ethernet/
ec_bhf.c
96 struct rx_desc { struct
148 struct rx_desc *rx_descs;
203 static int ec_bhf_pkt_received(struct rx_desc *desc) in ec_bhf_pkt_received()
208 static void ec_bhf_add_rx_desc(struct ec_bhf_priv *priv, struct rx_desc *desc) in ec_bhf_add_rx_desc()
216 struct rx_desc *desc = &priv->rx_descs[priv->rx_dnext]; in ec_bhf_process_rx()
382 priv->rx_dcount = priv->rx_buf.len / sizeof(struct rx_desc); in ec_bhf_setup_rx_descs()
383 priv->rx_descs = (struct rx_desc *)priv->rx_buf.buf; in ec_bhf_setup_rx_descs()
387 struct rx_desc *desc = &priv->rx_descs[i]; in ec_bhf_setup_rx_descs()
410 FIFO_SIZE * sizeof(struct rx_desc)); in ec_bhf_open()
/drivers/net/wireless/ti/wl1251/
rx.c
220 struct wl1251_rx_descriptor *rx_desc; in wl1251_rx() local
225 rx_desc = wl->rx_descriptor; in wl1251_rx()
228 wl1251_rx_header(wl, rx_desc); in wl1251_rx()
231 wl1251_rx_body(wl, rx_desc); in wl1251_rx()
/drivers/net/ethernet/marvell/
pxa168_eth.c
187 struct rx_desc { struct
218 struct rx_desc *p_rx_desc_area;
318 struct rx_desc *p_used_rx_desc; in rxq_refill()
654 (u32) (pep->rx_desc_dma + rx_curr_desc * sizeof(struct rx_desc))); in eth_port_start()
657 (u32) (pep->rx_desc_dma + rx_curr_desc * sizeof(struct rx_desc))); in eth_port_start()
784 struct rx_desc *rx_desc; in rxq_process() local
792 rx_desc = &pep->p_rx_desc_area[rx_curr_desc]; in rxq_process()
793 cmd_sts = rx_desc->cmd_sts; in rxq_process()
808 dma_unmap_single(NULL, rx_desc->buf_ptr, in rxq_process()
809 rx_desc->buf_size, in rxq_process()
[all …]
mvneta.c
774 int rx_desc = rxq->next_desc_to_proc; in mvneta_rxq_next_desc_get() local
776 rxq->next_desc_to_proc = MVNETA_QUEUE_NEXT_DESC(rxq, rx_desc); in mvneta_rxq_next_desc_get()
778 return rxq->descs + rx_desc; in mvneta_rxq_next_desc_get()
1618 static void mvneta_rx_desc_fill(struct mvneta_rx_desc *rx_desc, in mvneta_rx_desc_fill() argument
1624 rx_desc->buf_phys_addr = phys_addr; in mvneta_rx_desc_fill()
1625 i = rx_desc - rxq->descs; in mvneta_rx_desc_fill()
1710 struct mvneta_rx_desc *rx_desc) in mvneta_rx_error() argument
1712 u32 status = rx_desc->status; in mvneta_rx_error()
1717 status, rx_desc->data_size); in mvneta_rx_error()
1724 status, rx_desc->data_size); in mvneta_rx_error()
[all …]
mv643xx_eth.c
201 struct rx_desc { struct
217 struct rx_desc { argument
345 struct rx_desc *rx_desc_area;
525 struct rx_desc *rx_desc; in rxq_process() local
530 rx_desc = &rxq->rx_desc_area[rxq->rx_curr_desc]; in rxq_process()
532 cmd_sts = rx_desc->cmd_sts; in rxq_process()
544 dma_unmap_single(mp->dev->dev.parent, rx_desc->buf_ptr, in rxq_process()
545 rx_desc->buf_size, DMA_FROM_DEVICE); in rxq_process()
551 byte_cnt = rx_desc->byte_cnt; in rxq_process()
618 struct rx_desc *rx_desc; in rxq_refill() local
[all …]
/drivers/net/ethernet/intel/ixgbevf/
ixgbevf_main.c
471 union ixgbe_adv_rx_desc *rx_desc, in ixgbevf_rx_hash() argument
479 rss_type = le16_to_cpu(rx_desc->wb.lower.lo_dword.hs_rss.pkt_info) & in ixgbevf_rx_hash()
485 skb_set_hash(skb, le32_to_cpu(rx_desc->wb.lower.hi_dword.rss), in ixgbevf_rx_hash()
497 union ixgbe_adv_rx_desc *rx_desc, in ixgbevf_rx_checksum() argument
507 if (ixgbevf_test_staterr(rx_desc, IXGBE_RXD_STAT_IPCS) && in ixgbevf_rx_checksum()
508 ixgbevf_test_staterr(rx_desc, IXGBE_RXDADV_ERR_IPE)) { in ixgbevf_rx_checksum()
513 if (!ixgbevf_test_staterr(rx_desc, IXGBE_RXD_STAT_L4CS)) in ixgbevf_rx_checksum()
516 if (ixgbevf_test_staterr(rx_desc, IXGBE_RXDADV_ERR_TCPE)) { in ixgbevf_rx_checksum()
536 union ixgbe_adv_rx_desc *rx_desc, in ixgbevf_process_skb_fields() argument
539 ixgbevf_rx_hash(rx_ring, rx_desc, skb); in ixgbevf_process_skb_fields()
[all …]
/drivers/net/wireless/ti/wlcore/
hw_ops.h
59 wlcore_hw_get_rx_buf_align(struct wl1271 *wl, u32 rx_desc) in wlcore_hw_get_rx_buf_align() argument
65 return wl->ops->get_rx_buf_align(wl, rx_desc); in wlcore_hw_get_rx_buf_align()
69 wlcore_hw_prepare_read(struct wl1271 *wl, u32 rx_desc, u32 len) in wlcore_hw_prepare_read() argument
72 return wl->ops->prepare_read(wl, rx_desc, len); in wlcore_hw_prepare_read()
/drivers/atm/
horizon.c
728 rx_ch_desc * rx_desc = &memmap->rx_descs[channel]; in hrz_open_rx() local
733 channel_type = rd_mem (dev, &rx_desc->wr_buf_type) & BUFFER_PTR_MASK; in hrz_open_rx()
761 wr_mem (dev, &rx_desc->wr_buf_type, in hrz_open_rx()
764 wr_mem (dev, &rx_desc->rd_buf_type, buf_ptr); in hrz_open_rx()
802 rx_ch_desc * rx_desc = &memmap->rx_descs[vc]; in hrz_close_rx() local
807 value = rd_mem (dev, &rx_desc->wr_buf_type) & BUFFER_PTR_MASK; in hrz_close_rx()
821 wr_mem (dev, &rx_desc->wr_buf_type, RX_CHANNEL_DISABLED); in hrz_close_rx()
823 if ((rd_mem (dev, &rx_desc->wr_buf_type) & BUFFER_PTR_MASK) == RX_CHANNEL_DISABLED) in hrz_close_rx()
867 r1 = rd_mem (dev, &rx_desc->rd_buf_type); in hrz_close_rx()
885 r2 = rd_mem (dev, &rx_desc->rd_buf_type); in hrz_close_rx()
[all …]
/drivers/net/ethernet/intel/ixgb/
ixgb_main.c
1914 struct ixgb_rx_desc *rx_desc, in ixgb_rx_checksum() argument
1920 if ((rx_desc->status & IXGB_RX_DESC_STATUS_IXSM) || in ixgb_rx_checksum()
1921 (!(rx_desc->status & IXGB_RX_DESC_STATUS_TCPCS))) { in ixgb_rx_checksum()
1928 if (rx_desc->errors & IXGB_RX_DESC_ERRORS_TCPE) { in ixgb_rx_checksum()
1975 struct ixgb_rx_desc *rx_desc, *next_rxd; in ixgb_clean_rx_irq() local
1983 rx_desc = IXGB_RX_DESC(*rx_ring, i); in ixgb_clean_rx_irq()
1986 while (rx_desc->status & IXGB_RX_DESC_STATUS_DD) { in ixgb_clean_rx_irq()
1995 status = rx_desc->status; in ixgb_clean_rx_irq()
2023 length = le16_to_cpu(rx_desc->length); in ixgb_clean_rx_irq()
2024 rx_desc->length = 0; in ixgb_clean_rx_irq()
[all …]
/drivers/net/ethernet/intel/e1000/
e1000_main.c
3478 struct e1000_rx_desc *rx_desc = E1000_RX_DESC(*rx_ring, i); in e1000_dump() local
3481 struct my_u *u = (struct my_u *)rx_desc; in e1000_dump()
4157 struct e1000_rx_desc *rx_desc, *next_rxd; in e1000_clean_jumbo_rx_irq() local
4166 rx_desc = E1000_RX_DESC(*rx_ring, i); in e1000_clean_jumbo_rx_irq()
4169 while (rx_desc->status & E1000_RXD_STAT_DD) { in e1000_clean_jumbo_rx_irq()
4178 status = rx_desc->status; in e1000_clean_jumbo_rx_irq()
4194 length = le16_to_cpu(rx_desc->length); in e1000_clean_jumbo_rx_irq()
4198 (rx_desc->errors & E1000_RXD_ERR_FRAME_ERR_MASK))) { in e1000_clean_jumbo_rx_irq()
4202 rx_desc->errors, in e1000_clean_jumbo_rx_irq()
4273 status | rx_desc->errors << 24, in e1000_clean_jumbo_rx_irq()
[all …]
/drivers/scsi/mvsas/
mv_sas.c
886 static void mvs_slot_free(struct mvs_info *mvi, u32 rx_desc) in mvs_slot_free() argument
888 u32 slot_idx = rx_desc & RXQ_SLOT_MASK; in mvs_slot_free()
1717 int mvs_slot_complete(struct mvs_info *mvi, u32 rx_desc, u32 flags) in mvs_slot_complete() argument
1719 u32 slot_idx = rx_desc & RXQ_SLOT_MASK; in mvs_slot_complete()
1771 if (unlikely((rx_desc & RXQ_ERR) in mvs_slot_complete()
1776 rx_desc, get_unaligned_le64(slot->response)); in mvs_slot_complete()
1785 if (rx_desc & RXQ_GOOD) { in mvs_slot_complete()
1790 else if (rx_desc & RXQ_RSP) { in mvs_slot_complete()
2068 u32 rx_prod_idx, rx_desc; in mvs_int_rx() local
2094 rx_desc = le32_to_cpu(mvi->rx[rx_prod_idx + 1]); in mvs_int_rx()
[all …]
/drivers/net/ethernet/mellanox/mlx4/
en_rx.c
76 struct mlx4_en_rx_desc *rx_desc, in mlx4_en_alloc_frags() argument
88 rx_desc->data[i].addr = cpu_to_be64(frags->dma + in mlx4_en_alloc_frags()
111 struct mlx4_en_rx_desc *rx_desc = ring->buf + ring->stride * index; in mlx4_en_init_rx_desc() local
117 rx_desc->data[i].byte_count = in mlx4_en_init_rx_desc()
119 rx_desc->data[i].lkey = cpu_to_be32(priv->mdev->mr.key); in mlx4_en_init_rx_desc()
127 rx_desc->data[i].byte_count = 0; in mlx4_en_init_rx_desc()
128 rx_desc->data[i].lkey = cpu_to_be32(MLX4_EN_MEMTYPE_PAD); in mlx4_en_init_rx_desc()
129 rx_desc->data[i].addr = 0; in mlx4_en_init_rx_desc()
137 struct mlx4_en_rx_desc *rx_desc = ring->buf + in mlx4_en_prepare_rx_desc() local
149 rx_desc->data[0].addr = cpu_to_be64(frags->dma + in mlx4_en_prepare_rx_desc()
[all …]
/drivers/net/ethernet/intel/e1000e/
netdev.c
228 union e1000_rx_desc_extended *rx_desc; in e1000e_dump() local
448 rx_desc = E1000_RX_DESC_EXT(*rx_ring, i); in e1000e_dump()
449 u1 = (struct my_u1 *)rx_desc; in e1000e_dump()
450 staterr = le32_to_cpu(rx_desc->wb.upper.status_error); in e1000e_dump()
668 union e1000_rx_desc_extended *rx_desc; in e1000_alloc_rx_buffers() local
702 rx_desc = E1000_RX_DESC_EXT(*rx_ring, i); in e1000_alloc_rx_buffers()
703 rx_desc->read.buffer_addr = cpu_to_le64(buffer_info->dma); in e1000_alloc_rx_buffers()
736 union e1000_rx_desc_packet_split *rx_desc; in e1000_alloc_rx_buffers_ps() local
746 rx_desc = E1000_RX_DESC_PS(*rx_ring, i); in e1000_alloc_rx_buffers_ps()
752 rx_desc->read.buffer_addr[j + 1] = in e1000_alloc_rx_buffers_ps()
[all …]
/drivers/net/ethernet/intel/ixgbe/
ixgbe.h
511 static inline __le32 ixgbe_test_staterr(union ixgbe_adv_rx_desc *rx_desc, in ixgbe_test_staterr() argument
514 return rx_desc->wb.upper.status_error & cpu_to_le32(stat_err_bits); in ixgbe_test_staterr()
924 union ixgbe_adv_rx_desc *rx_desc, struct sk_buff *skb);
968 union ixgbe_adv_rx_desc *rx_desc, in ixgbe_ptp_rx_hwtstamp() argument
971 if (unlikely(ixgbe_test_staterr(rx_desc, IXGBE_RXD_STAT_TSIP))) { in ixgbe_ptp_rx_hwtstamp()
976 if (unlikely(!ixgbe_test_staterr(rx_desc, IXGBE_RXDADV_STAT_TS))) in ixgbe_ptp_rx_hwtstamp()
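
ixgbe_test_staterr() above shows the endian trick common to these drivers: leave the hardware-written status word in its little-endian layout and convert the constant mask once, instead of byte-swapping every descriptor. A userspace sketch of the same idea, assuming htole32() from <endian.h> in place of the kernel's cpu_to_le32(), with invented field and bit names:

/* Illustrative only; DEMO_RXD_STAT_DD is a made-up "descriptor done" bit. */
#include <endian.h>
#include <stdbool.h>
#include <stdint.h>

#define DEMO_RXD_STAT_DD 0x01u

struct demo_adv_rx_desc {
	uint32_t status_error;        /* little-endian, as written by the NIC */
};

static bool demo_test_staterr(const struct demo_adv_rx_desc *rx_desc,
			      uint32_t stat_err_bits)
{
	/* the mask is converted once; the descriptor field is never swapped */
	return rx_desc->status_error & htole32(stat_err_bits);
}
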
ixgbe_main.c
626 union ixgbe_adv_rx_desc *rx_desc; in ixgbe_dump() local
845 rx_desc = IXGBE_RX_DESC(rx_ring, i); in ixgbe_dump()
846 u0 = (struct my_u0 *)rx_desc; in ixgbe_dump()
847 if (rx_desc->wb.upper.length) { in ixgbe_dump()
1478 union ixgbe_adv_rx_desc *rx_desc, in ixgbe_rx_hash() argument
1486 rss_type = le16_to_cpu(rx_desc->wb.lower.lo_dword.hs_rss.pkt_info) & in ixgbe_rx_hash()
1492 skb_set_hash(skb, le32_to_cpu(rx_desc->wb.lower.hi_dword.rss), in ixgbe_rx_hash()
1506 union ixgbe_adv_rx_desc *rx_desc) in ixgbe_rx_is_fcoe() argument
1508 __le16 pkt_info = rx_desc->wb.lower.lo_dword.hs_rss.pkt_info; in ixgbe_rx_is_fcoe()
1524 union ixgbe_adv_rx_desc *rx_desc, in ixgbe_rx_checksum() argument
[all …]
/drivers/net/ethernet/oki-semi/pch_gbe/
pch_gbe_main.c
1414 struct pch_gbe_rx_desc *rx_desc; in pch_gbe_alloc_rx_buffers() local
1447 rx_desc = PCH_GBE_RX_DESC(*rx_ring, i); in pch_gbe_alloc_rx_buffers()
1448 rx_desc->buffer_addr = (buffer_info->dma); in pch_gbe_alloc_rx_buffers()
1449 rx_desc->gbec_status = DSC_INIT16; in pch_gbe_alloc_rx_buffers()
1678 struct pch_gbe_rx_desc *rx_desc; in pch_gbe_clean_rx() local
1692 rx_desc = PCH_GBE_RX_DESC(*rx_ring, i); in pch_gbe_clean_rx()
1693 if (rx_desc->gbec_status == DSC_INIT16) in pch_gbe_clean_rx()
1698 dma_status = rx_desc->dma_status; in pch_gbe_clean_rx()
1699 gbec_status = rx_desc->gbec_status; in pch_gbe_clean_rx()
1700 tcp_ip_status = rx_desc->tcp_ip_status; in pch_gbe_clean_rx()
[all …]
/drivers/net/ethernet/intel/igb/
igb_main.c
377 union e1000_adv_rx_desc *rx_desc; in igb_dump() local
525 rx_desc = IGB_RX_DESC(rx_ring, i); in igb_dump()
526 u0 = (struct my_u0 *)rx_desc; in igb_dump()
527 staterr = le32_to_cpu(rx_desc->wb.upper.status_error); in igb_dump()
3784 union e1000_adv_rx_desc *rx_desc; in igb_configure_rx_ring() local
3831 rx_desc = IGB_RX_DESC(ring, 0); in igb_configure_rx_ring()
3832 rx_desc->wb.upper.length = 0; in igb_configure_rx_ring()
7214 union e1000_adv_rx_desc *rx_desc, in igb_construct_skb() argument
7237 if (unlikely(igb_test_staterr(rx_desc, E1000_RXDADV_STAT_TSIP))) { in igb_construct_skb()
7271 union e1000_adv_rx_desc *rx_desc, in igb_build_skb() argument
[all …]
