Searched refs:xdpf (Results 1 – 25 of 45) sorted by relevance

/drivers/net/ethernet/mellanox/mlx5/core/en/
xdp.c
64 struct xdp_frame *xdpf; in mlx5e_xmit_xdp_buff() local
67 xdpf = xdp_convert_buff_to_frame(xdp); in mlx5e_xmit_xdp_buff()
68 if (unlikely(!xdpf)) in mlx5e_xmit_xdp_buff()
71 xdptxd.data = xdpf->data; in mlx5e_xmit_xdp_buff()
72 xdptxd.len = xdpf->len; in mlx5e_xmit_xdp_buff()
92 xdp_return_frame(xdpf); in mlx5e_xmit_xdp_buff()
97 xdpi.frame.xdpf = xdpf; in mlx5e_xmit_xdp_buff()
108 dma_addr = di->addr + (xdpf->data - (void *)xdpf); in mlx5e_xmit_xdp_buff()
382 xdpi.frame.xdpf->len, DMA_TO_DEVICE); in mlx5e_free_xdpsq_desc()
383 xdp_return_frame_bulk(xdpi.frame.xdpf, bq); in mlx5e_free_xdpsq_desc()
[all …]
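
The mlx5 hits above show the canonical XDP_TX conversion: xdp_convert_buff_to_frame() carves the struct xdp_frame metadata out of the packet buffer's own headroom, which is why the line-108 excerpt can compute the payload's DMA offset as xdpf->data - (void *)xdpf. Below is a minimal userspace sketch of that layout; struct fake_xdp_frame and buff_to_frame() are hypothetical stand-ins, not the kernel's definitions.

#include <stdalign.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct fake_xdp_frame {                  /* stand-in for struct xdp_frame */
        void *data;                      /* start of packet payload */
        uint16_t len;                    /* payload length */
        uint16_t headroom;               /* gap between metadata and payload */
};

/* Model of xdp_convert_buff_to_frame(): reuse the buffer's own headroom
 * to hold the frame metadata instead of allocating it separately. */
static struct fake_xdp_frame *buff_to_frame(void *hard_start, void *data,
                                            uint16_t len)
{
        struct fake_xdp_frame *f = hard_start;

        /* mirrors the unlikely(!xdpf) failure checks in the drivers above */
        if ((size_t)((char *)data - (char *)hard_start) < sizeof(*f))
                return NULL;
        f->data = data;
        f->len = len;
        f->headroom = (uint16_t)((char *)data - (char *)hard_start - sizeof(*f));
        return f;
}

int main(void)
{
        alignas(8) char page[2048];
        char *data = page + 256;         /* payload after 256 bytes of headroom */

        memcpy(data, "packet", 6);
        struct fake_xdp_frame *f = buff_to_frame(page, data, 6);

        /* same arithmetic as mlx5's di->addr + (xdpf->data - (void *)xdpf) */
        printf("payload offset in buffer = %td\n", (char *)f->data - (char *)f);
        return 0;
}
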
/drivers/net/ethernet/intel/ixgbe/
ixgbe_xsk.c
103 struct xdp_frame *xdpf; in ixgbe_run_xdp_zc() local
120 xdpf = xdp_convert_buff_to_frame(xdp); in ixgbe_run_xdp_zc()
121 if (unlikely(!xdpf)) in ixgbe_run_xdp_zc()
123 result = ixgbe_xmit_xdp_ring(adapter, xdpf); in ixgbe_run_xdp_zc()
412 tx_bi->xdpf = NULL; in ixgbe_xmit_zc()
443 xdp_return_frame(tx_bi->xdpf); in ixgbe_clean_xdp_tx_buffer()
470 if (tx_bi->xdpf) in ixgbe_clean_xdp_tx_irq()
475 tx_bi->xdpf = NULL; in ixgbe_clean_xdp_tx_irq()
549 if (tx_bi->xdpf) in ixgbe_xsk_clean_tx_ring()
554 tx_bi->xdpf = NULL; in ixgbe_xsk_clean_tx_ring()
ixgbe_txrx_common.h
16 struct xdp_frame *xdpf);
/drivers/net/ethernet/ti/
cpsw_priv.c
54 struct xdp_frame *xdpf; in cpsw_tx_handler() local
61 xdpf = cpsw_handle_to_xdpf(token); in cpsw_tx_handler()
62 xmeta = (void *)xdpf + CPSW_XMETA_OFFSET; in cpsw_tx_handler()
65 xdp_return_frame(xdpf); in cpsw_tx_handler()
1288 int cpsw_xdp_tx_frame(struct cpsw_priv *priv, struct xdp_frame *xdpf, in cpsw_xdp_tx_frame() argument
1297 xmeta = (void *)xdpf + CPSW_XMETA_OFFSET; in cpsw_xdp_tx_frame()
1304 dma += xdpf->headroom + sizeof(struct xdp_frame); in cpsw_xdp_tx_frame()
1305 ret = cpdma_chan_submit_mapped(txch, cpsw_xdpf_to_handle(xdpf), in cpsw_xdp_tx_frame()
1306 dma, xdpf->len, port); in cpsw_xdp_tx_frame()
1308 if (sizeof(*xmeta) > xdpf->headroom) in cpsw_xdp_tx_frame()
[all …]
cpsw_priv.h
419 static inline void *cpsw_xdpf_to_handle(struct xdp_frame *xdpf) in cpsw_xdpf_to_handle() argument
421 return (void *)((unsigned long)xdpf | BIT(0)); in cpsw_xdpf_to_handle()
440 int cpsw_xdp_tx_frame(struct cpsw_priv *priv, struct xdp_frame *xdpf,
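
cpsw_xdpf_to_handle() above relies on xdp_frame pointers being at least word-aligned, so BIT(0) is always clear and can tag the TX-completion token as "xdp_frame, not skb", with cpsw_handle_to_xdpf() undoing it in cpsw_tx_handler(). A self-contained sketch of that low-bit tagging scheme; the helper names here are illustrative, not the driver's.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Tag: set BIT(0), as in cpsw_xdpf_to_handle() */
static void *frame_to_handle(void *frame)
{
        return (void *)((uintptr_t)frame | 1UL);
}

static int handle_is_frame(void *handle)
{
        return (int)((uintptr_t)handle & 1UL);
}

/* Untag: clear BIT(0) to recover the pointer, as the completion path does */
static void *handle_to_frame(void *handle)
{
        return (void *)((uintptr_t)handle & ~1UL);
}

int main(void)
{
        void *frame = malloc(64);        /* malloc alignment keeps bit 0 free */
        void *token = frame_to_handle(frame);

        if (handle_is_frame(token))      /* TX-completion side */
                printf("frame token -> %p\n", handle_to_frame(token));
        free(frame);
        return 0;
}
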
/drivers/net/ethernet/freescale/dpaa/
dpaa_eth.c
1701 xdp_return_frame(swbp->xdpf); in dpaa_cleanup_tx_fd()
2182 struct xdp_frame *new_xdpf, *xdpf = *init_xdpf; in dpaa_a050385_wa_xdpf() local
2196 if (PTR_IS_ALIGNED(xdpf->data, DPAA_FD_DATA_ALIGNMENT) && in dpaa_a050385_wa_xdpf()
2197 xdpf->headroom >= priv->tx_headroom) { in dpaa_a050385_wa_xdpf()
2198 xdpf->headroom = priv->tx_headroom; in dpaa_a050385_wa_xdpf()
2206 aligned_data = PTR_ALIGN_DOWN(xdpf->data, DPAA_FD_DATA_ALIGNMENT); in dpaa_a050385_wa_xdpf()
2207 data_shift = xdpf->data - aligned_data; in dpaa_a050385_wa_xdpf()
2212 if (xdpf->headroom >= data_shift + priv->tx_headroom) { in dpaa_a050385_wa_xdpf()
2213 memmove(aligned_data, xdpf->data, xdpf->len); in dpaa_a050385_wa_xdpf()
2214 xdpf->data = aligned_data; in dpaa_a050385_wa_xdpf()
[all …]
dpaa_eth.h
155 struct xdp_frame *xdpf; member
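
The dpaa_a050385_wa_xdpf() excerpt works around an erratum by realigning the payload in place: if the existing headroom covers both the alignment shift and the required TX headroom, the data is memmove()d down to an aligned address instead of being copied into a new frame. A hedged userspace sketch of that decision, with ALIGNMENT standing in for DPAA_FD_DATA_ALIGNMENT and the helper name invented for illustration:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define ALIGNMENT 64UL                   /* stand-in for DPAA_FD_DATA_ALIGNMENT */

/* Returns the realigned payload pointer, or NULL when the headroom is too
 * small and a fresh buffer would be needed (the driver's copy path). */
static char *realign_in_headroom(char *buf_start, char *data, size_t len,
                                 size_t min_headroom)
{
        char *aligned = (char *)((uintptr_t)data & ~(ALIGNMENT - 1));
        size_t shift = (size_t)(data - aligned);         /* like data_shift */
        size_t headroom = (size_t)(data - buf_start);

        if (headroom >= shift + min_headroom) {          /* room to slide down */
                memmove(aligned, data, len);
                return aligned;
        }
        return NULL;
}

int main(void)
{
        char buf[512];
        char *data = buf + 200;                          /* likely misaligned */

        memcpy(data, "payload", 7);
        char *moved = realign_in_headroom(buf, data, 7, 64);
        printf("realigned to offset %td\n", moved ? moved - buf : -1);
        return 0;
}
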
/drivers/net/ethernet/intel/ice/
ice_txrx_lib.c
274 struct xdp_frame *xdpf = xdp_convert_buff_to_frame(xdp); in ice_xmit_xdp_buff() local
276 if (unlikely(!xdpf)) in ice_xmit_xdp_buff()
279 return ice_xmit_xdp_ring(xdpf->data, xdpf->len, xdp_ring); in ice_xmit_xdp_buff()
/drivers/net/ethernet/broadcom/bnxt/
bnxt_xdp.c
62 struct xdp_frame *xdpf) in __bnxt_xmit_xdp_redirect() argument
68 tx_buf->xdpf = xdpf; in __bnxt_xmit_xdp_redirect()
93 xdp_return_frame(tx_buf->xdpf); in bnxt_tx_int_xdp()
95 tx_buf->xdpf = NULL; in bnxt_tx_int_xdp()
/drivers/net/ethernet/socionext/
netsec.c
272 struct xdp_frame *xdpf; member
675 bytes += desc->xdpf->len; in netsec_clean_tx_dring()
677 xdp_return_frame_rx_napi(desc->xdpf); in netsec_clean_tx_dring()
679 xdp_return_frame_bulk(desc->xdpf, &bq); in netsec_clean_tx_dring()
821 dring->desc[idx].xdpf = buf; in netsec_set_tx_de()
829 struct xdp_frame *xdpf, bool is_ndo) in netsec_xdp_queue_one() argument
833 struct page *page = virt_to_page(xdpf->data); in netsec_xdp_queue_one()
851 dma_handle = dma_map_single(priv->dev, xdpf->data, xdpf->len, in netsec_xdp_queue_one()
865 dma_handle = page_pool_get_dma_addr(page) + xdpf->headroom + in netsec_xdp_queue_one()
866 sizeof(*xdpf); in netsec_xdp_queue_one()
[all …]
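
netsec_xdp_queue_one() above picks between two DMA strategies: a frame that arrived via ndo_xdp_xmit (is_ndo) sits in memory the driver never mapped, so it needs dma_map_single(), while an XDP_TX frame lives in an RX page-pool page that is already mapped, so only the in-page offset (headroom plus the frame metadata) is needed. A rough model of that branch; fake_dma_map_single() and fake_page_pool_dma_addr() are invented stand-ins, not kernel APIs.

#include <stdint.h>
#include <stdio.h>

struct fake_xdp_frame {                  /* stand-in for struct xdp_frame */
        uint16_t len;
        uint16_t headroom;
};

/* Pretend DMA helpers: both just fabricate a device address. */
static uint64_t fake_dma_map_single(void *cpu_addr)
{
        return (uint64_t)(uintptr_t)cpu_addr;
}

static uint64_t fake_page_pool_dma_addr(void)
{
        return 0x10000;                  /* "already mapped" RX page */
}

static uint64_t tx_dma_addr(struct fake_xdp_frame *f, void *data, int is_ndo)
{
        if (is_ndo)
                /* redirected frame: driver never mapped this memory */
                return fake_dma_map_single(data);
        /* XDP_TX: reuse the page-pool mapping, offset to the payload,
         * as in netsec's headroom + sizeof(*xdpf) above */
        return fake_page_pool_dma_addr() + f->headroom + sizeof(*f);
}

int main(void)
{
        struct fake_xdp_frame f = { .len = 60, .headroom = 192 };
        char payload[60];

        printf("ndo path:    0x%llx\n",
               (unsigned long long)tx_dma_addr(&f, payload, 1));
        printf("xdp_tx path: 0x%llx\n",
               (unsigned long long)tx_dma_addr(&f, payload, 0));
        return 0;
}
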
/drivers/net/ethernet/sfc/
tx.c
426 struct xdp_frame *xdpf; in efx_xdp_tx_buffers() local
468 xdpf = xdpfs[i]; in efx_xdp_tx_buffers()
476 len = xdpf->len; in efx_xdp_tx_buffers()
480 xdpf->data, len, in efx_xdp_tx_buffers()
487 tx_buffer->xdpf = xdpf; in efx_xdp_tx_buffers()
rx.c
257 struct xdp_frame *xdpf; in efx_do_xdp() local
312 xdpf = xdp_convert_buff_to_frame(&xdp); in efx_do_xdp()
313 err = efx_xdp_tx_buffers(efx, 1, &xdpf, true); in efx_do_xdp()
/drivers/net/ethernet/qlogic/qede/
qede_fp.c
306 u16 len, struct page *page, struct xdp_frame *xdpf) in qede_xdp_xmit() argument
333 xdp->xdpf = xdpf; in qede_xdp_xmit()
346 struct xdp_frame *xdpf; in qede_xdp_transmit() local
363 xdpf = frames[i]; in qede_xdp_transmit()
365 mapping = dma_map_single(dmadev, xdpf->data, xdpf->len, in qede_xdp_transmit()
370 if (unlikely(qede_xdp_xmit(xdp_tx, mapping, 0, xdpf->len, in qede_xdp_transmit()
371 NULL, xdpf))) in qede_xdp_transmit()
405 struct xdp_frame *xdpf; in qede_xdp_tx_int() local
413 xdpf = xdp_info->xdpf; in qede_xdp_tx_int()
415 if (xdpf) { in qede_xdp_tx_int()
[all …]
/drivers/net/ethernet/intel/i40e/
i40e_xsk.c
561 xdp_return_frame(tx_bi->xdpf); in i40e_clean_xdp_tx_buffer()
601 if (tx_bi->xdpf) { in i40e_clean_xdp_tx_irq()
603 tx_bi->xdpf = NULL; in i40e_clean_xdp_tx_irq()
700 if (tx_bi->xdpf) in i40e_xsk_clean_tx_ring()
705 tx_bi->xdpf = NULL; in i40e_xsk_clean_tx_ring()
i40e_txrx.c
763 xdp_return_frame(tx_buffer->xdpf); in i40e_unmap_and_free_tx_resource()
969 xdp_return_frame(tx_buf->xdpf); in i40e_clean_tx_irq()
2262 static int i40e_xmit_xdp_ring(struct xdp_frame *xdpf,
2267 struct xdp_frame *xdpf = xdp_convert_buff_to_frame(xdp); in i40e_xmit_xdp_tx_ring() local
2269 if (unlikely(!xdpf)) in i40e_xmit_xdp_tx_ring()
2272 return i40e_xmit_xdp_ring(xdpf, xdp_ring); in i40e_xmit_xdp_tx_ring()
3675 static int i40e_xmit_xdp_ring(struct xdp_frame *xdpf, in i40e_xmit_xdp_ring() argument
3681 void *data = xdpf->data; in i40e_xmit_xdp_ring()
3682 u32 size = xdpf->len; in i40e_xmit_xdp_ring()
3696 tx_bi->xdpf = xdpf; in i40e_xmit_xdp_ring()
[all …]
/drivers/net/
virtio_net.c
521 struct xdp_frame *xdpf) in __virtnet_xdp_xmit_one() argument
526 if (unlikely(xdpf->headroom < vi->hdr_len)) in __virtnet_xdp_xmit_one()
530 xdpf->data -= vi->hdr_len; in __virtnet_xdp_xmit_one()
532 hdr = xdpf->data; in __virtnet_xdp_xmit_one()
534 xdpf->len += vi->hdr_len; in __virtnet_xdp_xmit_one()
536 sg_init_one(sq->sg, xdpf->data, xdpf->len); in __virtnet_xdp_xmit_one()
538 err = virtqueue_add_outbuf(sq->vq, sq->sg, 1, xdp_to_ptr(xdpf), in __virtnet_xdp_xmit_one()
631 struct xdp_frame *xdpf = frames[i]; in virtnet_xdp_xmit() local
633 if (__virtnet_xdp_xmit_one(vi, sq, xdpf)) in virtnet_xdp_xmit()
763 struct xdp_frame *xdpf; in receive_small() local
[all …]
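
__virtnet_xdp_xmit_one() above prepends the virtio-net header without copying the payload: it checks that the headroom can hold the header, then grows the frame backwards (data -= hdr_len, len += hdr_len). A minimal sketch of that prepend; struct fake_vnet_hdr is a hypothetical stand-in for the real virtio-net header.

#include <stdio.h>
#include <string.h>

struct fake_vnet_hdr {                   /* stand-in for the virtio-net header */
        unsigned char flags, gso_type;
        unsigned short hdr_len, gso_size, csum_start, csum_offset, num_buffers;
};

struct fake_frame {
        char *data;
        unsigned int len;
        unsigned int headroom;
};

/* Mirrors __virtnet_xdp_xmit_one(): fail if the headroom cannot hold the
 * header, otherwise grow the frame backwards over it. */
static int prepend_vnet_hdr(struct fake_frame *f)
{
        if (f->headroom < sizeof(struct fake_vnet_hdr))
                return -1;               /* like the headroom < hdr_len check */
        f->data -= sizeof(struct fake_vnet_hdr);
        memset(f->data, 0, sizeof(struct fake_vnet_hdr));
        f->len += sizeof(struct fake_vnet_hdr);
        f->headroom -= sizeof(struct fake_vnet_hdr);
        return 0;
}

int main(void)
{
        char buf[256];
        struct fake_frame f = { .data = buf + 64, .len = 60, .headroom = 64 };

        if (prepend_vnet_hdr(&f) == 0)
                printf("frame now %u bytes at offset %td\n",
                       f.len, f.data - buf);
        return 0;
}
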
xen-netfront.c
615 struct xdp_frame *xdpf) in xennet_xdp_xmit_one() argument
622 .page = virt_to_page(xdpf->data), in xennet_xdp_xmit_one()
627 offset_in_page(xdpf->data), in xennet_xdp_xmit_one()
628 xdpf->len); in xennet_xdp_xmit_one()
637 tx_stats->bytes += xdpf->len; in xennet_xdp_xmit_one()
665 struct xdp_frame *xdpf = frames[i]; in xennet_xdp_xmit() local
667 if (!xdpf) in xennet_xdp_xmit()
669 if (xennet_xdp_xmit_one(dev, queue, xdpf)) in xennet_xdp_xmit()
973 struct xdp_frame *xdpf; in xennet_run_xdp() local
987 xdpf = xdp_convert_buff_to_frame(xdp); in xennet_run_xdp()
[all …]
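
xennet_xdp_xmit_one() above hands the grant interface a page plus an in-page offset rather than a raw pointer, via virt_to_page() and offset_in_page(). Modeled below with plain address masking under an assumed 4K PAGE_SIZE; the fake_* helpers are illustrative, and the kernel's virt_to_page() returns a struct page, not an address.

#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE 4096UL                 /* assumed; not universal */

static void *fake_virt_to_page(void *addr)
{
        /* page base address; stands in for the struct page lookup */
        return (void *)((uintptr_t)addr & ~(PAGE_SIZE - 1));
}

static unsigned long fake_offset_in_page(void *addr)
{
        return (uintptr_t)addr & (PAGE_SIZE - 1);
}

int main(void)
{
        char buf[2 * PAGE_SIZE];
        void *data = buf + 5000;         /* somewhere mid-buffer */

        printf("page base %p, offset %lu\n",
               fake_virt_to_page(data), fake_offset_in_page(data));
        return 0;
}
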
veth.c
855 void *xdpf[VETH_XDP_BATCH]; in veth_xdp_rcv() local
871 xdpf[n_xdpf++] = frame; in veth_xdp_rcv()
873 veth_xdp_rcv_bulk_skb(rq, xdpf, n_xdpf, in veth_xdp_rcv()
895 veth_xdp_rcv_bulk_skb(rq, xdpf, n_xdpf, bq, stats); in veth_xdp_rcv()
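
The veth hits show frame batching: up to VETH_XDP_BATCH converted frames are collected into a local array, flushed to veth_xdp_rcv_bulk_skb() whenever it fills, and flushed once more after the loop for any remainder. A small standalone sketch of that accumulate-and-flush shape; BATCH and flush_batch() are stand-ins, not the driver's symbols.

#include <stdio.h>

#define BATCH 16                         /* stand-in for VETH_XDP_BATCH */

static void flush_batch(void **frames, int n)
{
        (void)frames;                    /* would go to veth_xdp_rcv_bulk_skb() */
        printf("flushing %d frames\n", n);
}

int main(void)
{
        static int pkt[40];              /* 40 fake received frames */
        void *batch[BATCH];
        int n = 0;

        for (int i = 0; i < 40; i++) {
                batch[n++] = &pkt[i];
                if (n == BATCH) {        /* array full: flush mid-loop */
                        flush_batch(batch, n);
                        n = 0;
                }
        }
        if (n)                           /* leftover partial batch */
                flush_batch(batch, n);
        return 0;
}
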
/drivers/net/ethernet/amazon/ena/
ena_netdev.c
241 struct xdp_frame *xdpf, in ena_xdp_tx_map_frame() argument
251 tx_info->xdpf = xdpf; in ena_xdp_tx_map_frame()
252 data = tx_info->xdpf->data; in ena_xdp_tx_map_frame()
253 size = tx_info->xdpf->len; in ena_xdp_tx_map_frame()
297 struct xdp_frame *xdpf, in ena_xdp_xmit_frame() argument
310 rc = ena_xdp_tx_map_frame(xdp_ring, tx_info, xdpf, &ena_tx_ctx); in ena_xdp_xmit_frame()
321 xdpf->len); in ena_xdp_xmit_frame()
335 tx_info->xdpf = NULL; in ena_xdp_xmit_frame()
384 struct xdp_frame *xdpf; in ena_xdp_execute() local
396 xdpf = xdp_convert_buff_to_frame(xdp); in ena_xdp_execute()
[all …]
/drivers/net/ethernet/stmicro/stmmac/
stmmac_main.c
1547 if (tx_q->xdpf[i] && in stmmac_free_tx_buffer()
1550 xdp_return_frame(tx_q->xdpf[i]); in stmmac_free_tx_buffer()
1551 tx_q->xdpf[i] = NULL; in stmmac_free_tx_buffer()
2457 tx_q->xdpf[entry] = NULL; in stmmac_xdp_xmit_zc()
2527 struct xdp_frame *xdpf; in stmmac_tx_clean() local
2534 xdpf = tx_q->xdpf[entry]; in stmmac_tx_clean()
2537 xdpf = NULL; in stmmac_tx_clean()
2540 xdpf = NULL; in stmmac_tx_clean()
2600 if (xdpf && in stmmac_tx_clean()
2602 xdp_return_frame_rx_napi(xdpf); in stmmac_tx_clean()
[all …]
/drivers/net/ethernet/marvell/
mvneta.c
618 struct xdp_frame *xdpf; member
1862 xdp_return_frame_rx_napi(buf->xdpf); in mvneta_txq_bufs_free()
1864 xdp_return_frame_bulk(buf->xdpf, &bq); in mvneta_txq_bufs_free()
2049 struct xdp_frame *xdpf, bool dma_map) in mvneta_xdp_submit_frame() argument
2063 dma_addr = dma_map_single(pp->dev->dev.parent, xdpf->data, in mvneta_xdp_submit_frame()
2064 xdpf->len, DMA_TO_DEVICE); in mvneta_xdp_submit_frame()
2071 struct page *page = virt_to_page(xdpf->data); in mvneta_xdp_submit_frame()
2074 sizeof(*xdpf) + xdpf->headroom; in mvneta_xdp_submit_frame()
2076 xdpf->len, DMA_BIDIRECTIONAL); in mvneta_xdp_submit_frame()
2079 buf->xdpf = xdpf; in mvneta_xdp_submit_frame()
[all …]
/drivers/net/ethernet/intel/igc/
igc_main.c
208 xdp_return_frame(tx_buffer->xdpf); in igc_clean_tx_ring()
2301 struct xdp_frame *xdpf, in igc_xdp_init_tx_buffer() argument
2306 dma = dma_map_single(ring->dev, xdpf->data, xdpf->len, DMA_TO_DEVICE); in igc_xdp_init_tx_buffer()
2313 buffer->xdpf = xdpf; in igc_xdp_init_tx_buffer()
2315 buffer->bytecount = xdpf->len; in igc_xdp_init_tx_buffer()
2318 dma_unmap_len_set(buffer, len, xdpf->len); in igc_xdp_init_tx_buffer()
2325 struct xdp_frame *xdpf) in igc_xdp_init_tx_descriptor() argument
2336 err = igc_xdp_init_tx_buffer(buffer, xdpf, ring); in igc_xdp_init_tx_descriptor()
2377 struct xdp_frame *xdpf = xdp_convert_buff_to_frame(xdp); in igc_xdp_xmit_back() local
2383 if (unlikely(!xdpf)) in igc_xdp_xmit_back()
[all …]
/drivers/net/ethernet/freescale/dpaa2/
dpaa2-eth.c
1061 xdp_return_frame(swa->xdp.xdpf); in dpaa2_eth_free_tx_fd()
2380 struct xdp_frame *xdpf, in dpaa2_eth_xdp_create_fd() argument
2393 if (xdpf->headroom < needed_headroom) in dpaa2_eth_xdp_create_fd()
2400 buffer_start = xdpf->data - needed_headroom; in dpaa2_eth_xdp_create_fd()
2403 if (aligned_start >= xdpf->data - xdpf->headroom) in dpaa2_eth_xdp_create_fd()
2409 swa->xdp.dma_size = xdpf->data + xdpf->len - buffer_start; in dpaa2_eth_xdp_create_fd()
2410 swa->xdp.xdpf = xdpf; in dpaa2_eth_xdp_create_fd()
2419 dpaa2_fd_set_offset(fd, xdpf->data - buffer_start); in dpaa2_eth_xdp_create_fd()
2420 dpaa2_fd_set_len(fd, xdpf->len); in dpaa2_eth_xdp_create_fd()
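
dpaa2_eth_xdp_create_fd() above builds a hardware frame descriptor that addresses the buffer by base plus offset: the driver backs buffer_start up by the needed headroom, aligns it down if that still lands inside the frame, and encodes the payload as (data - buffer_start, len). A sketch under assumed names and a 64-byte alignment rule; struct fake_fd and build_fd() are illustrative, not the DPAA2 API.

#include <stdint.h>
#include <stdio.h>

#define FD_ALIGN 64                      /* stand-in for the DPAA2 alignment rule */

struct fake_fd {                         /* stand-in for struct dpaa2_fd */
        uint64_t addr;                   /* DMA address of buffer_start */
        uint16_t offset;                 /* payload offset within the buffer */
        uint32_t len;
};

static int build_fd(struct fake_fd *fd, char *data, uint32_t len,
                    uint16_t headroom, uint16_t needed_headroom)
{
        if (headroom < needed_headroom)
                return -1;                       /* not enough room, drop */

        char *buffer_start = data - needed_headroom;
        char *aligned = (char *)((uintptr_t)buffer_start &
                                 ~(uintptr_t)(FD_ALIGN - 1));

        if (aligned >= data - headroom)          /* still inside the frame */
                buffer_start = aligned;

        fd->addr = (uintptr_t)buffer_start;      /* would be a DMA mapping */
        fd->offset = (uint16_t)(data - buffer_start);
        fd->len = len;
        return 0;
}

int main(void)
{
        char frame[512];
        struct fake_fd fd;

        if (build_fd(&fd, frame + 200, 60, 200, 128) == 0)
                printf("fd: offset=%u len=%u\n", fd.offset, fd.len);
        return 0;
}
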
/drivers/net/ethernet/intel/igb/
igb.h
269 struct xdp_frame *xdpf; member
720 struct xdp_frame *xdpf);
/drivers/net/ethernet/marvell/mvpp2/
mvpp2_main.c
318 tx_buf->xdpf = data; in mvpp2_txq_inc_put()
2830 xdp_return_frame_bulk(tx_buf->xdpf, &bq); in mvpp2_txq_bufs_free()
3667 struct xdp_frame *xdpf, bool dma_map) in mvpp2_xdp_submit_frame() argument
3694 mvpp2_txdesc_size_set(port, tx_desc, xdpf->len); in mvpp2_xdp_submit_frame()
3698 dma_addr = dma_map_single(port->dev->dev.parent, xdpf->data, in mvpp2_xdp_submit_frame()
3699 xdpf->len, DMA_TO_DEVICE); in mvpp2_xdp_submit_frame()
3710 struct page *page = virt_to_page(xdpf->data); in mvpp2_xdp_submit_frame()
3713 sizeof(*xdpf) + xdpf->headroom; in mvpp2_xdp_submit_frame()
3715 xdpf->len, DMA_BIDIRECTIONAL); in mvpp2_xdp_submit_frame()
3723 mvpp2_txq_inc_put(port, txq_pcpu, xdpf, tx_desc, buf_type); in mvpp2_xdp_submit_frame()
[all …]
