Searched refs:page_pool (Results 1 – 25 of 27) sorted by relevance

/drivers/net/ethernet/apm/xgene/
xgene_enet_main.c:672 struct xgene_enet_desc_ring *buf_pool, *page_pool; in xgene_enet_rx_frame() local
690 page_pool = rx_ring->page_pool; in xgene_enet_rx_frame()
719 xgene_enet_free_pagepool(page_pool, raw_desc, exp_desc); in xgene_enet_rx_frame()
729 slots = page_pool->slots - 1; in xgene_enet_rx_frame()
730 head = page_pool->head; in xgene_enet_rx_frame()
741 page = page_pool->frag_page[head]; in xgene_enet_rx_frame()
747 page_pool->frag_page[head] = NULL; in xgene_enet_rx_frame()
751 page_pool->head = head; in xgene_enet_rx_frame()
764 ret = xgene_enet_refill_pagepool(page_pool, NUM_NXTBUFPOOL); in xgene_enet_rx_frame()
1062 struct xgene_enet_desc_ring *buf_pool, *page_pool; in xgene_enet_delete_desc_rings() local
[all …]
xgene_enet_cle.c:709 if (pdata->rx_ring[idx]->page_pool) { in xgene_cle_set_rss_idt()
710 pool_id = pdata->rx_ring[idx]->page_pool->id; in xgene_cle_set_rss_idt()
786 if (pdata->rx_ring[0]->page_pool) { in xgene_enet_cle_init()
787 pool_id = pdata->rx_ring[0]->page_pool->id; in xgene_enet_cle_init()
xgene_enet_main.h:119 struct xgene_enet_desc_ring *page_pool; member
/drivers/net/ethernet/socionext/
netsec.c:286 struct page_pool *page_pool; member
742 page = page_pool_dev_alloc_pages(dring->page_pool); in netsec_alloc_rx_data()
863 page_pool_get_dma_dir(rx_ring->page_pool); in netsec_xdp_queue_one()
922 page_pool_put_page(dring->page_pool, page, sync, true); in netsec_run_xdp()
932 page_pool_put_page(dring->page_pool, page, sync, true); in netsec_run_xdp()
944 page_pool_put_page(dring->page_pool, page, sync, true); in netsec_run_xdp()
966 dma_dir = page_pool_get_dma_dir(dring->page_pool); in netsec_process_rx()
1041 page_pool_put_page(dring->page_pool, page, pkt_len, in netsec_process_rx()
1047 page_pool_release_page(dring->page_pool, page); in netsec_process_rx()
1215 page_pool_put_full_page(dring->page_pool, page, false); in netsec_uninit_pkt_dring()
[all …]
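The netsec hits above show the generic page_pool RX pattern: buffers come from page_pool_dev_alloc_pages(), the mapping direction is read back with page_pool_get_dma_dir(), and a dropped frame goes back with page_pool_put_page() so only the bytes the NIC actually wrote get re-synced. A minimal sketch of that pattern, assuming a pre-6.6 kernel where the API lives in <net/page_pool.h>; the demo_rx_ring structure and demo_* names are invented for illustration, not netsec's own types:

#include <net/page_pool.h>
#include <linux/dma-mapping.h>

/* Hypothetical RX ring: only what the sketch needs, not netsec's layout. */
struct demo_rx_ring {
    struct page_pool *page_pool;
    struct device *dev;
};

/* Pull one RX buffer out of the pool. */
static struct page *demo_alloc_rx_page(struct demo_rx_ring *ring)
{
    return page_pool_dev_alloc_pages(ring->page_pool);
}

/* Make the received payload visible to the CPU before parsing it.
 * Assumes the pool was created with PP_FLAG_DMA_MAP, so the DMA address
 * is tracked by the pool. */
static void demo_sync_for_cpu(struct demo_rx_ring *ring, struct page *page,
                              unsigned int len)
{
    enum dma_data_direction dir = page_pool_get_dma_dir(ring->page_pool);

    dma_sync_single_for_cpu(ring->dev, page_pool_get_dma_addr(page), len, dir);
}

/* Drop a frame (e.g. XDP_DROP): recycle the page, re-syncing only len bytes. */
static void demo_drop_rx_page(struct demo_rx_ring *ring, struct page *page,
                              unsigned int len, bool napi)
{
    page_pool_put_page(ring->page_pool, page, len, napi);
}
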
/drivers/net/ethernet/ti/
cpsw_priv.c:1103 struct page_pool *pool; in cpsw_fill_rx_channels()
1110 pool = cpsw->page_pool[ch]; in cpsw_fill_rx_channels()
1144 static struct page_pool *cpsw_create_page_pool(struct cpsw_common *cpsw, in cpsw_create_page_pool()
1148 struct page_pool *pool; in cpsw_create_page_pool()
1166 struct page_pool *pool; in cpsw_create_rx_pool()
1174 cpsw->page_pool[ch] = pool; in cpsw_create_rx_pool()
1183 struct page_pool *pool; in cpsw_ndev_create_xdp_rxq()
1186 pool = cpsw->page_pool[ch]; in cpsw_ndev_create_xdp_rxq()
1224 page_pool_destroy(cpsw->page_pool[ch]); in cpsw_destroy_xdp_rxqs()
1225 cpsw->page_pool[ch] = NULL; in cpsw_destroy_xdp_rxqs()
[all …]
cpsw_priv.h:358 struct page_pool *page_pool[CPSW_MAX_QUEUES]; member
cpsw.c:355 struct page_pool *pool; in cpsw_rx_handler()
367 pool = cpsw->page_pool[ch]; in cpsw_rx_handler()
cpsw_new.c:292 struct page_pool *pool; in cpsw_rx_handler()
311 pool = cpsw->page_pool[ch]; in cpsw_rx_handler()
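cpsw keeps one pool per RX channel (cpsw->page_pool[ch]) and, in cpsw_ndev_create_xdp_rxq(), registers that pool as the XDP memory model of the channel's rxq. A hedged sketch of that registration step, with hypothetical demo_* naming:

#include <linux/netdevice.h>
#include <net/page_pool.h>
#include <net/xdp.h>

/* Register an already-created per-channel pool as the XDP memory model
 * of that channel's rxq. */
static int demo_reg_xdp_rxq(struct net_device *ndev, int ch,
                            struct page_pool *pool, struct xdp_rxq_info *rxq)
{
    int ret;

    ret = xdp_rxq_info_reg(rxq, ndev, ch, 0);
    if (ret)
        return ret;

    ret = xdp_rxq_info_reg_mem_model(rxq, MEM_TYPE_PAGE_POOL, pool);
    if (ret)
        xdp_rxq_info_unreg(rxq);

    return ret;
}
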
/drivers/dma-buf/heaps/
Makefile:3 obj-$(CONFIG_DMABUF_HEAPS_PAGE_POOL) += page_pool.o
/drivers/net/ethernet/marvell/mvpp2/
mvpp2_main.c:104 static struct page_pool *
361 struct page_pool *page_pool) in mvpp2_frag_alloc() argument
363 if (page_pool) in mvpp2_frag_alloc()
364 return page_pool_dev_alloc_pages(page_pool); in mvpp2_frag_alloc()
373 struct page_pool *page_pool, void *data) in mvpp2_frag_free() argument
375 if (page_pool) in mvpp2_frag_free()
376 page_pool_put_full_page(page_pool, virt_to_head_page(data), false); in mvpp2_frag_free()
495 struct page_pool *pp = NULL; in mvpp2_bm_bufs_free()
505 pp = priv->page_pool[bm_pool->id]; in mvpp2_bm_bufs_free()
570 page_pool_destroy(priv->page_pool[bm_pool->id]); in mvpp2_bm_pool_destroy()
[all …]
mvpp2.h:1104 struct page_pool *page_pool[MVPP2_PORT_MAX_RXQ]; member
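mvpp2_frag_alloc()/mvpp2_frag_free() only use the pool when one is attached to the buffer-manager pool, otherwise they fall back to the plain frag allocator. A generic sketch of that conditional pattern (not mvpp2's exact code; demo_* names are made up):

#include <linux/mm.h>
#include <linux/skbuff.h>
#include <net/page_pool.h>

/* Allocate a buffer from the pool when one exists, else a plain frag. */
static void *demo_frag_alloc(struct page_pool *pool, unsigned int frag_size)
{
    struct page *page;

    if (pool) {
        page = page_pool_dev_alloc_pages(pool);
        return page ? page_address(page) : NULL;
    }

    return netdev_alloc_frag(frag_size);
}

/* Free it the same way it was allocated. */
static void demo_frag_free(struct page_pool *pool, void *data)
{
    if (pool)
        page_pool_put_full_page(pool, virt_to_head_page(data), false);
    else
        skb_free_frag(data);
}
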
/drivers/net/
xen-netfront.c:158 struct page_pool *page_pool; member
283 page = page_pool_alloc_pages(queue->page_pool, in xennet_alloc_one_rx_buffer()
1848 page_pool_destroy(queue->page_pool); in xennet_disconnect_backend()
2203 queue->page_pool = page_pool_create(&pp_params); in xennet_create_page_pool()
2204 if (IS_ERR(queue->page_pool)) { in xennet_create_page_pool()
2205 err = PTR_ERR(queue->page_pool); in xennet_create_page_pool()
2206 queue->page_pool = NULL; in xennet_create_page_pool()
2218 MEM_TYPE_PAGE_POOL, queue->page_pool); in xennet_create_page_pool()
2228 page_pool_destroy(queue->page_pool); in xennet_create_page_pool()
2229 queue->page_pool = NULL; in xennet_create_page_pool()
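xennet_create_page_pool() follows the usual creation pattern: fill a struct page_pool_params, call page_pool_create(), unwrap failure with IS_ERR()/PTR_ERR(), and NULL the pointer so teardown can reach page_pool_destroy() safely. A sketch of the same pattern; the parameter values are illustrative only, not xen-netfront's actual settings:

#include <linux/err.h>
#include <linux/numa.h>
#include <linux/printk.h>
#include <net/page_pool.h>

/* Create a small per-queue pool; without PP_FLAG_DMA_MAP the pool needs
 * no DMA device and the caller maps (or grants) pages itself. */
static struct page_pool *demo_create_pool(unsigned int ring_size)
{
    struct page_pool_params pp_params = {
        .order     = 0,
        .flags     = 0,
        .pool_size = ring_size,
        .nid       = NUMA_NO_NODE,
    };
    struct page_pool *pool;

    pool = page_pool_create(&pp_params);
    if (IS_ERR(pool)) {
        pr_err("page_pool_create failed: %ld\n", PTR_ERR(pool));
        return NULL; /* callers may prefer to propagate PTR_ERR() instead */
    }

    return pool;
}

/* Teardown mirrors the xennet_disconnect_backend() hit. */
static void demo_destroy_pool(struct page_pool *pool)
{
    if (pool)
        page_pool_destroy(pool);
}
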
/drivers/net/ethernet/marvell/
mvneta.c:682 struct page_pool *page_pool; member
1905 page = page_pool_alloc_pages(rxq->page_pool, in mvneta_rx_refill()
1978 page_pool_put_full_page(rxq->page_pool, data, false); in mvneta_rxq_drop_pkts()
1982 page_pool_destroy(rxq->page_pool); in mvneta_rxq_drop_pkts()
1983 rxq->page_pool = NULL; in mvneta_rxq_drop_pkts()
2041 page_pool_put_full_page(rxq->page_pool, in mvneta_xdp_put_buff()
2043 page_pool_put_page(rxq->page_pool, virt_to_head_page(xdp->data), in mvneta_xdp_put_buff()
2255 dma_dir = page_pool_get_dma_dir(rxq->page_pool); in mvneta_swbm_rx_frame()
2290 dma_dir = page_pool_get_dma_dir(rxq->page_pool); in mvneta_swbm_add_rx_fragment()
2303 page_pool_put_full_page(rxq->page_pool, page, true); in mvneta_swbm_add_rx_fragment()
[all …]
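mvneta_xdp_put_buff() hands an XDP buffer's backing pages back via virt_to_head_page(); the full-page put skips the partial DMA-sync length, and the last argument tells the pool whether the caller is in NAPI context and may recycle into the lockless cache. A simplified sketch covering just the head page (frag handling omitted; demo_* is hypothetical):

#include <linux/mm.h>
#include <net/page_pool.h>
#include <net/xdp.h>

/* Return the head page backing an xdp_buff to its pool. napi_direct must
 * only be true when the caller runs in the pool's NAPI context. */
static void demo_xdp_return_head(struct page_pool *pool, struct xdp_buff *xdp,
                                 bool napi_direct)
{
    struct page *page = virt_to_head_page(xdp->data);

    page_pool_put_full_page(pool, page, napi_direct);
}
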
/drivers/net/ethernet/stmicro/stmmac/
stmmac.h:100 struct page_pool *page_pool; member
stmmac_main.c:1476 buf->page = page_pool_dev_alloc_pages(rx_q->page_pool); in stmmac_init_rx_buffers()
1483 buf->sec_page = page_pool_dev_alloc_pages(rx_q->page_pool); in stmmac_init_rx_buffers()
1515 page_pool_put_full_page(rx_q->page_pool, buf->page, false); in stmmac_free_rx_buffer()
1519 page_pool_put_full_page(rx_q->page_pool, buf->sec_page, false); in stmmac_free_rx_buffer()
1699 rx_q->page_pool)); in __init_dma_rx_desc_rings()
1941 if (rx_q->page_pool) in __free_dma_rx_desc_resources()
1942 page_pool_destroy(rx_q->page_pool); in __free_dma_rx_desc_resources()
2030 rx_q->page_pool = page_pool_create(&pp_params); in __alloc_dma_rx_desc_resources()
2031 if (IS_ERR(rx_q->page_pool)) { in __alloc_dma_rx_desc_resources()
2032 ret = PTR_ERR(rx_q->page_pool); in __alloc_dma_rx_desc_resources()
[all …]
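stmmac creates its per-queue pools with the DMA mapping handled by the pool, so stmmac_init_rx_buffers() can take the descriptor address straight from the page. A sketch assuming a pool created with PP_FLAG_DMA_MAP; demo_fill_rx_buffer() is a made-up helper, not stmmac's code:

#include <linux/types.h>
#include <net/page_pool.h>

/* Allocate an RX buffer and report the DMA address to write into the
 * hardware descriptor; valid because the pool maps the page itself. */
static struct page *demo_fill_rx_buffer(struct page_pool *pool, dma_addr_t *dma)
{
    struct page *page = page_pool_dev_alloc_pages(pool);

    if (!page)
        return NULL;

    *dma = page_pool_get_dma_addr(page);
    return page;
}
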
/drivers/net/ethernet/mellanox/mlx5/core/
en_rx.c:270 dma_info->page = page_pool_dev_alloc_pages(rq->page_pool); in mlx5e_page_alloc_pool()
277 page_pool_recycle_direct(rq->page_pool, dma_info->page); in mlx5e_page_alloc_pool()
309 page_pool_recycle_direct(rq->page_pool, dma_info->page); in mlx5e_page_release_dynamic()
312 page_pool_release_page(rq->page_pool, dma_info->page); in mlx5e_page_release_dynamic()
583 if (rq->page_pool) in mlx5e_post_rx_wqes()
584 page_pool_nid_changed(rq->page_pool, numa_mem_id()); in mlx5e_post_rx_wqes()
741 if (rq->page_pool) in mlx5e_post_rx_mpwqes()
742 page_pool_nid_changed(rq->page_pool, numa_mem_id()); in mlx5e_post_rx_mpwqes()
en.h:64 struct page_pool;
665 struct page_pool *page_pool; member
en_main.c:512 rq->page_pool = page_pool_create(&pp_params); in mlx5e_alloc_rq()
513 if (IS_ERR(rq->page_pool)) { in mlx5e_alloc_rq()
514 err = PTR_ERR(rq->page_pool); in mlx5e_alloc_rq()
515 rq->page_pool = NULL; in mlx5e_alloc_rq()
520 MEM_TYPE_PAGE_POOL, rq->page_pool); in mlx5e_alloc_rq()
631 page_pool_destroy(rq->page_pool); in mlx5e_free_rq()
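mlx5e calls page_pool_nid_changed() once per poll before refilling so cached pages follow the NAPI thread's NUMA node, and recycles hot-path pages with page_pool_recycle_direct(). A small sketch of both calls (the surrounding poll logic is omitted; demo_* names are invented):

#include <linux/topology.h>
#include <net/page_pool.h>

/* Once per NAPI poll, before refilling: let the pool follow the CPU's
 * NUMA node so recycled pages stay local. */
static void demo_napi_prep(struct page_pool *pool)
{
    if (pool)
        page_pool_nid_changed(pool, numa_mem_id());
}

/* Hot-path recycle; only safe from the pool's NAPI/softirq context. */
static void demo_recycle(struct page_pool *pool, struct page *page)
{
    page_pool_recycle_direct(pool, page);
}
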
/drivers/net/ethernet/hisilicon/hns3/
hns3_enet.h:461 struct page_pool *page_pool; member
hns3_enet.c:3176 if (ring->page_pool) { in hns3_alloc_buffer()
3177 p = page_pool_dev_alloc_frag(ring->page_pool, in hns3_alloc_buffer()
3217 page_pool_put_full_page(ring->page_pool, cb->priv, in hns3_free_buffer()
3307 if (ret || ring->page_pool) in hns3_alloc_and_map_buffer()
3567 if (ring->page_pool) { in hns3_nic_reuse_page()
3865 page_pool_put_full_page(ring->page_pool, desc_cb->priv, in hns3_alloc_skb()
3875 if (ring->page_pool) in hns3_alloc_skb()
3915 if (ring->page_pool) in hns3_add_frag()
4730 ring->page_pool = page_pool_create(&pp_params); in hns3_alloc_page_pool()
4731 if (IS_ERR(ring->page_pool)) { in hns3_alloc_page_pool()
[all …]
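hns3 creates its pools with PP_FLAG_PAGE_FRAG and fills RX buffers with page_pool_dev_alloc_frag(), which carves sub-page chunks out of reference-counted pool pages. A sketch assuming a kernel new enough (5.15+) to have the frag API; demo_alloc_rx_frag() is illustrative:

#include <net/page_pool.h>

/* Carve a frag-sized RX buffer out of a PP_FLAG_PAGE_FRAG pool; *offset
 * tells the caller where inside the returned page the buffer starts. */
static struct page *demo_alloc_rx_frag(struct page_pool *pool,
                                       unsigned int frag_size,
                                       unsigned int *offset)
{
    return page_pool_dev_alloc_frag(pool, offset, frag_size);
}
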
/drivers/net/ethernet/broadcom/bnxt/
bnxt_xdp.c:193 page_pool_recycle_direct(rxr->page_pool, page); in bnxt_rx_xdp()
bnxt.h:38 struct page_pool;
914 struct page_pool *page_pool; member
bnxt.c:779 page = page_pool_dev_alloc_pages(rxr->page_pool); in __bnxt_alloc_rx_page()
786 page_pool_recycle_direct(rxr->page_pool, page); in __bnxt_alloc_rx_page()
1032 page_pool_release_page(rxr->page_pool, page); in bnxt_rx_page_skb()
2845 page_pool_recycle_direct(rxr->page_pool, data); in bnxt_free_one_rx_ring_skbs()
3079 page_pool_destroy(rxr->page_pool); in bnxt_free_rx_rings()
3080 rxr->page_pool = NULL; in bnxt_free_rx_rings()
3103 rxr->page_pool = page_pool_create(&pp); in bnxt_alloc_rx_page_pool()
3104 if (IS_ERR(rxr->page_pool)) { in bnxt_alloc_rx_page_pool()
3105 int err = PTR_ERR(rxr->page_pool); in bnxt_alloc_rx_page_pool()
3107 rxr->page_pool = NULL; in bnxt_alloc_rx_page_pool()
[all …]
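bnxt_rx_page_skb() uses page_pool_release_page() when a pool page is attached to an skb and ownership moves to the stack: the pool drops its DMA mapping and stops tracking the page instead of recycling it (later kernels moved most such callers to skb_mark_for_recycle()). A sketch of that hand-off, with a hypothetical demo_* helper:

#include <linux/skbuff.h>
#include <net/page_pool.h>

/* Attach a pool page to an skb and give up pool ownership: the pool
 * unmaps and forgets the page, which is then freed normally once the
 * skb is consumed. */
static void demo_give_page_to_skb(struct page_pool *pool, struct sk_buff *skb,
                                  struct page *page, unsigned int off,
                                  unsigned int len, unsigned int truesize)
{
    skb_add_rx_frag(skb, skb_shinfo(skb)->nr_frags, page, off, len, truesize);
    page_pool_release_page(pool, page);
}
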
/drivers/md/
dm-crypt.c:217 mempool_t page_pool; member
1680 page = mempool_alloc(&cc->page_pool, gfp_mask); in crypt_alloc_buffer()
1715 mempool_free(bv->bv_page, &cc->page_pool); in crypt_free_buffer_pages()
2714 mempool_exit(&cc->page_pool); in crypt_dtr()
3258 ret = mempool_init(&cc->page_pool, BIO_MAX_VECS, crypt_page_alloc, crypt_page_free, cc); in crypt_ctr()
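Note that dm-crypt's page_pool is unrelated to the networking allocator above: it is a mempool_t that guarantees bounce pages for in-flight bios. dm-crypt installs its own crypt_page_alloc()/crypt_page_free() callbacks for memory accounting; the sketch below uses the stock mempool page helpers instead, so it is a simplified stand-in rather than dm-crypt's actual setup:

#include <linux/bio.h>
#include <linux/gfp.h>
#include <linux/mempool.h>

/* Reserve BIO_MAX_VECS order-0 pages so one bio can always be bounced,
 * even under memory pressure. */
static int demo_init_page_mempool(mempool_t *pool)
{
    return mempool_init_page_pool(pool, BIO_MAX_VECS, 0);
}

static struct page *demo_get_page(mempool_t *pool, gfp_t gfp)
{
    return mempool_alloc(pool, gfp);
}

static void demo_put_page(mempool_t *pool, struct page *page)
{
    mempool_free(page, pool);
}
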
/drivers/net/ethernet/broadcom/bnx2x/
bnx2x_cmn.h:1024 bnx2x_free_rx_mem_pool(bp, &fp->page_pool); in bnx2x_free_rx_sge_range()
