
Searched refs:qpl (Results 1 – 6 of 6) sorted by relevance

/kernel/linux/linux-5.10/drivers/net/ethernet/google/gve/
gve_tx.c
     31  fifo->base = vmap(fifo->qpl->pages, fifo->qpl->num_entries, VM_MAP,   in gve_tx_fifo_init()
     35  fifo->qpl->id);   in gve_tx_fifo_init()
     39  fifo->size = fifo->qpl->num_entries * PAGE_SIZE;   in gve_tx_fifo_init()
    162  gve_unassign_qpl(priv, tx->tx_fifo.qpl->id);   in gve_tx_free_ring()
    163  tx->tx_fifo.qpl = NULL;   in gve_tx_free_ring()
    209  tx->tx_fifo.qpl = gve_assign_tx_qpl(priv);   in gve_tx_alloc_ring()
    210  if (!tx->tx_fifo.qpl)   in gve_tx_alloc_ring()
    235  gve_unassign_qpl(priv, tx->tx_fifo.qpl->id);   in gve_tx_alloc_ring()
    454  gve_dma_sync_for_device(dev, tx->tx_fifo.qpl->page_buses,   in gve_tx_add_skb()
    470  gve_dma_sync_for_device(dev, tx->tx_fifo.qpl->page_buses,   in gve_tx_add_skb()
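
The gve_tx.c hits show the TX FIFO being built directly on top of a QPL: gve_tx_fifo_init() stitches the QPL's page array into one contiguous kernel mapping with vmap() and sizes the FIFO as num_entries * PAGE_SIZE. A minimal sketch of that pattern follows; only the fields visible in the hits (pages, num_entries, id, base, size) are taken from the source, the struct layouts and the error path are assumptions.

    #include <linux/errno.h>
    #include <linux/mm.h>       /* PAGE_SIZE, struct page */
    #include <linux/printk.h>
    #include <linux/types.h>
    #include <linux/vmalloc.h>  /* vmap(), VM_MAP */

    /* Cut-down stand-ins for the driver structs (layout assumed). */
    struct gve_queue_page_list {
            u32 id;
            u32 num_entries;
            struct page **pages;
            dma_addr_t *page_buses;
    };

    struct gve_tx_fifo {
            void *base;                      /* contiguous view of the QPL pages */
            u32 size;                        /* num_entries * PAGE_SIZE */
            struct gve_queue_page_list *qpl; /* QPL mapped into this FIFO */
    };

    /* Same shape as the gve_tx_fifo_init() lines above: map the QPL's pages
     * into one virtually contiguous buffer, then derive the FIFO size from
     * the number of registered pages. */
    static int qpl_fifo_init_sketch(struct gve_tx_fifo *fifo)
    {
            fifo->base = vmap(fifo->qpl->pages, fifo->qpl->num_entries,
                              VM_MAP, PAGE_KERNEL);
            if (!fifo->base) {
                    pr_err("Failed to vmap fifo, qpl_id = %u\n", fifo->qpl->id);
                    return -ENOMEM;
            }

            fifo->size = fifo->qpl->num_entries * PAGE_SIZE;
            return 0;
    }
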
gve_main.c
    624  struct gve_queue_page_list *qpl = &priv->qpls[id];   in gve_alloc_queue_page_list()  [local]
    636  qpl->id = id;   in gve_alloc_queue_page_list()
    637  qpl->num_entries = 0;   in gve_alloc_queue_page_list()
    638  qpl->pages = kvzalloc(pages * sizeof(*qpl->pages), GFP_KERNEL);   in gve_alloc_queue_page_list()
    640  if (!qpl->pages)   in gve_alloc_queue_page_list()
    642  qpl->page_buses = kvzalloc(pages * sizeof(*qpl->page_buses),   in gve_alloc_queue_page_list()
    645  if (!qpl->page_buses)   in gve_alloc_queue_page_list()
    649  err = gve_alloc_page(priv, &priv->pdev->dev, &qpl->pages[i],   in gve_alloc_queue_page_list()
    650  &qpl->page_buses[i],   in gve_alloc_queue_page_list()
    655  qpl->num_entries++;   in gve_alloc_queue_page_list()
    [all …]
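
gve_alloc_queue_page_list() above follows a common pattern: kvzalloc() two parallel arrays (struct page pointers and their DMA bus addresses), then allocate and map one page per slot, counting successes in num_entries. A rough sketch of that pattern, using plain alloc_page()/dma_map_page() where the driver has its gve_alloc_page() helper; the unwind path and the qpl_sketch type are assumptions.

    #include <linux/dma-mapping.h>
    #include <linux/errno.h>
    #include <linux/gfp.h>
    #include <linux/mm.h>
    #include <linux/slab.h>   /* kvzalloc(), kvfree() */
    #include <linux/types.h>

    struct qpl_sketch {
            u32 id;
            u32 num_entries;
            struct page **pages;
            dma_addr_t *page_buses;
    };

    static int qpl_alloc_sketch(struct device *dev, struct qpl_sketch *qpl,
                                u32 id, u32 pages)
    {
            u32 i;

            qpl->id = id;
            qpl->num_entries = 0;

            /* Parallel arrays: one struct page * and one bus address per slot. */
            qpl->pages = kvzalloc(pages * sizeof(*qpl->pages), GFP_KERNEL);
            if (!qpl->pages)
                    return -ENOMEM;
            qpl->page_buses = kvzalloc(pages * sizeof(*qpl->page_buses),
                                       GFP_KERNEL);
            if (!qpl->page_buses)
                    goto free_pages;

            for (i = 0; i < pages; i++) {
                    /* The driver wraps this page + mapping pair in gve_alloc_page(). */
                    qpl->pages[i] = alloc_page(GFP_KERNEL);
                    if (!qpl->pages[i])
                            goto unwind;
                    qpl->page_buses[i] = dma_map_page(dev, qpl->pages[i], 0,
                                                      PAGE_SIZE, DMA_BIDIRECTIONAL);
                    if (dma_mapping_error(dev, qpl->page_buses[i])) {
                            __free_page(qpl->pages[i]);
                            goto unwind;
                    }
                    qpl->num_entries++;   /* teardown only undoes this many */
            }
            return 0;

    unwind:
            while (i--) {
                    dma_unmap_page(dev, qpl->page_buses[i], PAGE_SIZE,
                                   DMA_BIDIRECTIONAL);
                    __free_page(qpl->pages[i]);
            }
            kvfree(qpl->page_buses);
    free_pages:
            kvfree(qpl->pages);
            return -ENOMEM;
    }
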
gve_adminq.c
    333  .queue_page_list_id = cpu_to_be32(tx->tx_fifo.qpl->id),   in gve_adminq_create_tx_queue()
    374  .queue_page_list_id = cpu_to_be32(rx->data.qpl->id),   in gve_adminq_create_rx_queue()
    531  struct gve_queue_page_list *qpl)   in gve_adminq_register_page_list()  [argument]
    534  u32 num_entries = qpl->num_entries;   in gve_adminq_register_page_list()
    535  u32 size = num_entries * sizeof(qpl->page_buses[0]);   in gve_adminq_register_page_list()
    548  page_list[i] = cpu_to_be64(qpl->page_buses[i]);   in gve_adminq_register_page_list()
    552  .page_list_id = cpu_to_be32(qpl->id),   in gve_adminq_register_page_list()
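
Before a queue can reference a QPL by id, gve_adminq_register_page_list() hands the device the whole table of page bus addresses, each converted to big-endian, together with the big-endian QPL id. The sketch below shows just that conversion step; the command layout, its field names and the coherent-buffer handling are assumptions, only the cpu_to_be32()/cpu_to_be64() conversions and the qpl fields come from the hits.

    #include <asm/byteorder.h>
    #include <linux/dma-mapping.h>
    #include <linux/errno.h>
    #include <linux/gfp.h>
    #include <linux/types.h>

    struct qpl_sketch {
            u32 id;
            u32 num_entries;
            dma_addr_t *page_buses;
    };

    /* Illustrative command layout; the real gve adminq descriptor differs. */
    struct register_page_list_cmd_sketch {
            __be32 page_list_id;
            __be32 num_pages;
            __be64 page_address_list_addr;
    };

    static int register_qpl_sketch(struct device *dev, struct qpl_sketch *qpl,
                                   struct register_page_list_cmd_sketch *cmd)
    {
            u32 num_entries = qpl->num_entries;
            u32 size = num_entries * sizeof(qpl->page_buses[0]);
            dma_addr_t page_list_bus;
            __be64 *page_list;
            u32 i;

            /* Device-visible table of bus addresses, one __be64 per page. */
            page_list = dma_alloc_coherent(dev, size, &page_list_bus, GFP_KERNEL);
            if (!page_list)
                    return -ENOMEM;

            for (i = 0; i < num_entries; i++)
                    page_list[i] = cpu_to_be64(qpl->page_buses[i]);

            cmd->page_list_id = cpu_to_be32(qpl->id);
            cmd->num_pages = cpu_to_be32(num_entries);
            cmd->page_address_list_addr = cpu_to_be64(page_list_bus);

            /* ...the admin-queue command would be issued here... */

            dma_free_coherent(dev, size, page_list, page_list_bus);
            return 0;
    }
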
gve_rx.c
     36  gve_unassign_qpl(priv, rx->data.qpl->id);   in gve_rx_free_ring()
     37  rx->data.qpl = NULL;   in gve_rx_free_ring()
     74  rx->data.qpl = gve_assign_rx_qpl(priv);   in gve_prefill_rx_pages()
     77  struct page *page = rx->data.qpl->pages[i];   in gve_prefill_rx_pages()
    302  dma_sync_single_for_cpu(&priv->pdev->dev, rx->data.qpl->page_buses[idx],   in gve_rx()
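
On the receive side the ring borrows a QPL via gve_assign_rx_qpl(), seeds its buffer table straight from qpl->pages[], and in gve_rx() syncs the matching bus address back to the CPU before reading a filled buffer. A short sketch of those two steps; the rx_page_info_sketch type and everything not shown in the hits are invented for illustration.

    #include <linux/dma-mapping.h>
    #include <linux/mm.h>
    #include <linux/types.h>

    struct qpl_sketch {
            u32 num_entries;
            struct page **pages;
            dma_addr_t *page_buses;
    };

    struct rx_page_info_sketch {
            struct page *page;
            void *page_address;
    };

    /* Seed the RX buffer table from the QPL pages, as gve_prefill_rx_pages()
     * does for rx->data.qpl. */
    static void rx_prefill_sketch(struct qpl_sketch *qpl,
                                  struct rx_page_info_sketch *tbl)
    {
            u32 i;

            for (i = 0; i < qpl->num_entries; i++) {
                    tbl[i].page = qpl->pages[i];
                    tbl[i].page_address = page_address(qpl->pages[i]);
            }
    }

    /* Before the CPU reads a buffer the NIC just wrote, hand ownership back
     * with dma_sync_single_for_cpu(), as gve_rx() does per page. */
    static void rx_sync_sketch(struct device *dev, struct qpl_sketch *qpl,
                               u32 idx, unsigned int len)
    {
            dma_sync_single_for_cpu(dev, qpl->page_buses[idx], len,
                                    DMA_FROM_DEVICE);
    }
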
gve.h
     70  struct gve_queue_page_list *qpl; /* qpl assigned to this queue */   [member]
    124  struct gve_queue_page_list *qpl; /* QPL mapped into this FIFO */   [member]
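
Putting the hits together: both the RX queue (gve.h:70) and the TX FIFO (gve.h:124) point at a shared gve_queue_page_list which, judging by the fields touched in gve_main.c and gve_adminq.c, carries at least an id, an entry count, the page array and the matching DMA bus addresses. A reconstructed sketch; field order, exact types and any further members are guesses.

    #include <linux/types.h>

    struct gve_queue_page_list_sketch {
            u32 id;                  /* registered with the device over the adminq */
            u32 num_entries;         /* pages successfully allocated and mapped */
            struct page **pages;     /* vmapped by the TX FIFO, reused for RX buffers */
            dma_addr_t *page_buses;  /* bus addresses handed to the NIC */
    };
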
gve_adminq.h
    262  struct gve_queue_page_list *qpl);