Lines matching refs:buffer — TX buffer lifecycle in the sfc driver, grouped by function:
in efx_probe_tx_queue():
   38  tx_queue->buffer = kcalloc(entries, sizeof(*tx_queue->buffer),
   40  if (!tx_queue->buffer)
   62  kfree(tx_queue->buffer);
   63  tx_queue->buffer = NULL;
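
These matches are the ring's lifecycle bookends: efx_probe_tx_queue() allocates a zeroed array of `entries` descriptor slots with kcalloc(), which guards the count * size multiplication against overflow, and its error path (lines 62-63) frees the array and NULLs the pointer. A minimal userspace sketch of the same pattern, with calloc() standing in for kcalloc() and all names (my_queue, my_buf) invented:

#include <stdlib.h>

struct my_buf { unsigned int len, flags; };
struct my_queue { struct my_buf *buffer; unsigned int ptr_mask; };

/* Sketch: allocate a zeroed, power-of-two ring of descriptors.
 * calloc() stands in for kcalloc(..., GFP_KERNEL); both zero the
 * memory and overflow-check the count * size multiplication. */
static int my_probe_queue(struct my_queue *q, unsigned int entries)
{
    q->buffer = calloc(entries, sizeof(*q->buffer));
    if (!q->buffer)
        return -1;                  /* -ENOMEM in the kernel */
    q->ptr_mask = entries - 1;      /* assumes entries is a power of two */
    return 0;
}

int main(void)
{
    struct my_queue q;

    if (my_probe_queue(&q, 512))
        return 1;
    free(q.buffer);
    return 0;
}

Storing entries - 1 as a mask only works for power-of-two ring sizes, which is why the counters in the matches below can wrap with a simple AND.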
in efx_fini_tx_queue():
   99  struct efx_tx_buffer *buffer;  (local)
  106  if (!tx_queue->buffer)
  114  buffer = &tx_queue->buffer[tx_queue->read_count & tx_queue->ptr_mask];
  115  efx_dequeue_buffer(tx_queue, buffer, &pkts_compl, &bytes_compl,
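
efx_fini_tx_queue() bails out early if the ring was never allocated (line 106), then drains every outstanding entry: read_count is a free-running counter, and line 114 masks it with ptr_mask to get the actual slot in the power-of-two ring. A runnable sketch of that indexing, with invented names and sizes:

#include <stdio.h>

/* Sketch: wrap a monotonically increasing counter onto a power-of-two
 * ring, mirroring read_count & ptr_mask at line 114 above. */
int main(void)
{
    unsigned int ptr_mask = 8 - 1;      /* ring of 8 entries */
    unsigned int read_count = 6;
    unsigned int write_count = 11;      /* counters never wrap explicitly */

    while (read_count != write_count) {
        unsigned int slot = read_count & ptr_mask;
        printf("drain slot %u (read_count %u)\n", slot, read_count);
        ++read_count;                   /* slots 6, 7, then wraps to 0, 1, 2 */
    }
    return 0;
}

Because the counters are only masked at lookup time, read_count != write_count stays a valid emptiness test even after the unsigned counters themselves wrap.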
in efx_remove_tx_queue():
  128  if (!tx_queue->buffer)
  143  kfree(tx_queue->buffer);
  144  tx_queue->buffer = NULL;
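
efx_remove_tx_queue() ends with the same kfree-then-NULL idiom as the probe error path. kfree(NULL) is a no-op, and clearing the pointer is what the early-return checks at lines 106 and 128 test for, so teardown is idempotent. Userspace sketch, invented names:

#include <stdlib.h>

struct my_queue { void *buffer; };

/* Sketch: free-and-NULL teardown; free(NULL)/kfree(NULL) are no-ops,
 * so calling this twice, or before the ring was allocated, is safe. */
static void my_remove_queue(struct my_queue *q)
{
    if (!q->buffer)             /* mirrors the check at line 128 */
        return;
    free(q->buffer);
    q->buffer = NULL;
}

int main(void)
{
    struct my_queue q = { malloc(64) };

    my_remove_queue(&q);
    my_remove_queue(&q);        /* second call is harmless */
    return 0;
}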
in efx_dequeue_buffer():
  149  struct efx_tx_buffer *buffer,  (argument)
  154  if (buffer->unmap_len) {
  156  dma_addr_t unmap_addr = buffer->dma_addr - buffer->dma_offset;
  158  if (buffer->flags & EFX_TX_BUF_MAP_SINGLE)
  159  dma_unmap_single(dma_dev, unmap_addr, buffer->unmap_len,
  162  dma_unmap_page(dma_dev, unmap_addr, buffer->unmap_len,
  164  buffer->unmap_len = 0;
  167  if (buffer->flags & EFX_TX_BUF_SKB) {
  168  struct sk_buff *skb = (struct sk_buff *)buffer->skb;
  170  if (unlikely(buffer->flags & EFX_TX_BUF_EFV)) {
  191  dev_consume_skb_any((struct sk_buff *)buffer->skb);
  195  } else if (buffer->flags & EFX_TX_BUF_XDP) {
  196  xdp_return_frame_rx_napi(buffer->xdpf);
  199  buffer->len = 0;
  200  buffer->flags = 0;
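
efx_dequeue_buffer() releases one slot, driven entirely by its flags: a non-zero unmap_len means a DMA mapping must be undone, with EFX_TX_BUF_MAP_SINGLE selecting dma_unmap_single() over dma_unmap_page() (lines 158-162), and the unmap address recovered by subtracting the stored dma_offset (line 156). The ownership flags then say what the slot held: an skb handed back via dev_consume_skb_any() (with a special case keyed by EFX_TX_BUF_EFV at line 170), or an XDP frame returned through xdp_return_frame_rx_napi(). Finally len and flags are zeroed so the slot reads as free. A runnable sketch of the dispatch, with invented flag values and the kernel calls reduced to prints:

#include <stdio.h>

#define BUF_MAP_SINGLE 0x1  /* invented stand-ins for EFX_TX_BUF_* */
#define BUF_SKB        0x2
#define BUF_XDP        0x4

struct buf {
    unsigned int flags, len, unmap_len;
};

/* Sketch: release one descriptor slot according to its flags, then
 * zero it so in-use checks treat it as free (mirrors lines 199-200). */
static void dequeue(struct buf *b)
{
    if (b->unmap_len) {
        if (b->flags & BUF_MAP_SINGLE)
            puts("dma_unmap_single(...)");  /* linear skb head mapping */
        else
            puts("dma_unmap_page(...)");    /* paged fragment mapping */
        b->unmap_len = 0;
    }
    if (b->flags & BUF_SKB)
        puts("dev_consume_skb_any(skb)");
    else if (b->flags & BUF_XDP)
        puts("xdp_return_frame_rx_napi(xdpf)");
    b->len = 0;
    b->flags = 0;
}

int main(void)
{
    struct buf b = { BUF_MAP_SINGLE | BUF_SKB, 64, 64 };

    dequeue(&b);
    return 0;
}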
in efx_dequeue_buffers():
  221  struct efx_tx_buffer *buffer = &tx_queue->buffer[read_ptr];  (local)
  223  if (!efx_tx_buffer_in_use(buffer)) {
  231  efx_dequeue_buffer(tx_queue, buffer, pkts_compl, bytes_compl,
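
efx_dequeue_buffers() is the normal completion path: it walks slots from read_count toward the index the hardware has completed, sanity-checking each with efx_tx_buffer_in_use() before releasing it (an empty slot there would mean the queue state is corrupt), while pkts_compl/bytes_compl accumulate totals for the caller. A hedged sketch with an invented in-use test; the matched lines show only the check, not the driver's actual error handling:

#include <stdbool.h>
#include <stdio.h>

struct buf { unsigned int flags, len; };

/* Invented stand-in for efx_tx_buffer_in_use(): here, any set flag
 * marks the slot as occupied. */
static bool buf_in_use(const struct buf *b)
{
    return b->flags != 0;
}

int main(void)
{
    struct buf ring[4] = { { 1, 64 }, { 1, 128 }, { 0, 0 }, { 0, 0 } };
    unsigned int read_count = 0, stop_index = 2, ptr_mask = 4 - 1;
    unsigned int pkts_compl = 0, bytes_compl = 0;

    while (read_count != stop_index) {
        struct buf *b = &ring[read_count & ptr_mask];

        if (!buf_in_use(b)) {
            /* Completing an empty slot means the ring state is
             * corrupt; bail out instead of ploughing on. */
            fprintf(stderr, "spurious TX completion\n");
            return 1;
        }
        pkts_compl++;   /* the driver counts only end-of-skb slots */
        bytes_compl += b->len;
        b->flags = b->len = 0;
        read_count++;
    }
    printf("completed %u pkts, %u bytes\n", pkts_compl, bytes_compl);
    return 0;
}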
in efx_enqueue_unwind():
  293  struct efx_tx_buffer *buffer;  (local)
  300  buffer = __efx_tx_queue_get_insert_buffer(tx_queue);
  301  efx_dequeue_buffer(tx_queue, buffer, &pkts_compl, &bytes_compl,
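
efx_enqueue_unwind() is the error path for a failed packet mapping: it rewinds the insert pointer over the buffers queued so far, fetching each slot with __efx_tx_queue_get_insert_buffer() and releasing it through the same efx_dequeue_buffer() helper as the completion path, so the cleanup logic lives in exactly one place. Sketch of the rewind, invented names:

#include <stdio.h>

/* Sketch: back out the slots claimed for a partially-mapped packet.
 * insert_count rewinds to its value from before the packet started. */
int main(void)
{
    unsigned int ptr_mask = 8 - 1;
    unsigned int old_insert_count = 6;  /* snapshot from before mapping */
    unsigned int insert_count = 9;      /* three fragments were queued */

    while (insert_count != old_insert_count) {
        --insert_count;
        /* Real code: efx_dequeue_buffer() on this slot. */
        printf("release slot %u\n", insert_count & ptr_mask);
    }
    return 0;
}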
in efx_tx_map_chunk():
  310  struct efx_tx_buffer *buffer;  (local)
  315  buffer = efx_tx_queue_get_insert_buffer(tx_queue);
  322  buffer->len = dma_len;
  323  buffer->dma_addr = dma_addr;
  324  buffer->flags = EFX_TX_BUF_CONT;
  330  return buffer;
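
efx_tx_map_chunk() claims the next insert slot and records one DMA chunk: length, bus address, and EFX_TX_BUF_CONT, i.e. "this descriptor continues the packet". Every chunk is written as a continuation first; the caller fixes up the flags on the mapping's first buffer and the packet's last buffer afterwards (see the efx_tx_map_data matches below). Runnable sketch with invented names:

#include <stdint.h>
#include <stdio.h>

#define BUF_CONT 0x8    /* invented stand-in for EFX_TX_BUF_CONT */

struct buf { unsigned int len, flags; uint64_t dma_addr; };

/* Sketch: fill the slot at insert_count with one DMA chunk and advance.
 * The CONT flag is provisional; later fixups may replace it. */
static struct buf *map_chunk(struct buf *ring, unsigned int *insert_count,
                             unsigned int ptr_mask, uint64_t dma_addr,
                             unsigned int dma_len)
{
    struct buf *b = &ring[*insert_count & ptr_mask];

    (*insert_count)++;
    b->len = dma_len;
    b->dma_addr = dma_addr;
    b->flags = BUF_CONT;
    return b;
}

int main(void)
{
    struct buf ring[8] = { 0 };
    unsigned int insert_count = 0;
    struct buf *b = map_chunk(ring, &insert_count, 8 - 1, 0x1000, 256);

    printf("len=%u flags=%#x addr=%#llx\n", b->len, b->flags,
           (unsigned long long)b->dma_addr);
    return 0;
}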
in efx_tx_map_data():
  387  struct efx_tx_buffer *buffer;  (local)
  390  buffer = efx_tx_map_chunk(tx_queue, dma_addr, len);
  395  buffer->flags = EFX_TX_BUF_CONT | dma_flags;
  396  buffer->unmap_len = unmap_len;
  397  buffer->dma_offset = buffer->dma_addr - unmap_addr;
  403  buffer->skb = skb;
  404  buffer->flags = EFX_TX_BUF_SKB | dma_flags;
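
These efx_tx_map_data() matches show how unmap and ownership state are split across a packet's descriptors. The first buffer of each DMA mapping stores unmap_len, the mapping flag (dma_flags), and dma_offset = dma_addr - unmap_addr (lines 395-397), which is exactly what line 156 inverts at dequeue time to recover the address to unmap. The packet's final buffer drops EFX_TX_BUF_CONT and takes EFX_TX_BUF_SKB plus the skb pointer (lines 403-404), so exactly one slot owns the skb. A small runnable example of the offset round trip:

#include <stdint.h>
#include <stdio.h>

/* Sketch: a descriptor may point into the middle of a DMA mapping, so
 * the enqueue side stores the distance back to the mapped address and
 * the dequeue side subtracts it again. Addresses here are made up. */
int main(void)
{
    uint64_t unmap_addr = 0x10000;  /* address the mapping started at */
    uint64_t dma_addr   = 0x10040;  /* this descriptor's chunk, 0x40 in */

    uint64_t dma_offset = dma_addr - unmap_addr;  /* stored at line 397 */
    uint64_t recovered  = dma_addr - dma_offset;  /* recomputed at line 156 */

    printf("unmap at %#llx\n", (unsigned long long)recovered);
    return 0;
}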