
Lines Matching refs:buffer

37 struct efx_tx_buffer *buffer) in efx_tx_get_copy_buffer() argument
49 buffer->dma_addr = page_buf->dma_addr + offset; in efx_tx_get_copy_buffer()
50 buffer->unmap_len = 0; in efx_tx_get_copy_buffer()
55 struct efx_tx_buffer *buffer, size_t len) in efx_tx_get_copy_buffer_limited() argument
59 return efx_tx_get_copy_buffer(tx_queue, buffer); in efx_tx_get_copy_buffer_limited()
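The efx_tx_get_copy_buffer() matches above show small packets being pointed at a slot inside an already-mapped copy page: the descriptor's dma_addr is the page's bus address plus an offset, and unmap_len stays 0 because the page is unmapped elsewhere, not per packet. Below is a minimal user-space sketch of that pattern; the slot size, field layout and helper names are illustrative assumptions, not values from the driver.

#include <stdint.h>
#include <stdio.h>

#define COPY_BUF_SIZE 64                /* assumed slot size, not from the listing */

struct copy_page {
    uint64_t dma_addr;                  /* bus address of the whole shared page */
    unsigned int used;                  /* bytes already handed out */
};

struct tx_buffer {
    uint64_t dma_addr;
    unsigned int unmap_len;
};

/* Hand out the next COPY_BUF_SIZE slot of the shared page. */
static void get_copy_buffer(struct copy_page *page, struct tx_buffer *buffer)
{
    unsigned int offset = page->used;

    buffer->dma_addr = page->dma_addr + offset;  /* slot address inside the page */
    buffer->unmap_len = 0;                       /* nothing to unmap per packet */
    page->used += COPY_BUF_SIZE;
}

int main(void)
{
    struct copy_page page = { .dma_addr = 0x100000, .used = 0 };
    struct tx_buffer buf;

    get_copy_buffer(&page, &buf);
    printf("slot at 0x%llx, unmap_len=%u\n",
           (unsigned long long)buf.dma_addr, buf.unmap_len);
    return 0;
}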
63 struct efx_tx_buffer *buffer, in efx_dequeue_buffer() argument
67 if (buffer->unmap_len) { in efx_dequeue_buffer()
69 dma_addr_t unmap_addr = buffer->dma_addr - buffer->dma_offset; in efx_dequeue_buffer()
70 if (buffer->flags & EFX_TX_BUF_MAP_SINGLE) in efx_dequeue_buffer()
71 dma_unmap_single(dma_dev, unmap_addr, buffer->unmap_len, in efx_dequeue_buffer()
74 dma_unmap_page(dma_dev, unmap_addr, buffer->unmap_len, in efx_dequeue_buffer()
76 buffer->unmap_len = 0; in efx_dequeue_buffer()
79 if (buffer->flags & EFX_TX_BUF_SKB) { in efx_dequeue_buffer()
82 (*bytes_compl) += buffer->skb->len; in efx_dequeue_buffer()
83 dev_consume_skb_any((struct sk_buff *)buffer->skb); in efx_dequeue_buffer()
89 buffer->len = 0; in efx_dequeue_buffer()
90 buffer->flags = 0; in efx_dequeue_buffer()
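The efx_dequeue_buffer() fragments outline the per-descriptor cleanup: undo the DMA mapping (single vs. page chosen by EFX_TX_BUF_MAP_SINGLE, with the mapping start recovered via dma_offset), credit the completed packet, release the skb, and zero the descriptor for reuse. A simplified user-space sketch of that flow, with prints and free() standing in for dma_unmap_single()/dma_unmap_page() and dev_consume_skb_any():

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define TX_BUF_MAP_SINGLE 0x1
#define TX_BUF_SKB        0x2

struct packet { unsigned int len; };

struct tx_buffer {
    uint64_t dma_addr;
    unsigned int dma_offset;
    unsigned int unmap_len;
    unsigned int len;
    unsigned int flags;
    struct packet *pkt;
};

static void dequeue_buffer(struct tx_buffer *buffer,
                           unsigned int *pkts_compl, unsigned int *bytes_compl)
{
    if (buffer->unmap_len) {
        /* The mapping started dma_offset bytes before this fragment. */
        uint64_t unmap_addr = buffer->dma_addr - buffer->dma_offset;

        if (buffer->flags & TX_BUF_MAP_SINGLE)
            printf("unmap single at 0x%llx len %u\n",
                   (unsigned long long)unmap_addr, buffer->unmap_len);
        else
            printf("unmap page at 0x%llx len %u\n",
                   (unsigned long long)unmap_addr, buffer->unmap_len);
        buffer->unmap_len = 0;
    }

    if (buffer->flags & TX_BUF_SKB) {
        (*pkts_compl)++;
        (*bytes_compl) += buffer->pkt->len;   /* account the whole packet */
        free(buffer->pkt);                    /* stand-in for dev_consume_skb_any() */
    }

    buffer->len = 0;                          /* descriptor slot is reusable now */
    buffer->flags = 0;
}

int main(void)
{
    unsigned int pkts = 0, bytes = 0;
    struct packet *p = malloc(sizeof(*p));
    struct tx_buffer buf;

    if (!p)
        return 1;
    p->len = 1500;
    buf = (struct tx_buffer){
        .dma_addr = 0x200040, .dma_offset = 0x40, .unmap_len = 1500,
        .len = 1500, .flags = TX_BUF_MAP_SINGLE | TX_BUF_SKB, .pkt = p,
    };
    dequeue_buffer(&buf, &pkts, &bytes);
    printf("completed %u packets, %u bytes\n", pkts, bytes);
    return 0;
}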
157 struct efx_tx_buffer *buffer; in efx_enqueue_skb_copy() local
163 buffer = efx_tx_queue_get_insert_buffer(tx_queue); in efx_enqueue_skb_copy()
165 copy_buffer = efx_tx_get_copy_buffer(tx_queue, buffer); in efx_enqueue_skb_copy()
171 buffer->len = copy_len; in efx_enqueue_skb_copy()
173 buffer->skb = skb; in efx_enqueue_skb_copy()
174 buffer->flags = EFX_TX_BUF_SKB; in efx_enqueue_skb_copy()
277 struct efx_tx_buffer *buffer = in efx_enqueue_skb_pio() local
308 buffer->skb = skb; in efx_enqueue_skb_pio()
309 buffer->flags = EFX_TX_BUF_SKB | EFX_TX_BUF_OPTION; in efx_enqueue_skb_pio()
311 EFX_POPULATE_QWORD_5(buffer->option, in efx_enqueue_skb_pio()
328 struct efx_tx_buffer *buffer; in efx_tx_map_chunk() local
333 buffer = efx_tx_queue_get_insert_buffer(tx_queue); in efx_tx_map_chunk()
336 buffer->len = dma_len; in efx_tx_map_chunk()
337 buffer->dma_addr = dma_addr; in efx_tx_map_chunk()
338 buffer->flags = EFX_TX_BUF_CONT; in efx_tx_map_chunk()
344 return buffer; in efx_tx_map_chunk()
389 struct efx_tx_buffer *buffer; in efx_tx_map_data() local
392 buffer = efx_tx_map_chunk(tx_queue, dma_addr, len); in efx_tx_map_data()
397 buffer->flags = EFX_TX_BUF_CONT | dma_flags; in efx_tx_map_data()
398 buffer->unmap_len = unmap_len; in efx_tx_map_data()
399 buffer->dma_offset = buffer->dma_addr - unmap_addr; in efx_tx_map_data()
405 buffer->skb = skb; in efx_tx_map_data()
406 buffer->flags = EFX_TX_BUF_SKB | dma_flags; in efx_tx_map_data()
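Taken together, the efx_tx_map_chunk() and efx_tx_map_data() lines sketch the mapping path: each chunk of a DMA mapping gets a descriptor flagged EFX_TX_BUF_CONT, the final descriptor of the mapping records unmap_len plus the offset back to the start of the mapping, and the descriptor that ends the packet is re-flagged EFX_TX_BUF_SKB. Below is a minimal user-space sketch of that split; the per-descriptor size limit, ring size and helper names are assumptions, not values from the driver.

#include <stdint.h>
#include <stdio.h>

#define TX_BUF_MAP_SINGLE 0x1
#define TX_BUF_SKB        0x2
#define TX_BUF_CONT       0x4
#define MAX_CHUNK         4096u         /* assumed per-descriptor limit */
#define RING_SIZE         16u

struct tx_buffer {
    uint64_t dma_addr;
    unsigned int dma_offset;
    unsigned int unmap_len;
    unsigned int len;
    unsigned int flags;
};

static struct tx_buffer ring[RING_SIZE];
static unsigned int insert_count;

/* Fill the next descriptor with one chunk of the mapping. */
static struct tx_buffer *map_chunk(uint64_t dma_addr, unsigned int len)
{
    struct tx_buffer *buffer = &ring[insert_count++ % RING_SIZE];
    unsigned int dma_len = len < MAX_CHUNK ? len : MAX_CHUNK;

    buffer->len = dma_len;
    buffer->dma_addr = dma_addr;
    buffer->flags = TX_BUF_CONT;        /* assume more descriptors follow */
    return buffer;
}

int main(void)
{
    uint64_t unmap_addr = 0x300000, dma_addr = unmap_addr;
    unsigned int len = 9000, unmap_len = len;
    struct tx_buffer *buffer;

    /* Split one mapped region across as many descriptors as needed. */
    do {
        buffer = map_chunk(dma_addr, len);
        dma_addr += buffer->len;
        len -= buffer->len;
    } while (len);

    /* The final descriptor of the mapping owns the unmap bookkeeping... */
    buffer->flags = TX_BUF_CONT | TX_BUF_MAP_SINGLE;
    buffer->unmap_len = unmap_len;
    buffer->dma_offset = (unsigned int)(buffer->dma_addr - unmap_addr);

    /* ...and, since it also ends the packet here, it is re-flagged to carry
     * the skb (pointer omitted in this sketch) and close the chain.
     */
    buffer->flags = TX_BUF_SKB | TX_BUF_MAP_SINGLE;

    printf("used %u descriptors, last offset %u\n",
           insert_count, buffer->dma_offset);
    return 0;
}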
429 struct efx_tx_buffer *buffer; in efx_enqueue_unwind() local
436 buffer = __efx_tx_queue_get_insert_buffer(tx_queue); in efx_enqueue_unwind()
437 efx_dequeue_buffer(tx_queue, buffer, &pkts_compl, &bytes_compl); in efx_enqueue_unwind()
594 struct efx_tx_buffer *buffer = &tx_queue->buffer[read_ptr]; in efx_dequeue_buffers() local
596 if (!(buffer->flags & EFX_TX_BUF_OPTION) && in efx_dequeue_buffers()
597 unlikely(buffer->len == 0)) { in efx_dequeue_buffers()
605 efx_dequeue_buffer(tx_queue, buffer, pkts_compl, bytes_compl); in efx_dequeue_buffers()
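The efx_dequeue_buffers() lines show the completion path indexing the ring with read_count masked by ptr_mask (the ring size is a power of two) and treating a zero-length descriptor that is not an option descriptor as a sign of corruption. A small user-space sketch of that walk, with the per-buffer cleanup stubbed out:

#include <stdio.h>

#define TX_BUF_OPTION 0x8
#define RING_SIZE     8u                 /* must be a power of two */
#define PTR_MASK      (RING_SIZE - 1)

struct tx_buffer { unsigned int len; unsigned int flags; };

static struct tx_buffer ring[RING_SIZE];
static unsigned int read_count;

/* Stub for the real per-buffer cleanup (unmap, free skb, zero fields). */
static void dequeue_buffer(struct tx_buffer *buffer,
                           unsigned int *pkts, unsigned int *bytes)
{
    (*pkts)++;
    (*bytes) += buffer->len;
    buffer->len = 0;
    buffer->flags = 0;
}

/* Process every descriptor up to stop_index. */
static int dequeue_buffers(unsigned int stop_index,
                           unsigned int *pkts, unsigned int *bytes)
{
    while (read_count != stop_index) {
        unsigned int read_ptr = read_count & PTR_MASK;
        struct tx_buffer *buffer = &ring[read_ptr];

        if (!(buffer->flags & TX_BUF_OPTION) && buffer->len == 0) {
            fprintf(stderr, "spurious TX completion\n");
            return -1;                   /* the driver would reset the queue */
        }

        dequeue_buffer(buffer, pkts, bytes);
        read_count++;
    }
    return 0;
}

int main(void)
{
    unsigned int pkts = 0, bytes = 0;

    ring[0].len = 1500;
    ring[1].len = 60;
    if (dequeue_buffers(2, &pkts, &bytes) == 0)
        printf("completed %u packets, %u bytes\n", pkts, bytes);
    return 0;
}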
694 if (!tx_queue->buffer) { in efx_setup_tc()
788 tx_queue->buffer = kcalloc(entries, sizeof(*tx_queue->buffer), in efx_probe_tx_queue()
790 if (!tx_queue->buffer) in efx_probe_tx_queue()
811 kfree(tx_queue->buffer); in efx_probe_tx_queue()
812 tx_queue->buffer = NULL; in efx_probe_tx_queue()
845 struct efx_tx_buffer *buffer; in efx_fini_tx_queue() local
850 if (!tx_queue->buffer) in efx_fini_tx_queue()
856 buffer = &tx_queue->buffer[tx_queue->read_count & tx_queue->ptr_mask]; in efx_fini_tx_queue()
857 efx_dequeue_buffer(tx_queue, buffer, &pkts_compl, &bytes_compl); in efx_fini_tx_queue()
869 if (!tx_queue->buffer) in efx_remove_tx_queue()
884 kfree(tx_queue->buffer); in efx_remove_tx_queue()
885 tx_queue->buffer = NULL; in efx_remove_tx_queue()
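The remaining matches show the tx_queue->buffer array's lifecycle: efx_probe_tx_queue() kcalloc()s one entry per ring slot, efx_fini_tx_queue() and efx_remove_tx_queue() bail out early when the array was never allocated, and both the probe error path and removal kfree() it and reset the pointer to NULL. A simplified user-space sketch of that lifecycle, with calloc()/free() standing in for kcalloc()/kfree():

#include <stdlib.h>

struct tx_buffer { unsigned int len; unsigned int flags; };

struct tx_queue {
    struct tx_buffer *buffer;
    unsigned int ptr_mask;
};

static int probe_tx_queue(struct tx_queue *tx_queue, unsigned int entries)
{
    tx_queue->ptr_mask = entries - 1;          /* entries must be a power of two */
    tx_queue->buffer = calloc(entries, sizeof(*tx_queue->buffer));
    if (!tx_queue->buffer)
        return -1;                             /* -ENOMEM in the driver */
    return 0;
}

static void fini_tx_queue(struct tx_queue *tx_queue)
{
    if (!tx_queue->buffer)                     /* nothing was ever allocated */
        return;
    /* The driver drains any outstanding descriptors here before shutdown. */
}

static void remove_tx_queue(struct tx_queue *tx_queue)
{
    if (!tx_queue->buffer)
        return;
    free(tx_queue->buffer);
    tx_queue->buffer = NULL;                   /* mark the queue as torn down */
}

int main(void)
{
    struct tx_queue txq = { 0 };

    if (probe_tx_queue(&txq, 1024) == 0) {
        fini_tx_queue(&txq);
        remove_tx_queue(&txq);
    }
    return 0;
}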