Lines Matching refs:buffer
26 struct ef4_tx_buffer *buffer) in ef4_tx_get_copy_buffer() argument
38 buffer->dma_addr = page_buf->dma_addr + offset; in ef4_tx_get_copy_buffer()
39 buffer->unmap_len = 0; in ef4_tx_get_copy_buffer()
44 struct ef4_tx_buffer *buffer, size_t len) in ef4_tx_get_copy_buffer_limited() argument
48 return ef4_tx_get_copy_buffer(tx_queue, buffer); in ef4_tx_get_copy_buffer_limited()
52 struct ef4_tx_buffer *buffer, in ef4_dequeue_buffer() argument
56 if (buffer->unmap_len) { in ef4_dequeue_buffer()
58 dma_addr_t unmap_addr = buffer->dma_addr - buffer->dma_offset; in ef4_dequeue_buffer()
59 if (buffer->flags & EF4_TX_BUF_MAP_SINGLE) in ef4_dequeue_buffer()
60 dma_unmap_single(dma_dev, unmap_addr, buffer->unmap_len, in ef4_dequeue_buffer()
63 dma_unmap_page(dma_dev, unmap_addr, buffer->unmap_len, in ef4_dequeue_buffer()
65 buffer->unmap_len = 0; in ef4_dequeue_buffer()
68 if (buffer->flags & EF4_TX_BUF_SKB) { in ef4_dequeue_buffer()
70 (*bytes_compl) += buffer->skb->len; in ef4_dequeue_buffer()
71 dev_consume_skb_any((struct sk_buff *)buffer->skb); in ef4_dequeue_buffer()
77 buffer->len = 0; in ef4_dequeue_buffer()
78 buffer->flags = 0; in ef4_dequeue_buffer()
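
Read together, the ef4_dequeue_buffer() lines above describe the teardown of a single descriptor: if the buffer owns a DMA mapping, rewind by dma_offset to the address that was originally mapped and undo it with dma_unmap_single() or dma_unmap_page() depending on EF4_TX_BUF_MAP_SINGLE; if the buffer owns an skb, account the completed packet and release it; finally clear len and flags so the slot can be reused. The following is a minimal sketch of that flow, not the driver's function: it assumes the driver's existing struct ef4_tx_buffer and flag definitions, takes the DMA device as an explicit parameter (the real function derives it from the tx_queue), and leaves out any buffer types other than skb buffers.

static void sketch_dequeue_buffer(struct device *dma_dev,
				  struct ef4_tx_buffer *buffer,
				  unsigned int *pkts_compl,
				  unsigned int *bytes_compl)
{
	if (buffer->unmap_len) {
		/* dma_addr may point into the middle of the mapped region,
		 * so rewind by dma_offset to the address that was mapped. */
		dma_addr_t unmap_addr = buffer->dma_addr - buffer->dma_offset;

		if (buffer->flags & EF4_TX_BUF_MAP_SINGLE)
			dma_unmap_single(dma_dev, unmap_addr,
					 buffer->unmap_len, DMA_TO_DEVICE);
		else
			dma_unmap_page(dma_dev, unmap_addr,
				       buffer->unmap_len, DMA_TO_DEVICE);
		buffer->unmap_len = 0;
	}

	if (buffer->flags & EF4_TX_BUF_SKB) {
		/* Completion accounting, then hand the skb back. */
		(*pkts_compl)++;
		(*bytes_compl) += buffer->skb->len;
		dev_consume_skb_any((struct sk_buff *)buffer->skb);
	}

	buffer->len = 0;
	buffer->flags = 0;
}

The cast on buffer->skb mirrors the dev_consume_skb_any() reference above, which suggests the skb pointer is stored const and only cast away at free time.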
153 struct ef4_tx_buffer *buffer; in ef4_enqueue_skb_copy() local
159 buffer = ef4_tx_queue_get_insert_buffer(tx_queue); in ef4_enqueue_skb_copy()
161 copy_buffer = ef4_tx_get_copy_buffer(tx_queue, buffer); in ef4_enqueue_skb_copy()
169 buffer->len = min_len; in ef4_enqueue_skb_copy()
171 buffer->len = copy_len; in ef4_enqueue_skb_copy()
174 buffer->skb = skb; in ef4_enqueue_skb_copy()
175 buffer->flags = EF4_TX_BUF_SKB; in ef4_enqueue_skb_copy()
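
The ef4_enqueue_skb_copy() lines show the copy path for very short packets: the next insert-pointer buffer is paired with a pre-mapped copy region (the ef4_tx_get_copy_buffer() lines above show it filling buffer->dma_addr and zeroing unmap_len), the payload is copied in and padded up to a minimum length, and the buffer takes ownership of the skb via EF4_TX_BUF_SKB. A hedged paraphrase follows; it assumes ef4_tx_get_copy_buffer() returns a pointer into the copy area (NULL on failure), takes min_len as a caller-supplied parameter, and omits the ring-pointer bookkeeping and length checks the real function performs.

static int sketch_enqueue_skb_copy(struct ef4_tx_queue *tx_queue,
				   struct sk_buff *skb, unsigned int min_len)
{
	unsigned int copy_len = skb->len;
	struct ef4_tx_buffer *buffer;
	u8 *copy_buffer;

	buffer = ef4_tx_queue_get_insert_buffer(tx_queue);
	copy_buffer = ef4_tx_get_copy_buffer(tx_queue, buffer);
	if (!copy_buffer)
		return -ENOMEM;

	if (skb_copy_bits(skb, 0, copy_buffer, copy_len))
		return -EINVAL;
	if (copy_len < min_len) {
		/* Pad short frames up to the minimum the hardware accepts. */
		memset(copy_buffer + copy_len, 0, min_len - copy_len);
		buffer->len = min_len;
	} else {
		buffer->len = copy_len;
	}

	buffer->skb = skb;		/* freed by completion, not here */
	buffer->flags = EF4_TX_BUF_SKB;
	return 0;
}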
186 struct ef4_tx_buffer *buffer; in ef4_tx_map_chunk() local
191 buffer = ef4_tx_queue_get_insert_buffer(tx_queue); in ef4_tx_map_chunk()
194 buffer->len = dma_len; in ef4_tx_map_chunk()
195 buffer->dma_addr = dma_addr; in ef4_tx_map_chunk()
196 buffer->flags = EF4_TX_BUF_CONT; in ef4_tx_map_chunk()
202 return buffer; in ef4_tx_map_chunk()
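
The ef4_tx_map_chunk() lines outline the per-chunk descriptor fill: each piece of a DMA run gets the next insert-pointer buffer with its length, bus address and the EF4_TX_BUF_CONT continuation flag, and the last buffer written is returned to the caller. The dma_len assignment above suggests long runs are split to respect a hardware descriptor limit; the sketch below makes that limit an explicit max_desc_len parameter (a placeholder, not the driver's mechanism) and omits insert-pointer accounting.

static struct ef4_tx_buffer *sketch_map_chunk(struct ef4_tx_queue *tx_queue,
					      dma_addr_t dma_addr, size_t len,
					      size_t max_desc_len)
{
	struct ef4_tx_buffer *buffer;

	do {
		size_t dma_len = min(len, max_desc_len);

		buffer = ef4_tx_queue_get_insert_buffer(tx_queue);
		buffer->len = dma_len;
		buffer->dma_addr = dma_addr;
		buffer->flags = EF4_TX_BUF_CONT;	/* more descriptors follow */

		dma_addr += dma_len;
		len -= dma_len;
	} while (len);

	return buffer;		/* last buffer written for this chunk */
}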
231 struct ef4_tx_buffer *buffer; in ef4_tx_map_data() local
234 buffer = ef4_tx_map_chunk(tx_queue, dma_addr, len); in ef4_tx_map_data()
239 buffer->flags = EF4_TX_BUF_CONT | dma_flags; in ef4_tx_map_data()
240 buffer->unmap_len = unmap_len; in ef4_tx_map_data()
241 buffer->dma_offset = buffer->dma_addr - unmap_addr; in ef4_tx_map_data()
247 buffer->skb = skb; in ef4_tx_map_data()
248 buffer->flags = EF4_TX_BUF_SKB | dma_flags; in ef4_tx_map_data()
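
Building on that chunk helper, the ef4_tx_map_data() lines show the bookkeeping layered on top: the final buffer of each mapping records unmap_len and dma_offset so ef4_dequeue_buffer() can reverse the mapping, and the very last buffer of the skb trades the continuation flag for EF4_TX_BUF_SKB so completion knows where the packet ends and which skb to free. Below is a sketch of the simplest case, a linear skb with no fragments, reusing the hypothetical sketch_map_chunk() above; the real function also walks skb_shinfo(skb)->frags in the same way.

static int sketch_map_linear_skb(struct ef4_tx_queue *tx_queue,
				 struct device *dma_dev, struct sk_buff *skb,
				 size_t max_desc_len)
{
	size_t len = skb_headlen(skb);
	struct ef4_tx_buffer *buffer;
	dma_addr_t dma_addr;

	dma_addr = dma_map_single(dma_dev, skb->data, len, DMA_TO_DEVICE);
	if (dma_mapping_error(dma_dev, dma_addr))
		return -EIO;

	buffer = sketch_map_chunk(tx_queue, dma_addr, len, max_desc_len);

	/* The final buffer of the mapping owns the unmap information. */
	buffer->flags = EF4_TX_BUF_CONT | EF4_TX_BUF_MAP_SINGLE;
	buffer->unmap_len = len;
	buffer->dma_offset = buffer->dma_addr - dma_addr;

	/* No fragments follow, so the same buffer also carries the skb
	 * for completion and drops the continuation flag. */
	buffer->skb = skb;
	buffer->flags = EF4_TX_BUF_SKB | EF4_TX_BUF_MAP_SINGLE;
	return 0;
}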
271 struct ef4_tx_buffer *buffer; in ef4_enqueue_unwind() local
276 buffer = __ef4_tx_queue_get_insert_buffer(tx_queue); in ef4_enqueue_unwind()
277 ef4_dequeue_buffer(tx_queue, buffer, NULL, NULL); in ef4_enqueue_unwind()
367 struct ef4_tx_buffer *buffer = &tx_queue->buffer[read_ptr]; in ef4_dequeue_buffers() local
369 if (!(buffer->flags & EF4_TX_BUF_OPTION) && in ef4_dequeue_buffers()
370 unlikely(buffer->len == 0)) { in ef4_dequeue_buffers()
378 ef4_dequeue_buffer(tx_queue, buffer, pkts_compl, bytes_compl); in ef4_dequeue_buffers()
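
Both unwinding a failed transmit (ef4_enqueue_unwind()) and processing hardware completions (ef4_dequeue_buffers()) reuse ef4_dequeue_buffer() on each affected slot; the former walks the insert pointer backwards, the latter walks read_count forwards to the completed index. A sketch of the forward walk follows, again with the DMA device passed explicitly; where the real driver logs the error and schedules a reset on an unexpected zero-length, non-option descriptor, the sketch simply stops.

static void sketch_dequeue_buffers(struct ef4_tx_queue *tx_queue,
				   struct device *dma_dev, unsigned int index,
				   unsigned int *pkts_compl,
				   unsigned int *bytes_compl)
{
	unsigned int stop_index = (index + 1) & tx_queue->ptr_mask;
	unsigned int read_ptr = tx_queue->read_count & tx_queue->ptr_mask;

	while (read_ptr != stop_index) {
		struct ef4_tx_buffer *buffer = &tx_queue->buffer[read_ptr];

		/* A completion for a descriptor that was never queued. */
		if (!(buffer->flags & EF4_TX_BUF_OPTION) && buffer->len == 0)
			return;

		sketch_dequeue_buffer(dma_dev, buffer, pkts_compl, bytes_compl);

		++tx_queue->read_count;
		read_ptr = tx_queue->read_count & tx_queue->ptr_mask;
	}
}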
461 if (!tx_queue->buffer) { in ef4_setup_tc()
555 tx_queue->buffer = kcalloc(entries, sizeof(*tx_queue->buffer), in ef4_probe_tx_queue()
557 if (!tx_queue->buffer) in ef4_probe_tx_queue()
578 kfree(tx_queue->buffer); in ef4_probe_tx_queue()
579 tx_queue->buffer = NULL; in ef4_probe_tx_queue()
609 struct ef4_tx_buffer *buffer; in ef4_fini_tx_queue() local
614 if (!tx_queue->buffer) in ef4_fini_tx_queue()
620 buffer = &tx_queue->buffer[tx_queue->read_count & tx_queue->ptr_mask]; in ef4_fini_tx_queue()
621 ef4_dequeue_buffer(tx_queue, buffer, &pkts_compl, &bytes_compl); in ef4_fini_tx_queue()
633 if (!tx_queue->buffer) in ef4_remove_tx_queue()
648 kfree(tx_queue->buffer); in ef4_remove_tx_queue()
649 tx_queue->buffer = NULL; in ef4_remove_tx_queue()
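
Finally, the ef4_setup_tc(), ef4_probe_tx_queue(), ef4_fini_tx_queue() and ef4_remove_tx_queue() references trace the life cycle of the tx_queue->buffer array itself: it is allocated with kcalloc() at probe time (and its presence doubles as an "is this queue initialised" check, as in ef4_setup_tc()), drained through ef4_dequeue_buffer() when the queue is shut down, and freed and NULLed on removal. A hedged end-to-end sketch; ring sizing, hardware setup and statistics are omitted, and write_count as the drain limit is an assumption, not a field confirmed by the listing.

static int sketch_probe_tx_queue(struct ef4_tx_queue *tx_queue,
				 unsigned int entries)
{
	/* One ef4_tx_buffer per ring entry; entries is a power of two. */
	tx_queue->buffer = kcalloc(entries, sizeof(*tx_queue->buffer),
				   GFP_KERNEL);
	if (!tx_queue->buffer)
		return -ENOMEM;
	tx_queue->ptr_mask = entries - 1;
	return 0;
}

static void sketch_fini_tx_queue(struct ef4_tx_queue *tx_queue,
				 struct device *dma_dev)
{
	unsigned int pkts_compl = 0, bytes_compl = 0;

	if (!tx_queue->buffer)
		return;

	/* Free any buffers still sitting in the ring. */
	while (tx_queue->read_count != tx_queue->write_count) {
		struct ef4_tx_buffer *buffer =
			&tx_queue->buffer[tx_queue->read_count & tx_queue->ptr_mask];

		sketch_dequeue_buffer(dma_dev, buffer,
				      &pkts_compl, &bytes_compl);
		++tx_queue->read_count;
	}
}

static void sketch_remove_tx_queue(struct ef4_tx_queue *tx_queue)
{
	kfree(tx_queue->buffer);
	tx_queue->buffer = NULL;
}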