Lines matching refs: rx_ring (AF_XDP zero-copy RX path in drivers/net/ethernet/intel/i40e/i40e_xsk.c; the number at the start of each line is the line number in that file)
191 static int i40e_run_xdp_zc(struct i40e_ring *rx_ring, struct xdp_buff *xdp) in i40e_run_xdp_zc() argument
193 struct xdp_umem *umem = rx_ring->xsk_umem; in i40e_run_xdp_zc()
204 xdp_prog = READ_ONCE(rx_ring->xdp_prog); in i40e_run_xdp_zc()
214 xdp_ring = rx_ring->vsi->xdp_rings[rx_ring->queue_index]; in i40e_run_xdp_zc()
218 err = xdp_do_redirect(rx_ring->netdev, xdp, xdp_prog); in i40e_run_xdp_zc()
225 trace_xdp_exception(rx_ring->netdev, xdp_prog, act); in i40e_run_xdp_zc()
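The fragments above are the XDP verdict dispatch for a zero-copy frame. A condensed sketch of how they fit together, assuming the i40e helpers and result codes of that kernel era (i40e_xmit_xdp_tx_ring(), I40E_XDP_PASS/TX/REDIR/CONSUMED) and the pre-5.5 UMEM handle convention; the exact body may differ by revision:

static int i40e_run_xdp_zc(struct i40e_ring *rx_ring, struct xdp_buff *xdp)
{
	struct xdp_umem *umem = rx_ring->xsk_umem;
	int err, result = I40E_XDP_PASS;
	struct i40e_ring *xdp_ring;
	struct bpf_prog *xdp_prog;
	u64 offset;
	u32 act;

	rcu_read_lock();
	xdp_prog = READ_ONCE(rx_ring->xdp_prog);
	act = bpf_prog_run_xdp(xdp_prog, xdp);

	/* re-express the handle relative to where XDP left the data pointer */
	offset = xdp->data - xdp->data_hard_start;
	xdp->handle = xsk_umem_adjust_offset(umem, xdp->handle, offset);

	switch (act) {
	case XDP_PASS:
		break;
	case XDP_TX:
		xdp_ring = rx_ring->vsi->xdp_rings[rx_ring->queue_index];
		result = i40e_xmit_xdp_tx_ring(xdp, xdp_ring);
		break;
	case XDP_REDIRECT:
		err = xdp_do_redirect(rx_ring->netdev, xdp, xdp_prog);
		result = !err ? I40E_XDP_REDIR : I40E_XDP_CONSUMED;
		break;
	default:
		bpf_warn_invalid_xdp_action(act);
		/* fall through */
	case XDP_ABORTED:
		trace_xdp_exception(rx_ring->netdev, xdp_prog, act);
		/* fall through, handle aborts by dropping the frame */
	case XDP_DROP:
		result = I40E_XDP_CONSUMED;
		break;
	}
	rcu_read_unlock();
	return result;
}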
245 static bool i40e_alloc_buffer_zc(struct i40e_ring *rx_ring, in i40e_alloc_buffer_zc() argument
248 struct xdp_umem *umem = rx_ring->xsk_umem; in i40e_alloc_buffer_zc()
253 rx_ring->rx_stats.page_reuse_count++; in i40e_alloc_buffer_zc()
258 rx_ring->rx_stats.alloc_page_failed++; in i40e_alloc_buffer_zc()
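i40e_alloc_buffer_zc() is the fast-path refill: reuse a frame already parked in the rx_bi slot if one is there, otherwise pull a fresh chunk address off the UMEM fill queue. A sketch assuming the pre-5.5 xsk_umem_peek_addr()/xsk_umem_discard_addr() API, where the driver computes its own DMA and kernel addresses:

static bool i40e_alloc_buffer_zc(struct i40e_ring *rx_ring,
				 struct i40e_rx_buffer *bi)
{
	struct xdp_umem *umem = rx_ring->xsk_umem;
	u64 handle, hr;

	/* a recycled frame is still attached to this slot */
	if (bi->addr) {
		rx_ring->rx_stats.page_reuse_count++;
		return true;
	}

	if (!xsk_umem_peek_addr(umem, &handle)) {
		rx_ring->rx_stats.alloc_page_failed++;
		return false;
	}

	hr = umem->headroom + XDP_PACKET_HEADROOM;

	bi->dma = xdp_umem_get_dma(umem, handle) + hr;
	bi->addr = xdp_umem_get_data(umem, handle) + hr;
	bi->handle = xsk_umem_adjust_offset(umem, handle, umem->headroom);

	xsk_umem_discard_addr(umem);
	return true;
}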
286 static bool i40e_alloc_buffer_slow_zc(struct i40e_ring *rx_ring, in i40e_alloc_buffer_slow_zc() argument
289 struct xdp_umem *umem = rx_ring->xsk_umem; in i40e_alloc_buffer_slow_zc()
293 rx_ring->rx_stats.alloc_page_failed++; in i40e_alloc_buffer_slow_zc()
297 handle &= rx_ring->xsk_umem->chunk_mask; in i40e_alloc_buffer_slow_zc()
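The slow-path variant differs in that it also consumes from the kernel-side reuse queue (the *_rq peek/discard helpers, an assumption consistent with this era) and masks the raw handle with chunk_mask, as line 297 shows. Sketch:

static bool i40e_alloc_buffer_slow_zc(struct i40e_ring *rx_ring,
				      struct i40e_rx_buffer *bi)
{
	struct xdp_umem *umem = rx_ring->xsk_umem;
	u64 handle, hr;

	if (!xsk_umem_peek_addr_rq(umem, &handle)) {
		rx_ring->rx_stats.alloc_page_failed++;
		return false;
	}

	handle &= rx_ring->xsk_umem->chunk_mask;

	hr = umem->headroom + XDP_PACKET_HEADROOM;

	bi->dma = xdp_umem_get_dma(umem, handle) + hr;
	bi->addr = xdp_umem_get_data(umem, handle) + hr;
	bi->handle = xsk_umem_adjust_offset(umem, handle, umem->headroom);

	xsk_umem_discard_addr_rq(umem);
	return true;
}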
314 __i40e_alloc_rx_buffers_zc(struct i40e_ring *rx_ring, u16 count, in __i40e_alloc_rx_buffers_zc() argument
315 bool alloc(struct i40e_ring *rx_ring, in __i40e_alloc_rx_buffers_zc() argument
318 u16 ntu = rx_ring->next_to_use; in __i40e_alloc_rx_buffers_zc()
323 rx_desc = I40E_RX_DESC(rx_ring, ntu); in __i40e_alloc_rx_buffers_zc()
324 bi = &rx_ring->rx_bi[ntu]; in __i40e_alloc_rx_buffers_zc()
326 if (!alloc(rx_ring, bi)) { in __i40e_alloc_rx_buffers_zc()
331 dma_sync_single_range_for_device(rx_ring->dev, bi->dma, 0, in __i40e_alloc_rx_buffers_zc()
332 rx_ring->rx_buf_len, in __i40e_alloc_rx_buffers_zc()
341 if (unlikely(ntu == rx_ring->count)) { in __i40e_alloc_rx_buffers_zc()
342 rx_desc = I40E_RX_DESC(rx_ring, 0); in __i40e_alloc_rx_buffers_zc()
343 bi = rx_ring->rx_bi; in __i40e_alloc_rx_buffers_zc()
352 if (rx_ring->next_to_use != ntu) in __i40e_alloc_rx_buffers_zc()
353 i40e_release_rx_desc(rx_ring, ntu); in __i40e_alloc_rx_buffers_zc()
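The common allocator takes a per-call alloc() callback (fast or slow path), walks the descriptor ring from next_to_use, syncs each buffer for device DMA, wraps at rx_ring->count, and finally bumps the hardware tail via i40e_release_rx_desc(). A condensed sketch; the descriptor field names (read.pkt_addr, wb.qword1.status_error_len) follow the usual i40e layout and are assumptions here:

static bool __i40e_alloc_rx_buffers_zc(struct i40e_ring *rx_ring, u16 count,
					bool alloc(struct i40e_ring *rx_ring,
						   struct i40e_rx_buffer *bi))
{
	u16 ntu = rx_ring->next_to_use;
	union i40e_rx_desc *rx_desc;
	struct i40e_rx_buffer *bi;
	bool ok = true;

	rx_desc = I40E_RX_DESC(rx_ring, ntu);
	bi = &rx_ring->rx_bi[ntu];
	do {
		if (!alloc(rx_ring, bi)) {
			ok = false;
			goto no_buffers;
		}

		dma_sync_single_range_for_device(rx_ring->dev, bi->dma, 0,
						 rx_ring->rx_buf_len,
						 DMA_BIDIRECTIONAL);

		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma);

		rx_desc++;
		bi++;
		ntu++;

		if (unlikely(ntu == rx_ring->count)) {
			rx_desc = I40E_RX_DESC(rx_ring, 0);
			bi = rx_ring->rx_bi;
			ntu = 0;
		}

		/* clear the status of the next descriptor so a stale
		 * DD bit is never observed by the clean loop
		 */
		rx_desc->wb.qword1.status_error_len = 0;
	} while (--count);

no_buffers:
	if (rx_ring->next_to_use != ntu)
		i40e_release_rx_desc(rx_ring, ntu);

	return ok;
}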
368 bool i40e_alloc_rx_buffers_zc(struct i40e_ring *rx_ring, u16 count) in i40e_alloc_rx_buffers_zc() argument
370 return __i40e_alloc_rx_buffers_zc(rx_ring, count, in i40e_alloc_rx_buffers_zc()
384 static bool i40e_alloc_rx_buffers_fast_zc(struct i40e_ring *rx_ring, u16 count) in i40e_alloc_rx_buffers_fast_zc() argument
386 return __i40e_alloc_rx_buffers_zc(rx_ring, count, in i40e_alloc_rx_buffers_fast_zc()
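The two entry points just pin the callback: the exported setup-path allocator uses the slow variant (it may need the reuse queue), while the hot NAPI refill uses the fast one. Sketch, derived directly from the wrapper lines above:

bool i40e_alloc_rx_buffers_zc(struct i40e_ring *rx_ring, u16 count)
{
	return __i40e_alloc_rx_buffers_zc(rx_ring, count,
					  i40e_alloc_buffer_slow_zc);
}

static bool i40e_alloc_rx_buffers_fast_zc(struct i40e_ring *rx_ring, u16 count)
{
	return __i40e_alloc_rx_buffers_zc(rx_ring, count,
					  i40e_alloc_buffer_zc);
}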
400 static struct i40e_rx_buffer *i40e_get_rx_buffer_zc(struct i40e_ring *rx_ring, in i40e_get_rx_buffer_zc() argument
405 bi = &rx_ring->rx_bi[rx_ring->next_to_clean]; in i40e_get_rx_buffer_zc()
408 dma_sync_single_range_for_cpu(rx_ring->dev, in i40e_get_rx_buffer_zc()
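Fetching a buffer for cleaning is just an index into rx_bi at next_to_clean plus a CPU-side DMA sync of the received bytes. Sketch:

static struct i40e_rx_buffer *i40e_get_rx_buffer_zc(struct i40e_ring *rx_ring,
						    const unsigned int size)
{
	struct i40e_rx_buffer *bi;

	bi = &rx_ring->rx_bi[rx_ring->next_to_clean];

	/* sync only the bytes the hardware actually wrote */
	dma_sync_single_range_for_cpu(rx_ring->dev,
				      bi->dma, 0,
				      size,
				      DMA_BIDIRECTIONAL);

	return bi;
}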
424 static void i40e_reuse_rx_buffer_zc(struct i40e_ring *rx_ring, in i40e_reuse_rx_buffer_zc() argument
427 struct i40e_rx_buffer *new_bi = &rx_ring->rx_bi[rx_ring->next_to_alloc]; in i40e_reuse_rx_buffer_zc()
428 u16 nta = rx_ring->next_to_alloc; in i40e_reuse_rx_buffer_zc()
432 rx_ring->next_to_alloc = (nta < rx_ring->count) ? nta : 0; in i40e_reuse_rx_buffer_zc()
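Recycling moves the frame's dma/addr/handle triple from the old slot into the slot at next_to_alloc and advances that index with the same wrap pattern as line 432. Sketch:

static void i40e_reuse_rx_buffer_zc(struct i40e_ring *rx_ring,
				    struct i40e_rx_buffer *old_bi)
{
	struct i40e_rx_buffer *new_bi = &rx_ring->rx_bi[rx_ring->next_to_alloc];
	u16 nta = rx_ring->next_to_alloc;

	/* update, and store next-to-alloc with wraparound */
	nta++;
	rx_ring->next_to_alloc = (nta < rx_ring->count) ? nta : 0;

	/* transfer the frame from the old buffer to the new slot */
	new_bi->dma = old_bi->dma;
	new_bi->addr = old_bi->addr;
	new_bi->handle = old_bi->handle;

	/* mark the old slot empty so the fast allocator refills it */
	old_bi->addr = NULL;
}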
450 struct i40e_ring *rx_ring; in i40e_zca_free() local
454 rx_ring = container_of(alloc, struct i40e_ring, zca); in i40e_zca_free()
455 hr = rx_ring->xsk_umem->headroom + XDP_PACKET_HEADROOM; in i40e_zca_free()
456 mask = rx_ring->xsk_umem->chunk_mask; in i40e_zca_free()
458 nta = rx_ring->next_to_alloc; in i40e_zca_free()
459 bi = &rx_ring->rx_bi[nta]; in i40e_zca_free()
462 rx_ring->next_to_alloc = (nta < rx_ring->count) ? nta : 0; in i40e_zca_free()
466 bi->dma = xdp_umem_get_dma(rx_ring->xsk_umem, handle); in i40e_zca_free()
469 bi->addr = xdp_umem_get_data(rx_ring->xsk_umem, handle); in i40e_zca_free()
472 bi->handle = xsk_umem_adjust_offset(rx_ring->xsk_umem, (u64)handle, in i40e_zca_free()
473 rx_ring->xsk_umem->headroom); in i40e_zca_free()
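i40e_zca_free() is the MEM_TYPE_ZERO_COPY destructor that xdp_rxq_info invokes when the stack is done with a redirected frame: container_of() recovers the ring from its embedded zca member, and the chunk is parked back at next_to_alloc. A sketch reconstructed almost entirely from the lines above:

void i40e_zca_free(struct zero_copy_allocator *alloc, unsigned long handle)
{
	struct i40e_rx_buffer *bi;
	struct i40e_ring *rx_ring;
	u64 hr, mask;
	u16 nta;

	rx_ring = container_of(alloc, struct i40e_ring, zca);
	hr = rx_ring->xsk_umem->headroom + XDP_PACKET_HEADROOM;
	mask = rx_ring->xsk_umem->chunk_mask;

	nta = rx_ring->next_to_alloc;
	bi = &rx_ring->rx_bi[nta];

	nta++;
	rx_ring->next_to_alloc = (nta < rx_ring->count) ? nta : 0;

	/* mask off any in-chunk offset before recomputing addresses */
	handle &= mask;

	bi->dma = xdp_umem_get_dma(rx_ring->xsk_umem, handle);
	bi->dma += hr;

	bi->addr = xdp_umem_get_data(rx_ring->xsk_umem, handle);
	bi->addr += hr;

	bi->handle = xsk_umem_adjust_offset(rx_ring->xsk_umem, (u64)handle,
					    rx_ring->xsk_umem->headroom);
}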
486 static struct sk_buff *i40e_construct_skb_zc(struct i40e_ring *rx_ring, in i40e_construct_skb_zc() argument
495 skb = __napi_alloc_skb(&rx_ring->q_vector->napi, in i40e_construct_skb_zc()
506 i40e_reuse_rx_buffer_zc(rx_ring, bi); in i40e_construct_skb_zc()
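On XDP_PASS the zero-copy frame must be copied into an skb, since the UMEM page belongs to user space; the buffer is then immediately recycled. A sketch assuming the usual xdp_buff pointer layout (data_hard_start/data/data_meta/data_end):

static struct sk_buff *i40e_construct_skb_zc(struct i40e_ring *rx_ring,
					     struct i40e_rx_buffer *bi,
					     struct xdp_buff *xdp)
{
	unsigned int metasize = xdp->data - xdp->data_meta;
	unsigned int datasize = xdp->data_end - xdp->data;
	struct sk_buff *skb;

	/* allocate an skb large enough for headroom plus data */
	skb = __napi_alloc_skb(&rx_ring->q_vector->napi,
			       xdp->data_end - xdp->data_hard_start,
			       GFP_ATOMIC | __GFP_NOWARN);
	if (unlikely(!skb))
		return NULL;

	skb_reserve(skb, xdp->data - xdp->data_hard_start);
	memcpy(__skb_put(skb, datasize), xdp->data, datasize);
	if (metasize)
		skb_metadata_set(skb, metasize);

	/* the UMEM chunk can be handed back to the ring right away */
	i40e_reuse_rx_buffer_zc(rx_ring, bi);
	return skb;
}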
514 static void i40e_inc_ntc(struct i40e_ring *rx_ring) in i40e_inc_ntc() argument
516 u32 ntc = rx_ring->next_to_clean + 1; in i40e_inc_ntc()
518 ntc = (ntc < rx_ring->count) ? ntc : 0; in i40e_inc_ntc()
519 rx_ring->next_to_clean = ntc; in i40e_inc_ntc()
520 prefetch(I40E_RX_DESC(rx_ring, ntc)); in i40e_inc_ntc()
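i40e_inc_ntc() is the standard ring-index bump with wraparound, plus a prefetch of the descriptor that will be cleaned next; the listing contains nearly the whole body:

static void i40e_inc_ntc(struct i40e_ring *rx_ring)
{
	u32 ntc = rx_ring->next_to_clean + 1;

	ntc = (ntc < rx_ring->count) ? ntc : 0;
	rx_ring->next_to_clean = ntc;
	prefetch(I40E_RX_DESC(rx_ring, ntc));
}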
530 int i40e_clean_rx_irq_zc(struct i40e_ring *rx_ring, int budget) in i40e_clean_rx_irq_zc() argument
533 u16 cleaned_count = I40E_DESC_UNUSED(rx_ring); in i40e_clean_rx_irq_zc()
539 xdp.rxq = &rx_ring->xdp_rxq; in i40e_clean_rx_irq_zc()
549 !i40e_alloc_rx_buffers_fast_zc(rx_ring, in i40e_clean_rx_irq_zc()
554 rx_desc = I40E_RX_DESC(rx_ring, rx_ring->next_to_clean); in i40e_clean_rx_irq_zc()
563 bi = i40e_clean_programming_status(rx_ring, rx_desc, in i40e_clean_rx_irq_zc()
566 i40e_reuse_rx_buffer_zc(rx_ring, bi); in i40e_clean_rx_irq_zc()
576 bi = i40e_get_rx_buffer_zc(rx_ring, size); in i40e_clean_rx_irq_zc()
583 xdp_res = i40e_run_xdp_zc(rx_ring, &xdp); in i40e_clean_rx_irq_zc()
589 i40e_reuse_rx_buffer_zc(rx_ring, bi); in i40e_clean_rx_irq_zc()
596 i40e_inc_ntc(rx_ring); in i40e_clean_rx_irq_zc()
607 skb = i40e_construct_skb_zc(rx_ring, bi, &xdp); in i40e_clean_rx_irq_zc()
609 rx_ring->rx_stats.alloc_buff_failed++; in i40e_clean_rx_irq_zc()
614 i40e_inc_ntc(rx_ring); in i40e_clean_rx_irq_zc()
622 i40e_process_skb_fields(rx_ring, rx_desc, skb); in i40e_clean_rx_irq_zc()
623 napi_gro_receive(&rx_ring->q_vector->napi, skb); in i40e_clean_rx_irq_zc()
626 i40e_finalize_xdp_rx(rx_ring, xdp_xmit); in i40e_clean_rx_irq_zc()
627 i40e_update_rx_stats(rx_ring, total_rx_bytes, total_rx_packets); in i40e_clean_rx_irq_zc()
629 if (xsk_umem_uses_need_wakeup(rx_ring->xsk_umem)) { in i40e_clean_rx_irq_zc()
630 if (failure || rx_ring->next_to_clean == rx_ring->next_to_use) in i40e_clean_rx_irq_zc()
631 xsk_set_rx_need_wakeup(rx_ring->xsk_umem); in i40e_clean_rx_irq_zc()
633 xsk_clear_rx_need_wakeup(rx_ring->xsk_umem); in i40e_clean_rx_irq_zc()
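i40e_clean_rx_irq_zc() is the NAPI receive loop that ties all of the above together. A heavily condensed sketch of its control flow; the descriptor length macros (I40E_RXD_QW1_LENGTH_PBUF_*) and the I40E_RX_BUFFER_WRITE refill threshold are assumptions consistent with the rest of the i40e driver, and error paths are trimmed:

int i40e_clean_rx_irq_zc(struct i40e_ring *rx_ring, int budget)
{
	unsigned int total_rx_bytes = 0, total_rx_packets = 0;
	u16 cleaned_count = I40E_DESC_UNUSED(rx_ring);
	unsigned int xdp_res, xdp_xmit = 0;
	bool failure = false;
	struct sk_buff *skb;
	struct xdp_buff xdp;

	xdp.rxq = &rx_ring->xdp_rxq;

	while (likely(total_rx_packets < (unsigned int)budget)) {
		struct i40e_rx_buffer *bi;
		union i40e_rx_desc *rx_desc;
		unsigned int size;
		u64 qword;

		/* refill the ring in batches from the fast allocator */
		if (cleaned_count >= I40E_RX_BUFFER_WRITE) {
			failure = failure ||
				  !i40e_alloc_rx_buffers_fast_zc(rx_ring,
								 cleaned_count);
			cleaned_count = 0;
		}

		rx_desc = I40E_RX_DESC(rx_ring, rx_ring->next_to_clean);
		qword = le64_to_cpu(rx_desc->wb.qword1.status_error_len);

		/* don't read further descriptor fields until the
		 * done bit has been observed
		 */
		dma_rmb();

		bi = i40e_clean_programming_status(rx_ring, rx_desc, qword);
		if (unlikely(bi)) {
			i40e_reuse_rx_buffer_zc(rx_ring, bi);
			cleaned_count++;
			continue;
		}

		size = (qword & I40E_RXD_QW1_LENGTH_PBUF_MASK) >>
		       I40E_RXD_QW1_LENGTH_PBUF_SHIFT;
		if (!size)
			break;

		bi = i40e_get_rx_buffer_zc(rx_ring, size);
		xdp.data = bi->addr;
		xdp.data_meta = xdp.data;
		xdp.data_hard_start = xdp.data - XDP_PACKET_HEADROOM;
		xdp.data_end = xdp.data + size;
		xdp.handle = bi->handle;

		xdp_res = i40e_run_xdp_zc(rx_ring, &xdp);
		if (xdp_res) {
			if (xdp_res & (I40E_XDP_TX | I40E_XDP_REDIR)) {
				xdp_xmit |= xdp_res;
				bi->addr = NULL; /* frame now owned elsewhere */
			} else {
				i40e_reuse_rx_buffer_zc(rx_ring, bi);
			}

			total_rx_bytes += size;
			total_rx_packets++;
			cleaned_count++;
			i40e_inc_ntc(rx_ring);
			continue;
		}

		/* XDP_PASS: copy into an skb for the regular stack */
		skb = i40e_construct_skb_zc(rx_ring, bi, &xdp);
		if (!skb) {
			rx_ring->rx_stats.alloc_buff_failed++;
			break;
		}

		cleaned_count++;
		i40e_inc_ntc(rx_ring);

		total_rx_bytes += skb->len;
		total_rx_packets++;

		i40e_process_skb_fields(rx_ring, rx_desc, skb);
		napi_gro_receive(&rx_ring->q_vector->napi, skb);
	}

	i40e_finalize_xdp_rx(rx_ring, xdp_xmit);
	i40e_update_rx_stats(rx_ring, total_rx_bytes, total_rx_packets);

	/* with need_wakeup, tell user space whether it must kick the fill
	 * queue: set the flag on failure or when the ring ran dry
	 */
	if (xsk_umem_uses_need_wakeup(rx_ring->xsk_umem)) {
		if (failure || rx_ring->next_to_clean == rx_ring->next_to_use)
			xsk_set_rx_need_wakeup(rx_ring->xsk_umem);
		else
			xsk_clear_rx_need_wakeup(rx_ring->xsk_umem);

		return (int)total_rx_packets;
	}

	return failure ? budget : (int)total_rx_packets;
}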
822 void i40e_xsk_clean_rx_ring(struct i40e_ring *rx_ring) in i40e_xsk_clean_rx_ring() argument
826 for (i = 0; i < rx_ring->count; i++) { in i40e_xsk_clean_rx_ring()
827 struct i40e_rx_buffer *rx_bi = &rx_ring->rx_bi[i]; in i40e_xsk_clean_rx_ring()
832 xsk_umem_fq_reuse(rx_ring->xsk_umem, rx_bi->handle); in i40e_xsk_clean_rx_ring()
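Teardown walks every slot and hands any still-attached chunk back to the UMEM's fill-queue reuse list so user space does not leak buffers across a ring reset. Sketch:

void i40e_xsk_clean_rx_ring(struct i40e_ring *rx_ring)
{
	u16 i;

	for (i = 0; i < rx_ring->count; i++) {
		struct i40e_rx_buffer *rx_bi = &rx_ring->rx_bi[i];

		/* slot never filled, or frame already recycled */
		if (!rx_bi->addr)
			continue;

		xsk_umem_fq_reuse(rx_ring->xsk_umem, rx_bi->handle);
		rx_bi->addr = NULL;
	}
}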