
Searched refs: frag (Results 1 – 25 of 233), sorted by relevance


/drivers/net/ethernet/netronome/nfp/nfdk/
dp.c
119 const skb_frag_t *frag, *fend; in nfp_nfdk_tx_maybe_close_block() local
128 frag = skb_shinfo(skb)->frags; in nfp_nfdk_tx_maybe_close_block()
129 fend = frag + nr_frags; in nfp_nfdk_tx_maybe_close_block()
130 for (; frag < fend; frag++) in nfp_nfdk_tx_maybe_close_block()
131 n_descs += DIV_ROUND_UP(skb_frag_size(frag), in nfp_nfdk_tx_maybe_close_block()
238 const skb_frag_t *frag, *fend; in nfp_nfdk_tx() local
331 frag = skb_shinfo(skb)->frags; in nfp_nfdk_tx()
332 fend = frag + nr_frags; in nfp_nfdk_tx()
347 if (frag >= fend) in nfp_nfdk_tx()
350 dma_len = skb_frag_size(frag); in nfp_nfdk_tx()
[all …]
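
The nfdk hits above all use the same frag-walking idiom: start at skb_shinfo(skb)->frags, stop at frags + nr_frags, and size each fragment with skb_frag_size(). A minimal sketch of that idiom, not the nfp code itself; count_frag_descs and dma_len_per_desc are hypothetical names standing in for the driver's own helpers and per-descriptor limit:

#include <linux/kernel.h>
#include <linux/skbuff.h>

static unsigned int count_frag_descs(const struct sk_buff *skb,
				     unsigned int dma_len_per_desc)
{
	const skb_frag_t *frag = skb_shinfo(skb)->frags;
	const skb_frag_t *fend = frag + skb_shinfo(skb)->nr_frags;
	unsigned int n_descs = 0;

	/* A fragment larger than what one descriptor can carry needs
	 * several descriptors, hence the round-up division. */
	for (; frag < fend; frag++)
		n_descs += DIV_ROUND_UP(skb_frag_size(frag),
					dma_len_per_desc);
	return n_descs;
}
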
rings.c
17 const skb_frag_t *frag, *fend; in nfp_nfdk_tx_ring_reset() local
41 frag = skb_shinfo(skb)->frags; in nfp_nfdk_tx_ring_reset()
42 fend = frag + nr_frags; in nfp_nfdk_tx_ring_reset()
43 for (; frag < fend; frag++) { in nfp_nfdk_tx_ring_reset()
44 size = skb_frag_size(frag); in nfp_nfdk_tx_ring_reset()
46 skb_frag_size(frag), DMA_TO_DEVICE); in nfp_nfdk_tx_ring_reset()
/drivers/net/ethernet/netronome/nfp/nfd3/
dp.c
244 const skb_frag_t *frag; in nfp_nfd3_tx() local
323 frag = &skb_shinfo(skb)->frags[f]; in nfp_nfd3_tx()
324 fsize = skb_frag_size(frag); in nfp_nfd3_tx()
326 dma_addr = skb_frag_dma_map(dp->dev, frag, 0, in nfp_nfd3_tx()
365 frag = &skb_shinfo(skb)->frags[f]; in nfp_nfd3_tx()
367 skb_frag_size(frag), DMA_TO_DEVICE); in nfp_nfd3_tx()
418 const skb_frag_t *frag; in nfp_nfd3_tx_complete() local
443 frag = &skb_shinfo(skb)->frags[fidx]; in nfp_nfd3_tx_complete()
445 skb_frag_size(frag), DMA_TO_DEVICE); in nfp_nfd3_tx_complete()
530 void *frag; in nfp_nfd3_napi_alloc_one() local
[all …]
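
The nfd3 hits pair skb_frag_dma_map() on the transmit path with dma_unmap_page() on completion and on error unwind. A hedged sketch of that pairing against a generic struct device; map_tx_frag and unmap_tx_frag are hypothetical helper names:

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

/* Map one paged fragment for transmit and report mapping failure. */
static int map_tx_frag(struct device *dev, const skb_frag_t *frag,
		       dma_addr_t *dma_addr)
{
	*dma_addr = skb_frag_dma_map(dev, frag, 0,
				     skb_frag_size(frag), DMA_TO_DEVICE);
	if (dma_mapping_error(dev, *dma_addr))
		return -ENOMEM;
	return 0;
}

/* Undo the mapping once the hardware is done with the fragment. */
static void unmap_tx_frag(struct device *dev, const skb_frag_t *frag,
			  dma_addr_t dma_addr)
{
	dma_unmap_page(dev, dma_addr, skb_frag_size(frag), DMA_TO_DEVICE);
}
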
rings.c
45 const skb_frag_t *frag; in nfp_nfd3_tx_ring_reset() local
64 frag = &skb_shinfo(skb)->frags[tx_buf->fidx]; in nfp_nfd3_tx_ring_reset()
66 skb_frag_size(frag), DMA_TO_DEVICE); in nfp_nfd3_tx_ring_reset()
169 if (!tx_ring->txbufs[i].frag) in nfp_nfd3_tx_ring_bufs_free()
173 __free_page(virt_to_page(tx_ring->txbufs[i].frag)); in nfp_nfd3_tx_ring_bufs_free()
188 txbufs[i].frag = nfp_net_rx_alloc_one(dp, &txbufs[i].dma_addr); in nfp_nfd3_tx_ring_bufs_alloc()
189 if (!txbufs[i].frag) { in nfp_nfd3_tx_ring_bufs_alloc()
/drivers/infiniband/hw/hfi1/
ipoib_rx.c
26 void *frag; in prepare_frag_skb() local
30 frag = napi_alloc_frag(skb_size); in prepare_frag_skb()
32 if (unlikely(!frag)) in prepare_frag_skb()
35 skb = build_skb(frag, skb_size); in prepare_frag_skb()
38 skb_free_frag(frag); in prepare_frag_skb()
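
prepare_frag_skb() above is the standard page-fragment RX pattern: napi_alloc_frag(), wrap the buffer with build_skb(), and return the fragment with skb_free_frag() if the skb cannot be built. A minimal sketch, assuming skb_size already includes the headroom and skb_shared_info overhead build_skb() expects; alloc_frag_skb is a hypothetical name:

#include <linux/skbuff.h>

static struct sk_buff *alloc_frag_skb(unsigned int skb_size)
{
	struct sk_buff *skb;
	void *frag;

	frag = napi_alloc_frag(skb_size);
	if (unlikely(!frag))
		return NULL;

	skb = build_skb(frag, skb_size);
	if (unlikely(!skb)) {
		/* build_skb() failed, so the fragment is still ours. */
		skb_free_frag(frag);
		return NULL;
	}
	return skb;
}
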
vnic_sdma.c
63 skb_frag_t *frag = &skb_shinfo(tx->skb)->frags[i]; in build_vnic_ulp_payload() local
68 skb_frag_page(frag), in build_vnic_ulp_payload()
69 skb_frag_off(frag), in build_vnic_ulp_payload()
70 skb_frag_size(frag), in build_vnic_ulp_payload()
/drivers/net/ethernet/netronome/nfp/
nfp_net_dp.c
19 void *frag; in nfp_net_rx_alloc_one() local
22 frag = netdev_alloc_frag(dp->fl_bufsz); in nfp_net_rx_alloc_one()
27 frag = page ? page_address(page) : NULL; in nfp_net_rx_alloc_one()
29 if (!frag) { in nfp_net_rx_alloc_one()
34 *dma_addr = nfp_net_dma_map_rx(dp, frag); in nfp_net_rx_alloc_one()
36 nfp_net_free_frag(frag, dp->xdp_prog); in nfp_net_rx_alloc_one()
41 return frag; in nfp_net_rx_alloc_one()
145 if (!rx_ring->rxbufs[i].frag) in nfp_net_rx_ring_bufs_free()
149 nfp_net_free_frag(rx_ring->rxbufs[i].frag, dp->xdp_prog); in nfp_net_rx_ring_bufs_free()
151 rx_ring->rxbufs[i].frag = NULL; in nfp_net_rx_ring_bufs_free()
[all …]
nfp_net_dp.h
9 static inline dma_addr_t nfp_net_dma_map_rx(struct nfp_net_dp *dp, void *frag) in nfp_net_dma_map_rx() argument
11 return dma_map_single_attrs(dp->dev, frag + NFP_NET_RX_BUF_HEADROOM, in nfp_net_dma_map_rx()
71 static inline void nfp_net_free_frag(void *frag, bool xdp) in nfp_net_free_frag() argument
74 skb_free_frag(frag); in nfp_net_free_frag()
76 __free_page(virt_to_page(frag)); in nfp_net_free_frag()
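
nfp_net_rx_alloc_one() and the nfp_net_dp.h helpers above show the buffer lifecycle: a page fragment from netdev_alloc_frag() on the normal path, a full page when an XDP program is attached, a streaming DMA mapping, and the matching skb_free_frag()/__free_page() on the way out. A simplified sketch of that flow; rx_alloc_one and its parameters are hypothetical, and the driver's headroom offset and mapping attributes are left out:

#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/skbuff.h>

static void *rx_alloc_one(struct device *dev, unsigned int bufsz,
			  bool xdp, dma_addr_t *dma_addr)
{
	void *frag;

	if (!xdp) {
		frag = netdev_alloc_frag(bufsz);
	} else {
		/* XDP wants page-aligned buffers, so use a whole page. */
		struct page *page = alloc_page(GFP_KERNEL);

		frag = page ? page_address(page) : NULL;
	}
	if (!frag)
		return NULL;

	*dma_addr = dma_map_single(dev, frag, bufsz, DMA_FROM_DEVICE);
	if (dma_mapping_error(dev, *dma_addr)) {
		if (!xdp)
			skb_free_frag(frag);
		else
			__free_page(virt_to_page(frag));
		return NULL;
	}
	return frag;
}
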
nfp_net_debugfs.c
19 void *frag; in nfp_rx_q_show() local
47 frag = READ_ONCE(rx_ring->rxbufs[i].frag); in nfp_rx_q_show()
48 if (frag) in nfp_rx_q_show()
49 seq_printf(file, " frag=%p", frag); in nfp_rx_q_show()
/drivers/gpu/drm/amd/amdgpu/
amdgpu_vm_pt.c
815 unsigned int *frag, uint64_t *frag_end) in amdgpu_vm_pte_fragment() argument
847 *frag = 0; in amdgpu_vm_pte_fragment()
853 *frag = min_t(unsigned int, ffs(start) - 1, fls64(end - start) - 1); in amdgpu_vm_pte_fragment()
854 if (*frag >= max_frag) { in amdgpu_vm_pte_fragment()
855 *frag = max_frag; in amdgpu_vm_pte_fragment()
858 *frag_end = start + (1 << *frag); in amdgpu_vm_pte_fragment()
883 unsigned int frag; in amdgpu_vm_ptes_update() local
887 amdgpu_vm_pte_fragment(params, frag_start, end, flags, &frag, in amdgpu_vm_ptes_update()
921 } else if (frag < shift) { in amdgpu_vm_ptes_update()
928 } else if (frag >= parent_shift) { in amdgpu_vm_ptes_update()
[all …]
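
amdgpu_vm_pte_fragment() above chooses the fragment order as the largest power of two that is both aligned to the start address and no longer than the remaining range, capped at max_frag. A sketch of just that calculation with page-frame-number arguments; the special cases the driver handles before line 853 (such as the *frag = 0 path at line 847) and its different frag_end rounding when the cap is hit are omitted here:

#include <linux/kernel.h>

static void pick_fragment(uint64_t start, uint64_t end, unsigned int max_frag,
			  unsigned int *frag, uint64_t *frag_end)
{
	/* ffs() gives the alignment of start, fls64() the order of the
	 * remaining length; the fragment must satisfy both. */
	*frag = min_t(unsigned int, ffs(start) - 1, fls64(end - start) - 1);
	if (*frag >= max_frag)
		*frag = max_frag;
	*frag_end = start + (1ULL << *frag);
}
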
/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/
rx.c
167 struct mlx5e_wqe_frag_info *frag; in mlx5e_xsk_alloc_rx_wqes_batched() local
173 frag = &rq->wqe.frags[j]; in mlx5e_xsk_alloc_rx_wqes_batched()
175 addr = xsk_buff_xdp_get_frame_dma(frag->au->xsk); in mlx5e_xsk_alloc_rx_wqes_batched()
189 struct mlx5e_wqe_frag_info *frag; in mlx5e_xsk_alloc_rx_wqes() local
195 frag = &rq->wqe.frags[j]; in mlx5e_xsk_alloc_rx_wqes()
197 frag->au->xsk = xsk_buff_alloc(rq->xsk_pool); in mlx5e_xsk_alloc_rx_wqes()
198 if (unlikely(!frag->au->xsk)) in mlx5e_xsk_alloc_rx_wqes()
201 addr = xsk_buff_xdp_get_frame_dma(frag->au->xsk); in mlx5e_xsk_alloc_rx_wqes()
/drivers/net/ethernet/mellanox/mlx5/core/
alloc.c
88 struct mlx5_buf_list *frag = &buf->frags[i]; in mlx5_frag_buf_alloc_node() local
91 frag->buf = mlx5_dma_zalloc_coherent_node(dev, frag_sz, in mlx5_frag_buf_alloc_node()
92 &frag->map, node); in mlx5_frag_buf_alloc_node()
93 if (!frag->buf) in mlx5_frag_buf_alloc_node()
95 if (frag->map & ((1 << buf->page_shift) - 1)) { in mlx5_frag_buf_alloc_node()
99 &frag->map, buf->page_shift); in mlx5_frag_buf_alloc_node()
/drivers/net/wireless/zydas/
zd1201.c
341 struct zd1201_frag *frag = NULL; in zd1201_usbrx() local
347 frag = kmalloc(sizeof(*frag), GFP_ATOMIC); in zd1201_usbrx()
348 if (!frag) in zd1201_usbrx()
352 kfree(frag); in zd1201_usbrx()
355 frag->skb = skb; in zd1201_usbrx()
356 frag->seq = seq & IEEE80211_SCTL_SEQ; in zd1201_usbrx()
361 hlist_add_head(&frag->fnode, &zd->fraglist); in zd1201_usbrx()
364 hlist_for_each_entry(frag, &zd->fraglist, fnode) in zd1201_usbrx()
365 if (frag->seq == (seq&IEEE80211_SCTL_SEQ)) in zd1201_usbrx()
367 if (!frag) in zd1201_usbrx()
[all …]
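
zd1201_usbrx() above keeps partially reassembled frames on an hlist keyed by the 802.11 sequence number: the first fragment allocates an entry, later fragments look it up. A sketch of that cache, where struct rx_frag and the fraglist head are hypothetical stand-ins for the driver's zd1201_frag and zd->fraglist:

#include <linux/ieee80211.h>
#include <linux/list.h>
#include <linux/skbuff.h>
#include <linux/slab.h>

struct rx_frag {
	struct hlist_node fnode;
	unsigned int seq;
	struct sk_buff *skb;
};

/* First fragment: remember the skb under its sequence number. */
static int frag_cache_add(struct hlist_head *fraglist, struct sk_buff *skb,
			  u16 seq)
{
	struct rx_frag *frag = kmalloc(sizeof(*frag), GFP_ATOMIC);

	if (!frag)
		return -ENOMEM;
	frag->skb = skb;
	frag->seq = seq & IEEE80211_SCTL_SEQ;
	hlist_add_head(&frag->fnode, fraglist);
	return 0;
}

/* Later fragments: find the partial frame with the same sequence number. */
static struct rx_frag *frag_cache_find(struct hlist_head *fraglist, u16 seq)
{
	struct rx_frag *frag;

	hlist_for_each_entry(frag, fraglist, fnode)
		if (frag->seq == (seq & IEEE80211_SCTL_SEQ))
			return frag;
	return NULL;
}
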
/drivers/net/ethernet/dlink/
sundance.c
343 struct desc_frag { __le32 addr, length; } frag; member
983 le32_to_cpu(np->tx_ring[i].frag.addr), in tx_timeout()
984 le32_to_cpu(np->tx_ring[i].frag.length)); in tx_timeout()
1030 np->rx_ring[i].frag.length = 0; in init_ring()
1042 np->rx_ring[i].frag.addr = cpu_to_le32( in init_ring()
1046 np->rx_ring[i].frag.addr)) { in init_ring()
1051 np->rx_ring[i].frag.length = cpu_to_le32(np->rx_buf_sz | LastFrag); in init_ring()
1100 txdesc->frag.addr = cpu_to_le32(dma_map_single(&np->pci_dev->dev, in start_tx()
1103 txdesc->frag.addr)) in start_tx()
1105 txdesc->frag.length = cpu_to_le32 (skb->len | LastFrag); in start_tx()
[all …]
/drivers/net/xen-netback/
rx.c
245 int frag; /* frag == -1 => frag_iter->head */ member
269 pkt->frag = -1; in xenvif_rx_next_skb()
342 pkt->frag++; in xenvif_rx_next_frag()
345 if (pkt->frag >= nr_frags) { in xenvif_rx_next_frag()
351 pkt->frag = -1; in xenvif_rx_next_frag()
366 if (pkt->frag == -1) { in xenvif_rx_next_chunk()
370 skb_frag_t *frag = &skb_shinfo(frag_iter)->frags[pkt->frag]; in xenvif_rx_next_chunk() local
372 frag_data = skb_frag_address(frag); in xenvif_rx_next_chunk()
373 frag_len = skb_frag_size(frag); in xenvif_rx_next_chunk()
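
The xen-netback hits use frag == -1 to mean the linear head and 0..nr_frags-1 to index the paged fragments. A small sketch of that convention; next_chunk is a hypothetical helper and the frag_iter indirection is ignored:

#include <linux/skbuff.h>

static void next_chunk(const struct sk_buff *skb, int frag,
		       void **data, size_t *len)
{
	if (frag == -1) {
		/* frag == -1 selects the linear part of the skb. */
		*data = skb->data;
		*len = skb_headlen(skb);
	} else {
		const skb_frag_t *f = &skb_shinfo(skb)->frags[frag];

		*data = skb_frag_address(f);
		*len = skb_frag_size(f);
	}
}
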
/drivers/net/ethernet/mellanox/mlx5/core/en/
xdp.c
126 skb_frag_t *frag = &sinfo->frags[i]; in mlx5e_xmit_xdp_buff() local
130 addr = page_pool_get_dma_addr(skb_frag_page(frag)) + in mlx5e_xmit_xdp_buff()
131 skb_frag_off(frag); in mlx5e_xmit_xdp_buff()
132 len = skb_frag_size(frag); in mlx5e_xmit_xdp_buff()
149 skb_frag_t *frag = &sinfo->frags[i]; in mlx5e_xmit_xdp_buff() local
151 xdpi.page.page = skb_frag_page(frag); in mlx5e_xmit_xdp_buff()
445 skb_frag_t *frag = &sinfo->frags[i]; in mlx5e_xmit_xdp_frame() local
448 addr = page_pool_get_dma_addr(skb_frag_page(frag)) + in mlx5e_xmit_xdp_frame()
449 skb_frag_off(frag); in mlx5e_xmit_xdp_frame()
453 dseg->byte_count = cpu_to_be32(skb_frag_size(frag)); in mlx5e_xmit_xdp_frame()
/drivers/net/wireless/intel/iwlwifi/
iwl-dbg-tlv.c
400 struct iwl_dram_data *frag = &fw_mon->frags[i]; in iwl_dbg_tlv_fragments_free() local
402 dma_free_coherent(trans->dev, frag->size, frag->block, in iwl_dbg_tlv_fragments_free()
403 frag->physical); in iwl_dbg_tlv_fragments_free()
405 frag->physical = 0; in iwl_dbg_tlv_fragments_free()
406 frag->block = NULL; in iwl_dbg_tlv_fragments_free()
407 frag->size = 0; in iwl_dbg_tlv_fragments_free()
536 struct iwl_dram_data *frag, u32 pages) in iwl_dbg_tlv_alloc_fragment() argument
541 if (!frag || frag->size || !pages) in iwl_dbg_tlv_alloc_fragment()
567 frag->physical = physical; in iwl_dbg_tlv_alloc_fragment()
568 frag->block = block; in iwl_dbg_tlv_alloc_fragment()
[all …]
/drivers/net/ethernet/stmicro/stmmac/
stmmac_tc.c
58 struct stmmac_tc_entry *frag, in tc_fill_actions() argument
69 if (frag) in tc_fill_actions()
70 action_entry = frag; in tc_fill_actions()
94 struct stmmac_tc_entry *entry, *frag = NULL; in tc_fill_entry() local
129 frag = tc_find_entry(priv, cls, true); in tc_fill_entry()
130 if (!frag) { in tc_fill_entry()
135 entry->frag_ptr = frag; in tc_fill_entry()
143 frag->val.match_en = (mask >> (rem * 8)) & in tc_fill_entry()
145 frag->val.match_data = (data >> (rem * 8)) & in tc_fill_entry()
147 frag->val.frame_offset = real_off + 1; in tc_fill_entry()
[all …]
dwmac5.c
436 struct stmmac_tc_entry *entry, *frag; in dwmac5_rxp_config() local
464 frag = entry->frag_ptr; in dwmac5_rxp_config()
467 if (frag) { in dwmac5_rxp_config()
481 if (frag && !frag->in_hw) { in dwmac5_rxp_config()
482 ret = dwmac5_rxp_update_single_entry(ioaddr, frag, nve); in dwmac5_rxp_config()
485 frag->table_pos = nve++; in dwmac5_rxp_config()
486 frag->in_hw = true; in dwmac5_rxp_config()
/drivers/net/wireless/intersil/hostap/
hostap_80211_rx.c
222 unsigned int frag, u8 *src, u8 *dst) in prism2_frag_cache_find() argument
239 (entry->last_frag + 1 == frag || frag == -1) && in prism2_frag_cache_find()
255 unsigned int frag, seq; in prism2_frag_cache_get() local
259 frag = sc & IEEE80211_SCTL_FRAG; in prism2_frag_cache_get()
262 if (frag == 0) { in prism2_frag_cache_get()
282 entry->last_frag = frag; in prism2_frag_cache_get()
289 entry = prism2_frag_cache_find(local, seq, frag, hdr->addr2, in prism2_frag_cache_get()
292 entry->last_frag = frag; in prism2_frag_cache_get()
725 unsigned int frag; in hostap_80211_rx() local
756 frag = sc & IEEE80211_SCTL_FRAG; in hostap_80211_rx()
[all …]
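
The hostap hits (and the libipw and rtllib ones below) all start by splitting the 802.11 sequence-control field: the low 4 bits carry the fragment number, the upper 12 bits the sequence number, and fragment 0 creates a cache entry that later fragments extend. A sketch of that split using the IEEE80211_SCTL_* masks; parse_seq_ctrl is a hypothetical helper:

#include <linux/ieee80211.h>

static void parse_seq_ctrl(const struct ieee80211_hdr *hdr,
			   unsigned int *seq, unsigned int *frag)
{
	u16 sc = le16_to_cpu(hdr->seq_ctrl);

	*frag = sc & IEEE80211_SCTL_FRAG;	/* low 4 bits */
	*seq = (sc & IEEE80211_SCTL_SEQ) >> 4;	/* upper 12 bits */
}
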
/drivers/net/wireless/intel/ipw2x00/
libipw_rx.c
56 unsigned int frag, in libipw_frag_cache_find() argument
75 (entry->last_frag + 1 == frag || frag == -1) && in libipw_frag_cache_find()
90 unsigned int frag, seq; in libipw_frag_cache_get() local
94 frag = WLAN_GET_SEQ_FRAG(sc); in libipw_frag_cache_get()
97 if (frag == 0) { in libipw_frag_cache_get()
117 entry->last_frag = frag; in libipw_frag_cache_get()
124 entry = libipw_frag_cache_find(ieee, seq, frag, hdr->addr2, in libipw_frag_cache_get()
127 entry->last_frag = frag; in libipw_frag_cache_get()
335 unsigned int frag; in libipw_rx() local
362 frag = WLAN_GET_SEQ_FRAG(sc); in libipw_rx()
[all …]
/drivers/net/ethernet/sfc/
tx_tso.c
209 skb_frag_t *frag) in tso_get_fragment() argument
211 st->unmap_addr = skb_frag_dma_map(&efx->pci_dev->dev, frag, 0, in tso_get_fragment()
212 skb_frag_size(frag), DMA_TO_DEVICE); in tso_get_fragment()
214 st->unmap_len = skb_frag_size(frag); in tso_get_fragment()
215 st->in_len = skb_frag_size(frag); in tso_get_fragment()
/drivers/net/ethernet/chelsio/cxgb4/
cxgb4_tc_u32_parse.h
64 f->val.frag = 1; in cxgb4_fill_ipv4_frag()
65 f->mask.frag = 1; in cxgb4_fill_ipv4_frag()
67 f->val.frag = 0; in cxgb4_fill_ipv4_frag()
68 f->mask.frag = 1; in cxgb4_fill_ipv4_frag()
/drivers/staging/rtl8192u/ieee80211/
ieee80211_rx.c
63 unsigned int frag, u8 tid, u8 *src, u8 *dst) in ieee80211_frag_cache_find() argument
81 (entry->last_frag + 1 == frag || frag == -1) && in ieee80211_frag_cache_find()
98 unsigned int frag = WLAN_GET_SEQ_FRAG(sc); in ieee80211_frag_cache_get() local
119 if (frag == 0) { in ieee80211_frag_cache_get()
141 entry->last_frag = frag; in ieee80211_frag_cache_get()
148 entry = ieee80211_frag_cache_find(ieee, seq, frag, tid, hdr->addr2, in ieee80211_frag_cache_get()
151 entry->last_frag = frag; in ieee80211_frag_cache_get()
417 u16 frag = WLAN_GET_SEQ_FRAG(sc); in is_duplicate_packet() local
460 entry->frag_num[tid] = frag; in is_duplicate_packet()
486 if (*last_frag == frag) in is_duplicate_packet()
[all …]
/drivers/staging/rtl8192e/
rtllib_rx.c
59 unsigned int frag, u8 tid, u8 *src, u8 *dst) in rtllib_frag_cache_find() argument
76 (entry->last_frag + 1 == frag || frag == -1) && in rtllib_frag_cache_find()
93 unsigned int frag = WLAN_GET_SEQ_FRAG(sc); in rtllib_frag_cache_get() local
115 if (frag == 0) { in rtllib_frag_cache_get()
138 entry->last_frag = frag; in rtllib_frag_cache_get()
146 entry = rtllib_frag_cache_find(ieee, seq, frag, tid, hdr->addr2, in rtllib_frag_cache_get()
149 entry->last_frag = frag; in rtllib_frag_cache_get()
367 u16 frag = WLAN_GET_SEQ_FRAG(sc); in is_duplicate_packet() local
410 entry->frag_num[tid] = frag; in is_duplicate_packet()
432 if (*last_frag == frag) in is_duplicate_packet()
[all …]
