
Lines Matching full:unmap

101 	struct bnad_tx_unmap *unmap;  in bnad_tx_buff_unmap()  local
105 unmap = &unmap_q[index]; in bnad_tx_buff_unmap()
106 nvecs = unmap->nvecs; in bnad_tx_buff_unmap()
108 skb = unmap->skb; in bnad_tx_buff_unmap()
109 unmap->skb = NULL; in bnad_tx_buff_unmap()
110 unmap->nvecs = 0; in bnad_tx_buff_unmap()
112 dma_unmap_addr(&unmap->vectors[0], dma_addr), in bnad_tx_buff_unmap()
114 dma_unmap_addr_set(&unmap->vectors[0], dma_addr, 0); in bnad_tx_buff_unmap()
123 unmap = &unmap_q[index]; in bnad_tx_buff_unmap()
127 dma_unmap_addr(&unmap->vectors[vector], dma_addr), in bnad_tx_buff_unmap()
128 dma_unmap_len(&unmap->vectors[vector], dma_len), in bnad_tx_buff_unmap()
130 dma_unmap_addr_set(&unmap->vectors[vector], dma_addr, 0); in bnad_tx_buff_unmap()
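The bnad_tx_buff_unmap() hits above show the TX teardown half of the unmap queue: vector 0 of a work item carries the linear skb data and is released with dma_unmap_single(), the remaining vectors carry page fragments and are released with dma_unmap_page() using the stored dma_unmap_len(), and every handle is zeroed with dma_unmap_addr_set() so a slot is never unmapped twice. A minimal sketch of that walk, assuming only the field names visible above (skb, nvecs, vectors[], dma_addr, dma_len), passing the struct device explicitly, and keeping all vectors in one entry (the hits at lines 123-130 show the real code stepping into the next unmap entry when a work item's vectors run out):

/* Sketch only: struct layout inferred from the hits, not the driver's exact code. */
static void tx_unmap_entry_sketch(struct device *dev, struct bnad_tx_unmap *unmap)
{
	struct sk_buff *skb = unmap->skb;
	int vector, nvecs = unmap->nvecs;

	unmap->skb = NULL;
	unmap->nvecs = 0;

	/* Vector 0: linear skb data, mapped with dma_map_single() at transmit time. */
	dma_unmap_single(dev,
			 dma_unmap_addr(&unmap->vectors[0], dma_addr),
			 skb_headlen(skb), DMA_TO_DEVICE);
	dma_unmap_addr_set(&unmap->vectors[0], dma_addr, 0);

	/* Remaining vectors: page fragments, mapped with skb_frag_dma_map(). */
	for (vector = 1; vector < nvecs; vector++) {
		dma_unmap_page(dev,
			       dma_unmap_addr(&unmap->vectors[vector], dma_addr),
			       dma_unmap_len(&unmap->vectors[vector], dma_len),
			       DMA_TO_DEVICE);
		dma_unmap_addr_set(&unmap->vectors[vector], dma_addr, 0);
	}

	dev_kfree_skb_any(skb);
}

Clearing the stored DMA address after each unmap appears to double as an "already released" marker, which is what lets the cleanup and completion paths revisit ring entries safely.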
142 * so DMA unmap & freeing is fine.
172 struct bnad_tx_unmap *unmap; in bnad_txcmpl_process() local
188 unmap = &unmap_q[cons]; in bnad_txcmpl_process()
190 skb = unmap->skb; in bnad_txcmpl_process()
195 unmap_wis = BNA_TXQ_WI_NEEDED(unmap->nvecs); in bnad_txcmpl_process()
300 bnad_rxq_cleanup_page(struct bnad *bnad, struct bnad_rx_unmap *unmap) in bnad_rxq_cleanup_page() argument
302 if (!unmap->page) in bnad_rxq_cleanup_page()
306 dma_unmap_addr(&unmap->vector, dma_addr), in bnad_rxq_cleanup_page()
307 unmap->vector.len, DMA_FROM_DEVICE); in bnad_rxq_cleanup_page()
308 put_page(unmap->page); in bnad_rxq_cleanup_page()
309 unmap->page = NULL; in bnad_rxq_cleanup_page()
310 dma_unmap_addr_set(&unmap->vector, dma_addr, 0); in bnad_rxq_cleanup_page()
311 unmap->vector.len = 0; in bnad_rxq_cleanup_page()
315 bnad_rxq_cleanup_skb(struct bnad *bnad, struct bnad_rx_unmap *unmap) in bnad_rxq_cleanup_skb() argument
317 if (!unmap->skb) in bnad_rxq_cleanup_skb()
321 dma_unmap_addr(&unmap->vector, dma_addr), in bnad_rxq_cleanup_skb()
322 unmap->vector.len, DMA_FROM_DEVICE); in bnad_rxq_cleanup_skb()
323 dev_kfree_skb_any(unmap->skb); in bnad_rxq_cleanup_skb()
324 unmap->skb = NULL; in bnad_rxq_cleanup_skb()
325 dma_unmap_addr_set(&unmap->vector, dma_addr, 0); in bnad_rxq_cleanup_skb()
326 unmap->vector.len = 0; in bnad_rxq_cleanup_skb()
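The two RX cleanup helpers mirror each other: bail out if the slot is empty, unmap the DMA address stored in the slot's vector, release the buffer (put_page() for the page-backed ring, dev_kfree_skb_any() for the skb-backed one), and zero the bookkeeping so the slot cannot be released twice. A minimal sketch of both, again with the struct device passed in rather than taken from the driver's private state:

/* Sketches: the struct bnad_rx_unmap fields (page, page_offset, skb, vector)
 * are taken from the hits above; everything else is an assumption.
 */
static void rxq_cleanup_page_sketch(struct device *dev, struct bnad_rx_unmap *unmap)
{
	if (!unmap->page)
		return;				/* slot already empty */

	dma_unmap_page(dev,
		       dma_unmap_addr(&unmap->vector, dma_addr),
		       unmap->vector.len, DMA_FROM_DEVICE);
	put_page(unmap->page);

	unmap->page = NULL;
	dma_unmap_addr_set(&unmap->vector, dma_addr, 0);
	unmap->vector.len = 0;
}

static void rxq_cleanup_skb_sketch(struct device *dev, struct bnad_rx_unmap *unmap)
{
	if (!unmap->skb)
		return;				/* slot already empty */

	dma_unmap_single(dev,
			 dma_unmap_addr(&unmap->vector, dma_addr),
			 unmap->vector.len, DMA_FROM_DEVICE);
	dev_kfree_skb_any(unmap->skb);

	unmap->skb = NULL;
	dma_unmap_addr_set(&unmap->vector, dma_addr, 0);
	unmap->vector.len = 0;
}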
336 struct bnad_rx_unmap *unmap = &unmap_q->unmap[i]; in bnad_rxq_cleanup() local
339 bnad_rxq_cleanup_skb(bnad, unmap); in bnad_rxq_cleanup()
341 bnad_rxq_cleanup_page(bnad, unmap); in bnad_rxq_cleanup()
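bnad_rxq_cleanup() then walks every slot of the unmap queue and picks the matching helper. A sketch of that dispatch, reusing the helpers above; the queue depth and the skb-vs-page selector are assumptions here:

/* Sketch: per-queue cleanup dispatch over unmap_q->unmap[]. */
static void rxq_cleanup_sketch(struct device *dev,
			       struct bnad_rx_unmap_q *unmap_q, u32 q_depth,
			       bool skb_backed)
{
	u32 i;

	for (i = 0; i < q_depth; i++) {
		struct bnad_rx_unmap *unmap = &unmap_q->unmap[i];

		if (skb_backed)
			rxq_cleanup_skb_sketch(dev, unmap);
		else
			rxq_cleanup_page_sketch(dev, unmap);
	}
}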
351 struct bnad_rx_unmap *unmap, *prev; in bnad_rxq_refill_page() local
364 unmap = &unmap_q->unmap[prod]; in bnad_rxq_refill_page()
371 prev = &unmap_q->unmap[unmap_q->reuse_pi]; in bnad_rxq_refill_page()
392 unmap->page = page; in bnad_rxq_refill_page()
393 unmap->page_offset = page_offset; in bnad_rxq_refill_page()
394 dma_unmap_addr_set(&unmap->vector, dma_addr, dma_addr); in bnad_rxq_refill_page()
395 unmap->vector.len = unmap_q->map_size; in bnad_rxq_refill_page()
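The refill path is the inverse of cleanup. For the page-backed ring, bnad_rxq_refill_page() obtains a page (the prev/reuse_pi hits show that one larger page can be sliced across several slots), maps it with dma_map_page(), and records the page, offset, DMA address, and mapped length in the slot. A simplified sketch that allocates one page per slot and leaves out the reuse logic:

/* Sketch of refilling one page-backed slot; map_size comes from the queue
 * (unmap_q->map_size in the hits) and is assumed to fit in a single page here.
 */
static int rxq_refill_page_sketch(struct device *dev,
				  struct bnad_rx_unmap *unmap, u32 map_size)
{
	struct page *page;
	dma_addr_t dma_addr;

	page = alloc_page(GFP_ATOMIC);
	if (!page)
		return -ENOMEM;

	dma_addr = dma_map_page(dev, page, 0, map_size, DMA_FROM_DEVICE);
	if (dma_mapping_error(dev, dma_addr)) {
		put_page(page);
		return -EIO;
	}

	unmap->page = page;
	unmap->page_offset = 0;
	dma_unmap_addr_set(&unmap->vector, dma_addr, dma_addr);
	unmap->vector.len = map_size;
	return 0;
}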
425 struct bnad_rx_unmap *unmap; in bnad_rxq_refill_skb() local
436 unmap = &unmap_q->unmap[prod]; in bnad_rxq_refill_skb()
455 unmap->skb = skb; in bnad_rxq_refill_skb()
456 dma_unmap_addr_set(&unmap->vector, dma_addr, dma_addr); in bnad_rxq_refill_skb()
457 unmap->vector.len = buff_sz; in bnad_rxq_refill_skb()
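The skb-backed refill in bnad_rxq_refill_skb() follows the same shape, with dma_map_single() on freshly allocated skb data. A sketch, with the buffer size passed in rather than read from the queue:

/* Sketch of refilling one skb-backed slot. */
static int rxq_refill_skb_sketch(struct device *dev, struct net_device *netdev,
				 struct bnad_rx_unmap *unmap, u32 buff_sz)
{
	struct sk_buff *skb;
	dma_addr_t dma_addr;

	skb = netdev_alloc_skb_ip_align(netdev, buff_sz);
	if (!skb)
		return -ENOMEM;

	dma_addr = dma_map_single(dev, skb->data, buff_sz, DMA_FROM_DEVICE);
	if (dma_mapping_error(dev, dma_addr)) {
		dev_kfree_skb_any(skb);
		return -EIO;
	}

	unmap->skb = skb;
	dma_unmap_addr_set(&unmap->vector, dma_addr, dma_addr);
	unmap->vector.len = buff_sz;
	return 0;
}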
511 struct bnad_rx_unmap *unmap; in bnad_cq_drop_packet() local
516 unmap = &unmap_q->unmap[ci]; in bnad_cq_drop_packet()
520 bnad_rxq_cleanup_skb(bnad, unmap); in bnad_cq_drop_packet()
522 bnad_rxq_cleanup_page(bnad, unmap); in bnad_cq_drop_packet()
545 prefetch(page_address(unmap_q->unmap[ci].page) + in bnad_cq_setup_skb_frags()
546 unmap_q->unmap[ci].page_offset); in bnad_cq_setup_skb_frags()
549 struct bnad_rx_unmap *unmap; in bnad_cq_setup_skb_frags() local
552 unmap = &unmap_q->unmap[ci]; in bnad_cq_setup_skb_frags()
556 dma_unmap_addr(&unmap->vector, dma_addr), in bnad_cq_setup_skb_frags()
557 unmap->vector.len, DMA_FROM_DEVICE); in bnad_cq_setup_skb_frags()
560 skb->truesize += unmap->vector.len; in bnad_cq_setup_skb_frags()
564 unmap->page, unmap->page_offset, len); in bnad_cq_setup_skb_frags()
566 unmap->page = NULL; in bnad_cq_setup_skb_frags()
567 unmap->vector.len = 0; in bnad_cq_setup_skb_frags()
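On the completion path, bnad_cq_setup_skb_frags() hands each completed page to the skb being assembled: unmap the page, attach it as a fragment, and clear the slot. The hits show skb_fill_page_desc() plus a manual truesize adjustment; the sketch below instead uses skb_add_rx_frag(), which folds the length and truesize accounting into one call:

/* Sketch: attaching one completed RX page to the skb under assembly. */
static void cq_add_frag_sketch(struct device *dev, struct sk_buff *skb,
			       struct bnad_rx_unmap *unmap, u32 len)
{
	dma_unmap_page(dev,
		       dma_unmap_addr(&unmap->vector, dma_addr),
		       unmap->vector.len, DMA_FROM_DEVICE);

	/* Hand the page to the skb; skb_add_rx_frag() also updates
	 * skb->len, skb->data_len and skb->truesize.
	 */
	skb_add_rx_frag(skb, skb_shinfo(skb)->nr_frags, unmap->page,
			unmap->page_offset, len, unmap->vector.len);

	unmap->page = NULL;
	dma_unmap_addr_set(&unmap->vector, dma_addr, 0);
	unmap->vector.len = 0;
}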
579 struct bnad_rx_unmap *unmap, u32 len) in bnad_cq_setup_skb() argument
584 dma_unmap_addr(&unmap->vector, dma_addr), in bnad_cq_setup_skb()
585 unmap->vector.len, DMA_FROM_DEVICE); in bnad_cq_setup_skb()
590 unmap->skb = NULL; in bnad_cq_setup_skb()
591 unmap->vector.len = 0; in bnad_cq_setup_skb()
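For small packets received into an skb-backed slot, bnad_cq_setup_skb() only has to unmap the buffer, expose the received bytes with skb_put(), and detach the skb from the slot so the cleanup path cannot free it again. A sketch:

/* Sketch: finishing a packet received into an skb-backed slot. */
static void cq_setup_skb_sketch(struct device *dev, struct sk_buff *skb,
				struct bnad_rx_unmap *unmap, u32 len)
{
	dma_unmap_single(dev,
			 dma_unmap_addr(&unmap->vector, dma_addr),
			 unmap->vector.len, DMA_FROM_DEVICE);

	skb_put(skb, len);		/* expose the received bytes */

	/* Detach the skb from the slot; the skb now owns the buffer. */
	unmap->skb = NULL;
	unmap->vector.len = 0;
}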
600 struct bnad_rx_unmap *unmap = NULL; in bnad_cq_process() local
638 unmap = &unmap_q->unmap[sop_ci]; in bnad_cq_process()
639 skb = unmap->skb; in bnad_cq_process()
698 bnad_cq_setup_skb(bnad, skb, unmap, len); in bnad_cq_process()
1983 /* Fill Unmap Q memory requirements */ in bnad_setup_tx()
2228 /* Fill Unmap Q memory requirements */ in bnad_setup_rx()
2932 struct bnad_tx_unmap *unmap_q, *unmap, *head_unmap; in bnad_start_xmit() local
3029 unmap = head_unmap; in bnad_start_xmit()
3039 dma_unmap_addr_set(&unmap->vectors[0], dma_addr, dma_addr); in bnad_start_xmit()
3063 unmap = &unmap_q[prod]; in bnad_start_xmit()
3077 dma_unmap_len_set(&unmap->vectors[vect_id], dma_len, size); in bnad_start_xmit()
3080 dma_unmap_addr_set(&unmap->vectors[vect_id], dma_addr, in bnad_start_xmit()
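Finally, bnad_start_xmit() is where the TX unmap queue is filled: the head entry records the skb and total vector count, vector 0 maps the linear data with dma_map_single(), and each page fragment gets its own vector via skb_frag_dma_map() with both the DMA address and length saved for bnad_tx_buff_unmap() to undo later. A sketch that keeps all vectors in a single unmap entry and leaves out the ring bookkeeping and error unwinding of the real path:

/* Sketch of the transmit-side mapping shown by the bnad_start_xmit() hits. */
static int xmit_map_skb_sketch(struct device *dev, struct sk_buff *skb,
			       struct bnad_tx_unmap *head_unmap)
{
	struct bnad_tx_unmap *unmap = head_unmap;
	unsigned int i, vect_id = 0;
	dma_addr_t dma_addr;

	/* Vector 0: linear skb data. */
	dma_addr = dma_map_single(dev, skb->data, skb_headlen(skb),
				  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, dma_addr))
		return -EIO;
	dma_unmap_addr_set(&unmap->vectors[0], dma_addr, dma_addr);

	/* One vector per page fragment. */
	for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) {
		const skb_frag_t *frag = &skb_shinfo(skb)->frags[i];
		u32 size = skb_frag_size(frag);

		vect_id++;	/* the real code wraps into the next unmap entry here */

		dma_addr = skb_frag_dma_map(dev, frag, 0, size, DMA_TO_DEVICE);
		if (dma_mapping_error(dev, dma_addr))
			return -EIO;	/* the real code unwinds the partial mapping */
		dma_unmap_len_set(&unmap->vectors[vect_id], dma_len, size);
		dma_unmap_addr_set(&unmap->vectors[vect_id], dma_addr, dma_addr);
	}

	head_unmap->skb = skb;
	head_unmap->nvecs = 1 + skb_shinfo(skb)->nr_frags;
	return 0;
}

Storing both address and length per vector at transmit time is what allows the completion and cleanup paths above to unmap without ever touching the skb's fragment list again.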