Home
last modified time | relevance | path

Searched refs:gl (Results 1 – 12 of 12) sorted by relevance

/drivers/target/iscsi/cxgbit/
Dcxgbit_main.c212 cxgbit_copy_frags(struct sk_buff *skb, const struct pkt_gl *gl, in cxgbit_copy_frags() argument
219 __skb_fill_page_desc(skb, skb_frag_idx, gl->frags[0].page, in cxgbit_copy_frags()
220 gl->frags[0].offset + offset, in cxgbit_copy_frags()
221 gl->frags[0].size - offset); in cxgbit_copy_frags()
222 for (i = 1; i < gl->nfrags; i++) in cxgbit_copy_frags()
224 gl->frags[i].page, in cxgbit_copy_frags()
225 gl->frags[i].offset, in cxgbit_copy_frags()
226 gl->frags[i].size); in cxgbit_copy_frags()
228 skb_shinfo(skb)->nr_frags += gl->nfrags; in cxgbit_copy_frags()
231 get_page(gl->frags[gl->nfrags - 1].page); in cxgbit_copy_frags()
[all …]
/drivers/net/ethernet/chelsio/cxgb4vf/
Dsge.c1473 const struct pkt_gl *gl, in copy_frags() argument
1479 __skb_fill_page_desc(skb, 0, gl->frags[0].page, in copy_frags()
1480 gl->frags[0].offset + offset, in copy_frags()
1481 gl->frags[0].size - offset); in copy_frags()
1482 skb_shinfo(skb)->nr_frags = gl->nfrags; in copy_frags()
1483 for (i = 1; i < gl->nfrags; i++) in copy_frags()
1484 __skb_fill_page_desc(skb, i, gl->frags[i].page, in copy_frags()
1485 gl->frags[i].offset, in copy_frags()
1486 gl->frags[i].size); in copy_frags()
1489 get_page(gl->frags[gl->nfrags - 1].page); in copy_frags()
[all …]
Dcxgb4vf_main.c488 const struct pkt_gl *gl) in fwevtq_handler() argument
/drivers/net/ethernet/chelsio/inline_crypto/chtls/
Dchtls_main.c339 static struct sk_buff *copy_gl_to_skb_pkt(const struct pkt_gl *gl, in copy_gl_to_skb_pkt() argument
349 skb = alloc_skb(gl->tot_len + sizeof(struct cpl_pass_accept_req) in copy_gl_to_skb_pkt()
353 __skb_put(skb, gl->tot_len + sizeof(struct cpl_pass_accept_req) in copy_gl_to_skb_pkt()
358 , gl->va + pktshift, in copy_gl_to_skb_pkt()
359 gl->tot_len - pktshift); in copy_gl_to_skb_pkt()
365 const struct pkt_gl *gl, const __be64 *rsp) in chtls_recv_packet() argument
371 skb = copy_gl_to_skb_pkt(gl, rsp, cdev->lldi->sge_pktshift); in chtls_recv_packet()
439 const struct pkt_gl *gl) in chtls_uld_rx_handler() argument
448 if (chtls_recv_packet(cdev, gl, rsp) < 0) in chtls_uld_rx_handler()
453 if (!gl) in chtls_uld_rx_handler()
[all …]
/drivers/infiniband/hw/cxgb4/
Ddevice.c1104 static inline struct sk_buff *copy_gl_to_skb_pkt(const struct pkt_gl *gl, in copy_gl_to_skb_pkt() argument
1117 skb = alloc_skb(gl->tot_len + sizeof(struct cpl_pass_accept_req) + in copy_gl_to_skb_pkt()
1122 __skb_put(skb, gl->tot_len + sizeof(struct cpl_pass_accept_req) + in copy_gl_to_skb_pkt()
1137 gl->va + pktshift, in copy_gl_to_skb_pkt()
1138 gl->tot_len - pktshift); in copy_gl_to_skb_pkt()
1142 static inline int recv_rx_pkt(struct c4iw_dev *dev, const struct pkt_gl *gl, in recv_rx_pkt() argument
1151 skb = copy_gl_to_skb_pkt(gl, rsp, dev->rdev.lldi.sge_pktshift); in recv_rx_pkt()
1167 const struct pkt_gl *gl) in c4iw_uld_rx_handler() argument
1174 if (gl == NULL) { in c4iw_uld_rx_handler()
1183 } else if (gl == CXGB4_MSG_AN) { in c4iw_uld_rx_handler()
[all …]
/drivers/net/ethernet/chelsio/cxgb4/
Dsge.c3313 const struct pkt_gl *gl, unsigned int offset) in copy_frags() argument
3318 __skb_fill_page_desc(skb, 0, gl->frags[0].page, in copy_frags()
3319 gl->frags[0].offset + offset, in copy_frags()
3320 gl->frags[0].size - offset); in copy_frags()
3321 skb_shinfo(skb)->nr_frags = gl->nfrags; in copy_frags()
3322 for (i = 1; i < gl->nfrags; i++) in copy_frags()
3323 __skb_fill_page_desc(skb, i, gl->frags[i].page, in copy_frags()
3324 gl->frags[i].offset, in copy_frags()
3325 gl->frags[i].size); in copy_frags()
3328 get_page(gl->frags[gl->nfrags - 1].page); in copy_frags()
[all …]
Dcxgb4_uld.h483 const struct pkt_gl *gl);
487 const struct pkt_gl *gl,
529 struct sk_buff *cxgb4_pktgl_to_skb(const struct pkt_gl *gl,
Dcxgb4_uld.c75 const struct pkt_gl *gl) in uldrx_handler() argument
88 rsp, gl, &q->lro_mgr, in uldrx_handler()
92 rsp, gl); in uldrx_handler()
99 if (!gl) in uldrx_handler()
101 else if (gl == CXGB4_MSG_AN) in uldrx_handler()
Dcxgb4.h765 const struct pkt_gl *gl);
1620 const struct pkt_gl *gl);
Dcxgb4_main.c557 const struct pkt_gl *gl) in fwevtq_handler() argument
/drivers/scsi/qedi/
Dqedi_main.c1519 struct global_queue **gl = qedi->global_queues; in qedi_free_global_queues() local
1522 if (!gl[i]) in qedi_free_global_queues()
1525 if (gl[i]->cq) in qedi_free_global_queues()
1526 dma_free_coherent(&qedi->pdev->dev, gl[i]->cq_mem_size, in qedi_free_global_queues()
1527 gl[i]->cq, gl[i]->cq_dma); in qedi_free_global_queues()
1528 if (gl[i]->cq_pbl) in qedi_free_global_queues()
1529 dma_free_coherent(&qedi->pdev->dev, gl[i]->cq_pbl_size, in qedi_free_global_queues()
1530 gl[i]->cq_pbl, gl[i]->cq_pbl_dma); in qedi_free_global_queues()
1532 kfree(gl[i]); in qedi_free_global_queues()
/drivers/scsi/qedf/
Dqedf_main.c2932 struct global_queue **gl = qedf->global_queues; in qedf_free_global_queues() local
2935 if (!gl[i]) in qedf_free_global_queues()
2938 if (gl[i]->cq) in qedf_free_global_queues()
2940 gl[i]->cq_mem_size, gl[i]->cq, gl[i]->cq_dma); in qedf_free_global_queues()
2941 if (gl[i]->cq_pbl) in qedf_free_global_queues()
2942 dma_free_coherent(&qedf->pdev->dev, gl[i]->cq_pbl_size, in qedf_free_global_queues()
2943 gl[i]->cq_pbl, gl[i]->cq_pbl_dma); in qedf_free_global_queues()
2945 kfree(gl[i]); in qedf_free_global_queues()