/drivers/target/iscsi/cxgbit/ |
D | cxgbit_main.c |
    215  cxgbit_copy_frags(struct sk_buff *skb, const struct pkt_gl *gl,  in cxgbit_copy_frags() argument
    222  __skb_fill_page_desc(skb, skb_frag_idx, gl->frags[0].page,  in cxgbit_copy_frags()
    223  gl->frags[0].offset + offset,  in cxgbit_copy_frags()
    224  gl->frags[0].size - offset);  in cxgbit_copy_frags()
    225  for (i = 1; i < gl->nfrags; i++)  in cxgbit_copy_frags()
    227  gl->frags[i].page,  in cxgbit_copy_frags()
    228  gl->frags[i].offset,  in cxgbit_copy_frags()
    229  gl->frags[i].size);  in cxgbit_copy_frags()
    231  skb_shinfo(skb)->nr_frags += gl->nfrags;  in cxgbit_copy_frags()
    234  get_page(gl->frags[gl->nfrags - 1].page);  in cxgbit_copy_frags()
    [all …]
|
/drivers/net/ethernet/chelsio/cxgb4vf/ |
D | sge.c |
    1474  const struct pkt_gl *gl,  in copy_frags() argument
    1480  __skb_fill_page_desc(skb, 0, gl->frags[0].page,  in copy_frags()
    1481  gl->frags[0].offset + offset,  in copy_frags()
    1482  gl->frags[0].size - offset);  in copy_frags()
    1483  skb_shinfo(skb)->nr_frags = gl->nfrags;  in copy_frags()
    1484  for (i = 1; i < gl->nfrags; i++)  in copy_frags()
    1485  __skb_fill_page_desc(skb, i, gl->frags[i].page,  in copy_frags()
    1486  gl->frags[i].offset,  in copy_frags()
    1487  gl->frags[i].size);  in copy_frags()
    1490  get_page(gl->frags[gl->nfrags - 1].page);  in copy_frags()
    [all …]
|
D | cxgb4vf_main.c |
    426  const struct pkt_gl *gl)  in fwevtq_handler() argument
|
/drivers/net/ethernet/chelsio/cxgb4/ |
D | sge.c |
    1889  const struct pkt_gl *gl, unsigned int offset)  in copy_frags() argument
    1894  __skb_fill_page_desc(skb, 0, gl->frags[0].page,  in copy_frags()
    1895  gl->frags[0].offset + offset,  in copy_frags()
    1896  gl->frags[0].size - offset);  in copy_frags()
    1897  skb_shinfo(skb)->nr_frags = gl->nfrags;  in copy_frags()
    1898  for (i = 1; i < gl->nfrags; i++)  in copy_frags()
    1899  __skb_fill_page_desc(skb, i, gl->frags[i].page,  in copy_frags()
    1900  gl->frags[i].offset,  in copy_frags()
    1901  gl->frags[i].size);  in copy_frags()
    1904  get_page(gl->frags[gl->nfrags - 1].page);  in copy_frags()
    [all …]
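
The copy_frags() matches above (and the near-identical cxgb4vf and cxgbit hits) all implement the same idea: the SGE free-list gather list (struct pkt_gl) is mapped directly onto the skb's page fragments instead of copying the payload. A minimal reconstruction from the fragments shown here; the loop variable and the comments are assumptions added for readability, and the helpers are the regular kernel skb APIs, so this only builds in kernel context:

    /* Sketch of copy_frags(), pieced together from the matches above. */
    static inline void copy_frags(struct sk_buff *skb,
                                  const struct pkt_gl *gl,
                                  unsigned int offset)
    {
            int i;

            /* frag 0 also holds the part consumed as the skb header,
             * so skip the first 'offset' bytes of it
             */
            __skb_fill_page_desc(skb, 0, gl->frags[0].page,
                                 gl->frags[0].offset + offset,
                                 gl->frags[0].size - offset);
            skb_shinfo(skb)->nr_frags = gl->nfrags;
            for (i = 1; i < gl->nfrags; i++)
                    __skb_fill_page_desc(skb, i, gl->frags[i].page,
                                         gl->frags[i].offset,
                                         gl->frags[i].size);

            /* take a reference on the last page: the free list still owns it */
            get_page(gl->frags[gl->nfrags - 1].page);
    }

cxgbit_copy_frags() in cxgbit_main.c above is the same pattern, except it appends the gather list at the skb's current fragment index and does nr_frags += gl->nfrags rather than overwriting the count, because the iSCSI target builds the skb incrementally.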
|
D | cxgb4_uld.h |
    360  const struct pkt_gl *gl);
    364  const struct pkt_gl *gl,
    390  struct sk_buff *cxgb4_pktgl_to_skb(const struct pkt_gl *gl,
|
D | cxgb4_uld.c |
    104  const struct pkt_gl *gl)  in uldrx_handler() argument
    117  rsp, gl, &q->lro_mgr,  in uldrx_handler()
    121  rsp, gl);  in uldrx_handler()
    128  if (!gl)  in uldrx_handler()
    130  else if (gl == CXGB4_MSG_AN)  in uldrx_handler()
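
uldrx_handler() is where the gather list is handed from the base cxgb4 driver to an upper-layer driver (iSCSI, iWARP, and so on): the ULD's registered rx handler (or its LRO-aware variant when the queue has a flush handler) gets the raw response descriptor plus gl, and the special values NULL and CXGB4_MSG_AN mark an immediate-data response and an async notification rather than a real packet. A hedged sketch of that dispatch; only the lines listed above are taken from the search output, the surrounding structure (q->adap, q->uld, q->flush_handler, the rxq stats fields) is assumed:

    /* Sketch of the ULD rx dispatch path, with assumed field names. */
    static int uldrx_handler(struct sge_rspq *q, const __be64 *rsp,
                             const struct pkt_gl *gl)
    {
            struct adapter *adap = q->adap;
            struct sge_ofld_rxq *rxq = container_of(q, struct sge_ofld_rxq, rspq);
            int ret;

            if (q->flush_handler)           /* LRO-capable queue */
                    ret = adap->uld[q->uld].lro_rx_handler(adap->uld[q->uld].handle,
                                                           rsp, gl, &q->lro_mgr,
                                                           &q->napi);
            else
                    ret = adap->uld[q->uld].rx_handler(adap->uld[q->uld].handle,
                                                       rsp, gl);
            if (ret)
                    return -1;              /* ULD could not take the message */

            /* three kinds of "packet" a ULD can be handed */
            if (!gl)
                    rxq->stats.imm++;       /* immediate data, no gather list */
            else if (gl == CXGB4_MSG_AN)
                    rxq->stats.an++;        /* async notification */
            else
                    rxq->stats.pkts++;      /* normal ingress packet */
            return 0;
    }

Using sentinel pointer values for the no-payload cases lets every ULD share one callback signature; the handlers in device.c below check for exactly these two sentinels.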
|
D | cxgb4.h |
    590   const struct pkt_gl *gl);
    1237  const struct pkt_gl *gl);
|
D | cxgb4_main.c |
    485  const struct pkt_gl *gl)  in fwevtq_handler() argument
|
/drivers/infiniband/hw/cxgb4/ |
D | device.c |
    1094  static inline struct sk_buff *copy_gl_to_skb_pkt(const struct pkt_gl *gl,  in copy_gl_to_skb_pkt() argument
    1107  skb = alloc_skb(gl->tot_len + sizeof(struct cpl_pass_accept_req) +  in copy_gl_to_skb_pkt()
    1112  __skb_put(skb, gl->tot_len + sizeof(struct cpl_pass_accept_req) +  in copy_gl_to_skb_pkt()
    1127  gl->va + pktshift,  in copy_gl_to_skb_pkt()
    1128  gl->tot_len - pktshift);  in copy_gl_to_skb_pkt()
    1132  static inline int recv_rx_pkt(struct c4iw_dev *dev, const struct pkt_gl *gl,  in recv_rx_pkt() argument
    1141  skb = copy_gl_to_skb_pkt(gl , rsp, dev->rdev.lldi.sge_pktshift);  in recv_rx_pkt()
    1157  const struct pkt_gl *gl)  in c4iw_uld_rx_handler() argument
    1164  if (gl == NULL) {  in c4iw_uld_rx_handler()
    1173  } else if (gl == CXGB4_MSG_AN) {  in c4iw_uld_rx_handler()
    [all …]
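
In the iWARP driver the gather list is linearized rather than attached as fragments: copy_gl_to_skb_pkt() allocates an skb large enough for a synthesized cpl_pass_accept_req plus the payload, copies the response descriptor in, then copies gl->va (the virtually contiguous view of the gather list) past the sge_pktshift padding. A rough reconstruction under those assumptions; the sizes and copy offsets follow the fragments above, the exact copy helpers are assumed to be the standard skb ones:

    /* Sketch of copy_gl_to_skb_pkt(): linearize a pkt_gl into a fresh skb
     * with room for a driver-synthesized cpl_pass_accept_req up front.
     */
    static struct sk_buff *copy_gl_to_skb_pkt(const struct pkt_gl *gl,
                                              const __be64 *rsp, u32 pktshift)
    {
            struct sk_buff *skb;
            unsigned int len = gl->tot_len + sizeof(struct cpl_pass_accept_req) +
                               sizeof(struct rss_header) - pktshift;

            skb = alloc_skb(len, GFP_ATOMIC);
            if (unlikely(!skb))
                    return NULL;
            __skb_put(skb, len);

            /* rss_header + CPL come from the response descriptor ... */
            skb_copy_to_linear_data(skb, rsp,
                                    sizeof(struct cpl_pass_accept_req) +
                                    sizeof(struct rss_header));
            /* ... the packet data comes from the gather list, skipping the
             * sge_pktshift padding at the front of gl->va
             */
            skb_copy_to_linear_data_offset(skb,
                                           sizeof(struct rss_header) +
                                           sizeof(struct cpl_pass_accept_req),
                                           gl->va + pktshift,
                                           gl->tot_len - pktshift);
            return skb;
    }

recv_rx_pkt() then passes that skb through the normal cpl_pass_accept_req path, and c4iw_uld_rx_handler() is the ULD callback that distinguishes gl == NULL and gl == CXGB4_MSG_AN, matching the dispatch in cxgb4_uld.c above.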
|
/drivers/iommu/ |
D | intel-svm.c |
    161  unsigned long address, unsigned long pages, int ih, int gl)  in intel_flush_svm_range_dev() argument
    169  if (gl)  in intel_flush_svm_range_dev()
    181  desc.high = QI_EIOTLB_ADDR(address) | QI_EIOTLB_GL(gl) |  in intel_flush_svm_range_dev()
    207  unsigned long pages, int ih, int gl)  in intel_flush_svm_range() argument
    218  intel_flush_svm_range_dev(svm, sdev, address, pages, ih, gl);  in intel_flush_svm_range()
|
/drivers/scsi/qedi/ |
D | qedi_main.c |
    1199  struct global_queue **gl = qedi->global_queues;  in qedi_free_global_queues() local
    1202  if (!gl[i])  in qedi_free_global_queues()
    1205  if (gl[i]->cq)  in qedi_free_global_queues()
    1206  dma_free_coherent(&qedi->pdev->dev, gl[i]->cq_mem_size,  in qedi_free_global_queues()
    1207  gl[i]->cq, gl[i]->cq_dma);  in qedi_free_global_queues()
    1208  if (gl[i]->cq_pbl)  in qedi_free_global_queues()
    1209  dma_free_coherent(&qedi->pdev->dev, gl[i]->cq_pbl_size,  in qedi_free_global_queues()
    1210  gl[i]->cq_pbl, gl[i]->cq_pbl_dma);  in qedi_free_global_queues()
    1212  kfree(gl[i]);  in qedi_free_global_queues()
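
Here gl has nothing to do with the Chelsio gather list: it is a local alias for the QLogic driver's global_queues array, and the function walks it freeing each queue's DMA-coherent completion queue and its page buffer list before freeing the bookkeeping struct. The qedf entry below is the same teardown for the FCoE driver. A minimal sketch of that loop; the cq/cq_pbl fields and their size and DMA-handle names follow the fragments above, while num_queues and the loop bound are assumptions:

    /* Sketch of qedi_free_global_queues(); dma_free_coherent() and kfree()
     * are the standard kernel APIs.
     */
    static void qedi_free_global_queues(struct qedi_ctx *qedi)
    {
            struct global_queue **gl = qedi->global_queues;
            int i;

            for (i = 0; i < qedi->num_queues; i++) {
                    if (!gl[i])
                            continue;

                    /* free the completion queue ring itself ... */
                    if (gl[i]->cq)
                            dma_free_coherent(&qedi->pdev->dev, gl[i]->cq_mem_size,
                                              gl[i]->cq, gl[i]->cq_dma);
                    /* ... and its page buffer list (PBL) */
                    if (gl[i]->cq_pbl)
                            dma_free_coherent(&qedi->pdev->dev, gl[i]->cq_pbl_size,
                                              gl[i]->cq_pbl, gl[i]->cq_pbl_dma);

                    kfree(gl[i]);
            }
    }

Checking each pointer before freeing lets the same function serve as the error-unwind path when allocation of the queues fails partway through.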
|
/drivers/scsi/qedf/ |
D | qedf_main.c |
    2574  struct global_queue **gl = qedf->global_queues;  in qedf_free_global_queues() local
    2577  if (!gl[i])  in qedf_free_global_queues()
    2580  if (gl[i]->cq)  in qedf_free_global_queues()
    2582  gl[i]->cq_mem_size, gl[i]->cq, gl[i]->cq_dma);  in qedf_free_global_queues()
    2583  if (gl[i]->cq_pbl)  in qedf_free_global_queues()
    2584  dma_free_coherent(&qedf->pdev->dev, gl[i]->cq_pbl_size,  in qedf_free_global_queues()
    2585  gl[i]->cq_pbl, gl[i]->cq_pbl_dma);  in qedf_free_global_queues()
    2587  kfree(gl[i]);  in qedf_free_global_queues()
|