/drivers/net/ethernet/chelsio/cxgb4vf/
  sge.c
    1370  const struct pkt_gl *gl,    in copy_frags() argument
    1376  __skb_fill_page_desc(skb, 0, gl->frags[0].page,    in copy_frags()
    1377          gl->frags[0].offset + offset,    in copy_frags()
    1378          gl->frags[0].size - offset);    in copy_frags()
    1379  skb_shinfo(skb)->nr_frags = gl->nfrags;    in copy_frags()
    1380  for (i = 1; i < gl->nfrags; i++)    in copy_frags()
    1381          __skb_fill_page_desc(skb, i, gl->frags[i].page,    in copy_frags()
    1382                  gl->frags[i].offset,    in copy_frags()
    1383                  gl->frags[i].size);    in copy_frags()
    1386  get_page(gl->frags[gl->nfrags - 1].page);    in copy_frags()
    [all …]
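The copy_frags() hits above (and the matching hits in cxgb4's sge.c further down) show the zero-copy receive path: the pages of a pkt_gl gather list are attached to an skb as paged fragments instead of being copied. Below is a minimal sketch of that pattern; the loop body follows the listed lines, while the function wrapper and comments are reconstructed for illustration.

#include <linux/skbuff.h>

static void copy_frags(struct sk_buff *skb, const struct pkt_gl *gl,
                       unsigned int offset)
{
    unsigned int i;

    /* Fragment 0 skips "offset" bytes that were already pulled into the
     * skb's linear area (typically the packet headers). */
    __skb_fill_page_desc(skb, 0, gl->frags[0].page,
                         gl->frags[0].offset + offset,
                         gl->frags[0].size - offset);
    skb_shinfo(skb)->nr_frags = gl->nfrags;

    /* The remaining gather-list entries are attached unchanged. */
    for (i = 1; i < gl->nfrags; i++)
        __skb_fill_page_desc(skb, i, gl->frags[i].page,
                             gl->frags[i].offset,
                             gl->frags[i].size);

    /* The driver does not own a reference on the last page, so take one
     * before the skb (and its pages) is handed to the network stack. */
    get_page(gl->frags[gl->nfrags - 1].page);
}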
  cxgb4vf_main.c
    396   const struct pkt_gl *gl)    in fwevtq_handler() argument
/drivers/scsi/cxgbi/ |
  libcxgbi.c
    1267  struct cxgbi_gather_list *gl, unsigned int gidx)    in cxgbi_ddp_ppod_set() argument
    1273  ppod->addr[i] = gidx < gl->nelem ?    in cxgbi_ddp_ppod_set()
    1274          cpu_to_be64(gl->phys_addr[gidx]) : 0ULL;    in cxgbi_ddp_ppod_set()
    1288  struct cxgbi_gather_list *gl)    in ddp_find_unused_entries() argument
    1308  ddp->gl_map[k] = gl;    in ddp_find_unused_entries()
    1330  struct cxgbi_gather_list *gl)    in ddp_gl_unmap() argument
    1334  for (i = 0; i < gl->nelem; i++)    in ddp_gl_unmap()
    1335          dma_unmap_page(&pdev->dev, gl->phys_addr[i], PAGE_SIZE,    in ddp_gl_unmap()
    1340  struct cxgbi_gather_list *gl)    in ddp_gl_map() argument
    1344  for (i = 0; i < gl->nelem; i++) {    in ddp_gl_map()
    [all …]
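The libcxgbi.c hits trace the iSCSI DDP setup: ddp_gl_map()/ddp_gl_unmap() DMA-map each page of a cxgbi_gather_list, and cxgbi_ddp_ppod_set() later copies the resulting bus addresses into page-pod (ppod) entries, zero-filling slots past gl->nelem. A sketch of the map/unmap pair follows, assuming the usual cxgbi_gather_list layout (pages[], phys_addr[], nelem); the error unwinding is an illustration, not taken from the listing.

#include <linux/dma-mapping.h>
#include <linux/pci.h>

static void ddp_gl_unmap(struct pci_dev *pdev, struct cxgbi_gather_list *gl)
{
    int i;

    for (i = 0; i < gl->nelem; i++)
        dma_unmap_page(&pdev->dev, gl->phys_addr[i], PAGE_SIZE,
                       DMA_FROM_DEVICE);
}

static int ddp_gl_map(struct pci_dev *pdev, struct cxgbi_gather_list *gl)
{
    int i;

    for (i = 0; i < gl->nelem; i++) {
        /* The bus address recorded here is what cxgbi_ddp_ppod_set()
         * writes into ppod->addr[] later on. */
        gl->phys_addr[i] = dma_map_page(&pdev->dev, gl->pages[i], 0,
                                        PAGE_SIZE, DMA_FROM_DEVICE);
        if (unlikely(dma_mapping_error(&pdev->dev, gl->phys_addr[i])))
            goto unwind;
    }
    return 0;

unwind:
    /* Unmap whatever was mapped before the failure. */
    while (--i >= 0)
        dma_unmap_page(&pdev->dev, gl->phys_addr[i], PAGE_SIZE,
                       DMA_FROM_DEVICE);
    return -ENOMEM;
}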
/drivers/net/ethernet/chelsio/cxgb4/ |
  sge.c
    1591  const struct pkt_gl *gl, unsigned int offset)    in copy_frags() argument
    1596  __skb_fill_page_desc(skb, 0, gl->frags[0].page,    in copy_frags()
    1597          gl->frags[0].offset + offset,    in copy_frags()
    1598          gl->frags[0].size - offset);    in copy_frags()
    1599  skb_shinfo(skb)->nr_frags = gl->nfrags;    in copy_frags()
    1600  for (i = 1; i < gl->nfrags; i++)    in copy_frags()
    1601          __skb_fill_page_desc(skb, i, gl->frags[i].page,    in copy_frags()
    1602                  gl->frags[i].offset,    in copy_frags()
    1603                  gl->frags[i].size);    in copy_frags()
    1606  get_page(gl->frags[gl->nfrags - 1].page);    in copy_frags()
    [all …]
  cxgb4_uld.h
    275   const struct pkt_gl *gl);
    298   struct sk_buff *cxgb4_pktgl_to_skb(const struct pkt_gl *gl,
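cxgb4_uld.h declares the upper-layer-driver (ULD) hooks that receive a pkt_gl, plus cxgb4_pktgl_to_skb(), which copies the head of a gather list into a linear skb and leaves the rest attached as fragments. The hypothetical receive hook below shows typical use; the skb_len/pull_len arguments (128, 128) and the __be64 response type are assumptions, since the listing only shows the truncated first parameter.

#include <linux/skbuff.h>

/* Hypothetical ULD receive hook; the three-argument shape matches the
 * rx_handler() call visible in cxgb4_main.c below. */
static int example_uld_rx_handler(void *handle, const __be64 *rsp,
                                  const struct pkt_gl *gl)
{
    struct sk_buff *skb;

    if (!gl || gl == CXGB4_MSG_AN)
        return 0;    /* no payload pages, or async-notification marker */

    /* Pull the first 128 bytes into the linear area; the remainder of the
     * gather list stays attached as page fragments (sizes illustrative). */
    skb = cxgb4_pktgl_to_skb(gl, 128, 128);
    if (!skb)
        return -ENOMEM;

    /* ... hand the skb to the protocol layer ... */
    kfree_skb(skb);
    return 0;
}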
  cxgb4.h
    446   const struct pkt_gl *gl);
    880   const struct pkt_gl *gl);
  cxgb4_main.c
    792   const struct pkt_gl *gl)    in fwevtq_handler() argument
    884   const struct pkt_gl *gl)    in uldrx_handler() argument
    894   if (ulds[q->uld].rx_handler(q->adap->uld_handle[q->uld], rsp, gl)) {    in uldrx_handler()
    898   if (gl == NULL)    in uldrx_handler()
    900   else if (gl == CXGB4_MSG_AN)    in uldrx_handler()
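The cxgb4_main.c hits are the generic dispatch point: uldrx_handler() forwards each response to the registered ULD's rx_handler and then classifies it by the gl argument, which is either NULL (immediate data only), the special CXGB4_MSG_AN marker (asynchronous notification), or a real gather list. A sketch of that logic; the rxq statistics fields are a plausible reconstruction rather than part of the listing.

static int uldrx_handler(struct sge_rspq *q, const __be64 *rsp,
                         const struct pkt_gl *gl)
{
    struct sge_ofld_rxq *rxq = container_of(q, struct sge_ofld_rxq, rspq);

    /* Hand the raw response (and gather list, if any) to the ULD. */
    if (ulds[q->uld].rx_handler(q->adap->uld_handle[q->uld], rsp, gl)) {
        rxq->stats.nomem++;       /* ULD could not accept the message */
        return -1;
    }

    if (gl == NULL)
        rxq->stats.imm++;         /* immediate data, no payload pages */
    else if (gl == CXGB4_MSG_AN)
        rxq->stats.an++;          /* asynchronous notification marker */
    else
        rxq->stats.pkts++;        /* normal packet with a gather list */
    return 0;
}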
/drivers/infiniband/hw/cxgb4/ |
  device.c
    1049  static inline struct sk_buff *copy_gl_to_skb_pkt(const struct pkt_gl *gl,    in copy_gl_to_skb_pkt() argument
    1062  skb = alloc_skb(gl->tot_len + sizeof(struct cpl_pass_accept_req) +    in copy_gl_to_skb_pkt()
    1067  __skb_put(skb, gl->tot_len + sizeof(struct cpl_pass_accept_req) +    in copy_gl_to_skb_pkt()
    1082          gl->va + pktshift,    in copy_gl_to_skb_pkt()
    1083          gl->tot_len - pktshift);    in copy_gl_to_skb_pkt()
    1087  static inline int recv_rx_pkt(struct c4iw_dev *dev, const struct pkt_gl *gl,    in recv_rx_pkt() argument
    1096  skb = copy_gl_to_skb_pkt(gl , rsp, dev->rdev.lldi.sge_pktshift);    in recv_rx_pkt()
    1113  const struct pkt_gl *gl)    in c4iw_uld_rx_handler() argument
    1120  if (gl == NULL) {    in c4iw_uld_rx_handler()
    1129  } else if (gl == CXGB4_MSG_AN) {    in c4iw_uld_rx_handler()
    [all …]
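In iw_cxgb4, copy_gl_to_skb_pkt() takes the opposite approach from copy_frags(): it linearizes the whole gather list into a freshly allocated skb, reserving room in front of the payload so the driver can synthesize a cpl_pass_accept_req. A sketch reconstructed around the listed lines; the rss_header arithmetic and the skb_copy_to_linear_data*() calls cover parts the listing truncates and are assumptions.

static struct sk_buff *copy_gl_to_skb_pkt(const struct pkt_gl *gl,
                                          const __be64 *rsp, u32 pktshift)
{
    struct sk_buff *skb;

    /* Room for the synthesized CPL plus the packet body. */
    skb = alloc_skb(gl->tot_len + sizeof(struct cpl_pass_accept_req) +
                    sizeof(struct rss_header) - pktshift, GFP_ATOMIC);
    if (unlikely(!skb))
        return NULL;

    __skb_put(skb, gl->tot_len + sizeof(struct cpl_pass_accept_req) +
              sizeof(struct rss_header) - pktshift);

    /* CPL header words come from the response descriptor ... */
    skb_copy_to_linear_data(skb, rsp, sizeof(struct cpl_pass_accept_req) +
                            sizeof(struct rss_header));

    /* ... and the packet body from the gather list, skipping the pad the
     * hardware inserts in front of the Ethernet header (pktshift). */
    skb_copy_to_linear_data_offset(skb, sizeof(struct rss_header) +
                                   sizeof(struct cpl_pass_accept_req),
                                   gl->va + pktshift,
                                   gl->tot_len - pktshift);
    return skb;
}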
/drivers/scsi/cxgbi/cxgb4i/ |
  cxgb4i.c
    1464  struct cxgbi_gather_list *gl,    in ddp_ppod_write_idata() argument
    1493  if (!hdr && !gl)    in ddp_ppod_write_idata()
    1496  cxgbi_ddp_ppod_set(ppod, hdr, gl, gl_pidx);    in ddp_ppod_write_idata()
    1505  struct cxgbi_gather_list *gl)    in ddp_set_map() argument
    1515  idx, cnt, gl, 4 * i);    in ddp_set_map()
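The cxgb4i hits (and the cxgb3i ones below) show how a gather list is programmed into the adapter's DDP page-pod table: ddp_set_map() walks the pod range and calls ddp_ppod_write_idata(), advancing the gather-list index by one pod's worth of pages each step (the literal 4 * i here, i * PPOD_PAGES_MAX in cxgb3i). A sketch of that loop; the function signature, the chunking limit, and the leading parameters of ddp_ppod_write_idata() are assumptions, since only "idx, cnt, gl, 4 * i" is visible above.

static int ddp_set_map(struct cxgbi_sock *csk, struct cxgbi_pagepod_hdr *hdr,
                       unsigned int idx, unsigned int npods,
                       struct cxgbi_gather_list *gl)
{
    unsigned int i, cnt;
    int err = 0;

    for (i = 0; i < npods; i += cnt, idx += cnt) {
        /* Write at most one work request's worth of ppods at a time
         * (ULPMEM_IDATA_MAX_NPPODS is an assumed chunking limit). */
        cnt = min(npods - i, (unsigned int)ULPMEM_IDATA_MAX_NPPODS);

        /* The gather-list start index advances PPOD_PAGES_MAX (4) pages
         * per ppod, hence "4 * i". */
        err = ddp_ppod_write_idata(csk->cdev, csk->port_id, hdr,
                                   idx, cnt, gl, 4 * i);
        if (err < 0)
            break;
    }
    return err;
}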
/drivers/scsi/cxgbi/cxgb3i/ |
  cxgb3i.c
    1082  struct cxgbi_gather_list *gl)    in ddp_set_map() argument
    1091  csk, idx, npods, gl);    in ddp_set_map()
    1103  hdr, gl, i * PPOD_PAGES_MAX);    in ddp_set_map()