Searched refs:sg_list (Results 1 – 25 of 109) sorted by relevance

/drivers/virt/
fsl_hypervisor.c
153 struct fh_sg_list *sg_list = NULL; in ioctl_memcpy() local
246 sg_list = PTR_ALIGN(sg_list_unaligned, sizeof(struct fh_sg_list)); in ioctl_memcpy()
264 sg_list[0].source = page_to_phys(pages[0]) + lb_offset; in ioctl_memcpy()
265 sg_list[0].target = param.remote_paddr; in ioctl_memcpy()
267 sg_list[0].source = param.remote_paddr; in ioctl_memcpy()
268 sg_list[0].target = page_to_phys(pages[0]) + lb_offset; in ioctl_memcpy()
270 sg_list[0].size = min_t(uint64_t, param.count, PAGE_SIZE - lb_offset); in ioctl_memcpy()
272 remote_paddr = param.remote_paddr + sg_list[0].size; in ioctl_memcpy()
273 count = param.count - sg_list[0].size; in ioctl_memcpy()
278 sg_list[i].source = page_to_phys(pages[i]); in ioctl_memcpy()
[all …]
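
The fsl_hypervisor.c hits above show a scatter/gather copy list being built for the hypervisor memcpy ioctl: entry 0 covers the possibly unaligned head of the local buffer, and each following entry covers one page. A minimal sketch of that fill loop, assuming the source/target/size fields visible in the excerpt; the struct layout and the helper name build_copy_list() are illustrative, and the direction is fixed as local-to-remote:

/*
 * Minimal sketch of the copy-list fill in ioctl_memcpy() above.
 * The fh_sg_list field names come from the excerpt; this layout and
 * build_copy_list() are illustrative, not the driver's real definitions.
 */
#include <linux/types.h>
#include <linux/kernel.h>
#include <linux/mm.h>

struct fh_sg_list {
    u64 source;    /* physical address to copy from */
    u64 target;    /* physical address to copy to */
    u64 size;      /* byte count for this entry */
};

static void build_copy_list(struct fh_sg_list *sg_list, struct page **pages,
                            unsigned int nr_pages, u64 remote_paddr,
                            u64 count, unsigned long lb_offset)
{
    unsigned int i;

    /* Entry 0 covers the (possibly unaligned) head of the local buffer. */
    sg_list[0].source = page_to_phys(pages[0]) + lb_offset;
    sg_list[0].target = remote_paddr;
    sg_list[0].size = min_t(u64, count, PAGE_SIZE - lb_offset);

    remote_paddr += sg_list[0].size;
    count -= sg_list[0].size;

    /* Each remaining entry covers one whole (or final partial) page. */
    for (i = 1; i < nr_pages; i++) {
        sg_list[i].source = page_to_phys(pages[i]);
        sg_list[i].target = remote_paddr;
        sg_list[i].size = min_t(u64, count, PAGE_SIZE);
        remote_paddr += sg_list[i].size;
        count -= sg_list[i].size;
    }
}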
/drivers/infiniband/hw/cxgb3/
iwch_qp.c
73 if ((plen + wr->sg_list[i].length) < plen) in build_rdma_send()
76 plen += wr->sg_list[i].length; in build_rdma_send()
77 wqe->send.sgl[i].stag = cpu_to_be32(wr->sg_list[i].lkey); in build_rdma_send()
78 wqe->send.sgl[i].len = cpu_to_be32(wr->sg_list[i].length); in build_rdma_send()
79 wqe->send.sgl[i].to = cpu_to_be64(wr->sg_list[i].addr); in build_rdma_send()
110 if ((plen + wr->sg_list[i].length) < plen) { in build_rdma_write()
113 plen += wr->sg_list[i].length; in build_rdma_write()
115 cpu_to_be32(wr->sg_list[i].lkey); in build_rdma_write()
117 cpu_to_be32(wr->sg_list[i].length); in build_rdma_write()
119 cpu_to_be64(wr->sg_list[i].addr); in build_rdma_write()
[all …]
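
The recurring test (plen + wr->sg_list[i].length) < plen in iwch_qp.c is an unsigned-overflow guard: if adding a 32-bit SGE length makes the running total smaller, the sum has wrapped. A sketch of the same guard in isolation, assuming the generic struct ib_sge; the helper name total_sge_len() is hypothetical:

/*
 * Sketch of the wraparound guard used by build_rdma_send() and
 * build_rdma_write() above. total_sge_len() is an illustrative name.
 */
#include <linux/errno.h>
#include <rdma/ib_verbs.h>

static int total_sge_len(const struct ib_sge *sg_list, int num_sge, u32 *plen)
{
    u32 len = 0;
    int i;

    for (i = 0; i < num_sge; i++) {
        /* If the u32 sum wraps, the result is smaller than before. */
        if (len + sg_list[i].length < len)
            return -EMSGSIZE;
        len += sg_list[i].length;
    }
    *plen = len;
    return 0;
}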
/drivers/scsi/qla2xxx/
qla_bsg.c
43 bsg_job->request_payload.sg_list, in qla2x00_bsg_sp_free()
48 bsg_job->reply_payload.sg_list, in qla2x00_bsg_sp_free()
51 dma_unmap_sg(&ha->pdev->dev, bsg_job->request_payload.sg_list, in qla2x00_bsg_sp_free()
54 dma_unmap_sg(&ha->pdev->dev, bsg_job->reply_payload.sg_list, in qla2x00_bsg_sp_free()
187 bsg_job->reply_payload.sg_list, in qla24xx_proc_fcp_prio_cfg_cmd()
214 sg_copy_to_buffer(bsg_job->request_payload.sg_list, in qla24xx_proc_fcp_prio_cfg_cmd()
342 dma_map_sg(&ha->pdev->dev, bsg_job->request_payload.sg_list, in qla2x00_process_els()
345 dma_unmap_sg(&ha->pdev->dev, bsg_job->request_payload.sg_list, in qla2x00_process_els()
351 rsp_sg_cnt = dma_map_sg(&ha->pdev->dev, bsg_job->reply_payload.sg_list, in qla2x00_process_els()
354 dma_unmap_sg(&ha->pdev->dev, bsg_job->reply_payload.sg_list, in qla2x00_process_els()
[all …]
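
qla_bsg.c maps the bsg request and reply payloads for DMA and unmaps them on completion and on every error path. A sketch of the pairing, assuming struct bsg_job's request_payload/reply_payload buffers from bsg-lib; map_bsg_payloads() is a hypothetical helper, and the directions are chosen for a host-to-device request with a device-to-host reply:

/*
 * Sketch of the dma_map_sg()/dma_unmap_sg() pairing seen in
 * qla2x00_process_els() above. map_bsg_payloads() is illustrative.
 */
#include <linux/errno.h>
#include <linux/dma-mapping.h>
#include <linux/bsg-lib.h>

static int map_bsg_payloads(struct device *dev, struct bsg_job *job)
{
    int req_cnt, rsp_cnt;

    req_cnt = dma_map_sg(dev, job->request_payload.sg_list,
                         job->request_payload.sg_cnt, DMA_TO_DEVICE);
    if (!req_cnt)
        return -ENOMEM;

    rsp_cnt = dma_map_sg(dev, job->reply_payload.sg_list,
                         job->reply_payload.sg_cnt, DMA_FROM_DEVICE);
    if (!rsp_cnt) {
        /* Unmap with the nents originally passed in, not the mapped count. */
        dma_unmap_sg(dev, job->request_payload.sg_list,
                     job->request_payload.sg_cnt, DMA_TO_DEVICE);
        return -ENOMEM;
    }
    return 0;
}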
/drivers/infiniband/hw/qib/
qib_ruc.c
55 ss->sg_list = qp->r_sg_list; in qib_init_sge()
58 if (wqe->sg_list[i].length == 0) in qib_init_sge()
61 ret = rvt_lkey_ok(rkt, pd, j ? &ss->sg_list[j - 1] : &ss->sge, in qib_init_sge()
62 NULL, &wqe->sg_list[i], in qib_init_sge()
66 qp->r_len += wqe->sg_list[i].length; in qib_init_sge()
76 struct rvt_sge *sge = --j ? &ss->sg_list[j - 1] : &ss->sge; in qib_init_sge()
413 sqp->s_sge.sge = wqe->sg_list[0]; in qib_ruc_loopback()
414 sqp->s_sge.sg_list = wqe->sg_list + 1; in qib_ruc_loopback()
453 qp->r_sge.sg_list = NULL; in qib_ruc_loopback()
467 sqp->s_sge.sg_list = NULL; in qib_ruc_loopback()
[all …]
qib_ud.c
170 ssge.sg_list = swqe->sg_list + 1; in qib_ud_loopback()
171 ssge.sge = *swqe->sg_list; in qib_ud_loopback()
188 *sge = *ssge.sg_list++; in qib_ud_loopback()
324 qp->s_sge.sge = wqe->sg_list[0]; in qib_make_ud_req()
325 qp->s_sge.sg_list = wqe->sg_list + 1; in qib_make_ud_req()
qib_uc.c
100 qp->s_sge.sge = wqe->sg_list[0]; in qib_make_uc_req()
101 qp->s_sge.sg_list = wqe->sg_list + 1; in qib_make_uc_req()
422 qp->r_sge.sg_list = NULL; in qib_uc_rcv()
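
The qib hits share one idiom: the first SGE of a work-queue entry is copied by value into s_sge.sge (the SGE currently being consumed) while s_sge.sg_list points at the rest. The hfi1 hits below follow the identical pattern. A sketch against rdmavt's types, with the hypothetical name prime_sge_state(); field names follow rvt_sge_state/rvt_swqe as I understand them and should be checked against the headers:

/*
 * Sketch of the split SGE-state setup used by qib above and hfi1 below:
 * the active SGE lives by value in ss->sge, the unconsumed remainder is
 * referenced through ss->sg_list. prime_sge_state() is illustrative.
 */
#include <rdma/rdmavt_qp.h>

static void prime_sge_state(struct rvt_sge_state *ss, struct rvt_swqe *wqe)
{
    ss->sge = wqe->sg_list[0];      /* current SGE, by value */
    ss->sg_list = wqe->sg_list + 1; /* remaining SGEs, by reference */
    ss->num_sge = wqe->wr.num_sge;
    ss->total_len = wqe->length;
}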
/drivers/infiniband/hw/hfi1/
ruc.c
71 ss->sg_list = qp->r_sg_list; in init_sge()
74 if (wqe->sg_list[i].length == 0) in init_sge()
77 ret = rvt_lkey_ok(rkt, pd, j ? &ss->sg_list[j - 1] : &ss->sge, in init_sge()
78 NULL, &wqe->sg_list[i], in init_sge()
82 qp->r_len += wqe->sg_list[i].length; in init_sge()
92 struct rvt_sge *sge = --j ? &ss->sg_list[j - 1] : &ss->sge; in init_sge()
408 sqp->s_sge.sge = wqe->sg_list[0]; in ruc_loopback()
409 sqp->s_sge.sg_list = wqe->sg_list + 1; in ruc_loopback()
471 qp->r_sge.sg_list = NULL; in ruc_loopback()
485 sqp->s_sge.sg_list = NULL; in ruc_loopback()
[all …]
ud.c
219 ssge.sg_list = swqe->sg_list + 1; in ud_loopback()
220 ssge.sge = *swqe->sg_list; in ud_loopback()
237 *sge = *ssge.sg_list++; in ud_loopback()
552 qp->s_sge.sge = wqe->sg_list[0]; in hfi1_make_ud_req()
553 qp->s_sge.sg_list = wqe->sg_list + 1; in hfi1_make_ud_req()
uc.c
157 qp->s_sge.sge = wqe->sg_list[0]; in hfi1_make_uc_req()
158 qp->s_sge.sg_list = wqe->sg_list + 1; in hfi1_make_uc_req()
501 qp->r_sge.sg_list = NULL; in hfi1_uc_rcv()
/drivers/dma/
imx-dma.c
165 struct scatterlist *sg_list; member
806 kfree(imxdmac->sg_list); in imxdma_free_chan_resources()
807 imxdmac->sg_list = NULL; in imxdma_free_chan_resources()
881 kfree(imxdmac->sg_list); in imxdma_prep_dma_cyclic()
883 imxdmac->sg_list = kcalloc(periods + 1, in imxdma_prep_dma_cyclic()
885 if (!imxdmac->sg_list) in imxdma_prep_dma_cyclic()
888 sg_init_table(imxdmac->sg_list, periods); in imxdma_prep_dma_cyclic()
891 sg_assign_page(&imxdmac->sg_list[i], NULL); in imxdma_prep_dma_cyclic()
892 imxdmac->sg_list[i].offset = 0; in imxdma_prep_dma_cyclic()
893 imxdmac->sg_list[i].dma_address = dma_addr; in imxdma_prep_dma_cyclic()
[all …]
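
imx-dma keeps a driver-owned sg_list member and rebuilds it in imxdma_prep_dma_cyclic(): one scatterlist entry per period, each pointing at the next slice of the buffer, with one spare slot so the table can be chained back to entry 0. A sketch of that setup, with the illustrative helper name alloc_cyclic_sg():

/*
 * Sketch of the per-period scatterlist rebuild in imxdma_prep_dma_cyclic()
 * above. alloc_cyclic_sg() is an illustrative name; the extra slot allows
 * chaining the table back to entry 0 for cyclic operation.
 */
#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct scatterlist *alloc_cyclic_sg(dma_addr_t dma_addr,
                                           size_t buf_len, size_t period_len)
{
    unsigned int periods = buf_len / period_len;
    struct scatterlist *sgl;
    unsigned int i;

    sgl = kcalloc(periods + 1, sizeof(*sgl), GFP_KERNEL);
    if (!sgl)
        return NULL;

    sg_init_table(sgl, periods);

    for (i = 0; i < periods; i++) {
        /* No backing page: the entries carry pre-mapped DMA addresses. */
        sg_assign_page(&sgl[i], NULL);
        sgl[i].offset = 0;
        sgl[i].dma_address = dma_addr + i * period_len;
        sg_dma_len(&sgl[i]) = period_len;
    }
    return sgl;
}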
/drivers/infiniband/hw/qedr/
qedr_cm.c
110 qp->rqe_wr_id[qp->rq.gsi_cons].sg_list[0].length = in qedr_ll2_complete_rx_packet()
404 send_size += swr->sg_list[i].length; in qedr_gsi_build_header()
553 packet->payload[i].baddr = swr->sg_list[i].addr; in qedr_gsi_build_packet()
554 packet->payload[i].len = swr->sg_list[i].length; in qedr_gsi_build_packet()
661 wr->sg_list[0].addr, in qedr_gsi_post_recv()
662 wr->sg_list[0].length, in qedr_gsi_post_recv()
674 qp->rqe_wr_id[qp->rq.prod].sg_list[0] = wr->sg_list[0]; in qedr_gsi_post_recv()
712 wc[i].byte_len = qp->rqe_wr_id[qp->rq.cons].sg_list[0].length; in qedr_gsi_poll_cq()
/drivers/scsi/aacraid/
commctrl.c
493 void *sg_list[HBA_MAX_SG_EMBEDDED]; in aac_send_raw_srb() local
518 memset(sg_list, 0, sizeof(sg_list)); /* cleanup may take issue */ in aac_send_raw_srb()
557 if (user_srbcmd->sg.count > ARRAY_SIZE(sg_list)) { in aac_send_raw_srb()
685 sg_list[i] = p; // save so we can clean up later in aac_send_raw_srb()
746 sg_list[i] = p; // save so we can clean up later in aac_send_raw_srb()
801 sg_list[i] = p; // save so we can clean up later in aac_send_raw_srb()
858 sg_list[i] = p; // save so we can clean up later in aac_send_raw_srb()
897 sg_list[i] = p; // save so we can clean up later in aac_send_raw_srb()
937 if (copy_to_user(sg_user[i], sg_list[i], sg_count[i])) { in aac_send_raw_srb()
992 kfree(sg_list[i]); in aac_send_raw_srb()
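
commctrl.c zeroes a local sg_list[] array up front, records every kernel bounce buffer it allocates for an SG entry, and on exit copies each buffer back to user space before freeing it; the zero-initialization makes kfree() on untouched slots a harmless no-op. A sketch of the cleanup half, with illustrative names:

/*
 * Sketch of the bounce-buffer bookkeeping in aac_send_raw_srb() above.
 * copy_back_and_free() is illustrative; the real code bounds the entry
 * count against ARRAY_SIZE(sg_list) as the excerpt shows.
 */
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/uaccess.h>

static int copy_back_and_free(void __user *sg_user[], void *sg_list[],
                              u32 sg_count[], unsigned int entries)
{
    unsigned int i;
    int ret = 0;

    for (i = 0; i < entries; i++) {
        if (sg_list[i] &&
            copy_to_user(sg_user[i], sg_list[i], sg_count[i]))
            ret = -EFAULT;
        kfree(sg_list[i]);    /* kfree(NULL) is a no-op */
    }
    return ret;
}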
/drivers/infiniband/hw/bnxt_re/
ib_verbs.c
107 struct bnxt_qplib_sge *sg_list, int num) in bnxt_re_build_sgl() argument
112 sg_list[i].addr = ib_sg_list[i].addr; in bnxt_re_build_sgl()
113 sg_list[i].lkey = ib_sg_list[i].lkey; in bnxt_re_build_sgl()
114 sg_list[i].size = ib_sg_list[i].length; in bnxt_re_build_sgl()
115 total += sg_list[i].size; in bnxt_re_build_sgl()
1773 wqe->sg_list[i].addr = wqe->sg_list[i - 1].addr; in bnxt_re_build_qp1_send_v2()
1774 wqe->sg_list[i].lkey = wqe->sg_list[i - 1].lkey; in bnxt_re_build_qp1_send_v2()
1775 wqe->sg_list[i].size = wqe->sg_list[i - 1].size; in bnxt_re_build_qp1_send_v2()
1799 wqe->sg_list[0].addr = sge.addr; in bnxt_re_build_qp1_send_v2()
1800 wqe->sg_list[0].lkey = sge.lkey; in bnxt_re_build_qp1_send_v2()
[all …]
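
bnxt_re_build_sgl() is a plain translation loop from the generic struct ib_sge (addr/length/lkey) to the device's own SGE layout, accumulating the total payload length as it goes; i40iw_copy_sg_list() further down does the same job for yet another field layout. A sketch, with the device struct reconstructed from the excerpt and build_sgl() as a stand-in name:

/*
 * Sketch of the generic-to-device SGE translation in bnxt_re_build_sgl()
 * above. The bnxt_qplib_sge layout is reconstructed from the excerpt for
 * illustration only.
 */
#include <rdma/ib_verbs.h>

struct bnxt_qplib_sge {
    u64 addr;
    u32 lkey;
    u32 size;
};

static int build_sgl(const struct ib_sge *ib_sg_list,
                     struct bnxt_qplib_sge *sg_list, int num)
{
    int i, total = 0;

    for (i = 0; i < num; i++) {
        sg_list[i].addr = ib_sg_list[i].addr;
        sg_list[i].lkey = ib_sg_list[i].lkey;
        sg_list[i].size = ib_sg_list[i].length;
        total += sg_list[i].size;
    }
    return total;
}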
/drivers/infiniband/hw/cxgb4/
qp.c
398 if ((plen + wr->sg_list[i].length) > max) in build_immd()
400 srcp = (u8 *)(unsigned long)wr->sg_list[i].addr; in build_immd()
401 plen += wr->sg_list[i].length; in build_immd()
402 rem = wr->sg_list[i].length; in build_immd()
428 struct fw_ri_isgl *isglp, struct ib_sge *sg_list, in build_isgl() argument
437 if ((plen + sg_list[i].length) < plen) in build_isgl()
439 plen += sg_list[i].length; in build_isgl()
440 *flitp = cpu_to_be64(((u64)sg_list[i].lkey << 32) | in build_isgl()
441 sg_list[i].length); in build_isgl()
444 *flitp = cpu_to_be64(sg_list[i].addr); in build_isgl()
[all …]
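
cxgb4's build_isgl() packs each SGE into two big-endian 64-bit "flits": lkey and length share the first, the address fills the second, with the same wraparound guard as the cxgb3 code. A sketch, using the hypothetical name pack_isgl():

/*
 * Sketch of the big-endian flit packing in build_isgl() above.
 * pack_isgl() is an illustrative name.
 */
#include <linux/errno.h>
#include <rdma/ib_verbs.h>

static int pack_isgl(__be64 *flitp, const struct ib_sge *sg_list, int num_sge)
{
    u32 plen = 0;
    int i;

    for (i = 0; i < num_sge; i++) {
        if (plen + sg_list[i].length < plen)
            return -EMSGSIZE;
        plen += sg_list[i].length;
        /* Flit 1: lkey in the high half, length in the low half. */
        *flitp++ = cpu_to_be64(((u64)sg_list[i].lkey << 32) |
                               sg_list[i].length);
        /* Flit 2: the 64-bit address. */
        *flitp++ = cpu_to_be64(sg_list[i].addr);
    }
    return 0;
}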
/drivers/infiniband/hw/i40iw/
i40iw_verbs.c
2180 static void i40iw_copy_sg_list(struct i40iw_sge *sg_list, struct ib_sge *sgl, int num_sges) in i40iw_copy_sg_list() argument
2185 sg_list[i].tag_off = sgl[i].addr; in i40iw_copy_sg_list()
2186 sg_list[i].len = sgl[i].length; in i40iw_copy_sg_list()
2187 sg_list[i].stag = sgl[i].lkey; in i40iw_copy_sg_list()
2239 info.op.inline_send.data = (void *)(unsigned long)ib_wr->sg_list[0].addr; in i40iw_post_send()
2240 info.op.inline_send.len = ib_wr->sg_list[0].length; in i40iw_post_send()
2244 info.op.send.sg_list = (struct i40iw_sge *)ib_wr->sg_list; in i40iw_post_send()
2259 info.op.inline_rdma_write.data = (void *)(unsigned long)ib_wr->sg_list[0].addr; in i40iw_post_send()
2260 info.op.inline_rdma_write.len = ib_wr->sg_list[0].length; in i40iw_post_send()
2263 info.op.inline_rdma_write.rem_addr.len = ib_wr->sg_list->length; in i40iw_post_send()
[all …]
i40iw_user.h
198 i40iw_sgl sg_list; member
208 i40iw_sgl sg_list; member
278 i40iw_sgl sg_list; member
/drivers/net/ethernet/ibm/ehea/
ehea_qmr.h
120 struct ehea_vsgentry sg_list[EHEA_MAX_WQE_SG_ENTRIES]; member
129 struct ehea_vsgentry sg_list[EHEA_MAX_WQE_SG_ENTRIES-1]; member
146 struct ehea_vsgentry sg_list[EHEA_MAX_WQE_SG_ENTRIES]; member
/drivers/xen/
efi.c
226 unsigned long count, unsigned long sg_list) in xen_efi_update_capsule() argument
236 efi_data(op).u.update_capsule.sg_list = sg_list; in xen_efi_update_capsule()
/drivers/scsi/qla4xxx/
ql4_bsg.c
63 sg_copy_from_buffer(bsg_job->reply_payload.sg_list, in qla4xxx_read_flash()
123 sg_copy_to_buffer(bsg_job->request_payload.sg_list, in qla4xxx_update_flash()
187 sg_copy_from_buffer(bsg_job->reply_payload.sg_list, in qla4xxx_get_acb_state()
258 sg_copy_from_buffer(bsg_job->reply_payload.sg_list, in qla4xxx_read_nvram()
322 sg_copy_to_buffer(bsg_job->request_payload.sg_list, in qla4xxx_update_nvram()
436 sg_copy_from_buffer(bsg_job->reply_payload.sg_list, in qla4xxx_bsg_get_acb()
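
ql4_bsg.c moves data between flat kernel buffers and the bsg scatterlists with sg_copy_to_buffer()/sg_copy_from_buffer() rather than mapping the payloads for DMA directly. A sketch of the two directions, with illustrative helper names:

/*
 * Sketch of the buffer<->scatterlist copies used throughout ql4_bsg.c
 * above. Helper names are illustrative.
 */
#include <linux/scatterlist.h>
#include <linux/bsg-lib.h>

/* Scatter a flat kernel buffer across the reply payload's sg_list. */
static void reply_from_flat_buf(struct bsg_job *job, void *buf, size_t len)
{
    sg_copy_from_buffer(job->reply_payload.sg_list,
                        job->reply_payload.sg_cnt, buf, len);
}

/* Gather the request payload's sg_list into a flat kernel buffer. */
static void request_to_flat_buf(struct bsg_job *job, void *buf, size_t len)
{
    sg_copy_to_buffer(job->request_payload.sg_list,
                      job->request_payload.sg_cnt, buf, len);
}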
/drivers/infiniband/hw/mlx4/
srq.c
350 scat[i].byte_count = cpu_to_be32(wr->sg_list[i].length); in mlx4_ib_post_srq_recv()
351 scat[i].lkey = cpu_to_be32(wr->sg_list[i].lkey); in mlx4_ib_post_srq_recv()
352 scat[i].addr = cpu_to_be64(wr->sg_list[i].addr); in mlx4_ib_post_srq_recv()
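
mlx4_ib_post_srq_recv() copies the caller's sg_list entries into the hardware receive queue, byte-swapping length, lkey, and address to big-endian. For context, a sketch of what a consumer posts; the DMA address and lkey are placeholders and post_one_recv() is a hypothetical helper:

/*
 * Sketch of the consumer side of the srq.c excerpt above: a receive WR
 * whose sg_list names one registered buffer. post_one_recv() is an
 * illustrative name.
 */
#include <rdma/ib_verbs.h>

static int post_one_recv(struct ib_srq *srq, u64 dma_addr, u32 len, u32 lkey)
{
    struct ib_sge sge = {
        .addr   = dma_addr,    /* DMA address of the receive buffer */
        .length = len,
        .lkey   = lkey,        /* from the registered MR */
    };
    struct ib_recv_wr wr = {
        .wr_id   = dma_addr,   /* echoed back in the completion */
        .sg_list = &sge,
        .num_sge = 1,
    };
    struct ib_recv_wr *bad_wr;

    /* The provider copies addr/length/lkey into its scatter entries,
     * byte-swapping to big-endian as the excerpt shows. */
    return ib_post_srq_recv(srq, &wr, &bad_wr);
}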
/drivers/infiniband/sw/rdmavt/
qp.c
484 struct rvt_sge *sge = &wqe->sg_list[i]; in rvt_swqe_has_lkey()
1589 wqe->sg_list[i] = wr->sg_list[i]; in rvt_post_recv()
1636 wr->sg_list[0].length < sizeof(u64) || in rvt_qp_valid_operation()
1637 wr->sg_list[0].addr & (sizeof(u64) - 1))) in rvt_qp_valid_operation()
1811 u32 length = wr->sg_list[i].length; in rvt_post_one_wr()
1815 ret = rvt_lkey_ok(rkt, pd, &wqe->sg_list[j], last_sge, in rvt_post_one_wr()
1816 &wr->sg_list[i], acc); in rvt_post_one_wr()
1821 last_sge = &wqe->sg_list[j]; in rvt_post_one_wr()
1878 struct rvt_sge *sge = &wqe->sg_list[--j]; in rvt_post_one_wr()
1984 wqe->sg_list[i] = wr->sg_list[i]; in rvt_post_srq_recv()
/drivers/infiniband/core/
mad.c
1048 mad_send_wr->sg_list[0].length = hdr_len; in ib_create_send_mad()
1049 mad_send_wr->sg_list[0].lkey = mad_agent->qp->pd->local_dma_lkey; in ib_create_send_mad()
1054 mad_send_wr->sg_list[1].length = data_len; in ib_create_send_mad()
1056 mad_send_wr->sg_list[1].length = mad_size - hdr_len; in ib_create_send_mad()
1058 mad_send_wr->sg_list[1].lkey = mad_agent->qp->pd->local_dma_lkey; in ib_create_send_mad()
1063 mad_send_wr->send_wr.wr.sg_list = mad_send_wr->sg_list; in ib_create_send_mad()
1179 sge = mad_send_wr->sg_list; in ib_send_mad()
2485 mad_send_wr->sg_list[0].length, DMA_TO_DEVICE); in ib_mad_send_done()
2488 mad_send_wr->sg_list[1].length, DMA_TO_DEVICE); in ib_mad_send_done()
2878 struct ib_sge sg_list; in ib_mad_post_receive_mads() local
[all …]
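
The MAD layer builds a two-element gather list per send: sg_list[0] covers the MAD header, sg_list[1] the payload, both under the PD's local_dma_lkey, and the array is hooked into send_wr.wr.sg_list. A sketch of the SGE setup alone, with illustrative parameter names:

/*
 * Sketch of the two-element gather list built in ib_create_send_mad()
 * above. build_mad_sges() and its parameters are illustrative.
 */
#include <rdma/ib_verbs.h>

static void build_mad_sges(struct ib_sge sg_list[2], struct ib_pd *pd,
                           u64 hdr_dma, u32 hdr_len,
                           u64 data_dma, u32 data_len)
{
    sg_list[0].addr = hdr_dma;      /* MAD header */
    sg_list[0].length = hdr_len;
    sg_list[0].lkey = pd->local_dma_lkey;

    sg_list[1].addr = data_dma;     /* MAD payload */
    sg_list[1].length = data_len;
    sg_list[1].lkey = pd->local_dma_lkey;
}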
rw.c
141 reg->wr.wr.sg_list = &reg->sge; in rdma_rw_init_mr_wrs()
208 rdma_wr->wr.sg_list = sge; in rdma_rw_init_map_wrs()
252 rdma_wr->wr.sg_list = &ctx->single.sge; in rdma_rw_init_single_wr()
421 ctx->sig->sig_wr.wr.sg_list = &ctx->sig->data.sge; in rdma_rw_ctx_signature_init()
438 rdma_wr->wr.sg_list = &ctx->sig->sig_sge; in rdma_rw_ctx_signature_init()
/drivers/scsi/
3w-9xxx.c
1350 …if (full_command_packet->command.newcommand.sg_list[0].length < scsi_bufflen(tw_dev->srb[request_i…
1351 …scsi_set_resid(cmd, scsi_bufflen(cmd) - full_command_packet->command.newcommand.sg_list[0].length); in twa_interrupt()
1395 newcommand->sg_list[0].address = TW_CPU_TO_SGL(dma_handle + sizeof(TW_Ioctl_Buf_Apache) - 1); in twa_load_sgl()
1396 newcommand->sg_list[0].length = cpu_to_le32(length); in twa_load_sgl()
1857 command_packet->sg_list[0].address = TW_CPU_TO_SGL(tw_dev->generic_buffer_phys[request_id]); in DEF_SCSI_QCMD()
1858 command_packet->sg_list[0].length = cpu_to_le32(TW_MIN_SGL_LENGTH); in DEF_SCSI_QCMD()
1865 command_packet->sg_list[i].address = TW_CPU_TO_SGL(sg_dma_address(sg)); in DEF_SCSI_QCMD()
1866 command_packet->sg_list[i].length = cpu_to_le32(sg_dma_len(sg)); in DEF_SCSI_QCMD()
1867 if (command_packet->sg_list[i].address & TW_CPU_TO_SGL(TW_ALIGNMENT_9000_SGL)) { in DEF_SCSI_QCMD()
1878 command_packet->sg_list[i].address = TW_CPU_TO_SGL(sglistarg[i].address); in DEF_SCSI_QCMD()
[all …]
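
The 3ware driver fills its hardware sg_list by walking the mapped scatterlist of each SCSI command with scsi_for_each_sg(), recording each segment's bus address and length (the real code also rejects misaligned addresses, per the TW_ALIGNMENT_9000_SGL check above). A sketch, with the entry type reconstructed from the excerpt and 32-bit addressing assumed for brevity:

/*
 * Sketch of the hardware SGL fill in the 3w-9xxx command setup above.
 * hw_sgl_entry stands in for the driver's TW_* SG entry type; the real
 * TW_CPU_TO_SGL() conversion may be 64-bit depending on configuration.
 */
#include <linux/types.h>
#include <scsi/scsi_cmnd.h>

struct hw_sgl_entry {
    __le32 address;
    __le32 length;
};

static void fill_hw_sgl(struct scsi_cmnd *cmd, struct hw_sgl_entry *sgl)
{
    struct scatterlist *sg;
    int i;

    scsi_for_each_sg(cmd, sg, scsi_sg_count(cmd), i) {
        sgl[i].address = cpu_to_le32(sg_dma_address(sg));
        sgl[i].length = cpu_to_le32(sg_dma_len(sg));
    }
}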
/drivers/firmware/efi/
runtime-wrappers.c
273 unsigned long sg_list) in virt_efi_update_capsule() argument
282 status = efi_call_virt(update_capsule, capsules, count, sg_list); in virt_efi_update_capsule()
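
In both EFI paths the sg_list argument is not a struct pointer but the physical address of a capsule block-descriptor list handed to UEFI's UpdateCapsule(); each descriptor gives the length and physical address of one data block, or a continuation pointer to the next descriptor array. A sketch of the descriptor layout as I understand the UEFI specification; the type name here is illustrative, not a Linux type:

/*
 * Sketch of the UEFI capsule block descriptor that the sg_list physical
 * address in xen_efi_update_capsule()/virt_efi_update_capsule() points
 * at. Layout per the UEFI UpdateCapsule() spec; name is illustrative.
 */
#include <linux/types.h>

struct capsule_block_descriptor {
    u64 length;    /* bytes in data_block; 0 means use the union's
                    * continuation_pointer (or end of list if it is 0) */
    union {
        u64 data_block;            /* physical address of capsule data */
        u64 continuation_pointer;  /* physical address of next array */
    };
};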
