
Searched refs:sg_list (Results 1 – 25 of 123) sorted by relevance


/drivers/virt/
fsl_hypervisor.c
153 struct fh_sg_list *sg_list = NULL; in ioctl_memcpy() local
246 sg_list = PTR_ALIGN(sg_list_unaligned, sizeof(struct fh_sg_list)); in ioctl_memcpy()
263 sg_list[0].source = page_to_phys(pages[0]) + lb_offset; in ioctl_memcpy()
264 sg_list[0].target = param.remote_paddr; in ioctl_memcpy()
266 sg_list[0].source = param.remote_paddr; in ioctl_memcpy()
267 sg_list[0].target = page_to_phys(pages[0]) + lb_offset; in ioctl_memcpy()
269 sg_list[0].size = min_t(uint64_t, param.count, PAGE_SIZE - lb_offset); in ioctl_memcpy()
271 remote_paddr = param.remote_paddr + sg_list[0].size; in ioctl_memcpy()
272 count = param.count - sg_list[0].size; in ioctl_memcpy()
277 sg_list[i].source = page_to_phys(pages[i]); in ioctl_memcpy()
[all …]
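
The fsl_hypervisor.c hits build a page-granular scatter-gather list for a hypervisor-mediated memcpy: entry 0 absorbs the unaligned head of the user buffer, and every later entry covers one page. A minimal sketch of that fill loop for the local-to-remote direction (the driver swaps source and target for the reverse copy); pages[], lb_offset, remote_paddr and count mirror the locals visible above:

#include <linux/fsl_hypervisor.h>	/* struct fh_sg_list */
#include <linux/kernel.h>		/* min_t() */
#include <linux/mm.h>			/* page_to_phys() */

/* Sketch: describe a pinned user buffer to the hypervisor, one entry
 * per page. Entry 0 is shortened by the buffer's in-page offset. */
static void fill_sg_list(struct fh_sg_list *sg_list, struct page **pages,
			 unsigned int num_pages, unsigned long lb_offset,
			 uint64_t remote_paddr, uint64_t count)
{
	unsigned int i;

	sg_list[0].source = page_to_phys(pages[0]) + lb_offset;
	sg_list[0].target = remote_paddr;
	sg_list[0].size = min_t(uint64_t, count, PAGE_SIZE - lb_offset);

	remote_paddr += sg_list[0].size;
	count -= sg_list[0].size;

	for (i = 1; i < num_pages; i++) {
		sg_list[i].source = page_to_phys(pages[i]);
		sg_list[i].target = remote_paddr;
		sg_list[i].size = min_t(uint64_t, count, PAGE_SIZE);
		remote_paddr += sg_list[i].size;
		count -= sg_list[i].size;
	}
}
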
/drivers/scsi/qla2xxx/
qla_bsg.c
53 bsg_job->request_payload.sg_list, in qla2x00_bsg_sp_free()
58 bsg_job->reply_payload.sg_list, in qla2x00_bsg_sp_free()
68 dma_unmap_sg(&ha->pdev->dev, bsg_job->request_payload.sg_list, in qla2x00_bsg_sp_free()
71 dma_unmap_sg(&ha->pdev->dev, bsg_job->reply_payload.sg_list, in qla2x00_bsg_sp_free()
207 bsg_job->reply_payload.sg_list, in qla24xx_proc_fcp_prio_cfg_cmd()
234 sg_copy_to_buffer(bsg_job->request_payload.sg_list, in qla24xx_proc_fcp_prio_cfg_cmd()
370 dma_map_sg(&ha->pdev->dev, bsg_job->request_payload.sg_list, in qla2x00_process_els()
373 dma_unmap_sg(&ha->pdev->dev, bsg_job->request_payload.sg_list, in qla2x00_process_els()
379 rsp_sg_cnt = dma_map_sg(&ha->pdev->dev, bsg_job->reply_payload.sg_list, in qla2x00_process_els()
382 dma_unmap_sg(&ha->pdev->dev, bsg_job->reply_payload.sg_list, in qla2x00_process_els()
[all …]
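
The qla_bsg.c hits follow the standard streaming-DMA discipline for bsg payloads: dma_map_sg() before the list is handed to hardware, dma_unmap_sg() with the original nents on completion and on every error path. A generic sketch of the pattern (device and direction are placeholders, not the qla2xxx specifics):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Sketch: map a request scatterlist for DMA, bail out cleanly when the
 * mapping fails, and always unmap with the original nents rather than
 * the (possibly smaller) count dma_map_sg() returned. */
static int map_and_submit(struct device *dev, struct scatterlist *sgl,
			  int nents)
{
	int mapped = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);

	if (!mapped)
		return -ENOMEM;		/* nothing mapped, nothing to undo */

	/* ... hand the mapped list to the adapter and wait ... */

	dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
	return 0;
}
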
/drivers/ufs/core/
ufs_bsg.c
57 sg_copy_to_buffer(job->request_payload.sg_list, in ufs_bsg_alloc_desc_buffer()
73 struct scatterlist *sg_list = NULL; in ufs_bsg_exec_advanced_rpmb_req() local
111 sg_cnt = dma_map_sg(hba->host->dma_dev, payload->sg_list, payload->sg_cnt, dir); in ufs_bsg_exec_advanced_rpmb_req()
114 sg_list = payload->sg_list; in ufs_bsg_exec_advanced_rpmb_req()
120 &rpmb_reply->ehs_rsp, sg_cnt, sg_list, dir); in ufs_bsg_exec_advanced_rpmb_req()
123 dma_unmap_sg(hba->host->dma_dev, payload->sg_list, payload->sg_cnt, dir); in ufs_bsg_exec_advanced_rpmb_req()
166 sg_copy_from_buffer(job->request_payload.sg_list, in ufs_bsg_request()
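
ufs_bsg.c mixes two idioms: dma_map_sg()/dma_unmap_sg() around the RPMB data phase, and sg_copy_to_buffer()/sg_copy_from_buffer() to stage small descriptors through a linear bounce buffer. A sketch of the staging half, with hypothetical parameter names:

#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Sketch: gather a request descriptor out of a bsg scatterlist into a
 * linear buffer, then scatter a reply back the other way. */
static int stage_descriptor(struct scatterlist *req_sgl, unsigned int req_nents,
			    struct scatterlist *rep_sgl, unsigned int rep_nents,
			    size_t len)
{
	void *buf = kzalloc(len, GFP_KERNEL);

	if (!buf)
		return -ENOMEM;

	/* scatterlist -> linear buffer */
	sg_copy_to_buffer(req_sgl, req_nents, buf, len);

	/* ... interpret the descriptor, build the reply in buf ... */

	/* linear buffer -> scatterlist */
	sg_copy_from_buffer(rep_sgl, rep_nents, buf, len);

	kfree(buf);
	return 0;
}
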
/drivers/infiniband/hw/erdma/
erdma_qp.c
221 bytes += send_wr->sg_list[i].length; in fill_inline_data()
225 remain_size = send_wr->sg_list[i].length; in fill_inline_data()
232 (void *)(uintptr_t)send_wr->sg_list[i].addr + in fill_inline_data()
273 bytes += send_wr->sg_list[i].length; in fill_sgl()
274 memcpy(sgl + sgl_offset, &send_wr->sg_list[i], in fill_sgl()
358 read_sqe->length = cpu_to_le32(send_wr->sg_list[0].length); in erdma_push_one_sqe()
359 read_sqe->sink_stag = cpu_to_le32(send_wr->sg_list[0].lkey); in erdma_push_one_sqe()
361 cpu_to_le32(lower_32_bits(send_wr->sg_list[0].addr)); in erdma_push_one_sqe()
363 cpu_to_le32(upper_32_bits(send_wr->sg_list[0].addr)); in erdma_push_one_sqe()
369 sge->length = cpu_to_le32(send_wr->sg_list[0].length); in erdma_push_one_sqe()
[all …]
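
The erdma fill_inline_data()/fill_sgl() hits show the two ways a send WR's sg_list gets consumed: a small payload is memcpy'd inline into the SQE, treating each sg_list[i].addr as a kernel virtual address, while a normal send copies the ib_sge entries verbatim into the hardware SGL. A condensed sketch of the inline gather, assuming the caller has already checked the total against the device's inline limit:

#include <rdma/ib_verbs.h>
#include <linux/string.h>

/* Sketch: flatten an IB_SEND_INLINE work request into one buffer. For
 * inline sends the addr field carries a VA, not a DMA address, so no
 * lkey translation is involved. */
static u32 gather_inline(void *dst, const struct ib_send_wr *wr)
{
	u32 bytes = 0;
	int i;

	for (i = 0; i < wr->num_sge; i++) {
		memcpy(dst + bytes,
		       (void *)(uintptr_t)wr->sg_list[i].addr,
		       wr->sg_list[i].length);
		bytes += wr->sg_list[i].length;
	}
	return bytes;
}
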
/drivers/tee/amdtee/
call.c
315 cmd->sg_list.count = count; in handle_map_shmem()
320 cmd->sg_list.buf[i].hi_addr = upper_32_bits(paddr); in handle_map_shmem()
321 cmd->sg_list.buf[i].low_addr = lower_32_bits(paddr); in handle_map_shmem()
322 cmd->sg_list.buf[i].size = start[i].size; in handle_map_shmem()
323 cmd->sg_list.size += cmd->sg_list.buf[i].size; in handle_map_shmem()
326 cmd->sg_list.buf[i].hi_addr); in handle_map_shmem()
328 cmd->sg_list.buf[i].low_addr); in handle_map_shmem()
329 pr_debug("buf[%d]:size = 0x%x\n", i, cmd->sg_list.buf[i].size); in handle_map_shmem()
330 pr_debug("list size = 0x%x\n", cmd->sg_list.size); in handle_map_shmem()
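
handle_map_shmem() splits each buffer's physical address across a hi/lo pair of 32-bit command fields and keeps a running total of the mapped size. A minimal sketch using the standard upper_32_bits()/lower_32_bits() helpers; the struct below is a hypothetical stand-in for the amdtee command layout, not the driver's real type:

#include <linux/kernel.h>	/* upper_32_bits(), lower_32_bits() */
#include <linux/types.h>

/* Hypothetical stand-in for the driver's sg_list command block. */
struct sg_cmd_sketch {
	u32 count;
	u32 size;
	struct { u32 hi_addr; u32 low_addr; u32 size; } buf[16];
};

/* Sketch: pack one 64-bit physical address into the hi/lo pair and
 * accumulate the list's total size. */
static void add_sg_entry(struct sg_cmd_sketch *sg, unsigned int i,
			 u64 paddr, u32 size)
{
	sg->buf[i].hi_addr = upper_32_bits(paddr);
	sg->buf[i].low_addr = lower_32_bits(paddr);
	sg->buf[i].size = size;
	sg->size += size;
}
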
/drivers/infiniband/sw/rdmavt/
qp.c
626 struct rvt_sge *sge = &wqe->sg_list[i]; in rvt_swqe_has_lkey()
1078 sz = struct_size(swq, sg_list, init_attr->cap.max_send_sge); in rvt_create_qp()
1829 wqe->sg_list[i].addr = wr->sg_list[i].addr; in rvt_post_recv()
1830 wqe->sg_list[i].length = wr->sg_list[i].length; in rvt_post_recv()
1831 wqe->sg_list[i].lkey = wr->sg_list[i].lkey; in rvt_post_recv()
1878 wr->sg_list[0].length < sizeof(u64) || in rvt_qp_valid_operation()
1879 wr->sg_list[0].addr & (sizeof(u64) - 1))) in rvt_qp_valid_operation()
2051 u32 length = wr->sg_list[i].length; in rvt_post_one_wr()
2055 ret = rvt_lkey_ok(rkt, pd, &wqe->sg_list[j], last_sge, in rvt_post_one_wr()
2056 &wr->sg_list[i], acc); in rvt_post_one_wr()
[all …]
rc.c
164 ss->sge = wqe->sg_list[0]; in rvt_restart_sge()
165 ss->sg_list = wqe->sg_list + 1; in rvt_restart_sge()
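
rvt_restart_sge() illustrates a convention used throughout rdmavt (and by the qib and hfi1 hits below): the SGE currently being consumed is cached by value in ->sge, while ->sg_list points at the remaining, untouched entries (wqe->sg_list + 1). Restarting a WQE just reloads that pair. A self-contained sketch with simplified stand-ins for rvt_sge/rvt_sge_state:

#include <linux/types.h>

/* Simplified stand-ins for rvt_sge and rvt_sge_state (sketch only). */
struct sge_sketch { u64 vaddr; u32 length; };
struct sge_state_sketch {
	struct sge_sketch sge;		/* entry in progress: mutable copy */
	struct sge_sketch *sg_list;	/* untouched tail: shared alias */
	u8 num_sge;
};

/* Sketch: (re)point progress state at the start of a WQE's SGE array. */
static void restart_sge(struct sge_state_sketch *ss,
			struct sge_sketch *wqe_sg_list, u8 num_sge)
{
	ss->sge = wqe_sg_list[0];	/* copy: safe to advance in place */
	ss->sg_list = wqe_sg_list + 1;	/* alias: never modified */
	ss->num_sge = num_sge;
}
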
/drivers/dma/
imx-dma.c
159 struct scatterlist *sg_list; member
790 kfree(imxdmac->sg_list); in imxdma_free_chan_resources()
791 imxdmac->sg_list = NULL; in imxdma_free_chan_resources()
867 kfree(imxdmac->sg_list); in imxdma_prep_dma_cyclic()
869 imxdmac->sg_list = kcalloc(periods + 1, in imxdma_prep_dma_cyclic()
871 if (!imxdmac->sg_list) in imxdma_prep_dma_cyclic()
874 sg_init_table(imxdmac->sg_list, periods); in imxdma_prep_dma_cyclic()
877 sg_assign_page(&imxdmac->sg_list[i], NULL); in imxdma_prep_dma_cyclic()
878 imxdmac->sg_list[i].offset = 0; in imxdma_prep_dma_cyclic()
879 imxdmac->sg_list[i].dma_address = dma_addr; in imxdma_prep_dma_cyclic()
[all …]
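
imx-dma.c shows a less common scatterlist use: a driver-owned array, kcalloc'd with one spare slot (which the driver later chains back to the head to close the ring), initialized with sg_init_table() and filled with raw DMA addresses and no backing pages, describing a cyclic buffer period by period. A sketch of that setup, assuming an already-mapped buffer; buf_addr, period_len and periods are hypothetical parameters:

#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Sketch: build a scatterlist over a pre-mapped cyclic DMA buffer.
 * Entries point into the buffer directly, so no pages are assigned. */
static struct scatterlist *build_cyclic_sgl(dma_addr_t buf_addr,
					    size_t period_len,
					    unsigned int periods)
{
	struct scatterlist *sgl;
	unsigned int i;

	sgl = kcalloc(periods + 1, sizeof(*sgl), GFP_ATOMIC);
	if (!sgl)
		return NULL;

	sg_init_table(sgl, periods);

	for (i = 0; i < periods; i++) {
		sg_assign_page(&sgl[i], NULL);
		sgl[i].offset = 0;
		sgl[i].dma_address = buf_addr + i * period_len;
		sg_dma_len(&sgl[i]) = period_len;
	}
	return sgl;
}
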
/drivers/net/ethernet/marvell/octeon_ep/
octep_ctrl_net.c
47 msg->sg_list[0].msg = buf; in init_send_req()
48 msg->sg_list[0].sz = msg->hdr.s.sz; in init_send_req()
287 msg->sg_list[0].msg, in process_mbox_resp()
303 req = (struct octep_ctrl_net_f2h_req *)msg->sg_list[0].msg; in process_mbox_notify()
340 msg.sg_list[0].sz = msg_sz; in octep_ctrl_net_recv_fw_messages()
341 msg.sg_list[0].msg = &data; in octep_ctrl_net_recv_fw_messages()
/drivers/scsi/aacraid/
commctrl.c
494 void *sg_list[HBA_MAX_SG_EMBEDDED]; in aac_send_raw_srb() local
519 memset(sg_list, 0, sizeof(sg_list)); /* cleanup may take issue */ in aac_send_raw_srb()
553 if (user_srbcmd->sg.count > ARRAY_SIZE(sg_list)) { in aac_send_raw_srb()
681 sg_list[i] = p; // save so we can clean up later in aac_send_raw_srb()
742 sg_list[i] = p; // save so we can clean up later in aac_send_raw_srb()
797 sg_list[i] = p; // save so we can clean up later in aac_send_raw_srb()
854 sg_list[i] = p; // save so we can clean up later in aac_send_raw_srb()
895 sg_list[i] = p; // save so we can clean up later in aac_send_raw_srb()
935 if (copy_to_user(sg_user[i], sg_list[i], sg_count[i])) { in aac_send_raw_srb()
990 kfree(sg_list[i]); in aac_send_raw_srb()
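
commctrl.c keeps a fixed array of kernel bounce-buffer pointers purely so one exit path can copy results back to user space and free whatever was allocated, no matter how far setup got before failing. The shape of that pattern, stripped of the SRB details (MAX_SG stands in for HBA_MAX_SG_EMBEDDED):

#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>

#define MAX_SG 32	/* stand-in for HBA_MAX_SG_EMBEDDED */

/* Sketch: allocate per-SG bounce buffers, recording every pointer so
 * cleanup works from any failure point. kfree(NULL) is a no-op, so
 * the still-zero slots need no special casing. */
static int run_with_bounce_buffers(void __user **sg_user, u32 *sg_count,
				   u32 count)
{
	void *sg_list[MAX_SG];
	int rc = 0;
	u32 i;

	if (count > MAX_SG)
		return -EINVAL;

	memset(sg_list, 0, sizeof(sg_list));	/* cleanup may run early */

	for (i = 0; i < count; i++) {
		void *p = kmalloc(sg_count[i], GFP_KERNEL);

		if (!p) {
			rc = -ENOMEM;
			goto cleanup;
		}
		sg_list[i] = p;		/* save so we can clean up later */
		/* ... fill p and hand it to the adapter ... */
	}

	for (i = 0; i < count; i++)
		if (copy_to_user(sg_user[i], sg_list[i], sg_count[i]))
			rc = -EFAULT;
cleanup:
	for (i = 0; i < MAX_SG; i++)
		kfree(sg_list[i]);
	return rc;
}
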
/drivers/infiniband/hw/qedr/
qedr_roce_cm.c
110 qp->rqe_wr_id[qp->rq.gsi_cons].sg_list[0].length = in qedr_ll2_complete_rx_packet()
407 send_size += swr->sg_list[i].length; in qedr_gsi_build_header()
532 packet->payload[i].baddr = swr->sg_list[i].addr; in qedr_gsi_build_packet()
533 packet->payload[i].len = swr->sg_list[i].length; in qedr_gsi_build_packet()
640 wr->sg_list[0].addr, in qedr_gsi_post_recv()
641 wr->sg_list[0].length, in qedr_gsi_post_recv()
653 qp->rqe_wr_id[qp->rq.prod].sg_list[0] = wr->sg_list[0]; in qedr_gsi_post_recv()
691 wc[i].byte_len = qp->rqe_wr_id[qp->rq.cons].sg_list[0].length; in qedr_gsi_poll_cq()
/drivers/infiniband/hw/qib/
qib_ud.c
171 ssge.sg_list = swqe->sg_list + 1; in qib_ud_loopback()
172 ssge.sge = *swqe->sg_list; in qib_ud_loopback()
184 *sge = *ssge.sg_list++; in qib_ud_loopback()
318 qp->s_sge.sge = wqe->sg_list[0]; in qib_make_ud_req()
319 qp->s_sge.sg_list = wqe->sg_list + 1; in qib_make_ud_req()
qib_uc.c
99 qp->s_sge.sge = wqe->sg_list[0]; in qib_make_uc_req()
100 qp->s_sge.sg_list = wqe->sg_list + 1; in qib_make_uc_req()
419 qp->r_sge.sg_list = NULL; in qib_uc_rcv()
/drivers/infiniband/hw/bnxt_re/
ib_verbs.c
118 struct bnxt_qplib_sge *sg_list, int num) in bnxt_re_build_sgl() argument
123 sg_list[i].addr = ib_sg_list[i].addr; in bnxt_re_build_sgl()
124 sg_list[i].lkey = ib_sg_list[i].lkey; in bnxt_re_build_sgl()
125 sg_list[i].size = ib_sg_list[i].length; in bnxt_re_build_sgl()
126 total += sg_list[i].size; in bnxt_re_build_sgl()
1876 bnxt_re_build_sgl(wr->sg_list, wqe.sg_list, wr->num_sge); in bnxt_re_post_srq_recv()
2359 wqe->sg_list[i].addr = wqe->sg_list[i - 1].addr; in bnxt_re_build_qp1_send_v2()
2360 wqe->sg_list[i].lkey = wqe->sg_list[i - 1].lkey; in bnxt_re_build_qp1_send_v2()
2361 wqe->sg_list[i].size = wqe->sg_list[i - 1].size; in bnxt_re_build_qp1_send_v2()
2385 wqe->sg_list[0].addr = sge.addr; in bnxt_re_build_qp1_send_v2()
[all …]
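
bnxt_re_build_sgl() is a plain format conversion: the generic struct ib_sge (addr/length/lkey) is rewritten into the device's native SGE layout, which names the length field "size". A sketch reconstructed from the fragment above; qplib_sge_sketch stands in for the driver's struct bnxt_qplib_sge:

#include <rdma/ib_verbs.h>

/* Stand-in for the driver's native SGE layout (sketch only). */
struct qplib_sge_sketch { u64 addr; u32 lkey; u32 size; };

/* Sketch: translate verbs SGEs into the device format and return the
 * total payload length. */
static int build_sgl(struct ib_sge *ib_sg_list,
		     struct qplib_sge_sketch *sg_list, int num)
{
	int total = 0, i;

	for (i = 0; i < num; i++) {
		sg_list[i].addr = ib_sg_list[i].addr;
		sg_list[i].lkey = ib_sg_list[i].lkey;
		sg_list[i].size = ib_sg_list[i].length;
		total += sg_list[i].size;
	}
	return total;
}
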
/drivers/infiniband/hw/cxgb4/
qp.c
424 if ((plen + wr->sg_list[i].length) > max) in build_immd()
426 srcp = (u8 *)(unsigned long)wr->sg_list[i].addr; in build_immd()
427 plen += wr->sg_list[i].length; in build_immd()
428 rem = wr->sg_list[i].length; in build_immd()
454 struct fw_ri_isgl *isglp, struct ib_sge *sg_list, in build_isgl() argument
468 if ((plen + sg_list[i].length) < plen) in build_isgl()
470 plen += sg_list[i].length; in build_isgl()
471 *flitp = cpu_to_be64(((u64)sg_list[i].lkey << 32) | in build_isgl()
472 sg_list[i].length); in build_isgl()
475 *flitp = cpu_to_be64(sg_list[i].addr); in build_isgl()
[all …]
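
build_isgl() carries a guard worth copying: "(plen + sg_list[i].length) < plen" detects unsigned wraparound while summing SGE lengths that ultimately come from user space, and build_immd() adds the complementary bound check against a hard maximum. Both checks together, as a sketch:

#include <linux/errno.h>
#include <rdma/ib_verbs.h>

/* Sketch: sum SGE lengths defensively. The first test catches u32
 * overflow (the running sum wrapped past zero); the second enforces
 * the device's payload limit. */
static int sum_sge_lengths(const struct ib_sge *sg_list, int num_sge,
			   u32 max, u32 *total)
{
	u32 plen = 0;
	int i;

	for (i = 0; i < num_sge; i++) {
		if (plen + sg_list[i].length < plen)
			return -EMSGSIZE;	/* sum wrapped: overflow */
		plen += sg_list[i].length;
		if (plen > max)
			return -EMSGSIZE;	/* over the device limit */
	}
	*total = plen;
	return 0;
}
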
/drivers/firmware/efi/
runtime-wrappers.c
102 unsigned long sg_list; member
283 args->UPDATE_CAPSULE.sg_list); in efi_call_rts()
524 unsigned long sg_list) in virt_efi_update_capsule() argument
533 status = efi_queue_work(UPDATE_CAPSULE, capsules, count, sg_list); in virt_efi_update_capsule()
/drivers/net/ethernet/marvell/octeontx2/nic/
otx2_txrx.h
74 struct sg_list { struct
96 struct sg_list *sg; argument
otx2_txrx.c
104 static void otx2_dma_unmap_skb_frags(struct otx2_nic *pfvf, struct sg_list *sg) in otx2_dma_unmap_skb_frags()
120 struct sg_list *sg; in otx2_xdp_snd_pkt_handler()
143 struct sg_list *sg; in otx2_snd_pkt_handler()
803 struct sg_list *sg = &sq->sg[sqe]; in otx2_dma_map_tso_skb()
835 struct sg_list *sg = &sq->sg[sqe]; in otx2_tso_frag_dma_addr()
850 struct sg_list *list, int *offset) in otx2_sqe_tso_add_sg()
887 struct sg_list list; in otx2_sq_append_tso()
1259 struct sg_list *sg; in otx2_cleanup_tx_cqes()
1324 struct sg_list *sg; in otx2_free_pending_sqe()
/drivers/infiniband/core/
uverbs_std_types_mr.c
54 struct ib_sge *sg_list; in UVERBS_HANDLER() local
77 sg_list = uverbs_attr_get_alloced_ptr(attrs, in UVERBS_HANDLER()
79 return ib_dev->ops.advise_mr(pd, advice, flags, sg_list, num_sge, in UVERBS_HANDLER()
mad.c
879 mad_send_wr->sg_list[0].length = hdr_len; in ib_create_send_mad()
880 mad_send_wr->sg_list[0].lkey = mad_agent->qp->pd->local_dma_lkey; in ib_create_send_mad()
885 mad_send_wr->sg_list[1].length = data_len; in ib_create_send_mad()
887 mad_send_wr->sg_list[1].length = mad_size - hdr_len; in ib_create_send_mad()
889 mad_send_wr->sg_list[1].lkey = mad_agent->qp->pd->local_dma_lkey; in ib_create_send_mad()
894 mad_send_wr->send_wr.wr.sg_list = mad_send_wr->sg_list; in ib_create_send_mad()
1009 sge = mad_send_wr->sg_list; in ib_send_mad()
2297 mad_send_wr->sg_list[0].length, DMA_TO_DEVICE); in ib_mad_send_done()
2300 mad_send_wr->sg_list[1].length, DMA_TO_DEVICE); in ib_mad_send_done()
2674 struct ib_sge sg_list; in ib_mad_post_receive_mads() local
[all …]
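
mad.c builds every MAD send around a two-entry SGE array: sg_list[0] covers the MAD header, sg_list[1] the data, and both use the PD's local_dma_lkey so no memory registration is needed. A sketch of that wiring with hypothetical DMA-address/length parameters:

#include <rdma/ib_verbs.h>

/* Sketch: describe a MAD as header + payload and point the UD send WR
 * at the pair. */
static void init_mad_sge(struct ib_sge sg_list[2], struct ib_ud_wr *wr,
			 struct ib_pd *pd, u64 hdr_dma, u32 hdr_len,
			 u64 data_dma, u32 data_len)
{
	sg_list[0].addr = hdr_dma;
	sg_list[0].length = hdr_len;
	sg_list[0].lkey = pd->local_dma_lkey;

	sg_list[1].addr = data_dma;
	sg_list[1].length = data_len;
	sg_list[1].lkey = pd->local_dma_lkey;

	wr->wr.sg_list = sg_list;
	wr->wr.num_sge = 2;
}
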
/drivers/net/ethernet/ibm/ehea/
ehea_qmr.h
106 struct ehea_vsgentry sg_list[EHEA_MAX_WQE_SG_ENTRIES]; member
115 struct ehea_vsgentry sg_list[EHEA_MAX_WQE_SG_ENTRIES-1]; member
132 struct ehea_vsgentry sg_list[EHEA_MAX_WQE_SG_ENTRIES]; member
/drivers/infiniband/hw/mlx5/
odp.c
1725 struct ib_sge *sg_list, u32 num_sge) in init_prefetch_work() argument
1735 mr = get_prefetchable_mr(pd, advice, sg_list[i].lkey); in init_prefetch_work()
1740 work->frags[i].io_virt = sg_list[i].addr; in init_prefetch_work()
1741 work->frags[i].length = sg_list[i].length; in init_prefetch_work()
1750 u32 pf_flags, struct ib_sge *sg_list, in mlx5_ib_prefetch_sg_list() argument
1760 mr = get_prefetchable_mr(pd, advice, sg_list[i].lkey); in mlx5_ib_prefetch_sg_list()
1763 ret = pagefault_mr(mr, sg_list[i].addr, sg_list[i].length, in mlx5_ib_prefetch_sg_list()
1778 u32 flags, struct ib_sge *sg_list, u32 num_sge) in mlx5_ib_advise_mr_prefetch() argument
1791 return mlx5_ib_prefetch_sg_list(pd, advice, pf_flags, sg_list, in mlx5_ib_advise_mr_prefetch()
1798 rc = init_prefetch_work(pd, advice, pf_flags, work, sg_list, num_sge); in mlx5_ib_advise_mr_prefetch()
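
In mlx5's ODP code the sg_list handed to advise_mr() is not a data buffer: each ib_sge simply names a VA range (addr/length) plus the lkey of the MR to prefault. The synchronous path resolves each lkey and faults the range in directly; the deferred path snapshots the same triples into a work item first. A sketch of the synchronous loop; get_mr() and fault_range() are hypothetical stand-ins for the driver's helpers:

#include <linux/errno.h>
#include <rdma/ib_verbs.h>

struct mr_sketch;	/* opaque stand-in for the driver's MR type */
struct mr_sketch *get_mr(struct ib_pd *pd, u32 lkey);
int fault_range(struct mr_sketch *mr, u64 addr, u32 length);

/* Sketch: walk an advise_mr() SGE list, resolving each lkey to an MR
 * and prefaulting the VA range it describes. */
static int prefetch_sg_list(struct ib_pd *pd, const struct ib_sge *sg_list,
			    u32 num_sge)
{
	u32 i;

	for (i = 0; i < num_sge; i++) {
		struct mr_sketch *mr = get_mr(pd, sg_list[i].lkey);
		int ret;

		if (!mr)
			return -ENOENT;
		ret = fault_range(mr, sg_list[i].addr, sg_list[i].length);
		if (ret < 0)
			return ret;
	}
	return 0;
}
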
/drivers/scsi/mpi3mr/
mpi3mr_app.c
160 sg_copy_from_buffer(job->request_payload.sg_list, in mpi3mr_enable_logdata()
191 sg_copy_from_buffer(job->request_payload.sg_list, in mpi3mr_get_logdata()
242 sg_copy_to_buffer(job->request_payload.sg_list, in mpi3mr_bsg_pel_enable()
332 sg_copy_from_buffer(job->request_payload.sg_list, in mpi3mr_get_all_tgt_info()
369 sg_copy_from_buffer(job->request_payload.sg_list, in mpi3mr_get_all_tgt_info()
394 sg_copy_from_buffer(job->request_payload.sg_list, in mpi3mr_get_change_count()
427 sg_copy_to_buffer(job->request_payload.sg_list, in mpi3mr_bsg_adp_reset()
497 sg_copy_from_buffer(job->request_payload.sg_list, in mpi3mr_bsg_populate_adpinfo()
1019 sg_copy_to_buffer(job->request_payload.sg_list, in mpi3mr_bsg_process_mpt_cmds()
1396 sg_copy_from_buffer(job->reply_payload.sg_list, in mpi3mr_bsg_process_mpt_cmds()
/drivers/scsi/qla4xxx/
ql4_bsg.c
62 sg_copy_from_buffer(bsg_job->reply_payload.sg_list, in qla4xxx_read_flash()
122 sg_copy_to_buffer(bsg_job->request_payload.sg_list, in qla4xxx_update_flash()
186 sg_copy_from_buffer(bsg_job->reply_payload.sg_list, in qla4xxx_get_acb_state()
257 sg_copy_from_buffer(bsg_job->reply_payload.sg_list, in qla4xxx_read_nvram()
321 sg_copy_to_buffer(bsg_job->request_payload.sg_list, in qla4xxx_update_nvram()
435 sg_copy_from_buffer(bsg_job->reply_payload.sg_list, in qla4xxx_bsg_get_acb()
/drivers/infiniband/hw/hfi1/
uc.c
112 qp->s_sge.sge = wqe->sg_list[0]; in hfi1_make_uc_req()
113 qp->s_sge.sg_list = wqe->sg_list + 1; in hfi1_make_uc_req()
447 qp->r_sge.sg_list = NULL; in hfi1_uc_rcv()
