Searched refs:sg_cnt (Results 1 – 25 of 62) sorted by relevance

/drivers/scsi/qla2xxx/
qla_bsg.c
54 bsg_job->request_payload.sg_cnt, DMA_TO_DEVICE); in qla2x00_bsg_sp_free()
59 bsg_job->reply_payload.sg_cnt, DMA_FROM_DEVICE); in qla2x00_bsg_sp_free()
69 bsg_job->request_payload.sg_cnt, DMA_TO_DEVICE); in qla2x00_bsg_sp_free()
72 bsg_job->reply_payload.sg_cnt, DMA_FROM_DEVICE); in qla2x00_bsg_sp_free()
208 bsg_job->reply_payload.sg_cnt, ha->fcp_prio_cfg, in qla24xx_proc_fcp_prio_cfg_cmd()
235 bsg_job->request_payload.sg_cnt, ha->fcp_prio_cfg, in qla24xx_proc_fcp_prio_cfg_cmd()
320 if (bsg_job->request_payload.sg_cnt > 1 || in qla2x00_process_els()
321 bsg_job->reply_payload.sg_cnt > 1) { in qla2x00_process_els()
325 bsg_job->request_payload.sg_cnt, in qla2x00_process_els()
326 bsg_job->reply_payload.sg_cnt); in qla2x00_process_els()
[all …]
qla_edif.c
546 bsg_job->request_payload.sg_cnt, &appstart, in qla_edif_app_start()
651 bsg_job->reply_payload.sg_cnt, in qla_edif_app_start()
678 bsg_job->request_payload.sg_cnt, &appstop, in qla_edif_app_stop()
763 bsg_job->request_payload.sg_cnt, &appplogiok, in qla_edif_app_authok()
853 bsg_job->reply_payload.sg_cnt, in qla_edif_app_authok()
878 bsg_job->request_payload.sg_cnt, &appplogifail, in qla_edif_app_authfail()
952 bsg_job->request_payload.sg_cnt, &app_req, in qla_edif_app_getfcinfo()
1053 bsg_job->reply_payload.sg_cnt, in qla_edif_app_getfcinfo()
1079 bsg_job->request_payload.sg_cnt, &app_req, in qla_edif_app_getstats()
1126 bsg_job->reply_payload.sg_cnt, app_reply, in qla_edif_app_getstats()
[all …]
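
Many of the qla2xxx bsg hits above use sg_cnt with the scatterlist copy helpers: data moves between the job's request/reply scatterlists and a driver-local buffer, with the payload's sg_cnt giving the number of list entries (other hits pass the same count to dma_map_sg()/dma_unmap_sg()). A minimal sketch of the copy pattern follows; the function name, buffers and lengths are hypothetical, not qla2xxx code:

#include <linux/bsg-lib.h>
#include <linux/scatterlist.h>

/* Copy the bsg request payload into a local buffer, and a local reply
 * buffer back out to the caller, covering sg_cnt entries each way.
 */
static void example_copy_bsg_payload(struct bsg_job *job,
				     void *req_buf, size_t req_len,
				     void *rsp_buf, size_t rsp_len)
{
	/* request direction: caller's data -> driver buffer */
	sg_copy_to_buffer(job->request_payload.sg_list,
			  job->request_payload.sg_cnt,
			  req_buf, req_len);

	/* reply direction: driver buffer -> caller's data */
	sg_copy_from_buffer(job->reply_payload.sg_list,
			    job->reply_payload.sg_cnt,
			    rsp_buf, rsp_len);
}

The same request/reply-payload sg_cnt usage recurs in the mpi3mr, qla4xxx, smartpqi and libsas hits further down.
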
/drivers/infiniband/core/
rw.c
93 u32 sg_cnt, u32 offset) in rdma_rw_init_one_mr() argument
97 u32 nents = min(sg_cnt, pages_per_mr); in rdma_rw_init_one_mr()
125 u32 port_num, struct scatterlist *sg, u32 sg_cnt, u32 offset, in rdma_rw_init_mr_wrs() argument
133 ctx->nr_ops = DIV_ROUND_UP(sg_cnt, pages_per_mr); in rdma_rw_init_mr_wrs()
142 u32 nents = min(sg_cnt, pages_per_mr); in rdma_rw_init_mr_wrs()
144 ret = rdma_rw_init_one_mr(qp, port_num, reg, sg, sg_cnt, in rdma_rw_init_mr_wrs()
174 sg_cnt -= nents; in rdma_rw_init_mr_wrs()
196 struct scatterlist *sg, u32 sg_cnt, u32 offset, in rdma_rw_init_map_wrs() argument
204 ctx->nr_ops = DIV_ROUND_UP(sg_cnt, max_sge); in rdma_rw_init_map_wrs()
206 ctx->map.sges = sge = kcalloc(sg_cnt, sizeof(*sge), GFP_KERNEL); in rdma_rw_init_map_wrs()
[all …]
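
The rw.c hits show how rdma_rw splits a scatterlist across memory registrations: DIV_ROUND_UP(sg_cnt, pages_per_mr) operations, each consuming min(sg_cnt, pages_per_mr) entries. A standalone sketch of just that arithmetic (function name hypothetical; assumes pages_per_mr > 0):

#include <linux/bug.h>
#include <linux/kernel.h>	/* DIV_ROUND_UP(), min() */
#include <linux/types.h>

static u32 example_count_mr_ops(u32 sg_cnt, u32 pages_per_mr)
{
	u32 nr_ops = DIV_ROUND_UP(sg_cnt, pages_per_mr);
	u32 chunks = 0;

	/* consuming the list one chunk at a time visits exactly nr_ops chunks */
	while (sg_cnt) {
		u32 nents = min(sg_cnt, pages_per_mr);

		/* one MR registration would cover these 'nents' entries */
		sg_cnt -= nents;
		chunks++;
	}

	WARN_ON(chunks != nr_ops);
	return nr_ops;
}
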
/drivers/ufs/core/
ufs_bsg.c
58 job->request_payload.sg_cnt, descp, in ufs_bsg_alloc_desc_buffer()
75 int sg_cnt = 0; in ufs_bsg_exec_advanced_rpmb_req() local
108 if (!payload || !payload->payload_len || !payload->sg_cnt) in ufs_bsg_exec_advanced_rpmb_req()
111 sg_cnt = dma_map_sg(hba->host->dma_dev, payload->sg_list, payload->sg_cnt, dir); in ufs_bsg_exec_advanced_rpmb_req()
112 if (unlikely(!sg_cnt)) in ufs_bsg_exec_advanced_rpmb_req()
120 &rpmb_reply->ehs_rsp, sg_cnt, sg_list, dir); in ufs_bsg_exec_advanced_rpmb_req()
123 dma_unmap_sg(hba->host->dma_dev, payload->sg_list, payload->sg_cnt, dir); in ufs_bsg_exec_advanced_rpmb_req()
167 job->request_payload.sg_cnt, in ufs_bsg_request()
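
The ufs_bsg.c hits illustrate an easy-to-miss DMA rule: dma_map_sg() returns the number of entries actually mapped (possibly fewer than passed in, 0 on failure), and the mapped count is what gets described to the hardware, but dma_unmap_sg() must be called with the original entry count. A minimal sketch of that shape (the function name and the -ENOMEM choice are hypothetical):

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

static int example_map_issue_unmap(struct device *dev,
				   struct scatterlist *sg_list, int nents,
				   enum dma_data_direction dir)
{
	int sg_cnt;

	sg_cnt = dma_map_sg(dev, sg_list, nents, dir);
	if (!sg_cnt)
		return -ENOMEM;

	/* ...describe 'sg_cnt' mapped entries to the device here... */

	/* unmap with the ORIGINAL nents, not the returned sg_cnt */
	dma_unmap_sg(dev, sg_list, nents, dir);
	return 0;
}
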
/drivers/scsi/snic/
snic_fwint.h
94 __le16 sg_cnt; member
101 u16 sg_cnt, ulong ctx) in snic_io_hdr_enc() argument
108 hdr->sg_cnt = cpu_to_le16(sg_cnt); in snic_io_hdr_enc()
180 __le16 sg_cnt; member
254 __le16 sg_cnt; /* Number of SG Elements */ member
snic_io.c
220 snic_req_init(struct snic *snic, int sg_cnt) in snic_req_init() argument
225 typ = (sg_cnt <= SNIC_REQ_CACHE_DFLT_SGL) ? in snic_req_init()
246 if (sg_cnt == 0) in snic_req_init()
249 rqi->req_len += (sg_cnt * sizeof(struct snic_sg_desc)); in snic_req_init()
251 if (sg_cnt > atomic64_read(&snic->s_stats.io.max_sgl)) in snic_req_init()
252 atomic64_set(&snic->s_stats.io.max_sgl, sg_cnt); in snic_req_init()
254 SNIC_BUG_ON(sg_cnt > SNIC_MAX_SG_DESC_CNT); in snic_req_init()
255 atomic64_inc(&snic->s_stats.io.sgl_cnt[sg_cnt - 1]); in snic_req_init()
531 fn, line, req->hdr.cmnd_id, req->hdr.sg_cnt, req->hdr.status, in snic_dump_desc()
snic_res.h
16 u32 data_len, u16 sg_cnt, ulong sgl_addr, in snic_icmnd_init() argument
19 snic_io_hdr_enc(&req->hdr, SNIC_REQ_ICMND, 0, cmnd_id, host_id, sg_cnt, in snic_icmnd_init()
snic_scsi.c
152 int sg_cnt) in snic_queue_icmnd_req() argument
162 if (sg_cnt) { in snic_queue_icmnd_req()
166 for_each_sg(scsi_sglist(sc), sg, sg_cnt, i) { in snic_queue_icmnd_req()
204 sg_cnt, in snic_queue_icmnd_req()
231 int sg_cnt = 0; in snic_issue_scsi_req() local
240 sg_cnt = scsi_dma_map(sc); in snic_issue_scsi_req()
241 if (sg_cnt < 0) { in snic_issue_scsi_req()
243 sc->cmnd[0], sg_cnt, CMD_STATE(sc)); in snic_issue_scsi_req()
251 rqi = snic_req_init(snic, sg_cnt); in snic_issue_scsi_req()
270 ret = snic_queue_icmnd_req(snic, rqi, sc, sg_cnt); in snic_issue_scsi_req()
[all …]
snic_io.h
96 snic_req_init(struct snic *, int sg_cnt);
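
The snic hits combine three steps: scsi_dma_map() yields sg_cnt (negative on failure, 0 for commands with no data), each mapped entry becomes a hardware SG descriptor, and the count is encoded little-endian into the request header. A rough sketch of that flow; the descriptor layout and function name are hypothetical (not snic's snic_sg_desc), the caller must provide room for scsi_sg_count(sc) descriptors, and scsi_dma_unmap() is assumed to happen on completion:

#include <asm/byteorder.h>
#include <linux/scatterlist.h>
#include <linux/types.h>
#include <scsi/scsi_cmnd.h>

struct example_sg_desc {
	__le64 addr;
	__le32 len;
	__le32 rsvd;
};

static int example_build_sgl(struct scsi_cmnd *sc,
			     struct example_sg_desc *desc,
			     __le16 *hdr_sg_cnt)
{
	struct scatterlist *sg;
	int sg_cnt, i;

	sg_cnt = scsi_dma_map(sc);
	if (sg_cnt < 0)
		return sg_cnt;

	/* one descriptor per mapped scatterlist entry */
	for_each_sg(scsi_sglist(sc), sg, sg_cnt, i) {
		desc[i].addr = cpu_to_le64(sg_dma_address(sg));
		desc[i].len  = cpu_to_le32(sg_dma_len(sg));
		desc[i].rsvd = 0;
	}

	/* wire format, cf. snic_io_hdr_enc() above */
	*hdr_sg_cnt = cpu_to_le16(sg_cnt);
	return sg_cnt;
}
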
/drivers/infiniband/ulp/rtrs/
rtrs-clt.c
393 if (req->sg_cnt) { in complete_rdma_req()
438 req->sg_cnt, req->dir); in complete_rdma_req()
949 struct scatterlist *sg, size_t sg_cnt, in rtrs_clt_init_req() argument
960 req->sg_cnt = sg_cnt; in rtrs_clt_init_req()
983 struct scatterlist *sg, size_t sg_cnt, in rtrs_clt_get_req() argument
990 sg, sg_cnt, data_len, dir); in rtrs_clt_get_req()
1007 fail_req->sglist, fail_req->sg_cnt, in rtrs_clt_get_copy_req()
1098 if (req->sg_cnt) { in rtrs_clt_write_req()
1100 req->sg_cnt, req->dir); in rtrs_clt_write_req()
1125 req->sg_cnt, req->dir); in rtrs_clt_write_req()
[all …]
rtrs-srv.c
205 size_t sg_cnt; in rdma_write_sg() local
213 sg_cnt = le16_to_cpu(id->rd_msg->sg_cnt); in rdma_write_sg()
215 if (sg_cnt != 1) in rdma_write_sg()
354 size_t sg_cnt; in send_io_resp_imm() local
358 sg_cnt = le16_to_cpu(rd_msg->sg_cnt); in send_io_resp_imm()
361 if (sg_cnt) { in send_io_resp_imm()
519 if (status || id->dir == WRITE || !id->rd_msg->sg_cnt) in rtrs_srv_resp_rdma()
832 rsp->sg_cnt = cpu_to_le16(srv_path->mrs_num); in process_info_req()
1021 if (msg->sg_cnt != 1 && msg->sg_cnt != 0) { in process_read()
rtrs-pri.h
245 __le16 sg_cnt; member
273 __le16 sg_cnt; member
rtrs.h
104 struct scatterlist *sg, unsigned int sg_cnt);
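
On the rtrs server side, sg_cnt arrives as a __le16 protocol field and is converted and sanity-checked before use (the rdma_write_sg() hit above checks for exactly one segment). A small sketch of that check; the message struct here is hypothetical, not rtrs' actual wire format:

#include <asm/byteorder.h>
#include <linux/errno.h>
#include <linux/types.h>

struct example_rd_msg {
	__le16 sg_cnt;
	/* descriptor array follows on the wire */
};

static int example_check_rd_msg(const struct example_rd_msg *msg)
{
	u16 sg_cnt = le16_to_cpu(msg->sg_cnt);

	/* this sketch, like the path above, handles a single remote segment */
	if (sg_cnt != 1)
		return -EINVAL;

	return 0;
}
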
/drivers/staging/rts5208/
rtsx_transport.c
321 int sg_cnt, i, resid; in rtsx_transfer_sglist_adma_partial() local
356 sg_cnt = dma_map_sg(&rtsx->pci->dev, sg, num_sg, dma_dir); in rtsx_transfer_sglist_adma_partial()
369 for (i = *index; i < sg_cnt; i++) { in rtsx_transfer_sglist_adma_partial()
395 if ((i == sg_cnt - 1) || !resid) in rtsx_transfer_sglist_adma_partial()
525 int sg_cnt, j; in rtsx_transfer_sglist_adma() local
528 sg_cnt = buf_cnt % (HOST_SG_TBL_BUF_LEN / 8); in rtsx_transfer_sglist_adma()
530 sg_cnt = HOST_SG_TBL_BUF_LEN / 8; in rtsx_transfer_sglist_adma()
533 for (j = 0; j < sg_cnt; j++) { in rtsx_transfer_sglist_adma()
542 if (j == (sg_cnt - 1)) in rtsx_transfer_sglist_adma()
583 sg_ptr += sg_cnt; in rtsx_transfer_sglist_adma()
/drivers/nvme/target/
io-cmd-bdev.c
242 unsigned int sg_cnt = req->sg_cnt; in nvmet_bdev_execute_rw() local
256 if (!req->sg_cnt) { in nvmet_bdev_execute_rw()
281 bio = bio_alloc(req->ns->bdev, bio_max_segs(sg_cnt), opf, in nvmet_bdev_execute_rw()
293 for_each_sg(req->sg, sg, req->sg_cnt, i) { in nvmet_bdev_execute_rw()
307 bio = bio_alloc(req->ns->bdev, bio_max_segs(sg_cnt), in nvmet_bdev_execute_rw()
316 sg_cnt--; in nvmet_bdev_execute_rw()
passthru.c
266 if (req->sg_cnt > BIO_MAX_VECS) in nvmet_passthru_map_sg()
274 bio = bio_alloc(NULL, bio_max_segs(req->sg_cnt), req_op(rq), in nvmet_passthru_map_sg()
279 for_each_sg(req->sg, sg, req->sg_cnt, i) { in nvmet_passthru_map_sg()
287 blk_rq_bio_prep(rq, bio, req->sg_cnt); in nvmet_passthru_map_sg()
329 if (req->sg_cnt) { in nvmet_passthru_execute_cmd()
io-cmd-file.c
122 ssize_t nr_bvec = req->sg_cnt; in nvmet_file_execute_io()
141 for_each_sg(req->sg, sg, req->sg_cnt, i) { in nvmet_file_execute_io()
220 ssize_t nr_bvec = req->sg_cnt; in nvmet_file_execute_rw()
225 if (!req->sg_cnt || !nr_bvec) { in nvmet_file_execute_rw()
zns.c
546 int sg_cnt; in nvmet_bdev_execute_zone_append() local
552 if (!req->sg_cnt) { in nvmet_bdev_execute_zone_append()
574 bio = bio_alloc(req->ns->bdev, req->sg_cnt, opf, GFP_KERNEL); in nvmet_bdev_execute_zone_append()
583 for_each_sg(req->sg, sg, req->sg_cnt, sg_cnt) { in nvmet_bdev_execute_zone_append()
core.c
98 if (sg_pcopy_from_buffer(req->sg, req->sg_cnt, buf, len, off) != len) { in nvmet_copy_to_sgl()
107 if (sg_pcopy_to_buffer(req->sg, req->sg_cnt, buf, len, off) != len) { in nvmet_copy_from_sgl()
116 if (sg_zero_buffer(req->sg, req->sg_cnt, len, off) != len) { in nvmet_zero_sgl()
944 req->sg_cnt = 0; in nvmet_req_init()
1041 req->sg = pci_p2pmem_alloc_sgl(p2p_dev, &req->sg_cnt, in nvmet_req_alloc_p2pmem_sgls()
1078 &req->sg_cnt); in nvmet_req_alloc_sgls()
1112 req->sg_cnt = 0; in nvmet_req_free_sgls()
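
Several of the nvme target hits build block I/O from the request scatterlist: the bio is sized from sg_cnt via bio_max_segs() and each scatterlist entry is added as a page with bio_add_page(). The sketch below shows just that idiom under simplifying assumptions; it returns NULL where nvmet_bdev_execute_rw() would allocate and chain a fresh bio, and the function name and parameters are hypothetical:

#include <linux/bio.h>
#include <linux/blkdev.h>
#include <linux/gfp.h>
#include <linux/scatterlist.h>

static struct bio *example_sgl_to_bio(struct block_device *bdev,
				      blk_opf_t opf, sector_t sect,
				      struct scatterlist *sgl, int sg_cnt)
{
	struct scatterlist *sg;
	struct bio *bio;
	int i;

	/* bio_max_segs() caps the segment count at BIO_MAX_VECS */
	bio = bio_alloc(bdev, bio_max_segs(sg_cnt), opf, GFP_KERNEL);
	bio->bi_iter.bi_sector = sect;

	for_each_sg(sgl, sg, sg_cnt, i) {
		if (bio_add_page(bio, sg_page(sg), sg->length, sg->offset)
		    != sg->length) {
			bio_put(bio);
			return NULL;	/* real code chains a new bio here */
		}
	}

	return bio;
}
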
/drivers/net/wireless/broadcom/brcm80211/brcmfmac/
bcmsdh.c
335 struct mmc_command *mc, int sg_cnt, int req_sz, in mmc_submit_one() argument
342 md->sg_len = sg_cnt; in mmc_submit_one()
382 unsigned int req_sz, func_blk_sz, sg_cnt, sg_data_sz, pkt_offset; in brcmf_sdiod_sglist_rw() local
446 sg_cnt = 0; in brcmf_sdiod_sglist_rw()
459 sg_cnt++; in brcmf_sdiod_sglist_rw()
464 if (req_sz >= max_req_sz || sg_cnt >= max_seg_cnt) { in brcmf_sdiod_sglist_rw()
466 sg_cnt, req_sz, func_blk_sz, in brcmf_sdiod_sglist_rw()
471 sg_cnt = 0; in brcmf_sdiod_sglist_rw()
476 if (sg_cnt) in brcmf_sdiod_sglist_rw()
478 sg_cnt, req_sz, func_blk_sz, in brcmf_sdiod_sglist_rw()
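
brcmf_sdiod_sglist_rw() above uses sg_cnt as a running batch counter: entries accumulate until either the byte budget or the host's segment limit is reached, then the batch is submitted and the counters reset. A generic sketch of that batching loop (the submit callback and the fixed per-buffer size are hypothetical simplifications):

/* Split nr_bufs equally sized buffers into submissions bounded by
 * max_req_sz bytes and max_seg_cnt segments, flushing any tail batch.
 */
static int example_submit_in_batches(unsigned int nr_bufs, unsigned int buf_sz,
				     unsigned int max_req_sz,
				     unsigned int max_seg_cnt,
				     int (*submit)(unsigned int sg_cnt,
						   unsigned int req_sz))
{
	unsigned int sg_cnt = 0, req_sz = 0, i;
	int ret;

	for (i = 0; i < nr_bufs; i++) {
		sg_cnt++;
		req_sz += buf_sz;

		if (req_sz >= max_req_sz || sg_cnt >= max_seg_cnt) {
			ret = submit(sg_cnt, req_sz);
			if (ret)
				return ret;
			sg_cnt = 0;
			req_sz = 0;
		}
	}

	return sg_cnt ? submit(sg_cnt, req_sz) : 0;
}
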
/drivers/i2c/busses/
i2c-qup.c
221 unsigned int sg_cnt; member
669 ret = qup_sg_set_buf(&qup->brx.sg[qup->brx.sg_cnt++], in qup_i2c_bam_make_desc()
676 ret = qup_sg_set_buf(&qup->brx.sg[qup->brx.sg_cnt++], in qup_i2c_bam_make_desc()
686 ret = qup_sg_set_buf(&qup->btx.sg[qup->btx.sg_cnt++], in qup_i2c_bam_make_desc()
700 ret = qup_sg_set_buf(&qup->btx.sg[qup->btx.sg_cnt++], in qup_i2c_bam_make_desc()
707 ret = qup_sg_set_buf(&qup->btx.sg[qup->btx.sg_cnt++], in qup_i2c_bam_make_desc()
728 u32 tx_cnt = qup->btx.sg_cnt, rx_cnt = qup->brx.sg_cnt; in qup_i2c_bam_schedule_desc()
831 qup->btx.sg_cnt = 0; in qup_i2c_bam_clear_tag_buffers()
832 qup->brx.sg_cnt = 0; in qup_i2c_bam_clear_tag_buffers()
881 if (qup->btx.sg_cnt > qup->max_xfer_sg_len || in qup_i2c_bam_xfer()
[all …]
/drivers/scsi/smartpqi/
smartpqi_sas_transport.c
495 job->reply_payload.sg_cnt, &parameters->request, in pqi_build_csmi_smp_passthru_buffer()
506 job->reply_payload.sg_cnt, &smp_buf->parameters.response, in pqi_build_sas_smp_handler_reply()
543 if (job->request_payload.sg_cnt > 1 || job->reply_payload.sg_cnt > 1) { in pqi_sas_smp_handler()
/drivers/scsi/mpi3mr/
mpi3mr_app.c
161 job->request_payload.sg_cnt, in mpi3mr_enable_logdata()
192 job->request_payload.sg_cnt, in mpi3mr_get_logdata()
243 job->request_payload.sg_cnt, in mpi3mr_bsg_pel_enable()
333 job->request_payload.sg_cnt, in mpi3mr_get_all_tgt_info()
370 job->request_payload.sg_cnt, in mpi3mr_get_all_tgt_info()
395 job->request_payload.sg_cnt, in mpi3mr_get_change_count()
428 job->request_payload.sg_cnt, in mpi3mr_bsg_adp_reset()
498 job->request_payload.sg_cnt, in mpi3mr_bsg_populate_adpinfo()
1020 job->request_payload.sg_cnt, in mpi3mr_bsg_process_mpt_cmds()
1397 job->reply_payload.sg_cnt, in mpi3mr_bsg_process_mpt_cmds()
/drivers/scsi/qla4xxx/
ql4_bsg.c
63 bsg_job->reply_payload.sg_cnt, in qla4xxx_read_flash()
123 bsg_job->request_payload.sg_cnt, flash, length); in qla4xxx_update_flash()
187 bsg_job->reply_payload.sg_cnt, in qla4xxx_get_acb_state()
258 bsg_job->reply_payload.sg_cnt, in qla4xxx_read_nvram()
322 bsg_job->request_payload.sg_cnt, nvram, len); in qla4xxx_update_nvram()
436 bsg_job->reply_payload.sg_cnt, in qla4xxx_bsg_get_acb()
/drivers/scsi/libsas/
sas_host_smp.c
242 job->request_payload.sg_cnt, req_data, in sas_smp_host_handler()
344 job->reply_payload.sg_cnt, resp_data, in sas_smp_host_handler()
