/drivers/crypto/rockchip/

  rk3288_crypto_skcipher.c
    line 21:  struct scatterlist *sgs, *sgd;    in rk_cipher_need_fallback() (local)
    line 28:  sgs = req->src;    in rk_cipher_need_fallback()
    line 30:  while (sgs && sgd) {    in rk_cipher_need_fallback()
    line 31:  if (!IS_ALIGNED(sgs->offset, sizeof(u32))) {    in rk_cipher_need_fallback()
    line 37:  stodo = min(len, sgs->length);    in rk_cipher_need_fallback()
    line 49:  sgs = sg_next(sgs);    in rk_cipher_need_fallback()
    line 302: struct scatterlist *sgs,    in crypto_dma_start() (argument)
    line 305: CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, sg_dma_address(sgs));    in crypto_dma_start()
    line 318: struct scatterlist *sgs, *sgd;    in rk_cipher_run() (local)
    line 337: sgs = areq->src;    in rk_cipher_run()
    [all …]

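The rk_cipher_need_fallback() matches above show a common driver pattern: walk the
source and destination scatterlists in lockstep and fall back to a software
implementation as soon as a segment violates the DMA engine's alignment rules. A
minimal sketch of that walk under assumed names (sgs_need_fallback() is
illustrative, not the driver's code):

    #include <linux/kernel.h>
    #include <linux/scatterlist.h>

    /* Illustrative: report whether any src/dst segment is unusable for a
     * DMA engine that requires 32-bit aligned offsets and lengths. */
    static bool sgs_need_fallback(struct scatterlist *sgs,
                                  struct scatterlist *sgd, unsigned int len)
    {
        while (sgs && sgd && len) {
            unsigned int stodo, dtodo;

            if (!IS_ALIGNED(sgs->offset, sizeof(u32)) ||
                !IS_ALIGNED(sgd->offset, sizeof(u32)))
                return true;

            stodo = min(len, sgs->length);
            dtodo = min(len, sgd->length);
            if (!IS_ALIGNED(stodo, sizeof(u32)) ||
                !IS_ALIGNED(dtodo, sizeof(u32)))
                return true;

            len -= min(stodo, dtodo);
            sgs = sg_next(sgs);
            sgd = sg_next(sgd);
        }
        return false;
    }
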
/drivers/crypto/virtio/

  virtio_crypto_algs.c
    line 120: struct scatterlist outhdr, key_sg, inhdr, *sgs[3];    in virtio_crypto_alg_skcipher_init_session() (local)
    line 162: sgs[num_out++] = &outhdr;    in virtio_crypto_alg_skcipher_init_session()
    line 166: sgs[num_out++] = &key_sg;    in virtio_crypto_alg_skcipher_init_session()
    line 170: sgs[num_out + num_in++] = &inhdr;    in virtio_crypto_alg_skcipher_init_session()
    line 172: err = virtio_crypto_ctrl_vq_request(vcrypto, sgs, num_out, num_in, vc_ctrl_req);    in virtio_crypto_alg_skcipher_init_session()
    line 199: struct scatterlist outhdr, status_sg, *sgs[2];    in virtio_crypto_alg_skcipher_close_session() (local)
    line 228: sgs[num_out++] = &outhdr;    in virtio_crypto_alg_skcipher_close_session()
    line 232: sgs[num_out + num_in++] = &status_sg;    in virtio_crypto_alg_skcipher_close_session()
    line 234: err = virtio_crypto_ctrl_vq_request(vcrypto, sgs, num_out, num_in, vc_ctrl_req);    in virtio_crypto_alg_skcipher_close_session()
    line 339: struct scatterlist outhdr, iv_sg, status_sg, **sgs;    in __virtio_crypto_skcipher_do_req() (local)
    [all …]

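Every virtio entry in this listing follows the same calling convention: fill a small
array of scatterlist pointers with the device-readable ("out") entries first and the
device-writable ("in") entries after them, then pass both counts to
virtqueue_add_sgs(). A hedged sketch of a session-setup control request in that
style; send_ctrl_request() and its buffers are stand-ins, not the real virtio-crypto
code:

    #include <linux/gfp.h>
    #include <linux/scatterlist.h>
    #include <linux/virtio.h>

    /* Illustrative: two entries the device reads (header + key), one it
     * writes (the response), indexed with the num_out/num_in idiom seen
     * in the matches above. */
    static int send_ctrl_request(struct virtqueue *vq,
                                 void *hdr, unsigned int hdr_len,
                                 void *key, unsigned int key_len,
                                 void *resp, unsigned int resp_len,
                                 void *token)
    {
        struct scatterlist outhdr, key_sg, inhdr, *sgs[3];
        unsigned int num_out = 0, num_in = 0;

        sg_init_one(&outhdr, hdr, hdr_len);
        sgs[num_out++] = &outhdr;

        sg_init_one(&key_sg, key, key_len);
        sgs[num_out++] = &key_sg;

        /* "in" entries always come after the "out" entries */
        sg_init_one(&inhdr, resp, resp_len);
        sgs[num_out + num_in++] = &inhdr;

        return virtqueue_add_sgs(vq, sgs, num_out, num_in, token, GFP_ATOMIC);
    }

The buffers handed to sg_init_one() must be DMA-able, which is why the real drivers
keep headers and status bytes in allocated per-request structures rather than on the
stack.
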
  virtio_crypto_akcipher_algs.c
    line 108: struct scatterlist outhdr_sg, key_sg, inhdr_sg, *sgs[3];    in virtio_crypto_alg_akcipher_init_session() (local)
    line 134: sgs[num_out++] = &outhdr_sg;    in virtio_crypto_alg_akcipher_init_session()
    line 137: sgs[num_out++] = &key_sg;    in virtio_crypto_alg_akcipher_init_session()
    line 140: sgs[num_out + num_in++] = &inhdr_sg;    in virtio_crypto_alg_akcipher_init_session()
    line 142: err = virtio_crypto_ctrl_vq_request(vcrypto, sgs, num_out, num_in, vc_ctrl_req);    in virtio_crypto_alg_akcipher_init_session()
    line 166: struct scatterlist outhdr_sg, inhdr_sg, *sgs[2];    in virtio_crypto_alg_akcipher_close_session() (local)
    line 192: sgs[num_out++] = &outhdr_sg;    in virtio_crypto_alg_akcipher_close_session()
    line 195: sgs[num_out + num_in++] = &inhdr_sg;    in virtio_crypto_alg_akcipher_close_session()
    line 197: err = virtio_crypto_ctrl_vq_request(vcrypto, sgs, num_out, num_in, vc_ctrl_req);    in virtio_crypto_alg_akcipher_close_session()
    line 224: struct scatterlist *sgs[4], outhdr_sg, inhdr_sg, srcdata_sg, dstdata_sg;    in __virtio_crypto_akcipher_do_req() (local)
    [all …]

  virtio_crypto_common.h
    line 105: struct scatterlist **sgs;    (member)
    line 147: int virtio_crypto_ctrl_vq_request(struct virtio_crypto *vcrypto, struct scatterlist *sgs[],

  virtio_crypto_core.c
    line 21:  kfree(vc_req->sgs);    in virtcrypto_clear_request()
    line 51:  int virtio_crypto_ctrl_vq_request(struct virtio_crypto *vcrypto, struct scatterlist *sgs[],    in virtio_crypto_ctrl_vq_request() (argument)
    line 61:  err = virtqueue_add_sgs(vcrypto->ctrl_vq, sgs, out_sgs, in_sgs, vc_ctrl_req, GFP_ATOMIC);    in virtio_crypto_ctrl_vq_request()
    line 483: kfree(vc_req->sgs);    in virtcrypto_free_unused_reqs()

/drivers/nvdimm/

  nd_virtio.c
    line 43: struct scatterlist *sgs[2], sg, ret;    in virtio_pmem_flush() (local)
    line 58: sgs[0] = &sg;    in virtio_pmem_flush()
    line 60: sgs[1] = &ret;    in virtio_pmem_flush()
    line 69: while ((err = virtqueue_add_sgs(vpmem->req_vq, sgs, 1, 1, req_data,    in virtio_pmem_flush()

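The line 69 match is truncated mid-call, but it points at a detail this path handles
explicitly: virtqueue_add_sgs() returns -ENOSPC when the ring has no free
descriptors. A sketch of the retry idiom, with locking omitted and the wait queue
assumed to be woken from the virtqueue's completion callback (all names here are
illustrative):

    #include <linux/scatterlist.h>
    #include <linux/virtio.h>
    #include <linux/wait.h>

    /* Illustrative: queue a one-out/one-in request, sleeping while the
     * ring is full and retrying once descriptors are freed. */
    static int queue_flush_request(struct virtqueue *vq,
                                   wait_queue_head_t *wq,
                                   struct scatterlist *sgs[2], void *req)
    {
        int err;

        while ((err = virtqueue_add_sgs(vq, sgs, 1, 1, req,
                                        GFP_ATOMIC)) == -ENOSPC)
            wait_event(*wq, vq->num_free > 0);

        if (!err)
            virtqueue_kick(vq);
        return err;
    }
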
/drivers/gpu/drm/virtio/

  virtgpu_vq.c
    line 321: struct scatterlist **sgs,    in virtio_gpu_queue_ctrl_sgs() (argument)
    line 360: ret = virtqueue_add_sgs(vq, sgs, outcnt, incnt, vbuf, GFP_ATOMIC);    in virtio_gpu_queue_ctrl_sgs()
    line 377: struct scatterlist *sgs[3], vcmd, vout, vresp;    in virtio_gpu_queue_fenced_ctrl_buffer() (local)
    line 384: sgs[outcnt] = &vcmd;    in virtio_gpu_queue_fenced_ctrl_buffer()
    line 400: sgs[outcnt] = sgt->sgl;    in virtio_gpu_queue_fenced_ctrl_buffer()
    line 404: sgs[outcnt] = &vout;    in virtio_gpu_queue_fenced_ctrl_buffer()
    line 413: sgs[outcnt + incnt] = &vresp;    in virtio_gpu_queue_fenced_ctrl_buffer()
    line 417: ret = virtio_gpu_queue_ctrl_sgs(vgdev, vbuf, fence, elemcnt, sgs, outcnt,    in virtio_gpu_queue_fenced_ctrl_buffer()
    line 453: struct scatterlist *sgs[1], ccmd;    in virtio_gpu_queue_cursor() (local)
    line 463: sgs[0] = &ccmd;    in virtio_gpu_queue_cursor()
    [all …]

/drivers/scsi/

  virtio_scsi.c
    line 423: struct scatterlist *sgs[6], req, resp;    in __virtscsi_add_cmd() (local)
    line 438: sgs[out_num++] = &req;    in __virtscsi_add_cmd()
    line 444: sgs[out_num++] = scsi_prot_sglist(sc);    in __virtscsi_add_cmd()
    line 445: sgs[out_num++] = out->sgl;    in __virtscsi_add_cmd()
    line 450: sgs[out_num + in_num++] = &resp;    in __virtscsi_add_cmd()
    line 456: sgs[out_num + in_num++] = scsi_prot_sglist(sc);    in __virtscsi_add_cmd()
    line 457: sgs[out_num + in_num++] = in->sgl;    in __virtscsi_add_cmd()
    line 460: return virtqueue_add_sgs(vq, sgs, out_num, in_num, cmd, GFP_ATOMIC);    in __virtscsi_add_cmd()

/drivers/virtio/

  virtio_ring.c
    line 416:  struct scatterlist *sgs[],    in virtqueue_add_split() (argument)
    line 482:  for (sg = sgs[n]; sg; sg = sg_next(sg)) {    in virtqueue_add_split()
    line 495:  for (sg = sgs[n]; sg; sg = sg_next(sg)) {    in virtqueue_add_split()
    line 980:  struct scatterlist *sgs[],    in virtqueue_add_indirect_packed() (argument)
    line 1010: for (sg = sgs[n]; sg; sg = sg_next(sg)) {    in virtqueue_add_indirect_packed()
    line 1094: struct scatterlist *sgs[],    in virtqueue_add_packed() (argument)
    line 1125: err = virtqueue_add_indirect_packed(vq, sgs, total_sg, out_sgs,    in virtqueue_add_packed()
    line 1157: for (sg = sgs[n]; sg; sg = sg_next(sg)) {    in virtqueue_add_packed()
    line 1704: struct scatterlist *sgs[],    in virtqueue_add() (argument)
    line 1714: return vq->packed_ring ? virtqueue_add_packed(_vq, sgs, total_sg,    in virtqueue_add()
    [all …]

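On the implementation side, virtio_ring.c treats sgs[] as an array of chains: the
first out_sgs entries are device-readable, the next in_sgs entries are
device-writable, and each entry may itself span several segments linked via
sg_next(), which is what the for-loops at lines 482, 495, 1010, and 1157 iterate
over. A sketch of the corresponding descriptor-counting walk (illustrative, not the
kernel's exact code):

    #include <linux/scatterlist.h>

    /* Illustrative: how many ring descriptors a request needs, walking
     * every chain in sgs[], out entries first, in entries after them. */
    static unsigned int count_total_sg(struct scatterlist *sgs[],
                                       unsigned int out_sgs,
                                       unsigned int in_sgs)
    {
        struct scatterlist *sg;
        unsigned int n, total = 0;

        for (n = 0; n < out_sgs + in_sgs; n++)
            for (sg = sgs[n]; sg; sg = sg_next(sg))
                total++;

        return total;
    }
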
  virtio_mem.c
    line 837: struct scatterlist *sgs[2], sg_req, sg_resp;    in virtio_mem_send_request() (local)
    line 846: sgs[0] = &sg_req;    in virtio_mem_send_request()
    line 850: sgs[1] = &sg_resp;    in virtio_mem_send_request()
    line 852: rc = virtqueue_add_sgs(vm->vq, sgs, 1, 1, vm, GFP_KERNEL);    in virtio_mem_send_request()

/drivers/spi/

  spi-mxs.c
    line 171: const int sgs = DIV_ROUND_UP(len, desc_len);    in mxs_spi_txrx_dma() (local)
    line 184: dma_xfer = kcalloc(sgs, sizeof(*dma_xfer), GFP_KERNEL);    in mxs_spi_txrx_dma()
    line 200: for (sg_count = 0; sg_count < sgs; sg_count++) {    in mxs_spi_txrx_dma()
    line 208: if ((sg_count + 1 == sgs) && (flags & TXRX_DEASSERT_CS))    in mxs_spi_txrx_dma()

  spi.c
    line 876: int sgs;    in spi_map_buf() (local)
    line 885: sgs = DIV_ROUND_UP(len + offset_in_page(buf), desc_len);    in spi_map_buf()
    line 888: sgs = DIV_ROUND_UP(len, desc_len);    in spi_map_buf()
    line 893: ret = sg_alloc_table(sgt, sgs, GFP_KERNEL);    in spi_map_buf()
    line 898: for (i = 0; i < sgs; i++) {    in spi_map_buf()

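In the SPI code, unlike the virtio callers above, sgs is a plain count: the number
of scatterlist entries needed to cover a buffer given the controller's maximum
segment length, with vmalloc'd buffers additionally split at page boundaries. A
small sketch of that computation (the helper name and its vmalloced flag are
assumptions):

    #include <linux/kernel.h>
    #include <linux/mm.h>

    /* Illustrative: entries needed to map `len` bytes at `buf` when one
     * entry covers at most `desc_len` bytes; for vmalloc'd memory the
     * first page's sub-page offset counts against the total. */
    static int sg_entries_needed(const void *buf, size_t len,
                                 size_t desc_len, bool vmalloced)
    {
        if (vmalloced)
            return DIV_ROUND_UP(len + offset_in_page(buf), desc_len);
        return DIV_ROUND_UP(len, desc_len);
    }

For example, a 10 KiB vmalloc'd buffer that starts 512 bytes into a page, with a
desc_len of 4096, needs DIV_ROUND_UP(10240 + 512, 4096) = 3 entries.
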
/drivers/block/

  virtio_blk.c
    line 95:  struct scatterlist hdr, status, *sgs[3];    in virtblk_add_req() (local)
    line 99:  sgs[num_out++] = &hdr;    in virtblk_add_req()
    line 103: sgs[num_out++] = data_sg;    in virtblk_add_req()
    line 105: sgs[num_out + num_in++] = data_sg;    in virtblk_add_req()
    line 109: sgs[num_out + num_in++] = &status;    in virtblk_add_req()
    line 111: return virtqueue_add_sgs(vq, sgs, num_out, num_in, vbr, GFP_ATOMIC);    in virtblk_add_req()

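The virtio_blk matches at lines 103 and 105 make the out/in split concrete: the same
data scatterlist is device-readable for a write but device-writable for a read, so
it is appended to whichever region of sgs[] matches the transfer direction. A hedged
sketch (add_blk_req() and its parameters stand in for the driver's per-request
state):

    #include <linux/gfp.h>
    #include <linux/scatterlist.h>
    #include <linux/virtio.h>

    /* Illustrative: header always out, status always in; the data chain
     * joins the out region for writes, the in region for reads. */
    static int add_blk_req(struct virtqueue *vq, struct scatterlist *hdr,
                           struct scatterlist *data_sg, bool have_data,
                           bool is_write, struct scatterlist *status,
                           void *vbr)
    {
        struct scatterlist *sgs[3];
        unsigned int num_out = 0, num_in = 0;

        sgs[num_out++] = hdr;
        if (have_data) {
            if (is_write)
                sgs[num_out++] = data_sg;
            else
                sgs[num_out + num_in++] = data_sg;
        }
        sgs[num_out + num_in++] = status;

        return virtqueue_add_sgs(vq, sgs, num_out, num_in, vbr, GFP_ATOMIC);
    }
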
/drivers/net/

  virtio_net.c
    line 1758: struct scatterlist *sgs[4], hdr, stat;    in virtnet_send_command() (local)
    line 1769: sgs[out_num++] = &hdr;    in virtnet_send_command()
    line 1772: sgs[out_num++] = out;    in virtnet_send_command()
    line 1776: sgs[out_num] = &stat;    in virtnet_send_command()
    line 1778: BUG_ON(out_num + 1 > ARRAY_SIZE(sgs));    in virtnet_send_command()
    line 1779: virtqueue_add_sgs(vi->cvq, sgs, out_num, 1, vi, GFP_ATOMIC);    in virtnet_send_command()

/drivers/nvme/host/

  core.c
    line 1929: if (ctrl->nr_streams && ns->sws && ns->sgs)    in nvme_config_discard()
    line 1930: size *= ns->sws * ns->sgs;    in nvme_config_discard()
    line 1992: ns->sgs = le16_to_cpu(s.sgs);    in nvme_setup_streams_ns()
    line 1996: if (ns->sgs)    in nvme_setup_streams_ns()
    line 1997: *io_opt = *phys_bs * ns->sgs;    in nvme_setup_streams_ns()

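In the NVMe host code, sgs has nothing to do with scatterlists: it is the Stream
Granularity Size from the streams directive, expressed in units of the stream write
size (sws), and the matches above multiply it out into discard and optimal-I/O
sizes. A worked example with assumed values:

    #include <linux/types.h>

    /* Illustrative arithmetic mirroring the expressions in the matches
     * above; the values are assumptions, not from any real device. */
    static u32 streams_sizing_example(void)
    {
        u32 bs = 512;                 /* logical block size in bytes */
        u16 sws = 8;                  /* stream write size, in blocks */
        u16 sgs = 16;                 /* stream granularity, in sws units */
        u32 phys_bs = bs * sws;       /* 4096: preferred write size */
        u32 io_opt = phys_bs * sgs;   /* 4096 * 16 = 65536 (line 1997) */

        (void)io_opt;
        return bs * sws * sgs;        /* 65536: discard granularity (line 1930) */
    }
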
  nvme.h
    line 454: u16 sgs;    (member)