
Searched full:req (Results 1 – 25 of 3357) sorted by relevance


/kernel/linux/linux-5.10/drivers/staging/greybus/
audio_apbridgea.c
16 struct audio_apbridgea_set_config_request req; in gb_audio_apbridgea_set_config() local
18 req.hdr.type = AUDIO_APBRIDGEA_TYPE_SET_CONFIG; in gb_audio_apbridgea_set_config()
19 req.hdr.i2s_port = cpu_to_le16(i2s_port); in gb_audio_apbridgea_set_config()
20 req.format = cpu_to_le32(format); in gb_audio_apbridgea_set_config()
21 req.rate = cpu_to_le32(rate); in gb_audio_apbridgea_set_config()
22 req.mclk_freq = cpu_to_le32(mclk_freq); in gb_audio_apbridgea_set_config()
24 return gb_hd_output(connection->hd, &req, sizeof(req), in gb_audio_apbridgea_set_config()
33 struct audio_apbridgea_register_cport_request req; in gb_audio_apbridgea_register_cport() local
36 req.hdr.type = AUDIO_APBRIDGEA_TYPE_REGISTER_CPORT; in gb_audio_apbridgea_register_cport()
37 req.hdr.i2s_port = cpu_to_le16(i2s_port); in gb_audio_apbridgea_register_cport()
[all …]
audio_gb.c
50 struct gb_audio_get_control_request req; in gb_audio_gb_get_control() local
54 req.control_id = control_id; in gb_audio_gb_get_control()
55 req.index = index; in gb_audio_gb_get_control()
58 &req, sizeof(req), &resp, sizeof(resp)); in gb_audio_gb_get_control()
72 struct gb_audio_set_control_request req; in gb_audio_gb_set_control() local
74 req.control_id = control_id; in gb_audio_gb_set_control()
75 req.index = index; in gb_audio_gb_set_control()
76 memcpy(&req.value, value, sizeof(req.value)); in gb_audio_gb_set_control()
79 &req, sizeof(req), NULL, 0); in gb_audio_gb_set_control()
86 struct gb_audio_enable_widget_request req; in gb_audio_gb_enable_widget() local
[all …]
/kernel/linux/linux-4.19/drivers/staging/greybus/
audio_apbridgea.c
17 struct audio_apbridgea_set_config_request req; in gb_audio_apbridgea_set_config() local
19 req.hdr.type = AUDIO_APBRIDGEA_TYPE_SET_CONFIG; in gb_audio_apbridgea_set_config()
20 req.hdr.i2s_port = cpu_to_le16(i2s_port); in gb_audio_apbridgea_set_config()
21 req.format = cpu_to_le32(format); in gb_audio_apbridgea_set_config()
22 req.rate = cpu_to_le32(rate); in gb_audio_apbridgea_set_config()
23 req.mclk_freq = cpu_to_le32(mclk_freq); in gb_audio_apbridgea_set_config()
25 return gb_hd_output(connection->hd, &req, sizeof(req), in gb_audio_apbridgea_set_config()
34 struct audio_apbridgea_register_cport_request req; in gb_audio_apbridgea_register_cport() local
37 req.hdr.type = AUDIO_APBRIDGEA_TYPE_REGISTER_CPORT; in gb_audio_apbridgea_register_cport()
38 req.hdr.i2s_port = cpu_to_le16(i2s_port); in gb_audio_apbridgea_register_cport()
[all …]
audio_gb.c
52 struct gb_audio_get_control_request req; in gb_audio_gb_get_control() local
56 req.control_id = control_id; in gb_audio_gb_get_control()
57 req.index = index; in gb_audio_gb_get_control()
60 &req, sizeof(req), &resp, sizeof(resp)); in gb_audio_gb_get_control()
74 struct gb_audio_set_control_request req; in gb_audio_gb_set_control() local
76 req.control_id = control_id; in gb_audio_gb_set_control()
77 req.index = index; in gb_audio_gb_set_control()
78 memcpy(&req.value, value, sizeof(req.value)); in gb_audio_gb_set_control()
81 &req, sizeof(req), NULL, 0); in gb_audio_gb_set_control()
88 struct gb_audio_enable_widget_request req; in gb_audio_gb_enable_widget() local
[all …]
/kernel/linux/linux-4.19/drivers/s390/scsi/
zfcp_fsf.c
72 static void zfcp_fsf_class_not_supp(struct zfcp_fsf_req *req) in zfcp_fsf_class_not_supp() argument
74 dev_err(&req->adapter->ccw_device->dev, "FCP device not " in zfcp_fsf_class_not_supp()
76 zfcp_erp_adapter_shutdown(req->adapter, 0, "fscns_1"); in zfcp_fsf_class_not_supp()
77 req->status |= ZFCP_STATUS_FSFREQ_ERROR; in zfcp_fsf_class_not_supp()
84 void zfcp_fsf_req_free(struct zfcp_fsf_req *req) in zfcp_fsf_req_free() argument
86 if (likely(req->pool)) { in zfcp_fsf_req_free()
87 if (likely(req->qtcb)) in zfcp_fsf_req_free()
88 mempool_free(req->qtcb, req->adapter->pool.qtcb_pool); in zfcp_fsf_req_free()
89 mempool_free(req, req->pool); in zfcp_fsf_req_free()
93 if (likely(req->qtcb)) in zfcp_fsf_req_free()
[all …]
/kernel/linux/linux-5.10/drivers/nvme/target/
io-cmd-file.c
101 static ssize_t nvmet_file_submit_bvec(struct nvmet_req *req, loff_t pos, in nvmet_file_submit_bvec() argument
104 struct kiocb *iocb = &req->f.iocb; in nvmet_file_submit_bvec()
109 if (req->cmd->rw.opcode == nvme_cmd_write) { in nvmet_file_submit_bvec()
110 if (req->cmd->rw.control & cpu_to_le16(NVME_RW_FUA)) in nvmet_file_submit_bvec()
112 call_iter = req->ns->file->f_op->write_iter; in nvmet_file_submit_bvec()
115 call_iter = req->ns->file->f_op->read_iter; in nvmet_file_submit_bvec()
119 iov_iter_bvec(&iter, rw, req->f.bvec, nr_segs, count); in nvmet_file_submit_bvec()
122 iocb->ki_filp = req->ns->file; in nvmet_file_submit_bvec()
123 iocb->ki_flags = ki_flags | iocb_flags(req->ns->file); in nvmet_file_submit_bvec()
130 struct nvmet_req *req = container_of(iocb, struct nvmet_req, f.iocb); in nvmet_file_io_done() local
[all …]
admin-cmd.c
28 static u32 nvmet_feat_data_len(struct nvmet_req *req, u32 cdw10) in nvmet_feat_data_len() argument
32 return sizeof(req->sq->ctrl->hostid); in nvmet_feat_data_len()
43 static void nvmet_execute_get_log_page_noop(struct nvmet_req *req) in nvmet_execute_get_log_page_noop() argument
45 nvmet_req_complete(req, nvmet_zero_sgl(req, 0, req->transfer_len)); in nvmet_execute_get_log_page_noop()
48 static void nvmet_execute_get_log_page_error(struct nvmet_req *req) in nvmet_execute_get_log_page_error() argument
50 struct nvmet_ctrl *ctrl = req->sq->ctrl; in nvmet_execute_get_log_page_error()
60 if (nvmet_copy_to_sgl(req, offset, &ctrl->slots[slot], in nvmet_execute_get_log_page_error()
71 nvmet_req_complete(req, 0); in nvmet_execute_get_log_page_error()
74 static u16 nvmet_get_smart_log_nsid(struct nvmet_req *req, in nvmet_get_smart_log_nsid() argument
80 ns = nvmet_find_namespace(req->sq->ctrl, req->cmd->get_log_page.nsid); in nvmet_get_smart_log_nsid()
[all …]
io-cmd-bdev.c
105 static u16 blk_to_nvme_status(struct nvmet_req *req, blk_status_t blk_sts) in blk_to_nvme_status() argument
120 req->error_loc = offsetof(struct nvme_rw_command, length); in blk_to_nvme_status()
124 req->error_loc = offsetof(struct nvme_rw_command, slba); in blk_to_nvme_status()
127 req->error_loc = offsetof(struct nvme_common_command, opcode); in blk_to_nvme_status()
128 switch (req->cmd->common.opcode) { in blk_to_nvme_status()
139 req->error_loc = offsetof(struct nvme_rw_command, nsid); in blk_to_nvme_status()
144 req->error_loc = offsetof(struct nvme_common_command, opcode); in blk_to_nvme_status()
147 switch (req->cmd->common.opcode) { in blk_to_nvme_status()
150 req->error_slba = le64_to_cpu(req->cmd->rw.slba); in blk_to_nvme_status()
153 req->error_slba = in blk_to_nvme_status()
[all …]
fabrics-cmd.c
10 static void nvmet_execute_prop_set(struct nvmet_req *req) in nvmet_execute_prop_set() argument
12 u64 val = le64_to_cpu(req->cmd->prop_set.value); in nvmet_execute_prop_set()
15 if (!nvmet_check_transfer_len(req, 0)) in nvmet_execute_prop_set()
18 if (req->cmd->prop_set.attrib & 1) { in nvmet_execute_prop_set()
19 req->error_loc = in nvmet_execute_prop_set()
25 switch (le32_to_cpu(req->cmd->prop_set.offset)) { in nvmet_execute_prop_set()
27 nvmet_update_cc(req->sq->ctrl, val); in nvmet_execute_prop_set()
30 req->error_loc = in nvmet_execute_prop_set()
35 nvmet_req_complete(req, status); in nvmet_execute_prop_set()
38 static void nvmet_execute_prop_get(struct nvmet_req *req) in nvmet_execute_prop_get() argument
[all …]
/kernel/linux/linux-5.10/drivers/media/mc/
mc-request.c
39 static void media_request_clean(struct media_request *req) in media_request_clean() argument
44 WARN_ON(req->state != MEDIA_REQUEST_STATE_CLEANING); in media_request_clean()
45 WARN_ON(req->updating_count); in media_request_clean()
46 WARN_ON(req->access_count); in media_request_clean()
48 list_for_each_entry_safe(obj, obj_safe, &req->objects, list) { in media_request_clean()
53 req->updating_count = 0; in media_request_clean()
54 req->access_count = 0; in media_request_clean()
55 WARN_ON(req->num_incomplete_objects); in media_request_clean()
56 req->num_incomplete_objects = 0; in media_request_clean()
57 wake_up_interruptible_all(&req->poll_wait); in media_request_clean()
[all …]
/kernel/linux/linux-5.10/drivers/s390/scsi/
zfcp_fsf.c
80 static void zfcp_fsf_class_not_supp(struct zfcp_fsf_req *req) in zfcp_fsf_class_not_supp() argument
82 dev_err(&req->adapter->ccw_device->dev, "FCP device not " in zfcp_fsf_class_not_supp()
84 zfcp_erp_adapter_shutdown(req->adapter, 0, "fscns_1"); in zfcp_fsf_class_not_supp()
85 req->status |= ZFCP_STATUS_FSFREQ_ERROR; in zfcp_fsf_class_not_supp()
90 * @req: pointer to struct zfcp_fsf_req
92 void zfcp_fsf_req_free(struct zfcp_fsf_req *req) in zfcp_fsf_req_free() argument
94 if (likely(req->pool)) { in zfcp_fsf_req_free()
95 if (likely(!zfcp_fsf_req_is_status_read_buffer(req))) in zfcp_fsf_req_free()
96 mempool_free(req->qtcb, req->adapter->pool.qtcb_pool); in zfcp_fsf_req_free()
97 mempool_free(req, req->pool); in zfcp_fsf_req_free()
[all …]
/kernel/linux/linux-4.19/drivers/nvme/target/
io-cmd-file.c
85 static ssize_t nvmet_file_submit_bvec(struct nvmet_req *req, loff_t pos, in nvmet_file_submit_bvec() argument
88 struct kiocb *iocb = &req->f.iocb; in nvmet_file_submit_bvec()
94 if (req->cmd->rw.opcode == nvme_cmd_write) { in nvmet_file_submit_bvec()
95 if (req->cmd->rw.control & cpu_to_le16(NVME_RW_FUA)) in nvmet_file_submit_bvec()
97 call_iter = req->ns->file->f_op->write_iter; in nvmet_file_submit_bvec()
100 call_iter = req->ns->file->f_op->read_iter; in nvmet_file_submit_bvec()
104 iov_iter_bvec(&iter, ITER_BVEC | rw, req->f.bvec, nr_segs, count); in nvmet_file_submit_bvec()
107 iocb->ki_filp = req->ns->file; in nvmet_file_submit_bvec()
108 iocb->ki_flags = ki_flags | iocb_flags(req->ns->file); in nvmet_file_submit_bvec()
120 struct nvmet_req *req = container_of(iocb, struct nvmet_req, f.iocb); in nvmet_file_io_done() local
[all …]
admin-cmd.c
27 static inline void nvmet_clear_aen(struct nvmet_req *req, u32 aen_bit) in nvmet_clear_aen() argument
29 int rae = le32_to_cpu(req->cmd->common.cdw10[0]) & 1 << 15; in nvmet_clear_aen()
32 clear_bit(aen_bit, &req->sq->ctrl->aen_masked); in nvmet_clear_aen()
48 static void nvmet_execute_get_log_page_noop(struct nvmet_req *req) in nvmet_execute_get_log_page_noop() argument
50 nvmet_req_complete(req, nvmet_zero_sgl(req, 0, req->data_len)); in nvmet_execute_get_log_page_noop()
53 static u16 nvmet_get_smart_log_nsid(struct nvmet_req *req, in nvmet_get_smart_log_nsid() argument
59 ns = nvmet_find_namespace(req->sq->ctrl, req->cmd->get_log_page.nsid); in nvmet_get_smart_log_nsid()
62 le32_to_cpu(req->cmd->get_log_page.nsid)); in nvmet_get_smart_log_nsid()
87 static u16 nvmet_get_smart_log_all(struct nvmet_req *req, in nvmet_get_smart_log_all() argument
95 ctrl = req->sq->ctrl; in nvmet_get_smart_log_all()
[all …]
io-cmd-bdev.c
49 struct nvmet_req *req = bio->bi_private; in nvmet_bio_done() local
51 nvmet_req_complete(req, in nvmet_bio_done()
54 if (bio != &req->b.inline_bio) in nvmet_bio_done()
58 static void nvmet_bdev_execute_rw(struct nvmet_req *req) in nvmet_bdev_execute_rw() argument
60 int sg_cnt = req->sg_cnt; in nvmet_bdev_execute_rw()
61 struct bio *bio = &req->b.inline_bio; in nvmet_bdev_execute_rw()
67 if (!req->sg_cnt) { in nvmet_bdev_execute_rw()
68 nvmet_req_complete(req, 0); in nvmet_bdev_execute_rw()
72 if (req->cmd->rw.opcode == nvme_cmd_write) { in nvmet_bdev_execute_rw()
75 if (req->cmd->rw.control & cpu_to_le16(NVME_RW_FUA)) in nvmet_bdev_execute_rw()
[all …]
/kernel/linux/linux-5.10/crypto/
chacha20poly1305.c
42 struct ahash_request req; /* must be last member */ member
48 struct skcipher_request req; /* must be last member */ member
70 static inline void async_done_continue(struct aead_request *req, int err, in async_done_continue() argument
74 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); in async_done_continue()
77 err = cont(req); in async_done_continue()
81 aead_request_complete(req, err); in async_done_continue()
84 static void chacha_iv(u8 *iv, struct aead_request *req, u32 icb) in chacha_iv() argument
86 struct chachapoly_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); in chacha_iv()
91 memcpy(iv + sizeof(leicb) + ctx->saltlen, req->iv, in chacha_iv()
95 static int poly_verify_tag(struct aead_request *req) in poly_verify_tag() argument
[all …]
seqiv.c
21 static void seqiv_aead_encrypt_complete2(struct aead_request *req, int err) in seqiv_aead_encrypt_complete2() argument
23 struct aead_request *subreq = aead_request_ctx(req); in seqiv_aead_encrypt_complete2()
32 geniv = crypto_aead_reqtfm(req); in seqiv_aead_encrypt_complete2()
33 memcpy(req->iv, subreq->iv, crypto_aead_ivsize(geniv)); in seqiv_aead_encrypt_complete2()
42 struct aead_request *req = base->data; in seqiv_aead_encrypt_complete() local
44 seqiv_aead_encrypt_complete2(req, err); in seqiv_aead_encrypt_complete()
45 aead_request_complete(req, err); in seqiv_aead_encrypt_complete()
48 static int seqiv_aead_encrypt(struct aead_request *req) in seqiv_aead_encrypt() argument
50 struct crypto_aead *geniv = crypto_aead_reqtfm(req); in seqiv_aead_encrypt()
52 struct aead_request *subreq = aead_request_ctx(req); in seqiv_aead_encrypt()
[all …]
gcm.c
60 int (*complete)(struct aead_request *req, u32 flags);
82 static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc);
85 struct aead_request *req) in crypto_gcm_reqctx() argument
87 unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req)); in crypto_gcm_reqctx()
89 return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1); in crypto_gcm_reqctx()
105 struct skcipher_request req; in crypto_gcm_setkey() member
123 skcipher_request_set_tfm(&data->req, ctr); in crypto_gcm_setkey()
124 skcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP | in crypto_gcm_setkey()
128 skcipher_request_set_crypt(&data->req, data->sg, data->sg, in crypto_gcm_setkey()
131 err = crypto_wait_req(crypto_skcipher_encrypt(&data->req), in crypto_gcm_setkey()
[all …]
/kernel/linux/linux-4.19/drivers/block/drbd/
drbd_req.c
37 static void _drbd_start_io_acct(struct drbd_device *device, struct drbd_request *req) in _drbd_start_io_acct() argument
41 generic_start_io_acct(q, bio_op(req->master_bio), in _drbd_start_io_acct()
42 req->i.size >> 9, &device->vdisk->part0); in _drbd_start_io_acct()
46 static void _drbd_end_io_acct(struct drbd_device *device, struct drbd_request *req) in _drbd_end_io_acct() argument
50 generic_end_io_acct(q, bio_op(req->master_bio), in _drbd_end_io_acct()
51 &device->vdisk->part0, req->start_jif); in _drbd_end_io_acct()
56 struct drbd_request *req; in drbd_req_new() local
58 req = mempool_alloc(&drbd_request_mempool, GFP_NOIO); in drbd_req_new()
59 if (!req) in drbd_req_new()
61 memset(req, 0, sizeof(*req)); in drbd_req_new()
[all …]
/kernel/linux/linux-5.10/drivers/crypto/inside-secure/
safexcel_hash.c
68 static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req) in safexcel_queued_len() argument
70 return req->len - req->processed; in safexcel_queued_len()
107 struct safexcel_ahash_req *req, in safexcel_context_control() argument
121 if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) { in safexcel_context_control()
122 if (req->xcbcmac) in safexcel_context_control()
125 memcpy(ctx->base.ctxr->data, req->state, req->state_sz); in safexcel_context_control()
127 if (!req->finish && req->xcbcmac) in safexcel_context_control()
132 CONTEXT_CONTROL_SIZE(req->state_sz / in safexcel_context_control()
138 CONTEXT_CONTROL_SIZE(req->state_sz / in safexcel_context_control()
141 } else if (!req->processed) { in safexcel_context_control()
[all …]
/kernel/linux/linux-4.19/crypto/
chacha20poly1305.c
50 struct ahash_request req; /* must be last member */ member
56 struct skcipher_request req; /* must be last member */ member
78 static inline void async_done_continue(struct aead_request *req, int err, in async_done_continue() argument
82 struct chachapoly_req_ctx *rctx = aead_request_ctx(req); in async_done_continue()
85 err = cont(req); in async_done_continue()
89 aead_request_complete(req, err); in async_done_continue()
92 static void chacha_iv(u8 *iv, struct aead_request *req, u32 icb) in chacha_iv() argument
94 struct chachapoly_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); in chacha_iv()
99 memcpy(iv + sizeof(leicb) + ctx->saltlen, req->iv, in chacha_iv()
103 static int poly_verify_tag(struct aead_request *req) in poly_verify_tag() argument
[all …]
/kernel/linux/linux-5.10/drivers/block/drbd/
drbd_req.c
26 struct drbd_request *req; in drbd_req_new() local
28 req = mempool_alloc(&drbd_request_mempool, GFP_NOIO); in drbd_req_new()
29 if (!req) in drbd_req_new()
31 memset(req, 0, sizeof(*req)); in drbd_req_new()
33 drbd_req_make_private_bio(req, bio_src); in drbd_req_new()
34 req->rq_state = (bio_data_dir(bio_src) == WRITE ? RQ_WRITE : 0) in drbd_req_new()
38 req->device = device; in drbd_req_new()
39 req->master_bio = bio_src; in drbd_req_new()
40 req->epoch = 0; in drbd_req_new()
42 drbd_clear_interval(&req->i); in drbd_req_new()
[all …]
/kernel/linux/linux-4.19/drivers/crypto/amcc/
crypto4xx_alg.c
78 static inline int crypto4xx_crypt(struct skcipher_request *req, in crypto4xx_crypt() argument
82 struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req); in crypto4xx_crypt()
86 if (check_blocksize && !IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE)) in crypto4xx_crypt()
90 crypto4xx_memcpy_to_le32(iv, req->iv, ivlen); in crypto4xx_crypt()
92 return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst, in crypto4xx_crypt()
93 req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out, in crypto4xx_crypt()
97 int crypto4xx_encrypt_noiv_block(struct skcipher_request *req) in crypto4xx_encrypt_noiv_block() argument
99 return crypto4xx_crypt(req, 0, false, true); in crypto4xx_encrypt_noiv_block()
102 int crypto4xx_encrypt_iv_stream(struct skcipher_request *req) in crypto4xx_encrypt_iv_stream() argument
104 return crypto4xx_crypt(req, AES_IV_SIZE, false, false); in crypto4xx_encrypt_iv_stream()
[all …]
/kernel/linux/linux-4.19/drivers/infiniband/hw/hfi1/
user_sdma.c
79 static int user_sdma_send_pkts(struct user_sdma_request *req,
83 static void user_sdma_free_request(struct user_sdma_request *req, bool unpin);
84 static int pin_vector_pages(struct user_sdma_request *req,
88 static int check_header_template(struct user_sdma_request *req,
91 static int set_txreq_header(struct user_sdma_request *req,
93 static int set_txreq_header_ahg(struct user_sdma_request *req,
337 struct user_sdma_request *req; in hfi1_user_sdma_process_request() local
344 if (iovec[idx].iov_len < sizeof(info) + sizeof(req->hdr)) { in hfi1_user_sdma_process_request()
349 iovec[idx].iov_len, sizeof(info) + sizeof(req->hdr)); in hfi1_user_sdma_process_request()
399 req = pq->reqs + info.comp_idx; in hfi1_user_sdma_process_request()
[all …]
/kernel/linux/linux-5.10/drivers/crypto/marvell/cesa/
cipher.c
42 struct skcipher_request *req) in mv_cesa_skcipher_req_iter_init() argument
44 mv_cesa_req_dma_iter_init(&iter->base, req->cryptlen); in mv_cesa_skcipher_req_iter_init()
45 mv_cesa_sg_dma_iter_init(&iter->src, req->src, DMA_TO_DEVICE); in mv_cesa_skcipher_req_iter_init()
46 mv_cesa_sg_dma_iter_init(&iter->dst, req->dst, DMA_FROM_DEVICE); in mv_cesa_skcipher_req_iter_init()
59 mv_cesa_skcipher_dma_cleanup(struct skcipher_request *req) in mv_cesa_skcipher_dma_cleanup() argument
61 struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req); in mv_cesa_skcipher_dma_cleanup()
63 if (req->dst != req->src) { in mv_cesa_skcipher_dma_cleanup()
64 dma_unmap_sg(cesa_dev->dev, req->dst, creq->dst_nents, in mv_cesa_skcipher_dma_cleanup()
66 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_cleanup()
69 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_cleanup()
[all …]
/kernel/linux/linux-5.10/drivers/infiniband/hw/hfi1/
user_sdma.c
80 static int user_sdma_send_pkts(struct user_sdma_request *req, u16 maxpkts);
83 static void user_sdma_free_request(struct user_sdma_request *req, bool unpin);
84 static int pin_vector_pages(struct user_sdma_request *req,
88 static int check_header_template(struct user_sdma_request *req,
91 static int set_txreq_header(struct user_sdma_request *req,
93 static int set_txreq_header_ahg(struct user_sdma_request *req,
355 struct user_sdma_request *req; in hfi1_user_sdma_process_request() local
362 if (iovec[idx].iov_len < sizeof(info) + sizeof(req->hdr)) { in hfi1_user_sdma_process_request()
367 iovec[idx].iov_len, sizeof(info) + sizeof(req->hdr)); in hfi1_user_sdma_process_request()
417 req = pq->reqs + info.comp_idx; in hfi1_user_sdma_process_request()
[all …]
