Searched refs:sreq (Results 1 – 11 of 11) sorted by relevance

/drivers/scsi/
scsi_bsg.c
15 struct scsi_request *sreq; in scsi_bsg_sg_io_fn() local
35 sreq = scsi_req(rq); in scsi_bsg_sg_io_fn()
36 sreq->cmd_len = hdr->request_len; in scsi_bsg_sg_io_fn()
37 if (sreq->cmd_len > BLK_MAX_CDB) { in scsi_bsg_sg_io_fn()
38 sreq->cmd = kzalloc(sreq->cmd_len, GFP_KERNEL); in scsi_bsg_sg_io_fn()
39 if (!sreq->cmd) in scsi_bsg_sg_io_fn()
44 if (copy_from_user(sreq->cmd, uptr64(hdr->request), sreq->cmd_len)) in scsi_bsg_sg_io_fn()
47 if (!scsi_cmd_allowed(sreq->cmd, mode)) in scsi_bsg_sg_io_fn()
68 hdr->device_status = sreq->result & 0xff; in scsi_bsg_sg_io_fn()
69 hdr->transport_status = host_byte(sreq->result); in scsi_bsg_sg_io_fn()
[all …]
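This hit is the kernel half of the bsg SG_IO path: the CDB pointed to by hdr->request is copied in, length-checked against BLK_MAX_CDB, permission-checked via scsi_cmd_allowed(), and the completion status is copied back into the header. The matching userspace side looks roughly like the sketch below; the device node is hypothetical and error handling is minimal.

#include <fcntl.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/bsg.h>
#include <scsi/sg.h>

int main(void)
{
	unsigned char cdb[6] = { 0x12, 0, 0, 0, 96, 0 }; /* INQUIRY */
	unsigned char data[96], sense[32];
	struct sg_io_v4 hdr;
	int fd = open("/dev/bsg/0:0:0:0", O_RDWR);	/* hypothetical node */

	if (fd < 0)
		return 1;

	memset(&hdr, 0, sizeof(hdr));
	hdr.guard = 'Q';			/* marks a v4 header */
	hdr.protocol = BSG_PROTOCOL_SCSI;
	hdr.subprotocol = BSG_SUB_PROTOCOL_SCSI_CMD;
	hdr.request = (uintptr_t)cdb;		/* copied in scsi_bsg_sg_io_fn() */
	hdr.request_len = sizeof(cdb);		/* becomes sreq->cmd_len */
	hdr.din_xferp = (uintptr_t)data;
	hdr.din_xfer_len = sizeof(data);
	hdr.response = (uintptr_t)sense;
	hdr.max_response_len = sizeof(sense);
	hdr.timeout = 5000;			/* milliseconds */

	if (ioctl(fd, SG_IO, &hdr) < 0)
		return 1;

	/* device_status/transport_status come from sreq->result above */
	printf("device_status=0x%x transport_status=0x%x\n",
	       hdr.device_status, hdr.transport_status);
	close(fd);
	return 0;
}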
/drivers/crypto/marvell/cesa/
cipher.c
86 struct mv_cesa_skcipher_std_req *sreq = &creq->std; in mv_cesa_skcipher_std_step() local
88 size_t len = min_t(size_t, req->cryptlen - sreq->offset, in mv_cesa_skcipher_std_step()
91 mv_cesa_adjust_op(engine, &sreq->op); in mv_cesa_skcipher_std_step()
93 memcpy(engine->sram_pool, &sreq->op, sizeof(sreq->op)); in mv_cesa_skcipher_std_step()
95 memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op)); in mv_cesa_skcipher_std_step()
99 sreq->offset); in mv_cesa_skcipher_std_step()
101 sreq->size = len; in mv_cesa_skcipher_std_step()
102 mv_cesa_set_crypt_op_len(&sreq->op, len); in mv_cesa_skcipher_std_step()
105 if (!sreq->skip_ctx) { in mv_cesa_skcipher_std_step()
107 memcpy(engine->sram_pool, &sreq->op, sizeof(sreq->op)); in mv_cesa_skcipher_std_step()
[all …]
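mv_cesa_skcipher_std_step() drives the request through the engine SRAM one window at a time: clamp the remaining bytes to the SRAM payload, copy the chunk (plus the op descriptor) in, and record how much was queued so the completion path can advance sreq->offset. A minimal standalone sketch of that accounting, with the window size assumed and the engine interaction stubbed out:

#include <stddef.h>
#include <string.h>

#define SRAM_PAYLOAD_SZ 1024	/* assumed data window; not the real CESA size */

/* Queue the next chunk: mirrors the len = min(cryptlen - offset, window)
 * clamp above; returns the length queued, which the caller stores
 * (sreq->size) and later adds to sreq->offset on completion. */
static size_t std_step(const unsigned char *src, size_t cryptlen,
		       size_t offset, unsigned char *sram)
{
	size_t len = cryptlen - offset;

	if (len > SRAM_PAYLOAD_SZ)
		len = SRAM_PAYLOAD_SZ;
	memcpy(sram, src + offset, len);	/* stands in for the sg copy */
	/* ...write the op descriptor and start the engine here... */
	return len;
}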
hash.c
161 struct mv_cesa_ahash_std_req *sreq = &creq->req.std; in mv_cesa_ahash_std_step() local
178 if (!sreq->offset) { in mv_cesa_ahash_std_step()
194 len = min_t(size_t, req->nbytes + creq->cache_ptr - sreq->offset, in mv_cesa_ahash_std_step()
203 sreq->offset += mv_cesa_sg_copy_to_sram( in mv_cesa_ahash_std_step()
206 len - creq->cache_ptr, sreq->offset); in mv_cesa_ahash_std_step()
212 if (creq->last_req && sreq->offset == req->nbytes && in mv_cesa_ahash_std_step()
287 struct mv_cesa_ahash_std_req *sreq = &creq->req.std; in mv_cesa_ahash_std_process() local
289 if (sreq->offset < (req->nbytes - creq->cache_ptr)) in mv_cesa_ahash_std_process()
306 struct mv_cesa_ahash_std_req *sreq = &creq->req.std; in mv_cesa_ahash_std_prepare() local
308 sreq->offset = 0; in mv_cesa_ahash_std_prepare()
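The hash variant has the same chunked shape, except that bytes still sitting in the software cache count toward the window (the min_t at line 194), and mv_cesa_ahash_std_process() polls sreq->offset to decide whether another step is needed. A simplified sketch of just the length math, with the first-step cache flush elided:

#include <stddef.h>

/* Bytes to push this round: leftover cache plus unread request data,
 * clamped to the SRAM window, as in mv_cesa_ahash_std_step(). */
static size_t hash_step_len(size_t nbytes, size_t cache_ptr,
			    size_t offset, size_t window)
{
	size_t len = nbytes + cache_ptr - offset;

	return len < window ? len : window;
}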
/drivers/crypto/inside-secure/
safexcel_cipher.c
508 struct safexcel_cipher_req *sreq, in safexcel_context_control() argument
530 (sreq->direction == SAFEXCEL_ENCRYPT ? in safexcel_context_control()
545 if (sreq->direction == SAFEXCEL_ENCRYPT && in safexcel_context_control()
550 else if (sreq->direction == SAFEXCEL_ENCRYPT) in safexcel_context_control()
560 if (sreq->direction == SAFEXCEL_ENCRYPT) in safexcel_context_control()
613 struct safexcel_cipher_req *sreq, in safexcel_handle_req_result() argument
624 if (unlikely(!sreq->rdescs)) in safexcel_handle_req_result()
627 while (sreq->rdescs--) { in safexcel_handle_req_result()
645 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL); in safexcel_handle_req_result()
647 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE); in safexcel_handle_req_result()
[all …]
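Lines 645 and 647 show the usual in-place vs. out-of-place unmap split: a request whose source and destination scatterlists are the same object was mapped once, bidirectionally, so one unmap suffices; otherwise each side gets its own direction. The dst-side unmap falls under the elided lines, so its parameters below are assumed symmetric to the src side shown:

#include <linux/dma-mapping.h>

/* Sketch of the unmap choice in safexcel_handle_req_result(). */
static void unmap_req(struct device *dev, struct scatterlist *src,
		      struct scatterlist *dst, int nr_src, int nr_dst)
{
	if (src == dst) {
		dma_unmap_sg(dev, src, nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(dev, src, nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, nr_dst, DMA_FROM_DEVICE);
	}
}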
safexcel_hash.c
234 struct safexcel_ahash_req *sreq = ahash_request_ctx(areq); in safexcel_handle_req_result() local
251 if (sreq->nents) { in safexcel_handle_req_result()
252 dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE); in safexcel_handle_req_result()
253 sreq->nents = 0; in safexcel_handle_req_result()
256 if (sreq->result_dma) { in safexcel_handle_req_result()
257 dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz, in safexcel_handle_req_result()
259 sreq->result_dma = 0; in safexcel_handle_req_result()
262 if (sreq->cache_dma) { in safexcel_handle_req_result()
263 dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz, in safexcel_handle_req_result()
265 sreq->cache_dma = 0; in safexcel_handle_req_result()
[all …]
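Each branch above unmaps one resource and then zeroes the handle (nents, result_dma, cache_dma), which makes the teardown safe to run more than once. Condensed into a standalone sketch, with the container struct invented and the DMA directions assumed from context (result read back from the device, cache pushed to it):

#include <linux/dma-mapping.h>

struct hash_dma_state {		/* invented stand-in for the request state */
	struct scatterlist *src;
	int nents;
	dma_addr_t result_dma;
	dma_addr_t cache_dma;
	unsigned int digest_sz;
	unsigned int cache_sz;
};

/* Unmap, then clear the handle so a second call is a no-op. */
static void hash_dma_cleanup(struct device *dev, struct hash_dma_state *s)
{
	if (s->nents) {
		dma_unmap_sg(dev, s->src, s->nents, DMA_TO_DEVICE);
		s->nents = 0;
	}
	if (s->result_dma) {
		dma_unmap_single(dev, s->result_dma, s->digest_sz,
				 DMA_FROM_DEVICE);
		s->result_dma = 0;
	}
	if (s->cache_dma) {
		dma_unmap_single(dev, s->cache_dma, s->cache_sz,
				 DMA_TO_DEVICE);
		s->cache_dma = 0;
	}
}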
/drivers/net/wireless/mediatek/mt76/
mt76_connac_mcu.c
1403 struct cfg80211_scan_request *sreq = &scan_req->req; in mt76_connac_mcu_hw_scan() local
1405 int ext_channels_num = max_t(int, sreq->n_channels - 32, 0); in mt76_connac_mcu_hw_scan()
1406 struct ieee80211_channel **scan_list = sreq->channels; in mt76_connac_mcu_hw_scan()
1424 req->scan_type = sreq->n_ssids ? 1 : 0; in mt76_connac_mcu_hw_scan()
1425 req->probe_req_num = sreq->n_ssids ? 2 : 0; in mt76_connac_mcu_hw_scan()
1428 for (i = 0; i < sreq->n_ssids; i++) { in mt76_connac_mcu_hw_scan()
1429 if (!sreq->ssids[i].ssid_len) in mt76_connac_mcu_hw_scan()
1432 req->ssids[i].ssid_len = cpu_to_le32(sreq->ssids[i].ssid_len); in mt76_connac_mcu_hw_scan()
1433 memcpy(req->ssids[i].ssid, sreq->ssids[i].ssid, in mt76_connac_mcu_hw_scan()
1434 sreq->ssids[i].ssid_len); in mt76_connac_mcu_hw_scan()
[all …]
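The loop at lines 1428–1434 decides the scan type from the SSID list (no SSIDs means a passive scan) and packs each non-wildcard SSID into the firmware command with a little-endian length. A hedged sketch of that packing; struct fw_scan_ssid and MAX_FW_SSIDS are invented stand-ins, the real layout lives in the mt76_connac MCU headers:

#include <linux/ieee80211.h>
#include <linux/string.h>
#include <net/cfg80211.h>

#define MAX_FW_SSIDS 4		/* invented limit for the sketch */

struct fw_scan_ssid {		/* invented firmware layout */
	__le32 ssid_len;
	u8 ssid[IEEE80211_MAX_SSID_LEN];
};

/* Returns the number of SSIDs queued; zero means a passive scan,
 * mirroring req->scan_type = sreq->n_ssids ? 1 : 0 above. */
static int pack_ssids(struct fw_scan_ssid *out,
		      const struct cfg80211_scan_request *sreq)
{
	int i, n = 0;

	for (i = 0; i < sreq->n_ssids && n < MAX_FW_SSIDS; i++) {
		if (!sreq->ssids[i].ssid_len)
			continue;	/* wildcard entry, skip */
		out[n].ssid_len = cpu_to_le32(sreq->ssids[i].ssid_len);
		memcpy(out[n].ssid, sreq->ssids[i].ssid,
		       sreq->ssids[i].ssid_len);
		n++;
	}
	return n;
}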
mt76_connac_mcu.h
1077 struct cfg80211_sched_scan_request *sreq);
/drivers/crypto/hisilicon/sec2/
sec_crypto.c
1962 struct sec_req *sreq) in sec_skcipher_cryptlen_ckeck() argument
1964 u32 cryptlen = sreq->c_req.sk_req->cryptlen; in sec_skcipher_cryptlen_ckeck()
1998 static int sec_skcipher_param_check(struct sec_ctx *ctx, struct sec_req *sreq) in sec_skcipher_param_check() argument
2000 struct skcipher_request *sk_req = sreq->c_req.sk_req; in sec_skcipher_param_check()
2009 sreq->c_req.c_len = sk_req->cryptlen; in sec_skcipher_param_check()
2012 sreq->use_pbuf = true; in sec_skcipher_param_check()
2014 sreq->use_pbuf = false; in sec_skcipher_param_check()
2023 return sec_skcipher_cryptlen_ckeck(ctx, sreq); in sec_skcipher_param_check()
2032 struct skcipher_request *sreq, bool encrypt) in sec_skcipher_soft_crypto() argument
2048 skcipher_request_set_callback(subreq, sreq->base.flags, in sec_skcipher_soft_crypto()
[all …]
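sec_skcipher_param_check() validates the request length (the "ckeck" spelling is verbatim from the driver) and then picks the small-packet bounce-buffer path by setting sreq->use_pbuf. The shape of that decision, with the threshold assumed rather than taken from the driver:

#include <linux/errno.h>
#include <linux/types.h>

#define PBUF_THRESHOLD 512	/* assumed cutoff, not the driver's constant */

/* Reject lengths the mode cannot handle, then choose the copy path:
 * small requests go through a preallocated bounce buffer (use_pbuf),
 * large ones are mapped as scatterlists. */
static int param_check(u32 cryptlen, u32 blocksize, bool *use_pbuf)
{
	if (!cryptlen || cryptlen % blocksize)	/* e.g. CBC wants full blocks */
		return -EINVAL;
	*use_pbuf = cryptlen <= PBUF_THRESHOLD;
	return 0;
}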
/drivers/crypto/marvell/octeontx/
otx_cptvf_algs.c
147 struct skcipher_request *sreq; in output_iv_copyback() local
153 sreq = container_of(areq, struct skcipher_request, base); in output_iv_copyback()
154 stfm = crypto_skcipher_reqtfm(sreq); in output_iv_copyback()
158 rctx = skcipher_request_ctx(sreq); in output_iv_copyback()
161 start = sreq->cryptlen - ivsize; in output_iv_copyback()
164 scatterwalk_map_and_copy(sreq->iv, sreq->dst, start, in output_iv_copyback()
167 if (sreq->src != sreq->dst) { in output_iv_copyback()
168 scatterwalk_map_and_copy(sreq->iv, sreq->src, in output_iv_copyback()
171 memcpy(sreq->iv, req_info->iv_out, ivsize); in output_iv_copyback()
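output_iv_copyback() implements CBC IV chaining: the next IV is the last ciphertext block, which sits in dst after encryption, in src after out-of-place decryption, and in a copy saved before the operation (req_info->iv_out) after in-place decryption, since the engine has overwritten it. A condensed sketch of that three-way choice; the enc/dec branching is reconstructed from the partial snippet:

#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>
#include <linux/string.h>

/* saved_iv stands in for req_info->iv_out; the final 0 passed to
 * scatterwalk_map_and_copy() means "read from the scatterlist". */
static void cbc_iv_copyback(struct skcipher_request *req, bool enc,
			    const u8 *saved_iv, unsigned int ivsize)
{
	unsigned int start = req->cryptlen - ivsize;

	if (enc)
		scatterwalk_map_and_copy(req->iv, req->dst, start, ivsize, 0);
	else if (req->src != req->dst)
		scatterwalk_map_and_copy(req->iv, req->src, start, ivsize, 0);
	else
		memcpy(req->iv, saved_iv, ivsize);	/* in-place decrypt */
}

The otx2_cptvf_algs.c hit below is the same helper carried forward to the OcteonTX2 driver.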
/drivers/crypto/marvell/octeontx2/
otx2_cptvf_algs.c
130 struct skcipher_request *sreq; in output_iv_copyback() local
135 sreq = container_of(areq, struct skcipher_request, base); in output_iv_copyback()
136 stfm = crypto_skcipher_reqtfm(sreq); in output_iv_copyback()
140 rctx = skcipher_request_ctx(sreq); in output_iv_copyback()
143 start = sreq->cryptlen - ivsize; in output_iv_copyback()
146 scatterwalk_map_and_copy(sreq->iv, sreq->dst, start, in output_iv_copyback()
149 if (sreq->src != sreq->dst) { in output_iv_copyback()
150 scatterwalk_map_and_copy(sreq->iv, sreq->src, in output_iv_copyback()
153 memcpy(sreq->iv, req_info->iv_out, ivsize); in output_iv_copyback()
/drivers/crypto/qat/qat_common/
qat_algs.c
866 struct skcipher_request *sreq = qat_req->skcipher_req; in qat_alg_update_iv_ctr_mode() local
871 memcpy(qat_req->iv, sreq->iv, AES_BLOCK_SIZE); in qat_alg_update_iv_ctr_mode()
877 iv_lo += DIV_ROUND_UP(sreq->cryptlen, AES_BLOCK_SIZE); in qat_alg_update_iv_ctr_mode()
887 struct skcipher_request *sreq = qat_req->skcipher_req; in qat_alg_update_iv_cbc_mode() local
888 int offset = sreq->cryptlen - AES_BLOCK_SIZE; in qat_alg_update_iv_cbc_mode()
892 sgl = sreq->dst; in qat_alg_update_iv_cbc_mode()
894 sgl = sreq->src; in qat_alg_update_iv_cbc_mode()
924 struct skcipher_request *sreq = qat_req->skcipher_req; in qat_skcipher_alg_callback() local
935 memcpy(sreq->iv, qat_req->iv, AES_BLOCK_SIZE); in qat_skcipher_alg_callback()
937 sreq->base.complete(&sreq->base, res); in qat_skcipher_alg_callback()
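qat_alg_update_iv_ctr_mode() advances the CTR-mode IV by the number of AES blocks the request consumed, treating the 16-byte IV as two big-endian 64-bit halves with a manual carry (line 877 is the increment). A standalone sketch of the arithmetic; the real code keeps the halves in named struct fields rather than casting:

#include <asm/byteorder.h>
#include <crypto/aes.h>		/* AES_BLOCK_SIZE */
#include <linux/kernel.h>	/* DIV_ROUND_UP */

/* Assumes the IV buffer is 8-byte aligned so the __be64 view is legal. */
static void ctr_iv_advance(u8 iv[AES_BLOCK_SIZE], u32 cryptlen)
{
	__be64 *half = (__be64 *)iv;
	u64 hi = be64_to_cpu(half[0]);
	u64 lo = be64_to_cpu(half[1]);
	u64 prev = lo;

	lo += DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE);
	if (lo < prev)		/* low half wrapped: carry into high half */
		hi++;
	half[0] = cpu_to_be64(hi);
	half[1] = cpu_to_be64(lo);
}

The CBC variant at lines 887–894 instead re-reads the last ciphertext block from dst (encrypt) or src (decrypt), the same chaining idea as the OcteonTX copyback above.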