Lines Matching +full:sl3516 +full:- +full:crypto

// SPDX-License-Identifier: GPL-2.0
/*
 * sl3516-ce-cipher.c - hardware cryptographic offloader for Storlink SL3516 SoC
 * ...
 */

#include <crypto/engine.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/dma-mapping.h>
...
#include "sl3516-ce.h"

/* sl3516_ce_need_fallback - check if a request can be handled by the CE */

In sl3516_ce_need_fallback():

	struct sl3516_ce_dev *ce = op->ce;
	...
	if (areq->cryptlen == 0 || areq->cryptlen % 16) {
		ce->fallback_mod16++;
	...
	if (sg_nents(areq->src) > MAXDESC / 2) {
		ce->fallback_sg_count_tx++;
	...
	if (sg_nents(areq->dst) > MAXDESC) {
		ce->fallback_sg_count_rx++;
	...
	sg = areq->src;
	...
		if ((sg->length % 16) != 0) {
			ce->fallback_mod16++;
		...
			ce->fallback_mod16++;
		...
		if (!IS_ALIGNED(sg->offset, 16)) {
			ce->fallback_align16++;
		...
	sg = areq->dst;
	...
		if ((sg->length % 16) != 0) {
			ce->fallback_mod16++;
		...
			ce->fallback_mod16++;
		...
		if (!IS_ALIGNED(sg->offset, 16)) {
			ce->fallback_align16++;
		...
	in_sg = areq->src;
	out_sg = areq->dst;
	...
		if (in_sg->length != out_sg->length) {
			ce->fallback_not_same_len++;
	...
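
The checks above only decide whether the hardware can take a request; the entry points (whose matched lines appear further down) act on that decision. A minimal sketch of how the result is typically consumed, assuming the usual crypto-engine pattern; the tfm-context type name is not shown in the matched lines and is used here only for illustration:

static int sl3516_ce_skdecrypt_sketch(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sl3516_ce_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);	/* ctx type name assumed */
	struct sl3516_ce_cipher_req_ctx *rctx = skcipher_request_ctx(areq);

	memset(rctx, 0, sizeof(*rctx));
	rctx->op_dir = CE_DECRYPTION;

	/* anything the engine cannot handle is rerouted to the software fallback */
	if (sl3516_ce_need_fallback(areq))
		return sl3516_ce_cipher_fallback(areq);

	/* otherwise queue the request on the crypto engine */
	return crypto_transfer_skcipher_request_to_engine(op->ce->engine, areq);
}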

In sl3516_ce_cipher_fallback():

	...
	algt->stat_fb++;

	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);
	if (rctx->op_dir == CE_DECRYPTION)
		err = crypto_skcipher_decrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	...

In sl3516_ce_cipher():

	struct sl3516_ce_dev *ce = op->ce;
	...
	dev_dbg(ce->dev, "%s %s %u %x IV(%p %u) key=%u\n", __func__,
		crypto_tfm_alg_name(areq->base.tfm),
		areq->cryptlen,
		rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
		op->keylen);
	...
	algt->stat_req++;
	...
	/* map the source and destination scatterlists for DMA */
	if (areq->src == areq->dst) {
		nr_sgs = dma_map_sg(ce->dev, areq->src, sg_nents(areq->src),
				    DMA_BIDIRECTIONAL);
		...
			dev_err(ce->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
		...
	} else {
		nr_sgs = dma_map_sg(ce->dev, areq->src, sg_nents(areq->src),
				    DMA_TO_DEVICE);
		...
			dev_err(ce->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
		...
		nr_sgd = dma_map_sg(ce->dev, areq->dst, sg_nents(areq->dst),
				    DMA_FROM_DEVICE);
		...
			dev_err(ce->dev, "Invalid sg number %d\n", nr_sgd);
			err = -EINVAL;
		...
	}

	/* fill the source descriptor table from the mapped scatterlist */
	len = areq->cryptlen;
	...
	sg = areq->src;
	...
		rctx->t_src[i].addr = sg_dma_address(sg);
		...
		rctx->t_src[i].len = todo;
		dev_dbg(ce->dev, "%s total=%u SGS(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo);
		len -= todo;
	...
		dev_err(ce->dev, "remaining len %d/%u nr_sgs=%d\n", len, areq->cryptlen, nr_sgs);
		err = -EINVAL;
	...

	/* fill the destination descriptor table */
	len = areq->cryptlen;
	...
	sg = areq->dst;
	...
		rctx->t_dst[i].addr = sg_dma_address(sg);
		...
		rctx->t_dst[i].len = todo;
		dev_dbg(ce->dev, "%s total=%u SGD(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo);
		len -= todo;
	...
		dev_err(ce->dev, "remaining len %d\n", len);
		err = -EINVAL;
	...

	/* build the control packet describing the AES-ECB operation */
	switch (algt->mode) {
	...
		rctx->pctrllen = sizeof(struct pkt_control_ecb);
		ecb = (struct pkt_control_ecb *)ce->pctrl;

		rctx->tqflag = TQ0_TYPE_CTRL;
		rctx->tqflag |= TQ1_CIPHER;
		ecb->control.op_mode = rctx->op_dir;
		ecb->control.cipher_algorithm = ECB_AES;
		ecb->cipher.header_len = 0;
		ecb->cipher.algorithm_len = areq->cryptlen;
		cpu_to_be32_array((__be32 *)ecb->key, (u32 *)op->key, op->keylen / 4);
		rctx->h = &ecb->cipher;
		...
		rctx->tqflag |= TQ4_KEY0;
		rctx->tqflag |= TQ5_KEY4;
		rctx->tqflag |= TQ6_KEY6;
		ecb->control.aesnk = op->keylen / 4;	/* key length in 32-bit words */
	...
	}

	rctx->nr_sgs = nr_sgs;
	rctx->nr_sgd = nr_sgd;
	err = sl3516_ce_run_task(ce, rctx, crypto_tfm_alg_name(areq->base.tfm));
	...
	/* undo the DMA mappings once the task has completed */
	if (areq->src == areq->dst) {
		dma_unmap_sg(ce->dev, areq->src, sg_nents(areq->src),
			     DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(ce->dev, areq->src, sg_nents(areq->src),
			     DMA_TO_DEVICE);
		dma_unmap_sg(ce->dev, areq->dst, sg_nents(areq->dst),
			     DMA_FROM_DEVICE);
	}
	...

In sl3516_ce_skdecrypt():

	rctx->op_dir = CE_DECRYPTION;
	...
	engine = op->ce->engine;
	...

In sl3516_ce_skencrypt():

	rctx->op_dir = CE_ENCRYPTION;
	...
	engine = op->ce->engine;
	...

In sl3516_ce_cipher_init():

	op->ce = algt->ce;
	...
	op->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(op->fallback_tfm)) {
		dev_err(op->ce->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(op->fallback_tfm));
		return PTR_ERR(op->fallback_tfm);
	}

	sktfm->reqsize = sizeof(struct sl3516_ce_cipher_req_ctx) +
			 crypto_skcipher_reqsize(op->fallback_tfm);

	dev_info(op->ce->dev, "Fallback for %s is %s\n",
		 crypto_tfm_alg_driver_name(&sktfm->base),
		 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(op->fallback_tfm)));

	err = pm_runtime_get_sync(op->ce->dev);
	...
	/* error unwind: release what was taken */
	pm_runtime_put_noidle(op->ce->dev);
	crypto_free_skcipher(op->fallback_tfm);
	...
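
The reqsize computed in sl3516_ce_cipher_init() reserves room for the driver's request context plus the fallback cipher's own request state, which implies the fallback request is embedded at the tail of the per-request context. A rough sketch of the two contexts this plumbing relies on; only key, keylen, ce, fallback_tfm, op_dir and fallback_req appear in the matched lines, everything else (including the struct names and layout) is illustrative:

struct sl3516_ce_cipher_tfm_ctx {		/* struct name assumed */
	u8 *key;				/* DMA-able copy of the AES key */
	u32 keylen;
	struct sl3516_ce_dev *ce;
	struct crypto_skcipher *fallback_tfm;	/* allocated in sl3516_ce_cipher_init() */
};

struct sl3516_ce_cipher_req_ctx {
	u32 op_dir;				/* CE_ENCRYPTION or CE_DECRYPTION */
	/* ... descriptor bookkeeping: t_src/t_dst, nr_sgs/nr_sgd, tqflag, ... */
	struct skcipher_request fallback_req;	/* variable-size tail, kept last */
};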

In sl3516_ce_cipher_exit():

	kfree_sensitive(op->key);
	crypto_free_skcipher(op->fallback_tfm);
	pm_runtime_put_sync_suspend(op->ce->dev);

In sl3516_ce_aes_setkey():

	struct sl3516_ce_dev *ce = op->ce;
	...
		dev_dbg(ce->dev, "ERROR: Invalid keylen %u\n", keylen);
		return -EINVAL;
	...
	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
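
Taken together, these fragments show a hardware "ecb(aes)" skcipher with a software fallback for requests the engine cannot handle. Other kernel code reaches it through the generic skcipher API rather than by calling these functions directly. A self-contained sketch of such a caller, assuming a synchronous wait; the buffer handling, flags and function name are illustrative and not taken from this driver:

#include <crypto/skcipher.h>
#include <linux/scatterlist.h>
#include <linux/err.h>

static int ecb_aes_encrypt_once(const u8 *key, unsigned int keylen,
				u8 *buf, unsigned int len)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	/* picks the highest-priority "ecb(aes)" provider, e.g. this offloader */
	tfm = crypto_alloc_skcipher("ecb(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* len must be a multiple of 16 for ECB; in-place src == dst is allowed */
	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, NULL);

	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}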