
Lines Matching +full:stm32mp1 +full:- +full:cryp

23 #define DRIVER_NAME             "stm32-cryp"
107 #define _walked_in (cryp->in_walk.offset - cryp->in_sg->offset)
108 #define _walked_out (cryp->out_walk.offset - cryp->out_sg->offset)
118 struct stm32_cryp *cryp; member
180 static inline bool is_aes(struct stm32_cryp *cryp) in is_aes() argument
182 return cryp->flags & FLG_AES; in is_aes()
185 static inline bool is_des(struct stm32_cryp *cryp) in is_des() argument
187 return cryp->flags & FLG_DES; in is_des()
190 static inline bool is_tdes(struct stm32_cryp *cryp) in is_tdes() argument
192 return cryp->flags & FLG_TDES; in is_tdes()
195 static inline bool is_ecb(struct stm32_cryp *cryp) in is_ecb() argument
197 return cryp->flags & FLG_ECB; in is_ecb()
200 static inline bool is_cbc(struct stm32_cryp *cryp) in is_cbc() argument
202 return cryp->flags & FLG_CBC; in is_cbc()
205 static inline bool is_ctr(struct stm32_cryp *cryp) in is_ctr() argument
207 return cryp->flags & FLG_CTR; in is_ctr()
210 static inline bool is_gcm(struct stm32_cryp *cryp) in is_gcm() argument
212 return cryp->flags & FLG_GCM; in is_gcm()
215 static inline bool is_ccm(struct stm32_cryp *cryp) in is_ccm() argument
217 return cryp->flags & FLG_CCM; in is_ccm()
220 static inline bool is_encrypt(struct stm32_cryp *cryp) in is_encrypt() argument
222 return cryp->flags & FLG_ENCRYPT; in is_encrypt()
225 static inline bool is_decrypt(struct stm32_cryp *cryp) in is_decrypt() argument
227 return !is_encrypt(cryp); in is_decrypt()
230 static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst) in stm32_cryp_read() argument
232 return readl_relaxed(cryp->regs + ofst); in stm32_cryp_read()
235 static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val) in stm32_cryp_write() argument
237 writel_relaxed(val, cryp->regs + ofst); in stm32_cryp_write()
240 static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp) in stm32_cryp_wait_busy() argument
244 return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status, in stm32_cryp_wait_busy()
248 static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp) in stm32_cryp_wait_enable() argument
252 return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status, in stm32_cryp_wait_enable()
256 static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp) in stm32_cryp_wait_output() argument
260 return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status, in stm32_cryp_wait_output()
264 static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
268 struct stm32_cryp *tmp, *cryp = NULL; in stm32_cryp_find_dev() local
271 if (!ctx->cryp) { in stm32_cryp_find_dev()
273 cryp = tmp; in stm32_cryp_find_dev()
276 ctx->cryp = cryp; in stm32_cryp_find_dev()
278 cryp = ctx->cryp; in stm32_cryp_find_dev()
283 return cryp; in stm32_cryp_find_dev()
295 return -EINVAL; in stm32_cryp_check_aligned()
298 if (!IS_ALIGNED(sg->offset, sizeof(u32))) in stm32_cryp_check_aligned()
299 return -EINVAL; in stm32_cryp_check_aligned()
301 if (!IS_ALIGNED(sg->length, align)) in stm32_cryp_check_aligned()
302 return -EINVAL; in stm32_cryp_check_aligned()
304 len += sg->length; in stm32_cryp_check_aligned()
309 return -EINVAL; in stm32_cryp_check_aligned()
314 static int stm32_cryp_check_io_aligned(struct stm32_cryp *cryp) in stm32_cryp_check_io_aligned() argument
318 ret = stm32_cryp_check_aligned(cryp->in_sg, cryp->total_in, in stm32_cryp_check_io_aligned()
319 cryp->hw_blocksize); in stm32_cryp_check_io_aligned()
323 ret = stm32_cryp_check_aligned(cryp->out_sg, cryp->total_out, in stm32_cryp_check_io_aligned()
324 cryp->hw_blocksize); in stm32_cryp_check_io_aligned()
343 static int stm32_cryp_copy_sgs(struct stm32_cryp *cryp) in stm32_cryp_copy_sgs() argument
348 if (!stm32_cryp_check_io_aligned(cryp)) { in stm32_cryp_copy_sgs()
349 cryp->sgs_copied = 0; in stm32_cryp_copy_sgs()
353 total_in = ALIGN(cryp->total_in, cryp->hw_blocksize); in stm32_cryp_copy_sgs()
357 total_out = ALIGN(cryp->total_out, cryp->hw_blocksize); in stm32_cryp_copy_sgs()
362 dev_err(cryp->dev, "Can't allocate pages when unaligned\n"); in stm32_cryp_copy_sgs()
363 cryp->sgs_copied = 0; in stm32_cryp_copy_sgs()
364 return -EFAULT; in stm32_cryp_copy_sgs()
367 sg_copy_buf(buf_in, cryp->in_sg, 0, cryp->total_in, 0); in stm32_cryp_copy_sgs()
369 sg_init_one(&cryp->in_sgl, buf_in, total_in); in stm32_cryp_copy_sgs()
370 cryp->in_sg = &cryp->in_sgl; in stm32_cryp_copy_sgs()
371 cryp->in_sg_len = 1; in stm32_cryp_copy_sgs()
373 sg_init_one(&cryp->out_sgl, buf_out, total_out); in stm32_cryp_copy_sgs()
374 cryp->out_sg_save = cryp->out_sg; in stm32_cryp_copy_sgs()
375 cryp->out_sg = &cryp->out_sgl; in stm32_cryp_copy_sgs()
376 cryp->out_sg_len = 1; in stm32_cryp_copy_sgs()
378 cryp->sgs_copied = 1; in stm32_cryp_copy_sgs()
383 static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, u32 *iv) in stm32_cryp_hw_write_iv() argument
388 stm32_cryp_write(cryp, CRYP_IV0LR, cpu_to_be32(*iv++)); in stm32_cryp_hw_write_iv()
389 stm32_cryp_write(cryp, CRYP_IV0RR, cpu_to_be32(*iv++)); in stm32_cryp_hw_write_iv()
391 if (is_aes(cryp)) { in stm32_cryp_hw_write_iv()
392 stm32_cryp_write(cryp, CRYP_IV1LR, cpu_to_be32(*iv++)); in stm32_cryp_hw_write_iv()
393 stm32_cryp_write(cryp, CRYP_IV1RR, cpu_to_be32(*iv++)); in stm32_cryp_hw_write_iv()
403 stm32_cryp_write(c, CRYP_K1LR, cpu_to_be32(c->ctx->key[0])); in stm32_cryp_hw_write_key()
404 stm32_cryp_write(c, CRYP_K1RR, cpu_to_be32(c->ctx->key[1])); in stm32_cryp_hw_write_key()
407 for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4) in stm32_cryp_hw_write_key()
409 cpu_to_be32(c->ctx->key[i - 1])); in stm32_cryp_hw_write_key()
413 static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp) in stm32_cryp_get_hw_mode() argument
415 if (is_aes(cryp) && is_ecb(cryp)) in stm32_cryp_get_hw_mode()
418 if (is_aes(cryp) && is_cbc(cryp)) in stm32_cryp_get_hw_mode()
421 if (is_aes(cryp) && is_ctr(cryp)) in stm32_cryp_get_hw_mode()
424 if (is_aes(cryp) && is_gcm(cryp)) in stm32_cryp_get_hw_mode()
427 if (is_aes(cryp) && is_ccm(cryp)) in stm32_cryp_get_hw_mode()
430 if (is_des(cryp) && is_ecb(cryp)) in stm32_cryp_get_hw_mode()
433 if (is_des(cryp) && is_cbc(cryp)) in stm32_cryp_get_hw_mode()
436 if (is_tdes(cryp) && is_ecb(cryp)) in stm32_cryp_get_hw_mode()
439 if (is_tdes(cryp) && is_cbc(cryp)) in stm32_cryp_get_hw_mode()
442 dev_err(cryp->dev, "Unknown mode\n"); in stm32_cryp_get_hw_mode()
446 static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp) in stm32_cryp_get_input_text_len() argument
448 return is_encrypt(cryp) ? cryp->areq->cryptlen : in stm32_cryp_get_input_text_len()
449 cryp->areq->cryptlen - cryp->authsize; in stm32_cryp_get_input_text_len()
452 static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg) in stm32_cryp_gcm_init() argument
458 memcpy(iv, cryp->areq->iv, 12); in stm32_cryp_gcm_init()
460 cryp->gcm_ctr = GCM_CTR_INIT; in stm32_cryp_gcm_init()
461 stm32_cryp_hw_write_iv(cryp, iv); in stm32_cryp_gcm_init()
463 stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN); in stm32_cryp_gcm_init()
466 ret = stm32_cryp_wait_enable(cryp); in stm32_cryp_gcm_init()
468 dev_err(cryp->dev, "Timeout (gcm init)\n"); in stm32_cryp_gcm_init()
473 static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg) in stm32_cryp_ccm_init() argument
481 memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE); in stm32_cryp_ccm_init()
482 memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1); in stm32_cryp_ccm_init()
483 iv[AES_BLOCK_SIZE - 1] = 1; in stm32_cryp_ccm_init()
484 stm32_cryp_hw_write_iv(cryp, (u32 *)iv); in stm32_cryp_ccm_init()
489 b0[0] |= (8 * ((cryp->authsize - 2) / 2)); in stm32_cryp_ccm_init()
491 if (cryp->areq->assoclen) in stm32_cryp_ccm_init()
494 textlen = stm32_cryp_get_input_text_len(cryp); in stm32_cryp_ccm_init()
496 b0[AES_BLOCK_SIZE - 2] = textlen >> 8; in stm32_cryp_ccm_init()
497 b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF; in stm32_cryp_ccm_init()
500 stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN); in stm32_cryp_ccm_init()
506 if (!cryp->caps->padding_wa) in stm32_cryp_ccm_init()
508 stm32_cryp_write(cryp, CRYP_DIN, *d++); in stm32_cryp_ccm_init()
512 ret = stm32_cryp_wait_enable(cryp); in stm32_cryp_ccm_init()
514 dev_err(cryp->dev, "Timeout (ccm init)\n"); in stm32_cryp_ccm_init()
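/*
 * Added example (not part of the driver): a minimal sketch of how the
 * CCM B0 flags byte written above is built, following RFC 3610. The
 * helper name and parameters are hypothetical, for illustration only;
 * u8/bool come from <linux/types.h>.
 */
static u8 example_ccm_b0_flags(u8 q_minus_1, unsigned int authsize, bool has_aad)
{
	u8 flags = q_minus_1;			/* low 3 bits: L' = q - 1, taken from iv[0] */

	flags |= 8 * ((authsize - 2) / 2);	/* bits 3..5 encode (t - 2) / 2 */
	if (has_aad)
		flags |= 0x40;			/* bit 6: Adata, set when assoclen != 0 */

	return flags;
}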
519 static int stm32_cryp_hw_init(struct stm32_cryp *cryp) in stm32_cryp_hw_init() argument
524 pm_runtime_get_sync(cryp->dev); in stm32_cryp_hw_init()
527 stm32_cryp_write(cryp, CRYP_IMSCR, 0); in stm32_cryp_hw_init()
530 stm32_cryp_hw_write_key(cryp); in stm32_cryp_hw_init()
535 switch (cryp->ctx->keylen) { in stm32_cryp_hw_init()
550 hw_mode = stm32_cryp_get_hw_mode(cryp); in stm32_cryp_hw_init()
552 return -EINVAL; in stm32_cryp_hw_init()
555 if (is_decrypt(cryp) && in stm32_cryp_hw_init()
557 stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP | CR_CRYPEN); in stm32_cryp_hw_init()
560 ret = stm32_cryp_wait_busy(cryp); in stm32_cryp_hw_init()
562 dev_err(cryp->dev, "Timeout (key preparation)\n"); in stm32_cryp_hw_init()
569 if (is_decrypt(cryp)) in stm32_cryp_hw_init()
573 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_hw_init()
580 ret = stm32_cryp_ccm_init(cryp, cfg); in stm32_cryp_hw_init()
582 ret = stm32_cryp_gcm_init(cryp, cfg); in stm32_cryp_hw_init()
588 if (cryp->areq->assoclen) { in stm32_cryp_hw_init()
590 } else if (stm32_cryp_get_input_text_len(cryp)) { in stm32_cryp_hw_init()
592 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_hw_init()
603 stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->req->info); in stm32_cryp_hw_init()
613 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_hw_init()
615 cryp->flags &= ~FLG_CCM_PADDED_WA; in stm32_cryp_hw_init()
620 static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err) in stm32_cryp_finish_req() argument
622 if (!err && (is_gcm(cryp) || is_ccm(cryp))) in stm32_cryp_finish_req()
624 err = stm32_cryp_read_auth_tag(cryp); in stm32_cryp_finish_req()
626 if (cryp->sgs_copied) { in stm32_cryp_finish_req()
630 buf_in = sg_virt(&cryp->in_sgl); in stm32_cryp_finish_req()
631 buf_out = sg_virt(&cryp->out_sgl); in stm32_cryp_finish_req()
633 sg_copy_buf(buf_out, cryp->out_sg_save, 0, in stm32_cryp_finish_req()
634 cryp->total_out_save, 1); in stm32_cryp_finish_req()
636 len = ALIGN(cryp->total_in_save, cryp->hw_blocksize); in stm32_cryp_finish_req()
640 len = ALIGN(cryp->total_out_save, cryp->hw_blocksize); in stm32_cryp_finish_req()
645 pm_runtime_mark_last_busy(cryp->dev); in stm32_cryp_finish_req()
646 pm_runtime_put_autosuspend(cryp->dev); in stm32_cryp_finish_req()
648 if (is_gcm(cryp) || is_ccm(cryp)) { in stm32_cryp_finish_req()
649 crypto_finalize_aead_request(cryp->engine, cryp->areq, err); in stm32_cryp_finish_req()
650 cryp->areq = NULL; in stm32_cryp_finish_req()
652 crypto_finalize_ablkcipher_request(cryp->engine, cryp->req, in stm32_cryp_finish_req()
654 cryp->req = NULL; in stm32_cryp_finish_req()
657 memset(cryp->ctx->key, 0, cryp->ctx->keylen); in stm32_cryp_finish_req()
659 mutex_unlock(&cryp->lock); in stm32_cryp_finish_req()
662 static int stm32_cryp_cpu_start(struct stm32_cryp *cryp) in stm32_cryp_cpu_start() argument
665 stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT); in stm32_cryp_cpu_start()
678 tfm->crt_ablkcipher.reqsize = sizeof(struct stm32_cryp_reqctx); in stm32_cryp_cra_init()
680 ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req; in stm32_cryp_cra_init()
681 ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req; in stm32_cryp_cra_init()
682 ctx->enginectx.op.unprepare_request = NULL; in stm32_cryp_cra_init()
694 tfm->reqsize = sizeof(struct stm32_cryp_reqctx); in stm32_cryp_aes_aead_init()
696 ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req; in stm32_cryp_aes_aead_init()
697 ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req; in stm32_cryp_aes_aead_init()
698 ctx->enginectx.op.unprepare_request = NULL; in stm32_cryp_aes_aead_init()
708 struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx); in stm32_cryp_crypt() local
710 if (!cryp) in stm32_cryp_crypt()
711 return -ENODEV; in stm32_cryp_crypt()
713 rctx->mode = mode; in stm32_cryp_crypt()
715 return crypto_transfer_ablkcipher_request_to_engine(cryp->engine, req); in stm32_cryp_crypt()
722 struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx); in stm32_cryp_aead_crypt() local
724 if (!cryp) in stm32_cryp_aead_crypt()
725 return -ENODEV; in stm32_cryp_aead_crypt()
727 rctx->mode = mode; in stm32_cryp_aead_crypt()
729 return crypto_transfer_aead_request_to_engine(cryp->engine, req); in stm32_cryp_aead_crypt()
737 memcpy(ctx->key, key, keylen); in stm32_cryp_setkey()
738 ctx->keylen = keylen; in stm32_cryp_setkey()
748 return -EINVAL; in stm32_cryp_aes_setkey()
757 return -EINVAL; in stm32_cryp_des_setkey()
766 return -EINVAL; in stm32_cryp_tdes_setkey()
778 return -EINVAL; in stm32_cryp_aes_aead_setkey()
780 memcpy(ctx->key, key, keylen); in stm32_cryp_aes_aead_setkey()
781 ctx->keylen = keylen; in stm32_cryp_aes_aead_setkey()
789 return authsize == AES_BLOCK_SIZE ? 0 : -EINVAL; in stm32_cryp_aes_gcm_setauthsize()
805 return -EINVAL; in stm32_cryp_aes_ccm_setauthsize()
905 struct stm32_cryp *cryp; in stm32_cryp_prepare_req() local
910 return -EINVAL; in stm32_cryp_prepare_req()
915 cryp = ctx->cryp; in stm32_cryp_prepare_req()
917 if (!cryp) in stm32_cryp_prepare_req()
918 return -ENODEV; in stm32_cryp_prepare_req()
920 mutex_lock(&cryp->lock); in stm32_cryp_prepare_req()
923 rctx->mode &= FLG_MODE_MASK; in stm32_cryp_prepare_req()
925 ctx->cryp = cryp; in stm32_cryp_prepare_req()
927 cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode; in stm32_cryp_prepare_req()
928 cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE; in stm32_cryp_prepare_req()
929 cryp->ctx = ctx; in stm32_cryp_prepare_req()
932 cryp->req = req; in stm32_cryp_prepare_req()
933 cryp->total_in = req->nbytes; in stm32_cryp_prepare_req()
934 cryp->total_out = cryp->total_in; in stm32_cryp_prepare_req()
940 * <- assoclen -> <- cryptlen -> in stm32_cryp_prepare_req()
941 * <------- total_in -----------> in stm32_cryp_prepare_req()
944 * <- assoclen -> <- cryptlen -> <- authsize -> in stm32_cryp_prepare_req()
945 * <---------------- total_out -----------------> in stm32_cryp_prepare_req()
949 * <- assoclen -> <--------- cryptlen ---------> in stm32_cryp_prepare_req()
950 * <- authsize -> in stm32_cryp_prepare_req()
951 * <---------------- total_in ------------------> in stm32_cryp_prepare_req()
954 * <- assoclen -> <- cryptlen - authsize -> in stm32_cryp_prepare_req()
955 * <---------- total_out -----------------> in stm32_cryp_prepare_req()
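 *
 * Worked example (added for illustration, not part of the original
 * comment): a GCM encrypt request with assoclen = 16, cryptlen = 64 and
 * authsize = 16 gives total_in = 80 and total_out = 96; the matching
 * decrypt request carries the tag in its input (cryptlen = 80), giving
 * total_in = 96 and total_out = 80.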
957 cryp->areq = areq; in stm32_cryp_prepare_req()
958 cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq)); in stm32_cryp_prepare_req()
959 cryp->total_in = areq->assoclen + areq->cryptlen; in stm32_cryp_prepare_req()
960 if (is_encrypt(cryp)) in stm32_cryp_prepare_req()
962 cryp->total_out = cryp->total_in + cryp->authsize; in stm32_cryp_prepare_req()
965 cryp->total_out = cryp->total_in - cryp->authsize; in stm32_cryp_prepare_req()
968 cryp->total_in_save = cryp->total_in; in stm32_cryp_prepare_req()
969 cryp->total_out_save = cryp->total_out; in stm32_cryp_prepare_req()
971 cryp->in_sg = req ? req->src : areq->src; in stm32_cryp_prepare_req()
972 cryp->out_sg = req ? req->dst : areq->dst; in stm32_cryp_prepare_req()
973 cryp->out_sg_save = cryp->out_sg; in stm32_cryp_prepare_req()
975 cryp->in_sg_len = sg_nents_for_len(cryp->in_sg, cryp->total_in); in stm32_cryp_prepare_req()
976 if (cryp->in_sg_len < 0) { in stm32_cryp_prepare_req()
977 dev_err(cryp->dev, "Cannot get in_sg_len\n"); in stm32_cryp_prepare_req()
978 ret = cryp->in_sg_len; in stm32_cryp_prepare_req()
982 cryp->out_sg_len = sg_nents_for_len(cryp->out_sg, cryp->total_out); in stm32_cryp_prepare_req()
983 if (cryp->out_sg_len < 0) { in stm32_cryp_prepare_req()
984 dev_err(cryp->dev, "Cannot get out_sg_len\n"); in stm32_cryp_prepare_req()
985 ret = cryp->out_sg_len; in stm32_cryp_prepare_req()
989 ret = stm32_cryp_copy_sgs(cryp); in stm32_cryp_prepare_req()
993 scatterwalk_start(&cryp->in_walk, cryp->in_sg); in stm32_cryp_prepare_req()
994 scatterwalk_start(&cryp->out_walk, cryp->out_sg); in stm32_cryp_prepare_req()
996 if (is_gcm(cryp) || is_ccm(cryp)) { in stm32_cryp_prepare_req()
998 scatterwalk_advance(&cryp->out_walk, cryp->areq->assoclen); in stm32_cryp_prepare_req()
999 cryp->total_out -= cryp->areq->assoclen; in stm32_cryp_prepare_req()
1002 ret = stm32_cryp_hw_init(cryp); in stm32_cryp_prepare_req()
1005 mutex_unlock(&cryp->lock); in stm32_cryp_prepare_req()
1027 struct stm32_cryp *cryp = ctx->cryp; in stm32_cryp_cipher_one_req() local
1029 if (!cryp) in stm32_cryp_cipher_one_req()
1030 return -ENODEV; in stm32_cryp_cipher_one_req()
1032 return stm32_cryp_cpu_start(cryp); in stm32_cryp_cipher_one_req()
1048 struct stm32_cryp *cryp = ctx->cryp; in stm32_cryp_aead_one_req() local
1050 if (!cryp) in stm32_cryp_aead_one_req()
1051 return -ENODEV; in stm32_cryp_aead_one_req()
1053 if (unlikely(!cryp->areq->assoclen && in stm32_cryp_aead_one_req()
1054 !stm32_cryp_get_input_text_len(cryp))) { in stm32_cryp_aead_one_req()
1056 stm32_cryp_finish_req(cryp, 0); in stm32_cryp_aead_one_req()
1060 return stm32_cryp_cpu_start(cryp); in stm32_cryp_aead_one_req()
1063 static u32 *stm32_cryp_next_out(struct stm32_cryp *cryp, u32 *dst, in stm32_cryp_next_out() argument
1066 scatterwalk_advance(&cryp->out_walk, n); in stm32_cryp_next_out()
1068 if (unlikely(cryp->out_sg->length == _walked_out)) { in stm32_cryp_next_out()
1069 cryp->out_sg = sg_next(cryp->out_sg); in stm32_cryp_next_out()
1070 if (cryp->out_sg) { in stm32_cryp_next_out()
1071 scatterwalk_start(&cryp->out_walk, cryp->out_sg); in stm32_cryp_next_out()
1072 return (sg_virt(cryp->out_sg) + _walked_out); in stm32_cryp_next_out()
1079 static u32 *stm32_cryp_next_in(struct stm32_cryp *cryp, u32 *src, in stm32_cryp_next_in() argument
1082 scatterwalk_advance(&cryp->in_walk, n); in stm32_cryp_next_in()
1084 if (unlikely(cryp->in_sg->length == _walked_in)) { in stm32_cryp_next_in()
1085 cryp->in_sg = sg_next(cryp->in_sg); in stm32_cryp_next_in()
1086 if (cryp->in_sg) { in stm32_cryp_next_in()
1087 scatterwalk_start(&cryp->in_walk, cryp->in_sg); in stm32_cryp_next_in()
1088 return (sg_virt(cryp->in_sg) + _walked_in); in stm32_cryp_next_in()
1095 static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp) in stm32_cryp_read_auth_tag() argument
1103 cfg = stm32_cryp_read(cryp, CRYP_CR); in stm32_cryp_read_auth_tag()
1110 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_read_auth_tag()
1112 if (is_gcm(cryp)) { in stm32_cryp_read_auth_tag()
1114 size_bit = cryp->areq->assoclen * 8; in stm32_cryp_read_auth_tag()
1115 if (cryp->caps->swap_final) in stm32_cryp_read_auth_tag()
1118 stm32_cryp_write(cryp, CRYP_DIN, 0); in stm32_cryp_read_auth_tag()
1119 stm32_cryp_write(cryp, CRYP_DIN, size_bit); in stm32_cryp_read_auth_tag()
1121 size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen : in stm32_cryp_read_auth_tag()
1122 cryp->areq->cryptlen - AES_BLOCK_SIZE; in stm32_cryp_read_auth_tag()
1124 if (cryp->caps->swap_final) in stm32_cryp_read_auth_tag()
1127 stm32_cryp_write(cryp, CRYP_DIN, 0); in stm32_cryp_read_auth_tag()
1128 stm32_cryp_write(cryp, CRYP_DIN, size_bit); in stm32_cryp_read_auth_tag()
1134 memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE); in stm32_cryp_read_auth_tag()
1135 memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1); in stm32_cryp_read_auth_tag()
1138 if (!cryp->caps->padding_wa) in stm32_cryp_read_auth_tag()
1140 stm32_cryp_write(cryp, CRYP_DIN, *iv32++); in stm32_cryp_read_auth_tag()
1145 ret = stm32_cryp_wait_output(cryp); in stm32_cryp_read_auth_tag()
1147 dev_err(cryp->dev, "Timeout (read tag)\n"); in stm32_cryp_read_auth_tag()
1151 if (is_encrypt(cryp)) { in stm32_cryp_read_auth_tag()
1153 dst = sg_virt(cryp->out_sg) + _walked_out; in stm32_cryp_read_auth_tag()
1156 if (cryp->total_out >= sizeof(u32)) { in stm32_cryp_read_auth_tag()
1158 *dst = stm32_cryp_read(cryp, CRYP_DOUT); in stm32_cryp_read_auth_tag()
1160 dst = stm32_cryp_next_out(cryp, dst, in stm32_cryp_read_auth_tag()
1162 cryp->total_out -= sizeof(u32); in stm32_cryp_read_auth_tag()
1163 } else if (!cryp->total_out) { in stm32_cryp_read_auth_tag()
1165 stm32_cryp_read(cryp, CRYP_DOUT); in stm32_cryp_read_auth_tag()
1168 d32 = stm32_cryp_read(cryp, CRYP_DOUT); in stm32_cryp_read_auth_tag()
1171 for (j = 0; j < cryp->total_out; j++) { in stm32_cryp_read_auth_tag()
1173 dst = stm32_cryp_next_out(cryp, dst, 1); in stm32_cryp_read_auth_tag()
1175 cryp->total_out = 0; in stm32_cryp_read_auth_tag()
1182 scatterwalk_map_and_copy(in_tag, cryp->in_sg, in stm32_cryp_read_auth_tag()
1183 cryp->total_in_save - cryp->authsize, in stm32_cryp_read_auth_tag()
1184 cryp->authsize, 0); in stm32_cryp_read_auth_tag()
1187 out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT); in stm32_cryp_read_auth_tag()
1189 if (crypto_memneq(in_tag, out_tag, cryp->authsize)) in stm32_cryp_read_auth_tag()
1190 ret = -EBADMSG; in stm32_cryp_read_auth_tag()
1193 /* Disable cryp */ in stm32_cryp_read_auth_tag()
1195 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_read_auth_tag()
1200 static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp) in stm32_cryp_check_ctr_counter() argument
1204 if (unlikely(cryp->last_ctr[3] == 0xFFFFFFFF)) { in stm32_cryp_check_ctr_counter()
1205 cryp->last_ctr[3] = 0; in stm32_cryp_check_ctr_counter()
1206 cryp->last_ctr[2]++; in stm32_cryp_check_ctr_counter()
1207 if (!cryp->last_ctr[2]) { in stm32_cryp_check_ctr_counter()
1208 cryp->last_ctr[1]++; in stm32_cryp_check_ctr_counter()
1209 if (!cryp->last_ctr[1]) in stm32_cryp_check_ctr_counter()
1210 cryp->last_ctr[0]++; in stm32_cryp_check_ctr_counter()
1213 cr = stm32_cryp_read(cryp, CRYP_CR); in stm32_cryp_check_ctr_counter()
1214 stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN); in stm32_cryp_check_ctr_counter()
1216 stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->last_ctr); in stm32_cryp_check_ctr_counter()
1218 stm32_cryp_write(cryp, CRYP_CR, cr); in stm32_cryp_check_ctr_counter()
1221 cryp->last_ctr[0] = stm32_cryp_read(cryp, CRYP_IV0LR); in stm32_cryp_check_ctr_counter()
1222 cryp->last_ctr[1] = stm32_cryp_read(cryp, CRYP_IV0RR); in stm32_cryp_check_ctr_counter()
1223 cryp->last_ctr[2] = stm32_cryp_read(cryp, CRYP_IV1LR); in stm32_cryp_check_ctr_counter()
1224 cryp->last_ctr[3] = stm32_cryp_read(cryp, CRYP_IV1RR); in stm32_cryp_check_ctr_counter()
1227 static bool stm32_cryp_irq_read_data(struct stm32_cryp *cryp) in stm32_cryp_irq_read_data() argument
1235 if (is_encrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp))) in stm32_cryp_irq_read_data()
1236 tag_size = cryp->authsize; in stm32_cryp_irq_read_data()
1240 dst = sg_virt(cryp->out_sg) + _walked_out; in stm32_cryp_irq_read_data()
1242 for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) { in stm32_cryp_irq_read_data()
1243 if (likely(cryp->total_out - tag_size >= sizeof(u32))) { in stm32_cryp_irq_read_data()
1245 *dst = stm32_cryp_read(cryp, CRYP_DOUT); in stm32_cryp_irq_read_data()
1247 dst = stm32_cryp_next_out(cryp, dst, sizeof(u32)); in stm32_cryp_irq_read_data()
1248 cryp->total_out -= sizeof(u32); in stm32_cryp_irq_read_data()
1249 } else if (cryp->total_out == tag_size) { in stm32_cryp_irq_read_data()
1251 d32 = stm32_cryp_read(cryp, CRYP_DOUT); in stm32_cryp_irq_read_data()
1254 d32 = stm32_cryp_read(cryp, CRYP_DOUT); in stm32_cryp_irq_read_data()
1257 for (j = 0; j < cryp->total_out - tag_size; j++) { in stm32_cryp_irq_read_data()
1259 dst = stm32_cryp_next_out(cryp, dst, 1); in stm32_cryp_irq_read_data()
1261 cryp->total_out = tag_size; in stm32_cryp_irq_read_data()
1265 return !(cryp->total_out - tag_size) || !cryp->total_in; in stm32_cryp_irq_read_data()
1268 static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp) in stm32_cryp_irq_write_block() argument
1276 if (is_decrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp))) in stm32_cryp_irq_write_block()
1277 tag_size = cryp->authsize; in stm32_cryp_irq_write_block()
1281 src = sg_virt(cryp->in_sg) + _walked_in; in stm32_cryp_irq_write_block()
1283 for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) { in stm32_cryp_irq_write_block()
1284 if (likely(cryp->total_in - tag_size >= sizeof(u32))) { in stm32_cryp_irq_write_block()
1286 stm32_cryp_write(cryp, CRYP_DIN, *src); in stm32_cryp_irq_write_block()
1288 src = stm32_cryp_next_in(cryp, src, sizeof(u32)); in stm32_cryp_irq_write_block()
1289 cryp->total_in -= sizeof(u32); in stm32_cryp_irq_write_block()
1290 } else if (cryp->total_in == tag_size) { in stm32_cryp_irq_write_block()
1292 stm32_cryp_write(cryp, CRYP_DIN, 0); in stm32_cryp_irq_write_block()
1296 for (j = 0; j < cryp->total_in - tag_size; j++) { in stm32_cryp_irq_write_block()
1298 src = stm32_cryp_next_in(cryp, src, 1); in stm32_cryp_irq_write_block()
1301 stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8); in stm32_cryp_irq_write_block()
1302 cryp->total_in = tag_size; in stm32_cryp_irq_write_block()
1307 static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp) in stm32_cryp_irq_write_gcm_padded_data() argument
1311 size_t total_in_ori = cryp->total_in; in stm32_cryp_irq_write_gcm_padded_data()
1312 struct scatterlist *out_sg_ori = cryp->out_sg; in stm32_cryp_irq_write_gcm_padded_data()
1318 stm32_cryp_write(cryp, CRYP_IMSCR, 0); in stm32_cryp_irq_write_gcm_padded_data()
1319 cfg = stm32_cryp_read(cryp, CRYP_CR); in stm32_cryp_irq_write_gcm_padded_data()
1321 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_gcm_padded_data()
1324 stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2); in stm32_cryp_irq_write_gcm_padded_data()
1329 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_gcm_padded_data()
1333 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_gcm_padded_data()
1336 stm32_cryp_irq_write_block(cryp); in stm32_cryp_irq_write_gcm_padded_data()
1337 cryp->total_in = total_in_ori; in stm32_cryp_irq_write_gcm_padded_data()
1338 err = stm32_cryp_wait_output(cryp); in stm32_cryp_irq_write_gcm_padded_data()
1340 dev_err(cryp->dev, "Timeout (write gcm header)\n"); in stm32_cryp_irq_write_gcm_padded_data()
1341 return stm32_cryp_finish_req(cryp, err); in stm32_cryp_irq_write_gcm_padded_data()
1345 stm32_cryp_irq_read_data(cryp); in stm32_cryp_irq_write_gcm_padded_data()
1347 cryp->total_in_save - total_in_ori, in stm32_cryp_irq_write_gcm_padded_data()
1353 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_gcm_padded_data()
1358 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_gcm_padded_data()
1362 if (cryp->total_in) in stm32_cryp_irq_write_gcm_padded_data()
1363 stm32_cryp_write(cryp, CRYP_DIN, tmp[i]); in stm32_cryp_irq_write_gcm_padded_data()
1365 stm32_cryp_write(cryp, CRYP_DIN, 0); in stm32_cryp_irq_write_gcm_padded_data()
1367 cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in); in stm32_cryp_irq_write_gcm_padded_data()
1371 err = stm32_cryp_wait_output(cryp); in stm32_cryp_irq_write_gcm_padded_data()
1373 dev_err(cryp->dev, "Timeout (write gcm header)\n"); in stm32_cryp_irq_write_gcm_padded_data()
1374 return stm32_cryp_finish_req(cryp, err); in stm32_cryp_irq_write_gcm_padded_data()
1378 stm32_cryp_read(cryp, CRYP_DOUT); in stm32_cryp_irq_write_gcm_padded_data()
1381 stm32_cryp_finish_req(cryp, 0); in stm32_cryp_irq_write_gcm_padded_data()
1384 static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp) in stm32_cryp_irq_set_npblb() argument
1389 cfg = stm32_cryp_read(cryp, CRYP_CR); in stm32_cryp_irq_set_npblb()
1391 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_set_npblb()
1393 payload_bytes = is_decrypt(cryp) ? cryp->total_in - cryp->authsize : in stm32_cryp_irq_set_npblb()
1394 cryp->total_in; in stm32_cryp_irq_set_npblb()
1395 cfg |= (cryp->hw_blocksize - payload_bytes) << CR_NBPBL_SHIFT; in stm32_cryp_irq_set_npblb()
1397 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_set_npblb()
1400 static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp) in stm32_cryp_irq_write_ccm_padded_data() argument
1405 size_t last_total_out, total_in_ori = cryp->total_in; in stm32_cryp_irq_write_ccm_padded_data()
1406 struct scatterlist *out_sg_ori = cryp->out_sg; in stm32_cryp_irq_write_ccm_padded_data()
1410 cryp->flags |= FLG_CCM_PADDED_WA; in stm32_cryp_irq_write_ccm_padded_data()
1413 stm32_cryp_write(cryp, CRYP_IMSCR, 0); in stm32_cryp_irq_write_ccm_padded_data()
1415 cfg = stm32_cryp_read(cryp, CRYP_CR); in stm32_cryp_irq_write_ccm_padded_data()
1417 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_ccm_padded_data()
1420 iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4); in stm32_cryp_irq_write_ccm_padded_data()
1424 cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4); in stm32_cryp_irq_write_ccm_padded_data()
1427 stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp); in stm32_cryp_irq_write_ccm_padded_data()
1432 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_ccm_padded_data()
1436 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_ccm_padded_data()
1439 stm32_cryp_irq_write_block(cryp); in stm32_cryp_irq_write_ccm_padded_data()
1440 cryp->total_in = total_in_ori; in stm32_cryp_irq_write_ccm_padded_data()
1441 err = stm32_cryp_wait_output(cryp); in stm32_cryp_irq_write_ccm_padded_data()
1443 dev_err(cryp->dev, "Timeout (write ccm padded data)\n"); in stm32_cryp_irq_write_ccm_padded_data()
1444 return stm32_cryp_finish_req(cryp, err); in stm32_cryp_irq_write_ccm_padded_data()
1448 last_total_out = cryp->total_out; in stm32_cryp_irq_write_ccm_padded_data()
1449 stm32_cryp_irq_read_data(cryp); in stm32_cryp_irq_write_ccm_padded_data()
1453 cryp->total_out_save - last_total_out, in stm32_cryp_irq_write_ccm_padded_data()
1458 cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4); in stm32_cryp_irq_write_ccm_padded_data()
1463 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_ccm_padded_data()
1468 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_ccm_padded_data()
1474 stm32_cryp_write(cryp, CRYP_DIN, tmp[i]); in stm32_cryp_irq_write_ccm_padded_data()
1478 err = stm32_cryp_wait_busy(cryp); in stm32_cryp_irq_write_ccm_padded_data()
1480 dev_err(cryp->dev, "Timeout (write ccm padded data)\n"); in stm32_cryp_irq_write_ccm_padded_data()
1483 stm32_cryp_finish_req(cryp, err); in stm32_cryp_irq_write_ccm_padded_data()
1486 static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp) in stm32_cryp_irq_write_data() argument
1488 if (unlikely(!cryp->total_in)) { in stm32_cryp_irq_write_data()
1489 dev_warn(cryp->dev, "No more data to process\n"); in stm32_cryp_irq_write_data()
1493 if (unlikely(cryp->total_in < AES_BLOCK_SIZE && in stm32_cryp_irq_write_data()
1494 (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) && in stm32_cryp_irq_write_data()
1495 is_encrypt(cryp))) { in stm32_cryp_irq_write_data()
1497 if (cryp->caps->padding_wa) in stm32_cryp_irq_write_data()
1499 return stm32_cryp_irq_write_gcm_padded_data(cryp); in stm32_cryp_irq_write_data()
1502 stm32_cryp_irq_set_npblb(cryp); in stm32_cryp_irq_write_data()
1505 if (unlikely((cryp->total_in - cryp->authsize < AES_BLOCK_SIZE) && in stm32_cryp_irq_write_data()
1506 (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) && in stm32_cryp_irq_write_data()
1507 is_decrypt(cryp))) { in stm32_cryp_irq_write_data()
1509 if (cryp->caps->padding_wa) in stm32_cryp_irq_write_data()
1511 return stm32_cryp_irq_write_ccm_padded_data(cryp); in stm32_cryp_irq_write_data()
1514 stm32_cryp_irq_set_npblb(cryp); in stm32_cryp_irq_write_data()
1517 if (is_aes(cryp) && is_ctr(cryp)) in stm32_cryp_irq_write_data()
1518 stm32_cryp_check_ctr_counter(cryp); in stm32_cryp_irq_write_data()
1520 stm32_cryp_irq_write_block(cryp); in stm32_cryp_irq_write_data()
1523 static void stm32_cryp_irq_write_gcm_header(struct stm32_cryp *cryp) in stm32_cryp_irq_write_gcm_header() argument
1529 src = sg_virt(cryp->in_sg) + _walked_in; in stm32_cryp_irq_write_gcm_header()
1532 stm32_cryp_write(cryp, CRYP_DIN, *src); in stm32_cryp_irq_write_gcm_header()
1534 src = stm32_cryp_next_in(cryp, src, sizeof(u32)); in stm32_cryp_irq_write_gcm_header()
1535 cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in); in stm32_cryp_irq_write_gcm_header()
1538 if ((cryp->total_in_save - cryp->total_in) == in stm32_cryp_irq_write_gcm_header()
1539 cryp->areq->assoclen) { in stm32_cryp_irq_write_gcm_header()
1542 stm32_cryp_write(cryp, CRYP_DIN, 0); in stm32_cryp_irq_write_gcm_header()
1545 err = stm32_cryp_wait_busy(cryp); in stm32_cryp_irq_write_gcm_header()
1547 dev_err(cryp->dev, "Timeout (gcm header)\n"); in stm32_cryp_irq_write_gcm_header()
1548 return stm32_cryp_finish_req(cryp, err); in stm32_cryp_irq_write_gcm_header()
1551 if (stm32_cryp_get_input_text_len(cryp)) { in stm32_cryp_irq_write_gcm_header()
1553 cfg = stm32_cryp_read(cryp, CRYP_CR); in stm32_cryp_irq_write_gcm_header()
1555 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_gcm_header()
1560 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_gcm_header()
1563 stm32_cryp_write(cryp, CRYP_IMSCR, 0); in stm32_cryp_irq_write_gcm_header()
1564 stm32_cryp_finish_req(cryp, 0); in stm32_cryp_irq_write_gcm_header()
1570 if (!cryp->total_in) in stm32_cryp_irq_write_gcm_header()
1575 static void stm32_cryp_irq_write_ccm_header(struct stm32_cryp *cryp) in stm32_cryp_irq_write_ccm_header() argument
1582 src = sg_virt(cryp->in_sg) + _walked_in; in stm32_cryp_irq_write_ccm_header()
1583 alen = cryp->areq->assoclen; in stm32_cryp_irq_write_ccm_header()
1586 if (cryp->areq->assoclen <= 65280) { in stm32_cryp_irq_write_ccm_header()
1591 src = stm32_cryp_next_in(cryp, src, 1); in stm32_cryp_irq_write_ccm_header()
1593 src = stm32_cryp_next_in(cryp, src, 1); in stm32_cryp_irq_write_ccm_header()
1595 stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8); in stm32_cryp_irq_write_ccm_header()
1598 cryp->total_in -= min_t(size_t, 2, cryp->total_in); in stm32_cryp_irq_write_ccm_header()
1606 stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8); in stm32_cryp_irq_write_ccm_header()
1612 src = stm32_cryp_next_in(cryp, src, 1); in stm32_cryp_irq_write_ccm_header()
1614 src = stm32_cryp_next_in(cryp, src, 1); in stm32_cryp_irq_write_ccm_header()
1616 stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8); in stm32_cryp_irq_write_ccm_header()
1619 cryp->total_in -= min_t(size_t, 2, cryp->total_in); in stm32_cryp_irq_write_ccm_header()
1629 src = stm32_cryp_next_in(cryp, src, 1); in stm32_cryp_irq_write_ccm_header()
1631 cryp->total_in -= min_t(size_t, 1, cryp->total_in); in stm32_cryp_irq_write_ccm_header()
1632 if ((cryp->total_in_save - cryp->total_in) == alen) in stm32_cryp_irq_write_ccm_header()
1636 stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8); in stm32_cryp_irq_write_ccm_header()
1638 if ((cryp->total_in_save - cryp->total_in) == alen) { in stm32_cryp_irq_write_ccm_header()
1641 stm32_cryp_write(cryp, CRYP_DIN, 0); in stm32_cryp_irq_write_ccm_header()
1644 err = stm32_cryp_wait_busy(cryp); in stm32_cryp_irq_write_ccm_header()
1646 dev_err(cryp->dev, "Timeout (ccm header)\n"); in stm32_cryp_irq_write_ccm_header()
1647 return stm32_cryp_finish_req(cryp, err); in stm32_cryp_irq_write_ccm_header()
1650 if (stm32_cryp_get_input_text_len(cryp)) { in stm32_cryp_irq_write_ccm_header()
1652 cfg = stm32_cryp_read(cryp, CRYP_CR); in stm32_cryp_irq_write_ccm_header()
1654 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_ccm_header()
1659 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_ccm_header()
1662 stm32_cryp_write(cryp, CRYP_IMSCR, 0); in stm32_cryp_irq_write_ccm_header()
1663 stm32_cryp_finish_req(cryp, 0); in stm32_cryp_irq_write_ccm_header()
1673 struct stm32_cryp *cryp = arg; in stm32_cryp_irq_thread() local
1676 if (cryp->irq_status & MISR_OUT) in stm32_cryp_irq_thread()
1678 if (unlikely(stm32_cryp_irq_read_data(cryp))) { in stm32_cryp_irq_thread()
1680 stm32_cryp_write(cryp, CRYP_IMSCR, 0); in stm32_cryp_irq_thread()
1681 stm32_cryp_finish_req(cryp, 0); in stm32_cryp_irq_thread()
1685 if (cryp->irq_status & MISR_IN) { in stm32_cryp_irq_thread()
1686 if (is_gcm(cryp)) { in stm32_cryp_irq_thread()
1687 ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK; in stm32_cryp_irq_thread()
1690 stm32_cryp_irq_write_gcm_header(cryp); in stm32_cryp_irq_thread()
1693 stm32_cryp_irq_write_data(cryp); in stm32_cryp_irq_thread()
1694 cryp->gcm_ctr++; in stm32_cryp_irq_thread()
1695 } else if (is_ccm(cryp)) { in stm32_cryp_irq_thread()
1696 ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK; in stm32_cryp_irq_thread()
1699 stm32_cryp_irq_write_ccm_header(cryp); in stm32_cryp_irq_thread()
1702 stm32_cryp_irq_write_data(cryp); in stm32_cryp_irq_thread()
1705 stm32_cryp_irq_write_data(cryp); in stm32_cryp_irq_thread()
1714 struct stm32_cryp *cryp = arg; in stm32_cryp_irq() local
1716 cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR); in stm32_cryp_irq()
1724 .cra_driver_name = "stm32-ecb-aes",
1744 .cra_driver_name = "stm32-cbc-aes",
1765 .cra_driver_name = "stm32-ctr-aes",
1786 .cra_driver_name = "stm32-ecb-des",
1806 .cra_driver_name = "stm32-cbc-des",
1827 .cra_driver_name = "stm32-ecb-des3",
1847 .cra_driver_name = "stm32-cbc-des3",
1880 .cra_driver_name = "stm32-gcm-aes",
1900 .cra_driver_name = "stm32-ccm-aes",
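/*
 * Added usage sketch (not part of the driver): how a kernel consumer
 * could reach one of the ciphers registered above, e.g. "cbc(aes)"
 * backed by "stm32-cbc-aes", through the symmetric-key crypto API.
 * The function name and its caller are hypothetical and error handling
 * is kept minimal.
 */
#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int example_cbc_aes_encrypt(const u8 *key, unsigned int keylen, u8 *iv,
				   struct scatterlist *src,
				   struct scatterlist *dst, unsigned int len)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_skcipher_setkey(tfm, key, keylen);
	if (ret)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	/* Wait synchronously: the stm32 engine completes requests asynchronously. */
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, src, dst, len, iv);
	ret = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return ret;
}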
1922 { .compatible = "st,stm32f756-cryp", .data = &f7_data},
1923 { .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
1930 struct device *dev = &pdev->dev; in stm32_cryp_probe()
1931 struct stm32_cryp *cryp; in stm32_cryp_probe() local
1936 cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL); in stm32_cryp_probe()
1937 if (!cryp) in stm32_cryp_probe()
1938 return -ENOMEM; in stm32_cryp_probe()
1940 cryp->caps = of_device_get_match_data(dev); in stm32_cryp_probe()
1941 if (!cryp->caps) in stm32_cryp_probe()
1942 return -ENODEV; in stm32_cryp_probe()
1944 cryp->dev = dev; in stm32_cryp_probe()
1946 mutex_init(&cryp->lock); in stm32_cryp_probe()
1949 cryp->regs = devm_ioremap_resource(dev, res); in stm32_cryp_probe()
1950 if (IS_ERR(cryp->regs)) in stm32_cryp_probe()
1951 return PTR_ERR(cryp->regs); in stm32_cryp_probe()
1961 dev_name(dev), cryp); in stm32_cryp_probe()
1967 cryp->clk = devm_clk_get(dev, NULL); in stm32_cryp_probe()
1968 if (IS_ERR(cryp->clk)) { in stm32_cryp_probe()
1970 return PTR_ERR(cryp->clk); in stm32_cryp_probe()
1973 ret = clk_prepare_enable(cryp->clk); in stm32_cryp_probe()
1975 dev_err(cryp->dev, "Failed to enable clock\n"); in stm32_cryp_probe()
1993 platform_set_drvdata(pdev, cryp); in stm32_cryp_probe()
1996 list_add(&cryp->list, &cryp_list.dev_list); in stm32_cryp_probe()
2000 cryp->engine = crypto_engine_alloc_init(dev, 1); in stm32_cryp_probe()
2001 if (!cryp->engine) { in stm32_cryp_probe()
2003 ret = -ENOMEM; in stm32_cryp_probe()
2007 ret = crypto_engine_start(cryp->engine); in stm32_cryp_probe()
2033 crypto_engine_exit(cryp->engine); in stm32_cryp_probe()
2036 list_del(&cryp->list); in stm32_cryp_probe()
2044 clk_disable_unprepare(cryp->clk); in stm32_cryp_probe()
2051 struct stm32_cryp *cryp = platform_get_drvdata(pdev); in stm32_cryp_remove() local
2054 if (!cryp) in stm32_cryp_remove()
2055 return -ENODEV; in stm32_cryp_remove()
2057 ret = pm_runtime_get_sync(cryp->dev); in stm32_cryp_remove()
2064 crypto_engine_exit(cryp->engine); in stm32_cryp_remove()
2067 list_del(&cryp->list); in stm32_cryp_remove()
2070 pm_runtime_disable(cryp->dev); in stm32_cryp_remove()
2071 pm_runtime_put_noidle(cryp->dev); in stm32_cryp_remove()
2073 clk_disable_unprepare(cryp->clk); in stm32_cryp_remove()
2081 struct stm32_cryp *cryp = dev_get_drvdata(dev); in stm32_cryp_runtime_suspend() local
2083 clk_disable_unprepare(cryp->clk); in stm32_cryp_runtime_suspend()
2090 struct stm32_cryp *cryp = dev_get_drvdata(dev); in stm32_cryp_runtime_resume() local
2093 ret = clk_prepare_enable(cryp->clk); in stm32_cryp_runtime_resume()
2095 dev_err(cryp->dev, "Failed to prepare_enable clock\n"); in stm32_cryp_runtime_resume()
2123 MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");