Lines Matching +full:stm32mp1 +full:- +full:cryp
1 // SPDX-License-Identifier: GPL-2.0-only
24 #define DRIVER_NAME "stm32-cryp"
116 struct stm32_cryp *cryp; member
167 static inline bool is_aes(struct stm32_cryp *cryp) in is_aes() argument
169 return cryp->flags & FLG_AES; in is_aes()
172 static inline bool is_des(struct stm32_cryp *cryp) in is_des() argument
174 return cryp->flags & FLG_DES; in is_des()
177 static inline bool is_tdes(struct stm32_cryp *cryp) in is_tdes() argument
179 return cryp->flags & FLG_TDES; in is_tdes()
182 static inline bool is_ecb(struct stm32_cryp *cryp) in is_ecb() argument
184 return cryp->flags & FLG_ECB; in is_ecb()
187 static inline bool is_cbc(struct stm32_cryp *cryp) in is_cbc() argument
189 return cryp->flags & FLG_CBC; in is_cbc()
192 static inline bool is_ctr(struct stm32_cryp *cryp) in is_ctr() argument
194 return cryp->flags & FLG_CTR; in is_ctr()
197 static inline bool is_gcm(struct stm32_cryp *cryp) in is_gcm() argument
199 return cryp->flags & FLG_GCM; in is_gcm()
202 static inline bool is_ccm(struct stm32_cryp *cryp) in is_ccm() argument
204 return cryp->flags & FLG_CCM; in is_ccm()
207 static inline bool is_encrypt(struct stm32_cryp *cryp) in is_encrypt() argument
209 return cryp->flags & FLG_ENCRYPT; in is_encrypt()
212 static inline bool is_decrypt(struct stm32_cryp *cryp) in is_decrypt() argument
214 return !is_encrypt(cryp); in is_decrypt()
217 static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst) in stm32_cryp_read() argument
219 return readl_relaxed(cryp->regs + ofst); in stm32_cryp_read()
222 static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val) in stm32_cryp_write() argument
224 writel_relaxed(val, cryp->regs + ofst); in stm32_cryp_write()
227 static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp) in stm32_cryp_wait_busy() argument
231 return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status, in stm32_cryp_wait_busy()
235 static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp) in stm32_cryp_wait_enable() argument
239 return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status, in stm32_cryp_wait_enable()
243 static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp) in stm32_cryp_wait_output() argument
247 return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status, in stm32_cryp_wait_output()
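The three wait helpers above are thin wrappers around readl_relaxed_poll_timeout() from <linux/iopoll.h>: each one spins on a register read until a status condition holds or a timeout expires, returning 0 or -ETIMEDOUT. A minimal self-contained sketch of the same pattern follows; the EX_SR offset, the EX_SR_BUSY mask and the 10 us / 100 ms figures are illustrative assumptions, not values taken from this driver.

#include <linux/bits.h>
#include <linux/io.h>
#include <linux/iopoll.h>

#define EX_SR		0x04		/* hypothetical status register offset */
#define EX_SR_BUSY	BIT(4)		/* hypothetical "engine busy" flag */

/* Poll EX_SR every 10 us until BUSY clears, give up after 100 ms. */
static int example_wait_not_busy(void __iomem *regs)
{
	u32 status;

	return readl_relaxed_poll_timeout(regs + EX_SR, status,
					  !(status & EX_SR_BUSY),
					  10, 100000);
}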
251 static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
252 static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err);
256 struct stm32_cryp *tmp, *cryp = NULL; in stm32_cryp_find_dev() local
259 if (!ctx->cryp) { in stm32_cryp_find_dev()
261 cryp = tmp; in stm32_cryp_find_dev()
264 ctx->cryp = cryp; in stm32_cryp_find_dev()
266 cryp = ctx->cryp; in stm32_cryp_find_dev()
271 return cryp; in stm32_cryp_find_dev()
274 static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv) in stm32_cryp_hw_write_iv() argument
279 stm32_cryp_write(cryp, CRYP_IV0LR, be32_to_cpu(*iv++)); in stm32_cryp_hw_write_iv()
280 stm32_cryp_write(cryp, CRYP_IV0RR, be32_to_cpu(*iv++)); in stm32_cryp_hw_write_iv()
282 if (is_aes(cryp)) { in stm32_cryp_hw_write_iv()
283 stm32_cryp_write(cryp, CRYP_IV1LR, be32_to_cpu(*iv++)); in stm32_cryp_hw_write_iv()
284 stm32_cryp_write(cryp, CRYP_IV1RR, be32_to_cpu(*iv++)); in stm32_cryp_hw_write_iv()
288 static void stm32_cryp_get_iv(struct stm32_cryp *cryp) in stm32_cryp_get_iv() argument
290 struct skcipher_request *req = cryp->req; in stm32_cryp_get_iv()
291 __be32 *tmp = (void *)req->iv; in stm32_cryp_get_iv()
296 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR)); in stm32_cryp_get_iv()
297 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR)); in stm32_cryp_get_iv()
299 if (is_aes(cryp)) { in stm32_cryp_get_iv()
300 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR)); in stm32_cryp_get_iv()
301 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR)); in stm32_cryp_get_iv()
311 stm32_cryp_write(c, CRYP_K1LR, be32_to_cpu(c->ctx->key[0])); in stm32_cryp_hw_write_key()
312 stm32_cryp_write(c, CRYP_K1RR, be32_to_cpu(c->ctx->key[1])); in stm32_cryp_hw_write_key()
315 for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4) in stm32_cryp_hw_write_key()
317 be32_to_cpu(c->ctx->key[i - 1])); in stm32_cryp_hw_write_key()
321 static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp) in stm32_cryp_get_hw_mode() argument
323 if (is_aes(cryp) && is_ecb(cryp)) in stm32_cryp_get_hw_mode()
326 if (is_aes(cryp) && is_cbc(cryp)) in stm32_cryp_get_hw_mode()
329 if (is_aes(cryp) && is_ctr(cryp)) in stm32_cryp_get_hw_mode()
332 if (is_aes(cryp) && is_gcm(cryp)) in stm32_cryp_get_hw_mode()
335 if (is_aes(cryp) && is_ccm(cryp)) in stm32_cryp_get_hw_mode()
338 if (is_des(cryp) && is_ecb(cryp)) in stm32_cryp_get_hw_mode()
341 if (is_des(cryp) && is_cbc(cryp)) in stm32_cryp_get_hw_mode()
344 if (is_tdes(cryp) && is_ecb(cryp)) in stm32_cryp_get_hw_mode()
347 if (is_tdes(cryp) && is_cbc(cryp)) in stm32_cryp_get_hw_mode()
350 dev_err(cryp->dev, "Unknown mode\n"); in stm32_cryp_get_hw_mode()
354 static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp) in stm32_cryp_get_input_text_len() argument
356 return is_encrypt(cryp) ? cryp->areq->cryptlen : in stm32_cryp_get_input_text_len()
357 cryp->areq->cryptlen - cryp->authsize; in stm32_cryp_get_input_text_len()
360 static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg) in stm32_cryp_gcm_init() argument
366 memcpy(iv, cryp->areq->iv, 12); in stm32_cryp_gcm_init()
368 cryp->gcm_ctr = GCM_CTR_INIT; in stm32_cryp_gcm_init()
369 stm32_cryp_hw_write_iv(cryp, iv); in stm32_cryp_gcm_init()
371 stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN); in stm32_cryp_gcm_init()
374 ret = stm32_cryp_wait_enable(cryp); in stm32_cryp_gcm_init()
376 dev_err(cryp->dev, "Timeout (gcm init)\n"); in stm32_cryp_gcm_init()
381 if (cryp->areq->assoclen) { in stm32_cryp_gcm_init()
383 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_gcm_init()
384 } else if (stm32_cryp_get_input_text_len(cryp)) { in stm32_cryp_gcm_init()
386 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_gcm_init()
392 static void stm32_crypt_gcmccm_end_header(struct stm32_cryp *cryp) in stm32_crypt_gcmccm_end_header() argument
398 if (!cryp->header_in) { in stm32_crypt_gcmccm_end_header()
400 err = stm32_cryp_wait_busy(cryp); in stm32_crypt_gcmccm_end_header()
402 dev_err(cryp->dev, "Timeout (gcm/ccm header)\n"); in stm32_crypt_gcmccm_end_header()
403 stm32_cryp_write(cryp, CRYP_IMSCR, 0); in stm32_crypt_gcmccm_end_header()
404 stm32_cryp_finish_req(cryp, err); in stm32_crypt_gcmccm_end_header()
408 if (stm32_cryp_get_input_text_len(cryp)) { in stm32_crypt_gcmccm_end_header()
410 cfg = stm32_cryp_read(cryp, CRYP_CR); in stm32_crypt_gcmccm_end_header()
412 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_crypt_gcmccm_end_header()
416 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_crypt_gcmccm_end_header()
427 static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp) in stm32_cryp_write_ccm_first_header() argument
432 u32 alen = cryp->areq->assoclen; in stm32_cryp_write_ccm_first_header()
452 written = min_t(size_t, AES_BLOCK_SIZE - len, alen); in stm32_cryp_write_ccm_first_header()
454 scatterwalk_copychunks((char *)block + len, &cryp->in_walk, written, 0); in stm32_cryp_write_ccm_first_header()
456 stm32_cryp_write(cryp, CRYP_DIN, block[i]); in stm32_cryp_write_ccm_first_header()
458 cryp->header_in -= written; in stm32_cryp_write_ccm_first_header()
460 stm32_crypt_gcmccm_end_header(cryp); in stm32_cryp_write_ccm_first_header()
463 static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg) in stm32_cryp_ccm_init() argument
473 memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE); in stm32_cryp_ccm_init()
474 memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1); in stm32_cryp_ccm_init()
475 iv[AES_BLOCK_SIZE - 1] = 1; in stm32_cryp_ccm_init()
476 stm32_cryp_hw_write_iv(cryp, (__be32 *)iv); in stm32_cryp_ccm_init()
481 b0[0] |= (8 * ((cryp->authsize - 2) / 2)); in stm32_cryp_ccm_init()
483 if (cryp->areq->assoclen) in stm32_cryp_ccm_init()
486 textlen = stm32_cryp_get_input_text_len(cryp); in stm32_cryp_ccm_init()
488 b0[AES_BLOCK_SIZE - 2] = textlen >> 8; in stm32_cryp_ccm_init()
489 b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF; in stm32_cryp_ccm_init()
492 stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN); in stm32_cryp_ccm_init()
501 if (!cryp->caps->padding_wa) in stm32_cryp_ccm_init()
503 stm32_cryp_write(cryp, CRYP_DIN, xd); in stm32_cryp_ccm_init()
507 ret = stm32_cryp_wait_enable(cryp); in stm32_cryp_ccm_init()
509 dev_err(cryp->dev, "Timeout (ccm init)\n"); in stm32_cryp_ccm_init()
514 if (cryp->areq->assoclen) { in stm32_cryp_ccm_init()
516 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_ccm_init()
519 stm32_cryp_write_ccm_first_header(cryp); in stm32_cryp_ccm_init()
520 } else if (stm32_cryp_get_input_text_len(cryp)) { in stm32_cryp_ccm_init()
522 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_ccm_init()
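Before enabling the INIT phase, stm32_cryp_ccm_init() formats the CCM B0 block: the flags byte gains 8 * ((authsize - 2) / 2) to encode the tag length, the branch on assoclen presumably sets the RFC 3610 "Adata" bit, and the last two bytes carry the payload length in big-endian order. The standalone sketch below mirrors that formatting under two assumptions of mine: B0 starts from the caller's IV (flags byte followed by the nonce, the usual CCM layout), and the length field is limited to 16 bits, exactly as in the lines above.

#include <linux/string.h>
#include <linux/types.h>
#include <crypto/aes.h>

static void example_ccm_b0(u8 b0[AES_BLOCK_SIZE], const u8 *iv,
			   unsigned int authsize, unsigned int assoclen,
			   unsigned int msglen)
{
	memcpy(b0, iv, AES_BLOCK_SIZE);		/* flags byte + nonce */

	b0[0] |= 8 * ((authsize - 2) / 2);	/* encode tag length M */
	if (assoclen)
		b0[0] |= 0x40;			/* associated data present */

	b0[AES_BLOCK_SIZE - 2] = msglen >> 8;	/* payload length, big-endian, */
	b0[AES_BLOCK_SIZE - 1] = msglen & 0xFF;	/* in the last bytes of B0 */
}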
528 static int stm32_cryp_hw_init(struct stm32_cryp *cryp) in stm32_cryp_hw_init() argument
533 pm_runtime_get_sync(cryp->dev); in stm32_cryp_hw_init()
536 stm32_cryp_write(cryp, CRYP_IMSCR, 0); in stm32_cryp_hw_init()
539 stm32_cryp_hw_write_key(cryp); in stm32_cryp_hw_init()
544 switch (cryp->ctx->keylen) { in stm32_cryp_hw_init()
559 hw_mode = stm32_cryp_get_hw_mode(cryp); in stm32_cryp_hw_init()
561 return -EINVAL; in stm32_cryp_hw_init()
564 if (is_decrypt(cryp) && in stm32_cryp_hw_init()
566 stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP | CR_CRYPEN); in stm32_cryp_hw_init()
569 ret = stm32_cryp_wait_busy(cryp); in stm32_cryp_hw_init()
571 dev_err(cryp->dev, "Timeout (key preparation)\n"); in stm32_cryp_hw_init()
578 if (is_decrypt(cryp)) in stm32_cryp_hw_init()
582 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_hw_init()
589 ret = stm32_cryp_ccm_init(cryp, cfg); in stm32_cryp_hw_init()
591 ret = stm32_cryp_gcm_init(cryp, cfg); in stm32_cryp_hw_init()
602 stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv); in stm32_cryp_hw_init()
612 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_hw_init()
617 static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err) in stm32_cryp_finish_req() argument
619 if (!err && (is_gcm(cryp) || is_ccm(cryp))) in stm32_cryp_finish_req()
621 err = stm32_cryp_read_auth_tag(cryp); in stm32_cryp_finish_req()
623 if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp)))) in stm32_cryp_finish_req()
624 stm32_cryp_get_iv(cryp); in stm32_cryp_finish_req()
626 pm_runtime_mark_last_busy(cryp->dev); in stm32_cryp_finish_req()
627 pm_runtime_put_autosuspend(cryp->dev); in stm32_cryp_finish_req()
629 if (is_gcm(cryp) || is_ccm(cryp)) in stm32_cryp_finish_req()
630 crypto_finalize_aead_request(cryp->engine, cryp->areq, err); in stm32_cryp_finish_req()
632 crypto_finalize_skcipher_request(cryp->engine, cryp->req, in stm32_cryp_finish_req()
636 static int stm32_cryp_cpu_start(struct stm32_cryp *cryp) in stm32_cryp_cpu_start() argument
639 stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT); in stm32_cryp_cpu_start()
654 ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req; in stm32_cryp_init_tfm()
655 ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req; in stm32_cryp_init_tfm()
656 ctx->enginectx.op.unprepare_request = NULL; in stm32_cryp_init_tfm()
668 tfm->reqsize = sizeof(struct stm32_cryp_reqctx); in stm32_cryp_aes_aead_init()
670 ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req; in stm32_cryp_aes_aead_init()
671 ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req; in stm32_cryp_aes_aead_init()
672 ctx->enginectx.op.unprepare_request = NULL; in stm32_cryp_aes_aead_init()
682 struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx); in stm32_cryp_crypt() local
684 if (!cryp) in stm32_cryp_crypt()
685 return -ENODEV; in stm32_cryp_crypt()
687 rctx->mode = mode; in stm32_cryp_crypt()
689 return crypto_transfer_skcipher_request_to_engine(cryp->engine, req); in stm32_cryp_crypt()
696 struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx); in stm32_cryp_aead_crypt() local
698 if (!cryp) in stm32_cryp_aead_crypt()
699 return -ENODEV; in stm32_cryp_aead_crypt()
701 rctx->mode = mode; in stm32_cryp_aead_crypt()
703 return crypto_transfer_aead_request_to_engine(cryp->engine, req); in stm32_cryp_aead_crypt()
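Neither stm32_cryp_crypt() nor stm32_cryp_aead_crypt() is called directly: requests arrive through the generic crypto API under the registered algorithm names (for instance "cbc(aes)", driver name "stm32-cbc-aes") and are handed to the crypto engine, which later invokes the prepare/do_one_request callbacks set up in the init functions above. The sketch below shows how a kernel caller would exercise such an skcipher; it is a minimal illustration with a single scatterlist entry, and the crypto core may of course select a different "cbc(aes)" implementation than this driver, depending on priorities.

#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <linux/err.h>
#include <linux/slab.h>

/* buf must not live on the stack (e.g. kmalloc'ed) so it can be mapped
 * into a scatterlist; len must respect the cipher's block size. */
static int example_cbc_aes_encrypt(u8 *buf, unsigned int len,
				   const u8 *key, unsigned int keylen,
				   u8 iv[16])
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_skcipher_setkey(tfm, key, keylen);
	if (ret)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req,
				      CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* Queued to the driver's crypto engine; completion comes back
	 * from stm32_cryp_finish_req() via the engine. */
	ret = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return ret;
}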
711 memcpy(ctx->key, key, keylen); in stm32_cryp_setkey()
712 ctx->keylen = keylen; in stm32_cryp_setkey()
722 return -EINVAL; in stm32_cryp_aes_setkey()
748 return -EINVAL; in stm32_cryp_aes_aead_setkey()
750 memcpy(ctx->key, key, keylen); in stm32_cryp_aes_aead_setkey()
751 ctx->keylen = keylen; in stm32_cryp_aes_aead_setkey()
769 return -EINVAL; in stm32_cryp_aes_gcm_setauthsize()
788 return -EINVAL; in stm32_cryp_aes_ccm_setauthsize()
796 if (req->cryptlen % AES_BLOCK_SIZE) in stm32_cryp_aes_ecb_encrypt()
797 return -EINVAL; in stm32_cryp_aes_ecb_encrypt()
799 if (req->cryptlen == 0) in stm32_cryp_aes_ecb_encrypt()
807 if (req->cryptlen % AES_BLOCK_SIZE) in stm32_cryp_aes_ecb_decrypt()
808 return -EINVAL; in stm32_cryp_aes_ecb_decrypt()
810 if (req->cryptlen == 0) in stm32_cryp_aes_ecb_decrypt()
818 if (req->cryptlen % AES_BLOCK_SIZE) in stm32_cryp_aes_cbc_encrypt()
819 return -EINVAL; in stm32_cryp_aes_cbc_encrypt()
821 if (req->cryptlen == 0) in stm32_cryp_aes_cbc_encrypt()
829 if (req->cryptlen % AES_BLOCK_SIZE) in stm32_cryp_aes_cbc_decrypt()
830 return -EINVAL; in stm32_cryp_aes_cbc_decrypt()
832 if (req->cryptlen == 0) in stm32_cryp_aes_cbc_decrypt()
840 if (req->cryptlen == 0) in stm32_cryp_aes_ctr_encrypt()
848 if (req->cryptlen == 0) in stm32_cryp_aes_ctr_decrypt()
868 return -EINVAL; in crypto_ccm_check_iv()
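crypto_ccm_check_iv() is a local sanity check on the CCM nonce: the first IV byte carries L' = L - 1, the size of the length field, and CCM only allows 2 <= L <= 8. The exact bounds used here are not visible in this listing, so the snippet below is just the generic constraint written out.

#include <linux/types.h>
#include <linux/errno.h>

/* CCM (RFC 3610): iv[0] = L' = L - 1, with 2 <= L <= 8. */
static int example_ccm_check_iv(const u8 *iv)
{
	if (iv[0] < 1 || iv[0] > 7)
		return -EINVAL;

	return 0;
}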
877 err = crypto_ccm_check_iv(req->iv); in stm32_cryp_aes_ccm_encrypt()
888 err = crypto_ccm_check_iv(req->iv); in stm32_cryp_aes_ccm_decrypt()
897 if (req->cryptlen % DES_BLOCK_SIZE) in stm32_cryp_des_ecb_encrypt()
898 return -EINVAL; in stm32_cryp_des_ecb_encrypt()
900 if (req->cryptlen == 0) in stm32_cryp_des_ecb_encrypt()
908 if (req->cryptlen % DES_BLOCK_SIZE) in stm32_cryp_des_ecb_decrypt()
909 return -EINVAL; in stm32_cryp_des_ecb_decrypt()
911 if (req->cryptlen == 0) in stm32_cryp_des_ecb_decrypt()
919 if (req->cryptlen % DES_BLOCK_SIZE) in stm32_cryp_des_cbc_encrypt()
920 return -EINVAL; in stm32_cryp_des_cbc_encrypt()
922 if (req->cryptlen == 0) in stm32_cryp_des_cbc_encrypt()
930 if (req->cryptlen % DES_BLOCK_SIZE) in stm32_cryp_des_cbc_decrypt()
931 return -EINVAL; in stm32_cryp_des_cbc_decrypt()
933 if (req->cryptlen == 0) in stm32_cryp_des_cbc_decrypt()
941 if (req->cryptlen % DES_BLOCK_SIZE) in stm32_cryp_tdes_ecb_encrypt()
942 return -EINVAL; in stm32_cryp_tdes_ecb_encrypt()
944 if (req->cryptlen == 0) in stm32_cryp_tdes_ecb_encrypt()
952 if (req->cryptlen % DES_BLOCK_SIZE) in stm32_cryp_tdes_ecb_decrypt()
953 return -EINVAL; in stm32_cryp_tdes_ecb_decrypt()
955 if (req->cryptlen == 0) in stm32_cryp_tdes_ecb_decrypt()
963 if (req->cryptlen % DES_BLOCK_SIZE) in stm32_cryp_tdes_cbc_encrypt()
964 return -EINVAL; in stm32_cryp_tdes_cbc_encrypt()
966 if (req->cryptlen == 0) in stm32_cryp_tdes_cbc_encrypt()
974 if (req->cryptlen % DES_BLOCK_SIZE) in stm32_cryp_tdes_cbc_decrypt()
975 return -EINVAL; in stm32_cryp_tdes_cbc_decrypt()
977 if (req->cryptlen == 0) in stm32_cryp_tdes_cbc_decrypt()
987 struct stm32_cryp *cryp; in stm32_cryp_prepare_req() local
993 return -EINVAL; in stm32_cryp_prepare_req()
998 cryp = ctx->cryp; in stm32_cryp_prepare_req()
1000 if (!cryp) in stm32_cryp_prepare_req()
1001 return -ENODEV; in stm32_cryp_prepare_req()
1004 rctx->mode &= FLG_MODE_MASK; in stm32_cryp_prepare_req()
1006 ctx->cryp = cryp; in stm32_cryp_prepare_req()
1008 cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode; in stm32_cryp_prepare_req()
1009 cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE; in stm32_cryp_prepare_req()
1010 cryp->ctx = ctx; in stm32_cryp_prepare_req()
1013 cryp->req = req; in stm32_cryp_prepare_req()
1014 cryp->areq = NULL; in stm32_cryp_prepare_req()
1015 cryp->header_in = 0; in stm32_cryp_prepare_req()
1016 cryp->payload_in = req->cryptlen; in stm32_cryp_prepare_req()
1017 cryp->payload_out = req->cryptlen; in stm32_cryp_prepare_req()
1018 cryp->authsize = 0; in stm32_cryp_prepare_req()
1024 * <- assoclen -> <- cryptlen -> in stm32_cryp_prepare_req()
1027 * <- assoclen -> <-- cryptlen --> <- authsize -> in stm32_cryp_prepare_req()
1031 * <- assoclen ---> <---------- cryptlen ----------> in stm32_cryp_prepare_req()
1034 * <- assoclen -> <- cryptlen - authsize -> in stm32_cryp_prepare_req()
1036 cryp->areq = areq; in stm32_cryp_prepare_req()
1037 cryp->req = NULL; in stm32_cryp_prepare_req()
1038 cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq)); in stm32_cryp_prepare_req()
1039 if (is_encrypt(cryp)) { in stm32_cryp_prepare_req()
1040 cryp->payload_in = areq->cryptlen; in stm32_cryp_prepare_req()
1041 cryp->header_in = areq->assoclen; in stm32_cryp_prepare_req()
1042 cryp->payload_out = areq->cryptlen; in stm32_cryp_prepare_req()
1044 cryp->payload_in = areq->cryptlen - cryp->authsize; in stm32_cryp_prepare_req()
1045 cryp->header_in = areq->assoclen; in stm32_cryp_prepare_req()
1046 cryp->payload_out = cryp->payload_in; in stm32_cryp_prepare_req()
1050 in_sg = req ? req->src : areq->src; in stm32_cryp_prepare_req()
1051 scatterwalk_start(&cryp->in_walk, in_sg); in stm32_cryp_prepare_req()
1053 cryp->out_sg = req ? req->dst : areq->dst; in stm32_cryp_prepare_req()
1054 scatterwalk_start(&cryp->out_walk, cryp->out_sg); in stm32_cryp_prepare_req()
1056 if (is_gcm(cryp) || is_ccm(cryp)) { in stm32_cryp_prepare_req()
1058 scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2); in stm32_cryp_prepare_req()
1061 if (is_ctr(cryp)) in stm32_cryp_prepare_req()
1062 memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr)); in stm32_cryp_prepare_req()
1064 ret = stm32_cryp_hw_init(cryp); in stm32_cryp_prepare_req()
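For AEAD (GCM/CCM) requests, stm32_cryp_prepare_req() splits areq->cryptlen according to the direction, matching stm32_cryp_get_input_text_len(): on encryption the whole cryptlen is payload and the tag is produced in addition to it, while on decryption the trailing authsize bytes of cryptlen are the incoming tag, so only cryptlen - authsize bytes are decrypted. A worked example with illustrative numbers:

/*
 * GCM, authsize = 16, assoclen = 20:
 *
 *   encrypt: areq->cryptlen = 64  (plaintext only)
 *            header_in = 20, payload_in = 64, payload_out = 64,
 *            then a 16-byte tag is produced by the final phase.
 *
 *   decrypt: areq->cryptlen = 80  (64 bytes of ciphertext + 16-byte tag)
 *            header_in = 20, payload_in = 64, payload_out = 64,
 *            and the trailing 16 bytes are checked against the
 *            recomputed tag in stm32_cryp_read_auth_tag().
 */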
1085 struct stm32_cryp *cryp = ctx->cryp; in stm32_cryp_cipher_one_req() local
1087 if (!cryp) in stm32_cryp_cipher_one_req()
1088 return -ENODEV; in stm32_cryp_cipher_one_req()
1090 return stm32_cryp_cpu_start(cryp); in stm32_cryp_cipher_one_req()
1106 struct stm32_cryp *cryp = ctx->cryp; in stm32_cryp_aead_one_req() local
1108 if (!cryp) in stm32_cryp_aead_one_req()
1109 return -ENODEV; in stm32_cryp_aead_one_req()
1111 if (unlikely(!cryp->payload_in && !cryp->header_in)) { in stm32_cryp_aead_one_req()
1113 stm32_cryp_finish_req(cryp, 0); in stm32_cryp_aead_one_req()
1117 return stm32_cryp_cpu_start(cryp); in stm32_cryp_aead_one_req()
1120 static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp) in stm32_cryp_read_auth_tag() argument
1127 cfg = stm32_cryp_read(cryp, CRYP_CR); in stm32_cryp_read_auth_tag()
1134 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_read_auth_tag()
1136 if (is_gcm(cryp)) { in stm32_cryp_read_auth_tag()
1138 size_bit = cryp->areq->assoclen * 8; in stm32_cryp_read_auth_tag()
1139 if (cryp->caps->swap_final) in stm32_cryp_read_auth_tag()
1142 stm32_cryp_write(cryp, CRYP_DIN, 0); in stm32_cryp_read_auth_tag()
1143 stm32_cryp_write(cryp, CRYP_DIN, size_bit); in stm32_cryp_read_auth_tag()
1145 size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen : in stm32_cryp_read_auth_tag()
1146 cryp->areq->cryptlen - cryp->authsize; in stm32_cryp_read_auth_tag()
1148 if (cryp->caps->swap_final) in stm32_cryp_read_auth_tag()
1151 stm32_cryp_write(cryp, CRYP_DIN, 0); in stm32_cryp_read_auth_tag()
1152 stm32_cryp_write(cryp, CRYP_DIN, size_bit); in stm32_cryp_read_auth_tag()
1159 memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE); in stm32_cryp_read_auth_tag()
1160 memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1); in stm32_cryp_read_auth_tag()
1165 if (!cryp->caps->padding_wa) in stm32_cryp_read_auth_tag()
1167 stm32_cryp_write(cryp, CRYP_DIN, xiv); in stm32_cryp_read_auth_tag()
1172 ret = stm32_cryp_wait_output(cryp); in stm32_cryp_read_auth_tag()
1174 dev_err(cryp->dev, "Timeout (read tag)\n"); in stm32_cryp_read_auth_tag()
1178 if (is_encrypt(cryp)) { in stm32_cryp_read_auth_tag()
1183 out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT); in stm32_cryp_read_auth_tag()
1185 scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1); in stm32_cryp_read_auth_tag()
1190 scatterwalk_copychunks(in_tag, &cryp->in_walk, cryp->authsize, 0); in stm32_cryp_read_auth_tag()
1193 out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT); in stm32_cryp_read_auth_tag()
1195 if (crypto_memneq(in_tag, out_tag, cryp->authsize)) in stm32_cryp_read_auth_tag()
1196 ret = -EBADMSG; in stm32_cryp_read_auth_tag()
1199 /* Disable cryp */ in stm32_cryp_read_auth_tag()
1201 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_read_auth_tag()
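The GCM branch of stm32_cryp_read_auth_tag() feeds the final phase the standard GCM length block, i.e. the bit lengths of the associated data and of the ciphertext as two 64-bit values: each length is written as a zero high word followed by the size, and on decryption the ciphertext length excludes the authsize tag bytes. (The conversion of the second length to a bit count, and whatever byte swap the swap_final capability gates, happen on lines this search did not match.) A worked example with illustrative sizes:

/*
 * GCM length block = len(A) || len(C), each a 64-bit bit count.
 * Example: assoclen = 20 bytes, ciphertext = 64 bytes:
 *
 *   len(A) = 20 * 8 = 160 bits  ->  CRYP_DIN <= 0, then 160
 *   len(C) = 64 * 8 = 512 bits  ->  CRYP_DIN <= 0, then 512
 *
 * i.e. the two pairs of stm32_cryp_write(cryp, CRYP_DIN, ...) calls
 * visible above; on decryption len(C) is based on cryptlen - authsize.
 */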
1206 static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp) in stm32_cryp_check_ctr_counter() argument
1210 if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) { in stm32_cryp_check_ctr_counter()
1215 crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr)); in stm32_cryp_check_ctr_counter()
1217 cr = stm32_cryp_read(cryp, CRYP_CR); in stm32_cryp_check_ctr_counter()
1218 stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN); in stm32_cryp_check_ctr_counter()
1220 stm32_cryp_hw_write_iv(cryp, cryp->last_ctr); in stm32_cryp_check_ctr_counter()
1222 stm32_cryp_write(cryp, CRYP_CR, cr); in stm32_cryp_check_ctr_counter()
1226 cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR)); in stm32_cryp_check_ctr_counter()
1227 cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR)); in stm32_cryp_check_ctr_counter()
1228 cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR)); in stm32_cryp_check_ctr_counter()
1229 cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR)); in stm32_cryp_check_ctr_counter()
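stm32_cryp_check_ctr_counter() works around the peripheral's counter handling: when the low 32-bit word of the CTR counter is about to wrap (last_ctr[3] == 0xFFFFFFFF), the driver briefly disables the engine, advances the full 128-bit big-endian counter in software with crypto_inc(), rewrites the IV registers and re-enables the engine, then snapshots the current counter from CRYP_IV0LR..CRYP_IV1RR. A reduced sketch of just the software carry step:

#include <crypto/algapi.h>	/* crypto_inc() */
#include <linux/kernel.h>

/* Propagate the carry of a 128-bit big-endian CTR counter in software
 * before reprogramming it into the IV registers. */
static void example_ctr_carry(__be32 ctr[4])
{
	if (ctr[3] == cpu_to_be32(0xFFFFFFFF))
		crypto_inc((u8 *)ctr, 4 * sizeof(__be32));
}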
1232 static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp) in stm32_cryp_irq_read_data() argument
1237 for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) in stm32_cryp_irq_read_data()
1238 block[i] = stm32_cryp_read(cryp, CRYP_DOUT); in stm32_cryp_irq_read_data()
1240 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize, in stm32_cryp_irq_read_data()
1241 cryp->payload_out), 1); in stm32_cryp_irq_read_data()
1242 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, in stm32_cryp_irq_read_data()
1243 cryp->payload_out); in stm32_cryp_irq_read_data()
1246 static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp) in stm32_cryp_irq_write_block() argument
1251 scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize, in stm32_cryp_irq_write_block()
1252 cryp->payload_in), 0); in stm32_cryp_irq_write_block()
1253 for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) in stm32_cryp_irq_write_block()
1254 stm32_cryp_write(cryp, CRYP_DIN, block[i]); in stm32_cryp_irq_write_block()
1256 cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in); in stm32_cryp_irq_write_block()
1259 static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp) in stm32_cryp_irq_write_gcm_padded_data() argument
1268 stm32_cryp_write(cryp, CRYP_IMSCR, 0); in stm32_cryp_irq_write_gcm_padded_data()
1269 cfg = stm32_cryp_read(cryp, CRYP_CR); in stm32_cryp_irq_write_gcm_padded_data()
1271 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_gcm_padded_data()
1274 stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2); in stm32_cryp_irq_write_gcm_padded_data()
1279 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_gcm_padded_data()
1283 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_gcm_padded_data()
1286 stm32_cryp_irq_write_block(cryp); in stm32_cryp_irq_write_gcm_padded_data()
1288 err = stm32_cryp_wait_output(cryp); in stm32_cryp_irq_write_gcm_padded_data()
1290 dev_err(cryp->dev, "Timeout (write gcm last data)\n"); in stm32_cryp_irq_write_gcm_padded_data()
1291 return stm32_cryp_finish_req(cryp, err); in stm32_cryp_irq_write_gcm_padded_data()
1299 for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) in stm32_cryp_irq_write_gcm_padded_data()
1300 block[i] = stm32_cryp_read(cryp, CRYP_DOUT); in stm32_cryp_irq_write_gcm_padded_data()
1302 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize, in stm32_cryp_irq_write_gcm_padded_data()
1303 cryp->payload_out), 1); in stm32_cryp_irq_write_gcm_padded_data()
1304 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, in stm32_cryp_irq_write_gcm_padded_data()
1305 cryp->payload_out); in stm32_cryp_irq_write_gcm_padded_data()
1310 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_gcm_padded_data()
1315 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_gcm_padded_data()
1319 stm32_cryp_write(cryp, CRYP_DIN, block[i]); in stm32_cryp_irq_write_gcm_padded_data()
1322 err = stm32_cryp_wait_output(cryp); in stm32_cryp_irq_write_gcm_padded_data()
1324 dev_err(cryp->dev, "Timeout (write gcm padded data)\n"); in stm32_cryp_irq_write_gcm_padded_data()
1325 return stm32_cryp_finish_req(cryp, err); in stm32_cryp_irq_write_gcm_padded_data()
1329 stm32_cryp_read(cryp, CRYP_DOUT); in stm32_cryp_irq_write_gcm_padded_data()
1332 stm32_cryp_finish_req(cryp, 0); in stm32_cryp_irq_write_gcm_padded_data()
1335 static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp) in stm32_cryp_irq_set_npblb() argument
1340 cfg = stm32_cryp_read(cryp, CRYP_CR); in stm32_cryp_irq_set_npblb()
1342 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_set_npblb()
1344 cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NBPBL_SHIFT; in stm32_cryp_irq_set_npblb()
1346 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_set_npblb()
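On hardware without the padding workaround, stm32_cryp_irq_set_npblb() tells the engine how many bytes of the last block are padding: the value programmed into CR is hw_blocksize - payload_in, shifted by CR_NBPBL_SHIFT, so the bytes pushed only to fill the final FIFO block can be excluded from the result. A small worked example (sizes are illustrative):

/*
 * AES-GCM, 10 payload bytes left in the final block:
 *   npblb = AES_BLOCK_SIZE - payload_in = 16 - 10 = 6
 * The driver still writes a full 16-byte block to CRYP_DIN, and the
 * hardware disregards the last 6 (padding) bytes.
 */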
1349 static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp) in stm32_cryp_irq_write_ccm_padded_data() argument
1360 stm32_cryp_write(cryp, CRYP_IMSCR, 0); in stm32_cryp_irq_write_ccm_padded_data()
1362 cfg = stm32_cryp_read(cryp, CRYP_CR); in stm32_cryp_irq_write_ccm_padded_data()
1364 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_ccm_padded_data()
1367 iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4); in stm32_cryp_irq_write_ccm_padded_data()
1371 cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4); in stm32_cryp_irq_write_ccm_padded_data()
1374 stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp); in stm32_cryp_irq_write_ccm_padded_data()
1379 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_ccm_padded_data()
1383 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_ccm_padded_data()
1386 stm32_cryp_irq_write_block(cryp); in stm32_cryp_irq_write_ccm_padded_data()
1388 err = stm32_cryp_wait_output(cryp); in stm32_cryp_irq_write_ccm_padded_data()
1390 dev_err(cryp->dev, "Timeout (write ccm padded data)\n"); in stm32_cryp_irq_write_ccm_padded_data()
1391 return stm32_cryp_finish_req(cryp, err); in stm32_cryp_irq_write_ccm_padded_data()
1399 for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) in stm32_cryp_irq_write_ccm_padded_data()
1400 block[i] = stm32_cryp_read(cryp, CRYP_DOUT); in stm32_cryp_irq_write_ccm_padded_data()
1402 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize, in stm32_cryp_irq_write_ccm_padded_data()
1403 cryp->payload_out), 1); in stm32_cryp_irq_write_ccm_padded_data()
1404 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out); in stm32_cryp_irq_write_ccm_padded_data()
1408 cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4); in stm32_cryp_irq_write_ccm_padded_data()
1413 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_ccm_padded_data()
1418 stm32_cryp_write(cryp, CRYP_CR, cfg); in stm32_cryp_irq_write_ccm_padded_data()
1424 stm32_cryp_write(cryp, CRYP_DIN, block[i]); in stm32_cryp_irq_write_ccm_padded_data()
1428 err = stm32_cryp_wait_busy(cryp); in stm32_cryp_irq_write_ccm_padded_data()
1430 dev_err(cryp->dev, "Timeout (write ccm padded data)\n"); in stm32_cryp_irq_write_ccm_padded_data()
1433 stm32_cryp_finish_req(cryp, err); in stm32_cryp_irq_write_ccm_padded_data()
1436 static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp) in stm32_cryp_irq_write_data() argument
1438 if (unlikely(!cryp->payload_in)) { in stm32_cryp_irq_write_data()
1439 dev_warn(cryp->dev, "No more data to process\n"); in stm32_cryp_irq_write_data()
1443 if (unlikely(cryp->payload_in < AES_BLOCK_SIZE && in stm32_cryp_irq_write_data()
1444 (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) && in stm32_cryp_irq_write_data()
1445 is_encrypt(cryp))) { in stm32_cryp_irq_write_data()
1447 if (cryp->caps->padding_wa) { in stm32_cryp_irq_write_data()
1449 stm32_cryp_irq_write_gcm_padded_data(cryp); in stm32_cryp_irq_write_data()
1454 stm32_cryp_irq_set_npblb(cryp); in stm32_cryp_irq_write_data()
1457 if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) && in stm32_cryp_irq_write_data()
1458 (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) && in stm32_cryp_irq_write_data()
1459 is_decrypt(cryp))) { in stm32_cryp_irq_write_data()
1461 if (cryp->caps->padding_wa) { in stm32_cryp_irq_write_data()
1463 stm32_cryp_irq_write_ccm_padded_data(cryp); in stm32_cryp_irq_write_data()
1468 stm32_cryp_irq_set_npblb(cryp); in stm32_cryp_irq_write_data()
1471 if (is_aes(cryp) && is_ctr(cryp)) in stm32_cryp_irq_write_data()
1472 stm32_cryp_check_ctr_counter(cryp); in stm32_cryp_irq_write_data()
1474 stm32_cryp_irq_write_block(cryp); in stm32_cryp_irq_write_data()
1477 static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp) in stm32_cryp_irq_write_gcmccm_header() argument
1483 written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in); in stm32_cryp_irq_write_gcmccm_header()
1485 scatterwalk_copychunks(block, &cryp->in_walk, written, 0); in stm32_cryp_irq_write_gcmccm_header()
1487 stm32_cryp_write(cryp, CRYP_DIN, block[i]); in stm32_cryp_irq_write_gcmccm_header()
1489 cryp->header_in -= written; in stm32_cryp_irq_write_gcmccm_header()
1491 stm32_crypt_gcmccm_end_header(cryp); in stm32_cryp_irq_write_gcmccm_header()
1496 struct stm32_cryp *cryp = arg; in stm32_cryp_irq_thread() local
1498 u32 it_mask = stm32_cryp_read(cryp, CRYP_IMSCR); in stm32_cryp_irq_thread()
1500 if (cryp->irq_status & MISR_OUT) in stm32_cryp_irq_thread()
1502 stm32_cryp_irq_read_data(cryp); in stm32_cryp_irq_thread()
1504 if (cryp->irq_status & MISR_IN) { in stm32_cryp_irq_thread()
1505 if (is_gcm(cryp) || is_ccm(cryp)) { in stm32_cryp_irq_thread()
1506 ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK; in stm32_cryp_irq_thread()
1509 stm32_cryp_irq_write_gcmccm_header(cryp); in stm32_cryp_irq_thread()
1512 stm32_cryp_irq_write_data(cryp); in stm32_cryp_irq_thread()
1513 if (is_gcm(cryp)) in stm32_cryp_irq_thread()
1514 cryp->gcm_ctr++; in stm32_cryp_irq_thread()
1517 stm32_cryp_irq_write_data(cryp); in stm32_cryp_irq_thread()
1522 if (!cryp->payload_in && !cryp->header_in) in stm32_cryp_irq_thread()
1524 if (!cryp->payload_out) in stm32_cryp_irq_thread()
1526 stm32_cryp_write(cryp, CRYP_IMSCR, it_mask); in stm32_cryp_irq_thread()
1528 if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out) in stm32_cryp_irq_thread()
1529 stm32_cryp_finish_req(cryp, 0); in stm32_cryp_irq_thread()
1536 struct stm32_cryp *cryp = arg; in stm32_cryp_irq() local
1538 cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR); in stm32_cryp_irq()
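The interrupt path is split in the usual threaded-IRQ fashion: the hard handler stm32_cryp_irq() only latches CRYP_MISR into cryp->irq_status, while the FIFO draining and refilling runs in stm32_cryp_irq_thread(), which masks and re-enables interrupts through CRYP_IMSCR depending on how much payload and header data remains. The "dev_name(dev), cryp" fragment further down is the tail of the registration call in probe. A minimal sketch of the same registration pattern, with hypothetical handler names around the real devm_request_threaded_irq() signature:

#include <linux/interrupt.h>
#include <linux/platform_device.h>

static irqreturn_t example_irq(int irq, void *arg)
{
	/* Latch the masked interrupt status, then defer the real work. */
	return IRQ_WAKE_THREAD;
}

static irqreturn_t example_irq_thread(int irq, void *arg)
{
	/* Drain/refill the data FIFO, finish the request when done. */
	return IRQ_HANDLED;
}

static int example_register_irq(struct platform_device *pdev, void *drvdata)
{
	int irq = platform_get_irq(pdev, 0);

	if (irq < 0)
		return irq;

	return devm_request_threaded_irq(&pdev->dev, irq, example_irq,
					 example_irq_thread, IRQF_ONESHOT,
					 dev_name(&pdev->dev), drvdata);
}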
1546 .base.cra_driver_name = "stm32-ecb-aes",
1563 .base.cra_driver_name = "stm32-cbc-aes",
1581 .base.cra_driver_name = "stm32-ctr-aes",
1599 .base.cra_driver_name = "stm32-ecb-des",
1616 .base.cra_driver_name = "stm32-cbc-des",
1634 .base.cra_driver_name = "stm32-ecb-des3",
1651 .base.cra_driver_name = "stm32-cbc-des3",
1681 .cra_driver_name = "stm32-gcm-aes",
1701 .cra_driver_name = "stm32-ccm-aes",
1723 { .compatible = "st,stm32f756-cryp", .data = &f7_data},
1724 { .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
1731 struct device *dev = &pdev->dev; in stm32_cryp_probe()
1732 struct stm32_cryp *cryp; in stm32_cryp_probe() local
1736 cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL); in stm32_cryp_probe()
1737 if (!cryp) in stm32_cryp_probe()
1738 return -ENOMEM; in stm32_cryp_probe()
1740 cryp->caps = of_device_get_match_data(dev); in stm32_cryp_probe()
1741 if (!cryp->caps) in stm32_cryp_probe()
1742 return -ENODEV; in stm32_cryp_probe()
1744 cryp->dev = dev; in stm32_cryp_probe()
1746 cryp->regs = devm_platform_ioremap_resource(pdev, 0); in stm32_cryp_probe()
1747 if (IS_ERR(cryp->regs)) in stm32_cryp_probe()
1748 return PTR_ERR(cryp->regs); in stm32_cryp_probe()
1756 dev_name(dev), cryp); in stm32_cryp_probe()
1762 cryp->clk = devm_clk_get(dev, NULL); in stm32_cryp_probe()
1763 if (IS_ERR(cryp->clk)) { in stm32_cryp_probe()
1765 return PTR_ERR(cryp->clk); in stm32_cryp_probe()
1768 ret = clk_prepare_enable(cryp->clk); in stm32_cryp_probe()
1770 dev_err(cryp->dev, "Failed to enable clock\n"); in stm32_cryp_probe()
1788 platform_set_drvdata(pdev, cryp); in stm32_cryp_probe()
1791 list_add(&cryp->list, &cryp_list.dev_list); in stm32_cryp_probe()
1795 cryp->engine = crypto_engine_alloc_init(dev, 1); in stm32_cryp_probe()
1796 if (!cryp->engine) { in stm32_cryp_probe()
1798 ret = -ENOMEM; in stm32_cryp_probe()
1802 ret = crypto_engine_start(cryp->engine); in stm32_cryp_probe()
1828 crypto_engine_exit(cryp->engine); in stm32_cryp_probe()
1831 list_del(&cryp->list); in stm32_cryp_probe()
1837 clk_disable_unprepare(cryp->clk); in stm32_cryp_probe()
1844 struct stm32_cryp *cryp = platform_get_drvdata(pdev); in stm32_cryp_remove() local
1847 if (!cryp) in stm32_cryp_remove()
1848 return -ENODEV; in stm32_cryp_remove()
1850 ret = pm_runtime_resume_and_get(cryp->dev); in stm32_cryp_remove()
1857 crypto_engine_exit(cryp->engine); in stm32_cryp_remove()
1860 list_del(&cryp->list); in stm32_cryp_remove()
1863 pm_runtime_disable(cryp->dev); in stm32_cryp_remove()
1864 pm_runtime_put_noidle(cryp->dev); in stm32_cryp_remove()
1866 clk_disable_unprepare(cryp->clk); in stm32_cryp_remove()
1874 struct stm32_cryp *cryp = dev_get_drvdata(dev); in stm32_cryp_runtime_suspend() local
1876 clk_disable_unprepare(cryp->clk); in stm32_cryp_runtime_suspend()
1883 struct stm32_cryp *cryp = dev_get_drvdata(dev); in stm32_cryp_runtime_resume() local
1886 ret = clk_prepare_enable(cryp->clk); in stm32_cryp_runtime_resume()
1888 dev_err(cryp->dev, "Failed to prepare_enable clock\n"); in stm32_cryp_runtime_resume()
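The runtime-PM callbacks only gate the CRYP clock; the rest of the driver brackets hardware access with pm_runtime_get_sync() in stm32_cryp_hw_init() and pm_runtime_mark_last_busy() plus pm_runtime_put_autosuspend() in stm32_cryp_finish_req(). Those calls rely on an autosuspend setup made at probe time; a sketch of that usual setup is below, where the 50 ms delay is an illustrative value, not one taken from this listing.

#include <linux/device.h>
#include <linux/pm_runtime.h>

static void example_enable_autosuspend(struct device *dev)
{
	/* Allow runtime suspend (clock off) once the device has been
	 * idle for 50 ms after the last completed request. */
	pm_runtime_set_autosuspend_delay(dev, 50);
	pm_runtime_use_autosuspend(dev);

	/* The clock is already on at this point of probe, so mark the
	 * device active before enabling runtime PM, then drop the
	 * initial reference with the autosuspend variant. */
	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	pm_runtime_put_autosuspend(dev);
}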
1916 MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");