drivers/crypto/sa2ul.c — K3 SA2UL crypto accelerator driver (excerpt: lines matching "j721e" / "sa2ul")
// SPDX-License-Identifier: GPL-2.0
/*
 * K3 SA2UL crypto accelerator driver
 *
 * Copyright (C) 2018-2020 Texas Instruments Incorporated - http://www.ti.com
 */

/* ... */
#include "sa2ul.h"
/* Byte offset for Aux-1 in encryption security context */
/* ... */

/* Make 32-bit word from 4 bytes */
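/*
 * A sketch of the helper that comment describes; the macro name and the
 * most-significant-byte-first packing are assumptions reconstructed from
 * the comment, not verified against the full source:
 */
#define SA_MK_U32(b0, b1, b2, b3) (((b0) << 24) | ((b1) << 16) | \
				   ((b2) << 8) | (b3))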
/**
 * struct sa_cmdl_cfg - Command label configuration descriptor
 * ...
 */

/**
 * struct algo_data - Crypto algorithm specific data
 * ...
 */
/* From sa_swiz_128() — the SA2UL expects the security context to be
 * presented in little-endian order on its 128-bit bus, so each 16-byte
 * chunk is reversed in place: */
	/* ... */
			in[i + j] = data[15 - j];
	/* ... */
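/*
 * A self-contained sketch of the swizzle under those assumptions (the
 * signature sa_swiz_128(u8 *in, u16 len) is assumed); the surviving
 * line of the original loop body appears unchanged:
 */
static void sa_swiz_128(u8 *in, u16 len)
{
	u8 data[16];
	int i, j;

	for (i = 0; i < len; i += 16) {
		memcpy(data, &in[i], 16);	/* snapshot one 16-byte chunk */
		for (j = 0; j < 16; j++)
			in[i + j] = data[15 - j];	/* write it back reversed */
	}
}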
/* From sa_prepare_iopads(): */
	SHASH_DESC_ON_STACK(shash, data->ctx->shash);
	int block_size = crypto_shash_blocksize(data->ctx->shash);
	int digest_size = crypto_shash_digestsize(data->ctx->shash);
	/* ... */
	shash->tfm = data->ctx->shash;
	/* ... */
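/*
 * sa_prepare_iopads() precomputes the HMAC inner/outer state so the
 * hardware can resume from it. A sketch of the standard RFC 2104 pad
 * derivation it builds on; the helper name and parameters are
 * illustrative, not the driver's exact code (keys longer than a block
 * are first hashed down, which is omitted here):
 */
static void example_hmac_pads(const u8 *key, u16 key_sz, int block_size,
			      u8 *k_ipad, u8 *k_opad)
{
	int i;

	for (i = 0; i < key_sz; i++) {
		k_ipad[i] = key[i] ^ 0x36;
		k_opad[i] = key[i] ^ 0x5c;
	}
	/* Remaining block bytes take the bare pad constants */
	for (; i < block_size; i++) {
		k_ipad[i] = 0x36;
		k_opad[i] = 0x5c;
	}
}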
/* From sa_aes_inv_key() — derive the inverse key used in the AES-CBC
 * decryption operation: */
	/* ... */
		return -EINVAL;
	/* ... */
	key_pos = key_sz + 24 - 4;
	/* ... */
	return -EINVAL;
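/*
 * The "inverse key" is the tail of the expanded AES key schedule: the
 * hardware decrypts starting from the last round key. A hedged sketch
 * using the kernel's aes_expandkey() from <crypto/aes.h>; the offsets
 * follow the fragment above (key_sz + 24 - 4 is the AES-256 case), and
 * AES-192 is omitted because it needs extra schedule words:
 */
static int example_aes_inv_key(u8 *inv_key, const u8 *key, u16 key_sz)
{
	struct crypto_aes_ctx ctx;
	int key_pos;

	if (aes_expandkey(&ctx, key, key_sz))
		return -EINVAL;

	switch (key_sz) {
	case AES_KEYSIZE_128:
		key_pos = key_sz + 24;		/* words 40..43: last round key */
		break;
	case AES_KEYSIZE_256:
		key_pos = key_sz + 24 - 4;	/* words 52..59 */
		break;
	default:
		return -EINVAL;
	}

	memcpy(inv_key, &ctx.key_enc[key_pos], key_sz);
	memzero_explicit(&ctx, sizeof(ctx));
	return 0;
}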
/* From sa_set_sc_enc(): */
	if (enc)
		mci = ad->mci_enc;
	else
		mci = ad->mci_dec;
	/* ... */
	/* For AES-CBC decryption get the inverse key */
	if (ad->inv_key && !enc) {
		/* ... */
			return -EINVAL;
	}
/* From sa_set_sc_auth(): */
	sc_buf[1] |= ad->auth_ctrl;
	/* ... */
	if (ad->keyed_mac) {
		ad->prep_iopad(ad, key, key_sz, ipad, opad);
		/* ... */
		memcpy(&sc_buf[32], ipad, ad->hash_size);
		/* Copy opad to Aux-1 */
		memcpy(&sc_buf[64], opad, ad->hash_size);
	}
/* From sa_format_cmdl_gen() — build the static command label template: */
	if (cfg->enc_eng_id && cfg->auth_eng_id) {
		if (cfg->enc) {
			/* ... */
			enc_next_eng = cfg->auth_eng_id;

			if (cfg->iv_size)
				auth_offset += cfg->iv_size;
		} else {
			/* ... */
			auth_next_eng = cfg->enc_eng_id;
		}
	}

	if (cfg->enc_eng_id) {
		upd_info->flags |= SA_CMDL_UPD_ENC;
		upd_info->enc_size.index = enc_offset >> 2;
		upd_info->enc_offset.index = upd_info->enc_size.index + 1;
		/* ... */
		if (cfg->iv_size) {
			upd_info->flags |= SA_CMDL_UPD_ENC_IV;
			upd_info->enc_iv.index = /* ... */;
			upd_info->enc_iv.size = cfg->iv_size;

			cmdl[/* ... */] =
				SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;
			cmdl[/* ... */] =
				(SA_CTX_ENC_AUX2_OFFSET | (cfg->iv_size >> 3));
			total += SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;
		}
		/* ... */
	}

	if (cfg->auth_eng_id) {
		upd_info->flags |= SA_CMDL_UPD_AUTH;
		upd_info->auth_size.index = auth_offset >> 2;
		upd_info->auth_offset.index = upd_info->auth_size.index + 1;
		/* ... */
	}
/* From sa_update_cmdl() — patch per-request fields into a copy of the
 * pre-formatted command label: */
	if (likely(upd_info->flags & SA_CMDL_UPD_ENC)) {
		cmdl[upd_info->enc_size.index] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK;
		cmdl[upd_info->enc_size.index] |= req->enc_size;
		cmdl[upd_info->enc_offset.index] &= /* ... */;
		cmdl[upd_info->enc_offset.index] |=
			((u32)req->enc_offset << /* ... */);
	}

	if (likely(upd_info->flags & SA_CMDL_UPD_ENC_IV)) {
		__be32 *data = (__be32 *)&cmdl[upd_info->enc_iv.index];
		u32 *enc_iv = (u32 *)req->enc_iv;

		/* 'i' is presumably the function-scope index, starting at 0 */
		for (j = 0; i < upd_info->enc_iv.size; i += 4, j++) {
			data[j] = cpu_to_be32(*enc_iv);
			enc_iv++;
		}
	}

	if (likely(upd_info->flags & SA_CMDL_UPD_AUTH)) {
		cmdl[upd_info->auth_size.index] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK;
		cmdl[upd_info->auth_size.index] |= req->auth_size;
		cmdl[upd_info->auth_offset.index] &= /* ... */;
		cmdl[upd_info->auth_offset.index] |=
			((u32)req->auth_offset << /* ... */);

		if (upd_info->flags & SA_CMDL_UPD_AUTH_IV) {
			sa_copy_iv((void *)&cmdl[upd_info->auth_iv.index],
				   req->auth_iv,
				   (upd_info->auth_iv.size > 8));
		}

		if (upd_info->flags & SA_CMDL_UPD_AUX_KEY) {
			int offset = (req->auth_size & 0xF) ? 4 : 0;

			memcpy(&cmdl[upd_info->aux_key_info.index],
			       &upd_info->aux_key[offset], 16);
		}
	}
/* From sa_init_sc() — populate the security context buffer: */
	u8 *sc_buf = ctx->sc;
	u16 sc_id = ctx->sc_id;
	/* ... */
	if (ad->auth_eng.eng_id) {
		if (enc)
			first_engine = ad->enc_eng.eng_id;
		else
			first_engine = ad->auth_eng.eng_id;

		/* ... */
		auth_sc_offset = enc_sc_offset + ad->enc_eng.sc_size;
		/* ... */
		if (!ad->hash_size)
			return -EINVAL;
		ad->hash_size = roundup(ad->hash_size, 8);
	} else if (ad->enc_eng.eng_id && !ad->auth_eng.eng_id) {
		/* ... */
		first_engine = ad->enc_eng.eng_id;
		/* ... */
		ad->hash_size = ad->iv_out_size;
	}

	/* ... */
	if (ad->enc_eng.sc_size) {
		if (/* ... */)
			return -EINVAL;
	}

	/* ... */
	if (ad->auth_eng.sc_size)
		/* ... */;

	/* ... */
	sa_set_swinfo(first_engine, ctx->sc_id, ctx->sc_phys, 1, 0,
		      SA_SW_INFO_FLAG_EVICT, ad->hash_size, swinfo);

	sa_dump_sc(sc_buf, ctx->sc_phys);
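/*
 * Rough shape of the security context assembled above (a descriptive
 * sketch, offsets abbreviated): a software-control header, then the
 * encryption engine context (keys, mode control instructions), then
 * the authentication engine context (pre-computed ipad/opad state).
 * first_engine picks which engine sees data first — encryption engine
 * when encrypting, auth engine when decrypting — and the swinfo words
 * tell the DMA layer where the context lives (sc_id, sc_phys) and how
 * many result bytes (hash_size) to return in the metadata.
 */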
/* From sa_free_ctx_info(): */
	bn = ctx->sc_id - data->sc_id_start;
	spin_lock(&data->scid_lock);
	__clear_bit(bn, data->ctx_bm);
	data->sc_id--;
	spin_unlock(&data->scid_lock);

	if (ctx->sc) {
		dma_pool_free(data->sc_pool, ctx->sc, ctx->sc_phys);
		ctx->sc = NULL;
	}
/* From sa_init_ctx_info(): */
	spin_lock(&data->scid_lock);
	bn = find_first_zero_bit(data->ctx_bm, SA_MAX_NUM_CTX);
	__set_bit(bn, data->ctx_bm);
	data->sc_id++;
	spin_unlock(&data->scid_lock);

	ctx->sc_id = (u16)(data->sc_id_start + bn);

	ctx->sc = dma_pool_alloc(data->sc_pool, GFP_KERNEL, &ctx->sc_phys);
	if (!ctx->sc) {
		dev_err(&data->pdev->dev, "Failed to allocate SC memory\n");
		err = -ENOMEM;
		/* ... */
	}
	/* ... error path: roll the id allocation back */
	spin_lock(&data->scid_lock);
	__clear_bit(bn, data->ctx_bm);
	data->sc_id--;
	spin_unlock(&data->scid_lock);
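/*
 * Context bookkeeping in the two helpers above: a security-context id
 * is a bitmap slot (ctx_bm) claimed under scid_lock and biased by
 * sc_id_start, while the context memory itself comes from the
 * "keystone-sc" DMA pool created in sa_init_mem() further below.
 * sa_free_ctx_info() reverses both steps.
 */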
/* From sa_cipher_cra_exit(): */
	dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
		__func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
		ctx->dec.sc_id, &ctx->dec.sc_phys);

	sa_free_ctx_info(&ctx->enc, data);
	sa_free_ctx_info(&ctx->dec, data);

	crypto_free_sync_skcipher(ctx->fallback.skcipher);
/* From sa_cipher_cra_init(): */
	const char *name = crypto_tfm_alg_name(&tfm->base);
	/* ... */
	ctx->dev_data = data;

	ret = sa_init_ctx_info(&ctx->enc, data);
	if (ret)
		return ret;
	ret = sa_init_ctx_info(&ctx->dec, data);
	if (ret) {
		sa_free_ctx_info(&ctx->enc, data);
		return ret;
	}

	ctx->fallback.skcipher =
		crypto_alloc_sync_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback.skcipher)) {
		/* ... */
		return PTR_ERR(ctx->fallback.skcipher);
	}

	dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
		__func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
		ctx->dec.sc_id, &ctx->dec.sc_phys);
/* From sa_cipher_setkey(): */
	if (/* ... */)
		return -EINVAL;

	ad->enc_eng.eng_id = SA_ENG_ID_EM1;
	ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
	/* ... */
	cfg.enc_eng_id = ad->enc_eng.eng_id;

	/* Keep the software fallback's key in sync */
	crypto_sync_skcipher_clear_flags(ctx->fallback.skcipher,
					 CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(ctx->fallback.skcipher,
				       tfm->base.crt_flags &
				       CRYPTO_TFM_REQ_MASK);
	ret = crypto_sync_skcipher_setkey(ctx->fallback.skcipher, key, keylen);
	if (ret)
		return ret;

	/* Set up the encryption security context and command label */
	if (sa_init_sc(&ctx->enc, key, keylen, NULL, 0, ad, 1,
		       &ctx->enc.epib[1]))
		/* ... */;

	cmdl_len = sa_format_cmdl_gen(&cfg,
				      (u8 *)ctx->enc.cmdl,
				      &ctx->enc.cmdl_upd_info);
	/* ... */
	ctx->enc.cmdl_size = cmdl_len;

	/* Set up the decryption security context and command label */
	if (sa_init_sc(&ctx->dec, key, keylen, NULL, 0, ad, 0,
		       &ctx->dec.epib[1]))
		/* ... */;

	cfg.enc_eng_id = ad->enc_eng.eng_id;
	cmdl_len = sa_format_cmdl_gen(&cfg, (u8 *)ctx->dec.cmdl,
				      &ctx->dec.cmdl_upd_info);
	/* ... */
	ctx->dec.cmdl_size = cmdl_len;
	ctx->iv_idx = ad->iv_idx;

	/* ... shared error path: */
	return -EINVAL;
/* From sa_aes_cbc_setkey(): */
	int key_idx = (keylen >> 3) - 2;

	if (/* ... */)
		return -EINVAL;

/* From sa_aes_ecb_setkey(): */
	int key_idx = (keylen >> 3) - 2;

	if (/* ... */)
		return -EINVAL;
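/*
 * Worked example for the key_idx arithmetic above: keylen >> 3 gives
 * 2, 3 or 4 for 16-, 24- and 32-byte AES keys, so key_idx becomes 0,
 * 1 or 2 — an index into the per-key-size mode-control-instruction
 * arrays; anything outside that range is rejected with -EINVAL.
 */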
/* From sa_sync_from_device(): */
	if (rxd->mapped_sg[0].dir == DMA_BIDIRECTIONAL)
		sgt = &rxd->mapped_sg[0].sgt;
	else
		sgt = &rxd->mapped_sg[1].sgt;

	dma_sync_sgtable_for_cpu(rxd->ddev, sgt, DMA_FROM_DEVICE);
/* From sa_free_sa_rx_data(): */
	for (i = 0; i < ARRAY_SIZE(rxd->mapped_sg); i++) {
		struct sa_mapped_sg *mapped_sg = &rxd->mapped_sg[i];

		if (mapped_sg->mapped) {
			dma_unmap_sgtable(rxd->ddev, &mapped_sg->sgt,
					  mapped_sg->dir, 0);
			kfree(mapped_sg->split_sg);
		}
	}
/* From sa_aes_dma_in_callback() — copy the output IV back to the request: */
	req = container_of(rxd->req, struct skcipher_request, base);

	if (req->iv) {
		mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in,
								  &pl, &ml);
		result = (u32 *)req->iv;

		for (i = 0; i < (rxd->enc_iv_size / 4); i++)
			result[i] = be32_to_cpu(mdptr[i + rxd->iv_idx]);
	}
/* From sa_run() — common request submission path: */
	struct sa_ctx_info *sa_ctx = req->enc ? &req->ctx->enc : &req->ctx->dec;
	/* ... */
	gfp_flags = req->base->flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
		    GFP_KERNEL : GFP_ATOMIC;
	/* ... */
	if (!rxd)
		return -ENOMEM;

	if (req->src != req->dst) {
		/* ... (out-of-place: dir_src is DMA_TO_DEVICE; in-place
		 * mappings use DMA_BIDIRECTIONAL, cf. sa_sync_from_device) */
	}

	/*
	 * SA2UL has an interesting feature where the receive DMA channel
	 * [...]
	 */
	if (req->size >= 256)
		dma_rx = pdata->dma_rx2;
	else
		dma_rx = pdata->dma_rx1;
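	/*
	 * In other words (reading the truncated comment together with the
	 * code above): the engine steers completions to different RX
	 * channels by payload size, so the driver must post its receive
	 * descriptor on the channel the hardware will actually use —
	 * rx2 for payloads of 256 bytes or more, rx1 otherwise.
	 */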
	ddev = dma_rx->device->dev;
	rxd->ddev = ddev;

	memcpy(cmdl, sa_ctx->cmdl, sa_ctx->cmdl_size);
	/* ... */
	sa_update_cmdl(req, cmdl, &sa_ctx->cmdl_upd_info);

	if (req->type != CRYPTO_ALG_TYPE_AHASH) {
		if (req->enc)
			req->type |= /* ... */;
		else
			req->type |= /* ... */;
	}

	cmdl[sa_ctx->cmdl_size / sizeof(u32)] = req->type;
	/* ... */

	/* Map the source buffer */
	src = req->src;
	sg_nents = sg_nents_for_len(src, req->size);

	split_size = req->size;

	mapped_sg = &rxd->mapped_sg[0];
	if (sg_nents == 1 && split_size <= req->src->length) {
		src = &mapped_sg->static_sg;
		/* ... */
		sg_set_page(src, sg_page(req->src), split_size,
			    req->src->offset);

		mapped_sg->sgt.sgl = src;
		mapped_sg->sgt.orig_nents = src_nents;
		ret = dma_map_sgtable(ddev, &mapped_sg->sgt, dir_src, 0);
		/* ... */
		mapped_sg->dir = dir_src;
		mapped_sg->mapped = true;
	} else {
		mapped_sg->sgt.sgl = req->src;
		mapped_sg->sgt.orig_nents = sg_nents;
		ret = dma_map_sgtable(ddev, &mapped_sg->sgt, dir_src, 0);
		/* ... */
		mapped_sg->dir = dir_src;
		mapped_sg->mapped = true;

		ret = sg_split(mapped_sg->sgt.sgl, mapped_sg->sgt.nents, 0, 1,
			       /* ... */);
		if (ret) {
			src_nents = mapped_sg->sgt.nents;
			src = mapped_sg->sgt.sgl;
		} else {
			mapped_sg->split_sg = src;
		}
	}

	dma_sync_sgtable_for_device(ddev, &mapped_sg->sgt, DMA_TO_DEVICE);
	/* Map the destination buffer (out-of-place requests) */
	/* ... */
	dst_nents = sg_nents_for_len(req->dst, req->size);
	mapped_sg = &rxd->mapped_sg[1];

	if (dst_nents == 1 && split_size <= req->dst->length) {
		dst = &mapped_sg->static_sg;
		/* ... */
		sg_set_page(dst, sg_page(req->dst), split_size,
			    req->dst->offset);

		mapped_sg->sgt.sgl = dst;
		mapped_sg->sgt.orig_nents = dst_nents;
		ret = dma_map_sgtable(ddev, &mapped_sg->sgt,
				      DMA_FROM_DEVICE, 0);
		/* ... */
		mapped_sg->dir = DMA_FROM_DEVICE;
		mapped_sg->mapped = true;
	} else {
		mapped_sg->sgt.sgl = req->dst;
		mapped_sg->sgt.orig_nents = dst_nents;
		ret = dma_map_sgtable(ddev, &mapped_sg->sgt,
				      DMA_FROM_DEVICE, 0);
		/* ... */
		mapped_sg->dir = DMA_FROM_DEVICE;
		mapped_sg->mapped = true;

		ret = sg_split(mapped_sg->sgt.sgl, mapped_sg->sgt.nents,
			       /* ... */);
		if (ret) {
			dst_nents = mapped_sg->sgt.nents;
			dst = mapped_sg->sgt.sgl;
		} else {
			mapped_sg->split_sg = dst;
		}
	}
	rxd->tx_in = dmaengine_prep_slave_sg(dma_rx, dst, dst_nents,
					     DMA_DEV_TO_MEM, /* ... */);
	if (!rxd->tx_in) {
		dev_err(pdata->dev, "IN prep_slave_sg() failed\n");
		ret = -EINVAL;
		/* ... */
	}

	rxd->req = (void *)req->base;
	rxd->enc = req->enc;
	rxd->iv_idx = req->ctx->iv_idx;
	rxd->enc_iv_size = sa_ctx->cmdl_upd_info.enc_iv.size;
	rxd->tx_in->callback = req->callback;
	rxd->tx_in->callback_param = rxd;

	tx_out = dmaengine_prep_slave_sg(pdata->dma_tx, src, src_nents,
					 DMA_MEM_TO_DEV, /* ... */);
	if (!tx_out) {
		dev_err(pdata->dev, "OUT prep_slave_sg() failed\n");
		ret = -EINVAL;
		/* ... */
	}
	/* ... */
	sa_prepare_tx_desc(mdptr, (sa_ctx->cmdl_size + (SA_PSDATA_CTX_WORDS *
			   sizeof(u32))), cmdl, sizeof(sa_ctx->epib),
			   sa_ctx->epib);

	ml = sa_ctx->cmdl_size + (SA_PSDATA_CTX_WORDS * sizeof(u32));
	dmaengine_desc_set_metadata_len(tx_out, req->mdata_size);
	/* ... */
	dmaengine_submit(rxd->tx_in);
	/* ... */
	dma_async_issue_pending(pdata->dma_tx);

	return -EINPROGRESS;
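/*
 * Summary of the descriptor flow above: the TX descriptor carries the
 * payload plus, as dmaengine metadata, the per-request command label
 * (cmdl) and EPIB words; the RX descriptor is submitted first, with
 * the caller's completion callback attached, and sa_run() returns
 * -EINPROGRESS while the hardware works.
 */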
/* From sa_cipher_run(): */
	struct crypto_alg *alg = req->base.tfm->__crt_alg;
	/* ... */
	if (!req->cryptlen)
		return 0;

	if (req->cryptlen % alg->cra_blocksize)
		return -EINVAL;

	/* Use the fallback for data sizes the engine can't handle safely */
	if (req->cryptlen > SA_MAX_DATA_SZ ||
	    (req->cryptlen >= SA_UNSAFE_DATA_SZ_MIN &&
	     req->cryptlen <= SA_UNSAFE_DATA_SZ_MAX)) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback.skcipher);

		skcipher_request_set_sync_tfm(subreq, ctx->fallback.skcipher);
		skcipher_request_set_callback(subreq, req->base.flags,
					      /* ... */);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
					   req->cryptlen, req->iv);
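		/*
		 * The elided lines presumably hand the request to the
		 * software fallback; a minimal sketch (the 'enc' flag is
		 * the third argument seen in sa_encrypt()/sa_decrypt()):
		 */
		return enc ? crypto_skcipher_encrypt(subreq) :
			     crypto_skcipher_decrypt(subreq);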
	}
	/* ... */
	sa_req.size = req->cryptlen;
	sa_req.enc_size = req->cryptlen;
	sa_req.src = req->src;
	sa_req.dst = req->dst;
	/* ... */
	sa_req.base = &req->base;
	/* ... */

/* From sa_encrypt(): */
	return sa_cipher_run(req, req->iv, 1);

/* From sa_decrypt(): */
	return sa_cipher_run(req, req->iv, 0);
/* From sa_sha_dma_in_callback(): */
	req = container_of(rxd->req, struct ahash_request, base);
	/* ... */
	mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
	result = (u32 *)req->result;
	/* ... */

/* From zero_message_process() — return the well-known digest of the
 * empty message without touching the hardware: */
	case SHA1_DIGEST_SIZE:
		memcpy(req->result, sha1_zero_message_hash, sa_digest_size);
		break;
	case SHA256_DIGEST_SIZE:
		memcpy(req->result, sha256_zero_message_hash, sa_digest_size);
		break;
	case SHA512_DIGEST_SIZE:
		memcpy(req->result, sha512_zero_message_hash, sa_digest_size);
		break;
	default:
		return -EINVAL;
/* From sa_sha_run(): */
	auth_len = req->nbytes;
	/* ... */
	struct ahash_request *subreq = &rctx->fallback_req;
	/* ... fallback path: */
	ahash_request_set_tfm(subreq, ctx->fallback.ahash);
	subreq->base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
	/* ... */
	subreq->nbytes = auth_len;
	subreq->src = req->src;
	subreq->result = req->result;
	/* ... */
	subreq->nbytes = 0;
	/* ... */
	sa_req.src = req->src;
	sa_req.dst = req->src;
	/* ... */
	sa_req.base = &req->base;
	/* ... */
/* From sa_sha_setup(): */
	int bs = crypto_shash_blocksize(ctx->shash);
	/* ... */
	ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
	ad->auth_eng.eng_id = SA_ENG_ID_AM1;
	ad->auth_eng.sc_size = SA_CTX_AUTH_TYPE2_SZ;
	/* ... */
	memset(ctx->authkey, 0, bs);
	/* ... */
	cfg.aalg = ad->aalg_id;
	cfg.enc_eng_id = ad->enc_eng.eng_id;
	cfg.auth_eng_id = ad->auth_eng.eng_id;
	/* ... */
	if (sa_init_sc(&ctx->enc, NULL, 0, NULL, 0, ad, 0,
		       &ctx->enc.epib[1]))
		/* ... */;

	cmdl_len = sa_format_cmdl_gen(&cfg,
				      (u8 *)ctx->enc.cmdl,
				      &ctx->enc.cmdl_upd_info);
	/* ... */
	ctx->enc.cmdl_size = cmdl_len;
	/* ... */
	return -EINVAL;
/* From sa_sha_cra_init_alg(): */
	ctx->dev_data = data;
	ret = sa_init_ctx_info(&ctx->enc, data);
	if (ret)
		return ret;

	ctx->shash = crypto_alloc_shash(alg_base, 0,
					CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->shash)) {
		/* ... */
		return PTR_ERR(ctx->shash);
	}

	ctx->fallback.ahash =
		crypto_alloc_ahash(/* ... */);
	if (IS_ERR(ctx->fallback.ahash)) {
		dev_err(ctx->dev_data->dev,
			/* ... */);
		return PTR_ERR(ctx->fallback.ahash);
	}
	/* ... */
	dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
		__func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
		ctx->dec.sc_id, &ctx->dec.sc_phys);

	/* ... request context is sized to contain the fallback's: */
		crypto_ahash_reqsize(ctx->fallback.ahash));
/* From sa_sha_init(): */
	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
	rctx->fallback_req.base.flags =
		req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_init(&rctx->fallback_req);

/* From sa_sha_update(): */
	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
	rctx->fallback_req.base.flags =
		req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;

	return crypto_ahash_update(&rctx->fallback_req);

/* From sa_sha_final(): */
	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
	rctx->fallback_req.base.flags =
		req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_final(&rctx->fallback_req);

/* From sa_sha_finup(): */
	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
	rctx->fallback_req.base.flags =
		req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;

	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_finup(&rctx->fallback_req);

/* From sa_sha_import(): */
	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
	rctx->fallback_req.base.flags = req->base.flags &
		CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_import(&rctx->fallback_req, in);

/* From sa_sha_export(): */
	struct ahash_request *subreq = &rctx->fallback_req;

	ahash_request_set_tfm(subreq, ctx->fallback.ahash);
	subreq->base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
	/* ... */
/* From sa_sha_cra_exit(): */
	dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
		__func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
		ctx->dec.sc_id, &ctx->dec.sc_phys);
	/* ... */
	sa_free_ctx_info(&ctx->enc, data);

	crypto_free_shash(ctx->shash);
	crypto_free_ahash(ctx->fallback.ahash);
/* From sa_aead_dma_in_callback() — write out or verify the auth tag: */
	req = container_of(rxd->req, struct aead_request, base);
	/* ... */
	start = req->assoclen + req->cryptlen;
	/* ... */
	mdptr = (u32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
	/* ... */
	auth_len = req->assoclen + req->cryptlen;

	if (rxd->enc) {
		/* Append the computed tag to the destination */
		scatterwalk_map_and_copy(&mdptr[4], req->dst, start, authsize,
					 1);
	} else {
		/* Compare the computed tag against the one in the source */
		auth_len -= authsize;
		start -= authsize;
		scatterwalk_map_and_copy(auth_tag, req->src, start, authsize,
					 0);

		err = memcmp(&mdptr[4], auth_tag, authsize) ? -EBADMSG : 0;
	}
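	/*
	 * Design note: memcmp() is not constant-time; the kernel's
	 * crypto_memneq() helper exists for timing-safe tag comparison,
	 * e.g. (a sketch, not necessarily what upstream does here):
	 *
	 *	err = crypto_memneq(&mdptr[4], auth_tag, authsize) ?
	 *		-EBADMSG : 0;
	 */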
/* From sa_cra_init_aead(): */
	ctx->shash = crypto_alloc_shash(hash, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->shash)) {
		/* ... */
		return PTR_ERR(ctx->shash);
	}

	ctx->fallback.aead = crypto_alloc_aead(fallback, 0,
					       /* ... */);
	if (IS_ERR(ctx->fallback.aead)) {
		/* ... */
		return PTR_ERR(ctx->fallback.aead);
	}

	/* ... request context is sized to contain the fallback's: */
		crypto_aead_reqsize(ctx->fallback.aead));

	ret = sa_init_ctx_info(&ctx->enc, data);
	if (ret)
		return ret;

	ret = sa_init_ctx_info(&ctx->dec, data);
	if (ret) {
		sa_free_ctx_info(&ctx->enc, data);
		return ret;
	}

	dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
		__func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
		ctx->dec.sc_id, &ctx->dec.sc_phys);
1757 "authenc(hmac(sha1-ce),cbc(aes-ce))"); in sa_cra_init_aead_sha1()
1763 "authenc(hmac(sha256-ce),cbc(aes-ce))"); in sa_cra_init_aead_sha256()
1771 crypto_free_shash(ctx->shash); in sa_exit_tfm_aead()
1772 crypto_free_aead(ctx->fallback.aead); in sa_exit_tfm_aead()
1774 sa_free_ctx_info(&ctx->enc, data); in sa_exit_tfm_aead()
1775 sa_free_ctx_info(&ctx->dec, data); in sa_exit_tfm_aead()
/* From sa_aead_setkey(): */
	if (/* ... */)
		return -EINVAL;
	/* ... */
	key_idx = (keys.enckeylen >> 3) - 2;
	if (/* ... */)
		return -EINVAL;

	ad->ctx = ctx;
	ad->enc_eng.eng_id = SA_ENG_ID_EM1;
	ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
	ad->auth_eng.eng_id = SA_ENG_ID_AM1;
	ad->auth_eng.sc_size = SA_CTX_AUTH_TYPE2_SZ;
	ad->mci_enc = mci_cbc_enc_no_iv_array[key_idx];
	ad->mci_dec = mci_cbc_dec_no_iv_array[key_idx];
	ad->inv_key = true;
	ad->keyed_mac = true;
	ad->ealg_id = SA_EALG_ID_AES_CBC;
	ad->prep_iopad = sa_prepare_iopads;
	/* ... */
	cfg.aalg = ad->aalg_id;
	cfg.enc_eng_id = ad->enc_eng.eng_id;
	cfg.auth_eng_id = ad->auth_eng.eng_id;

	/* Set up the encryption security context and command label */
	if (sa_init_sc(&ctx->enc, keys.enckey, keys.enckeylen,
		       /* ... */, ad, 1, &ctx->enc.epib[1]))
		return -EINVAL;

	cmdl_len = sa_format_cmdl_gen(&cfg,
				      (u8 *)ctx->enc.cmdl,
				      &ctx->enc.cmdl_upd_info);
	if (/* ... */)
		return -EINVAL;

	ctx->enc.cmdl_size = cmdl_len;

	/* Set up the decryption security context and command label */
	if (sa_init_sc(&ctx->dec, keys.enckey, keys.enckeylen,
		       /* ... */, ad, 0, &ctx->dec.epib[1]))
		return -EINVAL;
	/* ... */
	cmdl_len = sa_format_cmdl_gen(&cfg, (u8 *)ctx->dec.cmdl,
				      &ctx->dec.cmdl_upd_info);
	if (/* ... */)
		return -EINVAL;

	ctx->dec.cmdl_size = cmdl_len;

	crypto_aead_clear_flags(ctx->fallback.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(ctx->fallback.aead,
			      /* ... */);

	return crypto_aead_setkey(ctx->fallback.aead, key, keylen);

/* From sa_aead_setauthsize(): */
	return crypto_aead_setauthsize(ctx->fallback.aead, authsize);
/* From sa_aead_run(): */
	enc_size = req->cryptlen;
	auth_size = req->assoclen + req->cryptlen;

	if (!enc) {
		/* On decryption the tag is part of the input, not the payload */
		enc_size -= crypto_aead_authsize(tfm);
		auth_size -= crypto_aead_authsize(tfm);
	}

	/* ... fallback path (condition elided): */
	aead_request_set_tfm(subreq, ctx->fallback.aead);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(subreq, req->assoclen);
	/* ... */
	sa_req.enc_offset = req->assoclen;
	/* ... */
	sa_req.base = &req->base;
	/* ... */
	sa_req.src = req->src;
	sa_req.dst = req->dst;
	/* ... */

/* From sa_aead_encrypt(): */
	return sa_aead_run(req, req->iv, 1);

/* From sa_aead_decrypt(): */
	return sa_aead_run(req, req->iv, 0);
1955 .base.cra_driver_name = "cbc-aes-sa2ul",
1978 .base.cra_driver_name = "ecb-aes-sa2ul",
2000 .base.cra_driver_name = "cbc-des3-sa2ul",
2023 .base.cra_driver_name = "ecb-des3-sa2ul",
2046 .cra_driver_name = "sha1-sa2ul",
2075 .cra_driver_name = "sha256-sa2ul",
2104 .cra_driver_name = "sha512-sa2ul",
2134 "authenc(hmac(sha1),cbc(aes))-sa2ul",
2161 "authenc(hmac(sha256),cbc(aes))-sa2ul",
2205 "un-supported crypto algorithm (%d)", in sa_register_algos()
/* From sa_init_mem(): */
	struct device *dev = &dev_data->pdev->dev;
	/* ... */
	dev_data->sc_pool = dma_pool_create("keystone-sc", dev,
					    /* ... */);
	if (!dev_data->sc_pool) {
		/* ... */
		return -ENOMEM;
	}
/* From sa_dma_init(): */
	dd->dma_rx1 = NULL;
	dd->dma_tx = NULL;
	dd->dma_rx2 = NULL;

	ret = dma_coerce_mask_and_coherent(dd->dev, DMA_BIT_MASK(48));
	if (ret)
		return ret;

	dd->dma_rx1 = dma_request_chan(dd->dev, "rx1");
	if (IS_ERR(dd->dma_rx1))
		return dev_err_probe(dd->dev, PTR_ERR(dd->dma_rx1),
				     /* ... */);

	dd->dma_rx2 = dma_request_chan(dd->dev, "rx2");
	if (IS_ERR(dd->dma_rx2)) {
		ret = dev_err_probe(dd->dev, PTR_ERR(dd->dma_rx2),
				    /* ... */);
		/* ... */
	}

	dd->dma_tx = dma_request_chan(dd->dev, "tx");
	if (IS_ERR(dd->dma_tx)) {
		ret = dev_err_probe(dd->dev, PTR_ERR(dd->dma_tx),
				    /* ... */);
		/* ... */
	}
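	/*
	 * Before the dmaengine_slave_config() calls below, the driver
	 * fills a struct dma_slave_config (elided in this excerpt). A
	 * plausible sketch — the widths and burst sizes here are
	 * assumptions, not the upstream values:
	 */
	memset(&cfg, 0, sizeof(cfg));
	cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;	/* assumed */
	cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;	/* assumed */
	cfg.src_maxburst = 4;					/* assumed */
	cfg.dst_maxburst = 4;					/* assumed */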
	ret = dmaengine_slave_config(dd->dma_rx1, &cfg);
	if (ret) {
		dev_err(dd->dev, "can't configure IN dmaengine slave: %d\n",
			ret);
		/* ... */
	}

	ret = dmaengine_slave_config(dd->dma_rx2, &cfg);
	if (ret) {
		dev_err(dd->dev, "can't configure IN dmaengine slave: %d\n",
			ret);
		/* ... */
	}

	ret = dmaengine_slave_config(dd->dma_tx, &cfg);
	if (ret) {
		dev_err(dd->dev, "can't configure OUT dmaengine slave: %d\n",
			ret);
		/* ... */
	}
	/* ... */

	/* Error unwind labels (elided) release the channels in reverse order: */
	dma_release_channel(dd->dma_tx);
	/* ... */
	dma_release_channel(dd->dma_rx2);
	/* ... */
	dma_release_channel(dd->dma_rx1);
/* From sa_ul_probe(): */
	struct device *dev = &pdev->dev;
	struct device_node *node = dev->of_node;
	/* ... */
		return -ENOMEM;
	/* ... */
	dev_data->dev = dev;
	dev_data->pdev = pdev;
	/* ... */
		dev_err(&pdev->dev, "%s: failed to get sync: %d\n", __func__,
			ret);
	/* ... */
	spin_lock_init(&dev_data->scid_lock);
	/* ... */
	dev_data->base = saul_base;
	/* ... */
	ret = of_platform_populate(node, NULL, NULL, &pdev->dev);
	/* ... */

	device_for_each_child(&pdev->dev, &pdev->dev, sa_link_child);
	/* ... */

	/* Error unwind (labels elided): */
	sa_unregister_algos(&pdev->dev);

	dma_release_channel(dev_data->dma_rx2);
	dma_release_channel(dev_data->dma_rx1);
	dma_release_channel(dev_data->dma_tx);
	/* ... */
	dma_pool_destroy(dev_data->sc_pool);
	/* ... */
	pm_runtime_put_sync(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
/* From sa_ul_remove(): */
	sa_unregister_algos(&pdev->dev);

	dma_release_channel(dev_data->dma_rx2);
	dma_release_channel(dev_data->dma_rx1);
	dma_release_channel(dev_data->dma_tx);

	dma_pool_destroy(dev_data->sc_pool);
	/* ... */
	pm_runtime_put_sync(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
2425 {.compatible = "ti,j721e-sa2ul",},
2426 {.compatible = "ti,am654-sa2ul",},
2435 .name = "saul-crypto",