Lines Matching +full:inside +full:- +full:secure

A sparse excerpt of the Linux kernel driver drivers/crypto/inside-secure/safexcel.c: only the lines the search matched are shown. Each entry is prefixed with its line number in the source file and suffixed with its enclosing function.

1 // SPDX-License-Identifier: GPL-2.0
5 * Antoine Tenart <antoine.tenart@free-electrons.com>
10 #include <linux/dma-mapping.h>
35 if (priv->version == EIP197B) { in eip197_trc_cache_init()
48 val = readl(priv->base + EIP197_CS_RAM_CTRL); in eip197_trc_cache_init()
51 writel(val, priv->base + EIP197_CS_RAM_CTRL); in eip197_trc_cache_init()
54 writel(0, priv->base + EIP197_TRC_ECCCTRL); in eip197_trc_cache_init()
60 val = readl(priv->base + EIP197_TRC_PARAMS); in eip197_trc_cache_init()
63 writel(val, priv->base + EIP197_TRC_PARAMS); in eip197_trc_cache_init()
71 priv->base + offset); in eip197_trc_cache_init()
73 val = EIP197_CS_RC_NEXT(i+1) | EIP197_CS_RC_PREV(i-1); in eip197_trc_cache_init()
76 else if (i == cs_rc_max - 1) in eip197_trc_cache_init()
78 writel(val, priv->base + offset + sizeof(u32)); in eip197_trc_cache_init()
85 priv->base + EIP197_CLASSIFICATION_RAMS + htable_offset + i * sizeof(u32)); in eip197_trc_cache_init()
88 val = readl(priv->base + EIP197_CS_RAM_CTRL); in eip197_trc_cache_init()
90 writel(val, priv->base + EIP197_CS_RAM_CTRL); in eip197_trc_cache_init()
94 EIP197_TRC_FREECHAIN_TAIL_PTR(cs_rc_max - 1); in eip197_trc_cache_init()
95 writel(val, priv->base + EIP197_TRC_FREECHAIN); in eip197_trc_cache_init()
100 writel(val, priv->base + EIP197_TRC_PARAMS2); in eip197_trc_cache_init()
106 writel(val, priv->base + EIP197_TRC_PARAMS); in eip197_trc_cache_init()
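
The matched lines above (73-78) link each transform record cache entry into a doubly linked free chain: record i points forward to i + 1 and back to i - 1, with the head's "prev" and the tail's "next" patched to a null index, and the free-chain head/tail registers (lines 94-95) then pointing at record 0 and cs_rc_max - 1. A minimal compilable sketch of that arithmetic; the field layout and the null index value here are assumptions for illustration, not the EIP197's documented format:

#include <stdio.h>
#include <stdint.h>

#define RC_NULL      0x3ffu                /* assumed "no neighbour" index */
#define RC_NEXT(x)   ((uint32_t)(x))       /* assumed: next in low bits */
#define RC_PREV(x)   ((uint32_t)(x) << 10) /* assumed: prev shifted up */

int main(void)
{
    const unsigned int cs_rc_max = 4;      /* tiny cache for the demo */

    for (unsigned int i = 0; i < cs_rc_max; i++) {
        uint32_t next = (i == cs_rc_max - 1) ? RC_NULL : i + 1;
        uint32_t prev = (i == 0) ? RC_NULL : i - 1;
        uint32_t val = RC_NEXT(next) | RC_PREV(prev);

        printf("record %u: admin word 0x%08x\n", i, val);
    }
    return 0;
}
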
113 const u32 *data = (const u32 *)fw->data; in eip197_write_firmware()
127 for (i = 0; i < fw->size / sizeof(u32); i++) in eip197_write_firmware()
129 priv->base + EIP197_CLASSIFICATION_RAMS + i * sizeof(u32)); in eip197_write_firmware()
148 switch (priv->version) { in eip197_load_firmwares()
161 snprintf(fw_path, 31, "inside-secure/%s/%s", dir, fw_name[i]); in eip197_load_firmwares()
162 ret = request_firmware(&fw[i], fw_path, priv->dev); in eip197_load_firmwares()
164 if (priv->version != EIP197B) in eip197_load_firmwares()
170 ret = request_firmware(&fw[i], fw_name[i], priv->dev); in eip197_load_firmwares()
172 dev_err(priv->dev, in eip197_load_firmwares()
180 for (pe = 0; pe < priv->config.pes; pe++) { in eip197_load_firmwares()
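
Lines 161-172 show the firmware lookup order: a versioned path under inside-secure/ is tried first, and only EIP197B falls back to the bare, directory-less name for compatibility with older firmware layouts. A hedged kernel-style sketch of that pattern; the helper name and its exact arguments are illustrative, not the driver's actual code:

/* Hypothetical helper condensing the lookup at lines 161-172. */
static int safexcel_request_one_fw(struct safexcel_crypto_priv *priv,
                                   const struct firmware **fw,
                                   const char *dir, const char *name)
{
    char fw_path[31];
    int ret;

    snprintf(fw_path, sizeof(fw_path), "inside-secure/%s/%s", dir, name);
    ret = request_firmware(fw, fw_path, priv->dev);
    if (!ret)
        return 0;

    /* Only EIP197B keeps the legacy, directory-less fallback. */
    if (priv->version != EIP197B)
        return ret;

    ret = request_firmware(fw, name, priv->dev);
    if (ret)
        dev_err(priv->dev, "Failed to request firmware %s (%d)\n",
                name, ret);
    return ret;
}
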
217 cd_size_rnd = (priv->config.cd_size + (BIT(hdw) - 1)) >> hdw; in safexcel_hw_setup_cdesc_rings()
219 for (i = 0; i < priv->config.rings; i++) { in safexcel_hw_setup_cdesc_rings()
221 writel(lower_32_bits(priv->ring[i].cdr.base_dma), in safexcel_hw_setup_cdesc_rings()
223 writel(upper_32_bits(priv->ring[i].cdr.base_dma), in safexcel_hw_setup_cdesc_rings()
226 writel(EIP197_xDR_DESC_MODE_64BIT | (priv->config.cd_offset << 16) | in safexcel_hw_setup_cdesc_rings()
227 priv->config.cd_size, in safexcel_hw_setup_cdesc_rings()
230 (EIP197_FETCH_COUNT * priv->config.cd_offset), in safexcel_hw_setup_cdesc_rings()
255 rd_size_rnd = (priv->config.rd_size + (BIT(hdw) - 1)) >> hdw; in safexcel_hw_setup_rdesc_rings()
257 for (i = 0; i < priv->config.rings; i++) { in safexcel_hw_setup_rdesc_rings()
259 writel(lower_32_bits(priv->ring[i].rdr.base_dma), in safexcel_hw_setup_rdesc_rings()
261 writel(upper_32_bits(priv->ring[i].rdr.base_dma), in safexcel_hw_setup_rdesc_rings()
264 writel(EIP197_xDR_DESC_MODE_64BIT | (priv->config.rd_offset << 16) | in safexcel_hw_setup_rdesc_rings()
265 priv->config.rd_size, in safexcel_hw_setup_rdesc_rings()
269 (EIP197_FETCH_COUNT * priv->config.rd_offset), in safexcel_hw_setup_rdesc_rings()
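
Both ring-setup helpers start by rounding the descriptor size up to whole host data words: with hdw the log2 of the bus word count, (size + (BIT(hdw) - 1)) >> hdw is the usual ceiling division by 2^hdw (lines 217 and 255). A small standalone demonstration of the idiom, with made-up values:

#include <stdio.h>
#include <stdint.h>

#define BIT(n) (1u << (n))

/* Ceiling division of size by 2^hdw, as at lines 217 and 255. */
static uint32_t round_to_hdw(uint32_t size, unsigned int hdw)
{
    return (size + (BIT(hdw) - 1)) >> hdw;
}

int main(void)
{
    /* A 6-word descriptor with hdw = 2: (6 + 3) >> 2 = 2, i.e. two
     * 4-word beats are enough to cover the 6 words. */
    printf("%u\n", round_to_hdw(6, 2));
    return 0;
}
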
307 if (priv->version == EIP197B || priv->version == EIP197D) in safexcel_hw_init()
326 for (pe = 0; pe < priv->config.pes; pe++) { in safexcel_hw_init()
333 if (priv->version == EIP197B || priv->version == EIP197D) { in safexcel_hw_init()
360 if (priv->version == EIP197B || priv->version == EIP197D) { in safexcel_hw_init()
363 GENMASK(priv->config.rings - 1, 0), in safexcel_hw_init()
387 if (priv->version == EIP197B || priv->version == EIP197D) in safexcel_hw_init()
415 for (i = 0; i < priv->config.rings; i++) { in safexcel_hw_init()
436 writel((EIP197_DEFAULT_RING_SIZE * priv->config.cd_offset) << 2, in safexcel_hw_init()
441 for (i = 0; i < priv->config.rings; i++) { in safexcel_hw_init()
459 writel((EIP197_DEFAULT_RING_SIZE * priv->config.rd_offset) << 2, in safexcel_hw_init()
463 for (pe = 0; pe < priv->config.pes; pe++) { in safexcel_hw_init()
465 writel(EIP197_DxE_THR_CTRL_EN | GENMASK(priv->config.rings - 1, 0), in safexcel_hw_init()
469 writel(EIP197_DxE_THR_CTRL_EN | GENMASK(priv->config.rings - 1, 0), in safexcel_hw_init()
476 if (priv->version == EIP197B || priv->version == EIP197D) { in safexcel_hw_init()
494 int coal = min_t(int, priv->ring[ring].requests, EIP197_MAX_BATCH_SZ); in safexcel_try_push_requests()
514 req = priv->ring[ring].req; in safexcel_dequeue()
515 backlog = priv->ring[ring].backlog; in safexcel_dequeue()
520 spin_lock_bh(&priv->ring[ring].queue_lock); in safexcel_dequeue()
521 backlog = crypto_get_backlog(&priv->ring[ring].queue); in safexcel_dequeue()
522 req = crypto_dequeue_request(&priv->ring[ring].queue); in safexcel_dequeue()
523 spin_unlock_bh(&priv->ring[ring].queue_lock); in safexcel_dequeue()
526 priv->ring[ring].req = NULL; in safexcel_dequeue()
527 priv->ring[ring].backlog = NULL; in safexcel_dequeue()
532 ctx = crypto_tfm_ctx(req->tfm); in safexcel_dequeue()
533 ret = ctx->send(req, ring, &commands, &results); in safexcel_dequeue()
538 backlog->complete(backlog, -EINPROGRESS); in safexcel_dequeue()
554 * the request and the backlog for the next dequeue call (per-ring). in safexcel_dequeue()
556 priv->ring[ring].req = req; in safexcel_dequeue()
557 priv->ring[ring].backlog = backlog; in safexcel_dequeue()
563 spin_lock_bh(&priv->ring[ring].lock); in safexcel_dequeue()
565 priv->ring[ring].requests += nreq; in safexcel_dequeue()
567 if (!priv->ring[ring].busy) { in safexcel_dequeue()
569 priv->ring[ring].busy = true; in safexcel_dequeue()
572 spin_unlock_bh(&priv->ring[ring].lock); in safexcel_dequeue()
575 writel((rdesc * priv->config.rd_offset) << 2, in safexcel_dequeue()
579 writel((cdesc * priv->config.cd_offset) << 2, in safexcel_dequeue()
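
The dequeue path above follows the kernel crypto API's backlog protocol: pop a request and its backlog marker under the queue lock, hand the request to the context's send() callback, and either notify the backlogged submitter with -EINPROGRESS or, if the ring rejected the request, stash the pair so the next dequeue call retries it (lines 514-557). A condensed, hedged sketch of that control flow; declarations and descriptor accounting are elided:

/* Approximation of the flow in safexcel_dequeue(), not verbatim. */
spin_lock_bh(&priv->ring[ring].queue_lock);
backlog = crypto_get_backlog(&priv->ring[ring].queue);
req = crypto_dequeue_request(&priv->ring[ring].queue);
spin_unlock_bh(&priv->ring[ring].queue_lock);

if (req) {
    ctx = crypto_tfm_ctx(req->tfm);
    ret = ctx->send(req, ring, &commands, &results);
    if (ret) {
        /* Ring full: keep the pair for the next dequeue call. */
        priv->ring[ring].req = req;
        priv->ring[ring].backlog = backlog;
    } else if (backlog) {
        /* The backlogged submitter's request is now in flight. */
        backlog->complete(backlog, -EINPROGRESS);
    }
}
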
586 if (likely(!rdesc->result_data.error_code)) in safexcel_rdesc_check_errors()
589 if (rdesc->result_data.error_code & 0x407f) { in safexcel_rdesc_check_errors()
590 /* Fatal error (bits 0-6, 14) */ in safexcel_rdesc_check_errors()
591 dev_err(priv->dev, in safexcel_rdesc_check_errors()
593 rdesc->result_data.error_code); in safexcel_rdesc_check_errors()
594 return -EIO; in safexcel_rdesc_check_errors()
595 } else if (rdesc->result_data.error_code == BIT(9)) { in safexcel_rdesc_check_errors()
597 return -EBADMSG; in safexcel_rdesc_check_errors()
600 /* All other non-fatal errors */ in safexcel_rdesc_check_errors()
601 return -EINVAL; in safexcel_rdesc_check_errors()
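
The error triage above checks a fatal mask first, then the authentication bit: 0x407f selects bits 0-6 and bit 14, while an error code equal to BIT(9) alone means an authentication failure (hence -EBADMSG). A standalone decode mirroring that order:

#include <stdio.h>
#include <stdint.h>

#define BIT(n) (1u << (n))

/* Mirrors the triage at lines 586-601: fatal mask, then auth bit. */
static const char *decode(uint32_t error_code)
{
    if (!error_code)
        return "ok";
    if (error_code & 0x407f)           /* bits 0-6 and 14 */
        return "fatal (-EIO)";
    if (error_code == BIT(9))
        return "authentication failed (-EBADMSG)";
    return "other non-fatal error (-EINVAL)";
}

int main(void)
{
    printf("%s\n", decode(BIT(9)));    /* authentication failed */
    printf("%s\n", decode(BIT(14)));   /* fatal */
    return 0;
}
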
611 priv->ring[ring].rdr_req[i] = req; in safexcel_rdr_req_set()
619 return priv->ring[ring].rdr_req[i]; in safexcel_rdr_req_get()
628 cdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].cdr); in safexcel_complete()
630 dev_err(priv->dev, in safexcel_complete()
634 } while (!cdesc->last_seg); in safexcel_complete()
639 struct safexcel_inv_result *result = req->data; in safexcel_inv_complete()
641 if (error == -EINPROGRESS) in safexcel_inv_complete()
644 result->error = error; in safexcel_inv_complete()
645 complete(&result->completion); in safexcel_inv_complete()
661 cdesc->control_data.type = EIP197_TYPE_EXTENDED; in safexcel_invalidate_cache()
662 cdesc->control_data.options = 0; in safexcel_invalidate_cache()
663 cdesc->control_data.refresh = 0; in safexcel_invalidate_cache()
664 cdesc->control_data.control0 = CONTEXT_CONTROL_INV_TR; in safexcel_invalidate_cache()
679 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr); in safexcel_invalidate_cache()
704 ctx = crypto_tfm_ctx(req->tfm); in safexcel_handle_result_descriptor()
705 ndesc = ctx->handle_result(priv, ring, req, in safexcel_handle_result_descriptor()
708 dev_err(priv->dev, "failed to handle result (%d)", ndesc); in safexcel_handle_result_descriptor()
714 req->complete(req, ret); in safexcel_handle_result_descriptor()
725 EIP197_xDR_PROC_xD_COUNT(tot_descs * priv->config.rd_offset), in safexcel_handle_result_descriptor()
736 spin_lock_bh(&priv->ring[ring].lock); in safexcel_handle_result_descriptor()
738 priv->ring[ring].requests -= handled; in safexcel_handle_result_descriptor()
741 if (!priv->ring[ring].requests) in safexcel_handle_result_descriptor()
742 priv->ring[ring].busy = false; in safexcel_handle_result_descriptor()
744 spin_unlock_bh(&priv->ring[ring].lock); in safexcel_handle_result_descriptor()
752 safexcel_dequeue(data->priv, data->ring); in safexcel_dequeue_work()
763 struct safexcel_crypto_priv *priv = irq_data->priv; in safexcel_irq_ring()
764 int ring = irq_data->ring, rc = IRQ_NONE; in safexcel_irq_ring()
781 dev_err(priv->dev, "RDR: fatal error."); in safexcel_irq_ring()
800 struct safexcel_crypto_priv *priv = irq_data->priv; in safexcel_irq_ring_thread()
801 int ring = irq_data->ring; in safexcel_irq_ring_thread()
805 queue_work(priv->ring[ring].workqueue, in safexcel_irq_ring_thread()
806 &priv->ring[ring].work_data.work); in safexcel_irq_ring_thread()
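
The interrupt path is split in two: the hard handler acknowledges the ring and returns IRQ_WAKE_THREAD when there is work, and the threaded handler processes results before punting further submissions to the per-ring workqueue (lines 763-806). A hedged sketch of how such a pair is wired up with devm_request_threaded_irq(), modelled on the request at lines 823-825; the handler names, the registration helper, and the IRQF_ONESHOT flag are assumptions:

static irqreturn_t ring_irq(int irq, void *data)
{
    /* Ack/inspect ring status here; when there is work: */
    return IRQ_WAKE_THREAD;            /* schedule ring_irq_thread() */
}

static irqreturn_t ring_irq_thread(int irq, void *data)
{
    struct safexcel_ring_irq_data *irq_data = data;
    struct safexcel_crypto_priv *priv = irq_data->priv;
    int ring = irq_data->ring;

    /* Heavy lifting out of hard-IRQ context, then resubmit. */
    queue_work(priv->ring[ring].workqueue,
               &priv->ring[ring].work_data.work);
    return IRQ_HANDLED;
}

/* Hypothetical registration in the spirit of safexcel_request_ring_irq(). */
static int request_one_ring_irq(struct platform_device *pdev, int irq,
                                void *ring_irq_priv)
{
    return devm_request_threaded_irq(&pdev->dev, irq, ring_irq,
                                     ring_irq_thread, IRQF_ONESHOT,
                                     dev_name(&pdev->dev), ring_irq_priv);
}
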
819 dev_err(&pdev->dev, "unable to get IRQ '%s'\n", name); in safexcel_request_ring_irq()
823 ret = devm_request_threaded_irq(&pdev->dev, irq, handler, in safexcel_request_ring_irq()
825 dev_name(&pdev->dev), ring_irq_priv); in safexcel_request_ring_irq()
827 dev_err(&pdev->dev, "unable to request IRQ %d\n", irq); in safexcel_request_ring_irq()
865 safexcel_algs[i]->priv = priv; in safexcel_register_algorithms()
867 if (!(safexcel_algs[i]->engines & priv->version)) in safexcel_register_algorithms()
870 if (safexcel_algs[i]->type == SAFEXCEL_ALG_TYPE_SKCIPHER) in safexcel_register_algorithms()
871 ret = crypto_register_skcipher(&safexcel_algs[i]->alg.skcipher); in safexcel_register_algorithms()
872 else if (safexcel_algs[i]->type == SAFEXCEL_ALG_TYPE_AEAD) in safexcel_register_algorithms()
873 ret = crypto_register_aead(&safexcel_algs[i]->alg.aead); in safexcel_register_algorithms()
875 ret = crypto_register_ahash(&safexcel_algs[i]->alg.ahash); in safexcel_register_algorithms()
885 if (!(safexcel_algs[j]->engines & priv->version)) in safexcel_register_algorithms()
888 if (safexcel_algs[j]->type == SAFEXCEL_ALG_TYPE_SKCIPHER) in safexcel_register_algorithms()
889 crypto_unregister_skcipher(&safexcel_algs[j]->alg.skcipher); in safexcel_register_algorithms()
890 else if (safexcel_algs[j]->type == SAFEXCEL_ALG_TYPE_AEAD) in safexcel_register_algorithms()
891 crypto_unregister_aead(&safexcel_algs[j]->alg.aead); in safexcel_register_algorithms()
893 crypto_unregister_ahash(&safexcel_algs[j]->alg.ahash); in safexcel_register_algorithms()
904 if (!(safexcel_algs[i]->engines & priv->version)) in safexcel_unregister_algorithms()
907 if (safexcel_algs[i]->type == SAFEXCEL_ALG_TYPE_SKCIPHER) in safexcel_unregister_algorithms()
908 crypto_unregister_skcipher(&safexcel_algs[i]->alg.skcipher); in safexcel_unregister_algorithms()
909 else if (safexcel_algs[i]->type == SAFEXCEL_ALG_TYPE_AEAD) in safexcel_unregister_algorithms()
910 crypto_unregister_aead(&safexcel_algs[i]->alg.aead); in safexcel_unregister_algorithms()
912 crypto_unregister_ahash(&safexcel_algs[i]->alg.ahash); in safexcel_unregister_algorithms()
923 switch (priv->version) { in safexcel_configure()
931 priv->config.pes = (val >> EIP197_N_PES_OFFSET) & mask; in safexcel_configure()
934 mask = BIT(val) - 1; in safexcel_configure()
937 priv->config.rings = min_t(u32, val & GENMASK(3, 0), max_rings); in safexcel_configure()
939 priv->config.cd_size = (sizeof(struct safexcel_command_desc) / sizeof(u32)); in safexcel_configure()
940 priv->config.cd_offset = (priv->config.cd_size + mask) & ~mask; in safexcel_configure()
942 priv->config.rd_size = (sizeof(struct safexcel_result_desc) / sizeof(u32)); in safexcel_configure()
943 priv->config.rd_offset = (priv->config.rd_size + mask) & ~mask; in safexcel_configure()
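
safexcel_configure() sizes descriptors in 32-bit words and aligns their ring offsets: with mask = BIT(val) - 1, the expression (size + mask) & ~mask rounds size up to the next multiple of BIT(val) (lines 934-943). A standalone demonstration with made-up numbers:

#include <stdio.h>
#include <stdint.h>

#define BIT(n) (1u << (n))

int main(void)
{
    uint32_t val = 2;                  /* align to BIT(2) = 4 words */
    uint32_t mask = BIT(val) - 1;      /* 0x3 */
    uint32_t cd_size = 6;              /* demo descriptor size, in words */
    uint32_t cd_offset = (cd_size + mask) & ~mask;

    printf("cd_size=%u -> cd_offset=%u\n", cd_size, cd_offset); /* 6 -> 8 */
    return 0;
}
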
948 struct safexcel_register_offsets *offsets = &priv->offsets; in safexcel_init_register_offsets()
950 switch (priv->version) { in safexcel_init_register_offsets()
953 offsets->hia_aic = EIP197_HIA_AIC_BASE; in safexcel_init_register_offsets()
954 offsets->hia_aic_g = EIP197_HIA_AIC_G_BASE; in safexcel_init_register_offsets()
955 offsets->hia_aic_r = EIP197_HIA_AIC_R_BASE; in safexcel_init_register_offsets()
956 offsets->hia_aic_xdr = EIP197_HIA_AIC_xDR_BASE; in safexcel_init_register_offsets()
957 offsets->hia_dfe = EIP197_HIA_DFE_BASE; in safexcel_init_register_offsets()
958 offsets->hia_dfe_thr = EIP197_HIA_DFE_THR_BASE; in safexcel_init_register_offsets()
959 offsets->hia_dse = EIP197_HIA_DSE_BASE; in safexcel_init_register_offsets()
960 offsets->hia_dse_thr = EIP197_HIA_DSE_THR_BASE; in safexcel_init_register_offsets()
961 offsets->hia_gen_cfg = EIP197_HIA_GEN_CFG_BASE; in safexcel_init_register_offsets()
962 offsets->pe = EIP197_PE_BASE; in safexcel_init_register_offsets()
965 offsets->hia_aic = EIP97_HIA_AIC_BASE; in safexcel_init_register_offsets()
966 offsets->hia_aic_g = EIP97_HIA_AIC_G_BASE; in safexcel_init_register_offsets()
967 offsets->hia_aic_r = EIP97_HIA_AIC_R_BASE; in safexcel_init_register_offsets()
968 offsets->hia_aic_xdr = EIP97_HIA_AIC_xDR_BASE; in safexcel_init_register_offsets()
969 offsets->hia_dfe = EIP97_HIA_DFE_BASE; in safexcel_init_register_offsets()
970 offsets->hia_dfe_thr = EIP97_HIA_DFE_THR_BASE; in safexcel_init_register_offsets()
971 offsets->hia_dse = EIP97_HIA_DSE_BASE; in safexcel_init_register_offsets()
972 offsets->hia_dse_thr = EIP97_HIA_DSE_THR_BASE; in safexcel_init_register_offsets()
973 offsets->hia_gen_cfg = EIP97_HIA_GEN_CFG_BASE; in safexcel_init_register_offsets()
974 offsets->pe = EIP97_PE_BASE; in safexcel_init_register_offsets()
981 struct device *dev = &pdev->dev; in safexcel_probe()
988 return -ENOMEM; in safexcel_probe()
990 priv->dev = dev; in safexcel_probe()
991 priv->version = (enum safexcel_eip_version)of_device_get_match_data(dev); in safexcel_probe()
993 if (priv->version == EIP197B || priv->version == EIP197D) in safexcel_probe()
994 priv->flags |= EIP197_TRC_CACHE; in safexcel_probe()
999 priv->base = devm_ioremap_resource(dev, res); in safexcel_probe()
1000 if (IS_ERR(priv->base)) { in safexcel_probe()
1002 return PTR_ERR(priv->base); in safexcel_probe()
1005 priv->clk = devm_clk_get(&pdev->dev, NULL); in safexcel_probe()
1006 ret = PTR_ERR_OR_ZERO(priv->clk); in safexcel_probe()
1008 if (ret != -ENOENT) { in safexcel_probe()
1012 ret = clk_prepare_enable(priv->clk); in safexcel_probe()
1019 priv->reg_clk = devm_clk_get(&pdev->dev, "reg"); in safexcel_probe()
1020 ret = PTR_ERR_OR_ZERO(priv->reg_clk); in safexcel_probe()
1022 if (ret != -ENOENT) { in safexcel_probe()
1026 ret = clk_prepare_enable(priv->reg_clk); in safexcel_probe()
1037 priv->context_pool = dmam_pool_create("safexcel-context", dev, in safexcel_probe()
1040 if (!priv->context_pool) { in safexcel_probe()
1041 ret = -ENOMEM; in safexcel_probe()
1047 priv->ring = devm_kcalloc(dev, priv->config.rings, in safexcel_probe()
1048 sizeof(*priv->ring), in safexcel_probe()
1050 if (!priv->ring) { in safexcel_probe()
1051 ret = -ENOMEM; in safexcel_probe()
1055 for (i = 0; i < priv->config.rings; i++) { in safexcel_probe()
1062 &priv->ring[i].cdr, in safexcel_probe()
1063 &priv->ring[i].rdr); in safexcel_probe()
1067 priv->ring[i].rdr_req = devm_kcalloc(dev, in safexcel_probe()
1069 sizeof(priv->ring[i].rdr_req), in safexcel_probe()
1071 if (!priv->ring[i].rdr_req) { in safexcel_probe()
1072 ret = -ENOMEM; in safexcel_probe()
1078 ret = -ENOMEM; in safexcel_probe()
1082 ring_irq->priv = priv; in safexcel_probe()
1083 ring_irq->ring = i; in safexcel_probe()
1094 priv->ring[i].work_data.priv = priv; in safexcel_probe()
1095 priv->ring[i].work_data.ring = i; in safexcel_probe()
1096 INIT_WORK(&priv->ring[i].work_data.work, safexcel_dequeue_work); in safexcel_probe()
1099 priv->ring[i].workqueue = create_singlethread_workqueue(wq_name); in safexcel_probe()
1100 if (!priv->ring[i].workqueue) { in safexcel_probe()
1101 ret = -ENOMEM; in safexcel_probe()
1105 priv->ring[i].requests = 0; in safexcel_probe()
1106 priv->ring[i].busy = false; in safexcel_probe()
1108 crypto_init_queue(&priv->ring[i].queue, in safexcel_probe()
1111 spin_lock_init(&priv->ring[i].lock); in safexcel_probe()
1112 spin_lock_init(&priv->ring[i].queue_lock); in safexcel_probe()
1116 atomic_set(&priv->ring_used, 0); in safexcel_probe()
1133 clk_disable_unprepare(priv->reg_clk); in safexcel_probe()
1135 clk_disable_unprepare(priv->clk); in safexcel_probe()
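
Worth noting in the probe path: the functional clock is optional. devm_clk_get() returning -ENOENT simply means the device tree described no clock, which is tolerated; any other error aborts the probe (lines 1005-1012, and again for the "reg" clock at 1019-1026). A condensed, hedged sketch of that idiom, with the error unwinding elided:

priv->clk = devm_clk_get(&pdev->dev, NULL);
ret = PTR_ERR_OR_ZERO(priv->clk);
/* The clock isn't mandatory: -ENOENT means none was described. */
if (ret != -ENOENT) {
    if (ret)
        return ret;
    ret = clk_prepare_enable(priv->clk);
    if (ret) {
        dev_err(&pdev->dev, "unable to enable clk (%d)\n", ret);
        return ret;
    }
}
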
1143 for (i = 0; i < priv->config.rings; i++) { in safexcel_hw_reset_rings()
1166 clk_disable_unprepare(priv->clk); in safexcel_remove()
1168 for (i = 0; i < priv->config.rings; i++) in safexcel_remove()
1169 destroy_workqueue(priv->ring[i].workqueue); in safexcel_remove()
1176 .compatible = "inside-secure,safexcel-eip97ies",
1180 .compatible = "inside-secure,safexcel-eip197b",
1184 .compatible = "inside-secure,safexcel-eip197d",
1189 .compatible = "inside-secure,safexcel-eip97",
1194 .compatible = "inside-secure,safexcel-eip197",
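
These compatible strings pair with of_device_get_match_data() at line 991: each of_device_id entry's .data field carries the engine version that probe casts into priv->version. An illustrative sketch of that wiring; only the EIP197B and EIP197D version names are attested by the listing, and the exact .data casts are assumed:

static const struct of_device_id safexcel_of_match_table[] = {
    {
        .compatible = "inside-secure,safexcel-eip197b",
        .data = (void *)EIP197B,
    },
    {
        .compatible = "inside-secure,safexcel-eip197d",
        .data = (void *)EIP197D,
    },
    { /* sentinel */ },
};
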
1205 .name = "crypto-safexcel",
1211 MODULE_AUTHOR("Antoine Tenart <antoine.tenart@free-electrons.com>");