Lines Matching refs:dd

181 struct tegra_aes_dev *dd; member
204 static inline u32 aes_readl(struct tegra_aes_dev *dd, u32 offset) in aes_readl() argument
206 return readl(dd->io_base + offset); in aes_readl()
209 static inline void aes_writel(struct tegra_aes_dev *dd, u32 val, u32 offset) in aes_writel() argument
211 writel(val, dd->io_base + offset); in aes_writel()
214 static int aes_start_crypt(struct tegra_aes_dev *dd, u32 in_addr, u32 out_addr, in aes_start_crypt() argument
222 aes_writel(dd, 0xFFFFFFFF, TEGRA_AES_INTR_STATUS); in aes_start_crypt()
225 aes_writel(dd, 0x33, TEGRA_AES_INT_ENB); in aes_start_crypt()
232 value = aes_readl(dd, TEGRA_AES_CMDQUE_CONTROL); in aes_start_crypt()
239 aes_writel(dd, value, TEGRA_AES_CMDQUE_CONTROL); in aes_start_crypt()
240 dev_dbg(dd->dev, "cmd_q_ctrl=0x%x", value); in aes_start_crypt()
243 ((dd->ctx->keylen * 8) << in aes_start_crypt()
267 dev_dbg(dd->dev, "secure_in_sel=0x%x", value); in aes_start_crypt()
268 aes_writel(dd, value, TEGRA_AES_SECURE_INPUT_SELECT); in aes_start_crypt()
270 aes_writel(dd, out_addr, TEGRA_AES_SECURE_DEST_ADDR); in aes_start_crypt()
271 INIT_COMPLETION(dd->op_complete); in aes_start_crypt()
275 value = aes_readl(dd, TEGRA_AES_INTR_STATUS); in aes_start_crypt()
279 aes_writel(dd, cmdq[i], TEGRA_AES_ICMDQUE_WR); in aes_start_crypt()
282 ret = wait_for_completion_timeout(&dd->op_complete, in aes_start_crypt()
285 dev_err(dd->dev, "timed out (0x%x)\n", in aes_start_crypt()
286 aes_readl(dd, TEGRA_AES_INTR_STATUS)); in aes_start_crypt()
290 aes_writel(dd, cmdq[AES_HW_MAX_ICQ_LENGTH - 1], TEGRA_AES_ICMDQUE_WR); in aes_start_crypt()
325 static int aes_set_key(struct tegra_aes_dev *dd) in aes_set_key() argument
328 struct tegra_aes_ctx *ctx = dd->ctx; in aes_set_key()
333 if (!dd->ctx->slot) { in aes_set_key()
334 dev_dbg(dd->dev, "using ssk"); in aes_set_key()
335 dd->ctx->slot = &ssk; in aes_set_key()
340 value = aes_readl(dd, TEGRA_AES_SECURE_CONFIG_EXT); in aes_set_key()
342 aes_writel(dd, value, TEGRA_AES_SECURE_CONFIG_EXT); in aes_set_key()
345 value = aes_readl(dd, TEGRA_AES_SECURE_CONFIG); in aes_set_key()
348 aes_writel(dd, value, TEGRA_AES_SECURE_CONFIG); in aes_set_key()
358 cmdq[1] = (u32)dd->ivkey_phys_base; in aes_set_key()
360 aes_writel(dd, cmdq[0], TEGRA_AES_ICMDQUE_WR); in aes_set_key()
361 aes_writel(dd, cmdq[1], TEGRA_AES_ICMDQUE_WR); in aes_set_key()
364 value = aes_readl(dd, TEGRA_AES_INTR_STATUS); in aes_set_key()
376 aes_writel(dd, value, TEGRA_AES_ICMDQUE_WR); in aes_set_key()
379 value = aes_readl(dd, TEGRA_AES_INTR_STATUS); in aes_set_key()
387 static int tegra_aes_handle_req(struct tegra_aes_dev *dd) in tegra_aes_handle_req() argument
401 if (!dd) in tegra_aes_handle_req()
404 spin_lock_irqsave(&dd->lock, flags); in tegra_aes_handle_req()
405 backlog = crypto_get_backlog(&dd->queue); in tegra_aes_handle_req()
406 async_req = crypto_dequeue_request(&dd->queue); in tegra_aes_handle_req()
408 clear_bit(FLAGS_BUSY, &dd->flags); in tegra_aes_handle_req()
409 spin_unlock_irqrestore(&dd->lock, flags); in tegra_aes_handle_req()
419 dev_dbg(dd->dev, "%s: get new req\n", __func__); in tegra_aes_handle_req()
428 dd->req = req; in tegra_aes_handle_req()
429 dd->total = req->nbytes; in tegra_aes_handle_req()
430 dd->in_offset = 0; in tegra_aes_handle_req()
431 dd->in_sg = req->src; in tegra_aes_handle_req()
432 dd->out_offset = 0; in tegra_aes_handle_req()
433 dd->out_sg = req->dst; in tegra_aes_handle_req()
435 in_sg = dd->in_sg; in tegra_aes_handle_req()
436 out_sg = dd->out_sg; in tegra_aes_handle_req()
438 total = dd->total; in tegra_aes_handle_req()
444 dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode; in tegra_aes_handle_req()
446 dd->iv = (u8 *)req->info; in tegra_aes_handle_req()
447 dd->ivlen = crypto_ablkcipher_ivsize(tfm); in tegra_aes_handle_req()
450 ctx->dd = dd; in tegra_aes_handle_req()
451 dd->ctx = ctx; in tegra_aes_handle_req()
455 memcpy(dd->ivkey_base, ctx->key, ctx->keylen); in tegra_aes_handle_req()
456 memset(dd->ivkey_base + ctx->keylen, 0, AES_HW_KEY_TABLE_LENGTH_BYTES - ctx->keylen); in tegra_aes_handle_req()
457 aes_set_key(dd); in tegra_aes_handle_req()
461 if (((dd->flags & FLAGS_CBC) || (dd->flags & FLAGS_OFB)) && dd->iv) { in tegra_aes_handle_req()
466 memcpy(dd->buf_in, dd->iv, dd->ivlen); in tegra_aes_handle_req()
468 ret = aes_start_crypt(dd, (u32)dd->dma_buf_in, in tegra_aes_handle_req()
469 dd->dma_buf_out, 1, FLAGS_CBC, false); in tegra_aes_handle_req()
471 dev_err(dd->dev, "aes_start_crypt fail(%d)\n", ret); in tegra_aes_handle_req()
477 dev_dbg(dd->dev, "remain: %d\n", total); in tegra_aes_handle_req()
478 ret = dma_map_sg(dd->dev, in_sg, 1, DMA_TO_DEVICE); in tegra_aes_handle_req()
480 dev_err(dd->dev, "dma_map_sg() error\n"); in tegra_aes_handle_req()
484 ret = dma_map_sg(dd->dev, out_sg, 1, DMA_FROM_DEVICE); in tegra_aes_handle_req()
486 dev_err(dd->dev, "dma_map_sg() error\n"); in tegra_aes_handle_req()
487 dma_unmap_sg(dd->dev, dd->in_sg, in tegra_aes_handle_req()
494 dd->flags |= FLAGS_FAST; in tegra_aes_handle_req()
499 ret = aes_start_crypt(dd, addr_in, addr_out, nblocks, in tegra_aes_handle_req()
500 dd->flags, true); in tegra_aes_handle_req()
502 dma_unmap_sg(dd->dev, out_sg, 1, DMA_FROM_DEVICE); in tegra_aes_handle_req()
503 dma_unmap_sg(dd->dev, in_sg, 1, DMA_TO_DEVICE); in tegra_aes_handle_req()
506 dev_err(dd->dev, "aes_start_crypt fail(%d)\n", ret); in tegra_aes_handle_req()
509 dd->flags &= ~FLAGS_FAST; in tegra_aes_handle_req()
511 dev_dbg(dd->dev, "out: copied %d\n", count); in tegra_aes_handle_req()
521 dd->total = total; in tegra_aes_handle_req()
523 if (dd->req->base.complete) in tegra_aes_handle_req()
524 dd->req->base.complete(&dd->req->base, ret); in tegra_aes_handle_req()
526 dev_dbg(dd->dev, "%s: exit\n", __func__); in tegra_aes_handle_req()
534 struct tegra_aes_dev *dd = aes_dev; in tegra_aes_setkey() local
539 dev_err(dd->dev, "unsupported key size\n"); in tegra_aes_setkey()
544 dev_dbg(dd->dev, "keylen: %d\n", keylen); in tegra_aes_setkey()
546 ctx->dd = dd; in tegra_aes_setkey()
552 dev_err(dd->dev, "no empty slot\n"); in tegra_aes_setkey()
564 dev_dbg(dd->dev, "done\n"); in tegra_aes_setkey()
570 struct tegra_aes_dev *dd = aes_dev; in aes_workqueue_handler() local
573 ret = clk_prepare_enable(dd->aes_clk); in aes_workqueue_handler()
579 ret = tegra_aes_handle_req(dd); in aes_workqueue_handler()
582 clk_disable_unprepare(dd->aes_clk); in aes_workqueue_handler()
587 struct tegra_aes_dev *dd = (struct tegra_aes_dev *)dev_id; in aes_irq() local
588 u32 value = aes_readl(dd, TEGRA_AES_INTR_STATUS); in aes_irq()
589 int busy = test_bit(FLAGS_BUSY, &dd->flags); in aes_irq()
592 dev_dbg(dd->dev, "spurious interrupt\n"); in aes_irq()
596 dev_dbg(dd->dev, "irq_stat: 0x%x\n", value); in aes_irq()
598 aes_writel(dd, TEGRA_AES_INT_ERROR_MASK, TEGRA_AES_INTR_STATUS); in aes_irq()
601 complete(&dd->op_complete); in aes_irq()
611 struct tegra_aes_dev *dd = aes_dev; in tegra_aes_crypt() local
616 dev_dbg(dd->dev, "nbytes: %d, enc: %d, cbc: %d, ofb: %d\n", in tegra_aes_crypt()
622 spin_lock_irqsave(&dd->lock, flags); in tegra_aes_crypt()
623 err = ablkcipher_enqueue_request(&dd->queue, req); in tegra_aes_crypt()
624 busy = test_and_set_bit(FLAGS_BUSY, &dd->flags); in tegra_aes_crypt()
625 spin_unlock_irqrestore(&dd->lock, flags); in tegra_aes_crypt()
666 struct tegra_aes_dev *dd = aes_dev; in tegra_aes_get_random() local
669 u8 *dest = rdata, *dt = dd->dt; in tegra_aes_get_random()
674 ret = clk_prepare_enable(dd->aes_clk); in tegra_aes_get_random()
680 ctx->dd = dd; in tegra_aes_get_random()
681 dd->ctx = ctx; in tegra_aes_get_random()
682 dd->flags = FLAGS_ENCRYPT | FLAGS_RNG; in tegra_aes_get_random()
684 memcpy(dd->buf_in, dt, DEFAULT_RNG_BLK_SZ); in tegra_aes_get_random()
686 ret = aes_start_crypt(dd, (u32)dd->dma_buf_in, in tegra_aes_get_random()
687 (u32)dd->dma_buf_out, 1, dd->flags, true); in tegra_aes_get_random()
689 dev_err(dd->dev, "aes_start_crypt fail(%d)\n", ret); in tegra_aes_get_random()
693 memcpy(dest, dd->buf_out, dlen); in tegra_aes_get_random()
703 clk_disable_unprepare(dd->aes_clk); in tegra_aes_get_random()
706 dev_dbg(dd->dev, "%s: done\n", __func__); in tegra_aes_get_random()
713 struct tegra_aes_dev *dd = aes_dev; in tegra_aes_rng_reset() local
721 if (!ctx || !dd) { in tegra_aes_rng_reset()
722 dev_err(dd->dev, "ctx=0x%x, dd=0x%x\n", in tegra_aes_rng_reset()
723 (unsigned int)ctx, (unsigned int)dd); in tegra_aes_rng_reset()
728 dev_err(dd->dev, "seed size invalid"); in tegra_aes_rng_reset()
738 dev_err(dd->dev, "no empty slot\n"); in tegra_aes_rng_reset()
745 ctx->dd = dd; in tegra_aes_rng_reset()
746 dd->ctx = ctx; in tegra_aes_rng_reset()
747 dd->ctr = 0; in tegra_aes_rng_reset()
753 memcpy(dd->ivkey_base, seed + DEFAULT_RNG_BLK_SZ, AES_KEYSIZE_128); in tegra_aes_rng_reset()
754 memset(dd->ivkey_base + AES_KEYSIZE_128, 0, AES_HW_KEY_TABLE_LENGTH_BYTES - AES_KEYSIZE_128); in tegra_aes_rng_reset()
756 dd->iv = seed; in tegra_aes_rng_reset()
757 dd->ivlen = slen; in tegra_aes_rng_reset()
759 dd->flags = FLAGS_ENCRYPT | FLAGS_RNG; in tegra_aes_rng_reset()
761 ret = clk_prepare_enable(dd->aes_clk); in tegra_aes_rng_reset()
767 aes_set_key(dd); in tegra_aes_rng_reset()
770 memcpy(dd->buf_in, dd->iv, DEFAULT_RNG_BLK_SZ); in tegra_aes_rng_reset()
771 ret = aes_start_crypt(dd, (u32)dd->dma_buf_in, in tegra_aes_rng_reset()
772 dd->dma_buf_out, 1, FLAGS_CBC, false); in tegra_aes_rng_reset()
774 dev_err(dd->dev, "aes_start_crypt fail(%d)\n", ret); in tegra_aes_rng_reset()
778 if (dd->ivlen >= (2 * DEFAULT_RNG_BLK_SZ + AES_KEYSIZE_128)) { in tegra_aes_rng_reset()
779 dt = dd->iv + DEFAULT_RNG_BLK_SZ + AES_KEYSIZE_128; in tegra_aes_rng_reset()
784 nsec ^= dd->ctr << 56; in tegra_aes_rng_reset()
785 dd->ctr++; in tegra_aes_rng_reset()
790 memcpy(dd->dt, dt, DEFAULT_RNG_BLK_SZ); in tegra_aes_rng_reset()
793 clk_disable_unprepare(dd->aes_clk); in tegra_aes_rng_reset()
796 dev_dbg(dd->dev, "%s: done\n", __func__); in tegra_aes_rng_reset()
881 struct tegra_aes_dev *dd; in tegra_aes_probe() local
885 dd = devm_kzalloc(dev, sizeof(struct tegra_aes_dev), GFP_KERNEL); in tegra_aes_probe()
886 if (dd == NULL) { in tegra_aes_probe()
891 dd->dev = dev; in tegra_aes_probe()
892 platform_set_drvdata(pdev, dd); in tegra_aes_probe()
894 dd->slots = devm_kzalloc(dev, sizeof(struct tegra_aes_slot) * in tegra_aes_probe()
896 if (dd->slots == NULL) { in tegra_aes_probe()
901 spin_lock_init(&dd->lock); in tegra_aes_probe()
902 crypto_init_queue(&dd->queue, TEGRA_AES_QUEUE_LENGTH); in tegra_aes_probe()
919 dd->io_base = devm_ioremap(dev, res->start, resource_size(res)); in tegra_aes_probe()
920 if (!dd->io_base) { in tegra_aes_probe()
927 dd->aes_clk = clk_get(dev, "vde"); in tegra_aes_probe()
928 if (IS_ERR(dd->aes_clk)) { in tegra_aes_probe()
934 err = clk_set_rate(dd->aes_clk, ULONG_MAX); in tegra_aes_probe()
936 dev_err(dd->dev, "iclk set_rate fail(%d)\n", err); in tegra_aes_probe()
945 dd->ivkey_base = dma_alloc_coherent(dev, AES_HW_KEY_TABLE_LENGTH_BYTES, in tegra_aes_probe()
946 &dd->ivkey_phys_base, in tegra_aes_probe()
948 if (!dd->ivkey_base) { in tegra_aes_probe()
954 dd->buf_in = dma_alloc_coherent(dev, AES_HW_DMA_BUFFER_SIZE_BYTES, in tegra_aes_probe()
955 &dd->dma_buf_in, GFP_KERNEL); in tegra_aes_probe()
956 if (!dd->buf_in) { in tegra_aes_probe()
962 dd->buf_out = dma_alloc_coherent(dev, AES_HW_DMA_BUFFER_SIZE_BYTES, in tegra_aes_probe()
963 &dd->dma_buf_out, GFP_KERNEL); in tegra_aes_probe()
964 if (!dd->buf_out) { in tegra_aes_probe()
970 init_completion(&dd->op_complete); in tegra_aes_probe()
985 dd->irq = res->start; in tegra_aes_probe()
987 err = devm_request_irq(dev, dd->irq, aes_irq, IRQF_TRIGGER_HIGH | in tegra_aes_probe()
988 IRQF_SHARED, "tegra-aes", dd); in tegra_aes_probe()
1002 dd->slots[i].slot_num = i; in tegra_aes_probe()
1003 INIT_LIST_HEAD(&dd->slots[i].node); in tegra_aes_probe()
1004 list_add_tail(&dd->slots[i].node, &dev_list); in tegra_aes_probe()
1008 aes_dev = dd; in tegra_aes_probe()
1027 if (dd->ivkey_base) in tegra_aes_probe()
1029 dd->ivkey_base, dd->ivkey_phys_base); in tegra_aes_probe()
1030 if (dd->buf_in) in tegra_aes_probe()
1032 dd->buf_in, dd->dma_buf_in); in tegra_aes_probe()
1033 if (dd->buf_out) in tegra_aes_probe()
1035 dd->buf_out, dd->dma_buf_out); in tegra_aes_probe()
1036 if (!IS_ERR(dd->aes_clk)) in tegra_aes_probe()
1037 clk_put(dd->aes_clk); in tegra_aes_probe()
1053 struct tegra_aes_dev *dd = platform_get_drvdata(pdev); in tegra_aes_remove() local
1066 dd->ivkey_base, dd->ivkey_phys_base); in tegra_aes_remove()
1068 dd->buf_in, dd->dma_buf_in); in tegra_aes_remove()
1070 dd->buf_out, dd->dma_buf_out); in tegra_aes_remove()
1071 clk_put(dd->aes_clk); in tegra_aes_remove()
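The matches above all follow one pattern: each function receives the driver's per-device context (struct tegra_aes_dev *dd) and touches the hardware only through the aes_readl()/aes_writel() accessors shown near the top of the listing. Below is a minimal sketch of that pattern; the accessors are taken verbatim from the listing, while the structure body is only a partial reconstruction inferred from the fields referenced above, not the complete definition from the driver source.

    #include <linux/io.h>
    #include <linux/clk.h>
    #include <linux/completion.h>
    #include <linux/spinlock.h>
    #include <crypto/algapi.h>              /* struct crypto_queue */

    /* Sketch only: fields inferred from the references listed above;
     * the real struct tegra_aes_dev in the driver has further members
     * (DMA buffers, key slots, scatterlist state, ...). */
    struct tegra_aes_dev {
        struct device *dev;
        void __iomem *io_base;              /* MMIO base used by aes_readl/aes_writel */
        struct clk *aes_clk;                /* "vde" clock, enabled around each request */
        spinlock_t lock;                    /* protects the request queue and flags */
        struct crypto_queue queue;          /* pending ablkcipher requests */
        unsigned long flags;                /* FLAGS_BUSY, FLAGS_CBC, FLAGS_FAST, ... */
        struct completion op_complete;      /* completed from aes_irq() */
        int irq;
    };

    /* Register accessors: every hardware access goes through the dd context. */
    static inline u32 aes_readl(struct tegra_aes_dev *dd, u32 offset)
    {
        return readl(dd->io_base + offset);
    }

    static inline void aes_writel(struct tegra_aes_dev *dd, u32 val, u32 offset)
    {
        writel(val, dd->io_base + offset);
    }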