
Lines Matching refs:dd

91 	struct atmel_sha_dev	*dd;  member
115 	struct atmel_sha_dev	*dd;  member
255 static inline u32 atmel_sha_read(struct atmel_sha_dev *dd, u32 offset) in atmel_sha_read() argument
257 u32 value = readl_relaxed(dd->io_base + offset); in atmel_sha_read()
260 if (dd->flags & SHA_FLAGS_DUMP_REG) { in atmel_sha_read()
263 dev_vdbg(dd->dev, "read 0x%08x from %s\n", value, in atmel_sha_read()
271 static inline void atmel_sha_write(struct atmel_sha_dev *dd, in atmel_sha_write() argument
275 if (dd->flags & SHA_FLAGS_DUMP_REG) { in atmel_sha_write()
278 dev_vdbg(dd->dev, "write 0x%08x into %s\n", value, in atmel_sha_write()
283 writel_relaxed(value, dd->io_base + offset); in atmel_sha_write()
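
The two accessors above are the driver's only touch points with the hardware: one relaxed 32-bit MMIO load or store, plus an optional debug trace gated by SHA_FLAGS_DUMP_REG. A minimal user-space model of that pattern (the struct layout, flag value, and register-window handling are simplified stand-ins, not the driver's real definitions or the kernel API):

#include <stdint.h>
#include <stdio.h>

#define SHA_FLAGS_DUMP_REG (1u << 0)  /* stand-in for the driver's flag bit */

struct sha_dev_model {
	volatile uint32_t *io_base;                /* mapped register window */
	unsigned long flags;
	int (*resume)(struct sha_dev_model *dd);   /* parked continuation, used below */
};

/* Models readl_relaxed(dd->io_base + offset): one 32-bit load, traced on demand. */
static inline uint32_t sha_read(struct sha_dev_model *dd, uint32_t offset)
{
	uint32_t value = dd->io_base[offset / 4];

	if (dd->flags & SHA_FLAGS_DUMP_REG)
		printf("read 0x%08x from offset 0x%03x\n", value, offset);
	return value;
}

/* Models writel_relaxed(value, dd->io_base + offset), traced before the store. */
static inline void sha_write(struct sha_dev_model *dd, uint32_t offset,
			     uint32_t value)
{
	if (dd->flags & SHA_FLAGS_DUMP_REG)
		printf("write 0x%08x into offset 0x%03x\n", value, offset);
	dd->io_base[offset / 4] = value;
}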
286 static inline int atmel_sha_complete(struct atmel_sha_dev *dd, int err) in atmel_sha_complete() argument
288 struct ahash_request *req = dd->req; in atmel_sha_complete()
290 dd->flags &= ~(SHA_FLAGS_BUSY | SHA_FLAGS_FINAL | SHA_FLAGS_CPU | in atmel_sha_complete()
294 clk_disable(dd->iclk); in atmel_sha_complete()
296 if ((dd->is_async || dd->force_complete) && req->base.complete) in atmel_sha_complete()
300 tasklet_schedule(&dd->queue_task); in atmel_sha_complete()
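
atmel_sha_complete() is the single exit path for a request: clear the state flags, gate the peripheral clock off, invoke the crypto-API completion callback when the request ran asynchronously (or completion is forced), and kick the queue tasklet so the next request can start. A hedged sketch of that control flow with the kernel types replaced by plain stand-ins:

#include <stdbool.h>

#define SHA_FLAGS_BUSY  (1u << 1)   /* illustrative flag values */
#define SHA_FLAGS_FINAL (1u << 2)
#define SHA_FLAGS_CPU   (1u << 3)

struct req_model {
	void (*complete)(struct req_model *req, int err);
};

struct complete_model {
	struct req_model *req;
	unsigned long flags;
	bool is_async, force_complete;
	void (*clk_disable)(void);   /* models clk_disable(dd->iclk) */
	void (*queue_kick)(void);    /* models tasklet_schedule(&dd->queue_task) */
};

static int sha_complete(struct complete_model *dd, int err)
{
	struct req_model *req = dd->req;

	/* request is over: drop the busy/state bits and the clock */
	dd->flags &= ~(SHA_FLAGS_BUSY | SHA_FLAGS_FINAL | SHA_FLAGS_CPU);
	dd->clk_disable();

	/* async requests report back through the crypto-API callback */
	if ((dd->is_async || dd->force_complete) && req->complete)
		req->complete(req, err);

	/* let the tasklet pick up the next queued request */
	dd->queue_kick();
	return err;
}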
409 struct atmel_sha_dev *dd = NULL; in atmel_sha_find_dev() local
413 if (!tctx->dd) { in atmel_sha_find_dev()
415 dd = tmp; in atmel_sha_find_dev()
418 tctx->dd = dd; in atmel_sha_find_dev()
420 dd = tctx->dd; in atmel_sha_find_dev()
425 return dd; in atmel_sha_find_dev()
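
atmel_sha_find_dev() binds a transform context to a SHA device exactly once and caches the pointer in tctx->dd, so later requests skip the device-list walk. A simplified sketch of the caching lookup (single registered device; the list spinlock the driver takes is omitted):

struct sha_tctx_model {
	struct sha_dev_model *dd;   /* cached device, NULL until first use */
};

static struct sha_dev_model *registered_dev;   /* stand-in for the driver's dev list */

static struct sha_dev_model *sha_find_dev(struct sha_tctx_model *tctx)
{
	if (!tctx->dd)
		tctx->dd = registered_dev;   /* first call: pick a device, cache it */

	return tctx->dd;                     /* later calls: reuse the cached binding */
}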
433 struct atmel_sha_dev *dd = atmel_sha_find_dev(tctx); in atmel_sha_init() local
435 ctx->dd = dd; in atmel_sha_init()
439 dev_dbg(dd->dev, "init: digest size: %d\n", in atmel_sha_init()
476 static void atmel_sha_write_ctrl(struct atmel_sha_dev *dd, int dma) in atmel_sha_write_ctrl() argument
478 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_write_ctrl()
483 if (!dd->caps.has_dma) in atmel_sha_write_ctrl()
484 atmel_sha_write(dd, SHA_IER, SHA_INT_TXBUFE); in atmel_sha_write_ctrl()
486 if (dd->caps.has_dualbuff) in atmel_sha_write_ctrl()
489 atmel_sha_write(dd, SHA_IER, SHA_INT_DATARDY); in atmel_sha_write_ctrl()
524 atmel_sha_write(dd, SHA_CR, SHA_CR_FIRST); in atmel_sha_write_ctrl()
525 } else if (dd->caps.has_uihv && (ctx->flags & SHA_FLAGS_RESTORE)) { in atmel_sha_write_ctrl()
535 atmel_sha_write(dd, SHA_CR, SHA_CR_WUIHV); in atmel_sha_write_ctrl()
537 atmel_sha_write(dd, SHA_REG_DIN(i), hash[i]); in atmel_sha_write_ctrl()
538 atmel_sha_write(dd, SHA_CR, SHA_CR_FIRST); in atmel_sha_write_ctrl()
548 atmel_sha_write(dd, SHA_MR, valmr); in atmel_sha_write_ctrl()
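
atmel_sha_write_ctrl() prepares the mode register for one of three cases: a fresh hash (just SHA_CR_FIRST), a resumed hash on hardware with the UIHV feature (reload the saved digest as the "user initial hash value" before SHA_CR_FIRST), or a plain continuation. A sketch of the restore path, reusing the accessor model above; the register offsets and bit values here are placeholders, only the sequence mirrors the listing:

#define SHA_CR          0x00          /* placeholder offsets and bits */
#define SHA_MR          0x04
#define SHA_REG_DIN(x)  (0x40 + (x) * 4)
#define SHA_CR_FIRST    (1u << 0)
#define SHA_CR_WUIHV    (1u << 1)
#define SHA_MR_UIHV     (1u << 5)

/* Resume a partially computed hash: load the saved digest words back into
 * the engine as the user initial hash value, then mark the next block as
 * "first" so the engine restarts from that state rather than from H0. */
static void sha_restore_state(struct sha_dev_model *dd, const uint32_t *hash,
			      int num_words, uint32_t valmr)
{
	int i;

	sha_write(dd, SHA_CR, SHA_CR_WUIHV);
	for (i = 0; i < num_words; i++)
		sha_write(dd, SHA_REG_DIN(i), hash[i]);

	sha_write(dd, SHA_CR, SHA_CR_FIRST);
	sha_write(dd, SHA_MR, valmr | SHA_MR_UIHV);
}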
551 static inline int atmel_sha_wait_for_data_ready(struct atmel_sha_dev *dd, in atmel_sha_wait_for_data_ready() argument
554 u32 isr = atmel_sha_read(dd, SHA_ISR); in atmel_sha_wait_for_data_ready()
557 return resume(dd); in atmel_sha_wait_for_data_ready()
559 dd->resume = resume; in atmel_sha_wait_for_data_ready()
560 atmel_sha_write(dd, SHA_IER, SHA_INT_DATARDY); in atmel_sha_wait_for_data_ready()
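
atmel_sha_wait_for_data_ready() is the driver's continuation idiom: if DATARDY is already set in the status register, the resume callback runs synchronously; otherwise the callback is parked in dd->resume and the DATARDY interrupt is enabled so the ISR can fire it later. A sketch using the model above (offsets and the -EINPROGRESS stand-in are placeholders):

#define SHA_ISR          0x10         /* placeholder offsets and bits */
#define SHA_IER          0x14
#define SHA_INT_DATARDY  (1u << 0)
#define SHA_EINPROGRESS  (-115)       /* stand-in for -EINPROGRESS */

static int sha_wait_for_data_ready(struct sha_dev_model *dd,
				   int (*resume)(struct sha_dev_model *dd))
{
	uint32_t isr = sha_read(dd, SHA_ISR);

	if (isr & SHA_INT_DATARDY)
		return resume(dd);       /* data already ready: continue now */

	dd->resume = resume;             /* otherwise park the continuation... */
	sha_write(dd, SHA_IER, SHA_INT_DATARDY);   /* ...and let the IRQ run it */
	return SHA_EINPROGRESS;
}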
564 static int atmel_sha_xmit_cpu(struct atmel_sha_dev *dd, const u8 *buf, in atmel_sha_xmit_cpu() argument
567 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_xmit_cpu()
571 dev_dbg(dd->dev, "xmit_cpu: digcnt: 0x%llx 0x%llx, length: %zd, final: %d\n", in atmel_sha_xmit_cpu()
574 atmel_sha_write_ctrl(dd, 0); in atmel_sha_xmit_cpu()
582 dd->flags |= SHA_FLAGS_FINAL; /* catch last interrupt */ in atmel_sha_xmit_cpu()
586 dd->flags |= SHA_FLAGS_CPU; in atmel_sha_xmit_cpu()
589 atmel_sha_write(dd, SHA_REG_DIN(count), buffer[count]); in atmel_sha_xmit_cpu()
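
The CPU (PIO) transmit path rounds the byte count up to 32-bit words and feeds them into the DIN register bank one at a time; the block-done interrupt then drives the next step. A sketch of the word-feed loop, building on the model above (the driver's bounce buffer is word-aligned, which the cast below relies on):

#include <stddef.h>

static void sha_xmit_cpu_words(struct sha_dev_model *dd, const uint8_t *buf,
			       size_t length)
{
	/* safe because the driver hands over an aligned bounce buffer */
	const uint32_t *words = (const uint32_t *)buf;
	size_t count, num_words = (length + 3) / 4;   /* round up to words */

	for (count = 0; count < num_words; count++)
		sha_write(dd, SHA_REG_DIN(count), words[count]);
}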
594 static int atmel_sha_xmit_pdc(struct atmel_sha_dev *dd, dma_addr_t dma_addr1, in atmel_sha_xmit_pdc() argument
597 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_xmit_pdc()
600 dev_dbg(dd->dev, "xmit_pdc: digcnt: 0x%llx 0x%llx, length: %zd, final: %d\n", in atmel_sha_xmit_pdc()
604 atmel_sha_write(dd, SHA_PTCR, SHA_PTCR_TXTDIS); in atmel_sha_xmit_pdc()
605 atmel_sha_write(dd, SHA_TPR, dma_addr1); in atmel_sha_xmit_pdc()
606 atmel_sha_write(dd, SHA_TCR, len32); in atmel_sha_xmit_pdc()
609 atmel_sha_write(dd, SHA_TNPR, dma_addr2); in atmel_sha_xmit_pdc()
610 atmel_sha_write(dd, SHA_TNCR, len32); in atmel_sha_xmit_pdc()
612 atmel_sha_write_ctrl(dd, 1); in atmel_sha_xmit_pdc()
620 dd->flags |= SHA_FLAGS_FINAL; /* catch last interrupt */ in atmel_sha_xmit_pdc()
622 dd->flags |= SHA_FLAGS_DMA_ACTIVE; in atmel_sha_xmit_pdc()
625 atmel_sha_write(dd, SHA_PTCR, SHA_PTCR_TXTEN); in atmel_sha_xmit_pdc()
632 struct atmel_sha_dev *dd = data; in atmel_sha_dma_callback() local
634 dd->is_async = true; in atmel_sha_dma_callback()
637 atmel_sha_write(dd, SHA_IER, SHA_INT_DATARDY); in atmel_sha_dma_callback()
640 static int atmel_sha_xmit_dma(struct atmel_sha_dev *dd, dma_addr_t dma_addr1, in atmel_sha_xmit_dma() argument
643 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_xmit_dma()
647 dev_dbg(dd->dev, "xmit_dma: digcnt: 0x%llx 0x%llx, length: %zd, final: %d\n", in atmel_sha_xmit_dma()
650 dd->dma_lch_in.dma_conf.src_maxburst = 16; in atmel_sha_xmit_dma()
651 dd->dma_lch_in.dma_conf.dst_maxburst = 16; in atmel_sha_xmit_dma()
653 dmaengine_slave_config(dd->dma_lch_in.chan, &dd->dma_lch_in.dma_conf); in atmel_sha_xmit_dma()
661 in_desc = dmaengine_prep_slave_sg(dd->dma_lch_in.chan, sg, 2, in atmel_sha_xmit_dma()
667 in_desc = dmaengine_prep_slave_sg(dd->dma_lch_in.chan, sg, 1, in atmel_sha_xmit_dma()
671 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_xmit_dma()
674 in_desc->callback_param = dd; in atmel_sha_xmit_dma()
676 atmel_sha_write_ctrl(dd, 1); in atmel_sha_xmit_dma()
684 dd->flags |= SHA_FLAGS_FINAL; /* catch last interrupt */ in atmel_sha_xmit_dma()
686 dd->flags |= SHA_FLAGS_DMA_ACTIVE; in atmel_sha_xmit_dma()
690 dma_async_issue_pending(dd->dma_lch_in.chan); in atmel_sha_xmit_dma()
695 static int atmel_sha_xmit_start(struct atmel_sha_dev *dd, dma_addr_t dma_addr1, in atmel_sha_xmit_start() argument
698 if (dd->caps.has_dma) in atmel_sha_xmit_start()
699 return atmel_sha_xmit_dma(dd, dma_addr1, length1, in atmel_sha_xmit_start()
702 return atmel_sha_xmit_pdc(dd, dma_addr1, length1, in atmel_sha_xmit_start()
706 static int atmel_sha_update_cpu(struct atmel_sha_dev *dd) in atmel_sha_update_cpu() argument
708 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_update_cpu()
716 return atmel_sha_xmit_cpu(dd, ctx->buffer, bufcnt, 1); in atmel_sha_update_cpu()
719 static int atmel_sha_xmit_dma_map(struct atmel_sha_dev *dd, in atmel_sha_xmit_dma_map() argument
723 ctx->dma_addr = dma_map_single(dd->dev, ctx->buffer, in atmel_sha_xmit_dma_map()
725 if (dma_mapping_error(dd->dev, ctx->dma_addr)) { in atmel_sha_xmit_dma_map()
726 dev_err(dd->dev, "dma %zu bytes error\n", ctx->buflen + in atmel_sha_xmit_dma_map()
728 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_xmit_dma_map()
734 return atmel_sha_xmit_start(dd, ctx->dma_addr, length, 0, 0, final); in atmel_sha_xmit_dma_map()
737 static int atmel_sha_update_dma_slow(struct atmel_sha_dev *dd) in atmel_sha_update_dma_slow() argument
739 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_update_dma_slow()
747 dev_dbg(dd->dev, "slow: bufcnt: %zu, digcnt: 0x%llx 0x%llx, final: %d\n", in atmel_sha_update_dma_slow()
756 return atmel_sha_xmit_dma_map(dd, ctx, count, final); in atmel_sha_update_dma_slow()
762 static int atmel_sha_update_dma_start(struct atmel_sha_dev *dd) in atmel_sha_update_dma_start() argument
764 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_update_dma_start()
773 return atmel_sha_update_dma_slow(dd); in atmel_sha_update_dma_start()
775 dev_dbg(dd->dev, "fast: digcnt: 0x%llx 0x%llx, bufcnt: %zd, total: %u\n", in atmel_sha_update_dma_start()
781 return atmel_sha_update_dma_slow(dd); in atmel_sha_update_dma_start()
785 return atmel_sha_update_dma_slow(dd); in atmel_sha_update_dma_start()
814 ctx->dma_addr = dma_map_single(dd->dev, ctx->buffer, in atmel_sha_update_dma_start()
816 if (dma_mapping_error(dd->dev, ctx->dma_addr)) { in atmel_sha_update_dma_start()
817 dev_err(dd->dev, "dma %zu bytes error\n", in atmel_sha_update_dma_start()
819 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_update_dma_start()
826 return atmel_sha_xmit_start(dd, ctx->dma_addr, count, 0, in atmel_sha_update_dma_start()
830 if (!dma_map_sg(dd->dev, ctx->sg, 1, in atmel_sha_update_dma_start()
832 dev_err(dd->dev, "dma_map_sg error\n"); in atmel_sha_update_dma_start()
833 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_update_dma_start()
840 return atmel_sha_xmit_start(dd, sg_dma_address(ctx->sg), in atmel_sha_update_dma_start()
845 if (!dma_map_sg(dd->dev, ctx->sg, 1, DMA_TO_DEVICE)) { in atmel_sha_update_dma_start()
846 dev_err(dd->dev, "dma_map_sg error\n"); in atmel_sha_update_dma_start()
847 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_update_dma_start()
853 return atmel_sha_xmit_start(dd, sg_dma_address(ctx->sg), length, 0, in atmel_sha_update_dma_start()
857 static int atmel_sha_update_dma_stop(struct atmel_sha_dev *dd) in atmel_sha_update_dma_stop() argument
859 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_update_dma_stop()
862 dma_unmap_sg(dd->dev, ctx->sg, 1, DMA_TO_DEVICE); in atmel_sha_update_dma_stop()
869 dma_unmap_single(dd->dev, ctx->dma_addr, in atmel_sha_update_dma_stop()
873 dma_unmap_single(dd->dev, ctx->dma_addr, ctx->buflen + in atmel_sha_update_dma_stop()
880 static int atmel_sha_update_req(struct atmel_sha_dev *dd) in atmel_sha_update_req() argument
882 struct ahash_request *req = dd->req; in atmel_sha_update_req()
886 dev_dbg(dd->dev, "update_req: total: %u, digcnt: 0x%llx 0x%llx\n", in atmel_sha_update_req()
890 err = atmel_sha_update_cpu(dd); in atmel_sha_update_req()
892 err = atmel_sha_update_dma_start(dd); in atmel_sha_update_req()
895 	dev_dbg(dd->dev, "update: err: %d, digcnt: 0x%llx 0x%llx\n", in atmel_sha_update_req()
901 static int atmel_sha_final_req(struct atmel_sha_dev *dd) in atmel_sha_final_req() argument
903 struct ahash_request *req = dd->req; in atmel_sha_final_req()
912 err = atmel_sha_xmit_dma_map(dd, ctx, count, 1); in atmel_sha_final_req()
919 err = atmel_sha_xmit_cpu(dd, ctx->buffer, count, 1); in atmel_sha_final_req()
922 dev_dbg(dd->dev, "final_req: err: %d\n", err); in atmel_sha_final_req()
954 hash[i] = atmel_sha_read(ctx->dd, SHA_REG_DIGEST(i)); in atmel_sha_copy_hash()
992 struct atmel_sha_dev *dd = ctx->dd; in atmel_sha_finish() local
997 dev_dbg(dd->dev, "digcnt: 0x%llx 0x%llx, bufcnt: %zd\n", ctx->digcnt[1], in atmel_sha_finish()
1006 struct atmel_sha_dev *dd = ctx->dd; in atmel_sha_finish_req() local
1010 if (SHA_FLAGS_FINAL & dd->flags) in atmel_sha_finish_req()
1017 (void)atmel_sha_complete(dd, err); in atmel_sha_finish_req()
1020 static int atmel_sha_hw_init(struct atmel_sha_dev *dd) in atmel_sha_hw_init() argument
1024 err = clk_enable(dd->iclk); in atmel_sha_hw_init()
1028 if (!(SHA_FLAGS_INIT & dd->flags)) { in atmel_sha_hw_init()
1029 atmel_sha_write(dd, SHA_CR, SHA_CR_SWRST); in atmel_sha_hw_init()
1030 dd->flags |= SHA_FLAGS_INIT; in atmel_sha_hw_init()
1031 dd->err = 0; in atmel_sha_hw_init()
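
atmel_sha_hw_init() enables the peripheral clock on every request but issues the software reset only once, remembered via SHA_FLAGS_INIT (the driver also clears dd->err at this point). A sketch of the one-shot reset:

#define SHA_FLAGS_INIT (1u << 4)   /* illustrative flag bit */
#define SHA_CR_SWRST   (1u << 8)   /* placeholder reset bit */

static int sha_hw_init(struct sha_dev_model *dd, int (*clk_enable)(void))
{
	int err = clk_enable();      /* models clk_enable(dd->iclk) */

	if (err)
		return err;

	if (!(dd->flags & SHA_FLAGS_INIT)) {
		sha_write(dd, SHA_CR, SHA_CR_SWRST);   /* reset the engine once... */
		dd->flags |= SHA_FLAGS_INIT;           /* ...and remember we did */
	}
	return 0;
}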
1037 static inline unsigned int atmel_sha_get_version(struct atmel_sha_dev *dd) in atmel_sha_get_version() argument
1039 return atmel_sha_read(dd, SHA_HW_VERSION) & 0x00000fff; in atmel_sha_get_version()
1042 static void atmel_sha_hw_version_init(struct atmel_sha_dev *dd) in atmel_sha_hw_version_init() argument
1044 atmel_sha_hw_init(dd); in atmel_sha_hw_version_init()
1046 dd->hw_version = atmel_sha_get_version(dd); in atmel_sha_hw_version_init()
1048 dev_info(dd->dev, in atmel_sha_hw_version_init()
1049 "version: 0x%x\n", dd->hw_version); in atmel_sha_hw_version_init()
1051 clk_disable(dd->iclk); in atmel_sha_hw_version_init()
1054 static int atmel_sha_handle_queue(struct atmel_sha_dev *dd, in atmel_sha_handle_queue() argument
1063 spin_lock_irqsave(&dd->lock, flags); in atmel_sha_handle_queue()
1065 ret = ahash_enqueue_request(&dd->queue, req); in atmel_sha_handle_queue()
1067 if (SHA_FLAGS_BUSY & dd->flags) { in atmel_sha_handle_queue()
1068 spin_unlock_irqrestore(&dd->lock, flags); in atmel_sha_handle_queue()
1072 backlog = crypto_get_backlog(&dd->queue); in atmel_sha_handle_queue()
1073 async_req = crypto_dequeue_request(&dd->queue); in atmel_sha_handle_queue()
1075 dd->flags |= SHA_FLAGS_BUSY; in atmel_sha_handle_queue()
1077 spin_unlock_irqrestore(&dd->lock, flags); in atmel_sha_handle_queue()
1087 dd->req = ahash_request_cast(async_req); in atmel_sha_handle_queue()
1088 start_async = (dd->req != req); in atmel_sha_handle_queue()
1089 dd->is_async = start_async; in atmel_sha_handle_queue()
1090 dd->force_complete = false; in atmel_sha_handle_queue()
1093 err = ctx->start(dd); in atmel_sha_handle_queue()
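
atmel_sha_handle_queue() follows the usual crypto-engine shape: enqueue under the lock, bail out if the hardware is already busy, otherwise dequeue the oldest request, mark the device busy, record whether the started request is someone else's (is_async), and call its start handler. A compressed, self-contained sketch with the spinlock reduced to a mutex and the crypto queue to a ring buffer; backlog notification and the real function's return-code plumbing are omitted:

#include <pthread.h>
#include <stdbool.h>

struct hq_req { int (*start)(void *dd); };

struct hq_dev {
	pthread_mutex_t lock;
	struct hq_req *slots[8];     /* tiny stand-in for the ahash queue */
	unsigned int head, tail;
	bool busy;
	struct hq_req *req;          /* request currently on the hardware */
	bool is_async;
};

static int hq_handle_queue(struct hq_dev *dd, struct hq_req *req)
{
	struct hq_req *next = NULL;

	pthread_mutex_lock(&dd->lock);
	if (req)
		dd->slots[dd->tail++ % 8] = req;   /* enqueue the new request */

	if (dd->busy) {                            /* engine busy: leave it queued */
		pthread_mutex_unlock(&dd->lock);
		return 0;
	}
	if (dd->head != dd->tail) {
		next = dd->slots[dd->head++ % 8];  /* dequeue the oldest request */
		dd->busy = true;
	}
	pthread_mutex_unlock(&dd->lock);

	if (!next)
		return 0;

	dd->req = next;
	dd->is_async = (next != req);   /* started a request other than the caller's? */
	return next->start(dd);
}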
1097 static int atmel_sha_done(struct atmel_sha_dev *dd);
1099 static int atmel_sha_start(struct atmel_sha_dev *dd) in atmel_sha_start() argument
1101 struct ahash_request *req = dd->req; in atmel_sha_start()
1105 dev_dbg(dd->dev, "handling new req, op: %lu, nbytes: %d\n", in atmel_sha_start()
1108 err = atmel_sha_hw_init(dd); in atmel_sha_start()
1110 return atmel_sha_complete(dd, err); in atmel_sha_start()
1131 dd->resume = atmel_sha_done; in atmel_sha_start()
1133 err = atmel_sha_update_req(dd); in atmel_sha_start()
1136 err = atmel_sha_final_req(dd); in atmel_sha_start()
1138 err = atmel_sha_final_req(dd); in atmel_sha_start()
1145 dev_dbg(dd->dev, "exit, err: %d\n", err); in atmel_sha_start()
1154 struct atmel_sha_dev *dd = tctx->dd; in atmel_sha_enqueue() local
1158 return atmel_sha_handle_queue(dd, req); in atmel_sha_enqueue()
1383 struct atmel_sha_dev *dd = (struct atmel_sha_dev *)data; in atmel_sha_queue_task() local
1385 atmel_sha_handle_queue(dd, NULL); in atmel_sha_queue_task()
1388 static int atmel_sha_done(struct atmel_sha_dev *dd) in atmel_sha_done() argument
1392 if (SHA_FLAGS_CPU & dd->flags) { in atmel_sha_done()
1393 if (SHA_FLAGS_OUTPUT_READY & dd->flags) { in atmel_sha_done()
1394 dd->flags &= ~SHA_FLAGS_OUTPUT_READY; in atmel_sha_done()
1397 } else if (SHA_FLAGS_DMA_READY & dd->flags) { in atmel_sha_done()
1398 if (SHA_FLAGS_DMA_ACTIVE & dd->flags) { in atmel_sha_done()
1399 dd->flags &= ~SHA_FLAGS_DMA_ACTIVE; in atmel_sha_done()
1400 atmel_sha_update_dma_stop(dd); in atmel_sha_done()
1401 if (dd->err) { in atmel_sha_done()
1402 err = dd->err; in atmel_sha_done()
1406 if (SHA_FLAGS_OUTPUT_READY & dd->flags) { in atmel_sha_done()
1408 dd->flags &= ~(SHA_FLAGS_DMA_READY | in atmel_sha_done()
1410 err = atmel_sha_update_dma_start(dd); in atmel_sha_done()
1419 atmel_sha_finish_req(dd->req, err); in atmel_sha_done()
1426 struct atmel_sha_dev *dd = (struct atmel_sha_dev *)data; in atmel_sha_done_task() local
1428 dd->is_async = true; in atmel_sha_done_task()
1429 (void)dd->resume(dd); in atmel_sha_done_task()
1457 static bool atmel_sha_dma_check_aligned(struct atmel_sha_dev *dd, in atmel_sha_dma_check_aligned() argument
1461 struct atmel_sha_dma *dma = &dd->dma_lch_in; in atmel_sha_dma_check_aligned()
1462 struct ahash_request *req = dd->req; in atmel_sha_dma_check_aligned()
1494 struct atmel_sha_dev *dd = data; in atmel_sha_dma_callback2() local
1495 struct atmel_sha_dma *dma = &dd->dma_lch_in; in atmel_sha_dma_callback2()
1500 dma_unmap_sg(dd->dev, dma->sg, dma->nents, DMA_TO_DEVICE); in atmel_sha_dma_callback2()
1507 dd->is_async = true; in atmel_sha_dma_callback2()
1508 (void)atmel_sha_wait_for_data_ready(dd, dd->resume); in atmel_sha_dma_callback2()
1511 static int atmel_sha_dma_start(struct atmel_sha_dev *dd, in atmel_sha_dma_start() argument
1516 struct atmel_sha_dma *dma = &dd->dma_lch_in; in atmel_sha_dma_start()
1524 dd->resume = resume; in atmel_sha_dma_start()
1531 sg_len = dma_map_sg(dd->dev, dma->sg, dma->nents, DMA_TO_DEVICE); in atmel_sha_dma_start()
1551 desc->callback_param = dd; in atmel_sha_dma_start()
1562 dma_unmap_sg(dd->dev, dma->sg, dma->nents, DMA_TO_DEVICE); in atmel_sha_dma_start()
1564 return atmel_sha_complete(dd, err); in atmel_sha_dma_start()
1570 static int atmel_sha_cpu_transfer(struct atmel_sha_dev *dd) in atmel_sha_cpu_transfer() argument
1572 struct ahash_request *req = dd->req; in atmel_sha_cpu_transfer()
1583 atmel_sha_write(dd, SHA_REG_DIN(din), words[i]); in atmel_sha_cpu_transfer()
1604 isr = atmel_sha_read(dd, SHA_ISR); in atmel_sha_cpu_transfer()
1607 dd->resume = atmel_sha_cpu_transfer; in atmel_sha_cpu_transfer()
1608 atmel_sha_write(dd, SHA_IER, SHA_INT_DATARDY); in atmel_sha_cpu_transfer()
1614 return dd->cpu_transfer_complete(dd); in atmel_sha_cpu_transfer()
1616 return atmel_sha_wait_for_data_ready(dd, dd->cpu_transfer_complete); in atmel_sha_cpu_transfer()
1619 static int atmel_sha_cpu_start(struct atmel_sha_dev *dd, in atmel_sha_cpu_start() argument
1626 struct ahash_request *req = dd->req; in atmel_sha_cpu_start()
1630 return resume(dd); in atmel_sha_cpu_start()
1649 dd->cpu_transfer_complete = resume; in atmel_sha_cpu_start()
1650 return atmel_sha_cpu_transfer(dd); in atmel_sha_cpu_start()
1653 static int atmel_sha_cpu_hash(struct atmel_sha_dev *dd, in atmel_sha_cpu_hash() argument
1658 struct ahash_request *req = dd->req; in atmel_sha_cpu_hash()
1664 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_cpu_hash()
1667 atmel_sha_write(dd, SHA_MR, mr); in atmel_sha_cpu_hash()
1668 atmel_sha_write(dd, SHA_MSR, msglen); in atmel_sha_cpu_hash()
1669 atmel_sha_write(dd, SHA_BCR, msglen); in atmel_sha_cpu_hash()
1670 atmel_sha_write(dd, SHA_CR, SHA_CR_FIRST); in atmel_sha_cpu_hash()
1672 sg_init_one(&dd->tmp, data, datalen); in atmel_sha_cpu_hash()
1673 return atmel_sha_cpu_start(dd, &dd->tmp, datalen, false, true, resume); in atmel_sha_cpu_hash()
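
atmel_sha_cpu_hash() shows the "auto padding" programming sequence: select the algorithm in the mode register, load the total message size into both SHA_MSR and SHA_BCR so the engine appends the padding and finalizes by itself, issue SHA_CR_FIRST, and only then feed the data. A sketch of the setup, reusing the placeholder offsets from the earlier sketches:

#define SHA_MSR 0x20   /* placeholder: message size register */
#define SHA_BCR 0x30   /* placeholder: bytes count register */

/* Program one complete, hardware-padded hash over msglen bytes. */
static void sha_cpu_hash_setup(struct sha_dev_model *dd, uint32_t mr,
			       uint32_t msglen)
{
	sha_write(dd, SHA_MR, mr);            /* algorithm + auto-padding mode */
	sha_write(dd, SHA_MSR, msglen);       /* total message size... */
	sha_write(dd, SHA_BCR, msglen);       /* ...and remaining byte count */
	sha_write(dd, SHA_CR, SHA_CR_FIRST);
	/* the data itself is then fed through DIN, as in the PIO sketch above */
}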
1739 static int atmel_sha_hmac_setup(struct atmel_sha_dev *dd,
1741 static int atmel_sha_hmac_prehash_key(struct atmel_sha_dev *dd,
1743 static int atmel_sha_hmac_prehash_key_done(struct atmel_sha_dev *dd);
1744 static int atmel_sha_hmac_compute_ipad_hash(struct atmel_sha_dev *dd);
1745 static int atmel_sha_hmac_compute_opad_hash(struct atmel_sha_dev *dd);
1746 static int atmel_sha_hmac_setup_done(struct atmel_sha_dev *dd);
1748 static int atmel_sha_hmac_init_done(struct atmel_sha_dev *dd);
1749 static int atmel_sha_hmac_final(struct atmel_sha_dev *dd);
1750 static int atmel_sha_hmac_final_done(struct atmel_sha_dev *dd);
1751 static int atmel_sha_hmac_digest2(struct atmel_sha_dev *dd);
1753 static int atmel_sha_hmac_setup(struct atmel_sha_dev *dd, in atmel_sha_hmac_setup() argument
1756 struct ahash_request *req = dd->req; in atmel_sha_hmac_setup()
1792 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_hmac_setup()
1797 return resume(dd); in atmel_sha_hmac_setup()
1801 return atmel_sha_hmac_prehash_key(dd, key, keylen); in atmel_sha_hmac_setup()
1806 return atmel_sha_hmac_compute_ipad_hash(dd); in atmel_sha_hmac_setup()
1809 static int atmel_sha_hmac_prehash_key(struct atmel_sha_dev *dd, in atmel_sha_hmac_prehash_key() argument
1812 return atmel_sha_cpu_hash(dd, key, keylen, true, in atmel_sha_hmac_prehash_key()
1816 static int atmel_sha_hmac_prehash_key_done(struct atmel_sha_dev *dd) in atmel_sha_hmac_prehash_key_done() argument
1818 struct ahash_request *req = dd->req; in atmel_sha_hmac_prehash_key_done()
1828 hmac->ipad[i] = atmel_sha_read(dd, SHA_REG_DIGEST(i)); in atmel_sha_hmac_prehash_key_done()
1830 return atmel_sha_hmac_compute_ipad_hash(dd); in atmel_sha_hmac_prehash_key_done()
1833 static int atmel_sha_hmac_compute_ipad_hash(struct atmel_sha_dev *dd) in atmel_sha_hmac_compute_ipad_hash() argument
1835 struct ahash_request *req = dd->req; in atmel_sha_hmac_compute_ipad_hash()
1848 return atmel_sha_cpu_hash(dd, hmac->ipad, bs, false, in atmel_sha_hmac_compute_ipad_hash()
1852 static int atmel_sha_hmac_compute_opad_hash(struct atmel_sha_dev *dd) in atmel_sha_hmac_compute_opad_hash() argument
1854 struct ahash_request *req = dd->req; in atmel_sha_hmac_compute_opad_hash()
1863 hmac->ipad[i] = atmel_sha_read(dd, SHA_REG_DIGEST(i)); in atmel_sha_hmac_compute_opad_hash()
1864 return atmel_sha_cpu_hash(dd, hmac->opad, bs, false, in atmel_sha_hmac_compute_opad_hash()
1868 static int atmel_sha_hmac_setup_done(struct atmel_sha_dev *dd) in atmel_sha_hmac_setup_done() argument
1870 struct ahash_request *req = dd->req; in atmel_sha_hmac_setup_done()
1878 hmac->opad[i] = atmel_sha_read(dd, SHA_REG_DIGEST(i)); in atmel_sha_hmac_setup_done()
1880 return hmac->resume(dd); in atmel_sha_hmac_setup_done()
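
The HMAC helpers above implement the standard RFC 2104 precomputation: a key longer than the block size is first hashed down (prehash_key), the key is then XOR-padded into the ipad and opad blocks, and the digests of those two blocks are cached so each later HMAC only hashes ipad-state || message and opad-state || inner-digest. The driver does the XOR on u32 words; byte-wise it is equivalent to:

#include <stdint.h>
#include <string.h>

/* Build the HMAC inner/outer pad blocks from a key no longer than the
 * block size: ipad = K ^ 0x36.., opad = K ^ 0x5c.. (RFC 2104). */
static void hmac_build_pads(const uint8_t *key, size_t keylen,
			    size_t block_size, uint8_t *ipad, uint8_t *opad)
{
	size_t i;

	memset(ipad, 0, block_size);
	memcpy(ipad, key, keylen);    /* zero-padded key */
	memcpy(opad, ipad, block_size);

	for (i = 0; i < block_size; i++) {
		ipad[i] ^= 0x36;
		opad[i] ^= 0x5c;
	}
}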
1883 static int atmel_sha_hmac_start(struct atmel_sha_dev *dd) in atmel_sha_hmac_start() argument
1885 struct ahash_request *req = dd->req; in atmel_sha_hmac_start()
1889 err = atmel_sha_hw_init(dd); in atmel_sha_hmac_start()
1891 return atmel_sha_complete(dd, err); in atmel_sha_hmac_start()
1895 err = atmel_sha_hmac_setup(dd, atmel_sha_hmac_init_done); in atmel_sha_hmac_start()
1899 dd->resume = atmel_sha_done; in atmel_sha_hmac_start()
1900 err = atmel_sha_update_req(dd); in atmel_sha_hmac_start()
1904 dd->resume = atmel_sha_hmac_final; in atmel_sha_hmac_start()
1905 err = atmel_sha_final_req(dd); in atmel_sha_hmac_start()
1909 err = atmel_sha_hmac_setup(dd, atmel_sha_hmac_digest2); in atmel_sha_hmac_start()
1913 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_hmac_start()
1938 static int atmel_sha_hmac_init_done(struct atmel_sha_dev *dd) in atmel_sha_hmac_init_done() argument
1940 struct ahash_request *req = dd->req; in atmel_sha_hmac_init_done()
1952 return atmel_sha_complete(dd, 0); in atmel_sha_hmac_init_done()
1955 static int atmel_sha_hmac_final(struct atmel_sha_dev *dd) in atmel_sha_hmac_final() argument
1957 struct ahash_request *req = dd->req; in atmel_sha_hmac_final()
1971 digest[i] = atmel_sha_read(dd, SHA_REG_DIGEST(i)); in atmel_sha_hmac_final()
1974 atmel_sha_write(dd, SHA_CR, SHA_CR_WUIHV); in atmel_sha_hmac_final()
1977 atmel_sha_write(dd, SHA_REG_DIN(i), hmac->opad[i]); in atmel_sha_hmac_final()
1981 atmel_sha_write(dd, SHA_MR, mr); in atmel_sha_hmac_final()
1982 atmel_sha_write(dd, SHA_MSR, bs + ds); in atmel_sha_hmac_final()
1983 atmel_sha_write(dd, SHA_BCR, ds); in atmel_sha_hmac_final()
1984 atmel_sha_write(dd, SHA_CR, SHA_CR_FIRST); in atmel_sha_hmac_final()
1986 sg_init_one(&dd->tmp, digest, ds); in atmel_sha_hmac_final()
1987 return atmel_sha_cpu_start(dd, &dd->tmp, ds, false, true, in atmel_sha_hmac_final()
1991 static int atmel_sha_hmac_final_done(struct atmel_sha_dev *dd) in atmel_sha_hmac_final_done() argument
1998 atmel_sha_copy_hash(dd->req); in atmel_sha_hmac_final_done()
1999 atmel_sha_copy_ready_hash(dd->req); in atmel_sha_hmac_final_done()
2000 return atmel_sha_complete(dd, 0); in atmel_sha_hmac_final_done()
2014 static int atmel_sha_hmac_digest2(struct atmel_sha_dev *dd) in atmel_sha_hmac_digest2() argument
2016 struct ahash_request *req = dd->req; in atmel_sha_hmac_digest2()
2027 return atmel_sha_complete(dd, -EINVAL); // TODO: in atmel_sha_hmac_digest2()
2031 atmel_sha_dma_check_aligned(dd, req->src, req->nbytes)) in atmel_sha_hmac_digest2()
2035 atmel_sha_write(dd, SHA_CR, SHA_CR_WUIHV); in atmel_sha_hmac_digest2()
2037 atmel_sha_write(dd, SHA_REG_DIN(i), hmac->ipad[i]); in atmel_sha_hmac_digest2()
2039 atmel_sha_write(dd, SHA_CR, SHA_CR_WUIEHV); in atmel_sha_hmac_digest2()
2041 atmel_sha_write(dd, SHA_REG_DIN(i), hmac->opad[i]); in atmel_sha_hmac_digest2()
2050 atmel_sha_write(dd, SHA_MR, mr); in atmel_sha_hmac_digest2()
2052 atmel_sha_write(dd, SHA_MSR, req->nbytes); in atmel_sha_hmac_digest2()
2053 atmel_sha_write(dd, SHA_BCR, req->nbytes); in atmel_sha_hmac_digest2()
2055 atmel_sha_write(dd, SHA_CR, SHA_CR_FIRST); in atmel_sha_hmac_digest2()
2059 return atmel_sha_dma_start(dd, req->src, req->nbytes, in atmel_sha_hmac_digest2()
2062 return atmel_sha_cpu_start(dd, req->src, req->nbytes, false, true, in atmel_sha_hmac_digest2()
2216 static int atmel_sha_authenc_init2(struct atmel_sha_dev *dd);
2217 static int atmel_sha_authenc_init_done(struct atmel_sha_dev *dd);
2218 static int atmel_sha_authenc_final_done(struct atmel_sha_dev *dd);
2247 authctx->cb(authctx->aes_dev, err, authctx->base.dd->is_async); in atmel_sha_authenc_complete()
2250 static int atmel_sha_authenc_start(struct atmel_sha_dev *dd) in atmel_sha_authenc_start() argument
2252 struct ahash_request *req = dd->req; in atmel_sha_authenc_start()
2260 dd->force_complete = true; in atmel_sha_authenc_start()
2262 err = atmel_sha_hw_init(dd); in atmel_sha_authenc_start()
2263 return authctx->cb(authctx->aes_dev, err, dd->is_async); in atmel_sha_authenc_start()
2270 dummy.dd = NULL; in atmel_sha_authenc_is_ready()
2374 struct atmel_sha_dev *dd; in atmel_sha_authenc_schedule() local
2380 dd = atmel_sha_find_dev(tctx); in atmel_sha_authenc_schedule()
2381 if (!dd) in atmel_sha_authenc_schedule()
2385 ctx->dd = dd; in atmel_sha_authenc_schedule()
2392 return atmel_sha_handle_queue(dd, req); in atmel_sha_authenc_schedule()
2406 struct atmel_sha_dev *dd = ctx->dd; in atmel_sha_authenc_init() local
2409 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_authenc_init()
2418 return atmel_sha_hmac_setup(dd, atmel_sha_authenc_init2); in atmel_sha_authenc_init()
2422 static int atmel_sha_authenc_init2(struct atmel_sha_dev *dd) in atmel_sha_authenc_init2() argument
2424 struct ahash_request *req = dd->req; in atmel_sha_authenc_init2()
2433 atmel_sha_write(dd, SHA_CR, SHA_CR_WUIHV); in atmel_sha_authenc_init2()
2435 atmel_sha_write(dd, SHA_REG_DIN(i), hmac->ipad[i]); in atmel_sha_authenc_init2()
2437 atmel_sha_write(dd, SHA_CR, SHA_CR_WUIEHV); in atmel_sha_authenc_init2()
2439 atmel_sha_write(dd, SHA_REG_DIN(i), hmac->opad[i]); in atmel_sha_authenc_init2()
2445 atmel_sha_write(dd, SHA_MR, mr); in atmel_sha_authenc_init2()
2448 atmel_sha_write(dd, SHA_MSR, msg_size); in atmel_sha_authenc_init2()
2449 atmel_sha_write(dd, SHA_BCR, msg_size); in atmel_sha_authenc_init2()
2451 atmel_sha_write(dd, SHA_CR, SHA_CR_FIRST); in atmel_sha_authenc_init2()
2454 return atmel_sha_cpu_start(dd, authctx->assoc, authctx->assoclen, in atmel_sha_authenc_init2()
2459 static int atmel_sha_authenc_init_done(struct atmel_sha_dev *dd) in atmel_sha_authenc_init_done() argument
2461 struct ahash_request *req = dd->req; in atmel_sha_authenc_init_done()
2464 return authctx->cb(authctx->aes_dev, 0, dd->is_async); in atmel_sha_authenc_init_done()
2474 struct atmel_sha_dev *dd = ctx->dd; in atmel_sha_authenc_final() local
2498 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_authenc_final()
2506 return atmel_sha_wait_for_data_ready(dd, in atmel_sha_authenc_final()
2511 static int atmel_sha_authenc_final_done(struct atmel_sha_dev *dd) in atmel_sha_authenc_final_done() argument
2513 struct ahash_request *req = dd->req; in atmel_sha_authenc_final_done()
2518 authctx->digest[i] = atmel_sha_read(dd, SHA_REG_DIGEST(i)); in atmel_sha_authenc_final_done()
2520 return atmel_sha_complete(dd, 0); in atmel_sha_authenc_final_done()
2527 struct atmel_sha_dev *dd = ctx->dd; in atmel_sha_authenc_abort() local
2530 dd->is_async = false; in atmel_sha_authenc_abort()
2531 dd->force_complete = false; in atmel_sha_authenc_abort()
2532 (void)atmel_sha_complete(dd, 0); in atmel_sha_authenc_abort()
2539 static void atmel_sha_unregister_algs(struct atmel_sha_dev *dd) in atmel_sha_unregister_algs() argument
2543 if (dd->caps.has_hmac) in atmel_sha_unregister_algs()
2550 if (dd->caps.has_sha224) in atmel_sha_unregister_algs()
2553 if (dd->caps.has_sha_384_512) { in atmel_sha_unregister_algs()
2559 static int atmel_sha_register_algs(struct atmel_sha_dev *dd) in atmel_sha_register_algs() argument
2569 if (dd->caps.has_sha224) { in atmel_sha_register_algs()
2575 if (dd->caps.has_sha_384_512) { in atmel_sha_register_algs()
2583 if (dd->caps.has_hmac) { in atmel_sha_register_algs()
2623 static int atmel_sha_dma_init(struct atmel_sha_dev *dd, in atmel_sha_dma_init() argument
2633 dd->dma_lch_in.chan = dma_request_slave_channel_compat(mask_in, in atmel_sha_dma_init()
2634 atmel_sha_filter, &pdata->dma_slave->rxdata, dd->dev, "tx"); in atmel_sha_dma_init()
2635 if (!dd->dma_lch_in.chan) { in atmel_sha_dma_init()
2636 dev_warn(dd->dev, "no DMA channel available\n"); in atmel_sha_dma_init()
2640 dd->dma_lch_in.dma_conf.direction = DMA_MEM_TO_DEV; in atmel_sha_dma_init()
2641 dd->dma_lch_in.dma_conf.dst_addr = dd->phys_base + in atmel_sha_dma_init()
2643 dd->dma_lch_in.dma_conf.src_maxburst = 1; in atmel_sha_dma_init()
2644 dd->dma_lch_in.dma_conf.src_addr_width = in atmel_sha_dma_init()
2646 dd->dma_lch_in.dma_conf.dst_maxburst = 1; in atmel_sha_dma_init()
2647 dd->dma_lch_in.dma_conf.dst_addr_width = in atmel_sha_dma_init()
2649 dd->dma_lch_in.dma_conf.device_fc = false; in atmel_sha_dma_init()
2654 static void atmel_sha_dma_cleanup(struct atmel_sha_dev *dd) in atmel_sha_dma_cleanup() argument
2656 dma_release_channel(dd->dma_lch_in.chan); in atmel_sha_dma_cleanup()
2659 static void atmel_sha_get_cap(struct atmel_sha_dev *dd) in atmel_sha_get_cap() argument
2662 dd->caps.has_dma = 0; in atmel_sha_get_cap()
2663 dd->caps.has_dualbuff = 0; in atmel_sha_get_cap()
2664 dd->caps.has_sha224 = 0; in atmel_sha_get_cap()
2665 dd->caps.has_sha_384_512 = 0; in atmel_sha_get_cap()
2666 dd->caps.has_uihv = 0; in atmel_sha_get_cap()
2667 dd->caps.has_hmac = 0; in atmel_sha_get_cap()
2670 switch (dd->hw_version & 0xff0) { in atmel_sha_get_cap()
2672 dd->caps.has_dma = 1; in atmel_sha_get_cap()
2673 dd->caps.has_dualbuff = 1; in atmel_sha_get_cap()
2674 dd->caps.has_sha224 = 1; in atmel_sha_get_cap()
2675 dd->caps.has_sha_384_512 = 1; in atmel_sha_get_cap()
2676 dd->caps.has_uihv = 1; in atmel_sha_get_cap()
2677 dd->caps.has_hmac = 1; in atmel_sha_get_cap()
2680 dd->caps.has_dma = 1; in atmel_sha_get_cap()
2681 dd->caps.has_dualbuff = 1; in atmel_sha_get_cap()
2682 dd->caps.has_sha224 = 1; in atmel_sha_get_cap()
2683 dd->caps.has_sha_384_512 = 1; in atmel_sha_get_cap()
2684 dd->caps.has_uihv = 1; in atmel_sha_get_cap()
2687 dd->caps.has_dma = 1; in atmel_sha_get_cap()
2688 dd->caps.has_dualbuff = 1; in atmel_sha_get_cap()
2689 dd->caps.has_sha224 = 1; in atmel_sha_get_cap()
2690 dd->caps.has_sha_384_512 = 1; in atmel_sha_get_cap()
2693 dd->caps.has_dma = 1; in atmel_sha_get_cap()
2694 dd->caps.has_dualbuff = 1; in atmel_sha_get_cap()
2695 dd->caps.has_sha224 = 1; in atmel_sha_get_cap()
2700 dev_warn(dd->dev, in atmel_sha_get_cap()
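
atmel_sha_get_cap() derives the feature set from the IP revision: every capability bit starts cleared, then a switch on (hw_version & 0xff0) enables what each revision supports, warning on unknown revisions. The driver spells each case out in full; since the feature sets nest, a fall-through version is an equivalent, more compact rendering (the revision codes below follow the listing's structure but should be treated as placeholders):

#include <stdint.h>
#include <string.h>

struct sha_caps_model {
	unsigned int has_dma : 1;
	unsigned int has_dualbuff : 1;
	unsigned int has_sha224 : 1;
	unsigned int has_sha_384_512 : 1;
	unsigned int has_uihv : 1;
	unsigned int has_hmac : 1;
};

static void sha_get_cap(struct sha_caps_model *caps, uint32_t hw_version)
{
	memset(caps, 0, sizeof(*caps));   /* default: oldest IP, no extras */

	switch (hw_version & 0xff0) {
	case 0x510:                        /* newest: everything, including HMAC */
		caps->has_hmac = 1;
		/* fall through */
	case 0x420:                        /* adds the UIHV state-restore feature */
		caps->has_uihv = 1;
		/* fall through */
	case 0x410:                        /* adds SHA-384/512 */
		caps->has_sha_384_512 = 1;
		/* fall through */
	case 0x400:                        /* baseline with DMA support */
		caps->has_dma = 1;
		caps->has_dualbuff = 1;
		caps->has_sha224 = 1;
		break;
	default:
		/* unknown revision: keep conservative defaults and warn */
		break;
	}
}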