Lines Matching refs:dd

107 	struct atmel_aes_dev	*dd;  member
345 static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset) in atmel_aes_read() argument
347 u32 value = readl_relaxed(dd->io_base + offset); in atmel_aes_read()
350 if (dd->flags & AES_FLAGS_DUMP_REG) { in atmel_aes_read()
353 dev_vdbg(dd->dev, "read 0x%08x from %s\n", value, in atmel_aes_read()
361 static inline void atmel_aes_write(struct atmel_aes_dev *dd, in atmel_aes_write() argument
365 if (dd->flags & AES_FLAGS_DUMP_REG) { in atmel_aes_write()
368 dev_vdbg(dd->dev, "write 0x%08x into %s\n", value, in atmel_aes_write()
373 writel_relaxed(value, dd->io_base + offset); in atmel_aes_write()
376 static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_read_n() argument
380 *value = atmel_aes_read(dd, offset); in atmel_aes_read_n()
383 static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_write_n() argument
387 atmel_aes_write(dd, offset, *value); in atmel_aes_write_n()
390 static inline void atmel_aes_read_block(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_read_block() argument
393 atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_read_block()
396 static inline void atmel_aes_write_block(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_write_block() argument
399 atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_write_block()
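
The four helpers above layer word-at-a-time and block-at-a-time access on the basic accessors; the listing elides their loop bodies. A minimal sketch consistent with the visible lines, assuming SIZE_IN_WORDS(x) is the usual byte-to-u32 conversion ((x) / sizeof(u32)):

	static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset,
				     u32 *value, int count)
	{
		/* walk the register window one 32-bit word at a time */
		for (; count--; value++, offset += 4)
			*value = atmel_aes_read(dd, offset);
	}

	static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset,
				      const u32 *value, int count)
	{
		for (; count--; value++, offset += 4)
			atmel_aes_write(dd, offset, *value);
	}
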
402 static inline int atmel_aes_wait_for_data_ready(struct atmel_aes_dev *dd, in atmel_aes_wait_for_data_ready() argument
405 u32 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_wait_for_data_ready()
408 return resume(dd); in atmel_aes_wait_for_data_ready()
410 dd->resume = resume; in atmel_aes_wait_for_data_ready()
411 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_wait_for_data_ready()
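
The lines above are the driver's core suspend/resume idiom: poll AES_ISR once and, if DATRDY is not yet set, park a resume callback and arm the interrupt instead of busy-waiting. A hedged reconstruction of the whole body (atmel_aes_resume_t, assumed here, would be a typedef for int (*)(struct atmel_aes_dev *)):

	static inline int atmel_aes_wait_for_data_ready(struct atmel_aes_dev *dd,
							atmel_aes_resume_t resume)
	{
		u32 isr = atmel_aes_read(dd, AES_ISR);

		if (unlikely(isr & AES_INT_DATARDY))
			return resume(dd);	/* already done: run inline */

		dd->resume = resume;		/* finish from the IRQ path */
		atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
		return -EINPROGRESS;
	}
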
427 if (!ctx->dd) { in atmel_aes_find_dev()
432 ctx->dd = aes_dd; in atmel_aes_find_dev()
434 aes_dd = ctx->dd; in atmel_aes_find_dev()
442 static int atmel_aes_hw_init(struct atmel_aes_dev *dd) in atmel_aes_hw_init() argument
446 err = clk_enable(dd->iclk); in atmel_aes_hw_init()
450 if (!(dd->flags & AES_FLAGS_INIT)) { in atmel_aes_hw_init()
451 atmel_aes_write(dd, AES_CR, AES_CR_SWRST); in atmel_aes_hw_init()
452 atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET); in atmel_aes_hw_init()
453 dd->flags |= AES_FLAGS_INIT; in atmel_aes_hw_init()
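
Bring-up is lazy and happens once per device. A sketch of the full function reconstructed around the visible lines (the error-return path is an assumption): enable the peripheral clock, then on first use issue a software reset and program the Mode Register's checksum-key field before marking the device initialized.

	static int atmel_aes_hw_init(struct atmel_aes_dev *dd)
	{
		int err;

		err = clk_enable(dd->iclk);
		if (err)
			return err;

		if (!(dd->flags & AES_FLAGS_INIT)) {
			atmel_aes_write(dd, AES_CR, AES_CR_SWRST);
			atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET);
			dd->flags |= AES_FLAGS_INIT;
		}

		return 0;
	}
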
459 static inline unsigned int atmel_aes_get_version(struct atmel_aes_dev *dd) in atmel_aes_get_version() argument
461 return atmel_aes_read(dd, AES_HW_VERSION) & 0x00000fff; in atmel_aes_get_version()
464 static int atmel_aes_hw_version_init(struct atmel_aes_dev *dd) in atmel_aes_hw_version_init() argument
468 err = atmel_aes_hw_init(dd); in atmel_aes_hw_version_init()
472 dd->hw_version = atmel_aes_get_version(dd); in atmel_aes_hw_version_init()
474 dev_info(dd->dev, "version: 0x%x\n", dd->hw_version); in atmel_aes_hw_version_init()
476 clk_disable(dd->iclk); in atmel_aes_hw_version_init()
480 static inline void atmel_aes_set_mode(struct atmel_aes_dev *dd, in atmel_aes_set_mode() argument
484 dd->flags = (dd->flags & AES_FLAGS_PERSISTENT) | rctx->mode; in atmel_aes_set_mode()
487 static inline bool atmel_aes_is_encrypt(const struct atmel_aes_dev *dd) in atmel_aes_is_encrypt() argument
489 return (dd->flags & AES_FLAGS_ENCRYPT); in atmel_aes_is_encrypt()
493 static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err);
496 static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err) in atmel_aes_complete() argument
499 atmel_aes_authenc_complete(dd, err); in atmel_aes_complete()
502 clk_disable(dd->iclk); in atmel_aes_complete()
503 dd->flags &= ~AES_FLAGS_BUSY; in atmel_aes_complete()
505 if (dd->is_async) in atmel_aes_complete()
506 dd->areq->complete(dd->areq, err); in atmel_aes_complete()
508 tasklet_schedule(&dd->queue_task); in atmel_aes_complete()
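
Every request, successful or not, funnels through this completion helper. Reconstructed with assumptions where lines are elided: the AEAD-specific teardown (line 499) runs first, the clock is released, BUSY is cleared, the result is reported to the caller only when the request actually went asynchronous, and the queue tasklet is kicked so the next queued request starts.

	static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
	{
		/* AEAD teardown (line 499) elided; its guard is not shown */
		clk_disable(dd->iclk);
		dd->flags &= ~AES_FLAGS_BUSY;

		if (dd->is_async)
			dd->areq->complete(dd->areq, err);

		tasklet_schedule(&dd->queue_task);
		return err;
	}
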
513 static void atmel_aes_write_ctrl_key(struct atmel_aes_dev *dd, bool use_dma, in atmel_aes_write_ctrl_key() argument
526 valmr |= dd->flags & AES_FLAGS_MODE_MASK; in atmel_aes_write_ctrl_key()
530 if (dd->caps.has_dualbuff) in atmel_aes_write_ctrl_key()
536 atmel_aes_write(dd, AES_MR, valmr); in atmel_aes_write_ctrl_key()
538 atmel_aes_write_n(dd, AES_KEYWR(0), key, SIZE_IN_WORDS(keylen)); in atmel_aes_write_ctrl_key()
541 atmel_aes_write_block(dd, AES_IVR(0), iv); in atmel_aes_write_ctrl_key()
544 static inline void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma, in atmel_aes_write_ctrl() argument
548 atmel_aes_write_ctrl_key(dd, use_dma, iv, in atmel_aes_write_ctrl()
549 dd->ctx->key, dd->ctx->keylen); in atmel_aes_write_ctrl()
554 static int atmel_aes_cpu_transfer(struct atmel_aes_dev *dd) in atmel_aes_cpu_transfer() argument
560 atmel_aes_read_block(dd, AES_ODATAR(0), dd->data); in atmel_aes_cpu_transfer()
561 dd->data += 4; in atmel_aes_cpu_transfer()
562 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_cpu_transfer()
564 if (dd->datalen < AES_BLOCK_SIZE) in atmel_aes_cpu_transfer()
567 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_cpu_transfer()
569 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_cpu_transfer()
571 dd->resume = atmel_aes_cpu_transfer; in atmel_aes_cpu_transfer()
572 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_cpu_transfer()
577 if (!sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst), in atmel_aes_cpu_transfer()
578 dd->buf, dd->total)) in atmel_aes_cpu_transfer()
582 return atmel_aes_complete(dd, err); in atmel_aes_cpu_transfer()
584 return dd->cpu_transfer_complete(dd); in atmel_aes_cpu_transfer()
587 static int atmel_aes_cpu_start(struct atmel_aes_dev *dd, in atmel_aes_cpu_start() argument
598 sg_copy_to_buffer(src, sg_nents(src), dd->buf, len); in atmel_aes_cpu_start()
600 dd->total = len; in atmel_aes_cpu_start()
601 dd->real_dst = dst; in atmel_aes_cpu_start()
602 dd->cpu_transfer_complete = resume; in atmel_aes_cpu_start()
603 dd->datalen = len + padlen; in atmel_aes_cpu_start()
604 dd->data = (u32 *)dd->buf; in atmel_aes_cpu_start()
605 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_cpu_start()
606 return atmel_aes_wait_for_data_ready(dd, atmel_aes_cpu_transfer); in atmel_aes_cpu_start()
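
The CPU (PIO) path bounces the request through dd->buf: copy in, pad up to the block size, write the first block, then let atmel_aes_cpu_transfer pump one block per DATRDY event. The padding helper used at line 603 is elided; a sketch of what it presumably computes:

	static unsigned int atmel_aes_padlen(size_t len, size_t block_size)
	{
		/* bytes needed to round len up to a whole cipher block;
		 * pad bytes go through the FIFO but are never copied back
		 * to dd->real_dst (dd->total keeps the true length) */
		len &= block_size - 1;
		return len ? block_size - len : 0;
	}
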
614 static bool atmel_aes_check_aligned(struct atmel_aes_dev *dd, in atmel_aes_check_aligned() argument
621 if (!IS_ALIGNED(len, dd->ctx->block_size)) in atmel_aes_check_aligned()
629 if (!IS_ALIGNED(len, dd->ctx->block_size)) in atmel_aes_check_aligned()
638 if (!IS_ALIGNED(sg->length, dd->ctx->block_size)) in atmel_aes_check_aligned()
664 static int atmel_aes_map(struct atmel_aes_dev *dd, in atmel_aes_map() argument
672 dd->total = len; in atmel_aes_map()
673 dd->src.sg = src; in atmel_aes_map()
674 dd->dst.sg = dst; in atmel_aes_map()
675 dd->real_dst = dst; in atmel_aes_map()
677 src_aligned = atmel_aes_check_aligned(dd, src, len, &dd->src); in atmel_aes_map()
681 dst_aligned = atmel_aes_check_aligned(dd, dst, len, &dd->dst); in atmel_aes_map()
683 padlen = atmel_aes_padlen(len, dd->ctx->block_size); in atmel_aes_map()
685 if (dd->buflen < len + padlen) in atmel_aes_map()
689 sg_copy_to_buffer(src, sg_nents(src), dd->buf, len); in atmel_aes_map()
690 dd->src.sg = &dd->aligned_sg; in atmel_aes_map()
691 dd->src.nents = 1; in atmel_aes_map()
692 dd->src.remainder = 0; in atmel_aes_map()
696 dd->dst.sg = &dd->aligned_sg; in atmel_aes_map()
697 dd->dst.nents = 1; in atmel_aes_map()
698 dd->dst.remainder = 0; in atmel_aes_map()
701 sg_init_table(&dd->aligned_sg, 1); in atmel_aes_map()
702 sg_set_buf(&dd->aligned_sg, dd->buf, len + padlen); in atmel_aes_map()
705 if (dd->src.sg == dd->dst.sg) { in atmel_aes_map()
706 dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
708 dd->dst.sg_len = dd->src.sg_len; in atmel_aes_map()
709 if (!dd->src.sg_len) in atmel_aes_map()
712 dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
714 if (!dd->src.sg_len) in atmel_aes_map()
717 dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents, in atmel_aes_map()
719 if (!dd->dst.sg_len) { in atmel_aes_map()
720 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
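
The mapping policy deserves spelling out: misaligned requests were already bounced into dd->buf via dd->aligned_sg (lines 689-702), so both sides are DMA-able by this point. A sketch of the branch above with the direction constants filled in as assumptions, consistent with standard dma_map_sg() usage — in-place requests map one list bidirectionally, split requests unmap the source again if the destination mapping fails:

	if (dd->src.sg == dd->dst.sg) {
		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
					    DMA_BIDIRECTIONAL);
		dd->dst.sg_len = dd->src.sg_len;
		if (!dd->src.sg_len)
			return -EFAULT;
	} else {
		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
					    DMA_TO_DEVICE);
		if (!dd->src.sg_len)
			return -EFAULT;

		dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents,
					    DMA_FROM_DEVICE);
		if (!dd->dst.sg_len) {
			dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
				     DMA_TO_DEVICE);
			return -EFAULT;
		}
	}
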
729 static void atmel_aes_unmap(struct atmel_aes_dev *dd) in atmel_aes_unmap() argument
731 if (dd->src.sg == dd->dst.sg) { in atmel_aes_unmap()
732 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_unmap()
735 if (dd->src.sg != &dd->aligned_sg) in atmel_aes_unmap()
736 atmel_aes_restore_sg(&dd->src); in atmel_aes_unmap()
738 dma_unmap_sg(dd->dev, dd->dst.sg, dd->dst.nents, in atmel_aes_unmap()
741 if (dd->dst.sg != &dd->aligned_sg) in atmel_aes_unmap()
742 atmel_aes_restore_sg(&dd->dst); in atmel_aes_unmap()
744 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_unmap()
747 if (dd->src.sg != &dd->aligned_sg) in atmel_aes_unmap()
748 atmel_aes_restore_sg(&dd->src); in atmel_aes_unmap()
751 if (dd->dst.sg == &dd->aligned_sg) in atmel_aes_unmap()
752 sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst), in atmel_aes_unmap()
753 dd->buf, dd->total); in atmel_aes_unmap()
756 static int atmel_aes_dma_transfer_start(struct atmel_aes_dev *dd, in atmel_aes_dma_transfer_start() argument
776 dma = &dd->src; in atmel_aes_dma_transfer_start()
778 config.dst_addr = dd->phys_base + AES_IDATAR(0); in atmel_aes_dma_transfer_start()
782 dma = &dd->dst; in atmel_aes_dma_transfer_start()
784 config.src_addr = dd->phys_base + AES_ODATAR(0); in atmel_aes_dma_transfer_start()
801 desc->callback_param = dd; in atmel_aes_dma_transfer_start()
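
atmel_aes_dma_transfer_start() is a textbook dmaengine slave transfer: point the channel at the AES FIFO (input FIFO for MEM_TO_DEV, output FIFO for DEV_TO_MEM, exactly as lines 778/784 show), prep a descriptor over the mapped scatterlist, attach the callback, submit, and issue. A trimmed sketch using only standard <linux/dmaengine.h> calls; address widths, bursts, and error unwinding are omitted:

	struct dma_slave_config config = { .direction = dir };
	struct dma_async_tx_descriptor *desc;
	struct atmel_aes_dma *dma;	/* per-direction channel bookkeeping */

	if (dir == DMA_MEM_TO_DEV) {
		dma = &dd->src;
		config.dst_addr = dd->phys_base + AES_IDATAR(0);
	} else {
		dma = &dd->dst;
		config.src_addr = dd->phys_base + AES_ODATAR(0);
	}

	if (dmaengine_slave_config(dma->chan, &config))
		return -EINVAL;

	desc = dmaengine_prep_slave_sg(dma->chan, dma->sg, dma->sg_len, dir,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -ENOMEM;

	desc->callback = atmel_aes_dma_callback;
	desc->callback_param = dd;	/* line 801 */
	dmaengine_submit(desc);
	dma_async_issue_pending(dma->chan);
	return 0;
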
808 static void atmel_aes_dma_transfer_stop(struct atmel_aes_dev *dd, in atmel_aes_dma_transfer_stop() argument
815 dma = &dd->src; in atmel_aes_dma_transfer_stop()
819 dma = &dd->dst; in atmel_aes_dma_transfer_stop()
829 static int atmel_aes_dma_start(struct atmel_aes_dev *dd, in atmel_aes_dma_start() argument
839 switch (dd->ctx->block_size) { in atmel_aes_dma_start()
858 maxburst = dd->caps.max_burst_size; in atmel_aes_dma_start()
866 err = atmel_aes_map(dd, src, dst, len); in atmel_aes_dma_start()
870 dd->resume = resume; in atmel_aes_dma_start()
873 err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_DEV_TO_MEM, in atmel_aes_dma_start()
879 err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_MEM_TO_DEV, in atmel_aes_dma_start()
887 atmel_aes_dma_transfer_stop(dd, DMA_DEV_TO_MEM); in atmel_aes_dma_start()
889 atmel_aes_unmap(dd); in atmel_aes_dma_start()
891 return atmel_aes_complete(dd, err); in atmel_aes_dma_start()
894 static void atmel_aes_dma_stop(struct atmel_aes_dev *dd) in atmel_aes_dma_stop() argument
896 atmel_aes_dma_transfer_stop(dd, DMA_MEM_TO_DEV); in atmel_aes_dma_stop()
897 atmel_aes_dma_transfer_stop(dd, DMA_DEV_TO_MEM); in atmel_aes_dma_stop()
898 atmel_aes_unmap(dd); in atmel_aes_dma_stop()
903 struct atmel_aes_dev *dd = data; in atmel_aes_dma_callback() local
905 atmel_aes_dma_stop(dd); in atmel_aes_dma_callback()
906 dd->is_async = true; in atmel_aes_dma_callback()
907 (void)dd->resume(dd); in atmel_aes_dma_callback()
910 static int atmel_aes_handle_queue(struct atmel_aes_dev *dd, in atmel_aes_handle_queue() argument
919 spin_lock_irqsave(&dd->lock, flags); in atmel_aes_handle_queue()
921 ret = crypto_enqueue_request(&dd->queue, new_areq); in atmel_aes_handle_queue()
922 if (dd->flags & AES_FLAGS_BUSY) { in atmel_aes_handle_queue()
923 spin_unlock_irqrestore(&dd->lock, flags); in atmel_aes_handle_queue()
926 backlog = crypto_get_backlog(&dd->queue); in atmel_aes_handle_queue()
927 areq = crypto_dequeue_request(&dd->queue); in atmel_aes_handle_queue()
929 dd->flags |= AES_FLAGS_BUSY; in atmel_aes_handle_queue()
930 spin_unlock_irqrestore(&dd->lock, flags); in atmel_aes_handle_queue()
940 dd->areq = areq; in atmel_aes_handle_queue()
941 dd->ctx = ctx; in atmel_aes_handle_queue()
943 dd->is_async = start_async; in atmel_aes_handle_queue()
946 err = ctx->start(dd); in atmel_aes_handle_queue()
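
The queue handler is the standard crypto_queue pump, and the elided lines fit the usual shape. A hedged reconstruction (local declarations and the NULL-request guard are assumptions; a NULL new_areq is exactly how the queue tasklet at line 2413 re-enters this function):

	static int atmel_aes_handle_queue(struct atmel_aes_dev *dd,
					  struct crypto_async_request *new_areq)
	{
		struct crypto_async_request *areq, *backlog;
		struct atmel_aes_base_ctx *ctx;
		unsigned long flags;
		bool start_async;
		int err, ret = 0;

		spin_lock_irqsave(&dd->lock, flags);
		if (new_areq)
			ret = crypto_enqueue_request(&dd->queue, new_areq);
		if (dd->flags & AES_FLAGS_BUSY) {
			spin_unlock_irqrestore(&dd->lock, flags);
			return ret;
		}
		backlog = crypto_get_backlog(&dd->queue);
		areq = crypto_dequeue_request(&dd->queue);
		if (areq)
			dd->flags |= AES_FLAGS_BUSY;
		spin_unlock_irqrestore(&dd->lock, flags);

		if (!areq)
			return ret;

		if (backlog)
			backlog->complete(backlog, -EINPROGRESS);

		ctx = crypto_tfm_ctx(areq->tfm);

		dd->areq = areq;
		dd->ctx = ctx;
		start_async = (areq != new_areq);
		dd->is_async = start_async;

		/* note: ctx->start() may itself flip dd->is_async */
		err = ctx->start(dd);
		return (start_async) ? ret : err;
	}
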
953 static int atmel_aes_transfer_complete(struct atmel_aes_dev *dd) in atmel_aes_transfer_complete() argument
955 return atmel_aes_complete(dd, 0); in atmel_aes_transfer_complete()
958 static int atmel_aes_start(struct atmel_aes_dev *dd) in atmel_aes_start() argument
960 struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); in atmel_aes_start()
963 dd->ctx->block_size != AES_BLOCK_SIZE); in atmel_aes_start()
966 atmel_aes_set_mode(dd, rctx); in atmel_aes_start()
968 err = atmel_aes_hw_init(dd); in atmel_aes_start()
970 return atmel_aes_complete(dd, err); in atmel_aes_start()
972 atmel_aes_write_ctrl(dd, use_dma, req->info); in atmel_aes_start()
974 return atmel_aes_dma_start(dd, req->src, req->dst, req->nbytes, in atmel_aes_start()
977 return atmel_aes_cpu_start(dd, req->src, req->dst, req->nbytes, in atmel_aes_start()
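
The use_dma predicate is only half visible (line 963). It presumably follows the usual size cut-over — small requests go through the CPU/PIO path, large ones (or odd block sizes such as CFB64) through DMA — with ATMEL_AES_DMA_THRESHOLD as the driver's boundary constant:

	bool use_dma = (req->nbytes >= ATMEL_AES_DMA_THRESHOLD ||
			dd->ctx->block_size != AES_BLOCK_SIZE);
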
987 static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd) in atmel_aes_ctr_transfer() argument
989 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx); in atmel_aes_ctr_transfer()
990 struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); in atmel_aes_ctr_transfer()
998 ctx->offset += dd->total; in atmel_aes_ctr_transfer()
1000 return atmel_aes_transfer_complete(dd); in atmel_aes_ctr_transfer()
1025 atmel_aes_write_ctrl(dd, use_dma, ctx->iv); in atmel_aes_ctr_transfer()
1036 return atmel_aes_dma_start(dd, src, dst, datalen, in atmel_aes_ctr_transfer()
1039 return atmel_aes_cpu_start(dd, src, dst, datalen, in atmel_aes_ctr_transfer()
1043 static int atmel_aes_ctr_start(struct atmel_aes_dev *dd) in atmel_aes_ctr_start() argument
1045 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx); in atmel_aes_ctr_start()
1046 struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); in atmel_aes_ctr_start()
1050 atmel_aes_set_mode(dd, rctx); in atmel_aes_ctr_start()
1052 err = atmel_aes_hw_init(dd); in atmel_aes_ctr_start()
1054 return atmel_aes_complete(dd, err); in atmel_aes_ctr_start()
1058 dd->total = 0; in atmel_aes_ctr_start()
1059 return atmel_aes_ctr_transfer(dd); in atmel_aes_ctr_start()
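
CTR is the one mode here with walking state: ctx->offset advances by dd->total after each chunk (line 998), and the elided middle of atmel_aes_ctr_transfer sizes the next chunk so the 32-bit big-endian counter in the last IV word never wraps mid-transfer. A hypothetical helper showing the increment convention:

	static void aes_ctr_bump(__be32 iv[4], u32 nblocks)
	{
		/* only the low 32 bits count up, most-significant byte
		 * first; the caller splits the transfer before this
		 * addition would wrap */
		iv[3] = cpu_to_be32(be32_to_cpu(iv[3]) + nblocks);
	}
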
1066 struct atmel_aes_dev *dd; in atmel_aes_crypt() local
1091 dd = atmel_aes_find_dev(ctx); in atmel_aes_crypt()
1092 if (!dd) in atmel_aes_crypt()
1098 return atmel_aes_handle_queue(dd, &req->base); in atmel_aes_crypt()
1428 static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
1432 static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd);
1433 static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd);
1435 static int atmel_aes_gcm_start(struct atmel_aes_dev *dd);
1436 static int atmel_aes_gcm_process(struct atmel_aes_dev *dd);
1437 static int atmel_aes_gcm_length(struct atmel_aes_dev *dd);
1438 static int atmel_aes_gcm_data(struct atmel_aes_dev *dd);
1439 static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd);
1440 static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd);
1441 static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd);
1449 static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd, in atmel_aes_gcm_ghash() argument
1454 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash()
1456 dd->data = (u32 *)data; in atmel_aes_gcm_ghash()
1457 dd->datalen = datalen; in atmel_aes_gcm_ghash()
1462 atmel_aes_write_ctrl(dd, false, NULL); in atmel_aes_gcm_ghash()
1463 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_ghash_init); in atmel_aes_gcm_ghash()
1466 static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd) in atmel_aes_gcm_ghash_init() argument
1468 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash_init()
1471 atmel_aes_write(dd, AES_AADLENR, dd->total); in atmel_aes_gcm_ghash_init()
1472 atmel_aes_write(dd, AES_CLENR, 0); in atmel_aes_gcm_ghash_init()
1476 atmel_aes_write_block(dd, AES_GHASHR(0), ctx->ghash_in); in atmel_aes_gcm_ghash_init()
1478 return atmel_aes_gcm_ghash_finalize(dd); in atmel_aes_gcm_ghash_init()
1481 static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd) in atmel_aes_gcm_ghash_finalize() argument
1483 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash_finalize()
1487 while (dd->datalen > 0) { in atmel_aes_gcm_ghash_finalize()
1488 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_gcm_ghash_finalize()
1489 dd->data += 4; in atmel_aes_gcm_ghash_finalize()
1490 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_gcm_ghash_finalize()
1492 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_gcm_ghash_finalize()
1494 dd->resume = atmel_aes_gcm_ghash_finalize; in atmel_aes_gcm_ghash_finalize()
1495 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_gcm_ghash_finalize()
1501 atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash_out); in atmel_aes_gcm_ghash_finalize()
1503 return ctx->ghash_resume(dd); in atmel_aes_gcm_ghash_finalize()
1507 static int atmel_aes_gcm_start(struct atmel_aes_dev *dd) in atmel_aes_gcm_start() argument
1509 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_start()
1510 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_start()
1516 u8 *data = dd->buf; in atmel_aes_gcm_start()
1519 atmel_aes_set_mode(dd, rctx); in atmel_aes_gcm_start()
1521 err = atmel_aes_hw_init(dd); in atmel_aes_gcm_start()
1523 return atmel_aes_complete(dd, err); in atmel_aes_gcm_start()
1528 return atmel_aes_gcm_process(dd); in atmel_aes_gcm_start()
1533 if (datalen > dd->buflen) in atmel_aes_gcm_start()
1534 return atmel_aes_complete(dd, -EINVAL); in atmel_aes_gcm_start()
1540 return atmel_aes_gcm_ghash(dd, (const u32 *)data, datalen, in atmel_aes_gcm_start()
1544 static int atmel_aes_gcm_process(struct atmel_aes_dev *dd) in atmel_aes_gcm_process() argument
1546 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_process()
1547 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_process()
1549 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_gcm_process()
1561 dd->flags |= AES_FLAGS_GTAGEN; in atmel_aes_gcm_process()
1563 atmel_aes_write_ctrl(dd, false, NULL); in atmel_aes_gcm_process()
1564 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_length); in atmel_aes_gcm_process()
1567 static int atmel_aes_gcm_length(struct atmel_aes_dev *dd) in atmel_aes_gcm_length() argument
1569 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_length()
1570 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_length()
1577 atmel_aes_write_block(dd, AES_IVR(0), j0); in atmel_aes_gcm_length()
1581 atmel_aes_write(dd, AES_AADLENR, req->assoclen); in atmel_aes_gcm_length()
1582 atmel_aes_write(dd, AES_CLENR, ctx->textlen); in atmel_aes_gcm_length()
1586 dd->datalen = 0; in atmel_aes_gcm_length()
1587 return atmel_aes_gcm_data(dd); in atmel_aes_gcm_length()
1592 if (unlikely(req->assoclen + padlen > dd->buflen)) in atmel_aes_gcm_length()
1593 return atmel_aes_complete(dd, -EINVAL); in atmel_aes_gcm_length()
1594 sg_copy_to_buffer(req->src, sg_nents(req->src), dd->buf, req->assoclen); in atmel_aes_gcm_length()
1597 dd->data = (u32 *)dd->buf; in atmel_aes_gcm_length()
1598 dd->datalen = req->assoclen + padlen; in atmel_aes_gcm_length()
1599 return atmel_aes_gcm_data(dd); in atmel_aes_gcm_length()
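
For reference, the j0 block loaded into AES_IVR at line 1577 is the pre-counter block of NIST SP 800-38D: with the common 96-bit IV it is simply J0 = IV || 0^31 || 1 (any other IV length is derived through GHASH instead, which is one job of the ghash helpers above). A hypothetical constructor for the 96-bit case:

	static void gcm_j0_from_96bit_iv(__be32 j0[4], const u8 iv[12])
	{
		memcpy(j0, iv, 12);		/* J0[0..95]   = IV        */
		j0[3] = cpu_to_be32(1);		/* J0[96..127] = 0^31 || 1 */
	}
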
1602 static int atmel_aes_gcm_data(struct atmel_aes_dev *dd) in atmel_aes_gcm_data() argument
1604 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_data()
1605 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_data()
1611 while (dd->datalen > 0) { in atmel_aes_gcm_data()
1612 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_gcm_data()
1613 dd->data += 4; in atmel_aes_gcm_data()
1614 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_gcm_data()
1616 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_gcm_data()
1618 dd->resume = atmel_aes_gcm_data; in atmel_aes_gcm_data()
1619 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_gcm_data()
1626 return atmel_aes_gcm_tag_init(dd); in atmel_aes_gcm_data()
1635 mr = atmel_aes_read(dd, AES_MR); in atmel_aes_gcm_data()
1638 if (dd->caps.has_dualbuff) in atmel_aes_gcm_data()
1640 atmel_aes_write(dd, AES_MR, mr); in atmel_aes_gcm_data()
1642 return atmel_aes_dma_start(dd, src, dst, ctx->textlen, in atmel_aes_gcm_data()
1646 return atmel_aes_cpu_start(dd, src, dst, ctx->textlen, in atmel_aes_gcm_data()
1650 static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd) in atmel_aes_gcm_tag_init() argument
1652 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_tag_init()
1653 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_tag_init()
1654 u64 *data = dd->buf; in atmel_aes_gcm_tag_init()
1656 if (likely(dd->flags & AES_FLAGS_GTAGEN)) { in atmel_aes_gcm_tag_init()
1657 if (!(atmel_aes_read(dd, AES_ISR) & AES_INT_TAGRDY)) { in atmel_aes_gcm_tag_init()
1658 dd->resume = atmel_aes_gcm_tag_init; in atmel_aes_gcm_tag_init()
1659 atmel_aes_write(dd, AES_IER, AES_INT_TAGRDY); in atmel_aes_gcm_tag_init()
1663 return atmel_aes_gcm_finalize(dd); in atmel_aes_gcm_tag_init()
1667 atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash); in atmel_aes_gcm_tag_init()
1672 return atmel_aes_gcm_ghash(dd, (const u32 *)data, AES_BLOCK_SIZE, in atmel_aes_gcm_tag_init()
1676 static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd) in atmel_aes_gcm_tag() argument
1678 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_tag()
1685 flags = dd->flags; in atmel_aes_gcm_tag()
1686 dd->flags &= ~(AES_FLAGS_OPMODE_MASK | AES_FLAGS_GTAGEN); in atmel_aes_gcm_tag()
1687 dd->flags |= AES_FLAGS_CTR; in atmel_aes_gcm_tag()
1688 atmel_aes_write_ctrl(dd, false, ctx->j0); in atmel_aes_gcm_tag()
1689 dd->flags = flags; in atmel_aes_gcm_tag()
1691 atmel_aes_write_block(dd, AES_IDATAR(0), ctx->ghash); in atmel_aes_gcm_tag()
1692 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_finalize); in atmel_aes_gcm_tag()
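
This fallback computes the GCM tag by hand: with S = GHASH(H, A || C) already captured in ctx->ghash, the tag is T = E_K(J0) XOR S, i.e. one counter-mode block over S with the counter preloaded to J0. That is why the code above temporarily masks the mode flags, switches the engine to CTR seeded with ctx->j0, and pushes the GHASH value through the data FIFO. The arithmetic, as a hypothetical software helper:

	static void gcm_tag_from_ghash(u32 tag[4], const u32 ekj0[4],
				       const u32 ghash[4])
	{
		int i;

		/* ekj0 stands for AES_K(J0); in the code above the
		 * hardware CTR pass performs this XOR for us */
		for (i = 0; i < 4; i++)
			tag[i] = ekj0[i] ^ ghash[i];
	}
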
1695 static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd) in atmel_aes_gcm_finalize() argument
1697 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_finalize()
1698 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_finalize()
1700 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_gcm_finalize()
1705 if (likely(dd->flags & AES_FLAGS_GTAGEN)) in atmel_aes_gcm_finalize()
1706 atmel_aes_read_block(dd, AES_TAGR(0), ctx->tag); in atmel_aes_gcm_finalize()
1708 atmel_aes_read_block(dd, AES_ODATAR(0), ctx->tag); in atmel_aes_gcm_finalize()
1720 return atmel_aes_complete(dd, err); in atmel_aes_gcm_finalize()
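
The elided tail (lines 1709-1719) is where encrypt and decrypt diverge. A sketch consistent with standard AEAD drivers, with authsize assumed to come from crypto_aead_authsize(): on encryption the tag is appended after the ciphertext, on decryption the received tag is compared in constant time and -EBADMSG returned on mismatch.

	if (likely(enc)) {
		scatterwalk_map_and_copy(ctx->tag, req->dst,
					 req->assoclen + ctx->textlen,
					 authsize, 1);
		err = 0;
	} else {
		u32 itag[AES_BLOCK_SIZE / sizeof(u32)];

		scatterwalk_map_and_copy(itag, req->src,
					 req->assoclen + ctx->textlen,
					 authsize, 0);
		err = crypto_memneq(itag, ctx->tag, authsize) ? -EBADMSG : 0;
	}
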
1728 struct atmel_aes_dev *dd; in atmel_aes_gcm_crypt() local
1733 dd = atmel_aes_find_dev(ctx); in atmel_aes_gcm_crypt()
1734 if (!dd) in atmel_aes_gcm_crypt()
1740 return atmel_aes_handle_queue(dd, &req->base); in atmel_aes_gcm_crypt()
1837 static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd);
1839 static int atmel_aes_xts_start(struct atmel_aes_dev *dd) in atmel_aes_xts_start() argument
1841 struct atmel_aes_xts_ctx *ctx = atmel_aes_xts_ctx_cast(dd->ctx); in atmel_aes_xts_start()
1842 struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); in atmel_aes_xts_start()
1847 atmel_aes_set_mode(dd, rctx); in atmel_aes_xts_start()
1849 err = atmel_aes_hw_init(dd); in atmel_aes_xts_start()
1851 return atmel_aes_complete(dd, err); in atmel_aes_xts_start()
1854 flags = dd->flags; in atmel_aes_xts_start()
1855 dd->flags &= ~AES_FLAGS_MODE_MASK; in atmel_aes_xts_start()
1856 dd->flags |= (AES_FLAGS_ECB | AES_FLAGS_ENCRYPT); in atmel_aes_xts_start()
1857 atmel_aes_write_ctrl_key(dd, false, NULL, in atmel_aes_xts_start()
1859 dd->flags = flags; in atmel_aes_xts_start()
1861 atmel_aes_write_block(dd, AES_IDATAR(0), req->info); in atmel_aes_xts_start()
1862 return atmel_aes_wait_for_data_ready(dd, atmel_aes_xts_process_data); in atmel_aes_xts_start()
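
XTS reuses the "borrow the engine" dance seen in the GCM tag path (lines 1685-1689): save dd->flags, force ECB-encrypt, run one block, restore. Here the borrowed block computes the tweak T = E_K2(IV); ctx->key2 and its length below are assumed field names for the second XTS key, which line 1858 elides:

	flags = dd->flags;
	dd->flags &= ~AES_FLAGS_MODE_MASK;
	dd->flags |= (AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
	atmel_aes_write_ctrl_key(dd, false, NULL, ctx->key2, ctx->key2_len);
	dd->flags = flags;

	atmel_aes_write_block(dd, AES_IDATAR(0), req->info);	/* IV in */
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_xts_process_data);
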
1865 static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd) in atmel_aes_xts_process_data() argument
1867 struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); in atmel_aes_xts_process_data()
1875 atmel_aes_read_block(dd, AES_ODATAR(0), tweak); in atmel_aes_xts_process_data()
1889 atmel_aes_write_ctrl(dd, use_dma, NULL); in atmel_aes_xts_process_data()
1890 atmel_aes_write_block(dd, AES_TWR(0), tweak); in atmel_aes_xts_process_data()
1891 atmel_aes_write_block(dd, AES_ALPHAR(0), one); in atmel_aes_xts_process_data()
1893 return atmel_aes_dma_start(dd, req->src, req->dst, req->nbytes, in atmel_aes_xts_process_data()
1896 return atmel_aes_cpu_start(dd, req->src, req->dst, req->nbytes, in atmel_aes_xts_process_data()
1962 static int atmel_aes_authenc_start(struct atmel_aes_dev *dd);
1963 static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
1965 static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
1967 static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd);
1968 static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
1971 static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err) in atmel_aes_authenc_complete() argument
1973 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_complete()
1976 if (err && (dd->flags & AES_FLAGS_OWN_SHA)) in atmel_aes_authenc_complete()
1978 dd->flags &= ~AES_FLAGS_OWN_SHA; in atmel_aes_authenc_complete()
1981 static int atmel_aes_authenc_start(struct atmel_aes_dev *dd) in atmel_aes_authenc_start() argument
1983 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_start()
1989 atmel_aes_set_mode(dd, &rctx->base); in atmel_aes_authenc_start()
1991 err = atmel_aes_hw_init(dd); in atmel_aes_authenc_start()
1993 return atmel_aes_complete(dd, err); in atmel_aes_authenc_start()
1996 atmel_aes_authenc_init, dd); in atmel_aes_authenc_start()
1999 static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err, in atmel_aes_authenc_init() argument
2002 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_init()
2006 dd->is_async = true; in atmel_aes_authenc_init()
2008 return atmel_aes_complete(dd, err); in atmel_aes_authenc_init()
2011 dd->flags |= AES_FLAGS_OWN_SHA; in atmel_aes_authenc_init()
2017 atmel_aes_authenc_transfer, dd); in atmel_aes_authenc_init()
2020 static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err, in atmel_aes_authenc_transfer() argument
2023 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_transfer()
2025 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_authenc_transfer()
2031 dd->is_async = true; in atmel_aes_authenc_transfer()
2033 return atmel_aes_complete(dd, err); in atmel_aes_authenc_transfer()
2052 atmel_aes_write_ctrl(dd, true, iv); in atmel_aes_authenc_transfer()
2056 atmel_aes_write(dd, AES_EMR, emr); in atmel_aes_authenc_transfer()
2059 return atmel_aes_dma_start(dd, src, dst, rctx->textlen, in atmel_aes_authenc_transfer()
2063 static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd) in atmel_aes_authenc_digest() argument
2065 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_digest()
2069 dd->flags &= ~AES_FLAGS_OWN_SHA; in atmel_aes_authenc_digest()
2072 atmel_aes_authenc_final, dd); in atmel_aes_authenc_digest()
2075 static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err, in atmel_aes_authenc_final() argument
2078 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_final()
2081 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_authenc_final()
2086 dd->is_async = true; in atmel_aes_authenc_final()
2101 return atmel_aes_complete(dd, err); in atmel_aes_authenc_final()
2199 struct atmel_aes_dev *dd; in atmel_aes_authenc_crypt() local
2217 dd = atmel_aes_find_dev(ctx); in atmel_aes_authenc_crypt()
2218 if (!dd) in atmel_aes_authenc_crypt()
2221 return atmel_aes_handle_queue(dd, &req->base); in atmel_aes_authenc_crypt()
2340 static int atmel_aes_buff_init(struct atmel_aes_dev *dd) in atmel_aes_buff_init() argument
2342 dd->buf = (void *)__get_free_pages(GFP_KERNEL, ATMEL_AES_BUFFER_ORDER); in atmel_aes_buff_init()
2343 dd->buflen = ATMEL_AES_BUFFER_SIZE; in atmel_aes_buff_init()
2344 dd->buflen &= ~(AES_BLOCK_SIZE - 1); in atmel_aes_buff_init()
2346 if (!dd->buf) { in atmel_aes_buff_init()
2347 dev_err(dd->dev, "unable to alloc pages.\n"); in atmel_aes_buff_init()
2354 static void atmel_aes_buff_cleanup(struct atmel_aes_dev *dd) in atmel_aes_buff_cleanup() argument
2356 free_page((unsigned long)dd->buf); in atmel_aes_buff_cleanup()
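
One detail worth flagging: the buffer is allocated with a page order, so if ATMEL_AES_BUFFER_ORDER is non-zero the matching release is free_pages() with the same order — free_page() alone would return only the first page. A matched pair, for comparison:

	dd->buf = (void *)__get_free_pages(GFP_KERNEL, ATMEL_AES_BUFFER_ORDER);
	/* ... buffer in use ... */
	free_pages((unsigned long)dd->buf, ATMEL_AES_BUFFER_ORDER);
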
2371 static int atmel_aes_dma_init(struct atmel_aes_dev *dd, in atmel_aes_dma_init() argument
2383 dd->src.chan = dma_request_slave_channel_compat(mask, atmel_aes_filter, in atmel_aes_dma_init()
2384 slave, dd->dev, "tx"); in atmel_aes_dma_init()
2385 if (!dd->src.chan) in atmel_aes_dma_init()
2389 dd->dst.chan = dma_request_slave_channel_compat(mask, atmel_aes_filter, in atmel_aes_dma_init()
2390 slave, dd->dev, "rx"); in atmel_aes_dma_init()
2391 if (!dd->dst.chan) in atmel_aes_dma_init()
2397 dma_release_channel(dd->src.chan); in atmel_aes_dma_init()
2399 dev_warn(dd->dev, "no DMA channel available\n"); in atmel_aes_dma_init()
2403 static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd) in atmel_aes_dma_cleanup() argument
2405 dma_release_channel(dd->dst.chan); in atmel_aes_dma_cleanup()
2406 dma_release_channel(dd->src.chan); in atmel_aes_dma_cleanup()
2411 struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data; in atmel_aes_queue_task() local
2413 atmel_aes_handle_queue(dd, NULL); in atmel_aes_queue_task()
2418 struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data; in atmel_aes_done_task() local
2420 dd->is_async = true; in atmel_aes_done_task()
2421 (void)dd->resume(dd); in atmel_aes_done_task()
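
Both tasklets take the device as their unsigned long cookie, per the casts at lines 2411 and 2418: queue_task re-enters the queue pump with no new request, while done_task resumes whatever state-machine step parked itself in dd->resume. Assumed probe-time wiring, matching standard tasklet usage:

	tasklet_init(&dd->done_task, atmel_aes_done_task, (unsigned long)dd);
	tasklet_init(&dd->queue_task, atmel_aes_queue_task, (unsigned long)dd);
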
2442 static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd) in atmel_aes_unregister_algs() argument
2447 if (dd->caps.has_authenc) in atmel_aes_unregister_algs()
2452 if (dd->caps.has_xts) in atmel_aes_unregister_algs()
2455 if (dd->caps.has_gcm) in atmel_aes_unregister_algs()
2458 if (dd->caps.has_cfb64) in atmel_aes_unregister_algs()
2465 static int atmel_aes_register_algs(struct atmel_aes_dev *dd) in atmel_aes_register_algs() argument
2475 if (dd->caps.has_cfb64) { in atmel_aes_register_algs()
2481 if (dd->caps.has_gcm) { in atmel_aes_register_algs()
2487 if (dd->caps.has_xts) { in atmel_aes_register_algs()
2494 if (dd->caps.has_authenc) { in atmel_aes_register_algs()
2525 static void atmel_aes_get_cap(struct atmel_aes_dev *dd) in atmel_aes_get_cap() argument
2527 dd->caps.has_dualbuff = 0; in atmel_aes_get_cap()
2528 dd->caps.has_cfb64 = 0; in atmel_aes_get_cap()
2529 dd->caps.has_gcm = 0; in atmel_aes_get_cap()
2530 dd->caps.has_xts = 0; in atmel_aes_get_cap()
2531 dd->caps.has_authenc = 0; in atmel_aes_get_cap()
2532 dd->caps.max_burst_size = 1; in atmel_aes_get_cap()
2535 switch (dd->hw_version & 0xff0) { in atmel_aes_get_cap()
2537 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2538 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
2539 dd->caps.has_gcm = 1; in atmel_aes_get_cap()
2540 dd->caps.has_xts = 1; in atmel_aes_get_cap()
2541 dd->caps.has_authenc = 1; in atmel_aes_get_cap()
2542 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2545 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2546 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
2547 dd->caps.has_gcm = 1; in atmel_aes_get_cap()
2548 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2551 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2552 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
2553 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2558 dev_warn(dd->dev, in atmel_aes_get_cap()