• Home
  • Raw
  • Download

Lines Matching +full:at91sam9g46 +full:- +full:tdes

1 // SPDX-License-Identifier: GPL-2.0
7 * Copyright (c) 2012 Eukréa Electromatique - ATMEL
10 * Some ideas are from omap-aes.c drivers.
30 #include <linux/dma-mapping.h>
38 #include "atmel-tdes-regs.h"
43 /* Reserve bits [17:16], [13:12], [2:0] for AES Mode Register */
156 count = min((*sg)->length - *offset, total); in atmel_tdes_sg_copy()
165 buflen -= count; in atmel_tdes_sg_copy()
167 total -= count; in atmel_tdes_sg_copy()
169 if (*offset == (*sg)->length) { in atmel_tdes_sg_copy()
183 return readl_relaxed(dd->io_base + offset); in atmel_tdes_read()
189 writel_relaxed(value, dd->io_base + offset); in atmel_tdes_write()
195 for (; count--; value++, offset += 4) in atmel_tdes_write_n()
215 err = clk_prepare_enable(dd->iclk); in atmel_tdes_hw_init()
219 if (!(dd->flags & TDES_FLAGS_INIT)) { in atmel_tdes_hw_init()
221 dd->flags |= TDES_FLAGS_INIT; in atmel_tdes_hw_init()
240 dd->hw_version = atmel_tdes_get_version(dd); in atmel_tdes_hw_version_init()
242 dev_info(dd->dev, in atmel_tdes_hw_version_init()
243 "version: 0x%x\n", dd->hw_version); in atmel_tdes_hw_version_init()
245 clk_disable_unprepare(dd->iclk); in atmel_tdes_hw_version_init()
254 /* dma_lch_out - completed */ in atmel_tdes_dma_callback()
255 tasklet_schedule(&dd->done_task); in atmel_tdes_dma_callback()
268 if (!dd->caps.has_dma) in atmel_tdes_write_ctrl()
273 if (dd->ctx->keylen > (DES_KEY_SIZE << 1)) { in atmel_tdes_write_ctrl()
276 } else if (dd->ctx->keylen > DES_KEY_SIZE) { in atmel_tdes_write_ctrl()
283 valmr |= dd->flags & TDES_FLAGS_MODE_MASK; in atmel_tdes_write_ctrl()
287 atmel_tdes_write_n(dd, TDES_KEY1W1R, dd->ctx->key, in atmel_tdes_write_ctrl()
288 dd->ctx->keylen >> 2); in atmel_tdes_write_ctrl()
290 if (dd->req->iv && (valmr & TDES_MR_OPMOD_MASK) != TDES_MR_OPMOD_ECB) in atmel_tdes_write_ctrl()
291 atmel_tdes_write_n(dd, TDES_IV1R, (void *)dd->req->iv, 2); in atmel_tdes_write_ctrl()
303 if (dd->flags & TDES_FLAGS_FAST) { in atmel_tdes_crypt_pdc_stop()
304 dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_FROM_DEVICE); in atmel_tdes_crypt_pdc_stop()
305 dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_tdes_crypt_pdc_stop()
307 dma_sync_single_for_device(dd->dev, dd->dma_addr_out, in atmel_tdes_crypt_pdc_stop()
308 dd->dma_size, DMA_FROM_DEVICE); in atmel_tdes_crypt_pdc_stop()
311 count = atmel_tdes_sg_copy(&dd->out_sg, &dd->out_offset, in atmel_tdes_crypt_pdc_stop()
312 dd->buf_out, dd->buflen, dd->dma_size, 1); in atmel_tdes_crypt_pdc_stop()
313 if (count != dd->dma_size) { in atmel_tdes_crypt_pdc_stop()
314 err = -EINVAL; in atmel_tdes_crypt_pdc_stop()
315 dev_dbg(dd->dev, "not all data converted: %zu\n", count); in atmel_tdes_crypt_pdc_stop()
324 int err = -ENOMEM; in atmel_tdes_buff_init()
326 dd->buf_in = (void *)__get_free_pages(GFP_KERNEL, 0); in atmel_tdes_buff_init()
327 dd->buf_out = (void *)__get_free_pages(GFP_KERNEL, 0); in atmel_tdes_buff_init()
328 dd->buflen = PAGE_SIZE; in atmel_tdes_buff_init()
329 dd->buflen &= ~(DES_BLOCK_SIZE - 1); in atmel_tdes_buff_init()
331 if (!dd->buf_in || !dd->buf_out) { in atmel_tdes_buff_init()
332 dev_dbg(dd->dev, "unable to alloc pages.\n"); in atmel_tdes_buff_init()
337 dd->dma_addr_in = dma_map_single(dd->dev, dd->buf_in, in atmel_tdes_buff_init()
338 dd->buflen, DMA_TO_DEVICE); in atmel_tdes_buff_init()
339 err = dma_mapping_error(dd->dev, dd->dma_addr_in); in atmel_tdes_buff_init()
341 dev_dbg(dd->dev, "dma %zd bytes error\n", dd->buflen); in atmel_tdes_buff_init()
345 dd->dma_addr_out = dma_map_single(dd->dev, dd->buf_out, in atmel_tdes_buff_init()
346 dd->buflen, DMA_FROM_DEVICE); in atmel_tdes_buff_init()
347 err = dma_mapping_error(dd->dev, dd->dma_addr_out); in atmel_tdes_buff_init()
349 dev_dbg(dd->dev, "dma %zd bytes error\n", dd->buflen); in atmel_tdes_buff_init()
356 dma_unmap_single(dd->dev, dd->dma_addr_in, dd->buflen, in atmel_tdes_buff_init()
360 free_page((unsigned long)dd->buf_out); in atmel_tdes_buff_init()
361 free_page((unsigned long)dd->buf_in); in atmel_tdes_buff_init()
367 dma_unmap_single(dd->dev, dd->dma_addr_out, dd->buflen, in atmel_tdes_buff_cleanup()
369 dma_unmap_single(dd->dev, dd->dma_addr_in, dd->buflen, in atmel_tdes_buff_cleanup()
371 free_page((unsigned long)dd->buf_out); in atmel_tdes_buff_cleanup()
372 free_page((unsigned long)dd->buf_in); in atmel_tdes_buff_cleanup()
379 struct atmel_tdes_reqctx *rctx = skcipher_request_ctx(dd->req); in atmel_tdes_crypt_pdc()
382 dd->dma_size = length; in atmel_tdes_crypt_pdc()
384 if (!(dd->flags & TDES_FLAGS_FAST)) { in atmel_tdes_crypt_pdc()
385 dma_sync_single_for_device(dd->dev, dma_addr_in, length, in atmel_tdes_crypt_pdc()
389 switch (rctx->mode & TDES_FLAGS_OPMODE_MASK) { in atmel_tdes_crypt_pdc()
422 struct atmel_tdes_reqctx *rctx = skcipher_request_ctx(dd->req); in atmel_tdes_crypt_dma()
427 dd->dma_size = length; in atmel_tdes_crypt_dma()
429 if (!(dd->flags & TDES_FLAGS_FAST)) { in atmel_tdes_crypt_dma()
430 dma_sync_single_for_device(dd->dev, dma_addr_in, length, in atmel_tdes_crypt_dma()
434 switch (rctx->mode & TDES_FLAGS_OPMODE_MASK) { in atmel_tdes_crypt_dma()
448 dd->dma_lch_in.dma_conf.dst_addr_width = addr_width; in atmel_tdes_crypt_dma()
449 dd->dma_lch_out.dma_conf.src_addr_width = addr_width; in atmel_tdes_crypt_dma()
451 dmaengine_slave_config(dd->dma_lch_in.chan, &dd->dma_lch_in.dma_conf); in atmel_tdes_crypt_dma()
452 dmaengine_slave_config(dd->dma_lch_out.chan, &dd->dma_lch_out.dma_conf); in atmel_tdes_crypt_dma()
454 dd->flags |= TDES_FLAGS_DMA; in atmel_tdes_crypt_dma()
464 in_desc = dmaengine_prep_slave_sg(dd->dma_lch_in.chan, &sg[0], in atmel_tdes_crypt_dma()
468 return -EINVAL; in atmel_tdes_crypt_dma()
470 out_desc = dmaengine_prep_slave_sg(dd->dma_lch_out.chan, &sg[1], in atmel_tdes_crypt_dma()
474 return -EINVAL; in atmel_tdes_crypt_dma()
476 out_desc->callback = atmel_tdes_dma_callback; in atmel_tdes_crypt_dma()
477 out_desc->callback_param = dd; in atmel_tdes_crypt_dma()
480 dma_async_issue_pending(dd->dma_lch_out.chan); in atmel_tdes_crypt_dma()
483 dma_async_issue_pending(dd->dma_lch_in.chan); in atmel_tdes_crypt_dma()
494 if ((!dd->in_offset) && (!dd->out_offset)) { in atmel_tdes_crypt_start()
496 in = IS_ALIGNED((u32)dd->in_sg->offset, sizeof(u32)) && in atmel_tdes_crypt_start()
497 IS_ALIGNED(dd->in_sg->length, dd->ctx->block_size); in atmel_tdes_crypt_start()
498 out = IS_ALIGNED((u32)dd->out_sg->offset, sizeof(u32)) && in atmel_tdes_crypt_start()
499 IS_ALIGNED(dd->out_sg->length, dd->ctx->block_size); in atmel_tdes_crypt_start()
502 if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg)) in atmel_tdes_crypt_start()
508 count = min_t(size_t, dd->total, sg_dma_len(dd->in_sg)); in atmel_tdes_crypt_start()
509 count = min_t(size_t, count, sg_dma_len(dd->out_sg)); in atmel_tdes_crypt_start()
511 err = dma_map_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_tdes_crypt_start()
513 dev_dbg(dd->dev, "dma_map_sg() error\n"); in atmel_tdes_crypt_start()
514 return -EINVAL; in atmel_tdes_crypt_start()
517 err = dma_map_sg(dd->dev, dd->out_sg, 1, in atmel_tdes_crypt_start()
520 dev_dbg(dd->dev, "dma_map_sg() error\n"); in atmel_tdes_crypt_start()
521 dma_unmap_sg(dd->dev, dd->in_sg, 1, in atmel_tdes_crypt_start()
523 return -EINVAL; in atmel_tdes_crypt_start()
526 addr_in = sg_dma_address(dd->in_sg); in atmel_tdes_crypt_start()
527 addr_out = sg_dma_address(dd->out_sg); in atmel_tdes_crypt_start()
529 dd->flags |= TDES_FLAGS_FAST; in atmel_tdes_crypt_start()
533 count = atmel_tdes_sg_copy(&dd->in_sg, &dd->in_offset, in atmel_tdes_crypt_start()
534 dd->buf_in, dd->buflen, dd->total, 0); in atmel_tdes_crypt_start()
536 addr_in = dd->dma_addr_in; in atmel_tdes_crypt_start()
537 addr_out = dd->dma_addr_out; in atmel_tdes_crypt_start()
539 dd->flags &= ~TDES_FLAGS_FAST; in atmel_tdes_crypt_start()
542 dd->total -= count; in atmel_tdes_crypt_start()
544 if (dd->caps.has_dma) in atmel_tdes_crypt_start()
549 if (err && (dd->flags & TDES_FLAGS_FAST)) { in atmel_tdes_crypt_start()
550 dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_tdes_crypt_start()
551 dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_TO_DEVICE); in atmel_tdes_crypt_start()
560 struct skcipher_request *req = dd->req; in atmel_tdes_set_iv_as_last_ciphertext_block()
565 if (req->cryptlen < ivsize) in atmel_tdes_set_iv_as_last_ciphertext_block()
568 if (rctx->mode & TDES_FLAGS_ENCRYPT) in atmel_tdes_set_iv_as_last_ciphertext_block()
569 scatterwalk_map_and_copy(req->iv, req->dst, in atmel_tdes_set_iv_as_last_ciphertext_block()
570 req->cryptlen - ivsize, ivsize, 0); in atmel_tdes_set_iv_as_last_ciphertext_block()
572 memcpy(req->iv, rctx->lastc, ivsize); in atmel_tdes_set_iv_as_last_ciphertext_block()
578 struct skcipher_request *req = dd->req; in atmel_tdes_finish_req()
581 clk_disable_unprepare(dd->iclk); in atmel_tdes_finish_req()
583 dd->flags &= ~TDES_FLAGS_BUSY; in atmel_tdes_finish_req()
585 if (!err && (rctx->mode & TDES_FLAGS_OPMODE_MASK) != TDES_FLAGS_ECB) in atmel_tdes_finish_req()
600 spin_lock_irqsave(&dd->lock, flags); in atmel_tdes_handle_queue()
602 ret = crypto_enqueue_request(&dd->queue, &req->base); in atmel_tdes_handle_queue()
603 if (dd->flags & TDES_FLAGS_BUSY) { in atmel_tdes_handle_queue()
604 spin_unlock_irqrestore(&dd->lock, flags); in atmel_tdes_handle_queue()
607 backlog = crypto_get_backlog(&dd->queue); in atmel_tdes_handle_queue()
608 async_req = crypto_dequeue_request(&dd->queue); in atmel_tdes_handle_queue()
610 dd->flags |= TDES_FLAGS_BUSY; in atmel_tdes_handle_queue()
611 spin_unlock_irqrestore(&dd->lock, flags); in atmel_tdes_handle_queue()
617 crypto_request_complete(backlog, -EINPROGRESS); in atmel_tdes_handle_queue()
622 dd->req = req; in atmel_tdes_handle_queue()
623 dd->total = req->cryptlen; in atmel_tdes_handle_queue()
624 dd->in_offset = 0; in atmel_tdes_handle_queue()
625 dd->in_sg = req->src; in atmel_tdes_handle_queue()
626 dd->out_offset = 0; in atmel_tdes_handle_queue()
627 dd->out_sg = req->dst; in atmel_tdes_handle_queue()
631 rctx->mode &= TDES_FLAGS_MODE_MASK; in atmel_tdes_handle_queue()
632 dd->flags = (dd->flags & ~TDES_FLAGS_MODE_MASK) | rctx->mode; in atmel_tdes_handle_queue()
633 dd->ctx = ctx; in atmel_tdes_handle_queue()
641 tasklet_schedule(&dd->queue_task); in atmel_tdes_handle_queue()
649 int err = -EINVAL; in atmel_tdes_crypt_dma_stop()
652 if (dd->flags & TDES_FLAGS_DMA) { in atmel_tdes_crypt_dma_stop()
654 if (dd->flags & TDES_FLAGS_FAST) { in atmel_tdes_crypt_dma_stop()
655 dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_FROM_DEVICE); in atmel_tdes_crypt_dma_stop()
656 dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_tdes_crypt_dma_stop()
658 dma_sync_single_for_device(dd->dev, dd->dma_addr_out, in atmel_tdes_crypt_dma_stop()
659 dd->dma_size, DMA_FROM_DEVICE); in atmel_tdes_crypt_dma_stop()
662 count = atmel_tdes_sg_copy(&dd->out_sg, &dd->out_offset, in atmel_tdes_crypt_dma_stop()
663 dd->buf_out, dd->buflen, dd->dma_size, 1); in atmel_tdes_crypt_dma_stop()
664 if (count != dd->dma_size) { in atmel_tdes_crypt_dma_stop()
665 err = -EINVAL; in atmel_tdes_crypt_dma_stop()
666 dev_dbg(dd->dev, "not all data converted: %zu\n", count); in atmel_tdes_crypt_dma_stop()
678 struct device *dev = ctx->dd->dev; in atmel_tdes_crypt()
680 if (!req->cryptlen) in atmel_tdes_crypt()
685 if (!IS_ALIGNED(req->cryptlen, CFB8_BLOCK_SIZE)) { in atmel_tdes_crypt()
687 return -EINVAL; in atmel_tdes_crypt()
689 ctx->block_size = CFB8_BLOCK_SIZE; in atmel_tdes_crypt()
693 if (!IS_ALIGNED(req->cryptlen, CFB16_BLOCK_SIZE)) { in atmel_tdes_crypt()
695 return -EINVAL; in atmel_tdes_crypt()
697 ctx->block_size = CFB16_BLOCK_SIZE; in atmel_tdes_crypt()
701 if (!IS_ALIGNED(req->cryptlen, CFB32_BLOCK_SIZE)) { in atmel_tdes_crypt()
703 return -EINVAL; in atmel_tdes_crypt()
705 ctx->block_size = CFB32_BLOCK_SIZE; in atmel_tdes_crypt()
709 if (!IS_ALIGNED(req->cryptlen, DES_BLOCK_SIZE)) { in atmel_tdes_crypt()
711 return -EINVAL; in atmel_tdes_crypt()
713 ctx->block_size = DES_BLOCK_SIZE; in atmel_tdes_crypt()
717 rctx->mode = mode; in atmel_tdes_crypt()
723 if (req->cryptlen >= ivsize) in atmel_tdes_crypt()
724 scatterwalk_map_and_copy(rctx->lastc, req->src, in atmel_tdes_crypt()
725 req->cryptlen - ivsize, in atmel_tdes_crypt()
729 return atmel_tdes_handle_queue(ctx->dd, req); in atmel_tdes_crypt()
737 dd->dma_lch_in.chan = dma_request_chan(dd->dev, "tx"); in atmel_tdes_dma_init()
738 if (IS_ERR(dd->dma_lch_in.chan)) { in atmel_tdes_dma_init()
739 ret = PTR_ERR(dd->dma_lch_in.chan); in atmel_tdes_dma_init()
743 dd->dma_lch_in.dma_conf.dst_addr = dd->phys_base + in atmel_tdes_dma_init()
745 dd->dma_lch_in.dma_conf.src_maxburst = 1; in atmel_tdes_dma_init()
746 dd->dma_lch_in.dma_conf.src_addr_width = in atmel_tdes_dma_init()
748 dd->dma_lch_in.dma_conf.dst_maxburst = 1; in atmel_tdes_dma_init()
749 dd->dma_lch_in.dma_conf.dst_addr_width = in atmel_tdes_dma_init()
751 dd->dma_lch_in.dma_conf.device_fc = false; in atmel_tdes_dma_init()
753 dd->dma_lch_out.chan = dma_request_chan(dd->dev, "rx"); in atmel_tdes_dma_init()
754 if (IS_ERR(dd->dma_lch_out.chan)) { in atmel_tdes_dma_init()
755 ret = PTR_ERR(dd->dma_lch_out.chan); in atmel_tdes_dma_init()
759 dd->dma_lch_out.dma_conf.src_addr = dd->phys_base + in atmel_tdes_dma_init()
761 dd->dma_lch_out.dma_conf.src_maxburst = 1; in atmel_tdes_dma_init()
762 dd->dma_lch_out.dma_conf.src_addr_width = in atmel_tdes_dma_init()
764 dd->dma_lch_out.dma_conf.dst_maxburst = 1; in atmel_tdes_dma_init()
765 dd->dma_lch_out.dma_conf.dst_addr_width = in atmel_tdes_dma_init()
767 dd->dma_lch_out.dma_conf.device_fc = false; in atmel_tdes_dma_init()
772 dma_release_channel(dd->dma_lch_in.chan); in atmel_tdes_dma_init()
774 dev_err(dd->dev, "no DMA channel available\n"); in atmel_tdes_dma_init()
780 dma_release_channel(dd->dma_lch_in.chan); in atmel_tdes_dma_cleanup()
781 dma_release_channel(dd->dma_lch_out.chan); in atmel_tdes_dma_cleanup()
794 memcpy(ctx->key, key, keylen); in atmel_des_setkey()
795 ctx->keylen = keylen; in atmel_des_setkey()
810 memcpy(ctx->key, key, keylen); in atmel_tdes_setkey()
811 ctx->keylen = keylen; in atmel_tdes_setkey()
889 ctx->dd = atmel_tdes_dev_alloc(); in atmel_tdes_init_tfm()
890 if (!ctx->dd) in atmel_tdes_init_tfm()
891 return -ENODEV; in atmel_tdes_init_tfm()
900 alg->base.cra_priority = ATMEL_TDES_PRIORITY; in atmel_tdes_skcipher_alg_init()
901 alg->base.cra_flags = CRYPTO_ALG_ASYNC; in atmel_tdes_skcipher_alg_init()
902 alg->base.cra_ctxsize = sizeof(struct atmel_tdes_ctx); in atmel_tdes_skcipher_alg_init()
903 alg->base.cra_module = THIS_MODULE; in atmel_tdes_skcipher_alg_init()
905 alg->init = atmel_tdes_init_tfm; in atmel_tdes_skcipher_alg_init()
911 .base.cra_driver_name = "atmel-ecb-des",
923 .base.cra_driver_name = "atmel-cbc-des",
936 .base.cra_driver_name = "atmel-cfb-des",
949 .base.cra_driver_name = "atmel-cfb8-des",
962 .base.cra_driver_name = "atmel-cfb16-des",
975 .base.cra_driver_name = "atmel-cfb32-des",
988 .base.cra_driver_name = "atmel-ofb-des",
1001 .base.cra_driver_name = "atmel-ecb-tdes",
1013 .base.cra_driver_name = "atmel-cbc-tdes",
1026 .base.cra_driver_name = "atmel-ofb-tdes",
1051 if (!(dd->flags & TDES_FLAGS_DMA)) in atmel_tdes_done_task()
1056 if (dd->total && !err) { in atmel_tdes_done_task()
1057 if (dd->flags & TDES_FLAGS_FAST) { in atmel_tdes_done_task()
1058 dd->in_sg = sg_next(dd->in_sg); in atmel_tdes_done_task()
1059 dd->out_sg = sg_next(dd->out_sg); in atmel_tdes_done_task()
1060 if (!dd->in_sg || !dd->out_sg) in atmel_tdes_done_task()
1061 err = -EINVAL; in atmel_tdes_done_task()
1081 if (TDES_FLAGS_BUSY & tdes_dd->flags) in atmel_tdes_irq()
1082 tasklet_schedule(&tdes_dd->done_task); in atmel_tdes_irq()
1084 dev_warn(tdes_dd->dev, "TDES interrupt when no active requests.\n"); in atmel_tdes_irq()
1123 dd->caps.has_dma = 0; in atmel_tdes_get_cap()
1124 dd->caps.has_cfb_3keys = 0; in atmel_tdes_get_cap()
1127 switch (dd->hw_version & 0xf00) { in atmel_tdes_get_cap()
1130 dd->caps.has_dma = 1; in atmel_tdes_get_cap()
1131 dd->caps.has_cfb_3keys = 1; in atmel_tdes_get_cap()
1136 dev_warn(dd->dev, in atmel_tdes_get_cap()
1143 { .compatible = "atmel,at91sam9g46-tdes" },
1151 struct device *dev = &pdev->dev; in atmel_tdes_probe()
1155 tdes_dd = devm_kmalloc(&pdev->dev, sizeof(*tdes_dd), GFP_KERNEL); in atmel_tdes_probe()
1157 return -ENOMEM; in atmel_tdes_probe()
1159 tdes_dd->dev = dev; in atmel_tdes_probe()
1163 INIT_LIST_HEAD(&tdes_dd->list); in atmel_tdes_probe()
1164 spin_lock_init(&tdes_dd->lock); in atmel_tdes_probe()
1166 tasklet_init(&tdes_dd->done_task, atmel_tdes_done_task, in atmel_tdes_probe()
1168 tasklet_init(&tdes_dd->queue_task, atmel_tdes_queue_task, in atmel_tdes_probe()
1171 crypto_init_queue(&tdes_dd->queue, ATMEL_TDES_QUEUE_LENGTH); in atmel_tdes_probe()
1173 tdes_dd->io_base = devm_platform_get_and_ioremap_resource(pdev, 0, &tdes_res); in atmel_tdes_probe()
1174 if (IS_ERR(tdes_dd->io_base)) { in atmel_tdes_probe()
1175 err = PTR_ERR(tdes_dd->io_base); in atmel_tdes_probe()
1178 tdes_dd->phys_base = tdes_res->start; in atmel_tdes_probe()
1181 tdes_dd->irq = platform_get_irq(pdev, 0); in atmel_tdes_probe()
1182 if (tdes_dd->irq < 0) { in atmel_tdes_probe()
1183 err = tdes_dd->irq; in atmel_tdes_probe()
1187 err = devm_request_irq(&pdev->dev, tdes_dd->irq, atmel_tdes_irq, in atmel_tdes_probe()
1188 IRQF_SHARED, "atmel-tdes", tdes_dd); in atmel_tdes_probe()
1195 tdes_dd->iclk = devm_clk_get(&pdev->dev, "tdes_clk"); in atmel_tdes_probe()
1196 if (IS_ERR(tdes_dd->iclk)) { in atmel_tdes_probe()
1198 err = PTR_ERR(tdes_dd->iclk); in atmel_tdes_probe()
1212 if (tdes_dd->caps.has_dma) { in atmel_tdes_probe()
1218 dma_chan_name(tdes_dd->dma_lch_in.chan), in atmel_tdes_probe()
1219 dma_chan_name(tdes_dd->dma_lch_out.chan)); in atmel_tdes_probe()
1223 list_add_tail(&tdes_dd->list, &atmel_tdes.dev_list); in atmel_tdes_probe()
1236 list_del(&tdes_dd->list); in atmel_tdes_probe()
1238 if (tdes_dd->caps.has_dma) in atmel_tdes_probe()
1243 tasklet_kill(&tdes_dd->done_task); in atmel_tdes_probe()
1244 tasklet_kill(&tdes_dd->queue_task); in atmel_tdes_probe()
1254 list_del(&tdes_dd->list); in atmel_tdes_remove()
1259 tasklet_kill(&tdes_dd->done_task); in atmel_tdes_remove()
1260 tasklet_kill(&tdes_dd->queue_task); in atmel_tdes_remove()
1262 if (tdes_dd->caps.has_dma) in atmel_tdes_remove()
1283 MODULE_AUTHOR("Nicolas Royer - Eukréa Electromatique");