Lines matching +full:aes +full:-gcm (line excerpts from the OMAP AES hardware-acceleration crypto driver; each entry gives the original source line number, the matched line, and the enclosing function)
1 // SPDX-License-Identifier: GPL-2.0-only
5 * Support for OMAP AES HW acceleration.
23 #include <linux/dma-mapping.h>
33 #include <crypto/aes.h>
34 #include <crypto/gcm.h>
39 #include "omap-crypto.h"
40 #include "omap-aes.h"
52 _read_ret = __raw_readl(dd->io_base + offset); \
60 return __raw_readl(dd->io_base + offset); in omap_aes_read()
69 __raw_writel(value, dd->io_base + offset); \
75 __raw_writel(value, dd->io_base + offset); in omap_aes_write()
93 for (; count--; value++, offset += 4) in omap_aes_write_n()
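For reference, the omap_aes_write_n() fragment above is the driver's block-write helper. A minimal sketch of that pattern, using a raw MMIO base pointer instead of the driver's device struct (illustrative only; needs <linux/io.h>):

	static void example_write_n(void __iomem *base, u32 offset,
				    const u32 *value, int count)
	{
		/* write 'count' consecutive 32-bit words to back-to-back registers */
		for (; count--; value++, offset += 4)
			__raw_writel(*value, base + offset);
	}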
101 if (!(dd->flags & FLAGS_INIT)) { in omap_aes_hw_init()
102 dd->flags |= FLAGS_INIT; in omap_aes_hw_init()
103 dd->err = 0; in omap_aes_hw_init()
106 err = pm_runtime_resume_and_get(dd->dev); in omap_aes_hw_init()
108 dev_err(dd->dev, "failed to get sync: %d\n", err); in omap_aes_hw_init()
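The omap_aes_hw_init() fragments above rely on runtime PM to power the accelerator before any register access. A minimal sketch of that pattern (the error message mirrors the listing; the function name is hypothetical):

	static int example_hw_get(struct device *dev)
	{
		int err;

		/* resume the IP and take a usage reference; on failure,
		 * pm_runtime_resume_and_get() drops the reference for us */
		err = pm_runtime_resume_and_get(dev);
		if (err < 0) {
			dev_err(dev, "failed to get sync: %d\n", err);
			return err;
		}

		return 0;
	}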
117 dd->flags &= ~(OMAP_CRYPTO_COPY_MASK << FLAGS_IN_DATA_ST_SHIFT); in omap_aes_clear_copy_flags()
118 dd->flags &= ~(OMAP_CRYPTO_COPY_MASK << FLAGS_OUT_DATA_ST_SHIFT); in omap_aes_clear_copy_flags()
119 dd->flags &= ~(OMAP_CRYPTO_COPY_MASK << FLAGS_ASSOC_DATA_ST_SHIFT); in omap_aes_clear_copy_flags()
133 key32 = dd->ctx->keylen / sizeof(u32); in omap_aes_write_ctrl()
136 if (dd->flags & FLAGS_GCM) in omap_aes_write_ctrl()
142 (__force u32)cpu_to_le32(dd->ctx->key[i])); in omap_aes_write_ctrl()
145 if ((dd->flags & (FLAGS_CBC | FLAGS_CTR)) && dd->req->iv) in omap_aes_write_ctrl()
146 omap_aes_write_n(dd, AES_REG_IV(dd, 0), (void *)dd->req->iv, 4); in omap_aes_write_ctrl()
148 if ((dd->flags & (FLAGS_GCM)) && dd->aead_req->iv) { in omap_aes_write_ctrl()
149 rctx = aead_request_ctx(dd->aead_req); in omap_aes_write_ctrl()
150 omap_aes_write_n(dd, AES_REG_IV(dd, 0), (u32 *)rctx->iv, 4); in omap_aes_write_ctrl()
153 val = FLD_VAL(((dd->ctx->keylen >> 3) - 1), 4, 3); in omap_aes_write_ctrl()
154 if (dd->flags & FLAGS_CBC) in omap_aes_write_ctrl()
157 if (dd->flags & (FLAGS_CTR | FLAGS_GCM)) in omap_aes_write_ctrl()
160 if (dd->flags & FLAGS_GCM) in omap_aes_write_ctrl()
163 if (dd->flags & FLAGS_ENCRYPT) in omap_aes_write_ctrl()
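The cpu_to_le32() fragment in omap_aes_write_ctrl() above is the tail of the key-programming loop. Reconstructed from the visible fragments, it would read roughly as below; treat the AES_REG_KEY register macro as an assumption inferred from the driver's naming scheme:

	/* load the key, one 32-bit word per key register */
	for (i = 0; i < key32; i++)
		omap_aes_write(dd, AES_REG_KEY(dd, i),
			       (__force u32)cpu_to_le32(dd->ctx->key[i]));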
175 val = dd->pdata->dma_start; in omap_aes_dma_trigger_omap2()
177 if (dd->dma_lch_out != NULL) in omap_aes_dma_trigger_omap2()
178 val |= dd->pdata->dma_enable_out; in omap_aes_dma_trigger_omap2()
179 if (dd->dma_lch_in != NULL) in omap_aes_dma_trigger_omap2()
180 val |= dd->pdata->dma_enable_in; in omap_aes_dma_trigger_omap2()
182 mask = dd->pdata->dma_enable_out | dd->pdata->dma_enable_in | in omap_aes_dma_trigger_omap2()
183 dd->pdata->dma_start; in omap_aes_dma_trigger_omap2()
193 if (dd->flags & FLAGS_GCM) in omap_aes_dma_trigger_omap4()
194 omap_aes_write(dd, AES_REG_A_LEN, dd->assoc_len); in omap_aes_dma_trigger_omap4()
203 mask = dd->pdata->dma_enable_out | dd->pdata->dma_enable_in | in omap_aes_dma_stop()
204 dd->pdata->dma_start; in omap_aes_dma_stop()
215 list_move_tail(&dd->list, &dev_list); in omap_aes_find_dev()
216 rctx->dd = dd; in omap_aes_find_dev()
226 /* dma_lch_out - completed */ in omap_aes_dma_out_callback()
227 tasklet_schedule(&dd->done_task); in omap_aes_dma_out_callback()
234 dd->dma_lch_out = NULL; in omap_aes_dma_init()
235 dd->dma_lch_in = NULL; in omap_aes_dma_init()
237 dd->dma_lch_in = dma_request_chan(dd->dev, "rx"); in omap_aes_dma_init()
238 if (IS_ERR(dd->dma_lch_in)) { in omap_aes_dma_init()
239 dev_err(dd->dev, "Unable to request in DMA channel\n"); in omap_aes_dma_init()
240 return PTR_ERR(dd->dma_lch_in); in omap_aes_dma_init()
243 dd->dma_lch_out = dma_request_chan(dd->dev, "tx"); in omap_aes_dma_init()
244 if (IS_ERR(dd->dma_lch_out)) { in omap_aes_dma_init()
245 dev_err(dd->dev, "Unable to request out DMA channel\n"); in omap_aes_dma_init()
246 err = PTR_ERR(dd->dma_lch_out); in omap_aes_dma_init()
253 dma_release_channel(dd->dma_lch_in); in omap_aes_dma_init()
260 if (dd->pio_only) in omap_aes_dma_cleanup()
263 dma_release_channel(dd->dma_lch_out); in omap_aes_dma_cleanup()
264 dma_release_channel(dd->dma_lch_in); in omap_aes_dma_cleanup()
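The omap_aes_dma_init()/omap_aes_dma_cleanup() fragments above follow the usual paired dmaengine channel lifecycle: request both named channels, and release the first if the second request fails. A minimal sketch with hypothetical function and parameter names (needs <linux/dmaengine.h> and <linux/err.h>):

	static int example_dma_init(struct device *dev,
				    struct dma_chan **in, struct dma_chan **out)
	{
		*in = dma_request_chan(dev, "rx");	/* the driver's "in" channel */
		if (IS_ERR(*in))
			return PTR_ERR(*in);

		*out = dma_request_chan(dev, "tx");	/* the driver's "out" channel */
		if (IS_ERR(*out)) {
			dma_release_channel(*in);	/* undo the first request */
			return PTR_ERR(*out);
		}

		return 0;
	}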
276 if (dd->pio_only) { in omap_aes_crypt_dma()
277 scatterwalk_start(&dd->in_walk, dd->in_sg); in omap_aes_crypt_dma()
279 scatterwalk_start(&dd->out_walk, dd->out_sg); in omap_aes_crypt_dma()
287 dma_sync_sg_for_device(dd->dev, dd->in_sg, in_sg_len, DMA_TO_DEVICE); in omap_aes_crypt_dma()
291 cfg.src_addr = dd->phys_base + AES_REG_DATA_N(dd, 0); in omap_aes_crypt_dma()
292 cfg.dst_addr = dd->phys_base + AES_REG_DATA_N(dd, 0); in omap_aes_crypt_dma()
299 ret = dmaengine_slave_config(dd->dma_lch_in, &cfg); in omap_aes_crypt_dma()
301 dev_err(dd->dev, "can't configure IN dmaengine slave: %d\n", in omap_aes_crypt_dma()
306 tx_in = dmaengine_prep_slave_sg(dd->dma_lch_in, in_sg, in_sg_len, in omap_aes_crypt_dma()
310 dev_err(dd->dev, "IN prep_slave_sg() failed\n"); in omap_aes_crypt_dma()
311 return -EINVAL; in omap_aes_crypt_dma()
315 tx_in->callback_param = dd; in omap_aes_crypt_dma()
316 tx_in->callback = NULL; in omap_aes_crypt_dma()
320 ret = dmaengine_slave_config(dd->dma_lch_out, &cfg); in omap_aes_crypt_dma()
322 dev_err(dd->dev, "can't configure OUT dmaengine slave: %d\n", in omap_aes_crypt_dma()
327 tx_out = dmaengine_prep_slave_sg(dd->dma_lch_out, out_sg, in omap_aes_crypt_dma()
332 dev_err(dd->dev, "OUT prep_slave_sg() failed\n"); in omap_aes_crypt_dma()
333 return -EINVAL; in omap_aes_crypt_dma()
341 if (dd->flags & FLAGS_GCM) in omap_aes_crypt_dma()
342 cb_desc->callback = omap_aes_gcm_dma_out_callback; in omap_aes_crypt_dma()
344 cb_desc->callback = omap_aes_dma_out_callback; in omap_aes_crypt_dma()
345 cb_desc->callback_param = dd; in omap_aes_crypt_dma()
352 dma_async_issue_pending(dd->dma_lch_in); in omap_aes_crypt_dma()
354 dma_async_issue_pending(dd->dma_lch_out); in omap_aes_crypt_dma()
357 dd->pdata->trigger(dd, dd->total); in omap_aes_crypt_dma()
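The omap_aes_crypt_dma() fragments above configure both legs of a dmaengine slave transfer and only then trigger the hardware. A condensed sketch of one leg (the device-to-memory side); the FIFO address, burst size, callback, and variable names below are placeholders, not the driver's values:

	struct dma_slave_config cfg = {
		.src_addr	= fifo_phys,			/* data-register address (placeholder) */
		.src_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,	/* 32-bit register accesses */
		.src_maxburst	= 4,				/* placeholder burst size */
	};
	struct dma_async_tx_descriptor *tx;

	if (dmaengine_slave_config(chan, &cfg))
		return -EINVAL;

	tx = dmaengine_prep_slave_sg(chan, sg, sg_len, DMA_DEV_TO_MEM,
				     DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx)
		return -EINVAL;

	tx->callback = done_callback;		/* runs when the transfer completes */
	tx->callback_param = ctx;
	dmaengine_submit(tx);
	dma_async_issue_pending(chan);		/* start the queued descriptor */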
366 pr_debug("total: %zu\n", dd->total); in omap_aes_crypt_dma_start()
368 if (!dd->pio_only) { in omap_aes_crypt_dma_start()
369 err = dma_map_sg(dd->dev, dd->in_sg, dd->in_sg_len, in omap_aes_crypt_dma_start()
372 dev_err(dd->dev, "dma_map_sg() error\n"); in omap_aes_crypt_dma_start()
373 return -EINVAL; in omap_aes_crypt_dma_start()
376 if (dd->out_sg_len) { in omap_aes_crypt_dma_start()
377 err = dma_map_sg(dd->dev, dd->out_sg, dd->out_sg_len, in omap_aes_crypt_dma_start()
380 dev_err(dd->dev, "dma_map_sg() error\n"); in omap_aes_crypt_dma_start()
381 return -EINVAL; in omap_aes_crypt_dma_start()
386 err = omap_aes_crypt_dma(dd, dd->in_sg, dd->out_sg, dd->in_sg_len, in omap_aes_crypt_dma_start()
387 dd->out_sg_len); in omap_aes_crypt_dma_start()
388 if (err && !dd->pio_only) { in omap_aes_crypt_dma_start()
389 dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE); in omap_aes_crypt_dma_start()
390 if (dd->out_sg_len) in omap_aes_crypt_dma_start()
391 dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len, in omap_aes_crypt_dma_start()
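omap_aes_crypt_dma_start() above maps the scatterlists before programming the DMA and unmaps them again if setup fails. The underlying contract, sketched with placeholder names (dev, sg, nents, start_transfer):

	/* dma_map_sg() returns the number of mapped segments, or 0 on error */
	mapped = dma_map_sg(dev, sg, nents, DMA_TO_DEVICE);
	if (!mapped)
		return -EINVAL;

	ret = start_transfer();		/* placeholder for the DMA setup above */
	if (ret)
		dma_unmap_sg(dev, sg, nents, DMA_TO_DEVICE);	/* undo on failure */
	return ret;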
400 struct skcipher_request *req = dd->req; in omap_aes_finish_req()
404 crypto_finalize_skcipher_request(dd->engine, req, err); in omap_aes_finish_req()
406 pm_runtime_mark_last_busy(dd->dev); in omap_aes_finish_req()
407 pm_runtime_put_autosuspend(dd->dev); in omap_aes_finish_req()
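omap_aes_finish_req() above hands the completed request back to the crypto engine and then lets the device autosuspend; annotated, the sequence is:

	crypto_finalize_skcipher_request(dd->engine, req, err);	/* complete toward the caller */
	pm_runtime_mark_last_busy(dd->dev);	/* restart the autosuspend timer */
	pm_runtime_put_autosuspend(dd->dev);	/* drop the usage reference taken at init */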
412 pr_debug("total: %zu\n", dd->total); in omap_aes_crypt_dma_stop()
424 return crypto_transfer_skcipher_request_to_engine(dd->engine, req); in omap_aes_handle_queue()
436 struct omap_aes_dev *dd = rctx->dd; in omap_aes_prepare_req()
441 return -ENODEV; in omap_aes_prepare_req()
444 dd->req = req; in omap_aes_prepare_req()
445 dd->total = req->cryptlen; in omap_aes_prepare_req()
446 dd->total_save = req->cryptlen; in omap_aes_prepare_req()
447 dd->in_sg = req->src; in omap_aes_prepare_req()
448 dd->out_sg = req->dst; in omap_aes_prepare_req()
449 dd->orig_out = req->dst; in omap_aes_prepare_req()
452 if (req->src == req->dst) in omap_aes_prepare_req()
455 ret = omap_crypto_align_sg(&dd->in_sg, dd->total, AES_BLOCK_SIZE, in omap_aes_prepare_req()
456 dd->in_sgl, flags, in omap_aes_prepare_req()
457 FLAGS_IN_DATA_ST_SHIFT, &dd->flags); in omap_aes_prepare_req()
461 ret = omap_crypto_align_sg(&dd->out_sg, dd->total, AES_BLOCK_SIZE, in omap_aes_prepare_req()
462 &dd->out_sgl, 0, in omap_aes_prepare_req()
463 FLAGS_OUT_DATA_ST_SHIFT, &dd->flags); in omap_aes_prepare_req()
467 dd->in_sg_len = sg_nents_for_len(dd->in_sg, dd->total); in omap_aes_prepare_req()
468 if (dd->in_sg_len < 0) in omap_aes_prepare_req()
469 return dd->in_sg_len; in omap_aes_prepare_req()
471 dd->out_sg_len = sg_nents_for_len(dd->out_sg, dd->total); in omap_aes_prepare_req()
472 if (dd->out_sg_len < 0) in omap_aes_prepare_req()
473 return dd->out_sg_len; in omap_aes_prepare_req()
475 rctx->mode &= FLAGS_MODE_MASK; in omap_aes_prepare_req()
476 dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode; in omap_aes_prepare_req()
478 dd->ctx = ctx; in omap_aes_prepare_req()
479 rctx->dd = dd; in omap_aes_prepare_req()
489 struct omap_aes_dev *dd = rctx->dd; in omap_aes_crypt_req()
492 return -ENODEV; in omap_aes_crypt_req()
511 if (!dd->pio_only) { in omap_aes_done_task()
512 dma_sync_sg_for_device(dd->dev, dd->out_sg, dd->out_sg_len, in omap_aes_done_task()
514 dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE); in omap_aes_done_task()
515 dma_unmap_sg(dd->dev, dd->out_sg, dd->out_sg_len, in omap_aes_done_task()
520 omap_crypto_cleanup(dd->in_sg, NULL, 0, dd->total_save, in omap_aes_done_task()
521 FLAGS_IN_DATA_ST_SHIFT, dd->flags); in omap_aes_done_task()
523 omap_crypto_cleanup(dd->out_sg, dd->orig_out, 0, dd->total_save, in omap_aes_done_task()
524 FLAGS_OUT_DATA_ST_SHIFT, dd->flags); in omap_aes_done_task()
527 if (dd->flags & (FLAGS_CBC | FLAGS_CTR)) in omap_aes_done_task()
528 omap_aes_copy_ivout(dd, dd->req->iv); in omap_aes_done_task()
543 if ((req->cryptlen % AES_BLOCK_SIZE) && !(mode & FLAGS_CTR)) in omap_aes_crypt()
544 return -EINVAL; in omap_aes_crypt()
546 pr_debug("nbytes: %d, enc: %d, cbc: %d\n", req->cryptlen, in omap_aes_crypt()
550 if (req->cryptlen < aes_fallback_sz) { in omap_aes_crypt()
551 skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback); in omap_aes_crypt()
552 skcipher_request_set_callback(&rctx->fallback_req, in omap_aes_crypt()
553 req->base.flags, in omap_aes_crypt()
554 req->base.complete, in omap_aes_crypt()
555 req->base.data); in omap_aes_crypt()
556 skcipher_request_set_crypt(&rctx->fallback_req, req->src, in omap_aes_crypt()
557 req->dst, req->cryptlen, req->iv); in omap_aes_crypt()
560 ret = crypto_skcipher_encrypt(&rctx->fallback_req); in omap_aes_crypt()
562 ret = crypto_skcipher_decrypt(&rctx->fallback_req); in omap_aes_crypt()
567 return -ENODEV; in omap_aes_crypt()
569 rctx->mode = mode; in omap_aes_crypt()
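The omap_aes_crypt() fragments above route short requests to a software fallback rather than paying the DMA setup cost. A condensed sketch of that branch, using the names visible in the listing (aes_fallback_sz, FLAGS_ENCRYPT, ctx->fallback):

	if (req->cryptlen < aes_fallback_sz) {
		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
		skcipher_request_set_callback(&rctx->fallback_req, req->base.flags,
					      req->base.complete, req->base.data);
		skcipher_request_set_crypt(&rctx->fallback_req, req->src, req->dst,
					   req->cryptlen, req->iv);

		/* run the request on the software implementation instead */
		return (mode & FLAGS_ENCRYPT) ?
			crypto_skcipher_encrypt(&rctx->fallback_req) :
			crypto_skcipher_decrypt(&rctx->fallback_req);
	}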
584 return -EINVAL; in omap_aes_setkey()
588 memcpy(ctx->key, key, keylen); in omap_aes_setkey()
589 ctx->keylen = keylen; in omap_aes_setkey()
591 crypto_skcipher_clear_flags(ctx->fallback, CRYPTO_TFM_REQ_MASK); in omap_aes_setkey()
592 crypto_skcipher_set_flags(ctx->fallback, tfm->base.crt_flags & in omap_aes_setkey()
595 ret = crypto_skcipher_setkey(ctx->fallback, key, keylen); in omap_aes_setkey()
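omap_aes_setkey() above also keeps the fallback transform's key in sync with the hardware context. The fragment's tail, sketched as one sequence (returning the fallback's setkey result directly is a simplification):

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	/* mirror the request flags and the new key into the fallback tfm */
	crypto_skcipher_clear_flags(ctx->fallback, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctx->fallback,
				  tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(ctx->fallback, key, keylen);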
639 const char *name = crypto_tfm_alg_name(&tfm->base); in omap_aes_init_tfm()
647 ctx->fallback = blk; in omap_aes_init_tfm()
652 ctx->enginectx.op.prepare_request = omap_aes_prepare_req; in omap_aes_init_tfm()
653 ctx->enginectx.op.unprepare_request = NULL; in omap_aes_init_tfm()
654 ctx->enginectx.op.do_one_request = omap_aes_crypt_req; in omap_aes_init_tfm()
663 if (ctx->fallback) in omap_aes_exit_tfm()
664 crypto_free_skcipher(ctx->fallback); in omap_aes_exit_tfm()
666 ctx->fallback = NULL; in omap_aes_exit_tfm()
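omap_aes_init_tfm()/omap_aes_exit_tfm() above allocate and release that fallback by algorithm name. A minimal sketch of the pair (the exact type/mask flags are an assumption):

	/* tfm init: request a software implementation of the same algorithm */
	blk = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(blk))
		return PTR_ERR(blk);
	ctx->fallback = blk;

	/* tfm exit: release it again and clear the pointer */
	if (ctx->fallback)
		crypto_free_skcipher(ctx->fallback);
	ctx->fallback = NULL;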
673 .base.cra_name = "ecb(aes)",
674 .base.cra_driver_name = "ecb-aes-omap",
692 .base.cra_name = "cbc(aes)",
693 .base.cra_driver_name = "cbc-aes-omap",
715 .base.cra_name = "ctr(aes)",
716 .base.cra_driver_name = "ctr-aes-omap",
746 .cra_name = "gcm(aes)",
747 .cra_driver_name = "gcm-aes-omap",
766 .cra_name = "rfc4106(gcm(aes))",
767 .cra_driver_name = "rfc4106-gcm-aes-omap",
872 BUG_ON(!dd->in_sg); in omap_aes_irq()
874 BUG_ON(_calc_walked(in) > dd->in_sg->length); in omap_aes_irq()
876 src = sg_virt(dd->in_sg) + _calc_walked(in); in omap_aes_irq()
881 scatterwalk_advance(&dd->in_walk, 4); in omap_aes_irq()
882 if (dd->in_sg->length == _calc_walked(in)) { in omap_aes_irq()
883 dd->in_sg = sg_next(dd->in_sg); in omap_aes_irq()
884 if (dd->in_sg) { in omap_aes_irq()
885 scatterwalk_start(&dd->in_walk, in omap_aes_irq()
886 dd->in_sg); in omap_aes_irq()
887 src = sg_virt(dd->in_sg) + in omap_aes_irq()
905 BUG_ON(!dd->out_sg); in omap_aes_irq()
907 BUG_ON(_calc_walked(out) > dd->out_sg->length); in omap_aes_irq()
909 dst = sg_virt(dd->out_sg) + _calc_walked(out); in omap_aes_irq()
913 scatterwalk_advance(&dd->out_walk, 4); in omap_aes_irq()
914 if (dd->out_sg->length == _calc_walked(out)) { in omap_aes_irq()
915 dd->out_sg = sg_next(dd->out_sg); in omap_aes_irq()
916 if (dd->out_sg) { in omap_aes_irq()
917 scatterwalk_start(&dd->out_walk, in omap_aes_irq()
918 dd->out_sg); in omap_aes_irq()
919 dst = sg_virt(dd->out_sg) + in omap_aes_irq()
927 dd->total -= min_t(size_t, AES_BLOCK_SIZE, dd->total); in omap_aes_irq()
933 if (!dd->total) in omap_aes_irq()
935 tasklet_schedule(&dd->done_task); in omap_aes_irq()
946 .compatible = "ti,omap2-aes",
950 .compatible = "ti,omap3-aes",
954 .compatible = "ti,omap4-aes",
964 struct device_node *node = dev->of_node; in omap_aes_get_res_of()
967 dd->pdata = of_device_get_match_data(dev); in omap_aes_get_res_of()
968 if (!dd->pdata) { in omap_aes_get_res_of()
970 err = -EINVAL; in omap_aes_get_res_of()
977 err = -EINVAL; in omap_aes_get_res_of()
992 return -EINVAL; in omap_aes_get_res_of()
999 struct device *dev = &pdev->dev; in omap_aes_get_res_pdev()
1007 err = -ENODEV; in omap_aes_get_res_pdev()
1012 /* Only OMAP2/3 can be non-DT */ in omap_aes_get_res_pdev()
1013 dd->pdata = &omap_aes_pdata_omap2; in omap_aes_get_res_pdev()
1038 return -EINVAL; in fallback_store()
1051 return sprintf(buf, "%d\n", dd->engine->queue.max_qlen); in queue_len_show()
1068 return -EINVAL; in queue_len_store()
1077 spin_lock_irqsave(&dd->lock, flags); in queue_len_store()
1078 dd->engine->queue.max_qlen = value; in queue_len_store()
1079 dd->aead_queue.base.max_qlen = value; in queue_len_store()
1080 spin_unlock_irqrestore(&dd->lock, flags); in queue_len_store()
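The queue_len_store() fragments above implement a writable sysfs knob: parse the value, then update both queue limits under the device lock. A sketch of the core of that handler; how dd is looked up and the bounds checking are simplified assumptions, the field names follow the listing:

	unsigned long flags, value;

	if (kstrtoul(buf, 0, &value) || !value)
		return -EINVAL;

	spin_lock_irqsave(&dd->lock, flags);		/* update both queues atomically */
	dd->engine->queue.max_qlen = value;		/* skcipher engine queue */
	dd->aead_queue.base.max_qlen = value;		/* AEAD (GCM) queue */
	spin_unlock_irqrestore(&dd->lock, flags);

	return size;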
1102 struct device *dev = &pdev->dev; in omap_aes_probe()
1107 int err = -ENOMEM, i, j, irq = -1; in omap_aes_probe()
1115 dd->dev = dev; in omap_aes_probe()
1118 aead_init_queue(&dd->aead_queue, OMAP_AES_QUEUE_LENGTH); in omap_aes_probe()
1120 err = (dev->of_node) ? omap_aes_get_res_of(dd, dev, &res) : in omap_aes_probe()
1125 dd->io_base = devm_ioremap_resource(dev, &res); in omap_aes_probe()
1126 if (IS_ERR(dd->io_base)) { in omap_aes_probe()
1127 err = PTR_ERR(dd->io_base); in omap_aes_probe()
1130 dd->phys_base = res.start; in omap_aes_probe()
1149 dev_info(dev, "OMAP AES hw accel rev: %u.%u\n", in omap_aes_probe()
1150 (reg & dd->pdata->major_mask) >> dd->pdata->major_shift, in omap_aes_probe()
1151 (reg & dd->pdata->minor_mask) >> dd->pdata->minor_shift); in omap_aes_probe()
1153 tasklet_init(&dd->done_task, omap_aes_done_task, (unsigned long)dd); in omap_aes_probe()
1156 if (err == -EPROBE_DEFER) { in omap_aes_probe()
1159 dd->pio_only = 1; in omap_aes_probe()
1170 dev_err(dev, "Unable to grab omap-aes IRQ\n"); in omap_aes_probe()
1175 spin_lock_init(&dd->lock); in omap_aes_probe()
1177 INIT_LIST_HEAD(&dd->list); in omap_aes_probe()
1179 list_add_tail(&dd->list, &dev_list); in omap_aes_probe()
1183 dd->engine = crypto_engine_alloc_init(dev, 1); in omap_aes_probe()
1184 if (!dd->engine) { in omap_aes_probe()
1185 err = -ENOMEM; in omap_aes_probe()
1189 err = crypto_engine_start(dd->engine); in omap_aes_probe()
1193 for (i = 0; i < dd->pdata->algs_info_size; i++) { in omap_aes_probe()
1194 if (!dd->pdata->algs_info[i].registered) { in omap_aes_probe()
1195 for (j = 0; j < dd->pdata->algs_info[i].size; j++) { in omap_aes_probe()
1196 algp = &dd->pdata->algs_info[i].algs_list[j]; in omap_aes_probe()
1198 pr_debug("reg alg: %s\n", algp->base.cra_name); in omap_aes_probe()
1204 dd->pdata->algs_info[i].registered++; in omap_aes_probe()
1209 if (dd->pdata->aead_algs_info && in omap_aes_probe()
1210 !dd->pdata->aead_algs_info->registered) { in omap_aes_probe()
1211 for (i = 0; i < dd->pdata->aead_algs_info->size; i++) { in omap_aes_probe()
1212 aalg = &dd->pdata->aead_algs_info->algs_list[i]; in omap_aes_probe()
1214 pr_debug("reg alg: %s\n", aalg->base.cra_name); in omap_aes_probe()
1220 dd->pdata->aead_algs_info->registered++; in omap_aes_probe()
1224 err = sysfs_create_group(&dev->kobj, &omap_aes_attr_group); in omap_aes_probe()
1232 for (i = dd->pdata->aead_algs_info->registered - 1; i >= 0; i--) { in omap_aes_probe()
1233 aalg = &dd->pdata->aead_algs_info->algs_list[i]; in omap_aes_probe()
1237 for (i = dd->pdata->algs_info_size - 1; i >= 0; i--) in omap_aes_probe()
1238 for (j = dd->pdata->algs_info[i].registered - 1; j >= 0; j--) in omap_aes_probe()
1240 &dd->pdata->algs_info[i].algs_list[j]); in omap_aes_probe()
1243 if (dd->engine) in omap_aes_probe()
1244 crypto_engine_exit(dd->engine); in omap_aes_probe()
1248 tasklet_kill(&dd->done_task); in omap_aes_probe()
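omap_aes_probe() above ends by creating and starting a crypto engine, then registering each algorithm against it. The engine bring-up portion, sketched with the probe's error handling flattened into direct returns:

	dd->engine = crypto_engine_alloc_init(dev, 1);
	if (!dd->engine)
		return -ENOMEM;

	err = crypto_engine_start(dd->engine);
	if (err) {
		crypto_engine_exit(dd->engine);		/* tear the engine back down */
		return err;
	}

	/* algorithms registered after this point are dispatched through the
	 * engine via the prepare/do_one_request hooks set in omap_aes_init_tfm() */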
1265 return -ENODEV; in omap_aes_remove()
1268 list_del(&dd->list); in omap_aes_remove()
1271 for (i = dd->pdata->algs_info_size - 1; i >= 0; i--) in omap_aes_remove()
1272 for (j = dd->pdata->algs_info[i].registered - 1; j >= 0; j--) { in omap_aes_remove()
1274 &dd->pdata->algs_info[i].algs_list[j]); in omap_aes_remove()
1275 dd->pdata->algs_info[i].registered--; in omap_aes_remove()
1278 for (i = dd->pdata->aead_algs_info->registered - 1; i >= 0; i--) { in omap_aes_remove()
1279 aalg = &dd->pdata->aead_algs_info->algs_list[i]; in omap_aes_remove()
1281 dd->pdata->aead_algs_info->registered--; in omap_aes_remove()
1285 crypto_engine_exit(dd->engine); in omap_aes_remove()
1287 tasklet_kill(&dd->done_task); in omap_aes_remove()
1289 pm_runtime_disable(dd->dev); in omap_aes_remove()
1291 sysfs_remove_group(&dd->dev->kobj, &omap_aes_attr_group); in omap_aes_remove()
1316 .name = "omap-aes",
1324 MODULE_DESCRIPTION("OMAP AES hw acceleration support.");