Lines Matching refs:dd (all references to the struct atmel_aes_dev pointer dd in drivers/crypto/atmel-aes.c). Each entry lists the source line number, the matching code, the enclosing function, and whether dd is used there as a struct member, a function argument, or a local variable.
81 struct atmel_aes_dev *dd; member
213 static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset) in atmel_aes_read() argument
215 return readl_relaxed(dd->io_base + offset); in atmel_aes_read()
218 static inline void atmel_aes_write(struct atmel_aes_dev *dd, in atmel_aes_write() argument
221 writel_relaxed(value, dd->io_base + offset); in atmel_aes_write()
224 static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_read_n() argument
228 *value = atmel_aes_read(dd, offset); in atmel_aes_read_n()
231 static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_write_n() argument
235 atmel_aes_write(dd, offset, *value); in atmel_aes_write_n()
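The listing shows only the lines of each helper that mention dd; the burst accessors above are in all likelihood simple loops over count consecutive 32-bit registers. A minimal sketch, assuming the parameters cut off by the match are a word buffer and a word count:

static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset,
				u32 *value, int count)
{
	/* read count words from consecutive registers starting at offset */
	for (; count--; value++, offset += 4)
		*value = atmel_aes_read(dd, offset);
}

static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset,
				u32 *value, int count)
{
	/* write count words to consecutive registers starting at offset */
	for (; count--; value++, offset += 4)
		atmel_aes_write(dd, offset, *value);
}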
244 if (!ctx->dd) { in atmel_aes_find_dev()
249 ctx->dd = aes_dd; in atmel_aes_find_dev()
251 aes_dd = ctx->dd; in atmel_aes_find_dev()
259 static int atmel_aes_hw_init(struct atmel_aes_dev *dd) in atmel_aes_hw_init() argument
261 clk_prepare_enable(dd->iclk); in atmel_aes_hw_init()
263 if (!(dd->flags & AES_FLAGS_INIT)) { in atmel_aes_hw_init()
264 atmel_aes_write(dd, AES_CR, AES_CR_SWRST); in atmel_aes_hw_init()
265 atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET); in atmel_aes_hw_init()
266 dd->flags |= AES_FLAGS_INIT; in atmel_aes_hw_init()
267 dd->err = 0; in atmel_aes_hw_init()
273 static inline unsigned int atmel_aes_get_version(struct atmel_aes_dev *dd) in atmel_aes_get_version() argument
275 return atmel_aes_read(dd, AES_HW_VERSION) & 0x00000fff; in atmel_aes_get_version()
278 static void atmel_aes_hw_version_init(struct atmel_aes_dev *dd) in atmel_aes_hw_version_init() argument
280 atmel_aes_hw_init(dd); in atmel_aes_hw_version_init()
282 dd->hw_version = atmel_aes_get_version(dd); in atmel_aes_hw_version_init()
284 dev_info(dd->dev, in atmel_aes_hw_version_init()
285 "version: 0x%x\n", dd->hw_version); in atmel_aes_hw_version_init()
287 clk_disable_unprepare(dd->iclk); in atmel_aes_hw_version_init()
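Pieced together from the matched lines 261-287, the controller bring-up and the one-time version probe plausibly read as follows (a sketch, not the verbatim driver; the init helper is assumed to return 0 on success):

static int atmel_aes_hw_init(struct atmel_aes_dev *dd)
{
	clk_prepare_enable(dd->iclk);

	if (!(dd->flags & AES_FLAGS_INIT)) {
		/* software reset, then select a countermeasure key (line 265) */
		atmel_aes_write(dd, AES_CR, AES_CR_SWRST);
		atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET);
		dd->flags |= AES_FLAGS_INIT;
		dd->err = 0;
	}

	return 0;
}

static void atmel_aes_hw_version_init(struct atmel_aes_dev *dd)
{
	atmel_aes_hw_init(dd);

	/* the low 12 bits of AES_HW_VERSION identify the IP revision */
	dd->hw_version = atmel_aes_get_version(dd);

	dev_info(dd->dev, "version: 0x%x\n", dd->hw_version);

	clk_disable_unprepare(dd->iclk);
}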
290 static void atmel_aes_finish_req(struct atmel_aes_dev *dd, int err) in atmel_aes_finish_req() argument
292 struct ablkcipher_request *req = dd->req; in atmel_aes_finish_req()
294 clk_disable_unprepare(dd->iclk); in atmel_aes_finish_req()
295 dd->flags &= ~AES_FLAGS_BUSY; in atmel_aes_finish_req()
302 struct atmel_aes_dev *dd = data; in atmel_aes_dma_callback() local
305 tasklet_schedule(&dd->done_task); in atmel_aes_dma_callback()
308 static int atmel_aes_crypt_dma(struct atmel_aes_dev *dd, in atmel_aes_crypt_dma() argument
314 dd->dma_size = length; in atmel_aes_crypt_dma()
316 if (!(dd->flags & AES_FLAGS_FAST)) { in atmel_aes_crypt_dma()
317 dma_sync_single_for_device(dd->dev, dma_addr_in, length, in atmel_aes_crypt_dma()
321 if (dd->flags & AES_FLAGS_CFB8) { in atmel_aes_crypt_dma()
322 dd->dma_lch_in.dma_conf.dst_addr_width = in atmel_aes_crypt_dma()
324 dd->dma_lch_out.dma_conf.src_addr_width = in atmel_aes_crypt_dma()
326 } else if (dd->flags & AES_FLAGS_CFB16) { in atmel_aes_crypt_dma()
327 dd->dma_lch_in.dma_conf.dst_addr_width = in atmel_aes_crypt_dma()
329 dd->dma_lch_out.dma_conf.src_addr_width = in atmel_aes_crypt_dma()
332 dd->dma_lch_in.dma_conf.dst_addr_width = in atmel_aes_crypt_dma()
334 dd->dma_lch_out.dma_conf.src_addr_width = in atmel_aes_crypt_dma()
338 if (dd->flags & (AES_FLAGS_CFB8 | AES_FLAGS_CFB16 | in atmel_aes_crypt_dma()
340 dd->dma_lch_in.dma_conf.src_maxburst = 1; in atmel_aes_crypt_dma()
341 dd->dma_lch_in.dma_conf.dst_maxburst = 1; in atmel_aes_crypt_dma()
342 dd->dma_lch_out.dma_conf.src_maxburst = 1; in atmel_aes_crypt_dma()
343 dd->dma_lch_out.dma_conf.dst_maxburst = 1; in atmel_aes_crypt_dma()
345 dd->dma_lch_in.dma_conf.src_maxburst = dd->caps.max_burst_size; in atmel_aes_crypt_dma()
346 dd->dma_lch_in.dma_conf.dst_maxburst = dd->caps.max_burst_size; in atmel_aes_crypt_dma()
347 dd->dma_lch_out.dma_conf.src_maxburst = dd->caps.max_burst_size; in atmel_aes_crypt_dma()
348 dd->dma_lch_out.dma_conf.dst_maxburst = dd->caps.max_burst_size; in atmel_aes_crypt_dma()
351 dmaengine_slave_config(dd->dma_lch_in.chan, &dd->dma_lch_in.dma_conf); in atmel_aes_crypt_dma()
352 dmaengine_slave_config(dd->dma_lch_out.chan, &dd->dma_lch_out.dma_conf); in atmel_aes_crypt_dma()
354 dd->flags |= AES_FLAGS_DMA; in atmel_aes_crypt_dma()
364 in_desc = dmaengine_prep_slave_sg(dd->dma_lch_in.chan, &sg[0], in atmel_aes_crypt_dma()
370 out_desc = dmaengine_prep_slave_sg(dd->dma_lch_out.chan, &sg[1], in atmel_aes_crypt_dma()
377 out_desc->callback_param = dd; in atmel_aes_crypt_dma()
380 dma_async_issue_pending(dd->dma_lch_out.chan); in atmel_aes_crypt_dma()
383 dma_async_issue_pending(dd->dma_lch_in.chan); in atmel_aes_crypt_dma()
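Lines 351-383 outline the DMA fast path: reprogram both slave channels, wrap the already-mapped input and output addresses in single-entry scatterlists, attach the completion callback to the output descriptor only (it fires once the result has actually landed in memory), and start both channels. A sketch of that body fragment; the local sg[2] array, the prep flags, and the -EINVAL returns are assumptions:

	struct dma_async_tx_descriptor *in_desc, *out_desc;
	struct scatterlist sg[2];

	dmaengine_slave_config(dd->dma_lch_in.chan, &dd->dma_lch_in.dma_conf);
	dmaengine_slave_config(dd->dma_lch_out.chan, &dd->dma_lch_out.dma_conf);

	dd->flags |= AES_FLAGS_DMA;

	sg_init_table(sg, 2);
	sg_dma_address(&sg[0]) = dma_addr_in;
	sg_dma_len(&sg[0]) = length;
	sg_dma_address(&sg[1]) = dma_addr_out;
	sg_dma_len(&sg[1]) = length;

	in_desc = dmaengine_prep_slave_sg(dd->dma_lch_in.chan, &sg[0], 1,
			DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!in_desc)
		return -EINVAL;

	out_desc = dmaengine_prep_slave_sg(dd->dma_lch_out.chan, &sg[1], 1,
			DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!out_desc)
		return -EINVAL;

	/* only the output transfer completion signals "block done" */
	out_desc->callback = atmel_aes_dma_callback;
	out_desc->callback_param = dd;

	dmaengine_submit(out_desc);
	dma_async_issue_pending(dd->dma_lch_out.chan);

	dmaengine_submit(in_desc);
	dma_async_issue_pending(dd->dma_lch_in.chan);

	return 0;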
388 static int atmel_aes_crypt_cpu_start(struct atmel_aes_dev *dd) in atmel_aes_crypt_cpu_start() argument
390 dd->flags &= ~AES_FLAGS_DMA; in atmel_aes_crypt_cpu_start()
393 dd->nb_in_sg = atmel_aes_sg_length(dd->req, dd->in_sg); in atmel_aes_crypt_cpu_start()
394 if (!dd->nb_in_sg) in atmel_aes_crypt_cpu_start()
397 dd->nb_out_sg = atmel_aes_sg_length(dd->req, dd->out_sg); in atmel_aes_crypt_cpu_start()
398 if (!dd->nb_out_sg) in atmel_aes_crypt_cpu_start()
401 dd->bufcnt = sg_copy_to_buffer(dd->in_sg, dd->nb_in_sg, in atmel_aes_crypt_cpu_start()
402 dd->buf_in, dd->total); in atmel_aes_crypt_cpu_start()
404 if (!dd->bufcnt) in atmel_aes_crypt_cpu_start()
407 dd->total -= dd->bufcnt; in atmel_aes_crypt_cpu_start()
409 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_crypt_cpu_start()
410 atmel_aes_write_n(dd, AES_IDATAR(0), (u32 *) dd->buf_in, in atmel_aes_crypt_cpu_start()
411 dd->bufcnt >> 2); in atmel_aes_crypt_cpu_start()
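Lines 390-411 describe the PIO (CPU) path: drop the DMA flag, count the scatterlist entries on both sides, bounce the source data into the linear buf_in, then arm the data-ready interrupt and push whole 32-bit words into the input data registers (bufcnt >> 2 converts bytes to words). A sketch; the -EINVAL error returns are assumptions:

static int atmel_aes_crypt_cpu_start(struct atmel_aes_dev *dd)
{
	dd->flags &= ~AES_FLAGS_DMA;

	dd->nb_in_sg = atmel_aes_sg_length(dd->req, dd->in_sg);
	if (!dd->nb_in_sg)
		return -EINVAL;

	dd->nb_out_sg = atmel_aes_sg_length(dd->req, dd->out_sg);
	if (!dd->nb_out_sg)
		return -EINVAL;

	/* bounce the request payload into the driver's linear buffer */
	dd->bufcnt = sg_copy_to_buffer(dd->in_sg, dd->nb_in_sg,
				       dd->buf_in, dd->total);
	if (!dd->bufcnt)
		return -EINVAL;

	dd->total -= dd->bufcnt;

	/* interrupt once the core has produced output, then feed the words */
	atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
	atmel_aes_write_n(dd, AES_IDATAR(0), (u32 *)dd->buf_in,
			  dd->bufcnt >> 2);

	return 0;
}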
416 static int atmel_aes_crypt_dma_start(struct atmel_aes_dev *dd) in atmel_aes_crypt_dma_start() argument
422 if ((!dd->in_offset) && (!dd->out_offset)) { in atmel_aes_crypt_dma_start()
424 in = IS_ALIGNED((u32)dd->in_sg->offset, sizeof(u32)) && in atmel_aes_crypt_dma_start()
425 IS_ALIGNED(dd->in_sg->length, dd->ctx->block_size); in atmel_aes_crypt_dma_start()
426 out = IS_ALIGNED((u32)dd->out_sg->offset, sizeof(u32)) && in atmel_aes_crypt_dma_start()
427 IS_ALIGNED(dd->out_sg->length, dd->ctx->block_size); in atmel_aes_crypt_dma_start()
430 if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg)) in atmel_aes_crypt_dma_start()
436 count = min(dd->total, sg_dma_len(dd->in_sg)); in atmel_aes_crypt_dma_start()
437 count = min(count, sg_dma_len(dd->out_sg)); in atmel_aes_crypt_dma_start()
439 err = dma_map_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_aes_crypt_dma_start()
441 dev_err(dd->dev, "dma_map_sg() error\n"); in atmel_aes_crypt_dma_start()
445 err = dma_map_sg(dd->dev, dd->out_sg, 1, in atmel_aes_crypt_dma_start()
448 dev_err(dd->dev, "dma_map_sg() error\n"); in atmel_aes_crypt_dma_start()
449 dma_unmap_sg(dd->dev, dd->in_sg, 1, in atmel_aes_crypt_dma_start()
454 addr_in = sg_dma_address(dd->in_sg); in atmel_aes_crypt_dma_start()
455 addr_out = sg_dma_address(dd->out_sg); in atmel_aes_crypt_dma_start()
457 dd->flags |= AES_FLAGS_FAST; in atmel_aes_crypt_dma_start()
461 count = atmel_aes_sg_copy(&dd->in_sg, &dd->in_offset, in atmel_aes_crypt_dma_start()
462 dd->buf_in, dd->buflen, dd->total, 0); in atmel_aes_crypt_dma_start()
464 addr_in = dd->dma_addr_in; in atmel_aes_crypt_dma_start()
465 addr_out = dd->dma_addr_out; in atmel_aes_crypt_dma_start()
467 dd->flags &= ~AES_FLAGS_FAST; in atmel_aes_crypt_dma_start()
470 dd->total -= count; in atmel_aes_crypt_dma_start()
472 err = atmel_aes_crypt_dma(dd, addr_in, addr_out, count); in atmel_aes_crypt_dma_start()
474 if (err && (dd->flags & AES_FLAGS_FAST)) { in atmel_aes_crypt_dma_start()
475 dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_aes_crypt_dma_start()
476 dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_TO_DEVICE); in atmel_aes_crypt_dma_start()
482 static int atmel_aes_write_ctrl(struct atmel_aes_dev *dd) in atmel_aes_write_ctrl() argument
487 err = atmel_aes_hw_init(dd); in atmel_aes_write_ctrl()
493 if (dd->ctx->keylen == AES_KEYSIZE_128) in atmel_aes_write_ctrl()
495 else if (dd->ctx->keylen == AES_KEYSIZE_192) in atmel_aes_write_ctrl()
500 if (dd->flags & AES_FLAGS_CBC) { in atmel_aes_write_ctrl()
502 } else if (dd->flags & AES_FLAGS_CFB) { in atmel_aes_write_ctrl()
504 if (dd->flags & AES_FLAGS_CFB8) in atmel_aes_write_ctrl()
506 else if (dd->flags & AES_FLAGS_CFB16) in atmel_aes_write_ctrl()
508 else if (dd->flags & AES_FLAGS_CFB32) in atmel_aes_write_ctrl()
510 else if (dd->flags & AES_FLAGS_CFB64) in atmel_aes_write_ctrl()
512 else if (dd->flags & AES_FLAGS_CFB128) in atmel_aes_write_ctrl()
514 } else if (dd->flags & AES_FLAGS_OFB) { in atmel_aes_write_ctrl()
516 } else if (dd->flags & AES_FLAGS_CTR) { in atmel_aes_write_ctrl()
522 if (dd->flags & AES_FLAGS_ENCRYPT) in atmel_aes_write_ctrl()
525 if (dd->total > ATMEL_AES_DMA_THRESHOLD) { in atmel_aes_write_ctrl()
527 if (dd->caps.has_dualbuff) in atmel_aes_write_ctrl()
533 atmel_aes_write(dd, AES_CR, valcr); in atmel_aes_write_ctrl()
534 atmel_aes_write(dd, AES_MR, valmr); in atmel_aes_write_ctrl()
536 atmel_aes_write_n(dd, AES_KEYWR(0), dd->ctx->key, in atmel_aes_write_ctrl()
537 dd->ctx->keylen >> 2); in atmel_aes_write_ctrl()
539 if (((dd->flags & AES_FLAGS_CBC) || (dd->flags & AES_FLAGS_CFB) || in atmel_aes_write_ctrl()
540 (dd->flags & AES_FLAGS_OFB) || (dd->flags & AES_FLAGS_CTR)) && in atmel_aes_write_ctrl()
541 dd->req->info) { in atmel_aes_write_ctrl()
542 atmel_aes_write_n(dd, AES_IVR(0), dd->req->info, 4); in atmel_aes_write_ctrl()
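Two details worth calling out in lines 536-542: the key is loaded as keylen >> 2 words (4, 6, or 8 for AES-128/192/256, since keylen is in bytes), and the chained modes load a 16-byte IV as exactly 4 words, but only when the request actually carries one. Restated as a small annotated fragment of the same code:

	/* keylen is in bytes; the KEYWR registers take 32-bit words */
	atmel_aes_write_n(dd, AES_KEYWR(0), dd->ctx->key,
			  dd->ctx->keylen >> 2);

	/* the IV only matters for the chained modes, and may be absent */
	if (((dd->flags & AES_FLAGS_CBC) || (dd->flags & AES_FLAGS_CFB) ||
	     (dd->flags & AES_FLAGS_OFB) || (dd->flags & AES_FLAGS_CTR)) &&
	    dd->req->info) {
		atmel_aes_write_n(dd, AES_IVR(0), dd->req->info, 4);
	}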
548 static int atmel_aes_handle_queue(struct atmel_aes_dev *dd, in atmel_aes_handle_queue() argument
557 spin_lock_irqsave(&dd->lock, flags); in atmel_aes_handle_queue()
559 ret = ablkcipher_enqueue_request(&dd->queue, req); in atmel_aes_handle_queue()
560 if (dd->flags & AES_FLAGS_BUSY) { in atmel_aes_handle_queue()
561 spin_unlock_irqrestore(&dd->lock, flags); in atmel_aes_handle_queue()
564 backlog = crypto_get_backlog(&dd->queue); in atmel_aes_handle_queue()
565 async_req = crypto_dequeue_request(&dd->queue); in atmel_aes_handle_queue()
567 dd->flags |= AES_FLAGS_BUSY; in atmel_aes_handle_queue()
568 spin_unlock_irqrestore(&dd->lock, flags); in atmel_aes_handle_queue()
579 dd->req = req; in atmel_aes_handle_queue()
580 dd->total = req->nbytes; in atmel_aes_handle_queue()
581 dd->in_offset = 0; in atmel_aes_handle_queue()
582 dd->in_sg = req->src; in atmel_aes_handle_queue()
583 dd->out_offset = 0; in atmel_aes_handle_queue()
584 dd->out_sg = req->dst; in atmel_aes_handle_queue()
589 dd->flags = (dd->flags & ~AES_FLAGS_MODE_MASK) | rctx->mode; in atmel_aes_handle_queue()
590 dd->ctx = ctx; in atmel_aes_handle_queue()
591 ctx->dd = dd; in atmel_aes_handle_queue()
593 err = atmel_aes_write_ctrl(dd); in atmel_aes_handle_queue()
595 if (dd->total > ATMEL_AES_DMA_THRESHOLD) in atmel_aes_handle_queue()
596 err = atmel_aes_crypt_dma_start(dd); in atmel_aes_handle_queue()
598 err = atmel_aes_crypt_cpu_start(dd); in atmel_aes_handle_queue()
602 atmel_aes_finish_req(dd, err); in atmel_aes_handle_queue()
603 tasklet_schedule(&dd->queue_task); in atmel_aes_handle_queue()
609 static int atmel_aes_crypt_dma_stop(struct atmel_aes_dev *dd) in atmel_aes_crypt_dma_stop() argument
614 if (dd->flags & AES_FLAGS_DMA) { in atmel_aes_crypt_dma_stop()
616 if (dd->flags & AES_FLAGS_FAST) { in atmel_aes_crypt_dma_stop()
617 dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_FROM_DEVICE); in atmel_aes_crypt_dma_stop()
618 dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_aes_crypt_dma_stop()
620 dma_sync_single_for_device(dd->dev, dd->dma_addr_out, in atmel_aes_crypt_dma_stop()
621 dd->dma_size, DMA_FROM_DEVICE); in atmel_aes_crypt_dma_stop()
624 count = atmel_aes_sg_copy(&dd->out_sg, &dd->out_offset, in atmel_aes_crypt_dma_stop()
625 dd->buf_out, dd->buflen, dd->dma_size, 1); in atmel_aes_crypt_dma_stop()
626 if (count != dd->dma_size) { in atmel_aes_crypt_dma_stop()
637 static int atmel_aes_buff_init(struct atmel_aes_dev *dd) in atmel_aes_buff_init() argument
641 dd->buf_in = (void *)__get_free_pages(GFP_KERNEL, 0); in atmel_aes_buff_init()
642 dd->buf_out = (void *)__get_free_pages(GFP_KERNEL, 0); in atmel_aes_buff_init()
643 dd->buflen = PAGE_SIZE; in atmel_aes_buff_init()
644 dd->buflen &= ~(AES_BLOCK_SIZE - 1); in atmel_aes_buff_init()
646 if (!dd->buf_in || !dd->buf_out) { in atmel_aes_buff_init()
647 dev_err(dd->dev, "unable to alloc pages.\n"); in atmel_aes_buff_init()
652 dd->dma_addr_in = dma_map_single(dd->dev, dd->buf_in, in atmel_aes_buff_init()
653 dd->buflen, DMA_TO_DEVICE); in atmel_aes_buff_init()
654 if (dma_mapping_error(dd->dev, dd->dma_addr_in)) { in atmel_aes_buff_init()
655 dev_err(dd->dev, "dma %d bytes error\n", dd->buflen); in atmel_aes_buff_init()
660 dd->dma_addr_out = dma_map_single(dd->dev, dd->buf_out, in atmel_aes_buff_init()
661 dd->buflen, DMA_FROM_DEVICE); in atmel_aes_buff_init()
662 if (dma_mapping_error(dd->dev, dd->dma_addr_out)) { in atmel_aes_buff_init()
663 dev_err(dd->dev, "dma %d bytes error\n", dd->buflen); in atmel_aes_buff_init()
671 dma_unmap_single(dd->dev, dd->dma_addr_in, dd->buflen, in atmel_aes_buff_init()
674 free_page((unsigned long)dd->buf_out); in atmel_aes_buff_init()
675 free_page((unsigned long)dd->buf_in); in atmel_aes_buff_init()
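Lines 641-675 set up the bounce buffers used whenever a request is not DMA-friendly: one page in each direction, the usable length rounded down to a multiple of AES_BLOCK_SIZE, and streaming DMA mappings with unwinding on failure. A sketch; the goto labels and the error code are assumptions:

static int atmel_aes_buff_init(struct atmel_aes_dev *dd)
{
	dd->buf_in = (void *)__get_free_pages(GFP_KERNEL, 0);
	dd->buf_out = (void *)__get_free_pages(GFP_KERNEL, 0);
	dd->buflen = PAGE_SIZE;
	dd->buflen &= ~(AES_BLOCK_SIZE - 1);	/* whole AES blocks only */

	if (!dd->buf_in || !dd->buf_out) {
		dev_err(dd->dev, "unable to alloc pages.\n");
		goto err_alloc;
	}

	/* streaming mappings for the CPU-filled bounce buffers */
	dd->dma_addr_in = dma_map_single(dd->dev, dd->buf_in,
					 dd->buflen, DMA_TO_DEVICE);
	if (dma_mapping_error(dd->dev, dd->dma_addr_in)) {
		dev_err(dd->dev, "dma %d bytes error\n", dd->buflen);
		goto err_map_in;
	}

	dd->dma_addr_out = dma_map_single(dd->dev, dd->buf_out,
					  dd->buflen, DMA_FROM_DEVICE);
	if (dma_mapping_error(dd->dev, dd->dma_addr_out)) {
		dev_err(dd->dev, "dma %d bytes error\n", dd->buflen);
		goto err_map_out;
	}

	return 0;

err_map_out:
	dma_unmap_single(dd->dev, dd->dma_addr_in, dd->buflen,
			 DMA_TO_DEVICE);
err_map_in:
err_alloc:
	free_page((unsigned long)dd->buf_out);
	free_page((unsigned long)dd->buf_in);
	return -ENOMEM;
}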
682 static void atmel_aes_buff_cleanup(struct atmel_aes_dev *dd) in atmel_aes_buff_cleanup() argument
684 dma_unmap_single(dd->dev, dd->dma_addr_out, dd->buflen, in atmel_aes_buff_cleanup()
686 dma_unmap_single(dd->dev, dd->dma_addr_in, dd->buflen, in atmel_aes_buff_cleanup()
688 free_page((unsigned long)dd->buf_out); in atmel_aes_buff_cleanup()
689 free_page((unsigned long)dd->buf_in); in atmel_aes_buff_cleanup()
697 struct atmel_aes_dev *dd; in atmel_aes_crypt() local
725 dd = atmel_aes_find_dev(ctx); in atmel_aes_crypt()
726 if (!dd) in atmel_aes_crypt()
731 return atmel_aes_handle_queue(dd, req); in atmel_aes_crypt()
746 static int atmel_aes_dma_init(struct atmel_aes_dev *dd, in atmel_aes_dma_init() argument
759 dd->dma_lch_in.chan = dma_request_channel(mask_in, in atmel_aes_dma_init()
762 if (!dd->dma_lch_in.chan) in atmel_aes_dma_init()
765 dd->dma_lch_in.dma_conf.direction = DMA_MEM_TO_DEV; in atmel_aes_dma_init()
766 dd->dma_lch_in.dma_conf.dst_addr = dd->phys_base + in atmel_aes_dma_init()
768 dd->dma_lch_in.dma_conf.src_maxburst = dd->caps.max_burst_size; in atmel_aes_dma_init()
769 dd->dma_lch_in.dma_conf.src_addr_width = in atmel_aes_dma_init()
771 dd->dma_lch_in.dma_conf.dst_maxburst = dd->caps.max_burst_size; in atmel_aes_dma_init()
772 dd->dma_lch_in.dma_conf.dst_addr_width = in atmel_aes_dma_init()
774 dd->dma_lch_in.dma_conf.device_fc = false; in atmel_aes_dma_init()
778 dd->dma_lch_out.chan = dma_request_channel(mask_out, in atmel_aes_dma_init()
781 if (!dd->dma_lch_out.chan) in atmel_aes_dma_init()
784 dd->dma_lch_out.dma_conf.direction = DMA_DEV_TO_MEM; in atmel_aes_dma_init()
785 dd->dma_lch_out.dma_conf.src_addr = dd->phys_base + in atmel_aes_dma_init()
787 dd->dma_lch_out.dma_conf.src_maxburst = dd->caps.max_burst_size; in atmel_aes_dma_init()
788 dd->dma_lch_out.dma_conf.src_addr_width = in atmel_aes_dma_init()
790 dd->dma_lch_out.dma_conf.dst_maxburst = dd->caps.max_burst_size; in atmel_aes_dma_init()
791 dd->dma_lch_out.dma_conf.dst_addr_width = in atmel_aes_dma_init()
793 dd->dma_lch_out.dma_conf.device_fc = false; in atmel_aes_dma_init()
801 dma_release_channel(dd->dma_lch_in.chan); in atmel_aes_dma_init()
806 static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd) in atmel_aes_dma_cleanup() argument
808 dma_release_channel(dd->dma_lch_in.chan); in atmel_aes_dma_cleanup()
809 dma_release_channel(dd->dma_lch_out.chan); in atmel_aes_dma_cleanup()
1142 struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data; in atmel_aes_queue_task() local
1144 atmel_aes_handle_queue(dd, NULL); in atmel_aes_queue_task()
1149 struct atmel_aes_dev *dd = (struct atmel_aes_dev *) data; in atmel_aes_done_task() local
1152 if (!(dd->flags & AES_FLAGS_DMA)) { in atmel_aes_done_task()
1153 atmel_aes_read_n(dd, AES_ODATAR(0), (u32 *) dd->buf_out, in atmel_aes_done_task()
1154 dd->bufcnt >> 2); in atmel_aes_done_task()
1156 if (sg_copy_from_buffer(dd->out_sg, dd->nb_out_sg, in atmel_aes_done_task()
1157 dd->buf_out, dd->bufcnt)) in atmel_aes_done_task()
1165 err = atmel_aes_crypt_dma_stop(dd); in atmel_aes_done_task()
1167 err = dd->err ? : err; in atmel_aes_done_task()
1169 if (dd->total && !err) { in atmel_aes_done_task()
1170 if (dd->flags & AES_FLAGS_FAST) { in atmel_aes_done_task()
1171 dd->in_sg = sg_next(dd->in_sg); in atmel_aes_done_task()
1172 dd->out_sg = sg_next(dd->out_sg); in atmel_aes_done_task()
1173 if (!dd->in_sg || !dd->out_sg) in atmel_aes_done_task()
1177 err = atmel_aes_crypt_dma_start(dd); in atmel_aes_done_task()
1183 atmel_aes_finish_req(dd, err); in atmel_aes_done_task()
1184 atmel_aes_handle_queue(dd, NULL); in atmel_aes_done_task()
1205 static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd) in atmel_aes_unregister_algs() argument
1211 if (dd->caps.has_cfb64) in atmel_aes_unregister_algs()
1215 static int atmel_aes_register_algs(struct atmel_aes_dev *dd) in atmel_aes_register_algs() argument
1225 if (dd->caps.has_cfb64) { in atmel_aes_register_algs()
1242 static void atmel_aes_get_cap(struct atmel_aes_dev *dd) in atmel_aes_get_cap() argument
1244 dd->caps.has_dualbuff = 0; in atmel_aes_get_cap()
1245 dd->caps.has_cfb64 = 0; in atmel_aes_get_cap()
1246 dd->caps.max_burst_size = 1; in atmel_aes_get_cap()
1249 switch (dd->hw_version & 0xff0) { in atmel_aes_get_cap()
1251 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
1252 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
1253 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
1258 dev_warn(dd->dev, in atmel_aes_get_cap()
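Putting lines 1244-1258 together: capability detection starts from conservative defaults (no dual buffer, no CFB64, burst size 1) and then upgrades them based on the upper digits of the IP revision, warning when the revision is unrecognised. A sketch; the concrete case value and the warning text are assumptions, not part of the listing:

static void atmel_aes_get_cap(struct atmel_aes_dev *dd)
{
	/* conservative defaults for unknown IP revisions */
	dd->caps.has_dualbuff = 0;
	dd->caps.has_cfb64 = 0;
	dd->caps.max_burst_size = 1;

	/* keep only the major/minor digits of the version register */
	switch (dd->hw_version & 0xff0) {
	case 0x130:	/* hypothetical revision value, for illustration */
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.max_burst_size = 4;
		break;
	default:
		dev_warn(dd->dev,
			 "unmanaged aes version, set minimum capabilities\n");
		break;
	}
}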