Lines Matching refs:hdev

85 struct img_hash_dev *hdev; member
112 struct img_hash_dev *hdev; member
149 static inline u32 img_hash_read(struct img_hash_dev *hdev, u32 offset) in img_hash_read() argument
151 return readl_relaxed(hdev->io_base + offset); in img_hash_read()
154 static inline void img_hash_write(struct img_hash_dev *hdev, in img_hash_write() argument
157 writel_relaxed(value, hdev->io_base + offset); in img_hash_write()
160 static inline u32 img_hash_read_result_queue(struct img_hash_dev *hdev) in img_hash_read_result_queue() argument
162 return be32_to_cpu(img_hash_read(hdev, CR_RESULT_QUEUE)); in img_hash_read_result_queue()
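
Read together, the matches at 149-162 fully determine the three MMIO accessors. Reconstructed below; CR_RESULT_QUEUE is a register offset defined elsewhere in the driver:

static inline u32 img_hash_read(struct img_hash_dev *hdev, u32 offset)
{
	return readl_relaxed(hdev->io_base + offset);
}

static inline void img_hash_write(struct img_hash_dev *hdev,
				  u32 offset, u32 value)
{
	writel_relaxed(value, hdev->io_base + offset);
}

/* Result words are produced big-endian; convert to CPU byte order. */
static inline u32 img_hash_read_result_queue(struct img_hash_dev *hdev)
{
	return be32_to_cpu(img_hash_read(hdev, CR_RESULT_QUEUE));
}
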
165 static void img_hash_start(struct img_hash_dev *hdev, bool dma) in img_hash_start() argument
167 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req); in img_hash_start()
178 dev_dbg(hdev->dev, "Starting hash process\n"); in img_hash_start()
179 img_hash_write(hdev, CR_CONTROL, cr); in img_hash_start()
188 img_hash_read(hdev, CR_CONTROL); in img_hash_start()
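
A minimal sketch of img_hash_start() consistent with the fragments at 165-188; the algorithm-select and byte-order bits OR'd into cr are elided, since their names are not visible in the matches:

static void img_hash_start(struct img_hash_dev *hdev, bool dma)
{
	struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);
	u32 cr = 0;

	/* Algorithm and byte-order control bits (not shown in the
	 * matches above) are OR'd into cr here based on ctx->flags. */

	dev_dbg(hdev->dev, "Starting hash process\n");
	img_hash_write(hdev, CR_CONTROL, cr);

	/*
	 * When the data will be fed by the CPU, read CR_CONTROL back so
	 * the posted write is guaranteed to have reached the device
	 * before PIO begins.
	 */
	if (!dma)
		img_hash_read(hdev, CR_CONTROL);
}
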
191 static int img_hash_xmit_cpu(struct img_hash_dev *hdev, const u8 *buf, in img_hash_xmit_cpu() argument
197 dev_dbg(hdev->dev, "xmit_cpu: length: %zu bytes\n", length); in img_hash_xmit_cpu()
200 hdev->flags |= DRIVER_FLAGS_FINAL; in img_hash_xmit_cpu()
205 writel_relaxed(buffer[count], hdev->cpu_addr); in img_hash_xmit_cpu()
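
The PIO path at 191-205 pushes the buffer to the engine's write port one 32-bit word at a time. A filled-in sketch; the rounding of length up to whole words is an assumption:

static int img_hash_xmit_cpu(struct img_hash_dev *hdev, const u8 *buf,
			     size_t length, int final)
{
	u32 count, len32;
	const u32 *buffer = (const u32 *)buf;

	dev_dbg(hdev->dev, "xmit_cpu: length: %zu bytes\n", length);

	if (final)
		hdev->flags |= DRIVER_FLAGS_FINAL;

	/* Round up so a trailing partial word is still written out. */
	len32 = DIV_ROUND_UP(length, sizeof(u32));

	for (count = 0; count < len32; count++)
		writel_relaxed(buffer[count], hdev->cpu_addr);

	return 0;
}
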
212 struct img_hash_dev *hdev = (struct img_hash_dev *)data; in img_hash_dma_callback() local
213 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req); in img_hash_dma_callback()
216 img_hash_xmit_cpu(hdev, ctx->buffer, ctx->bufcnt, 0); in img_hash_dma_callback()
220 tasklet_schedule(&hdev->dma_task); in img_hash_dma_callback()
223 static int img_hash_xmit_dma(struct img_hash_dev *hdev, struct scatterlist *sg) in img_hash_xmit_dma() argument
226 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req); in img_hash_xmit_dma()
228 ctx->dma_ct = dma_map_sg(hdev->dev, sg, 1, DMA_TO_DEVICE); in img_hash_xmit_dma()
230 dev_err(hdev->dev, "Invalid DMA sg\n"); in img_hash_xmit_dma()
231 hdev->err = -EINVAL; in img_hash_xmit_dma()
235 desc = dmaengine_prep_slave_sg(hdev->dma_lch, in img_hash_xmit_dma()
241 dev_err(hdev->dev, "Null DMA descriptor\n"); in img_hash_xmit_dma()
242 hdev->err = -EINVAL; in img_hash_xmit_dma()
243 dma_unmap_sg(hdev->dev, sg, 1, DMA_TO_DEVICE); in img_hash_xmit_dma()
247 desc->callback_param = hdev; in img_hash_xmit_dma()
249 dma_async_issue_pending(hdev->dma_lch); in img_hash_xmit_dma()
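
The DMA submit path at 223-249 follows the standard dmaengine pattern: map the scatterlist, prepare a slave-sg descriptor, hook the completion callback, submit, and kick the channel. Filled in below; the prep flags are assumptions:

static int img_hash_xmit_dma(struct img_hash_dev *hdev, struct scatterlist *sg)
{
	struct dma_async_tx_descriptor *desc;
	struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);

	ctx->dma_ct = dma_map_sg(hdev->dev, sg, 1, DMA_TO_DEVICE);
	if (ctx->dma_ct == 0) {
		dev_err(hdev->dev, "Invalid DMA sg\n");
		hdev->err = -EINVAL;
		return -EINVAL;
	}

	desc = dmaengine_prep_slave_sg(hdev->dma_lch,
				       sg,
				       ctx->dma_ct,
				       DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc) {
		dev_err(hdev->dev, "Null DMA descriptor\n");
		hdev->err = -EINVAL;
		dma_unmap_sg(hdev->dev, sg, 1, DMA_TO_DEVICE);
		return -EINVAL;
	}
	desc->callback = img_hash_dma_callback;
	desc->callback_param = hdev;
	dmaengine_submit(desc);
	dma_async_issue_pending(hdev->dma_lch);

	return 0;
}
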
254 static int img_hash_write_via_cpu(struct img_hash_dev *hdev) in img_hash_write_via_cpu() argument
256 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req); in img_hash_write_via_cpu()
258 ctx->bufcnt = sg_copy_to_buffer(hdev->req->src, sg_nents(ctx->sg), in img_hash_write_via_cpu()
259 ctx->buffer, hdev->req->nbytes); in img_hash_write_via_cpu()
261 ctx->total = hdev->req->nbytes; in img_hash_write_via_cpu()
264 hdev->flags |= (DRIVER_FLAGS_CPU | DRIVER_FLAGS_FINAL); in img_hash_write_via_cpu()
266 img_hash_start(hdev, false); in img_hash_write_via_cpu()
268 return img_hash_xmit_cpu(hdev, ctx->buffer, ctx->total, 1); in img_hash_write_via_cpu()
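
From 254-268, the CPU write path flattens the request's scatterlist into the context bounce buffer and streams it in a single final transfer:

static int img_hash_write_via_cpu(struct img_hash_dev *hdev)
{
	struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);

	/* Collapse the source scatterlist into one contiguous buffer. */
	ctx->bufcnt = sg_copy_to_buffer(hdev->req->src, sg_nents(ctx->sg),
					ctx->buffer, hdev->req->nbytes);

	ctx->total = hdev->req->nbytes;

	hdev->flags |= (DRIVER_FLAGS_CPU | DRIVER_FLAGS_FINAL);

	img_hash_start(hdev, false);

	return img_hash_xmit_cpu(hdev, ctx->buffer, ctx->total, 1);
}
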
290 hash[i] = img_hash_read_result_queue(ctx->hdev); in img_hash_copy_hash()
296 struct img_hash_dev *hdev = ctx->hdev; in img_hash_finish_req() local
300 if (DRIVER_FLAGS_FINAL & hdev->flags) in img_hash_finish_req()
303 dev_warn(hdev->dev, "Hash failed with error %d\n", err); in img_hash_finish_req()
307 hdev->flags &= ~(DRIVER_FLAGS_DMA_READY | DRIVER_FLAGS_OUTPUT_READY | in img_hash_finish_req()
314 static int img_hash_write_via_dma(struct img_hash_dev *hdev) in img_hash_write_via_dma() argument
316 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req); in img_hash_write_via_dma()
318 img_hash_start(hdev, true); in img_hash_write_via_dma()
320 dev_dbg(hdev->dev, "xmit dma size: %d\n", ctx->total); in img_hash_write_via_dma()
323 hdev->flags |= DRIVER_FLAGS_FINAL; in img_hash_write_via_dma()
325 hdev->flags |= DRIVER_FLAGS_DMA_ACTIVE | DRIVER_FLAGS_FINAL; in img_hash_write_via_dma()
327 tasklet_schedule(&hdev->dma_task); in img_hash_write_via_dma()
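
The DMA write path at 314-327 only arms the state machine and defers the actual submission to the dma_task tasklet, returning -EINPROGRESS so the crypto API treats the request as asynchronous. A sketch; the empty-request branch is inferred from the two FINAL-flag variants at 323 and 325:

static int img_hash_write_via_dma(struct img_hash_dev *hdev)
{
	struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);

	img_hash_start(hdev, true);

	dev_dbg(hdev->dev, "xmit dma size: %d\n", ctx->total);

	if (!ctx->total)
		hdev->flags |= DRIVER_FLAGS_FINAL;
	else
		hdev->flags |= DRIVER_FLAGS_DMA_ACTIVE | DRIVER_FLAGS_FINAL;

	/* The tasklet does the dma_map_sg()/submit work. */
	tasklet_schedule(&hdev->dma_task);

	return -EINPROGRESS;
}
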
332 static int img_hash_dma_init(struct img_hash_dev *hdev) in img_hash_dma_init() argument
337 hdev->dma_lch = dma_request_chan(hdev->dev, "tx"); in img_hash_dma_init()
338 if (IS_ERR(hdev->dma_lch)) { in img_hash_dma_init()
339 dev_err(hdev->dev, "Couldn't acquire a slave DMA channel.\n"); in img_hash_dma_init()
340 return PTR_ERR(hdev->dma_lch); in img_hash_dma_init()
343 dma_conf.dst_addr = hdev->bus_addr; in img_hash_dma_init()
348 err = dmaengine_slave_config(hdev->dma_lch, &dma_conf); in img_hash_dma_init()
350 dev_err(hdev->dev, "Couldn't configure DMA slave.\n"); in img_hash_dma_init()
351 dma_release_channel(hdev->dma_lch); in img_hash_dma_init()
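
DMA setup at 332-351 requests the "tx" slave channel and points it at the device's bus-address write port. A filled-in sketch; the bus width and the burst constant (named IMG_HASH_DMA_BURST here) are assumptions:

static int img_hash_dma_init(struct img_hash_dev *hdev)
{
	struct dma_slave_config dma_conf = {};
	int err;

	hdev->dma_lch = dma_request_chan(hdev->dev, "tx");
	if (IS_ERR(hdev->dma_lch)) {
		dev_err(hdev->dev, "Couldn't acquire a slave DMA channel.\n");
		return PTR_ERR(hdev->dma_lch);
	}

	dma_conf.direction = DMA_MEM_TO_DEV;
	dma_conf.dst_addr = hdev->bus_addr;
	dma_conf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	dma_conf.dst_maxburst = IMG_HASH_DMA_BURST;	/* assumed constant */
	dma_conf.device_fc = false;

	err = dmaengine_slave_config(hdev->dma_lch, &dma_conf);
	if (err) {
		dev_err(hdev->dev, "Couldn't configure DMA slave.\n");
		dma_release_channel(hdev->dma_lch);
		return err;
	}

	return 0;
}
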
360 struct img_hash_dev *hdev = (struct img_hash_dev *)d; in img_hash_dma_task() local
366 if (!hdev->req) in img_hash_dma_task()
369 ctx = ahash_request_ctx(hdev->req); in img_hash_dma_task()
391 if (img_hash_xmit_dma(hdev, &tsg)) { in img_hash_dma_task()
392 dev_err(hdev->dev, "DMA failed, falling back to CPU"); in img_hash_dma_task()
394 hdev->err = 0; in img_hash_dma_task()
395 img_hash_xmit_cpu(hdev, addr + ctx->offset, in img_hash_dma_task()
427 img_hash_dma_callback(hdev); in img_hash_dma_task()
434 static int img_hash_write_via_dma_stop(struct img_hash_dev *hdev) in img_hash_write_via_dma_stop() argument
436 struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req); in img_hash_write_via_dma_stop()
439 dma_unmap_sg(hdev->dev, ctx->sg, ctx->dma_ct, DMA_TO_DEVICE); in img_hash_write_via_dma_stop()
444 static int img_hash_process_data(struct img_hash_dev *hdev) in img_hash_process_data() argument
446 struct ahash_request *req = hdev->req; in img_hash_process_data()
453 dev_dbg(hdev->dev, "process data request(%d bytes) using DMA\n", in img_hash_process_data()
455 err = img_hash_write_via_dma(hdev); in img_hash_process_data()
457 dev_dbg(hdev->dev, "process data request(%d bytes) using CPU\n", in img_hash_process_data()
459 err = img_hash_write_via_cpu(hdev); in img_hash_process_data()
464 static int img_hash_hw_init(struct img_hash_dev *hdev) in img_hash_hw_init() argument
469 img_hash_write(hdev, CR_RESET, CR_RESET_SET); in img_hash_hw_init()
470 img_hash_write(hdev, CR_RESET, CR_RESET_UNSET); in img_hash_hw_init()
471 img_hash_write(hdev, CR_INTENAB, CR_INT_NEW_RESULTS_SET); in img_hash_hw_init()
473 nbits = (u64)hdev->req->nbytes << 3; in img_hash_hw_init()
476 img_hash_write(hdev, CR_MESSAGE_LENGTH_H, u); in img_hash_hw_init()
477 img_hash_write(hdev, CR_MESSAGE_LENGTH_L, l); in img_hash_hw_init()
479 if (!(DRIVER_FLAGS_INIT & hdev->flags)) { in img_hash_hw_init()
480 hdev->flags |= DRIVER_FLAGS_INIT; in img_hash_hw_init()
481 hdev->err = 0; in img_hash_hw_init()
483 dev_dbg(hdev->dev, "hw initialized, nbits: %llx\n", nbits); in img_hash_hw_init()
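
Hardware init at 464-483 pulses the reset register, enables the new-results interrupt, and programs the total message length in bits, split across two 32-bit registers; the high/low split is the only detail filled in here:

static int img_hash_hw_init(struct img_hash_dev *hdev)
{
	unsigned long long nbits;
	u32 u, l;

	img_hash_write(hdev, CR_RESET, CR_RESET_SET);
	img_hash_write(hdev, CR_RESET, CR_RESET_UNSET);
	img_hash_write(hdev, CR_INTENAB, CR_INT_NEW_RESULTS_SET);

	/* The hardware takes the message length in bits. */
	nbits = (u64)hdev->req->nbytes << 3;
	u = nbits >> 32;
	l = nbits;
	img_hash_write(hdev, CR_MESSAGE_LENGTH_H, u);
	img_hash_write(hdev, CR_MESSAGE_LENGTH_L, l);

	if (!(DRIVER_FLAGS_INIT & hdev->flags)) {
		hdev->flags |= DRIVER_FLAGS_INIT;
		hdev->err = 0;
	}
	dev_dbg(hdev->dev, "hw initialized, nbits: %llx\n", nbits);
	return 0;
}
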
500 static int img_hash_handle_queue(struct img_hash_dev *hdev, in img_hash_handle_queue() argument
508 spin_lock_irqsave(&hdev->lock, flags); in img_hash_handle_queue()
511 res = ahash_enqueue_request(&hdev->queue, req); in img_hash_handle_queue()
513 if (DRIVER_FLAGS_BUSY & hdev->flags) { in img_hash_handle_queue()
514 spin_unlock_irqrestore(&hdev->lock, flags); in img_hash_handle_queue()
518 backlog = crypto_get_backlog(&hdev->queue); in img_hash_handle_queue()
519 async_req = crypto_dequeue_request(&hdev->queue); in img_hash_handle_queue()
521 hdev->flags |= DRIVER_FLAGS_BUSY; in img_hash_handle_queue()
523 spin_unlock_irqrestore(&hdev->lock, flags); in img_hash_handle_queue()
532 hdev->req = req; in img_hash_handle_queue()
536 dev_info(hdev->dev, "processing req, op: %lu, bytes: %d\n", in img_hash_handle_queue()
539 err = img_hash_hw_init(hdev); in img_hash_handle_queue()
542 err = img_hash_process_data(hdev); in img_hash_handle_queue()
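
The dispatcher at 500-542 is the usual crypto-queue pattern: enqueue under the lock, bail out if the engine is busy, otherwise dequeue the next request, mark the device busy, and start the hardware outside the lock. A sketch of the skeleton; the backlog notification and the error hand-off to img_hash_finish_req() are inferred, not shown in the matches:

static int img_hash_handle_queue(struct img_hash_dev *hdev,
				 struct ahash_request *req)
{
	struct crypto_async_request *async_req, *backlog;
	unsigned long flags;
	int err = 0, res = 0;

	spin_lock_irqsave(&hdev->lock, flags);
	if (req)
		res = ahash_enqueue_request(&hdev->queue, req);

	if (DRIVER_FLAGS_BUSY & hdev->flags) {
		spin_unlock_irqrestore(&hdev->lock, flags);
		return res;
	}

	backlog = crypto_get_backlog(&hdev->queue);
	async_req = crypto_dequeue_request(&hdev->queue);
	if (async_req)
		hdev->flags |= DRIVER_FLAGS_BUSY;
	spin_unlock_irqrestore(&hdev->lock, flags);

	if (!async_req)
		return res;

	if (backlog)	/* backlog->complete() on older kernels */
		crypto_request_complete(backlog, -EINPROGRESS);

	req = ahash_request_cast(async_req);
	hdev->req = req;

	err = img_hash_hw_init(hdev);
	if (!err)
		err = img_hash_process_data(hdev);

	if (err != -EINPROGRESS)
		img_hash_finish_req(req, err);	/* done_task won't run */

	return res;
}
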
627 struct img_hash_dev *hdev = NULL; in img_hash_digest() local
632 if (!tctx->hdev) { in img_hash_digest()
634 hdev = tmp; in img_hash_digest()
637 tctx->hdev = hdev; in img_hash_digest()
640 hdev = tctx->hdev; in img_hash_digest()
644 ctx->hdev = hdev; in img_hash_digest()
673 err = img_hash_handle_queue(tctx->hdev, req); in img_hash_digest()
730 struct img_hash_dev *hdev = dev_id; in img_irq_handler() local
733 reg = img_hash_read(hdev, CR_INTSTAT); in img_irq_handler()
734 img_hash_write(hdev, CR_INTCLEAR, reg); in img_irq_handler()
737 dev_dbg(hdev->dev, "IRQ CR_INT_NEW_RESULTS_SET\n"); in img_irq_handler()
738 if (DRIVER_FLAGS_BUSY & hdev->flags) { in img_irq_handler()
739 hdev->flags |= DRIVER_FLAGS_OUTPUT_READY; in img_irq_handler()
740 if (!(DRIVER_FLAGS_CPU & hdev->flags)) in img_irq_handler()
741 hdev->flags |= DRIVER_FLAGS_DMA_READY; in img_irq_handler()
742 tasklet_schedule(&hdev->done_task); in img_irq_handler()
744 dev_warn(hdev->dev, in img_irq_handler()
748 dev_warn(hdev->dev, in img_irq_handler()
751 dev_warn(hdev->dev, in img_irq_handler()
754 dev_warn(hdev->dev, in img_irq_handler()
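
The interrupt handler at 730-754 reads and clears the raw status, then, for a new-results interrupt while a request is in flight, latches OUTPUT_READY (plus DMA_READY on the DMA path) and defers completion to the done_task tasklet. Abbreviated sketch; the remaining status branches at 744-754 only emit dev_warn() diagnostics and are folded into one here:

static irqreturn_t img_irq_handler(int irq, void *dev_id)
{
	struct img_hash_dev *hdev = dev_id;
	u32 reg;

	reg = img_hash_read(hdev, CR_INTSTAT);
	img_hash_write(hdev, CR_INTCLEAR, reg);

	if (reg & CR_INT_NEW_RESULTS_SET) {
		dev_dbg(hdev->dev, "IRQ CR_INT_NEW_RESULTS_SET\n");
		if (DRIVER_FLAGS_BUSY & hdev->flags) {
			hdev->flags |= DRIVER_FLAGS_OUTPUT_READY;
			if (!(DRIVER_FLAGS_CPU & hdev->flags))
				hdev->flags |= DRIVER_FLAGS_DMA_READY;
			tasklet_schedule(&hdev->done_task);
		} else {
			dev_warn(hdev->dev,
				 "HASH interrupt with no request pending\n");
		}
	}
	return IRQ_HANDLED;
}
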
867 static int img_register_algs(struct img_hash_dev *hdev) in img_register_algs() argument
885 static int img_unregister_algs(struct img_hash_dev *hdev) in img_unregister_algs() argument
896 struct img_hash_dev *hdev = (struct img_hash_dev *)data; in img_hash_done_task() local
899 if (hdev->err == -EINVAL) { in img_hash_done_task()
900 err = hdev->err; in img_hash_done_task()
904 if (!(DRIVER_FLAGS_BUSY & hdev->flags)) { in img_hash_done_task()
905 img_hash_handle_queue(hdev, NULL); in img_hash_done_task()
909 if (DRIVER_FLAGS_CPU & hdev->flags) { in img_hash_done_task()
910 if (DRIVER_FLAGS_OUTPUT_READY & hdev->flags) { in img_hash_done_task()
911 hdev->flags &= ~DRIVER_FLAGS_OUTPUT_READY; in img_hash_done_task()
914 } else if (DRIVER_FLAGS_DMA_READY & hdev->flags) { in img_hash_done_task()
915 if (DRIVER_FLAGS_DMA_ACTIVE & hdev->flags) { in img_hash_done_task()
916 hdev->flags &= ~DRIVER_FLAGS_DMA_ACTIVE; in img_hash_done_task()
917 img_hash_write_via_dma_stop(hdev); in img_hash_done_task()
918 if (hdev->err) { in img_hash_done_task()
919 err = hdev->err; in img_hash_done_task()
923 if (DRIVER_FLAGS_OUTPUT_READY & hdev->flags) { in img_hash_done_task()
924 hdev->flags &= ~(DRIVER_FLAGS_DMA_READY | in img_hash_done_task()
932 img_hash_finish_req(hdev->req, err); in img_hash_done_task()
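
done_task at 896-932 is the completion state machine: an earlier -EINVAL short-circuits to finish, an idle device re-pumps the queue, and otherwise the CPU and DMA paths each clear their readiness flags before the request is finished. A sketch consistent with the fragments:

static void img_hash_done_task(unsigned long data)
{
	struct img_hash_dev *hdev = (struct img_hash_dev *)data;
	int err = 0;

	if (hdev->err == -EINVAL) {
		err = hdev->err;
		goto finish;
	}

	if (!(DRIVER_FLAGS_BUSY & hdev->flags)) {
		img_hash_handle_queue(hdev, NULL);
		return;
	}

	if (DRIVER_FLAGS_CPU & hdev->flags) {
		if (DRIVER_FLAGS_OUTPUT_READY & hdev->flags) {
			hdev->flags &= ~DRIVER_FLAGS_OUTPUT_READY;
			goto finish;
		}
	} else if (DRIVER_FLAGS_DMA_READY & hdev->flags) {
		if (DRIVER_FLAGS_DMA_ACTIVE & hdev->flags) {
			hdev->flags &= ~DRIVER_FLAGS_DMA_ACTIVE;
			img_hash_write_via_dma_stop(hdev);
			if (hdev->err) {
				err = hdev->err;
				goto finish;
			}
		}
		if (DRIVER_FLAGS_OUTPUT_READY & hdev->flags) {
			hdev->flags &= ~(DRIVER_FLAGS_DMA_READY |
					 DRIVER_FLAGS_OUTPUT_READY);
			goto finish;
		}
	}
	return;

finish:
	img_hash_finish_req(hdev->req, err);
}
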
943 struct img_hash_dev *hdev; in img_hash_probe() local
949 hdev = devm_kzalloc(dev, sizeof(*hdev), GFP_KERNEL); in img_hash_probe()
950 if (hdev == NULL) in img_hash_probe()
953 spin_lock_init(&hdev->lock); in img_hash_probe()
955 hdev->dev = dev; in img_hash_probe()
957 platform_set_drvdata(pdev, hdev); in img_hash_probe()
959 INIT_LIST_HEAD(&hdev->list); in img_hash_probe()
961 tasklet_init(&hdev->done_task, img_hash_done_task, (unsigned long)hdev); in img_hash_probe()
962 tasklet_init(&hdev->dma_task, img_hash_dma_task, (unsigned long)hdev); in img_hash_probe()
964 crypto_init_queue(&hdev->queue, IMG_HASH_QUEUE_LENGTH); in img_hash_probe()
967 hdev->io_base = devm_platform_ioremap_resource(pdev, 0); in img_hash_probe()
968 if (IS_ERR(hdev->io_base)) { in img_hash_probe()
969 err = PTR_ERR(hdev->io_base); in img_hash_probe()
975 hdev->cpu_addr = devm_ioremap_resource(dev, hash_res); in img_hash_probe()
976 if (IS_ERR(hdev->cpu_addr)) { in img_hash_probe()
977 err = PTR_ERR(hdev->cpu_addr); in img_hash_probe()
980 hdev->bus_addr = hash_res->start; in img_hash_probe()
989 dev_name(dev), hdev); in img_hash_probe()
996 hdev->hash_clk = devm_clk_get(&pdev->dev, "hash"); in img_hash_probe()
997 if (IS_ERR(hdev->hash_clk)) { in img_hash_probe()
999 err = PTR_ERR(hdev->hash_clk); in img_hash_probe()
1003 hdev->sys_clk = devm_clk_get(&pdev->dev, "sys"); in img_hash_probe()
1004 if (IS_ERR(hdev->sys_clk)) { in img_hash_probe()
1006 err = PTR_ERR(hdev->sys_clk); in img_hash_probe()
1010 err = clk_prepare_enable(hdev->hash_clk); in img_hash_probe()
1014 err = clk_prepare_enable(hdev->sys_clk); in img_hash_probe()
1018 err = img_hash_dma_init(hdev); in img_hash_probe()
1023 dma_chan_name(hdev->dma_lch)); in img_hash_probe()
1026 list_add_tail(&hdev->list, &img_hash.dev_list); in img_hash_probe()
1029 err = img_register_algs(hdev); in img_hash_probe()
1038 list_del(&hdev->list); in img_hash_probe()
1040 dma_release_channel(hdev->dma_lch); in img_hash_probe()
1042 clk_disable_unprepare(hdev->sys_clk); in img_hash_probe()
1044 clk_disable_unprepare(hdev->hash_clk); in img_hash_probe()
1046 tasklet_kill(&hdev->done_task); in img_hash_probe()
1047 tasklet_kill(&hdev->dma_task); in img_hash_probe()
1054 struct img_hash_dev *hdev; in img_hash_remove() local
1056 hdev = platform_get_drvdata(pdev); in img_hash_remove()
1058 list_del(&hdev->list); in img_hash_remove()
1061 img_unregister_algs(hdev); in img_hash_remove()
1063 tasklet_kill(&hdev->done_task); in img_hash_remove()
1064 tasklet_kill(&hdev->dma_task); in img_hash_remove()
1066 dma_release_channel(hdev->dma_lch); in img_hash_remove()
1068 clk_disable_unprepare(hdev->hash_clk); in img_hash_remove()
1069 clk_disable_unprepare(hdev->sys_clk); in img_hash_remove()
1077 struct img_hash_dev *hdev = dev_get_drvdata(dev); in img_hash_suspend() local
1079 clk_disable_unprepare(hdev->hash_clk); in img_hash_suspend()
1080 clk_disable_unprepare(hdev->sys_clk); in img_hash_suspend()
1087 struct img_hash_dev *hdev = dev_get_drvdata(dev); in img_hash_resume() local
1090 ret = clk_prepare_enable(hdev->hash_clk); in img_hash_resume()
1094 ret = clk_prepare_enable(hdev->sys_clk); in img_hash_resume()
1096 clk_disable_unprepare(hdev->hash_clk); in img_hash_resume()
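
The PM hooks at 1077-1096 just gate the two clocks; resume re-enables them in order and rolls back the first if the second fails. Reconstructed from the matches:

static int img_hash_suspend(struct device *dev)
{
	struct img_hash_dev *hdev = dev_get_drvdata(dev);

	clk_disable_unprepare(hdev->hash_clk);
	clk_disable_unprepare(hdev->sys_clk);

	return 0;
}

static int img_hash_resume(struct device *dev)
{
	struct img_hash_dev *hdev = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(hdev->hash_clk);
	if (ret)
		return ret;

	ret = clk_prepare_enable(hdev->sys_clk);
	if (ret) {
		/* Undo the first enable if the second clock fails. */
		clk_disable_unprepare(hdev->hash_clk);
		return ret;
	}

	return 0;
}
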