Lines matching refs: imxdma — identifier cross-reference over the i.MX DMA engine driver (drivers/dma/imx-dma.c). Each entry gives the source line number, the matching line, and the enclosing function; the trailing member/argument/local tag says how the identifier is used at that site.
148 struct imxdma_engine *imxdma; member
205 static inline int is_imx1_dma(struct imxdma_engine *imxdma) in is_imx1_dma() argument
207 return imxdma->devtype == IMX1_DMA; in is_imx1_dma()
210 static inline int is_imx21_dma(struct imxdma_engine *imxdma) in is_imx21_dma() argument
212 return imxdma->devtype == IMX21_DMA; in is_imx21_dma()
215 static inline int is_imx27_dma(struct imxdma_engine *imxdma) in is_imx27_dma() argument
217 return imxdma->devtype == IMX27_DMA; in is_imx27_dma()
240 static void imx_dmav1_writel(struct imxdma_engine *imxdma, unsigned val, in imx_dmav1_writel() argument
243 __raw_writel(val, imxdma->base + offset); in imx_dmav1_writel()
246 static unsigned imx_dmav1_readl(struct imxdma_engine *imxdma, unsigned offset) in imx_dmav1_readl() argument
248 return __raw_readl(imxdma->base + offset); in imx_dmav1_readl()
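
The devtype helpers and register accessors above (lines 205-248) reduce to a tag plus a remapped register base. A minimal self-contained sketch of that pattern; the enum imx_dma_type values and the trimmed struct imxdma_engine layout are inferred from the comparisons shown, not copied from the driver:

    #include <linux/io.h>

    /* Assumed devtype tag; names taken from the comparisons above. */
    enum imx_dma_type { IMX1_DMA, IMX21_DMA, IMX27_DMA };

    /* Reduced sketch of the engine state the helpers rely on. */
    struct imxdma_engine {
        enum imx_dma_type devtype;  /* which i.MX DMA variant we drive */
        void __iomem *base;         /* remapped controller registers */
    };

    static inline int is_imx27_dma(struct imxdma_engine *imxdma)
    {
        return imxdma->devtype == IMX27_DMA;
    }

    /* All register traffic funnels through one base + offset pair. */
    static void imx_dmav1_writel(struct imxdma_engine *imxdma, unsigned val,
                                 unsigned offset)
    {
        __raw_writel(val, imxdma->base + offset);
    }

    static unsigned imx_dmav1_readl(struct imxdma_engine *imxdma, unsigned offset)
    {
        return __raw_readl(imxdma->base + offset);
    }
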
253 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_hw_chain() local
255 if (is_imx27_dma(imxdma)) in imxdma_hw_chain()
267 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_sg_next() local
276 imx_dmav1_writel(imxdma, sg->dma_address, in imxdma_sg_next()
279 imx_dmav1_writel(imxdma, sg->dma_address, in imxdma_sg_next()
282 imx_dmav1_writel(imxdma, now, DMA_CNTR(imxdmac->channel)); in imxdma_sg_next()
284 dev_dbg(imxdma->dev, " %s channel: %d dst 0x%08x, src 0x%08x, " in imxdma_sg_next()
286 imx_dmav1_readl(imxdma, DMA_DAR(imxdmac->channel)), in imxdma_sg_next()
287 imx_dmav1_readl(imxdma, DMA_SAR(imxdmac->channel)), in imxdma_sg_next()
288 imx_dmav1_readl(imxdma, DMA_CNTR(imxdmac->channel))); in imxdma_sg_next()
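
imxdma_sg_next() (lines 267-288) points the channel at the next scatterlist entry: for device-to-memory the buffer address is the destination (DMA_DAR), for memory-to-device it is the source (DMA_SAR), and the byte count goes to DMA_CNTR. A hedged sketch of just those three writes; the direction test is an assumption, and the per-channel DMA_*() offset macros are the driver's own and are not re-declared here:

    #include <linux/dmaengine.h>
    #include <linux/scatterlist.h>

    /* Hedged sketch of the register programming in imxdma_sg_next(). */
    static void imxdma_sg_next_sketch(struct imxdma_engine *imxdma, int channel,
                                      enum dma_transfer_direction dir,
                                      struct scatterlist *sg)
    {
        unsigned long now = sg_dma_len(sg);

        if (dir == DMA_DEV_TO_MEM)
            /* device fills memory: the buffer is the destination */
            imx_dmav1_writel(imxdma, sg->dma_address, DMA_DAR(channel));
        else
            /* memory feeds the device: the buffer is the source */
            imx_dmav1_writel(imxdma, sg->dma_address, DMA_SAR(channel));

        imx_dmav1_writel(imxdma, now, DMA_CNTR(channel));
    }
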
296 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_enable_hw() local
300 dev_dbg(imxdma->dev, "%s channel %d\n", __func__, channel); in imxdma_enable_hw()
304 imx_dmav1_writel(imxdma, 1 << channel, DMA_DISR); in imxdma_enable_hw()
305 imx_dmav1_writel(imxdma, imx_dmav1_readl(imxdma, DMA_DIMR) & in imxdma_enable_hw()
307 imx_dmav1_writel(imxdma, imx_dmav1_readl(imxdma, DMA_CCR(channel)) | in imxdma_enable_hw()
310 if (!is_imx1_dma(imxdma) && in imxdma_enable_hw()
316 tmp = imx_dmav1_readl(imxdma, DMA_CCR(channel)); in imxdma_enable_hw()
317 imx_dmav1_writel(imxdma, tmp | CCR_RPT | CCR_ACRPT, in imxdma_enable_hw()
327 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_disable_hw() local
331 dev_dbg(imxdma->dev, "%s channel %d\n", __func__, channel); in imxdma_disable_hw()
337 imx_dmav1_writel(imxdma, imx_dmav1_readl(imxdma, DMA_DIMR) | in imxdma_disable_hw()
339 imx_dmav1_writel(imxdma, imx_dmav1_readl(imxdma, DMA_CCR(channel)) & in imxdma_disable_hw()
341 imx_dmav1_writel(imxdma, 1 << channel, DMA_DISR); in imxdma_disable_hw()
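
imxdma_enable_hw()/imxdma_disable_hw() (lines 296-341) form the channel on/off protocol: acknowledge stale status in DISR, unmask or mask the channel bit in DIMR, then set or clear CCR_CEN; imxdma_watchdog() (line 351) is the blunt variant, zeroing the whole CCR. A condensed sketch of the pairing; the DIMR bit polarity (1 = masked) is an assumption consistent with the read-modify-writes above:

    static void imxdma_channel_on_off_sketch(struct imxdma_engine *imxdma,
                                             int channel, bool on)
    {
        if (on) {
            /* ack any stale interrupt, then unmask the channel */
            imx_dmav1_writel(imxdma, 1 << channel, DMA_DISR);
            imx_dmav1_writel(imxdma,
                             imx_dmav1_readl(imxdma, DMA_DIMR) & ~(1 << channel),
                             DMA_DIMR);
            /* enable: set CEN (plus ACRPT so a repeat can be armed) */
            imx_dmav1_writel(imxdma,
                             imx_dmav1_readl(imxdma, DMA_CCR(channel)) |
                             CCR_CEN | CCR_ACRPT,
                             DMA_CCR(channel));
        } else {
            /* mask first so a racing completion cannot fire */
            imx_dmav1_writel(imxdma,
                             imx_dmav1_readl(imxdma, DMA_DIMR) | (1 << channel),
                             DMA_DIMR);
            imx_dmav1_writel(imxdma,
                             imx_dmav1_readl(imxdma, DMA_CCR(channel)) & ~CCR_CEN,
                             DMA_CCR(channel));
            imx_dmav1_writel(imxdma, 1 << channel, DMA_DISR);
        }
    }
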
348 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_watchdog() local
351 imx_dmav1_writel(imxdma, 0, DMA_CCR(channel)); in imxdma_watchdog()
355 dev_dbg(imxdma->dev, "channel %d: watchdog timeout!\n", in imxdma_watchdog()
361 struct imxdma_engine *imxdma = dev_id; in imxdma_err_handler() local
366 disr = imx_dmav1_readl(imxdma, DMA_DISR); in imxdma_err_handler()
368 err_mask = imx_dmav1_readl(imxdma, DMA_DBTOSR) | in imxdma_err_handler()
369 imx_dmav1_readl(imxdma, DMA_DRTOSR) | in imxdma_err_handler()
370 imx_dmav1_readl(imxdma, DMA_DSESR) | in imxdma_err_handler()
371 imx_dmav1_readl(imxdma, DMA_DBOSR); in imxdma_err_handler()
376 imx_dmav1_writel(imxdma, disr & err_mask, DMA_DISR); in imxdma_err_handler()
383 if (imx_dmav1_readl(imxdma, DMA_DBTOSR) & (1 << i)) { in imxdma_err_handler()
384 imx_dmav1_writel(imxdma, 1 << i, DMA_DBTOSR); in imxdma_err_handler()
387 if (imx_dmav1_readl(imxdma, DMA_DRTOSR) & (1 << i)) { in imxdma_err_handler()
388 imx_dmav1_writel(imxdma, 1 << i, DMA_DRTOSR); in imxdma_err_handler()
391 if (imx_dmav1_readl(imxdma, DMA_DSESR) & (1 << i)) { in imxdma_err_handler()
392 imx_dmav1_writel(imxdma, 1 << i, DMA_DSESR); in imxdma_err_handler()
395 if (imx_dmav1_readl(imxdma, DMA_DBOSR) & (1 << i)) { in imxdma_err_handler()
396 imx_dmav1_writel(imxdma, 1 << i, DMA_DBOSR); in imxdma_err_handler()
400 tasklet_schedule(&imxdma->channel[i].dma_tasklet); in imxdma_err_handler()
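
imxdma_err_handler() (lines 361-400) folds the four per-channel error status registers into one mask, acks only the DISR bits that carry a real error, clears each write-one-to-clear error bit, and defers cleanup to the channel tasklet. A sketch; the register meanings in the comments (burst timeout, request timeout, transfer error, buffer overflow) follow the register names and are stated as assumptions:

    #include <linux/interrupt.h>

    static irqreturn_t imxdma_err_handler_sketch(int irq, void *dev_id)
    {
        struct imxdma_engine *imxdma = dev_id;
        unsigned int err_mask, disr;
        int i;

        disr = imx_dmav1_readl(imxdma, DMA_DISR);

        /* one bit per channel in each of the four error status registers */
        err_mask = imx_dmav1_readl(imxdma, DMA_DBTOSR) |  /* burst timeout */
                   imx_dmav1_readl(imxdma, DMA_DRTOSR) |  /* request timeout */
                   imx_dmav1_readl(imxdma, DMA_DSESR)  |  /* transfer error */
                   imx_dmav1_readl(imxdma, DMA_DBOSR);    /* buffer overflow */

        if (!err_mask)
            return IRQ_HANDLED;

        /* ack only the interrupts that correspond to a real error */
        imx_dmav1_writel(imxdma, disr & err_mask, DMA_DISR);

        for (i = 0; i < IMX_DMA_CHANNELS; i++) {
            if (!(err_mask & (1 << i)))
                continue;
            /* write-one-to-clear each per-register error bit */
            if (imx_dmav1_readl(imxdma, DMA_DBTOSR) & (1 << i))
                imx_dmav1_writel(imxdma, 1 << i, DMA_DBTOSR);
            /* ... likewise for DMA_DRTOSR, DMA_DSESR, DMA_DBOSR ... */

            /* defer the descriptor cleanup to the channel tasklet */
            tasklet_schedule(&imxdma->channel[i].dma_tasklet);
        }
        return IRQ_HANDLED;
    }
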
414 struct imxdma_engine *imxdma = imxdmac->imxdma; in dma_irq_handle_channel() local
418 spin_lock(&imxdma->lock); in dma_irq_handle_channel()
420 spin_unlock(&imxdma->lock); in dma_irq_handle_channel()
427 spin_unlock(&imxdma->lock); in dma_irq_handle_channel()
436 tmp = imx_dmav1_readl(imxdma, DMA_CCR(chno)); in dma_irq_handle_channel()
446 imx_dmav1_writel(imxdma, tmp, DMA_CCR(chno)); in dma_irq_handle_channel()
448 imx_dmav1_writel(imxdma, tmp & ~CCR_CEN, in dma_irq_handle_channel()
453 imx_dmav1_writel(imxdma, tmp, DMA_CCR(chno)); in dma_irq_handle_channel()
469 imx_dmav1_writel(imxdma, 0, DMA_CCR(chno)); in dma_irq_handle_channel()
476 struct imxdma_engine *imxdma = dev_id; in dma_irq_handler() local
479 if (!is_imx1_dma(imxdma)) in dma_irq_handler()
482 disr = imx_dmav1_readl(imxdma, DMA_DISR); in dma_irq_handler()
484 dev_dbg(imxdma->dev, "%s called, disr=0x%08x\n", __func__, disr); in dma_irq_handler()
486 imx_dmav1_writel(imxdma, disr, DMA_DISR); in dma_irq_handler()
489 dma_irq_handle_channel(&imxdma->channel[i]); in dma_irq_handler()
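
dma_irq_handler() (lines 476-489) is the dispatch layer: on i.MX21/27 the error path shares the interrupt line, so errors are checked first; then DISR is read, acked wholesale, and each flagged channel is serviced. A sketch reusing the error sketch above:

    static irqreturn_t dma_irq_handler_sketch(int irq, void *dev_id)
    {
        struct imxdma_engine *imxdma = dev_id;
        int i, disr;

        /* i.MX21/27 deliver errors on the same line, so check them first */
        if (!is_imx1_dma(imxdma))
            imxdma_err_handler_sketch(irq, imxdma);

        disr = imx_dmav1_readl(imxdma, DMA_DISR);
        /* ack everything we saw, then service each flagged channel */
        imx_dmav1_writel(imxdma, disr, DMA_DISR);

        for (i = 0; i < IMX_DMA_CHANNELS; i++) {
            if (disr & (1 << i))
                dma_irq_handle_channel(&imxdma->channel[i]);
        }
        return IRQ_HANDLED;
    }
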
498 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_xfer_desc() local
507 spin_lock_irqsave(&imxdma->lock, flags); in imxdma_xfer_desc()
509 if ((imxdma->slots_2d[i].count > 0) && in imxdma_xfer_desc()
510 ((imxdma->slots_2d[i].xsr != d->x) || in imxdma_xfer_desc()
511 (imxdma->slots_2d[i].ysr != d->y) || in imxdma_xfer_desc()
512 (imxdma->slots_2d[i].wsr != d->w))) in imxdma_xfer_desc()
518 spin_unlock_irqrestore(&imxdma->lock, flags); in imxdma_xfer_desc()
522 imxdma->slots_2d[slot].xsr = d->x; in imxdma_xfer_desc()
523 imxdma->slots_2d[slot].ysr = d->y; in imxdma_xfer_desc()
524 imxdma->slots_2d[slot].wsr = d->w; in imxdma_xfer_desc()
525 imxdma->slots_2d[slot].count++; in imxdma_xfer_desc()
529 spin_unlock_irqrestore(&imxdma->lock, flags); in imxdma_xfer_desc()
534 imx_dmav1_writel(imxdma, d->x, DMA_XSRA); in imxdma_xfer_desc()
535 imx_dmav1_writel(imxdma, d->y, DMA_YSRA); in imxdma_xfer_desc()
536 imx_dmav1_writel(imxdma, d->w, DMA_WSRA); in imxdma_xfer_desc()
540 imx_dmav1_writel(imxdma, d->x, DMA_XSRB); in imxdma_xfer_desc()
541 imx_dmav1_writel(imxdma, d->y, DMA_YSRB); in imxdma_xfer_desc()
542 imx_dmav1_writel(imxdma, d->w, DMA_WSRB); in imxdma_xfer_desc()
549 imx_dmav1_writel(imxdma, d->src, DMA_SAR(imxdmac->channel)); in imxdma_xfer_desc()
550 imx_dmav1_writel(imxdma, d->dest, DMA_DAR(imxdmac->channel)); in imxdma_xfer_desc()
551 imx_dmav1_writel(imxdma, d->config_mem | (d->config_port << 2), in imxdma_xfer_desc()
554 imx_dmav1_writel(imxdma, d->len, DMA_CNTR(imxdmac->channel)); in imxdma_xfer_desc()
556 dev_dbg(imxdma->dev, "%s channel: %d dest=0x%08x src=0x%08x " in imxdma_xfer_desc()
565 imx_dmav1_writel(imxdma, imxdmac->per_address, in imxdma_xfer_desc()
567 imx_dmav1_writel(imxdma, imxdmac->ccr_from_device, in imxdma_xfer_desc()
570 dev_dbg(imxdma->dev, "%s channel: %d sg=%p sgcount=%d " in imxdma_xfer_desc()
575 imx_dmav1_writel(imxdma, imxdmac->per_address, in imxdma_xfer_desc()
577 imx_dmav1_writel(imxdma, imxdmac->ccr_to_device, in imxdma_xfer_desc()
580 dev_dbg(imxdma->dev, "%s channel: %d sg=%p sgcount=%d " in imxdma_xfer_desc()
585 dev_err(imxdma->dev, "%s channel: %d bad dma mode\n", in imxdma_xfer_desc()
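
Before programming a 2D transfer, imxdma_xfer_desc() (lines 507-542) claims one of the controller's two shared geometry slots: a slot matches if it is idle (count == 0) or already holds the same x/y/w triple, and a per-slot refcount tracks the sharers; slots A and B each own an XSR/YSR/WSR register triple. A sketch of that claim under the engine lock; IMX_DMA_2D_SLOTS, the slot-0-is-A mapping, and the -EBUSY fallback are assumptions:

    #include <linux/errno.h>
    #include <linux/spinlock.h>

    /* Hedged sketch of the 2D slot claim in imxdma_xfer_desc(). */
    static int imxdma_claim_2d_slot_sketch(struct imxdma_engine *imxdma,
                                           unsigned int x, unsigned int y,
                                           unsigned int w)
    {
        unsigned long flags;
        int slot = -1;
        int i;

        spin_lock_irqsave(&imxdma->lock, flags);
        for (i = 0; i < IMX_DMA_2D_SLOTS; i++) {
            /* a busy slot only matches if its geometry is identical */
            if (imxdma->slots_2d[i].count > 0 &&
                (imxdma->slots_2d[i].xsr != x ||
                 imxdma->slots_2d[i].ysr != y ||
                 imxdma->slots_2d[i].wsr != w))
                continue;
            slot = i;
            break;
        }
        if (slot < 0) {
            spin_unlock_irqrestore(&imxdma->lock, flags);
            return -EBUSY;  /* both slots busy with other geometries */
        }

        imxdma->slots_2d[slot].xsr = x;
        imxdma->slots_2d[slot].ysr = y;
        imxdma->slots_2d[slot].wsr = w;
        imxdma->slots_2d[slot].count++;  /* refcount the sharers */
        spin_unlock_irqrestore(&imxdma->lock, flags);

        /* slot A and slot B each have their own register triple */
        if (slot == 0) {
            imx_dmav1_writel(imxdma, x, DMA_XSRA);
            imx_dmav1_writel(imxdma, y, DMA_YSRA);
            imx_dmav1_writel(imxdma, w, DMA_WSRA);
        } else {
            imx_dmav1_writel(imxdma, x, DMA_XSRB);
            imx_dmav1_writel(imxdma, y, DMA_YSRB);
            imx_dmav1_writel(imxdma, w, DMA_WSRB);
        }
        return slot;
    }
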
603 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_tasklet() local
606 spin_lock(&imxdma->lock); in imxdma_tasklet()
628 imxdma->slots_2d[imxdmac->slot_2d].count--; in imxdma_tasklet()
639 dev_warn(imxdma->dev, "%s: channel: %d couldn't xfer desc\n", in imxdma_tasklet()
643 spin_unlock(&imxdma->lock); in imxdma_tasklet()
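
imxdma_tasklet() (lines 603-643) completes under the same engine lock and, among other things, drops the 2D slot refcount taken above. A skeletal sketch; the enabled_2d flag is an assumption, and the descriptor recycling is elided:

    static void imxdma_tasklet_sketch(struct imxdma_channel *imxdmac)
    {
        struct imxdma_engine *imxdma = imxdmac->imxdma;

        spin_lock(&imxdma->lock);

        /* release the shared 2D geometry slot claimed for this transfer */
        if (imxdmac->enabled_2d)
            imxdma->slots_2d[imxdmac->slot_2d].count--;

        /* ... complete the cookie, recycle the descriptor, and start the
         * next queued one; a failure is only worth the dev_warn at
         * line 639 because a later issue_pending will retry ... */

        spin_unlock(&imxdma->lock);
    }
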
651 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_control() local
659 spin_lock_irqsave(&imxdma->lock, flags); in imxdma_control()
662 spin_unlock_irqrestore(&imxdma->lock, flags); in imxdma_control()
696 imx_dmav1_writel(imxdma, imxdmac->dma_request, in imxdma_control()
700 imx_dmav1_writel(imxdma, imxdmac->watermark_level * in imxdma_control()
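
The DMA_SLAVE_CONFIG branch of imxdma_control() (lines 696-700) binds the channel to its peripheral: the request line number and a burst length derived from the FIFO watermark. The truncated lines above hide the register targets; DMA_RSSR, DMA_BLR, and the word_size multiply are assumptions in this sketch:

    /* Hedged sketch of the DMA_SLAVE_CONFIG path in imxdma_control(). */
    static void imxdma_slave_config_sketch(struct imxdma_channel *imxdmac)
    {
        struct imxdma_engine *imxdma = imxdmac->imxdma;

        /* route the peripheral's DMA request line to this channel */
        imx_dmav1_writel(imxdma, imxdmac->dma_request,
                         DMA_RSSR(imxdmac->channel));

        /* burst length in bytes: FIFO watermark times the access width */
        imx_dmav1_writel(imxdma, imxdmac->watermark_level *
                         imxdmac->word_size,
                         DMA_BLR(imxdmac->channel));
    }
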
721 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_tx_submit() local
725 spin_lock_irqsave(&imxdma->lock, flags); in imxdma_tx_submit()
728 spin_unlock_irqrestore(&imxdma->lock, flags); in imxdma_tx_submit()
767 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_free_chan_resources() local
771 spin_lock_irqsave(&imxdma->lock, flags); in imxdma_free_chan_resources()
777 spin_unlock_irqrestore(&imxdma->lock, flags); in imxdma_free_chan_resources()
848 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_prep_dma_cyclic() local
853 dev_dbg(imxdma->dev, "%s channel: %d buf_len=%d period_len=%d\n", in imxdma_prep_dma_cyclic()
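
imxdma_prep_dma_cyclic() (line 853) splits buf_len into period_len-sized scatterlist entries that imxdma_sg_next() then walks. A sketch of the period split; the sg_list array on the channel is an assumption, and the loop-closing sentinel entry used by the real driver is elided:

    /* Hedged sketch: carve one cyclic buffer into per-period sg entries. */
    static void imxdma_fill_cyclic_sg_sketch(struct imxdma_channel *imxdmac,
                                             dma_addr_t dma_addr,
                                             size_t buf_len, size_t period_len)
    {
        unsigned int periods = buf_len / period_len;
        unsigned int i;

        for (i = 0; i < periods; i++) {
            imxdmac->sg_list[i].page_link = 0;
            imxdmac->sg_list[i].offset = 0;
            imxdmac->sg_list[i].dma_address = dma_addr;
            sg_dma_len(&imxdmac->sg_list[i]) = period_len;
            dma_addr += period_len;
        }
        /* ... the real driver appends one extra entry that chains back
         * to sg_list[0] so the walk never terminates ... */
    }
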
906 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_prep_dma_memcpy() local
909 dev_dbg(imxdma->dev, "%s channel: %d src=0x%x dst=0x%x len=%d\n", in imxdma_prep_dma_memcpy()
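
imxdma_prep_dma_memcpy() (line 909) only fills a descriptor; the actual register writes happen later in imxdma_xfer_desc() (lines 549-554). A sketch of the fill using the descriptor fields visible above; the IMX_DMA_* config constants and the embedded desc member are assumptions:

    /* Hedged sketch: a memcpy prep records what xfer_desc will program. */
    static struct dma_async_tx_descriptor *
    imxdma_prep_dma_memcpy_sketch(struct imxdma_desc *d,
                                  dma_addr_t dest, dma_addr_t src, size_t len)
    {
        d->src  = src;   /* later written to DMA_SAR() */
        d->dest = dest;  /* later written to DMA_DAR() */
        d->len  = len;   /* later written to DMA_CNTR() */

        /* memory-to-memory: both sides linear, 32-bit accesses
         * (the exact IMX_DMA_* constants are assumptions) */
        d->config_mem  = IMX_DMA_MEMSIZE_32 | IMX_DMA_TYPE_LINEAR;
        d->config_port = IMX_DMA_MEMSIZE_32 | IMX_DMA_TYPE_LINEAR;

        return &d->desc;
    }
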
936 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_prep_dma_interleaved() local
939 dev_dbg(imxdma->dev, "%s channel: %d src_start=0x%x dst_start=0x%x\n" in imxdma_prep_dma_interleaved()
977 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_issue_pending() local
981 spin_lock_irqsave(&imxdma->lock, flags); in imxdma_issue_pending()
988 dev_warn(imxdma->dev, in imxdma_issue_pending()
996 spin_unlock_irqrestore(&imxdma->lock, flags); in imxdma_issue_pending()
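
imxdma_issue_pending() (lines 977-996) starts the first queued descriptor if the channel is idle; on failure it only warns (line 988) and leaves the descriptor queued. A sketch; the ld_queue/ld_active list names and the node member are assumptions:

    #include <linux/list.h>

    static void imxdma_issue_pending_sketch(struct imxdma_channel *imxdmac)
    {
        struct imxdma_engine *imxdma = imxdmac->imxdma;
        struct imxdma_desc *desc;
        unsigned long flags;

        spin_lock_irqsave(&imxdma->lock, flags);
        if (list_empty(&imxdmac->ld_active) &&
            !list_empty(&imxdmac->ld_queue)) {
            desc = list_first_entry(&imxdmac->ld_queue,
                                    struct imxdma_desc, node);
            if (imxdma_xfer_desc(desc) < 0) {
                /* leave it queued; a later issue_pending retries */
                dev_warn(imxdma->dev,
                         "%s: channel: %d couldn't issue DMA xfer\n",
                         __func__, imxdmac->channel);
            } else {
                list_move_tail(imxdmac->ld_queue.next,
                               &imxdmac->ld_active);
            }
        }
        spin_unlock_irqrestore(&imxdma->lock, flags);
    }
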
1001 struct imxdma_engine *imxdma; in imxdma_probe() local
1006 imxdma = devm_kzalloc(&pdev->dev, sizeof(*imxdma), GFP_KERNEL); in imxdma_probe()
1007 if (!imxdma) in imxdma_probe()
1010 imxdma->devtype = pdev->id_entry->driver_data; in imxdma_probe()
1013 imxdma->base = devm_ioremap_resource(&pdev->dev, res); in imxdma_probe()
1014 if (IS_ERR(imxdma->base)) in imxdma_probe()
1015 return PTR_ERR(imxdma->base); in imxdma_probe()
1021 imxdma->dma_ipg = devm_clk_get(&pdev->dev, "ipg"); in imxdma_probe()
1022 if (IS_ERR(imxdma->dma_ipg)) in imxdma_probe()
1023 return PTR_ERR(imxdma->dma_ipg); in imxdma_probe()
1025 imxdma->dma_ahb = devm_clk_get(&pdev->dev, "ahb"); in imxdma_probe()
1026 if (IS_ERR(imxdma->dma_ahb)) in imxdma_probe()
1027 return PTR_ERR(imxdma->dma_ahb); in imxdma_probe()
1029 clk_prepare_enable(imxdma->dma_ipg); in imxdma_probe()
1030 clk_prepare_enable(imxdma->dma_ahb); in imxdma_probe()
1033 imx_dmav1_writel(imxdma, DCR_DRST, DMA_DCR); in imxdma_probe()
1035 if (is_imx1_dma(imxdma)) { in imxdma_probe()
1037 dma_irq_handler, 0, "DMA", imxdma); in imxdma_probe()
1039 dev_warn(imxdma->dev, "Can't register IRQ for DMA\n"); in imxdma_probe()
1050 imxdma_err_handler, 0, "DMA", imxdma); in imxdma_probe()
1052 dev_warn(imxdma->dev, "Can't register ERRIRQ for DMA\n"); in imxdma_probe()
1058 imx_dmav1_writel(imxdma, DCR_DEN, DMA_DCR); in imxdma_probe()
1061 imx_dmav1_writel(imxdma, (1 << IMX_DMA_CHANNELS) - 1, DMA_DISR); in imxdma_probe()
1064 imx_dmav1_writel(imxdma, (1 << IMX_DMA_CHANNELS) - 1, DMA_DIMR); in imxdma_probe()
1066 INIT_LIST_HEAD(&imxdma->dma_device.channels); in imxdma_probe()
1068 dma_cap_set(DMA_SLAVE, imxdma->dma_device.cap_mask); in imxdma_probe()
1069 dma_cap_set(DMA_CYCLIC, imxdma->dma_device.cap_mask); in imxdma_probe()
1070 dma_cap_set(DMA_MEMCPY, imxdma->dma_device.cap_mask); in imxdma_probe()
1071 dma_cap_set(DMA_INTERLEAVE, imxdma->dma_device.cap_mask); in imxdma_probe()
1075 imxdma->slots_2d[i].count = 0; in imxdma_probe()
1077 spin_lock_init(&imxdma->lock); in imxdma_probe()
1081 struct imxdma_channel *imxdmac = &imxdma->channel[i]; in imxdma_probe()
1083 if (!is_imx1_dma(imxdma)) { in imxdma_probe()
1085 dma_irq_handler, 0, "DMA", imxdma); in imxdma_probe()
1087 dev_warn(imxdma->dev, "Can't register IRQ %d " in imxdma_probe()
1097 imxdmac->imxdma = imxdma; in imxdma_probe()
1105 imxdmac->chan.device = &imxdma->dma_device; in imxdma_probe()
1111 &imxdma->dma_device.channels); in imxdma_probe()
1114 imxdma->dev = &pdev->dev; in imxdma_probe()
1115 imxdma->dma_device.dev = &pdev->dev; in imxdma_probe()
1117 imxdma->dma_device.device_alloc_chan_resources = imxdma_alloc_chan_resources; in imxdma_probe()
1118 imxdma->dma_device.device_free_chan_resources = imxdma_free_chan_resources; in imxdma_probe()
1119 imxdma->dma_device.device_tx_status = imxdma_tx_status; in imxdma_probe()
1120 imxdma->dma_device.device_prep_slave_sg = imxdma_prep_slave_sg; in imxdma_probe()
1121 imxdma->dma_device.device_prep_dma_cyclic = imxdma_prep_dma_cyclic; in imxdma_probe()
1122 imxdma->dma_device.device_prep_dma_memcpy = imxdma_prep_dma_memcpy; in imxdma_probe()
1123 imxdma->dma_device.device_prep_interleaved_dma = imxdma_prep_dma_interleaved; in imxdma_probe()
1124 imxdma->dma_device.device_control = imxdma_control; in imxdma_probe()
1125 imxdma->dma_device.device_issue_pending = imxdma_issue_pending; in imxdma_probe()
1127 platform_set_drvdata(pdev, imxdma); in imxdma_probe()
1129 imxdma->dma_device.copy_align = 2; /* 2^2 = 4 bytes alignment */ in imxdma_probe()
1130 imxdma->dma_device.dev->dma_parms = &imxdma->dma_parms; in imxdma_probe()
1131 dma_set_max_seg_size(imxdma->dma_device.dev, 0xffffff); in imxdma_probe()
1133 ret = dma_async_device_register(&imxdma->dma_device); in imxdma_probe()
1142 clk_disable_unprepare(imxdma->dma_ipg); in imxdma_probe()
1143 clk_disable_unprepare(imxdma->dma_ahb); in imxdma_probe()
1149 struct imxdma_engine *imxdma = platform_get_drvdata(pdev); in imxdma_remove() local
1151 dma_async_device_unregister(&imxdma->dma_device); in imxdma_remove()
1153 clk_disable_unprepare(imxdma->dma_ipg); in imxdma_remove()
1154 clk_disable_unprepare(imxdma->dma_ahb); in imxdma_remove()
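
Read end to end, imxdma_probe()/imxdma_remove() (lines 1001-1154) are the usual managed lifecycle: allocate the engine, ioremap the registers, grab and enable the ipg/ahb clocks, reset then enable the controller via DCR, ack and mask every channel, and register the dma_device; remove unwinds only what devm does not. A compressed skeleton; IRQ wiring, per-channel init, and capability setup are elided, and the exact ordering is simplified:

    #include <linux/clk.h>
    #include <linux/dmaengine.h>
    #include <linux/err.h>
    #include <linux/platform_device.h>

    static int imxdma_probe_sketch(struct platform_device *pdev)
    {
        struct imxdma_engine *imxdma;
        struct resource *res;
        int ret;

        imxdma = devm_kzalloc(&pdev->dev, sizeof(*imxdma), GFP_KERNEL);
        if (!imxdma)
            return -ENOMEM;

        res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
        imxdma->base = devm_ioremap_resource(&pdev->dev, res);
        if (IS_ERR(imxdma->base))
            return PTR_ERR(imxdma->base);

        imxdma->dma_ipg = devm_clk_get(&pdev->dev, "ipg");
        if (IS_ERR(imxdma->dma_ipg))
            return PTR_ERR(imxdma->dma_ipg);
        imxdma->dma_ahb = devm_clk_get(&pdev->dev, "ahb");
        if (IS_ERR(imxdma->dma_ahb))
            return PTR_ERR(imxdma->dma_ahb);

        clk_prepare_enable(imxdma->dma_ipg);
        clk_prepare_enable(imxdma->dma_ahb);

        /* reset, then enable the controller */
        imx_dmav1_writel(imxdma, DCR_DRST, DMA_DCR);
        imx_dmav1_writel(imxdma, DCR_DEN, DMA_DCR);

        /* ack and mask every channel before interrupts can fire */
        imx_dmav1_writel(imxdma, (1 << IMX_DMA_CHANNELS) - 1, DMA_DISR);
        imx_dmav1_writel(imxdma, (1 << IMX_DMA_CHANNELS) - 1, DMA_DIMR);

        /* ... request IRQ(s), init channels, fill in dma_device ops ... */

        platform_set_drvdata(pdev, imxdma);

        ret = dma_async_device_register(&imxdma->dma_device);
        if (ret) {
            clk_disable_unprepare(imxdma->dma_ipg);
            clk_disable_unprepare(imxdma->dma_ahb);
            return ret;
        }
        return 0;
    }

    static int imxdma_remove_sketch(struct platform_device *pdev)
    {
        struct imxdma_engine *imxdma = platform_get_drvdata(pdev);

        dma_async_device_unregister(&imxdma->dma_device);
        clk_disable_unprepare(imxdma->dma_ipg);
        clk_disable_unprepare(imxdma->dma_ahb);
        return 0;
    }
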