Lines matching the identifier `od` (the driver-wide struct owl_dma context) in the Actions Semi Owl DMA engine driver, drivers/dma/owl-dma.c. Each hit shows the source line number, the matching line, and the enclosing function; "argument" and "local" mark whether `od` is a function parameter or a local variable on that line.
246 static void dma_update(struct owl_dma *od, u32 reg, u32 val, bool state) in dma_update() argument
250 regval = readl(od->base + reg); in dma_update()
257 writel(val, od->base + reg); in dma_update()
260 static void dma_writel(struct owl_dma *od, u32 reg, u32 data) in dma_writel() argument
262 writel(data, od->base + reg); in dma_writel()
265 static u32 dma_readl(struct owl_dma *od, u32 reg) in dma_readl() argument
267 return readl(od->base + reg); in dma_readl()
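
Taken together, the three helpers above are the driver's global-register accessors: dma_writel()/dma_readl() are thin MMIO wrappers around od->base + reg, and dma_update() is a read-modify-write that sets or clears the bits of val depending on state. Below is a minimal sketch with the branch on state filled in by assumption (the listing shows only the readl() and writel() lines). Note that the hit at line 257 writes val back rather than the merged regval; a conventional RMW helper writes regval, as this sketch does.

static void dma_update(struct owl_dma *od, u32 reg, u32 val, bool state)
{
        u32 regval;

        regval = readl(od->base + reg);

        if (state)
                regval |= val;          /* set the bits in val */
        else
                regval &= ~val;         /* clear the bits in val */

        writel(regval, od->base + reg); /* write back the merged value */
}
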
313 static void owl_dma_free_lli(struct owl_dma *od, in owl_dma_free_lli() argument
317 dma_pool_free(od->lli_pool, lli, lli->phys); in owl_dma_free_lli()
320 static struct owl_dma_lli *owl_dma_alloc_lli(struct owl_dma *od) in owl_dma_alloc_lli() argument
325 lli = dma_pool_alloc(od->lli_pool, GFP_NOWAIT, &phys); in owl_dma_alloc_lli()
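
owl_dma_alloc_lli() and owl_dma_free_lli() manage the hardware link-list items out of a dma_pool, so every allocation yields both a CPU pointer and the bus address the controller follows. A sketch of the presumed allocator; the lli->node list head is an assumption, while od->lli_pool and lli->phys are visible in the hits above.

static struct owl_dma_lli *owl_dma_alloc_lli(struct owl_dma *od)
{
        struct owl_dma_lli *lli;
        dma_addr_t phys;

        /* GFP_NOWAIT: prep callbacks may run in atomic context */
        lli = dma_pool_alloc(od->lli_pool, GFP_NOWAIT, &phys);
        if (!lli)
                return NULL;

        INIT_LIST_HEAD(&lli->node);
        lli->phys = phys;       /* remembered for dma_pool_free() */

        return lli;
}
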
387 static struct owl_dma_pchan *owl_dma_get_pchan(struct owl_dma *od, in owl_dma_get_pchan() argument
394 for (i = 0; i < od->nr_pchans; i++) { in owl_dma_get_pchan()
395 pchan = &od->pchans[i]; in owl_dma_get_pchan()
397 spin_lock_irqsave(&od->lock, flags); in owl_dma_get_pchan()
400 spin_unlock_irqrestore(&od->lock, flags); in owl_dma_get_pchan()
404 spin_unlock_irqrestore(&od->lock, flags); in owl_dma_get_pchan()
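
owl_dma_get_pchan() claims a free physical channel for a virtual channel by scanning od->pchans under od->lock; taking and releasing the lock per iteration keeps the critical section down to a single test-and-set. A sketch under the assumption that an idle pchan is marked by a NULL pchan->vchan (consistent with the "no vchan attached" warning in the interrupt handler below):

static struct owl_dma_pchan *owl_dma_get_pchan(struct owl_dma *od,
                                               struct owl_dma_vchan *vchan)
{
        struct owl_dma_pchan *pchan;
        unsigned long flags;
        int i;

        for (i = 0; i < od->nr_pchans; i++) {
                pchan = &od->pchans[i];

                spin_lock_irqsave(&od->lock, flags);
                if (!pchan->vchan) {
                        pchan->vchan = vchan;   /* claim it */
                        spin_unlock_irqrestore(&od->lock, flags);
                        return pchan;
                }
                spin_unlock_irqrestore(&od->lock, flags);
        }

        return NULL;    /* all pchans busy */
}
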
410 static int owl_dma_pchan_busy(struct owl_dma *od, struct owl_dma_pchan *pchan) in owl_dma_pchan_busy() argument
414 val = dma_readl(od, OWL_DMA_IDLE_STAT); in owl_dma_pchan_busy()
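
The busy check is a single read of the global idle-status register; assuming OWL_DMA_IDLE_STAT carries one idle bit per channel, busy is simply the inverted bit:

static int owl_dma_pchan_busy(struct owl_dma *od, struct owl_dma_pchan *pchan)
{
        unsigned int val;

        val = dma_readl(od, OWL_DMA_IDLE_STAT);

        /* bit set = idle, so busy is the inverse */
        return !(val & (1 << pchan->id));
}
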
419 static void owl_dma_terminate_pchan(struct owl_dma *od, in owl_dma_terminate_pchan() argument
428 spin_lock_irqsave(&od->lock, flags); in owl_dma_terminate_pchan()
429 dma_update(od, OWL_DMA_IRQ_EN0, (1 << pchan->id), false); in owl_dma_terminate_pchan()
431 irq_pd = dma_readl(od, OWL_DMA_IRQ_PD0); in owl_dma_terminate_pchan()
433 dev_warn(od->dma.dev, in owl_dma_terminate_pchan()
436 dma_writel(od, OWL_DMA_IRQ_PD0, (1 << pchan->id)); in owl_dma_terminate_pchan()
441 spin_unlock_irqrestore(&od->lock, flags); in owl_dma_terminate_pchan()
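
owl_dma_terminate_pchan() masks the channel's interrupt and then handles the race where an interrupt became pending before the mask took effect: it warns and clears the stale bit by writing it back to OWL_DMA_IRQ_PD0 (presumably write-one-to-clear, matching how the interrupt handler acknowledges pending bits). A sketch; clearing pchan->vchan at the end is an assumption paired with owl_dma_get_pchan() above:

static void owl_dma_terminate_pchan(struct owl_dma *od,
                                    struct owl_dma_pchan *pchan)
{
        unsigned long flags;
        u32 irq_pd;

        spin_lock_irqsave(&od->lock, flags);

        /* Mask this channel's interrupt */
        dma_update(od, OWL_DMA_IRQ_EN0, (1 << pchan->id), false);

        /* An IRQ may have fired before the mask landed */
        irq_pd = dma_readl(od, OWL_DMA_IRQ_PD0);
        if (irq_pd & (1 << pchan->id)) {
                dev_warn(od->dma.dev,
                         "terminating pchan %d that still has pending irq\n",
                         pchan->id);
                /* presumed write-one-to-clear */
                dma_writel(od, OWL_DMA_IRQ_PD0, (1 << pchan->id));
        }

        pchan->vchan = NULL;    /* release the channel (assumed) */

        spin_unlock_irqrestore(&od->lock, flags);
}
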
446 struct owl_dma *od = to_owl_dma(vchan->vc.chan.device); in owl_dma_start_next_txd() local
459 while (owl_dma_pchan_busy(od, pchan)) in owl_dma_start_next_txd()
476 spin_lock_irqsave(&od->lock, flags); in owl_dma_start_next_txd()
478 dma_update(od, OWL_DMA_IRQ_EN0, (1 << pchan->id), true); in owl_dma_start_next_txd()
480 spin_unlock_irqrestore(&od->lock, flags); in owl_dma_start_next_txd()
490 static void owl_dma_phy_free(struct owl_dma *od, struct owl_dma_vchan *vchan) in owl_dma_phy_free() argument
493 owl_dma_terminate_pchan(od, vchan->pchan); in owl_dma_phy_free()
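
owl_dma_start_next_txd() spins until the hardware reports the pchan idle, programs the descriptor chain, and only then unmasks the channel interrupt under od->lock; owl_dma_phy_free() is the inverse, terminating the pchan and detaching it from the vchan. A condensed sketch with the channel-level register programming elided (those writes do not appear in this listing):

static int owl_dma_start_next_txd(struct owl_dma_vchan *vchan)
{
        struct owl_dma *od = to_owl_dma(vchan->vc.chan.device);
        struct owl_dma_pchan *pchan = vchan->pchan;
        unsigned long flags;

        /* Wait for the hardware to go idle before reprogramming */
        while (owl_dma_pchan_busy(od, pchan))
                cpu_relax();

        /* ... program the pchan with the first LLI of the txd ... */

        spin_lock_irqsave(&od->lock, flags);
        dma_update(od, OWL_DMA_IRQ_EN0, (1 << pchan->id), true);
        spin_unlock_irqrestore(&od->lock, flags);

        /* ... kick the channel ... */

        return 0;
}

static void owl_dma_phy_free(struct owl_dma *od, struct owl_dma_vchan *vchan)
{
        /* Ensure the physical channel is stopped, then detach it */
        owl_dma_terminate_pchan(od, vchan->pchan);
        vchan->pchan = NULL;
}
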
500 struct owl_dma *od = dev_id; in owl_dma_interrupt() local
507 spin_lock(&od->lock); in owl_dma_interrupt()
509 pending = dma_readl(od, OWL_DMA_IRQ_PD0); in owl_dma_interrupt()
512 for_each_set_bit(i, &pending, od->nr_pchans) { in owl_dma_interrupt()
513 pchan = &od->pchans[i]; in owl_dma_interrupt()
518 dma_writel(od, OWL_DMA_IRQ_PD0, pending); in owl_dma_interrupt()
521 for (i = 0; i < od->nr_pchans; i++) { in owl_dma_interrupt()
522 pchan = &od->pchans[i]; in owl_dma_interrupt()
527 dma_readl(od, OWL_DMA_IRQ_PD0); in owl_dma_interrupt()
529 global_irq_pending = dma_readl(od, OWL_DMA_IRQ_PD0); in owl_dma_interrupt()
532 dev_dbg(od->dma.dev, in owl_dma_interrupt()
544 spin_unlock(&od->lock); in owl_dma_interrupt()
546 for_each_set_bit(i, &pending, od->nr_pchans) { in owl_dma_interrupt()
549 pchan = &od->pchans[i]; in owl_dma_interrupt()
553 dev_warn(od->dma.dev, "no vchan attached on pchan %d\n", in owl_dma_interrupt()
573 owl_dma_phy_free(od, vchan); in owl_dma_interrupt()
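
The interrupt handler is the heaviest user of od. Under od->lock it snapshots OWL_DMA_IRQ_PD0, acknowledges it, and then cross-checks every channel for interrupts that raced with the acknowledge; the back-to-back reads at lines 527 and 529 suggest the first is a flushing dummy read so the second returns fresh state. Only after dropping the lock does it walk the pending bits and complete descriptors. A condensed sketch; the per-channel status handling and descriptor completion follow the usual virt-dma pattern and are assumptions here:

static irqreturn_t owl_dma_interrupt(int irq, void *dev_id)
{
        struct owl_dma *od = dev_id;
        struct owl_dma_pchan *pchan;
        struct owl_dma_vchan *vchan;
        unsigned long pending;
        unsigned int global_irq_pending;
        int i;

        spin_lock(&od->lock);

        /* Snapshot and acknowledge all pending channel interrupts */
        pending = dma_readl(od, OWL_DMA_IRQ_PD0);
        for_each_set_bit(i, &pending, od->nr_pchans) {
                pchan = &od->pchans[i];
                /* ... clear this pchan's interrupt status ... */
        }
        dma_writel(od, OWL_DMA_IRQ_PD0, pending);

        /* Catch interrupts that raced with the acknowledge above */
        for (i = 0; i < od->nr_pchans; i++) {
                pchan = &od->pchans[i];
                /* dummy read so the next read returns fresh state */
                dma_readl(od, OWL_DMA_IRQ_PD0);
                global_irq_pending = dma_readl(od, OWL_DMA_IRQ_PD0);
                /* ... if this pchan reports an IRQ the global register
                 *     missed, fold its bit back into 'pending' ... */
        }

        spin_unlock(&od->lock);

        /* Complete finished descriptors outside od->lock */
        for_each_set_bit(i, &pending, od->nr_pchans) {
                pchan = &od->pchans[i];
                vchan = pchan->vchan;
                if (!vchan) {
                        dev_warn(od->dma.dev,
                                 "no vchan attached on pchan %d\n", pchan->id);
                        continue;
                }
                /* ... complete vchan->txd, then either start the next
                 *     descriptor or owl_dma_phy_free(od, vchan) ... */
        }

        return IRQ_HANDLED;
}
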
582 static void owl_dma_free_txd(struct owl_dma *od, struct owl_dma_txd *txd) in owl_dma_free_txd() argument
590 owl_dma_free_lli(od, lli); in owl_dma_free_txd()
597 struct owl_dma *od = to_owl_dma(vd->tx.chan->device); in owl_dma_desc_free() local
600 owl_dma_free_txd(od, txd); in owl_dma_desc_free()
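
Descriptor teardown is layered: owl_dma_desc_free() is the virt-dma free callback, recovering the owl_dma context from the channel's device, and owl_dma_free_txd() returns each link-list item to the pool. A sketch assuming a txd->lli_list of owl_dma_lli nodes and a to_owl_txd() container-of helper:

static void owl_dma_free_txd(struct owl_dma *od, struct owl_dma_txd *txd)
{
        struct owl_dma_lli *lli, *n;

        if (unlikely(!txd))
                return;

        /* Return every link-list item to the dma_pool */
        list_for_each_entry_safe(lli, n, &txd->lli_list, node)
                owl_dma_free_lli(od, lli);

        kfree(txd);
}

static void owl_dma_desc_free(struct virt_dma_desc *vd)
{
        struct owl_dma *od = to_owl_dma(vd->tx.chan->device);
        struct owl_dma_txd *txd = to_owl_txd(&vd->tx);

        owl_dma_free_txd(od, txd);
}
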
605 struct owl_dma *od = to_owl_dma(chan->device); in owl_dma_terminate_all() local
613 owl_dma_phy_free(od, vchan); in owl_dma_terminate_all()
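
device_terminate_all presumably follows the standard virt-dma shape: under the vchan lock, free the physical channel if one is attached (line 613), collect all queued descriptors, and free them after dropping the lock. A sketch using the stock virt-dma helpers; to_owl_vchan() is an assumed container-of helper:

static int owl_dma_terminate_all(struct dma_chan *chan)
{
        struct owl_dma *od = to_owl_dma(chan->device);
        struct owl_dma_vchan *vchan = to_owl_vchan(chan);
        unsigned long flags;
        LIST_HEAD(head);

        spin_lock_irqsave(&vchan->vc.lock, flags);

        if (vchan->pchan)
                owl_dma_phy_free(od, vchan);

        vchan_get_all_descriptors(&vchan->vc, &head);

        spin_unlock_irqrestore(&vchan->vc.lock, flags);

        /* frees each descriptor via owl_dma_desc_free() */
        vchan_dma_desc_free_list(&vchan->vc, &head);

        return 0;
}
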
697 struct owl_dma *od = to_owl_dma(vchan->vc.chan.device); in owl_dma_phy_alloc_and_start() local
700 pchan = owl_dma_get_pchan(od, vchan); in owl_dma_phy_alloc_and_start()
704 dev_dbg(od->dma.dev, "allocated pchan %d\n", pchan->id); in owl_dma_phy_alloc_and_start()
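
Physical channels are bound lazily: when a vchan has work, the driver tries to grab a free pchan and starts the first queued descriptor on success. Sketch, with the vchan->pchan assignment assumed:

static void owl_dma_phy_alloc_and_start(struct owl_dma_vchan *vchan)
{
        struct owl_dma *od = to_owl_dma(vchan->vc.chan.device);
        struct owl_dma_pchan *pchan;

        pchan = owl_dma_get_pchan(od, vchan);
        if (!pchan)
                return;         /* all pchans busy; retried on completion */

        dev_dbg(od->dma.dev, "allocated pchan %d\n", pchan->id);

        vchan->pchan = pchan;
        owl_dma_start_next_txd(vchan);
}
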
728 struct owl_dma *od = to_owl_dma(chan->device); in owl_dma_prep_memcpy() local
746 lli = owl_dma_alloc_lli(od); in owl_dma_prep_memcpy()
767 owl_dma_free_txd(od, txd); in owl_dma_prep_memcpy()
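
owl_dma_prep_memcpy() builds the LLI chain one owl_dma_alloc_lli() call at a time, and on any allocation failure unwinds the partially built descriptor through owl_dma_free_txd() (line 767). A condensed sketch; the per-chunk size cap (called OWL_DMA_FRAME_MAX_LENGTH here) and the hardware-field setup are assumptions, since neither appears in this listing:

static struct dma_async_tx_descriptor *
owl_dma_prep_memcpy(struct dma_chan *chan, dma_addr_t dst,
                    dma_addr_t src, size_t len, unsigned long flags)
{
        struct owl_dma *od = to_owl_dma(chan->device);
        struct owl_dma_vchan *vchan = to_owl_vchan(chan);
        struct owl_dma_txd *txd;
        struct owl_dma_lli *lli;
        size_t offset, bytes;

        if (!len)
                return NULL;

        txd = kzalloc(sizeof(*txd), GFP_NOWAIT);
        if (!txd)
                return NULL;

        INIT_LIST_HEAD(&txd->lli_list);

        for (offset = 0; offset < len; offset += bytes) {
                lli = owl_dma_alloc_lli(od);
                if (!lli)
                        goto err_txd_free;

                bytes = min_t(size_t, len - offset,
                              OWL_DMA_FRAME_MAX_LENGTH);
                /* ... fill lli with src + offset, dst + offset, bytes,
                 *     and link it onto txd->lli_list ... */
        }

        return vchan_tx_prep(&vchan->vc, &txd->vd, flags);

err_txd_free:
        owl_dma_free_txd(od, txd);
        return NULL;
}
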
779 static inline void owl_dma_free(struct owl_dma *od) in owl_dma_free() argument
785 next, &od->dma.channels, vc.chan.device_node) { in owl_dma_free()
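
owl_dma_free() walks od->dma.channels with the _safe list iterator (visible at line 785) because each vchan is unlinked as it is torn down. A sketch:

static inline void owl_dma_free(struct owl_dma *od)
{
        struct owl_dma_vchan *vchan;
        struct owl_dma_vchan *next;

        list_for_each_entry_safe(vchan,
                                 next, &od->dma.channels, vc.chan.device_node) {
                list_del(&vchan->vc.chan.device_node);
                tasklet_kill(&vchan->vc.task);
        }
}
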
794 struct owl_dma *od; in owl_dma_probe() local
798 od = devm_kzalloc(&pdev->dev, sizeof(*od), GFP_KERNEL); in owl_dma_probe()
799 if (!od) in owl_dma_probe()
806 od->base = devm_ioremap_resource(&pdev->dev, res); in owl_dma_probe()
807 if (IS_ERR(od->base)) in owl_dma_probe()
808 return PTR_ERR(od->base); in owl_dma_probe()
825 od->nr_pchans = nr_channels; in owl_dma_probe()
826 od->nr_vchans = nr_requests; in owl_dma_probe()
830 platform_set_drvdata(pdev, od); in owl_dma_probe()
831 spin_lock_init(&od->lock); in owl_dma_probe()
833 dma_cap_set(DMA_MEMCPY, od->dma.cap_mask); in owl_dma_probe()
835 od->dma.dev = &pdev->dev; in owl_dma_probe()
836 od->dma.device_free_chan_resources = owl_dma_free_chan_resources; in owl_dma_probe()
837 od->dma.device_tx_status = owl_dma_tx_status; in owl_dma_probe()
838 od->dma.device_issue_pending = owl_dma_issue_pending; in owl_dma_probe()
839 od->dma.device_prep_dma_memcpy = owl_dma_prep_memcpy; in owl_dma_probe()
840 od->dma.device_terminate_all = owl_dma_terminate_all; in owl_dma_probe()
841 od->dma.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES); in owl_dma_probe()
842 od->dma.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES); in owl_dma_probe()
843 od->dma.directions = BIT(DMA_MEM_TO_MEM); in owl_dma_probe()
844 od->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in owl_dma_probe()
846 INIT_LIST_HEAD(&od->dma.channels); in owl_dma_probe()
848 od->clk = devm_clk_get(&pdev->dev, NULL); in owl_dma_probe()
849 if (IS_ERR(od->clk)) { in owl_dma_probe()
851 return PTR_ERR(od->clk); in owl_dma_probe()
859 od->irq = platform_get_irq(pdev, 0); in owl_dma_probe()
860 ret = devm_request_irq(&pdev->dev, od->irq, owl_dma_interrupt, 0, in owl_dma_probe()
861 dev_name(&pdev->dev), od); in owl_dma_probe()
868 od->pchans = devm_kcalloc(&pdev->dev, od->nr_pchans, in owl_dma_probe()
870 if (!od->pchans) in owl_dma_probe()
873 for (i = 0; i < od->nr_pchans; i++) { in owl_dma_probe()
874 struct owl_dma_pchan *pchan = &od->pchans[i]; in owl_dma_probe()
877 pchan->base = od->base + OWL_DMA_CHAN_BASE(i); in owl_dma_probe()
881 od->vchans = devm_kcalloc(&pdev->dev, od->nr_vchans, in owl_dma_probe()
883 if (!od->vchans) in owl_dma_probe()
886 for (i = 0; i < od->nr_vchans; i++) { in owl_dma_probe()
887 struct owl_dma_vchan *vchan = &od->vchans[i]; in owl_dma_probe()
890 vchan_init(&vchan->vc, &od->dma); in owl_dma_probe()
894 od->lli_pool = dma_pool_create(dev_name(od->dma.dev), od->dma.dev, in owl_dma_probe()
898 if (!od->lli_pool) { in owl_dma_probe()
903 clk_prepare_enable(od->clk); in owl_dma_probe()
905 ret = dma_async_device_register(&od->dma); in owl_dma_probe()
914 clk_disable_unprepare(od->clk); in owl_dma_probe()
915 dma_pool_destroy(od->lli_pool); in owl_dma_probe()
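
The probe tail is where the ordering matters: the clock is enabled only just before dma_async_device_register(), so a registration failure must unwind both the clock and the LLI pool, exactly what the hits at lines 914-915 show. A skeleton of the presumed error path (the label name and error message are assumptions):

        clk_prepare_enable(od->clk);

        ret = dma_async_device_register(&od->dma);
        if (ret) {
                dev_err(&pdev->dev, "failed to register DMA engine device\n");
                goto err_pool_free;
        }

        return 0;

err_pool_free:
        clk_disable_unprepare(od->clk);
        dma_pool_destroy(od->lli_pool);

        return ret;
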
922 struct owl_dma *od = platform_get_drvdata(pdev); in owl_dma_remove() local
924 dma_async_device_unregister(&od->dma); in owl_dma_remove()
927 dma_writel(od, OWL_DMA_IRQ_EN0, 0x0); in owl_dma_remove()
930 devm_free_irq(od->dma.dev, od->irq, od); in owl_dma_remove()
932 owl_dma_free(od); in owl_dma_remove()
934 clk_disable_unprepare(od->clk); in owl_dma_remove()
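
Remove reverses probe, and the hits above show the two defensive steps: all controller interrupts are masked (OWL_DMA_IRQ_EN0 = 0) and the IRQ line is explicitly freed with devm_free_irq() before the channels and clock are torn down, so the handler can no longer run against a half-freed od. A sketch of the presumed whole:

static int owl_dma_remove(struct platform_device *pdev)
{
        struct owl_dma *od = platform_get_drvdata(pdev);

        dma_async_device_unregister(&od->dma);

        /* Mask all interrupts for this execution unit */
        dma_writel(od, OWL_DMA_IRQ_EN0, 0x0);

        /* Make sure we won't get any further interrupts */
        devm_free_irq(od->dma.dev, od->irq, od);

        owl_dma_free(od);

        clk_disable_unprepare(od->clk);

        return 0;
}
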