
Lines Matching +full:0 +full:d

26 #define DMA_MAX_SIZE		(0x10000 - 512)
29 #define REG_ZX_SRC_ADDR 0x00
30 #define REG_ZX_DST_ADDR 0x04
31 #define REG_ZX_TX_X_COUNT 0x08
32 #define REG_ZX_TX_ZY_COUNT 0x0c
33 #define REG_ZX_SRC_ZY_STEP 0x10
34 #define REG_ZX_DST_ZY_STEP 0x14
35 #define REG_ZX_LLI_ADDR 0x1c
36 #define REG_ZX_CTRL 0x20
37 #define REG_ZX_TC_IRQ 0x800
38 #define REG_ZX_SRC_ERR_IRQ 0x804
39 #define REG_ZX_DST_ERR_IRQ 0x808
40 #define REG_ZX_CFG_ERR_IRQ 0x80c
41 #define REG_ZX_TC_IRQ_RAW 0x810
42 #define REG_ZX_SRC_ERR_IRQ_RAW 0x814
43 #define REG_ZX_DST_ERR_IRQ_RAW 0x818
44 #define REG_ZX_CFG_ERR_IRQ_RAW 0x81c
45 #define REG_ZX_STATUS 0x820
46 #define REG_ZX_DMA_GRP_PRIO 0x824
47 #define REG_ZX_DMA_ARB 0x828
50 #define ZX_DST_BURST_WIDTH(x) (((x) & 0x7) << 13)
52 #define ZX_SRC_BURST_LEN(x) (((x) & 0xf) << 9)
53 #define ZX_SRC_BURST_WIDTH(x) (((x) & 0x7) << 6)
58 #define ZX_CH_ENABLE BIT(0)
68 ZX_DMA_WIDTH_8BIT = 0,
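The macros above carve the REG_ZX_CTRL word into bitfields. A minimal sketch of how a control value might be composed from them, assuming the remaining fields keep their reset values and that ZX_CH_ENABLE is what actually starts the channel (only the macros quoted above come from the driver):

    #include <linux/io.h>

    /* Hypothetical helper: build and write a channel control word from
     * the bitfield macros above. Field semantics beyond the shifts shown
     * in the listing are assumptions. */
    static void zx_write_ctrl_example(void __iomem *chan_base)
    {
        u32 ctrl = ZX_DST_BURST_WIDTH(ZX_DMA_WIDTH_8BIT) |  /* bits 15:13 */
                   ZX_SRC_BURST_LEN(0xf) |                  /* bits 12:9 */
                   ZX_SRC_BURST_WIDTH(ZX_DMA_WIDTH_8BIT) |  /* bits 8:6 */
                   ZX_CH_ENABLE;                            /* bit 0 */

        writel_relaxed(ctrl, chan_base + REG_ZX_CTRL);
    }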
138 static void zx_dma_terminate_chan(struct zx_dma_phy *phy, struct zx_dma_dev *d) in zx_dma_terminate_chan() argument
140 u32 val = 0; in zx_dma_terminate_chan()
147 val = 0x1 << phy->idx; in zx_dma_terminate_chan()
148 writel_relaxed(val, d->base + REG_ZX_TC_IRQ_RAW); in zx_dma_terminate_chan()
149 writel_relaxed(val, d->base + REG_ZX_SRC_ERR_IRQ_RAW); in zx_dma_terminate_chan()
150 writel_relaxed(val, d->base + REG_ZX_DST_ERR_IRQ_RAW); in zx_dma_terminate_chan()
151 writel_relaxed(val, d->base + REG_ZX_CFG_ERR_IRQ_RAW); in zx_dma_terminate_chan()
159 writel_relaxed(0, phy->base + REG_ZX_TX_ZY_COUNT); in zx_dma_set_desc()
160 writel_relaxed(0, phy->base + REG_ZX_SRC_ZY_STEP); in zx_dma_set_desc()
161 writel_relaxed(0, phy->base + REG_ZX_DST_ZY_STEP); in zx_dma_set_desc()
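Only the zero writes of zx_dma_set_desc() matched the query; they flatten the 2D (Y/Z) machinery for a plain 1D transfer. A sketch of the full per-descriptor programming they plausibly belong to, where the struct and field names (saddr, daddr, src_x, lli, ctr) are assumptions modelled on the register names above:

    /* Assumed hardware descriptor layout, mirroring the register map above. */
    struct zx_desc_hw_example {
        u32 saddr;   /* -> REG_ZX_SRC_ADDR */
        u32 daddr;   /* -> REG_ZX_DST_ADDR */
        u32 src_x;   /* -> REG_ZX_TX_X_COUNT */
        u32 lli;     /* -> REG_ZX_LLI_ADDR, 0 terminates the chain */
        u32 ctr;     /* -> REG_ZX_CTRL */
    };

    static void zx_set_desc_example(void __iomem *base,
                                    const struct zx_desc_hw_example *hw)
    {
        writel_relaxed(hw->saddr, base + REG_ZX_SRC_ADDR);
        writel_relaxed(hw->daddr, base + REG_ZX_DST_ADDR);
        writel_relaxed(hw->src_x, base + REG_ZX_TX_X_COUNT);
        writel_relaxed(0, base + REG_ZX_TX_ZY_COUNT);   /* 1D: no Y/Z row count */
        writel_relaxed(0, base + REG_ZX_SRC_ZY_STEP);   /* no source row stride */
        writel_relaxed(0, base + REG_ZX_DST_ZY_STEP);   /* no destination row stride */
        writel_relaxed(hw->lli, base + REG_ZX_LLI_ADDR);
        writel_relaxed(hw->ctr, base + REG_ZX_CTRL);    /* ZX_CH_ENABLE here kicks it off */
    }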
171 static u32 zx_dma_get_chan_stat(struct zx_dma_dev *d) in zx_dma_get_chan_stat() argument
173 return readl_relaxed(d->base + REG_ZX_STATUS); in zx_dma_get_chan_stat()
176 static void zx_dma_init_state(struct zx_dma_dev *d) in zx_dma_init_state() argument
179 writel_relaxed(0x0, d->base + REG_ZX_DMA_ARB); in zx_dma_init_state()
181 writel_relaxed(0xffffffff, d->base + REG_ZX_TC_IRQ_RAW); in zx_dma_init_state()
182 writel_relaxed(0xffffffff, d->base + REG_ZX_SRC_ERR_IRQ_RAW); in zx_dma_init_state()
183 writel_relaxed(0xffffffff, d->base + REG_ZX_DST_ERR_IRQ_RAW); in zx_dma_init_state()
184 writel_relaxed(0xffffffff, d->base + REG_ZX_CFG_ERR_IRQ_RAW); in zx_dma_init_state()
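zx_dma_terminate_chan() writes a single channel bit (0x1 << phy->idx) into the four *_IRQ_RAW registers, while zx_dma_init_state() writes 0xffffffff; both fit the common write-one-to-clear idiom. A minimal sketch under that assumption (struct zx_dma_dev is only known here to carry the register base):

    /* Assumed W1C semantics: each RAW-register bit tracks one physical
     * channel; writing 1 to a bit acknowledges that channel's event. */
    static void zx_ack_irqs_example(struct zx_dma_dev *d, u32 mask)
    {
        /* mask = BIT(idx) acks one channel (as in terminate_chan);
         * mask = 0xffffffff acks everything (as in init_state). */
        writel_relaxed(mask, d->base + REG_ZX_TC_IRQ_RAW);
        writel_relaxed(mask, d->base + REG_ZX_SRC_ERR_IRQ_RAW);
        writel_relaxed(mask, d->base + REG_ZX_DST_ERR_IRQ_RAW);
        writel_relaxed(mask, d->base + REG_ZX_CFG_ERR_IRQ_RAW);
    }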
189 struct zx_dma_dev *d = to_zx_dma(c->vc.chan.device); in zx_dma_start_txd() local
195 if (BIT(c->phy->idx) & zx_dma_get_chan_stat(d)) in zx_dma_start_txd()
210 return 0; in zx_dma_start_txd()
217 static void zx_dma_task(struct zx_dma_dev *d) in zx_dma_task() argument
221 unsigned pch, pch_alloc = 0; in zx_dma_task()
225 list_for_each_entry_safe(c, cn, &d->slave.channels, in zx_dma_task()
231 dev_dbg(d->slave.dev, "pchan %u: free\n", p->idx); in zx_dma_task()
239 /* check new channel request in d->chan_pending */ in zx_dma_task()
240 spin_lock_irqsave(&d->lock, flags); in zx_dma_task()
241 while (!list_empty(&d->chan_pending)) { in zx_dma_task()
242 c = list_first_entry(&d->chan_pending, in zx_dma_task()
244 p = &d->phy[c->id]; in zx_dma_task()
246 /* remove from d->chan_pending */ in zx_dma_task()
253 dev_dbg(d->slave.dev, "pchan %u: busy!\n", c->id); in zx_dma_task()
256 spin_unlock_irqrestore(&d->lock, flags); in zx_dma_task()
258 for (pch = 0; pch < d->dma_channels; pch++) { in zx_dma_task()
260 p = &d->phy[pch]; in zx_dma_task()
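The zx_dma_task() fragments show a scheduler that pairs waiters on d->chan_pending with free physical channels under d->lock, then starts the winners. A sketch reconstructed around the matched lines; the safe list walk, p->vchan as the "claimed" marker, and starting outside the lock are assumptions:

    static void zx_dma_task_sketch(struct zx_dma_dev *d)
    {
        struct zx_dma_chan *c, *cn;
        struct zx_dma_phy *p;
        unsigned int pch, pch_alloc = 0;
        unsigned long flags;

        spin_lock_irqsave(&d->lock, flags);
        list_for_each_entry_safe(c, cn, &d->chan_pending, node) {
            p = &d->phy[c->id];                 /* fixed vchan -> pchan mapping */
            if (!p->vchan) {
                list_del_init(&c->node);        /* remove from d->chan_pending */
                pch_alloc |= 1 << c->id;        /* remember to start it below */
                p->vchan = c;                   /* physical channel now claimed */
                c->phy = p;
            } else {
                dev_dbg(d->slave.dev, "pchan %u: busy!\n", c->id);
            }
        }
        spin_unlock_irqrestore(&d->lock, flags);

        /* Start the claimed channels outside of d->lock. */
        for (pch = 0; pch < d->dma_channels; pch++) {
            if (pch_alloc & (1 << pch)) {
                p = &d->phy[pch];
                spin_lock_irqsave(&p->vchan->vc.lock, flags);
                zx_dma_start_txd(p->vchan);
                spin_unlock_irqrestore(&p->vchan->vc.lock, flags);
            }
        }
    }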
273 struct zx_dma_dev *d = (struct zx_dma_dev *)dev_id; in zx_dma_int_handler() local
276 u32 tc = readl_relaxed(d->base + REG_ZX_TC_IRQ); in zx_dma_int_handler()
277 u32 serr = readl_relaxed(d->base + REG_ZX_SRC_ERR_IRQ); in zx_dma_int_handler()
278 u32 derr = readl_relaxed(d->base + REG_ZX_DST_ERR_IRQ); in zx_dma_int_handler()
279 u32 cfg = readl_relaxed(d->base + REG_ZX_CFG_ERR_IRQ); in zx_dma_int_handler()
280 u32 i, irq_chan = 0, task = 0; in zx_dma_int_handler()
285 p = &d->phy[i]; in zx_dma_int_handler()
302 dev_warn(d->slave.dev, "DMA ERR src 0x%x, dst 0x%x, cfg 0x%x\n", in zx_dma_int_handler()
305 writel_relaxed(irq_chan, d->base + REG_ZX_TC_IRQ_RAW); in zx_dma_int_handler()
306 writel_relaxed(serr, d->base + REG_ZX_SRC_ERR_IRQ_RAW); in zx_dma_int_handler()
307 writel_relaxed(derr, d->base + REG_ZX_DST_ERR_IRQ_RAW); in zx_dma_int_handler()
308 writel_relaxed(cfg, d->base + REG_ZX_CFG_ERR_IRQ_RAW); in zx_dma_int_handler()
311 zx_dma_task(d); in zx_dma_int_handler()
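Between reading the four *_IRQ registers and writing the RAW acks, the handler has to walk the completed channels and finish their descriptors. A sketch of that middle section using the virt-dma helpers this driver builds on; the ds_run/ds_done/vd field names and the cyclic split are assumptions consistent with c->cyclic elsewhere in the listing:

    /* Hypothetical middle of zx_dma_int_handler(): walk completed
     * channels (tc), complete or recycle the running descriptor, and
     * collect bits into irq_chan for the RAW ack writes shown above. */
    for (i = 0; i < d->dma_channels; i++) {
        if (!(tc & BIT(i)))
            continue;                               /* channel i did not complete */

        p = &d->phy[i];
        c = p->vchan;
        if (c) {
            spin_lock(&c->vc.lock);
            if (c->cyclic) {
                vchan_cyclic_callback(&p->ds_run->vd);  /* one period done */
            } else {
                vchan_cookie_complete(&p->ds_run->vd);  /* whole txd done */
                p->ds_done = p->ds_run;
                task = 1;                           /* re-run zx_dma_task() below */
            }
            spin_unlock(&c->vc.lock);
            irq_chan |= BIT(i);                     /* ack this bit in TC_IRQ_RAW */
        }
    }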
318 struct zx_dma_dev *d = to_zx_dma(chan->device); in zx_dma_free_chan_resources() local
321 spin_lock_irqsave(&d->lock, flags); in zx_dma_free_chan_resources()
323 spin_unlock_irqrestore(&d->lock, flags); in zx_dma_free_chan_resources()
326 c->ccfg = 0; in zx_dma_free_chan_resources()
338 size_t bytes = 0; in zx_dma_tx_status()
356 bytes = 0; in zx_dma_tx_status()
359 u32 clli = 0, index = 0; in zx_dma_tx_status()
361 bytes = 0; in zx_dma_tx_status()
380 struct zx_dma_dev *d = to_zx_dma(chan->device); in zx_dma_issue_pending() local
382 int issue = 0; in zx_dma_issue_pending()
387 spin_lock(&d->lock); in zx_dma_issue_pending()
390 list_add_tail(&c->node, &d->chan_pending); in zx_dma_issue_pending()
392 dev_dbg(d->slave.dev, "vchan %p: issued\n", &c->vc); in zx_dma_issue_pending()
394 spin_unlock(&d->lock); in zx_dma_issue_pending()
396 dev_dbg(d->slave.dev, "vchan %p: nothing to issue\n", &c->vc); in zx_dma_issue_pending()
401 zx_dma_task(d); in zx_dma_issue_pending()
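For context, the path into zx_dma_issue_pending() from a client goes through the generic dmaengine API. A minimal mem-to-mem consumer sketch; the function name is hypothetical, the DMA_MEMCPY capability matches what probe (below) advertises:

    #include <linux/dmaengine.h>

    static int zx_memcpy_client_example(dma_addr_t dst, dma_addr_t src, size_t len)
    {
        struct dma_async_tx_descriptor *tx;
        struct dma_chan *chan;
        dma_cap_mask_t mask;

        dma_cap_zero(mask);
        dma_cap_set(DMA_MEMCPY, mask);
        chan = dma_request_chan_by_mask(&mask);     /* any memcpy-capable channel */
        if (IS_ERR(chan))
            return PTR_ERR(chan);

        tx = dmaengine_prep_dma_memcpy(chan, dst, src, len, DMA_PREP_INTERRUPT);
        if (!tx) {
            dma_release_channel(chan);
            return -ENOMEM;
        }

        dmaengine_submit(tx);           /* queue on the virtual channel */
        dma_async_issue_pending(chan);  /* reaches zx_dma_issue_pending() */
        return 0;
    }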
421 struct zx_dma_dev *d = to_zx_dma(chan->device); in zx_alloc_desc_resource() local
425 dev_dbg(chan->device->dev, "vch %p: sg num %d exceeds max %d\n", in zx_alloc_desc_resource()
434 ds->desc_hw = dma_pool_zalloc(d->pool, GFP_NOWAIT, &ds->desc_hw_lli); in zx_alloc_desc_resource()
462 u32 maxburst = 0; in zx_pre_config()
501 return 0; in zx_pre_config()
510 size_t copy = 0; in zx_dma_prep_memcpy()
511 int num = 0; in zx_dma_prep_memcpy()
526 num = 0; in zx_dma_prep_memcpy()
537 c->cyclic = 0; in zx_dma_prep_memcpy()
538 ds->desc_hw[num - 1].lli = 0; /* end of link */ in zx_dma_prep_memcpy()
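DMA_MAX_SIZE at the top caps what one hardware descriptor can move, so zx_dma_prep_memcpy() must loop, filling chained descriptors until len is consumed; the matched lines are its tail. A sketch of the loop, where zx_dma_fill_desc() is an assumed helper that populates entry num of ds->desc_hw and links it to its successor:

    do {
        size_t copy = min_t(size_t, len, DMA_MAX_SIZE);

        /* Assumed helper: fill descriptor 'num' and chain it via lli. */
        zx_dma_fill_desc(ds, dst, src, copy, num++, c->ccfg);
        src += copy;
        dst += copy;
        len -= copy;
    } while (len);

    c->cyclic = 0;
    ds->desc_hw[num - 1].lli = 0;   /* end of link */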
549 size_t len, avail, total = 0; in zx_dma_prep_slave_sg()
551 dma_addr_t addr, src = 0, dst = 0; in zx_dma_prep_slave_sg()
570 c->cyclic = 0; in zx_dma_prep_slave_sg()
571 num = 0; in zx_dma_prep_slave_sg()
595 ds->desc_hw[num - 1].lli = 0; /* end of link */ in zx_dma_prep_slave_sg()
608 dma_addr_t src = 0, dst = 0; in zx_dma_prep_dma_cyclic()
610 int buf = 0, num = 0; in zx_dma_prep_dma_cyclic()
654 return 0; in zx_dma_config()
660 struct zx_dma_dev *d = to_zx_dma(chan->device); in zx_dma_terminate_all() local
665 dev_dbg(d->slave.dev, "vchan %p: terminate all\n", &c->vc); in zx_dma_terminate_all()
668 spin_lock(&d->lock); in zx_dma_terminate_all()
670 spin_unlock(&d->lock); in zx_dma_terminate_all()
677 zx_dma_terminate_chan(p, d); in zx_dma_terminate_all()
686 return 0; in zx_dma_terminate_all()
692 u32 val = 0; in zx_dma_transfer_pause()
698 return 0; in zx_dma_transfer_pause()
704 u32 val = 0; in zx_dma_transfer_resume()
710 return 0; in zx_dma_transfer_resume()
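Both zx_dma_transfer_pause() and zx_dma_transfer_resume() start from val = 0, which fits a read-modify-write of REG_ZX_CTRL around ZX_CH_ENABLE; that pairing is an assumption, but it is the natural use of the bit. A sketch:

    static void zx_set_ch_enable_example(void __iomem *chan_base, bool on)
    {
        u32 val = readl_relaxed(chan_base + REG_ZX_CTRL);

        if (on)
            val |= ZX_CH_ENABLE;    /* resume: let the channel run again */
        else
            val &= ~ZX_CH_ENABLE;   /* pause: gate the channel, keep its state */
        writel_relaxed(val, chan_base + REG_ZX_CTRL);
    }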
717 struct zx_dma_dev *d = to_zx_dma(vd->tx.chan->device); in zx_dma_free_desc() local
719 dma_pool_free(d->pool, ds->desc_hw, ds->desc_hw_lli); in zx_dma_free_desc()
732 struct zx_dma_dev *d = ofdma->of_dma_data; in zx_of_dma_simple_xlate() local
733 unsigned int request = dma_spec->args[0]; in zx_of_dma_simple_xlate()
737 if (request >= d->dma_requests) in zx_of_dma_simple_xlate()
740 chan = dma_get_any_slave_channel(&d->slave); in zx_of_dma_simple_xlate()
742 dev_err(d->slave.dev, "get channel fail in %s.\n", __func__); in zx_of_dma_simple_xlate()
747 dev_info(d->slave.dev, "zx_dma: pchan %u: alloc vchan %p\n", in zx_of_dma_simple_xlate()
754 struct zx_dma_dev *d; in zx_dma_probe() local
755 int i, ret = 0; in zx_dma_probe()
757 d = devm_kzalloc(&op->dev, sizeof(*d), GFP_KERNEL); in zx_dma_probe()
758 if (!d) in zx_dma_probe()
761 d->base = devm_platform_ioremap_resource(op, 0); in zx_dma_probe()
762 if (IS_ERR(d->base)) in zx_dma_probe()
763 return PTR_ERR(d->base); in zx_dma_probe()
766 "dma-channels", &d->dma_channels); in zx_dma_probe()
768 "dma-requests", &d->dma_requests); in zx_dma_probe()
769 if (!d->dma_requests || !d->dma_channels) in zx_dma_probe()
772 d->clk = devm_clk_get(&op->dev, NULL); in zx_dma_probe()
773 if (IS_ERR(d->clk)) { in zx_dma_probe()
775 return PTR_ERR(d->clk); in zx_dma_probe()
778 d->irq = platform_get_irq(op, 0); in zx_dma_probe()
779 ret = devm_request_irq(&op->dev, d->irq, zx_dma_int_handler, in zx_dma_probe()
780 0, DRIVER_NAME, d); in zx_dma_probe()
785 d->pool = dmam_pool_create(DRIVER_NAME, &op->dev, in zx_dma_probe()
786 LLI_BLOCK_SIZE, 32, 0); in zx_dma_probe()
787 if (!d->pool) in zx_dma_probe()
791 d->phy = devm_kcalloc(&op->dev, in zx_dma_probe()
792 d->dma_channels, sizeof(struct zx_dma_phy), GFP_KERNEL); in zx_dma_probe()
793 if (!d->phy) in zx_dma_probe()
796 for (i = 0; i < d->dma_channels; i++) { in zx_dma_probe()
797 struct zx_dma_phy *p = &d->phy[i]; in zx_dma_probe()
800 p->base = d->base + i * 0x40; in zx_dma_probe()
803 INIT_LIST_HEAD(&d->slave.channels); in zx_dma_probe()
804 dma_cap_set(DMA_SLAVE, d->slave.cap_mask); in zx_dma_probe()
805 dma_cap_set(DMA_MEMCPY, d->slave.cap_mask); in zx_dma_probe()
806 dma_cap_set(DMA_CYCLIC, d->slave.cap_mask); in zx_dma_probe()
807 dma_cap_set(DMA_PRIVATE, d->slave.cap_mask); in zx_dma_probe()
808 d->slave.dev = &op->dev; in zx_dma_probe()
809 d->slave.device_free_chan_resources = zx_dma_free_chan_resources; in zx_dma_probe()
810 d->slave.device_tx_status = zx_dma_tx_status; in zx_dma_probe()
811 d->slave.device_prep_dma_memcpy = zx_dma_prep_memcpy; in zx_dma_probe()
812 d->slave.device_prep_slave_sg = zx_dma_prep_slave_sg; in zx_dma_probe()
813 d->slave.device_prep_dma_cyclic = zx_dma_prep_dma_cyclic; in zx_dma_probe()
814 d->slave.device_issue_pending = zx_dma_issue_pending; in zx_dma_probe()
815 d->slave.device_config = zx_dma_config; in zx_dma_probe()
816 d->slave.device_terminate_all = zx_dma_terminate_all; in zx_dma_probe()
817 d->slave.device_pause = zx_dma_transfer_pause; in zx_dma_probe()
818 d->slave.device_resume = zx_dma_transfer_resume; in zx_dma_probe()
819 d->slave.copy_align = DMA_ALIGN; in zx_dma_probe()
820 d->slave.src_addr_widths = ZX_DMA_BUSWIDTHS; in zx_dma_probe()
821 d->slave.dst_addr_widths = ZX_DMA_BUSWIDTHS; in zx_dma_probe()
822 d->slave.directions = BIT(DMA_MEM_TO_MEM) | BIT(DMA_MEM_TO_DEV) in zx_dma_probe()
824 d->slave.residue_granularity = DMA_RESIDUE_GRANULARITY_SEGMENT; in zx_dma_probe()
827 d->chans = devm_kcalloc(&op->dev, in zx_dma_probe()
828 d->dma_requests, sizeof(struct zx_dma_chan), GFP_KERNEL); in zx_dma_probe()
829 if (!d->chans) in zx_dma_probe()
832 for (i = 0; i < d->dma_requests; i++) { in zx_dma_probe()
833 struct zx_dma_chan *c = &d->chans[i]; in zx_dma_probe()
838 vchan_init(&c->vc, &d->slave); in zx_dma_probe()
842 ret = clk_prepare_enable(d->clk); in zx_dma_probe()
843 if (ret < 0) { in zx_dma_probe()
844 dev_err(&op->dev, "clk_prepare_enable failed: %d\n", ret); in zx_dma_probe()
848 zx_dma_init_state(d); in zx_dma_probe()
850 spin_lock_init(&d->lock); in zx_dma_probe()
851 INIT_LIST_HEAD(&d->chan_pending); in zx_dma_probe()
852 platform_set_drvdata(op, d); in zx_dma_probe()
854 ret = dma_async_device_register(&d->slave); in zx_dma_probe()
859 zx_of_dma_simple_xlate, d); in zx_dma_probe()
864 return 0; in zx_dma_probe()
867 dma_async_device_unregister(&d->slave); in zx_dma_probe()
869 clk_disable_unprepare(d->clk); in zx_dma_probe()
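The two error-path lines at the end of probe imply the usual reverse-order unwind: a failed of_dma registration must unregister the dmaengine device, and any failure after clk_prepare_enable() must disable the clock. A sketch of the tail with assumed label names:

    ret = dma_async_device_register(&d->slave);
    if (ret)
        goto clk_dis;

    ret = of_dma_controller_register(op->dev.of_node,
                                     zx_of_dma_simple_xlate, d);
    if (ret)
        goto of_dma_register_fail;

    return 0;

    of_dma_register_fail:
        dma_async_device_unregister(&d->slave);
    clk_dis:
        clk_disable_unprepare(d->clk);
        return ret;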
877 struct zx_dma_dev *d = platform_get_drvdata(op); in zx_dma_remove() local
880 devm_free_irq(&op->dev, d->irq, d); in zx_dma_remove()
882 dma_async_device_unregister(&d->slave); in zx_dma_remove()
885 list_for_each_entry_safe(c, cn, &d->slave.channels, in zx_dma_remove()
889 clk_disable_unprepare(d->clk); in zx_dma_remove()
891 return 0; in zx_dma_remove()
897 struct zx_dma_dev *d = dev_get_drvdata(dev); in zx_dma_suspend_dev() local
898 u32 stat = 0; in zx_dma_suspend_dev()
900 stat = zx_dma_get_chan_stat(d); in zx_dma_suspend_dev()
902 dev_warn(d->slave.dev, in zx_dma_suspend_dev()
903 "chan %d is running fail to suspend\n", stat); in zx_dma_suspend_dev()
906 clk_disable_unprepare(d->clk); in zx_dma_suspend_dev()
907 return 0; in zx_dma_suspend_dev()
912 struct zx_dma_dev *d = dev_get_drvdata(dev); in zx_dma_resume_dev() local
913 int ret = 0; in zx_dma_resume_dev()
915 ret = clk_prepare_enable(d->clk); in zx_dma_resume_dev()
916 if (ret < 0) { in zx_dma_resume_dev()
917 dev_err(d->slave.dev, "clk_prepare_enable failed: %d\n", ret); in zx_dma_resume_dev()
920 zx_dma_init_state(d); in zx_dma_resume_dev()
921 return 0; in zx_dma_resume_dev()
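A sketch of how the two PM callbacks above would typically be wired up; the ops symbol name is an assumption:

    static const struct dev_pm_ops zx_dma_pmops_example = {
        SET_SYSTEM_SLEEP_PM_OPS(zx_dma_suspend_dev, zx_dma_resume_dev)
    };

    /* referenced from the platform driver as .driver.pm = &zx_dma_pmops_example */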