Lines matching the identifier chan in the Synopsys DesignWare AXI DMA controller (dw-axi-dmac) driver, grouped by containing function. Lines without a match are elided as /* ... */.
static inline void
axi_chan_iowrite32(struct axi_dma_chan *chan, u32 reg, u32 val)
{
	iowrite32(val, chan->chan_regs + reg);
}

static inline u32 axi_chan_ioread32(struct axi_dma_chan *chan, u32 reg)
{
	return ioread32(chan->chan_regs + reg);
}

static inline void
axi_chan_iowrite64(struct axi_dma_chan *chan, u32 reg, u64 val)
{
	/* A 64-bit register is written as two 32-bit halves, low half first. */
	iowrite32(lower_32_bits(val), chan->chan_regs + reg);
	iowrite32(upper_32_bits(val), chan->chan_regs + reg + 4);
}
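The split write above is the usual idiom when a 64-bit register sits behind a port that may only accept 32-bit accesses. A minimal userspace sketch of the same idiom, with local stand-ins for the kernel's lower_32_bits()/upper_32_bits() helpers:

#include <stdint.h>
#include <stdio.h>

static inline uint32_t lo32(uint64_t v) { return (uint32_t)v; }
static inline uint32_t hi32(uint64_t v) { return (uint32_t)(v >> 32); }

int main(void)
{
	uint32_t reg[2] = { 0, 0 };           /* fake 64-bit register: two 32-bit halves */
	uint64_t llp = 0x0000000123456000ULL; /* invented descriptor address */

	reg[0] = lo32(llp);                   /* low half first, as in axi_chan_iowrite64() */
	reg[1] = hi32(llp);

	printf("lo=0x%08x hi=0x%08x\n", reg[0], reg[1]);
	return 0;
}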
static inline void axi_chan_irq_disable(struct axi_dma_chan *chan, u32 irq_mask)
{
	u32 val;

	if (likely(irq_mask == DWAXIDMAC_IRQ_ALL)) {
		axi_chan_iowrite32(chan, CH_INTSTATUS_ENA, DWAXIDMAC_IRQ_NONE);
	} else {
		val = axi_chan_ioread32(chan, CH_INTSTATUS_ENA);
		val &= ~irq_mask;
		axi_chan_iowrite32(chan, CH_INTSTATUS_ENA, val);
	}
}

static inline void axi_chan_irq_set(struct axi_dma_chan *chan, u32 irq_mask)
{
	axi_chan_iowrite32(chan, CH_INTSTATUS_ENA, irq_mask);
}

static inline void axi_chan_irq_sig_set(struct axi_dma_chan *chan, u32 irq_mask)
{
	axi_chan_iowrite32(chan, CH_INTSIGNAL_ENA, irq_mask);
}

static inline void axi_chan_irq_clear(struct axi_dma_chan *chan, u32 irq_mask)
{
	axi_chan_iowrite32(chan, CH_INTCLEAR, irq_mask);
}

static inline u32 axi_chan_irq_read(struct axi_dma_chan *chan)
{
	return axi_chan_ioread32(chan, CH_INTSTATUS);
}
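Four per-channel registers cooperate here: CH_INTSTATUS_ENA gates which events get latched into CH_INTSTATUS, CH_INTSIGNAL_ENA gates which latched events drive the interrupt line, and CH_INTCLEAR acks them write-1-to-clear. A deliberately simplified toy model of that flow (the gating and bit values are illustrative, not taken from the databook):

#include <stdint.h>
#include <stdio.h>

static uint32_t status_ena, signal_ena, status;

static void hw_event(uint32_t ev)  { status |= ev & status_ena; }  /* latch if enabled */
static int  irq_line(void)         { return !!(status & signal_ena); }
static void irq_clear(uint32_t m)  { status &= ~m; }               /* write-1-to-clear */

int main(void)
{
	status_ena = 0x3;   /* axi_chan_irq_set(): record events 0 and 1 */
	signal_ena = 0x1;   /* axi_chan_irq_sig_set(): only event 0 raises the line */

	hw_event(0x2);
	printf("status=0x%x line=%d\n", status, irq_line());  /* latched, line quiet */
	hw_event(0x1);
	printf("status=0x%x line=%d\n", status, irq_line());  /* line asserted */
	irq_clear(0x1);                                       /* axi_chan_irq_clear() */
	printf("status=0x%x line=%d\n", status, irq_line());
	return 0;
}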
static inline void axi_chan_disable(struct axi_dma_chan *chan)
{
	u32 val;

	val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
	val &= ~(BIT(chan->id) << DMAC_CHAN_EN_SHIFT);
	val |= BIT(chan->id) << DMAC_CHAN_EN_WE_SHIFT;
	axi_dma_iowrite32(chan->chip, DMAC_CHEN, val);
}

static inline void axi_chan_enable(struct axi_dma_chan *chan)
{
	u32 val;

	val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
	val |= BIT(chan->id) << DMAC_CHAN_EN_SHIFT |
	       BIT(chan->id) << DMAC_CHAN_EN_WE_SHIFT;
	axi_dma_iowrite32(chan->chip, DMAC_CHEN, val);
}

static inline bool axi_chan_is_hw_enable(struct axi_dma_chan *chan)
{
	u32 val;

	val = axi_dma_ioread32(chan->chip, DMAC_CHEN);

	return !!(val & (BIT(chan->id) << DMAC_CHAN_EN_SHIFT));
}
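DMAC_CHEN pairs every channel-enable bit with a write-enable bit: the hardware only latches EN bits whose WE bit is set in the same write, so flipping one channel cannot disturb its neighbours. A toy model of that register semantics (the field offsets are placeholders, not the driver's real DMAC_CHAN_EN_SHIFT/DMAC_CHAN_EN_WE_SHIFT values):

#include <stdint.h>
#include <stdio.h>

#define EN_SHIFT 0   /* placeholder field offsets for the demo */
#define WE_SHIFT 8

static uint32_t chen;   /* simulated DMAC_CHEN */

static void chen_write(uint32_t val)
{
	uint32_t we = (val >> WE_SHIFT) & 0xff;
	uint32_t en = (val >> EN_SHIFT) & 0xff;
	uint32_t cur = (chen >> EN_SHIFT) & 0xff;

	/* Only EN bits with their WE bit set in the same write are latched. */
	chen = ((cur & ~we) | (en & we)) << EN_SHIFT;
}

int main(void)
{
	chen_write(1u << 2 << EN_SHIFT | 1u << 2 << WE_SHIFT);  /* enable chan 2 */
	chen_write(1u << 0 << EN_SHIFT | 1u << 0 << WE_SHIFT);  /* enable chan 0 */
	chen_write(1u << 2 << WE_SHIFT);                        /* disable chan 2 only */
	printf("CHEN = 0x%02x\n", chen & 0xff);                 /* -> 0x01 */
	return 0;
}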
In axi_dma_hw_init():
	axi_chan_irq_disable(&chip->dw->chan[i], DWAXIDMAC_IRQ_ALL);
	axi_chan_disable(&chip->dw->chan[i]);
static u32 axi_chan_get_xfer_width(struct axi_dma_chan *chan, dma_addr_t src,
				   dma_addr_t dst, size_t len)
{
	u32 max_width = chan->chip->dw->hdata->m_data_width;
	/* ... */
}
static inline const char *axi_chan_name(struct axi_dma_chan *chan)
{
	return dma_chan_name(&chan->vc.chan);
}
static struct axi_dma_desc *axi_desc_get(struct axi_dma_chan *chan)
{
	struct dw_axi_dma *dw = chan->chip->dw;
	/* ... */
		dev_err(chan2dev(chan), "%s: not enough descriptors available\n",
			axi_chan_name(chan));
	/* ... */
	atomic_inc(&chan->descs_allocated);
	/* ... */
	desc->chan = chan;
	/* ... */
}
In axi_desc_put():
	struct axi_dma_chan *chan = desc->chan;
	struct dw_axi_dma *dw = chan->chip->dw;
	/* ... */
	atomic_sub(descs_put, &chan->descs_allocated);
	dev_vdbg(chan2dev(chan), "%s: %d descs put, %d still allocated\n",
		 axi_chan_name(chan), descs_put,
		 atomic_read(&chan->descs_allocated));
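The allocation counter is atomic, presumably because descriptors are taken in submit context while whole chains are released from completion context, and a bare counter needs no lock. The same accounting shape in stand-alone C11 (the call sites below are invented):

#include <stdatomic.h>
#include <stdio.h>

static atomic_int descs_allocated;

static void desc_get(void)          { atomic_fetch_add(&descs_allocated, 1); }
static void desc_put(int descs_put) { atomic_fetch_sub(&descs_allocated, descs_put); }

int main(void)
{
	desc_get();      /* two descriptors taken for a chained transfer */
	desc_get();
	desc_put(2);     /* whole chain released at completion */
	printf("still allocated: %d\n", atomic_load(&descs_allocated));
	return 0;
}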
In dma_chan_tx_status():
	struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan);
	/* ... */
	if (chan->is_paused && ret == DMA_IN_PROGRESS)
		ret = DMA_PAUSED;
/* CH_LLP holds the address of the first link-list item. */
static void write_chan_llp(struct axi_dma_chan *chan, dma_addr_t adr)
{
	axi_chan_iowrite64(chan, CH_LLP, adr);
}
static void axi_chan_block_xfer_start(struct axi_dma_chan *chan,
				      struct axi_dma_desc *first)
{
	u32 priority = chan->chip->dw->hdata->priority[chan->id];
	/* ... */
	if (unlikely(axi_chan_is_hw_enable(chan))) {
		dev_err(chan2dev(chan), "%s is non-idle!\n",
			axi_chan_name(chan));
		return;
	}

	axi_dma_enable(chan->chip);
	/* ... */
	axi_chan_iowrite32(chan, CH_CFG_L, reg);
	/* ... */
	axi_chan_iowrite32(chan, CH_CFG_H, reg);

	write_chan_llp(chan, first->vd.tx.phys | lms);
	/* ... */
	axi_chan_irq_sig_set(chan, irq_mask);
	/* ... */
	axi_chan_irq_set(chan, irq_mask);

	axi_chan_enable(chan);
}
static void axi_chan_start_first_queued(struct axi_dma_chan *chan)
{
	/* ... */
	vd = vchan_next_desc(&chan->vc);
	/* ... */
	dev_vdbg(chan2dev(chan), "%s: started %u\n", axi_chan_name(chan),
		 vd->tx.cookie);
	axi_chan_block_xfer_start(chan, desc);
}
In dma_chan_issue_pending():
	struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan);
	unsigned long flags;

	spin_lock_irqsave(&chan->vc.lock, flags);
	if (vchan_issue_pending(&chan->vc))
		axi_chan_start_first_queued(chan);
	spin_unlock_irqrestore(&chan->vc.lock, flags);
In dma_chan_alloc_chan_resources():
	struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan);

	/* ASSERT: the channel must be idle before (re)allocating resources. */
	if (axi_chan_is_hw_enable(chan)) {
		dev_err(chan2dev(chan), "%s is non-idle!\n",
			axi_chan_name(chan));
		return -EBUSY;
	}

	dev_vdbg(dchan2dev(dchan), "%s: allocating\n", axi_chan_name(chan));

	pm_runtime_get(chan->chip->dev);
In dma_chan_free_chan_resources():
	struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan);

	/* ASSERT: the channel should be idle by the time resources are freed. */
	if (axi_chan_is_hw_enable(chan))
		dev_err(dchan2dev(dchan), "%s is non-idle!\n",
			axi_chan_name(chan));

	axi_chan_disable(chan);
	axi_chan_irq_disable(chan, DWAXIDMAC_IRQ_ALL);

	vchan_free_chan_resources(&chan->vc);
	/* ... */
	dev_vdbg(dchan2dev(dchan), "%s: free resources, descriptors still allocated: %u\n",
		 axi_chan_name(chan), atomic_read(&chan->descs_allocated));

	pm_runtime_put(chan->chip->dev);
In set_desc_dest_master():
	/* Select the destination AXI master interface when more than one exists. */
	if (desc->chan->chip->dw->hdata->nr_masters > 1)
		/* ... */
In dma_chan_prep_dma_memcpy():
	struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan);
	/* ... */
	dev_dbg(chan2dev(chan), "%s: memcpy: src: %pad dst: %pad length: %zd flags: %#lx",
		axi_chan_name(chan), &src_adr, &dst_adr, len, flags);

	max_block_ts = chan->chip->dw->hdata->block_size[chan->id];
	/* ... */
	xfer_width = axi_chan_get_xfer_width(chan, src_adr, dst_adr, xfer_len);
	/* ... */
	desc = axi_desc_get(chan);
	/* ... */
	if (chan->chip->dw->hdata->restrict_axi_burst_len) {
		u32 burst_len = chan->chip->dw->hdata->axi_rw_burst_len;
		/* ... */
	}
	/* ... */
	return vchan_tx_prep(&chan->vc, &first->vd, flags);
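Between axi_desc_get() and vchan_tx_prep() the requested length is cut into hardware-sized blocks: each block moves at most max_block_ts beats of 2^xfer_width bytes, so long copies become a chain of descriptors. A stand-alone sketch of that arithmetic with invented values:

#include <stddef.h>
#include <stdio.h>

int main(void)
{
	size_t len = 1000000;        /* requested copy length (invented) */
	size_t max_block_ts = 4096;  /* max beats per block (invented) */
	unsigned xfer_width = 2;     /* log2 of bytes per beat: 4-byte beats */

	size_t block_bytes = max_block_ts << xfer_width;
	size_t nblocks = (len + block_bytes - 1) / block_bytes;

	printf("%zu bytes -> %zu chained block descriptors of <= %zu bytes\n",
	       len, nblocks, block_bytes);
	return 0;
}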
static void axi_chan_dump_lli(struct axi_dma_chan *chan,
			      struct axi_dma_desc *desc)
{
	dev_err(dchan2dev(&chan->vc.chan),
		/* ... LLI register fields ... */);
}

static void axi_chan_list_dump_lli(struct axi_dma_chan *chan,
				   struct axi_dma_desc *desc_head)
{
	/* ... */
	axi_chan_dump_lli(chan, desc_head);
	/* ... */
		axi_chan_dump_lli(chan, desc);
}
static noinline void axi_chan_handle_err(struct axi_dma_chan *chan, u32 status)
{
	/* ... */
	spin_lock_irqsave(&chan->vc.lock, flags);

	axi_chan_disable(chan);

	/* The bad descriptor is currently at the head of the vc list. */
	vd = vchan_next_desc(&chan->vc);
	if (!vd) {
		dev_err(chan2dev(chan), "BUG: %s, IRQ with no descriptors\n",
			axi_chan_name(chan));
		goto out;
	}
	/* ... */
	dev_err(chan2dev(chan),
		"Bad descriptor submitted for %s, cookie: %d, irq: 0x%08x\n",
		axi_chan_name(chan), vd->tx.cookie, status);
	axi_chan_list_dump_lli(chan, vd_to_axi_desc(vd));
	/* ... */
	axi_chan_start_first_queued(chan);

out:
	spin_unlock_irqrestore(&chan->vc.lock, flags);
}
static void axi_chan_block_xfer_complete(struct axi_dma_chan *chan)
{
	/* ... */
	spin_lock_irqsave(&chan->vc.lock, flags);
	if (unlikely(axi_chan_is_hw_enable(chan))) {
		dev_err(chan2dev(chan), "BUG: %s caught DWAXIDMAC_IRQ_DMA_TRF, but channel not idle!\n",
			axi_chan_name(chan));
		axi_chan_disable(chan);
	}

	/* The completed descriptor is at the head of the vc list. */
	vd = vchan_next_desc(&chan->vc);
	/* ... */
	/* Submit the next queued descriptor, if any. */
	axi_chan_start_first_queued(chan);

	spin_unlock_irqrestore(&chan->vc.lock, flags);
}
In dw_axi_dma_interrupt():
	struct axi_dma_chan *chan;
	/* ... */
	/* Read, ack and dispatch every channel's interrupt status. */
	for (i = 0; i < dw->hdata->nr_channels; i++) {
		chan = &dw->chan[i];
		status = axi_chan_irq_read(chan);
		axi_chan_irq_clear(chan, status);

		dev_vdbg(chip->dev, "%s %u IRQ status: 0x%08x\n",
			 axi_chan_name(chan), i, status);

		if (status & DWAXIDMAC_IRQ_ALL_ERR)
			axi_chan_handle_err(chan, status);
		else if (status & DWAXIDMAC_IRQ_DMA_TRF)
			axi_chan_block_xfer_complete(chan);
	}
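The read-then-ack-then-dispatch order above is the usual shape for a latched-status ISR: status is sampled once, acknowledged immediately so new events can latch, and errors are routed before completions. A host-side toy of that dispatch (the bit positions are invented for the demo):

#include <stdint.h>
#include <stdio.h>

#define IRQ_DMA_TRF (1u << 1)   /* invented bit positions for the demo */
#define IRQ_ALL_ERR (1u << 5)

static uint32_t latched;   /* pretend CH_INTSTATUS */

static void service(void)
{
	uint32_t status = latched;   /* axi_chan_irq_read() */
	latched = 0;                 /* axi_chan_irq_clear(status) */

	if (status & IRQ_ALL_ERR)
		printf("error path, status=0x%x\n", status);
	else if (status & IRQ_DMA_TRF)
		printf("transfer complete\n");
}

int main(void)
{
	latched = IRQ_DMA_TRF;
	service();
	latched = IRQ_DMA_TRF | IRQ_ALL_ERR;   /* the error path wins over completion */
	service();
	return 0;
}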
In dma_chan_terminate_all():
	struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&chan->vc.lock, flags);

	axi_chan_disable(chan);

	vchan_get_all_descriptors(&chan->vc, &head);

	spin_unlock_irqrestore(&chan->vc.lock, flags);

	vchan_dma_desc_free_list(&chan->vc, &head);

	dev_vdbg(dchan2dev(dchan), "terminated: %s\n", axi_chan_name(chan));
In dma_chan_pause():
	struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan);
	/* ... */
	spin_lock_irqsave(&chan->vc.lock, flags);

	val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
	val |= BIT(chan->id) << DMAC_CHAN_SUSP_SHIFT |
	       BIT(chan->id) << DMAC_CHAN_SUSP_WE_SHIFT;
	axi_dma_iowrite32(chan->chip, DMAC_CHEN, val);

	/* Poll until the hardware reports the channel as suspended. */
	do {
		if (axi_chan_irq_read(chan) & DWAXIDMAC_IRQ_SUSPENDED)
			break;

		udelay(2);
	} while (--timeout);

	axi_chan_irq_clear(chan, DWAXIDMAC_IRQ_SUSPENDED);

	chan->is_paused = true;

	spin_unlock_irqrestore(&chan->vc.lock, flags);
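Pausing is asynchronous: the driver requests suspension via the SUSP/SUSP_WE bits, then polls the status a bounded number of times before giving up. The same bounded-poll shape in plain C (the retry budget and the fake status source are invented):

#include <stdio.h>

static int reads;

/* Fake status read: pretend the SUSPENDED bit appears on the 3rd poll. */
static int read_suspended(void)
{
	return ++reads >= 3;
}

int main(void)
{
	int timeout = 20;   /* invented retry budget */

	do {
		if (read_suspended())
			break;
		/* the driver sleeps ~2us here (udelay) */
	} while (--timeout);

	printf(timeout ? "suspended after %d polls\n" : "timed out (%d)\n", reads);
	return 0;
}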
static inline void axi_chan_resume(struct axi_dma_chan *chan)
{
	u32 val;

	val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
	val &= ~(BIT(chan->id) << DMAC_CHAN_SUSP_SHIFT);
	val |=  (BIT(chan->id) << DMAC_CHAN_SUSP_WE_SHIFT);
	axi_dma_iowrite32(chan->chip, DMAC_CHEN, val);

	chan->is_paused = false;
}
In dma_chan_resume():
	struct axi_dma_chan *chan = dchan_to_axi_dma_chan(dchan);
	unsigned long flags;

	spin_lock_irqsave(&chan->vc.lock, flags);

	if (chan->is_paused)
		axi_chan_resume(chan);

	spin_unlock_irqrestore(&chan->vc.lock, flags);
In dw_probe():
	dw->chan = devm_kcalloc(chip->dev, hdata->nr_channels,
				sizeof(*dw->chan), GFP_KERNEL);
	if (!dw->chan)
		return -ENOMEM;
	/* ... */
	for (i = 0; i < hdata->nr_channels; i++) {
		struct axi_dma_chan *chan = &dw->chan[i];

		chan->chip = chip;
		chan->id = i;
		chan->chan_regs = chip->regs + COMMON_REG_LEN + i * CHAN_REG_LEN;
		atomic_set(&chan->descs_allocated, 0);

		chan->vc.desc_free = vchan_desc_put;
		vchan_init(&chan->vc, &dw->dma);
	}
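Each channel's register window is carved out of the single mapped region at a fixed stride: a shared block of COMMON_REG_LEN bytes, then CHAN_REG_LEN bytes per channel. A stand-alone illustration of the address arithmetic (the lengths and base below are placeholders, not the driver's header values):

#include <stdint.h>
#include <stdio.h>

#define COMMON_REG_LEN 0x100   /* placeholder sizes for the demo */
#define CHAN_REG_LEN   0x100

int main(void)
{
	uintptr_t regs = 0x40000000;   /* pretend ioremap()ed base address */

	for (unsigned i = 0; i < 4; i++)
		printf("chan %u regs @ 0x%08lx\n", i,
		       (unsigned long)(regs + COMMON_REG_LEN + i * CHAN_REG_LEN));
	return 0;
}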
In dw_remove():
	struct axi_dma_chan *chan, *_chan;
	/* ... */
	for (i = 0; i < dw->hdata->nr_channels; i++) {
		axi_chan_disable(&chip->dw->chan[i]);
		axi_chan_irq_disable(&chip->dw->chan[i], DWAXIDMAC_IRQ_ALL);
	}
	/* ... */
	list_for_each_entry_safe(chan, _chan, &dw->dma.channels,
				 vc.chan.device_node) {
		list_del(&chan->vc.chan.device_node);
		tasklet_kill(&chan->vc.task);
	}