Lines matching refs:dwc: every line in drivers/dma/dw/core.c (the Synopsys DesignWare AHB DMA driver) that references the channel pointer dwc, a struct dw_dma_chan *. Each entry shows the source line number, the matched line, and the enclosing function; the trailing "argument" or "local" marks the line where dwc is declared in that function.
51 static struct dw_desc *dwc_first_active(struct dw_dma_chan *dwc) in dwc_first_active() argument
53 return to_dw_desc(dwc->active_list.next); in dwc_first_active()
59 struct dw_dma_chan *dwc = to_dw_dma_chan(tx->chan); in dwc_tx_submit() local
63 spin_lock_irqsave(&dwc->lock, flags); in dwc_tx_submit()
72 list_add_tail(&desc->desc_node, &dwc->queue); in dwc_tx_submit()
73 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_tx_submit()
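The three dwc_tx_submit() lines above are the standard dmaengine submit pattern: take the channel lock, assign a completion cookie, and park the descriptor on the software queue until issue_pending(). A minimal sketch of the full flow, assuming the driver's txd_to_dw_desc() container_of helper and the generic dma_cookie_assign() (the listing only shows the lines that mention dwc):

    static dma_cookie_t dwc_tx_submit(struct dma_async_tx_descriptor *tx)
    {
        struct dw_dma_chan *dwc = to_dw_dma_chan(tx->chan);
        struct dw_desc *desc = txd_to_dw_desc(tx);
        dma_cookie_t cookie;
        unsigned long flags;

        spin_lock_irqsave(&dwc->lock, flags);
        cookie = dma_cookie_assign(tx);    /* reserve a completion cookie */
        /* Queue only; nothing starts until issue_pending()/the tasklet. */
        list_add_tail(&desc->desc_node, &dwc->queue);
        spin_unlock_irqrestore(&dwc->lock, flags);

        return cookie;
    }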
80 static struct dw_desc *dwc_desc_get(struct dw_dma_chan *dwc) in dwc_desc_get() argument
82 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_desc_get()
90 dwc->descs_allocated++; in dwc_desc_get()
92 dma_async_tx_descriptor_init(&desc->txd, &dwc->chan); in dwc_desc_get()
99 static void dwc_desc_put(struct dw_dma_chan *dwc, struct dw_desc *desc) in dwc_desc_put() argument
101 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_desc_put()
110 dwc->descs_allocated--; in dwc_desc_put()
114 dwc->descs_allocated--; in dwc_desc_put()
117 static void dwc_initialize(struct dw_dma_chan *dwc) in dwc_initialize() argument
119 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_initialize()
121 dw->initialize_chan(dwc); in dwc_initialize()
124 channel_set_bit(dw, MASK.XFER, dwc->mask); in dwc_initialize()
125 channel_set_bit(dw, MASK.ERROR, dwc->mask); in dwc_initialize()
130 static inline void dwc_dump_chan_regs(struct dw_dma_chan *dwc) in dwc_dump_chan_regs() argument
132 dev_err(chan2dev(&dwc->chan), in dwc_dump_chan_regs()
134 channel_readl(dwc, SAR), in dwc_dump_chan_regs()
135 channel_readl(dwc, DAR), in dwc_dump_chan_regs()
136 channel_readl(dwc, LLP), in dwc_dump_chan_regs()
137 channel_readl(dwc, CTL_HI), in dwc_dump_chan_regs()
138 channel_readl(dwc, CTL_LO)); in dwc_dump_chan_regs()
141 static inline void dwc_chan_disable(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_chan_disable() argument
143 channel_clear_bit(dw, CH_EN, dwc->mask); in dwc_chan_disable()
144 while (dma_readl(dw, CH_EN) & dwc->mask) in dwc_chan_disable()
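dwc_chan_disable() is synchronous: it clears the channel's CH_EN bit and then polls until the controller confirms the channel has really stopped, since the bit stays set until the hardware is idle. A sketch of the loop the two matched lines come from; the cpu_relax() in the poll body is an assumption consistent with the usual kernel busy-wait idiom:

    static inline void dwc_chan_disable(struct dw_dma *dw, struct dw_dma_chan *dwc)
    {
        channel_clear_bit(dw, CH_EN, dwc->mask);    /* request disable */
        while (dma_readl(dw, CH_EN) & dwc->mask)    /* wait for HW to drop the bit */
            cpu_relax();
    }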
151 static inline void dwc_do_single_block(struct dw_dma_chan *dwc, in dwc_do_single_block() argument
154 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_do_single_block()
163 channel_writel(dwc, SAR, lli_read(desc, sar)); in dwc_do_single_block()
164 channel_writel(dwc, DAR, lli_read(desc, dar)); in dwc_do_single_block()
165 channel_writel(dwc, CTL_LO, ctllo); in dwc_do_single_block()
166 channel_writel(dwc, CTL_HI, lli_read(desc, ctlhi)); in dwc_do_single_block()
167 channel_set_bit(dw, CH_EN, dwc->mask); in dwc_do_single_block()
170 dwc->tx_node_active = dwc->tx_node_active->next; in dwc_do_single_block()
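dwc_do_single_block() is the software-LLP path: instead of letting the controller fetch a hardware linked list, the driver programs one block's worth of LLI fields into the channel registers by hand and advances tx_node_active to the next node. A condensed sketch assembled from the matched lines; the DWC_CTLL_INT_EN line is an assumption (an interrupt per block is what lets the tasklet chain the next one):

    static inline void dwc_do_single_block(struct dw_dma_chan *dwc,
                                           struct dw_desc *desc)
    {
        struct dw_dma *dw = to_dw_dma(dwc->chan.device);
        u32 ctllo;

        /* Interrupt on every block so the tasklet can start the next one. */
        ctllo = lli_read(desc, ctllo) | DWC_CTLL_INT_EN;

        channel_writel(dwc, SAR, lli_read(desc, sar));
        channel_writel(dwc, DAR, lli_read(desc, dar));
        channel_writel(dwc, CTL_LO, ctllo);
        channel_writel(dwc, CTL_HI, lli_read(desc, ctlhi));
        channel_set_bit(dw, CH_EN, dwc->mask);      /* kick this block */

        /* Advance the software cursor through the descriptor chain. */
        dwc->tx_node_active = dwc->tx_node_active->next;
    }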
174 static void dwc_dostart(struct dw_dma_chan *dwc, struct dw_desc *first) in dwc_dostart() argument
176 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_dostart()
177 u8 lms = DWC_LLP_LMS(dwc->dws.m_master); in dwc_dostart()
181 if (dma_readl(dw, CH_EN) & dwc->mask) { in dwc_dostart()
182 dev_err(chan2dev(&dwc->chan), in dwc_dostart()
185 dwc_dump_chan_regs(dwc); in dwc_dostart()
191 if (dwc->nollp) { in dwc_dostart()
193 &dwc->flags); in dwc_dostart()
195 dev_err(chan2dev(&dwc->chan), in dwc_dostart()
200 dwc_initialize(dwc); in dwc_dostart()
203 dwc->tx_node_active = &first->tx_list; in dwc_dostart()
206 dwc_do_single_block(dwc, first); in dwc_dostart()
211 dwc_initialize(dwc); in dwc_dostart()
213 channel_writel(dwc, LLP, first->txd.phys | lms); in dwc_dostart()
214 channel_writel(dwc, CTL_LO, DWC_CTLL_LLP_D_EN | DWC_CTLL_LLP_S_EN); in dwc_dostart()
215 channel_writel(dwc, CTL_HI, 0); in dwc_dostart()
216 channel_set_bit(dw, CH_EN, dwc->mask); in dwc_dostart()
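dwc_dostart() chooses between the two start modes visible above. Channels without hardware LLP support (dwc->nollp) set DW_DMA_IS_SOFT_LLP, point tx_node_active at the first node, and fall back to dwc_do_single_block(); everything else takes the hardware path: hand the controller the physical address of the first LLI and enable linked-list fetches for both source and destination. A trimmed sketch of the hard-LLP branch under those assumptions:

    /* The channel must be idle before it is (re)programmed. */
    if (dma_readl(dw, CH_EN) & dwc->mask) {
        dev_err(chan2dev(&dwc->chan),
            "%s: BUG: Attempted to start non-idle channel\n", __func__);
        dwc_dump_chan_regs(dwc);
        return;
    }

    dwc_initialize(dwc);    /* per-channel CFG plus XFER/ERROR unmask */

    channel_writel(dwc, LLP, first->txd.phys | lms);    /* head of HW list */
    channel_writel(dwc, CTL_LO, DWC_CTLL_LLP_D_EN | DWC_CTLL_LLP_S_EN);
    channel_writel(dwc, CTL_HI, 0);
    channel_set_bit(dw, CH_EN, dwc->mask);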
219 static void dwc_dostart_first_queued(struct dw_dma_chan *dwc) in dwc_dostart_first_queued() argument
223 if (list_empty(&dwc->queue)) in dwc_dostart_first_queued()
226 list_move(dwc->queue.next, &dwc->active_list); in dwc_dostart_first_queued()
227 desc = dwc_first_active(dwc); in dwc_dostart_first_queued()
228 dev_vdbg(chan2dev(&dwc->chan), "%s: started %u\n", __func__, desc->txd.cookie); in dwc_dostart_first_queued()
229 dwc_dostart(dwc, desc); in dwc_dostart_first_queued()
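Assembled from the matched lines, dwc_dostart_first_queued() is essentially complete as shown: the queue-to-hardware handoff is a single list_move() of the oldest queued descriptor onto active_list, followed by dwc_dostart() on the new head:

    static void dwc_dostart_first_queued(struct dw_dma_chan *dwc)
    {
        struct dw_desc *desc;

        if (list_empty(&dwc->queue))
            return;

        /* Promote the oldest queued descriptor and start it on the HW. */
        list_move(dwc->queue.next, &dwc->active_list);
        desc = dwc_first_active(dwc);
        dev_vdbg(chan2dev(&dwc->chan), "%s: started %u\n", __func__,
                 desc->txd.cookie);
        dwc_dostart(dwc, desc);
    }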
235 dwc_descriptor_complete(struct dw_dma_chan *dwc, struct dw_desc *desc, in dwc_descriptor_complete() argument
243 dev_vdbg(chan2dev(&dwc->chan), "descriptor %u complete\n", txd->cookie); in dwc_descriptor_complete()
245 spin_lock_irqsave(&dwc->lock, flags); in dwc_descriptor_complete()
256 dwc_desc_put(dwc, desc); in dwc_descriptor_complete()
257 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_descriptor_complete()
262 static void dwc_complete_all(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_complete_all() argument
268 spin_lock_irqsave(&dwc->lock, flags); in dwc_complete_all()
269 if (dma_readl(dw, CH_EN) & dwc->mask) { in dwc_complete_all()
270 dev_err(chan2dev(&dwc->chan), in dwc_complete_all()
274 dwc_chan_disable(dw, dwc); in dwc_complete_all()
281 list_splice_init(&dwc->active_list, &list); in dwc_complete_all()
282 dwc_dostart_first_queued(dwc); in dwc_complete_all()
284 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_complete_all()
287 dwc_descriptor_complete(dwc, desc, true); in dwc_complete_all()
291 static inline u32 dwc_get_sent(struct dw_dma_chan *dwc) in dwc_get_sent() argument
293 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_get_sent()
294 u32 ctlhi = channel_readl(dwc, CTL_HI); in dwc_get_sent()
295 u32 ctllo = channel_readl(dwc, CTL_LO); in dwc_get_sent()
297 return dw->block2bytes(dwc, ctlhi, ctllo >> 4 & 7); in dwc_get_sent()
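The bit arithmetic in dwc_get_sent() is worth unpacking: CTL_HI holds BLOCK_TS, the count of elements transferred so far, and bits 6:4 of CTL_LO hold SRC_TR_WIDTH, the log2 of the element size, so ctllo >> 4 & 7 extracts that width field. Assuming the plain DesignWare variant of the block2bytes hook, the conversion reduces to roughly:

    /* bytes sent = elements transferred << log2(element size) (a sketch) */
    sent = DWC_CTLH_BLOCK_TS(ctlhi) << (ctllo >> 4 & 7);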
300 static void dwc_scan_descriptors(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_scan_descriptors() argument
308 spin_lock_irqsave(&dwc->lock, flags); in dwc_scan_descriptors()
309 llp = channel_readl(dwc, LLP); in dwc_scan_descriptors()
312 if (status_xfer & dwc->mask) { in dwc_scan_descriptors()
314 dma_writel(dw, CLEAR.XFER, dwc->mask); in dwc_scan_descriptors()
316 if (test_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags)) { in dwc_scan_descriptors()
317 struct list_head *head, *active = dwc->tx_node_active; in dwc_scan_descriptors()
323 desc = dwc_first_active(dwc); in dwc_scan_descriptors()
336 dwc_do_single_block(dwc, child); in dwc_scan_descriptors()
338 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
343 clear_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags); in dwc_scan_descriptors()
346 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
348 dwc_complete_all(dw, dwc); in dwc_scan_descriptors()
352 if (list_empty(&dwc->active_list)) { in dwc_scan_descriptors()
353 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
357 if (test_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags)) { in dwc_scan_descriptors()
358 dev_vdbg(chan2dev(&dwc->chan), "%s: soft LLP mode\n", __func__); in dwc_scan_descriptors()
359 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
363 dev_vdbg(chan2dev(&dwc->chan), "%s: llp=%pad\n", __func__, &llp); in dwc_scan_descriptors()
365 list_for_each_entry_safe(desc, _desc, &dwc->active_list, desc_node) { in dwc_scan_descriptors()
371 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
378 desc->residue -= dwc_get_sent(dwc); in dwc_scan_descriptors()
379 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
387 desc->residue -= dwc_get_sent(dwc); in dwc_scan_descriptors()
388 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
398 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
399 dwc_descriptor_complete(dwc, desc, true); in dwc_scan_descriptors()
400 spin_lock_irqsave(&dwc->lock, flags); in dwc_scan_descriptors()
403 dev_err(chan2dev(&dwc->chan), in dwc_scan_descriptors()
407 dwc_chan_disable(dw, dwc); in dwc_scan_descriptors()
409 dwc_dostart_first_queued(dwc); in dwc_scan_descriptors()
410 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_scan_descriptors()
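dwc_scan_descriptors() reconciles the software lists with the hardware's current LLP pointer: descriptors whose LLIs the controller has already moved past are complete, the one the LLP currently points into is in flight (its residue is refined with dwc_get_sent()), and anything after that has not started. A condensed sketch of the walk that produces the residue updates and completions matched above, assuming desc->total_len and desc->len track per-descriptor and per-LLI byte counts:

    list_for_each_entry_safe(desc, _desc, &dwc->active_list, desc_node) {
        desc->residue = desc->total_len;        /* start pessimistic */

        /* HW has not fetched this descriptor's first LLI yet: stop. */
        if (desc->txd.phys == DWC_LLP_LOC(llp)) {
            spin_unlock_irqrestore(&dwc->lock, flags);
            return;
        }

        /* HW is inside this descriptor: refine residue and stop. */
        if (lli_read(desc, llp) == llp) {
            desc->residue -= dwc_get_sent(dwc);
            spin_unlock_irqrestore(&dwc->lock, flags);
            return;
        }

        /* ...the same check repeats for each child LLI, subtracting child->len... */

        /* Not in progress anywhere, so it must be done. */
        spin_unlock_irqrestore(&dwc->lock, flags);
        dwc_descriptor_complete(dwc, desc, true);
        spin_lock_irqsave(&dwc->lock, flags);
    }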
413 static inline void dwc_dump_lli(struct dw_dma_chan *dwc, struct dw_desc *desc) in dwc_dump_lli() argument
415 dev_crit(chan2dev(&dwc->chan), " desc: s0x%x d0x%x l0x%x c0x%x:%x\n", in dwc_dump_lli()
423 static void dwc_handle_error(struct dw_dma *dw, struct dw_dma_chan *dwc) in dwc_handle_error() argument
429 dwc_scan_descriptors(dw, dwc); in dwc_handle_error()
431 spin_lock_irqsave(&dwc->lock, flags); in dwc_handle_error()
438 bad_desc = dwc_first_active(dwc); in dwc_handle_error()
440 list_move(dwc->queue.next, dwc->active_list.prev); in dwc_handle_error()
443 dma_writel(dw, CLEAR.ERROR, dwc->mask); in dwc_handle_error()
444 if (!list_empty(&dwc->active_list)) in dwc_handle_error()
445 dwc_dostart(dwc, dwc_first_active(dwc)); in dwc_handle_error()
454 dev_WARN(chan2dev(&dwc->chan), "Bad descriptor submitted for DMA!\n" in dwc_handle_error()
456 dwc_dump_lli(dwc, bad_desc); in dwc_handle_error()
458 dwc_dump_lli(dwc, child); in dwc_handle_error()
460 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_handle_error()
463 dwc_descriptor_complete(dwc, bad_desc, true); in dwc_handle_error()
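The error path has no way to report failures upward, so dwc_handle_error() unlinks the broken head of active_list, restarts the channel with whatever is next, and completes the bad descriptor anyway after dumping its LLIs. A condensed sketch of the recovery sequence matched above:

    bad_desc = dwc_first_active(dwc);
    list_del_init(&bad_desc->desc_node);
    list_move(dwc->queue.next, dwc->active_list.prev);

    /* Clear the error flag and try to restart the controller. */
    dma_writel(dw, CLEAR.ERROR, dwc->mask);
    if (!list_empty(&dwc->active_list))
        dwc_dostart(dwc, dwc_first_active(dwc));

    /* ... scream loudly: dev_WARN() plus dwc_dump_lli() for each LLI ... */

    /* Complete the borked descriptor so its memory is reclaimed. */
    dwc_descriptor_complete(dwc, bad_desc, true);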
469 struct dw_dma_chan *dwc; in dw_dma_tasklet() local
480 dwc = &dw->chan[i]; in dw_dma_tasklet()
481 if (test_bit(DW_DMA_IS_CYCLIC, &dwc->flags)) in dw_dma_tasklet()
484 dwc_handle_error(dw, dwc); in dw_dma_tasklet()
486 dwc_scan_descriptors(dw, dwc); in dw_dma_tasklet()
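dw_dma_tasklet() fans interrupt status out per channel: cyclic channels are skipped (they are handled by separate cyclic helpers), a raised error bit routes to dwc_handle_error(), and a transfer-complete bit to dwc_scan_descriptors(). A sketch of the dispatch loop, assuming RAW.XFER/RAW.ERROR status reads and noting that the tasklet's signature varies by kernel version:

    status_xfer = dma_readl(dw, RAW.XFER);
    status_err = dma_readl(dw, RAW.ERROR);

    for (i = 0; i < dw->dma.chancnt; i++) {
        dwc = &dw->chan[i];
        if (test_bit(DW_DMA_IS_CYCLIC, &dwc->flags))
            continue;           /* cyclic transfers handled elsewhere */
        else if (status_err & dwc->mask)
            dwc_handle_error(dw, dwc);
        else if (status_xfer & dwc->mask)
            dwc_scan_descriptors(dw, dwc);
    }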
543 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_prep_dma_memcpy() local
550 u8 m_master = dwc->dws.m_master; in dwc_prep_dma_memcpy()
566 dwc->direction = DMA_MEM_TO_MEM; in dwc_prep_dma_memcpy()
570 ctllo = dw->prepare_ctllo(dwc) in dwc_prep_dma_memcpy()
579 desc = dwc_desc_get(dwc); in dwc_prep_dma_memcpy()
583 ctlhi = dw->bytes2block(dwc, len - offset, src_width, &xfer_count); in dwc_prep_dma_memcpy()
612 dwc_desc_put(dwc, first); in dwc_prep_dma_memcpy()
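The prep path splits an arbitrary-length copy into hardware-sized blocks: each iteration takes a descriptor from the pool, lets the bytes2block hook decide how much fits in one LLI (returned via xfer_count), fills in the LLI fields, and chains it to the previous LLI by physical address. The slave_sg path below follows the same chaining pattern once per scatterlist entry. A condensed sketch of the memcpy loop, assuming desc->len records each LLI's byte count:

    for (offset = 0; offset < len; offset += xfer_count) {
        desc = dwc_desc_get(dwc);
        if (!desc)
            goto err_desc_get;

        /* HW block size limits how much a single LLI may carry. */
        ctlhi = dw->bytes2block(dwc, len - offset, src_width, &xfer_count);

        lli_write(desc, sar, src + offset);
        lli_write(desc, dar, dest + offset);
        lli_write(desc, ctllo, ctllo);
        lli_write(desc, ctlhi, ctlhi);
        desc->len = xfer_count;

        if (!first) {
            first = desc;
        } else {
            /* Link the previous LLI to this one by physical address. */
            lli_write(prev, llp, desc->txd.phys | lms);
            list_add_tail(&desc->desc_node, &first->tx_list);
        }
        prev = desc;
    }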
621 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_prep_slave_sg() local
623 struct dma_slave_config *sconfig = &dwc->dma_sconfig; in dwc_prep_slave_sg()
627 u8 m_master = dwc->dws.m_master; in dwc_prep_slave_sg()
642 dwc->direction = direction; in dwc_prep_slave_sg()
650 ctllo = dw->prepare_ctllo(dwc) in dwc_prep_slave_sg()
669 desc = dwc_desc_get(dwc); in dwc_prep_slave_sg()
673 ctlhi = dw->bytes2block(dwc, len, mem_width, &dlen); in dwc_prep_slave_sg()
700 ctllo = dw->prepare_ctllo(dwc) in dwc_prep_slave_sg()
717 desc = dwc_desc_get(dwc); in dwc_prep_slave_sg()
721 ctlhi = dw->bytes2block(dwc, len, reg_width, &dlen); in dwc_prep_slave_sg()
763 dwc_desc_put(dwc, first); in dwc_prep_slave_sg()
769 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dw_dma_filter() local
776 if (dws->channels && !(dws->channels & dwc->mask)) in dw_dma_filter()
780 memcpy(&dwc->dws, dws, sizeof(struct dw_dma_slave)); in dw_dma_filter()
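dw_dma_filter() is the dma_request_channel() callback: it rejects channels that the platform's channel mask forbids and, on a match, copies the dw_dma_slave parameters into the channel because the caller's copy may be temporary. A sketch, assuming the usual dma_dev identity check that the matched lines skip over:

    static bool dw_dma_filter(struct dma_chan *chan, void *param)
    {
        struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
        struct dw_dma_slave *dws = param;

        if (dws->dma_dev != chan->device->dev)
            return false;

        /* Permit only channels allowed by the platform's channel mask. */
        if (dws->channels && !(dws->channels & dwc->mask))
            return false;

        /* dws may live on the caller's stack, so keep our own copy. */
        memcpy(&dwc->dws, dws, sizeof(struct dw_dma_slave));

        return true;
    }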
788 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_config() local
791 memcpy(&dwc->dma_sconfig, sconfig, sizeof(*sconfig)); in dwc_config()
793 dwc->dma_sconfig.src_maxburst = in dwc_config()
794 clamp(dwc->dma_sconfig.src_maxburst, 0U, dwc->max_burst); in dwc_config()
795 dwc->dma_sconfig.dst_maxburst = in dwc_config()
796 clamp(dwc->dma_sconfig.dst_maxburst, 0U, dwc->max_burst); in dwc_config()
798 dw->encode_maxburst(dwc, &dwc->dma_sconfig.src_maxburst); in dwc_config()
799 dw->encode_maxburst(dwc, &dwc->dma_sconfig.dst_maxburst); in dwc_config()
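dwc_config() stores the caller's dma_slave_config, then bounds both burst values against the channel's hardware limit before the controller-specific hook rewrites them into register encoding in place. Assembled from the matched lines (the return value is an assumption):

    static int dwc_config(struct dma_chan *chan, struct dma_slave_config *sconfig)
    {
        struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
        struct dw_dma *dw = to_dw_dma(chan->device);

        memcpy(&dwc->dma_sconfig, sconfig, sizeof(*sconfig));

        /* Never promise more than the channel can actually burst. */
        dwc->dma_sconfig.src_maxburst =
            clamp(dwc->dma_sconfig.src_maxburst, 0U, dwc->max_burst);
        dwc->dma_sconfig.dst_maxburst =
            clamp(dwc->dma_sconfig.dst_maxburst, 0U, dwc->max_burst);

        /* Rewrite burst lengths into the controller's encoding in place. */
        dw->encode_maxburst(dwc, &dwc->dma_sconfig.src_maxburst);
        dw->encode_maxburst(dwc, &dwc->dma_sconfig.dst_maxburst);

        return 0;
    }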
804 static void dwc_chan_pause(struct dw_dma_chan *dwc, bool drain) in dwc_chan_pause() argument
806 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_chan_pause()
809 dw->suspend_chan(dwc, drain); in dwc_chan_pause()
811 while (!(channel_readl(dwc, CFG_LO) & DWC_CFGL_FIFO_EMPTY) && count--) in dwc_chan_pause()
814 set_bit(DW_DMA_IS_PAUSED, &dwc->flags); in dwc_chan_pause()
819 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_pause() local
822 spin_lock_irqsave(&dwc->lock, flags); in dwc_pause()
823 dwc_chan_pause(dwc, false); in dwc_pause()
824 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_pause()
829 static inline void dwc_chan_resume(struct dw_dma_chan *dwc, bool drain) in dwc_chan_resume() argument
831 struct dw_dma *dw = to_dw_dma(dwc->chan.device); in dwc_chan_resume()
833 dw->resume_chan(dwc, drain); in dwc_chan_resume()
835 clear_bit(DW_DMA_IS_PAUSED, &dwc->flags); in dwc_chan_resume()
840 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_resume() local
843 spin_lock_irqsave(&dwc->lock, flags); in dwc_resume()
845 if (test_bit(DW_DMA_IS_PAUSED, &dwc->flags)) in dwc_resume()
846 dwc_chan_resume(dwc, false); in dwc_resume()
848 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_resume()
855 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_terminate_all() local
861 spin_lock_irqsave(&dwc->lock, flags); in dwc_terminate_all()
863 clear_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags); in dwc_terminate_all()
865 dwc_chan_pause(dwc, true); in dwc_terminate_all()
867 dwc_chan_disable(dw, dwc); in dwc_terminate_all()
869 dwc_chan_resume(dwc, true); in dwc_terminate_all()
872 list_splice_init(&dwc->queue, &list); in dwc_terminate_all()
873 list_splice_init(&dwc->active_list, &list); in dwc_terminate_all()
875 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_terminate_all()
879 dwc_descriptor_complete(dwc, desc, false); in dwc_terminate_all()
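Teardown in dwc_terminate_all() is ordered carefully: pause with drain=true so the FIFO empties, disable the channel, resume (also with drain) to leave the hardware in a clean state, then splice both software lists onto a private list and complete every descriptor with callback_required=false so aborted work triggers no completion callbacks. A condensed sketch:

    spin_lock_irqsave(&dwc->lock, flags);

    clear_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags);

    dwc_chan_pause(dwc, true);      /* drain: let the FIFO empty first */
    dwc_chan_disable(dw, dwc);      /* then stop the channel for real */
    dwc_chan_resume(dwc, true);

    /* Collect everything, queued and active, for teardown. */
    list_splice_init(&dwc->queue, &list);
    list_splice_init(&dwc->active_list, &list);

    spin_unlock_irqrestore(&dwc->lock, flags);

    /* false: aborted descriptors must not fire completion callbacks */
    list_for_each_entry_safe(desc, _desc, &list, desc_node)
        dwc_descriptor_complete(dwc, desc, false);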
884 static struct dw_desc *dwc_find_desc(struct dw_dma_chan *dwc, dma_cookie_t c) in dwc_find_desc() argument
888 list_for_each_entry(desc, &dwc->active_list, desc_node) in dwc_find_desc()
895 static u32 dwc_get_residue(struct dw_dma_chan *dwc, dma_cookie_t cookie) in dwc_get_residue() argument
901 spin_lock_irqsave(&dwc->lock, flags); in dwc_get_residue()
903 desc = dwc_find_desc(dwc, cookie); in dwc_get_residue()
905 if (desc == dwc_first_active(dwc)) { in dwc_get_residue()
907 if (test_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags) && residue) in dwc_get_residue()
908 residue -= dwc_get_sent(dwc); in dwc_get_residue()
916 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_get_residue()
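Residue reporting distinguishes three cases by cookie: a descriptor still waiting behind the active head has its full length outstanding, the active head reports the residue maintained by the scan (further refined by the live hardware counter in soft-LLP mode), and an unknown cookie means the transfer already completed, so zero remains. Assembled into a sketch, with desc->total_len assumed to be the descriptor's overall byte count:

    static u32 dwc_get_residue(struct dw_dma_chan *dwc, dma_cookie_t cookie)
    {
        struct dw_desc *desc;
        unsigned long flags;
        u32 residue;

        spin_lock_irqsave(&dwc->lock, flags);

        desc = dwc_find_desc(dwc, cookie);
        if (desc) {
            if (desc == dwc_first_active(dwc)) {
                residue = desc->residue;
                /* Soft-LLP: the HW counter is still ticking, subtract it. */
                if (test_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags) && residue)
                    residue -= dwc_get_sent(dwc);
            } else {
                residue = desc->total_len;  /* queued, nothing sent yet */
            }
        } else {
            residue = 0;                    /* already completed */
        }

        spin_unlock_irqrestore(&dwc->lock, flags);
        return residue;
    }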
925 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_tx_status() local
932 dwc_scan_descriptors(to_dw_dma(chan->device), dwc); in dwc_tx_status()
938 dma_set_residue(txstate, dwc_get_residue(dwc, cookie)); in dwc_tx_status()
940 if (test_bit(DW_DMA_IS_PAUSED, &dwc->flags) && ret == DMA_IN_PROGRESS) in dwc_tx_status()
948 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_issue_pending() local
951 spin_lock_irqsave(&dwc->lock, flags); in dwc_issue_pending()
952 if (list_empty(&dwc->active_list)) in dwc_issue_pending()
953 dwc_dostart_first_queued(dwc); in dwc_issue_pending()
954 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_issue_pending()
980 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_alloc_chan_resources() local
986 if (dma_readl(dw, CH_EN) & dwc->mask) { in dwc_alloc_chan_resources()
1010 dw->in_use |= dwc->mask; in dwc_alloc_chan_resources()
1017 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_free_chan_resources() local
1022 dwc->descs_allocated); in dwc_free_chan_resources()
1025 BUG_ON(!list_empty(&dwc->active_list)); in dwc_free_chan_resources()
1026 BUG_ON(!list_empty(&dwc->queue)); in dwc_free_chan_resources()
1027 BUG_ON(dma_readl(to_dw_dma(chan->device), CH_EN) & dwc->mask); in dwc_free_chan_resources()
1029 spin_lock_irqsave(&dwc->lock, flags); in dwc_free_chan_resources()
1032 memset(&dwc->dws, 0, sizeof(struct dw_dma_slave)); in dwc_free_chan_resources()
1035 channel_clear_bit(dw, MASK.XFER, dwc->mask); in dwc_free_chan_resources()
1036 channel_clear_bit(dw, MASK.BLOCK, dwc->mask); in dwc_free_chan_resources()
1037 channel_clear_bit(dw, MASK.ERROR, dwc->mask); in dwc_free_chan_resources()
1039 spin_unlock_irqrestore(&dwc->lock, flags); in dwc_free_chan_resources()
1042 dw->in_use &= ~dwc->mask; in dwc_free_chan_resources()
1051 struct dw_dma_chan *dwc = to_dw_dma_chan(chan); in dwc_caps() local
1053 caps->max_burst = dwc->max_burst; in dwc_caps()
1062 if (dwc->nollp) in dwc_caps()
1154 struct dw_dma_chan *dwc = &dw->chan[i]; in do_dma_probe() local
1156 dwc->chan.device = &dw->dma; in do_dma_probe()
1157 dma_cookie_init(&dwc->chan); in do_dma_probe()
1159 list_add_tail(&dwc->chan.device_node, in do_dma_probe()
1162 list_add(&dwc->chan.device_node, &dw->dma.channels); in do_dma_probe()
1166 dwc->priority = pdata->nr_channels - i - 1; in do_dma_probe()
1168 dwc->priority = i; in do_dma_probe()
1170 dwc->ch_regs = &__dw_regs(dw)->CHAN[i]; in do_dma_probe()
1171 spin_lock_init(&dwc->lock); in do_dma_probe()
1172 dwc->mask = 1 << i; in do_dma_probe()
1174 INIT_LIST_HEAD(&dwc->active_list); in do_dma_probe()
1175 INIT_LIST_HEAD(&dwc->queue); in do_dma_probe()
1177 channel_clear_bit(dw, CH_EN, dwc->mask); in do_dma_probe()
1179 dwc->direction = DMA_TRANS_NONE; in do_dma_probe()
1195 dwc->block_size = in do_dma_probe()
1205 dwc->nollp = in do_dma_probe()
1208 dwc->max_burst = in do_dma_probe()
1211 dwc->block_size = pdata->block_size; in do_dma_probe()
1212 dwc->nollp = !pdata->multi_block[i]; in do_dma_probe()
1213 dwc->max_burst = pdata->max_burst[i] ?: DW_DMA_MAX_BURST; in do_dma_probe()
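The probe loop sets up each channel's software state before the controller is exposed: cookie bookkeeping, list heads, lock, per-channel register window and interrupt mask bit, plus capabilities (block size, multi-block/LLP support, maximum burst) taken from either autoconfigured hardware parameters or platform data. A condensed per-channel sketch of the platform-data branch, with the priority and allocation-order conditionals reconstructed as assumptions from the matched pairs of lines:

    for (i = 0; i < pdata->nr_channels; i++) {
        struct dw_dma_chan *dwc = &dw->chan[i];

        dwc->chan.device = &dw->dma;
        dma_cookie_init(&dwc->chan);
        if (pdata->chan_allocation_order == CHAN_ALLOCATION_ASCENDING)
            list_add_tail(&dwc->chan.device_node, &dw->dma.channels);
        else
            list_add(&dwc->chan.device_node, &dw->dma.channels);

        if (pdata->chan_priority == CHAN_PRIORITY_ASCENDING)
            dwc->priority = pdata->nr_channels - i - 1;
        else
            dwc->priority = i;

        dwc->ch_regs = &__dw_regs(dw)->CHAN[i];
        spin_lock_init(&dwc->lock);
        dwc->mask = 1 << i;             /* this channel's bit everywhere */

        INIT_LIST_HEAD(&dwc->active_list);
        INIT_LIST_HEAD(&dwc->queue);

        channel_clear_bit(dw, CH_EN, dwc->mask);    /* start disabled */
        dwc->direction = DMA_TRANS_NONE;

        dwc->block_size = pdata->block_size;
        dwc->nollp = !pdata->multi_block[i];
        dwc->max_burst = pdata->max_burst[i] ?: DW_DMA_MAX_BURST;
    }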
1282 struct dw_dma_chan *dwc, *_dwc; in do_dma_remove() local
1292 list_for_each_entry_safe(dwc, _dwc, &dw->dma.channels, in do_dma_remove()
1294 list_del(&dwc->chan.device_node); in do_dma_remove()
1295 channel_clear_bit(dw, CH_EN, dwc->mask); in do_dma_remove()