Lines Matching +full:llp +full:- +full:based
1 // SPDX-License-Identifier: GPL-2.0-or-later
12 #include <dt-bindings/dma/at91.h>
15 #include <linux/dma-mapping.h>
30 * --------
63 /*----------------------------------------------------------------------*/
82 return list_first_entry(&atchan->active_list, in atc_first_active()
88 return list_first_entry(&atchan->queue, in atc_first_queued()
93 * atc_alloc_descriptor - allocate and return an initialized descriptor
97 * Note: The ack-bit is positioned in the descriptor flag at creation time
106 struct at_dma *atdma = to_at_dma(chan->device); in atc_alloc_descriptor()
109 desc = dma_pool_zalloc(atdma->dma_desc_pool, gfp_flags, &phys); in atc_alloc_descriptor()
111 INIT_LIST_HEAD(&desc->tx_list); in atc_alloc_descriptor()
112 dma_async_tx_descriptor_init(&desc->txd, chan); in atc_alloc_descriptor()
114 desc->txd.flags = DMA_CTRL_ACK; in atc_alloc_descriptor()
115 desc->txd.tx_submit = atc_tx_submit; in atc_alloc_descriptor()
116 desc->txd.phys = phys; in atc_alloc_descriptor()
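Only the matched lines of atc_alloc_descriptor() appear above. As a reading aid, here is a minimal sketch of how they plausibly assemble; the NULL check around the pool allocation and the final return are reconstructed glue, not part of the match:

	static struct at_desc *atc_alloc_descriptor(struct dma_chan *chan,
						    gfp_t gfp_flags)
	{
		struct at_dma *atdma = to_at_dma(chan->device);
		struct at_desc *desc;
		dma_addr_t phys;

		/* zeroed descriptor from the DMA pool, plus its bus address */
		desc = dma_pool_zalloc(atdma->dma_desc_pool, gfp_flags, &phys);
		if (desc) {
			INIT_LIST_HEAD(&desc->tx_list);
			dma_async_tx_descriptor_init(&desc->txd, chan);
			/* ack-bit positioned at creation time, per the note above */
			desc->txd.flags = DMA_CTRL_ACK;
			desc->txd.tx_submit = atc_tx_submit;
			desc->txd.phys = phys;
		}
		return desc;
	}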
123 * atc_desc_get - get an unused descriptor from free_list
133 spin_lock_irqsave(&atchan->lock, flags); in atc_desc_get()
134 list_for_each_entry_safe(desc, _desc, &atchan->free_list, desc_node) { in atc_desc_get()
136 if (async_tx_test_ack(&desc->txd)) { in atc_desc_get()
137 list_del(&desc->desc_node); in atc_desc_get()
141 dev_dbg(chan2dev(&atchan->chan_common), in atc_desc_get()
144 spin_unlock_irqrestore(&atchan->lock, flags); in atc_desc_get()
145 dev_vdbg(chan2dev(&atchan->chan_common), in atc_desc_get()
150 ret = atc_alloc_descriptor(&atchan->chan_common, GFP_NOWAIT); in atc_desc_get()
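The recycle-or-allocate pattern behind atc_desc_get(), sketched with the glue (the ret assignment, the break, and the empty-handed fallback condition) reconstructed rather than quoted:

	struct at_desc *ret = NULL;

	spin_lock_irqsave(&atchan->lock, flags);
	list_for_each_entry_safe(desc, _desc, &atchan->free_list, desc_node) {
		/* only descriptors the client has ACKed may be recycled */
		if (async_tx_test_ack(&desc->txd)) {
			list_del(&desc->desc_node);
			ret = desc;
			break;
		}
	}
	spin_unlock_irqrestore(&atchan->lock, flags);

	/* nothing reusable on the free list: allocate a fresh descriptor */
	if (!ret)
		ret = atc_alloc_descriptor(&atchan->chan_common, GFP_NOWAIT);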
156 * atc_desc_put - move a descriptor, including any children, to the free list
166 spin_lock_irqsave(&atchan->lock, flags); in atc_desc_put()
167 list_for_each_entry(child, &desc->tx_list, desc_node) in atc_desc_put()
168 dev_vdbg(chan2dev(&atchan->chan_common), in atc_desc_put()
171 list_splice_init(&desc->tx_list, &atchan->free_list); in atc_desc_put()
172 dev_vdbg(chan2dev(&atchan->chan_common), in atc_desc_put()
174 list_add(&desc->desc_node, &atchan->free_list); in atc_desc_put()
175 spin_unlock_irqrestore(&atchan->lock, flags); in atc_desc_put()
180 * atc_desc_chain - build chain adding a descriptor
194 (*prev)->lli.dscr = desc->txd.phys; in atc_desc_chain()
196 list_add_tail(&desc->desc_node, in atc_desc_chain()
197 &(*first)->tx_list); in atc_desc_chain()
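The two matched lines are the "else" arm of the chaining helper; the full shape, with the first-descriptor branch reconstructed as an assumption, is roughly:

	static void atc_desc_chain(struct at_desc **first, struct at_desc **prev,
				   struct at_desc *desc)
	{
		if (!(*first)) {
			*first = desc;
		} else {
			/* point the previous hardware lli at the new one */
			(*prev)->lli.dscr = desc->txd.phys;
			/* keep a software copy on the first descriptor's tx_list */
			list_add_tail(&desc->desc_node, &(*first)->tx_list);
		}
		*prev = desc;
	}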
203 * atc_dostart - starts the DMA engine for real
207 * Called with atchan->lock held and bh disabled
211 struct at_dma *atdma = to_at_dma(atchan->chan_common.device); in atc_dostart()
215 dev_err(chan2dev(&atchan->chan_common), in atc_dostart()
216 "BUG: Attempted to start non-idle channel\n"); in atc_dostart()
217 dev_err(chan2dev(&atchan->chan_common), in atc_dostart()
235 channel_writel(atchan, DSCR, first->txd.phys); in atc_dostart()
236 channel_writel(atchan, SPIP, ATC_SPIP_HOLE(first->src_hole) | in atc_dostart()
237 ATC_SPIP_BOUNDARY(first->boundary)); in atc_dostart()
238 channel_writel(atchan, DPIP, ATC_DPIP_HOLE(first->dst_hole) | in atc_dostart()
239 ATC_DPIP_BOUNDARY(first->boundary)); in atc_dostart()
240 dma_writel(atdma, CHER, atchan->mask); in atc_dostart()
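Programming order matters in atc_dostart(): the channel fetches its first lli through DSCR, so the per-buffer registers are cleared, the link pointer and the SPIP/DPIP picture-in-picture registers shown above are loaded, and only then is the channel enabled via CHER. A sketch, with the elided register clears and the barrier placement as our assumptions:

	channel_writel(atchan, SADDR, 0);
	channel_writel(atchan, DADDR, 0);
	channel_writel(atchan, CTRLA, 0);
	channel_writel(atchan, CTRLB, 0);
	channel_writel(atchan, DSCR, first->txd.phys);
	channel_writel(atchan, SPIP, ATC_SPIP_HOLE(first->src_hole) |
		       ATC_SPIP_BOUNDARY(first->boundary));
	channel_writel(atchan, DPIP, ATC_DPIP_HOLE(first->dst_hole) |
		       ATC_DPIP_BOUNDARY(first->boundary));
	/* make sure the writes above land before the enable */
	wmb();
	dma_writel(atdma, CHER, atchan->mask);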
246 * atc_get_desc_by_cookie - get the descriptor of a cookie
255 list_for_each_entry_safe(desc, _desc, &atchan->queue, desc_node) { in atc_get_desc_by_cookie()
256 if (desc->txd.cookie == cookie) in atc_get_desc_by_cookie()
260 list_for_each_entry_safe(desc, _desc, &atchan->active_list, desc_node) { in atc_get_desc_by_cookie()
261 if (desc->txd.cookie == cookie) in atc_get_desc_by_cookie()
269 * atc_calc_bytes_left - calculates the number of bytes left according to the
286 return current_len - (btsize << src_width); in atc_calc_bytes_left()
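The residue arithmetic in the matched return line: the CTRLA buffer-transfer-size field counts transfers already performed in units of the source bus width, so (btsize << src_width) is the byte count already moved. For example, with src_width == 2 (4-byte words) and btsize == 10, 40 of current_len bytes are done. A sketch of the whole helper, with the field-extraction macro names assumed from at_hdmac_regs.h:

	static inline int atc_calc_bytes_left(int current_len, u32 ctrla)
	{
		u32 btsize = ctrla & ATC_BTSIZE_MAX;
		u32 src_width = ATC_REG_TO_SRC_WIDTH(ctrla);

		return current_len - (btsize << src_width);
	}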
290 * atc_get_bytes_left - get the number of bytes residue for a cookie
309 return -EINVAL; in atc_get_bytes_left()
311 return desc->total_len; in atc_get_bytes_left()
314 ret = desc_first->total_len; in atc_get_bytes_left()
316 if (desc_first->lli.dscr) { in atc_get_bytes_left()
343 * - If the DMA transfer is paused, RX overruns or TX underruns in atc_get_bytes_left()
350 * - The atc_pause() function masks interrupts but we'd rather in atc_get_bytes_left()
397 return -ETIMEDOUT; in atc_get_bytes_left()
400 if (desc_first->lli.dscr == dscr) in atc_get_bytes_left()
403 ret -= desc_first->len; in atc_get_bytes_left()
404 list_for_each_entry(desc, &desc_first->tx_list, desc_node) { in atc_get_bytes_left()
405 if (desc->lli.dscr == dscr) in atc_get_bytes_left()
408 ret -= desc->len; in atc_get_bytes_left()
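Putting the matched lines together: DSCR holds the link pointer of the lli the channel is currently working from, so the walk drops the length of every buffer already passed and then refines inside the current one using the CTRLA snapshot read alongside DSCR. A hedged reconstruction of the assembled flow:

	ret = desc_first->total_len;
	if (desc_first->lli.dscr != dscr) {
		/* hardware moved past the first buffer: skip it ... */
		ret -= desc_first->len;
		/* ... and every child it has already completed */
		list_for_each_entry(desc, &desc_first->tx_list, desc_node) {
			if (desc->lli.dscr == dscr)
				break;
			ret -= desc->len;
		}
	}
	/* e.g. three 4 KiB buffers, parked on the third: 12288 - 4096 - 4096 */
	ret = atc_calc_bytes_left(ret, ctrla);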
426 * atc_chain_complete - finish work for one transaction chain
433 struct dma_async_tx_descriptor *txd = &desc->txd; in atc_chain_complete()
434 struct at_dma *atdma = to_at_dma(atchan->chan_common.device); in atc_chain_complete()
437 dev_vdbg(chan2dev(&atchan->chan_common), in atc_chain_complete()
438 "descriptor %u complete\n", txd->cookie); in atc_chain_complete()
440 spin_lock_irqsave(&atchan->lock, flags); in atc_chain_complete()
447 if (desc->memset_buffer) { in atc_chain_complete()
448 dma_pool_free(atdma->memset_pool, desc->memset_vaddr, in atc_chain_complete()
449 desc->memset_paddr); in atc_chain_complete()
450 desc->memset_buffer = false; in atc_chain_complete()
454 list_splice_init(&desc->tx_list, &atchan->free_list); in atc_chain_complete()
456 list_move(&desc->desc_node, &atchan->free_list); in atc_chain_complete()
458 spin_unlock_irqrestore(&atchan->lock, flags); in atc_chain_complete()
470 * atc_complete_all - finish work for all transactions
476 * Called with atchan->lock held and bh disabled
484 dev_vdbg(chan2dev(&atchan->chan_common), "complete all\n"); in atc_complete_all()
486 spin_lock_irqsave(&atchan->lock, flags); in atc_complete_all()
492 if (!list_empty(&atchan->queue)) in atc_complete_all()
495 list_splice_init(&atchan->active_list, &list); in atc_complete_all()
497 list_splice_init(&atchan->queue, &atchan->active_list); in atc_complete_all()
499 spin_unlock_irqrestore(&atchan->lock, flags); in atc_complete_all()
506 * atc_advance_work - at the end of a transaction, move forward
514 dev_vdbg(chan2dev(&atchan->chan_common), "advance_work\n"); in atc_advance_work()
516 spin_lock_irqsave(&atchan->lock, flags); in atc_advance_work()
518 spin_unlock_irqrestore(&atchan->lock, flags); in atc_advance_work()
522 if (list_empty(&atchan->active_list) || in atc_advance_work()
523 list_is_singular(&atchan->active_list)) in atc_advance_work()
529 spin_lock_irqsave(&atchan->lock, flags); in atc_advance_work()
531 spin_unlock_irqrestore(&atchan->lock, flags); in atc_advance_work()
536 * atc_handle_error - handle errors reported by DMA controller
545 spin_lock_irqsave(&atchan->lock, flags); in atc_handle_error()
552 list_del_init(&bad_desc->desc_node); in atc_handle_error()
556 list_splice_init(&atchan->queue, atchan->active_list.prev); in atc_handle_error()
559 if (!list_empty(&atchan->active_list)) in atc_handle_error()
569 dev_crit(chan2dev(&atchan->chan_common), in atc_handle_error()
571 dev_crit(chan2dev(&atchan->chan_common), in atc_handle_error()
572 " cookie: %d\n", bad_desc->txd.cookie); in atc_handle_error()
573 atc_dump_lli(atchan, &bad_desc->lli); in atc_handle_error()
574 list_for_each_entry(child, &bad_desc->tx_list, desc_node) in atc_handle_error()
575 atc_dump_lli(atchan, &child->lli); in atc_handle_error()
577 spin_unlock_irqrestore(&atchan->lock, flags); in atc_handle_error()
584 * atc_handle_cyclic - at the end of a period, run callback function
590 struct dma_async_tx_descriptor *txd = &first->txd; in atc_handle_cyclic()
592 dev_vdbg(chan2dev(&atchan->chan_common), in atc_handle_cyclic()
593 "new cyclic period llp 0x%08x\n", in atc_handle_cyclic()
599 /*-- IRQ & Tasklet ---------------------------------------------------*/
605 if (test_and_clear_bit(ATC_IS_ERROR, &atchan->status)) in atc_tasklet()
630 dev_vdbg(atdma->dma_common.dev, in at_dma_interrupt()
634 for (i = 0; i < atdma->dma_common.chancnt; i++) { in at_dma_interrupt()
635 atchan = &atdma->chan[i]; in at_dma_interrupt()
640 AT_DMA_RES(i) | atchan->mask); in at_dma_interrupt()
642 set_bit(ATC_IS_ERROR, &atchan->status); in at_dma_interrupt()
644 tasklet_schedule(&atchan->tasklet); in at_dma_interrupt()
655 /*-- DMA Engine API --------------------------------------------------*/
658 * atc_tx_submit - set the prepared descriptor(s) to be executed by the engine
668 struct at_dma_chan *atchan = to_at_dma_chan(tx->chan); in atc_tx_submit()
672 spin_lock_irqsave(&atchan->lock, flags); in atc_tx_submit()
675 if (list_empty(&atchan->active_list)) { in atc_tx_submit()
676 dev_vdbg(chan2dev(tx->chan), "tx_submit: started %u\n", in atc_tx_submit()
677 desc->txd.cookie); in atc_tx_submit()
679 list_add_tail(&desc->desc_node, &atchan->active_list); in atc_tx_submit()
681 dev_vdbg(chan2dev(tx->chan), "tx_submit: queued %u\n", in atc_tx_submit()
682 desc->txd.cookie); in atc_tx_submit()
683 list_add_tail(&desc->desc_node, &atchan->queue); in atc_tx_submit()
686 spin_unlock_irqrestore(&atchan->lock, flags); in atc_tx_submit()
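Submission itself is cheap: assign a cookie under the channel lock, then either kick an idle channel immediately or park the descriptor on the pending queue. A sketch of the whole function, with the declarations and the atc_dostart() call reconstructed around the matched lines:

	static dma_cookie_t atc_tx_submit(struct dma_async_tx_descriptor *tx)
	{
		struct at_desc *desc = txd_to_at_desc(tx);
		struct at_dma_chan *atchan = to_at_dma_chan(tx->chan);
		dma_cookie_t cookie;
		unsigned long flags;

		spin_lock_irqsave(&atchan->lock, flags);
		cookie = dma_cookie_assign(tx);

		if (list_empty(&atchan->active_list)) {
			atc_dostart(atchan, desc);	/* idle: start right away */
			list_add_tail(&desc->desc_node, &atchan->active_list);
		} else {
			list_add_tail(&desc->desc_node, &atchan->queue);
		}

		spin_unlock_irqrestore(&atchan->lock, flags);
		return cookie;
	}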
692 * atc_prep_dma_interleaved - prepare memory to memory interleaved operation
712 if (unlikely(!xt || xt->numf != 1 || !xt->frame_size)) in atc_prep_dma_interleaved()
715 first = xt->sgl; in atc_prep_dma_interleaved()
719 __func__, &xt->src_start, &xt->dst_start, xt->numf, in atc_prep_dma_interleaved()
720 xt->frame_size, flags); in atc_prep_dma_interleaved()
728 for (i = 0; i < xt->frame_size; i++) { in atc_prep_dma_interleaved()
729 struct data_chunk *chunk = xt->sgl + i; in atc_prep_dma_interleaved()
731 if ((chunk->size != xt->sgl->size) || in atc_prep_dma_interleaved()
740 len += chunk->size; in atc_prep_dma_interleaved()
743 dwidth = atc_get_xfer_width(xt->src_start, in atc_prep_dma_interleaved()
744 xt->dst_start, len); in atc_prep_dma_interleaved()
770 desc->lli.saddr = xt->src_start; in atc_prep_dma_interleaved()
771 desc->lli.daddr = xt->dst_start; in atc_prep_dma_interleaved()
772 desc->lli.ctrla = ctrla | xfer_count; in atc_prep_dma_interleaved()
773 desc->lli.ctrlb = ctrlb; in atc_prep_dma_interleaved()
775 desc->boundary = first->size >> dwidth; in atc_prep_dma_interleaved()
776 desc->dst_hole = (dmaengine_get_dst_icg(xt, first) >> dwidth) + 1; in atc_prep_dma_interleaved()
777 desc->src_hole = (dmaengine_get_src_icg(xt, first) >> dwidth) + 1; in atc_prep_dma_interleaved()
779 desc->txd.cookie = -EBUSY; in atc_prep_dma_interleaved()
780 desc->total_len = desc->len = len; in atc_prep_dma_interleaved()
782 	/* set end-of-link to the last link descriptor of the list */ in atc_prep_dma_interleaved()
785 desc->txd.flags = flags; /* client is in control of this ack */ in atc_prep_dma_interleaved()
787 return &desc->txd; in atc_prep_dma_interleaved()
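From the client side, the checks above constrain the template to a single frame (numf == 1). A hypothetical caller might build it as below; chan, src, dst, chunk_size, and gap are our names, and the template can be freed once the prep call returns:

	struct dma_interleaved_template *xt;
	struct dma_async_tx_descriptor *tx;

	xt = kzalloc(struct_size(xt, sgl, 1), GFP_KERNEL);
	if (!xt)
		return -ENOMEM;
	xt->src_start = src;
	xt->dst_start = dst;
	xt->dir = DMA_MEM_TO_MEM;
	xt->src_inc = true;
	xt->dst_inc = true;
	xt->src_sgl = true;
	xt->dst_sgl = true;
	xt->numf = 1;			/* the driver insists on one frame */
	xt->frame_size = 1;
	xt->sgl[0].size = chunk_size;
	xt->sgl[0].icg = gap;		/* inter-chunk gap, in bytes */

	tx = dmaengine_prep_interleaved_dma(chan, xt, DMA_PREP_INTERRUPT);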
791 * atc_prep_dma_memcpy - prepare a memcpy operation
836 xfer_count = min_t(size_t, (len - offset) >> src_width, in atc_prep_dma_memcpy()
843 desc->lli.saddr = src + offset; in atc_prep_dma_memcpy()
844 desc->lli.daddr = dest + offset; in atc_prep_dma_memcpy()
845 desc->lli.ctrla = ctrla | xfer_count; in atc_prep_dma_memcpy()
846 desc->lli.ctrlb = ctrlb; in atc_prep_dma_memcpy()
848 desc->txd.cookie = 0; in atc_prep_dma_memcpy()
849 desc->len = xfer_count << src_width; in atc_prep_dma_memcpy()
855 first->txd.cookie = -EBUSY; in atc_prep_dma_memcpy()
856 first->total_len = len; in atc_prep_dma_memcpy()
858 	/* set end-of-link to the last link descriptor of the list */ in atc_prep_dma_memcpy()
861 first->txd.flags = flags; /* client is in control of this ack */ in atc_prep_dma_memcpy()
863 return &first->txd; in atc_prep_dma_memcpy()
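A hypothetical client of the memcpy capability goes through the generic dmaengine wrappers rather than calling the prep routine directly; chan, dst, src, len, and my_done_cb are our names:

	struct dma_async_tx_descriptor *tx;
	dma_cookie_t cookie;

	tx = dmaengine_prep_dma_memcpy(chan, dst, src, len, DMA_PREP_INTERRUPT);
	if (!tx)
		return -ENOMEM;
	tx->callback = my_done_cb;		/* hypothetical completion hook */
	cookie = dmaengine_submit(tx);		/* lands in atc_tx_submit() */
	dma_async_issue_pending(chan);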
899 desc->lli.saddr = psrc; in atc_create_memset_desc()
900 desc->lli.daddr = pdst; in atc_create_memset_desc()
901 desc->lli.ctrla = ctrla | xfer_count; in atc_create_memset_desc()
902 desc->lli.ctrlb = ctrlb; in atc_create_memset_desc()
904 desc->txd.cookie = 0; in atc_create_memset_desc()
905 desc->len = len; in atc_create_memset_desc()
911  * atc_prep_dma_memset - prepare a memset operation
922 struct at_dma *atdma = to_at_dma(chan->device); in atc_prep_dma_memset()
935 if (!is_dma_fill_aligned(chan->device, dest, 0, len)) { in atc_prep_dma_memset()
941 vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr); in atc_prep_dma_memset()
956 desc->memset_paddr = paddr; in atc_prep_dma_memset()
957 desc->memset_vaddr = vaddr; in atc_prep_dma_memset()
958 desc->memset_buffer = true; in atc_prep_dma_memset()
960 desc->txd.cookie = -EBUSY; in atc_prep_dma_memset()
961 desc->total_len = len; in atc_prep_dma_memset()
963 /* set end-of-link on the descriptor */ in atc_prep_dma_memset()
966 desc->txd.flags = flags; in atc_prep_dma_memset()
968 return &desc->txd; in atc_prep_dma_memset()
971 dma_pool_free(atdma->memset_pool, vaddr, paddr); in atc_prep_dma_memset()
982 struct at_dma *atdma = to_at_dma(chan->device); in atc_prep_dma_memset_sg()
999 vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr); in atc_prep_dma_memset_sg()
1014 if (!is_dma_fill_aligned(chan->device, dest, 0, len)) { in atc_prep_dma_memset_sg()
1033 desc->memset_paddr = paddr; in atc_prep_dma_memset_sg()
1034 desc->memset_vaddr = vaddr; in atc_prep_dma_memset_sg()
1035 desc->memset_buffer = true; in atc_prep_dma_memset_sg()
1037 first->txd.cookie = -EBUSY; in atc_prep_dma_memset_sg()
1038 first->total_len = total_len; in atc_prep_dma_memset_sg()
1040 /* set end-of-link on the descriptor */ in atc_prep_dma_memset_sg()
1043 first->txd.flags = flags; in atc_prep_dma_memset_sg()
1045 return &first->txd; in atc_prep_dma_memset_sg()
1053 * atc_prep_slave_sg - prepare descriptors for a DMA_SLAVE transaction
1067 struct at_dma_slave *atslave = chan->private; in atc_prep_slave_sg()
1068 struct dma_slave_config *sconfig = &atchan->dma_sconfig; in atc_prep_slave_sg()
1090 ctrla = ATC_SCSIZE(sconfig->src_maxburst) in atc_prep_slave_sg()
1091 | ATC_DCSIZE(sconfig->dst_maxburst); in atc_prep_slave_sg()
1096 reg_width = convert_buswidth(sconfig->dst_addr_width); in atc_prep_slave_sg()
1101 | ATC_SIF(atchan->mem_if) | ATC_DIF(atchan->per_if); in atc_prep_slave_sg()
1102 reg = sconfig->dst_addr; in atc_prep_slave_sg()
1123 desc->lli.saddr = mem; in atc_prep_slave_sg()
1124 desc->lli.daddr = reg; in atc_prep_slave_sg()
1125 desc->lli.ctrla = ctrla in atc_prep_slave_sg()
1128 desc->lli.ctrlb = ctrlb; in atc_prep_slave_sg()
1129 desc->len = len; in atc_prep_slave_sg()
1136 reg_width = convert_buswidth(sconfig->src_addr_width); in atc_prep_slave_sg()
1141 | ATC_SIF(atchan->per_if) | ATC_DIF(atchan->mem_if); in atc_prep_slave_sg()
1143 reg = sconfig->src_addr; in atc_prep_slave_sg()
1164 desc->lli.saddr = reg; in atc_prep_slave_sg()
1165 desc->lli.daddr = mem; in atc_prep_slave_sg()
1166 desc->lli.ctrla = ctrla in atc_prep_slave_sg()
1169 desc->lli.ctrlb = ctrlb; in atc_prep_slave_sg()
1170 desc->len = len; in atc_prep_slave_sg()
1180 	/* set end-of-link to the last link descriptor of the list */ in atc_prep_slave_sg()
1184 first->txd.cookie = -EBUSY; in atc_prep_slave_sg()
1185 first->total_len = total_len; in atc_prep_slave_sg()
1188 first->txd.flags = flags; /* client is in control of this ack */ in atc_prep_slave_sg()
1190 return &first->txd; in atc_prep_slave_sg()
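The slave path relies on a prior dmaengine_slave_config() call to fill the sconfig fields read above. A hypothetical mem-to-device setup, where fifo_phys, sgl, and sg_len are our names:

	struct dma_slave_config cfg = {
		.dst_addr	= fifo_phys,	/* peripheral FIFO bus address */
		.dst_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
		.dst_maxburst	= 16,
	};
	struct dma_async_tx_descriptor *tx;

	dmaengine_slave_config(chan, &cfg);
	tx = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_MEM_TO_DEV,
				     DMA_PREP_INTERRUPT);
	if (tx) {
		dmaengine_submit(tx);
		dma_async_issue_pending(chan);
	}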
1209 if (unlikely(period_len & ((1 << reg_width) - 1))) in atc_dma_cyclic_check_values()
1211 if (unlikely(buf_addr & ((1 << reg_width) - 1))) in atc_dma_cyclic_check_values()
1217 return -EINVAL; in atc_dma_cyclic_check_values()
1221 * atc_dma_cyclic_fill_desc - Fill one period descriptor
1230 struct dma_slave_config *sconfig = &atchan->dma_sconfig; in atc_dma_cyclic_fill_desc()
1234 ctrla = ATC_SCSIZE(sconfig->src_maxburst) in atc_dma_cyclic_fill_desc()
1235 | ATC_DCSIZE(sconfig->dst_maxburst) in atc_dma_cyclic_fill_desc()
1242 desc->lli.saddr = buf_addr + (period_len * period_index); in atc_dma_cyclic_fill_desc()
1243 desc->lli.daddr = sconfig->dst_addr; in atc_dma_cyclic_fill_desc()
1244 desc->lli.ctrla = ctrla; in atc_dma_cyclic_fill_desc()
1245 desc->lli.ctrlb = ATC_DST_ADDR_MODE_FIXED in atc_dma_cyclic_fill_desc()
1248 | ATC_SIF(atchan->mem_if) in atc_dma_cyclic_fill_desc()
1249 | ATC_DIF(atchan->per_if); in atc_dma_cyclic_fill_desc()
1250 desc->len = period_len; in atc_dma_cyclic_fill_desc()
1254 desc->lli.saddr = sconfig->src_addr; in atc_dma_cyclic_fill_desc()
1255 desc->lli.daddr = buf_addr + (period_len * period_index); in atc_dma_cyclic_fill_desc()
1256 desc->lli.ctrla = ctrla; in atc_dma_cyclic_fill_desc()
1257 desc->lli.ctrlb = ATC_DST_ADDR_MODE_INCR in atc_dma_cyclic_fill_desc()
1260 | ATC_SIF(atchan->per_if) in atc_dma_cyclic_fill_desc()
1261 | ATC_DIF(atchan->mem_if); in atc_dma_cyclic_fill_desc()
1262 desc->len = period_len; in atc_dma_cyclic_fill_desc()
1266 return -EINVAL; in atc_dma_cyclic_fill_desc()
1273 * atc_prep_dma_cyclic - prepare the cyclic DMA transfer
1287 struct at_dma_slave *atslave = chan->private; in atc_prep_dma_cyclic()
1288 struct dma_slave_config *sconfig = &atchan->dma_sconfig; in atc_prep_dma_cyclic()
1296 dev_vdbg(chan2dev(chan), "prep_dma_cyclic: %s buf@%pad - %d (%d/%d)\n", in atc_prep_dma_cyclic()
1306 was_cyclic = test_and_set_bit(ATC_IS_CYCLIC, &atchan->status); in atc_prep_dma_cyclic()
1316 reg_width = convert_buswidth(sconfig->dst_addr_width); in atc_prep_dma_cyclic()
1318 reg_width = convert_buswidth(sconfig->src_addr_width); in atc_prep_dma_cyclic()
1340 prev->lli.dscr = first->txd.phys; in atc_prep_dma_cyclic()
1343 first->txd.cookie = -EBUSY; in atc_prep_dma_cyclic()
1344 first->total_len = buf_len; in atc_prep_dma_cyclic()
1346 return &first->txd; in atc_prep_dma_cyclic()
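A hypothetical cyclic client, e.g. an audio or UART RX ring buffer (buf_phys, buf_len, period_len, and my_period_cb are our names); the callback set below is what atc_handle_cyclic() ends up invoking at each period boundary:

	struct dma_async_tx_descriptor *tx;

	tx = dmaengine_prep_dma_cyclic(chan, buf_phys, buf_len, period_len,
				       DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
	if (tx) {
		tx->callback = my_period_cb;	/* runs once per elapsed period */
		dmaengine_submit(tx);
		dma_async_issue_pending(chan);
	}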
1352 clear_bit(ATC_IS_CYCLIC, &atchan->status); in atc_prep_dma_cyclic()
1364 if (!chan->private) in atc_config()
1365 return -EINVAL; in atc_config()
1367 memcpy(&atchan->dma_sconfig, sconfig, sizeof(*sconfig)); in atc_config()
1369 convert_burst(&atchan->dma_sconfig.src_maxburst); in atc_config()
1370 convert_burst(&atchan->dma_sconfig.dst_maxburst); in atc_config()
1378 struct at_dma *atdma = to_at_dma(chan->device); in atc_pause()
1379 int chan_id = atchan->chan_common.chan_id; in atc_pause()
1384 spin_lock_irqsave(&atchan->lock, flags); in atc_pause()
1387 set_bit(ATC_IS_PAUSED, &atchan->status); in atc_pause()
1389 spin_unlock_irqrestore(&atchan->lock, flags); in atc_pause()
1397 struct at_dma *atdma = to_at_dma(chan->device); in atc_resume()
1398 int chan_id = atchan->chan_common.chan_id; in atc_resume()
1406 spin_lock_irqsave(&atchan->lock, flags); in atc_resume()
1409 clear_bit(ATC_IS_PAUSED, &atchan->status); in atc_resume()
1411 spin_unlock_irqrestore(&atchan->lock, flags); in atc_resume()
1419 struct at_dma *atdma = to_at_dma(chan->device); in atc_terminate_all()
1420 int chan_id = atchan->chan_common.chan_id; in atc_terminate_all()
1434 spin_lock_irqsave(&atchan->lock, flags); in atc_terminate_all()
1437 dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask); in atc_terminate_all()
1440 while (dma_readl(atdma, CHSR) & atchan->mask) in atc_terminate_all()
1444 list_splice_init(&atchan->queue, &list); in atc_terminate_all()
1445 list_splice_init(&atchan->active_list, &list); in atc_terminate_all()
1447 spin_unlock_irqrestore(&atchan->lock, flags); in atc_terminate_all()
1453 clear_bit(ATC_IS_PAUSED, &atchan->status); in atc_terminate_all()
1455 clear_bit(ATC_IS_CYCLIC, &atchan->status); in atc_terminate_all()
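Clients reach atc_pause(), atc_resume(), and atc_terminate_all() through the generic dmaengine wrappers; a hypothetical teardown sequence:

	dmaengine_pause(chan);		/* -> atc_pause(), sets ATC_IS_PAUSED */
	dmaengine_resume(chan);		/* -> atc_resume() */
	dmaengine_terminate_sync(chan);	/* -> atc_terminate_all(), then waits
					 * for any running callbacks to finish */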
1461 * atc_tx_status - poll for transaction completion
1468 * the status of multiple cookies without re-checking hardware state.
1490 spin_lock_irqsave(&atchan->lock, flags); in atc_tx_status()
1495 spin_unlock_irqrestore(&atchan->lock, flags); in atc_tx_status()
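The residue computed by atc_get_bytes_left() surfaces to clients through struct dma_tx_state; a hypothetical poll, with chan and cookie as our names:

	struct dma_tx_state state;
	enum dma_status status;

	status = dmaengine_tx_status(chan, cookie, &state);
	if (status == DMA_IN_PROGRESS || status == DMA_PAUSED)
		pr_info("bytes still pending: %u\n", state.residue);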
1511 * atc_issue_pending - try to finish work
1528 * atc_alloc_chan_resources - allocate resources for DMA channel
1531  * Return: the number of allocated descriptors
1536 struct at_dma *atdma = to_at_dma(chan->device); in atc_alloc_chan_resources()
1547 return -EIO; in atc_alloc_chan_resources()
1550 if (!list_empty(&atchan->free_list)) { in atc_alloc_chan_resources()
1552 return -EIO; in atc_alloc_chan_resources()
1557 atslave = chan->private; in atc_alloc_chan_resources()
1560 * We need controller-specific data to set up slave in atc_alloc_chan_resources()
1563 BUG_ON(!atslave->dma_dev || atslave->dma_dev != atdma->dma_common.dev); in atc_alloc_chan_resources()
1566 if (atslave->cfg) in atc_alloc_chan_resources()
1567 cfg = atslave->cfg; in atc_alloc_chan_resources()
1574 dev_err(atdma->dma_common.dev, in atc_alloc_chan_resources()
1578 list_add_tail(&desc->desc_node, &atchan->free_list); in atc_alloc_chan_resources()
1593 * atc_free_chan_resources - free all channel resources
1599 struct at_dma *atdma = to_at_dma(chan->device); in atc_free_chan_resources()
1604 BUG_ON(!list_empty(&atchan->active_list)); in atc_free_chan_resources()
1605 BUG_ON(!list_empty(&atchan->queue)); in atc_free_chan_resources()
1608 list_for_each_entry_safe(desc, _desc, &atchan->free_list, desc_node) { in atc_free_chan_resources()
1610 list_del(&desc->desc_node); in atc_free_chan_resources()
1612 dma_pool_free(atdma->dma_desc_pool, desc, desc->txd.phys); in atc_free_chan_resources()
1614 list_splice_init(&atchan->free_list, &list); in atc_free_chan_resources()
1615 atchan->status = 0; in atc_free_chan_resources()
1620 kfree(chan->private); in atc_free_chan_resources()
1621 chan->private = NULL; in atc_free_chan_resources()
1631 if (atslave->dma_dev == chan->device->dev) { in at_dma_filter()
1632 chan->private = atslave; in at_dma_filter()
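at_dma_filter() follows the classic filter-function pattern: a hypothetical board-code caller requests a channel and passes the at_dma_slave as the filter parameter (the mask setup below is the usual idiom):

	dma_cap_mask_t mask;
	struct dma_chan *chan;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);
	chan = dma_request_channel(mask, at_dma_filter, atslave);
	if (!chan)
		return -ENODEV;	/* no free channel on this controller */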
1649 if (dma_spec->args_count != 2) in at_dma_xlate()
1652 dmac_pdev = of_find_device_by_node(dma_spec->np); in at_dma_xlate()
1661 put_device(&dmac_pdev->dev); in at_dma_xlate()
1665 atslave->cfg = ATC_DST_H2SEL_HW | ATC_SRC_H2SEL_HW; in at_dma_xlate()
1670 per_id = dma_spec->args[1] & AT91_DMA_CFG_PER_ID_MASK; in at_dma_xlate()
1671 atslave->cfg |= ATC_DST_PER_MSB(per_id) | ATC_DST_PER(per_id) in at_dma_xlate()
1678 switch (dma_spec->args[1] & AT91_DMA_CFG_FIFOCFG_MASK) { in at_dma_xlate()
1680 atslave->cfg |= ATC_FIFOCFG_LARGESTBURST; in at_dma_xlate()
1683 atslave->cfg |= ATC_FIFOCFG_ENOUGHSPACE; in at_dma_xlate()
1687 atslave->cfg |= ATC_FIFOCFG_HALFFIFO; in at_dma_xlate()
1689 atslave->dma_dev = &dmac_pdev->dev; in at_dma_xlate()
1693 put_device(&dmac_pdev->dev); in at_dma_xlate()
1699 atchan->per_if = dma_spec->args[0] & 0xff; in at_dma_xlate()
1700 atchan->mem_if = (dma_spec->args[0] >> 16) & 0xff; in at_dma_xlate()
1712 /*-- Module Management -----------------------------------------------*/
1714 /* cap_mask is a multi-u32 bitfield, fill it with proper C code. */
1725 .compatible = "atmel,at91sam9rl-dma",
1728 .compatible = "atmel,at91sam9g45-dma",
1753 if (pdev->dev.of_node) { in at_dma_get_driver_data()
1755 match = of_match_node(atmel_dma_dt_ids, pdev->dev.of_node); in at_dma_get_driver_data()
1758 return match->data; in at_dma_get_driver_data()
1761 platform_get_device_id(pdev)->driver_data; in at_dma_get_driver_data()
1765 * at_dma_off - disable DMA controller
1773 dma_writel(atdma, EBCIDR, -1L); in at_dma_off()
1776 while (dma_readl(atdma, CHSR) & atdma->all_chan_mask) in at_dma_off()
1802 return -ENODEV; in at_dma_probe()
1806 return -EINVAL; in at_dma_probe()
1813 size += plat_dat->nr_channels * sizeof(struct at_dma_chan); in at_dma_probe()
1816 return -ENOMEM; in at_dma_probe()
1819 atdma->dma_common.cap_mask = plat_dat->cap_mask; in at_dma_probe()
1820 atdma->all_chan_mask = (1 << plat_dat->nr_channels) - 1; in at_dma_probe()
1823 if (!request_mem_region(io->start, size, pdev->dev.driver->name)) { in at_dma_probe()
1824 err = -EBUSY; in at_dma_probe()
1828 atdma->regs = ioremap(io->start, size); in at_dma_probe()
1829 if (!atdma->regs) { in at_dma_probe()
1830 err = -ENOMEM; in at_dma_probe()
1834 atdma->clk = clk_get(&pdev->dev, "dma_clk"); in at_dma_probe()
1835 if (IS_ERR(atdma->clk)) { in at_dma_probe()
1836 err = PTR_ERR(atdma->clk); in at_dma_probe()
1839 err = clk_prepare_enable(atdma->clk); in at_dma_probe()
1853 atdma->dma_desc_pool = dma_pool_create("at_hdmac_desc_pool", in at_dma_probe()
1854 &pdev->dev, sizeof(struct at_desc), in at_dma_probe()
1856 if (!atdma->dma_desc_pool) { in at_dma_probe()
1857 dev_err(&pdev->dev, "No memory for descriptors dma pool\n"); in at_dma_probe()
1858 err = -ENOMEM; in at_dma_probe()
1863 atdma->memset_pool = dma_pool_create("at_hdmac_memset_pool", in at_dma_probe()
1864 &pdev->dev, sizeof(int), 4, 0); in at_dma_probe()
1865 if (!atdma->memset_pool) { in at_dma_probe()
1866 dev_err(&pdev->dev, "No memory for memset dma pool\n"); in at_dma_probe()
1867 err = -ENOMEM; in at_dma_probe()
1876 INIT_LIST_HEAD(&atdma->dma_common.channels); in at_dma_probe()
1877 for (i = 0; i < plat_dat->nr_channels; i++) { in at_dma_probe()
1878 struct at_dma_chan *atchan = &atdma->chan[i]; in at_dma_probe()
1880 atchan->mem_if = AT_DMA_MEM_IF; in at_dma_probe()
1881 atchan->per_if = AT_DMA_PER_IF; in at_dma_probe()
1882 atchan->chan_common.device = &atdma->dma_common; in at_dma_probe()
1883 dma_cookie_init(&atchan->chan_common); in at_dma_probe()
1884 list_add_tail(&atchan->chan_common.device_node, in at_dma_probe()
1885 &atdma->dma_common.channels); in at_dma_probe()
1887 atchan->ch_regs = atdma->regs + ch_regs(i); in at_dma_probe()
1888 spin_lock_init(&atchan->lock); in at_dma_probe()
1889 atchan->mask = 1 << i; in at_dma_probe()
1891 INIT_LIST_HEAD(&atchan->active_list); in at_dma_probe()
1892 INIT_LIST_HEAD(&atchan->queue); in at_dma_probe()
1893 INIT_LIST_HEAD(&atchan->free_list); in at_dma_probe()
1895 tasklet_setup(&atchan->tasklet, atc_tasklet); in at_dma_probe()
1900 atdma->dma_common.device_alloc_chan_resources = atc_alloc_chan_resources; in at_dma_probe()
1901 atdma->dma_common.device_free_chan_resources = atc_free_chan_resources; in at_dma_probe()
1902 atdma->dma_common.device_tx_status = atc_tx_status; in at_dma_probe()
1903 atdma->dma_common.device_issue_pending = atc_issue_pending; in at_dma_probe()
1904 atdma->dma_common.dev = &pdev->dev; in at_dma_probe()
1906 /* set prep routines based on capability */ in at_dma_probe()
1907 if (dma_has_cap(DMA_INTERLEAVE, atdma->dma_common.cap_mask)) in at_dma_probe()
1908 atdma->dma_common.device_prep_interleaved_dma = atc_prep_dma_interleaved; in at_dma_probe()
1910 if (dma_has_cap(DMA_MEMCPY, atdma->dma_common.cap_mask)) in at_dma_probe()
1911 atdma->dma_common.device_prep_dma_memcpy = atc_prep_dma_memcpy; in at_dma_probe()
1913 if (dma_has_cap(DMA_MEMSET, atdma->dma_common.cap_mask)) { in at_dma_probe()
1914 atdma->dma_common.device_prep_dma_memset = atc_prep_dma_memset; in at_dma_probe()
1915 atdma->dma_common.device_prep_dma_memset_sg = atc_prep_dma_memset_sg; in at_dma_probe()
1916 atdma->dma_common.fill_align = DMAENGINE_ALIGN_4_BYTES; in at_dma_probe()
1919 if (dma_has_cap(DMA_SLAVE, atdma->dma_common.cap_mask)) { in at_dma_probe()
1920 atdma->dma_common.device_prep_slave_sg = atc_prep_slave_sg; in at_dma_probe()
1922 dma_cap_set(DMA_CYCLIC, atdma->dma_common.cap_mask); in at_dma_probe()
1923 atdma->dma_common.device_prep_dma_cyclic = atc_prep_dma_cyclic; in at_dma_probe()
1924 atdma->dma_common.device_config = atc_config; in at_dma_probe()
1925 atdma->dma_common.device_pause = atc_pause; in at_dma_probe()
1926 atdma->dma_common.device_resume = atc_resume; in at_dma_probe()
1927 atdma->dma_common.device_terminate_all = atc_terminate_all; in at_dma_probe()
1928 atdma->dma_common.src_addr_widths = ATC_DMA_BUSWIDTHS; in at_dma_probe()
1929 atdma->dma_common.dst_addr_widths = ATC_DMA_BUSWIDTHS; in at_dma_probe()
1930 atdma->dma_common.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); in at_dma_probe()
1931 atdma->dma_common.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in at_dma_probe()
1936 	dev_info(&pdev->dev, "Atmel AHB DMA Controller (%s%s%s), %d channels\n", in at_dma_probe()
1937 dma_has_cap(DMA_MEMCPY, atdma->dma_common.cap_mask) ? "cpy " : "", in at_dma_probe()
1938 dma_has_cap(DMA_MEMSET, atdma->dma_common.cap_mask) ? "set " : "", in at_dma_probe()
1939 dma_has_cap(DMA_SLAVE, atdma->dma_common.cap_mask) ? "slave " : "", in at_dma_probe()
1940 plat_dat->nr_channels); in at_dma_probe()
1942 dma_async_device_register(&atdma->dma_common); in at_dma_probe()
1949 if (pdev->dev.of_node) { in at_dma_probe()
1950 err = of_dma_controller_register(pdev->dev.of_node, in at_dma_probe()
1953 dev_err(&pdev->dev, "could not register of_dma_controller\n"); in at_dma_probe()
1961 dma_async_device_unregister(&atdma->dma_common); in at_dma_probe()
1962 dma_pool_destroy(atdma->memset_pool); in at_dma_probe()
1964 dma_pool_destroy(atdma->dma_desc_pool); in at_dma_probe()
1968 clk_disable_unprepare(atdma->clk); in at_dma_probe()
1970 clk_put(atdma->clk); in at_dma_probe()
1972 iounmap(atdma->regs); in at_dma_probe()
1973 atdma->regs = NULL; in at_dma_probe()
1975 release_mem_region(io->start, size); in at_dma_probe()
1988 if (pdev->dev.of_node) in at_dma_remove()
1989 of_dma_controller_free(pdev->dev.of_node); in at_dma_remove()
1990 dma_async_device_unregister(&atdma->dma_common); in at_dma_remove()
1992 dma_pool_destroy(atdma->memset_pool); in at_dma_remove()
1993 dma_pool_destroy(atdma->dma_desc_pool); in at_dma_remove()
1996 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels, in at_dma_remove()
2001 atc_disable_chan_irq(atdma, chan->chan_id); in at_dma_remove()
2003 tasklet_kill(&atchan->tasklet); in at_dma_remove()
2004 list_del(&chan->device_node); in at_dma_remove()
2007 clk_disable_unprepare(atdma->clk); in at_dma_remove()
2008 clk_put(atdma->clk); in at_dma_remove()
2010 iounmap(atdma->regs); in at_dma_remove()
2011 atdma->regs = NULL; in at_dma_remove()
2014 release_mem_region(io->start, resource_size(io)); in at_dma_remove()
2026 clk_disable_unprepare(atdma->clk); in at_dma_shutdown()
2034 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels, in at_dma_prepare()
2039 return -EAGAIN; in at_dma_prepare()
2046 struct dma_chan *chan = &atchan->chan_common; in atc_suspend_cyclic()
2058 atchan->save_dscr = channel_readl(atchan, DSCR); in atc_suspend_cyclic()
2069 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels, in at_dma_suspend_noirq()
2075 atchan->save_cfg = channel_readl(atchan, CFG); in at_dma_suspend_noirq()
2077 atdma->save_imr = dma_readl(atdma, EBCIMR); in at_dma_suspend_noirq()
2081 clk_disable_unprepare(atdma->clk); in at_dma_suspend_noirq()
2087 struct at_dma *atdma = to_at_dma(atchan->chan_common.device); in atc_resume_cyclic()
2095 channel_writel(atchan, DSCR, atchan->save_dscr); in atc_resume_cyclic()
2096 dma_writel(atdma, CHER, atchan->mask); in atc_resume_cyclic()
2110 clk_prepare_enable(atdma->clk); in at_dma_resume_noirq()
2118 dma_writel(atdma, EBCIER, atdma->save_imr); in at_dma_resume_noirq()
2119 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels, in at_dma_resume_noirq()
2123 channel_writel(atchan, CFG, atchan->save_cfg); in at_dma_resume_noirq()