Lines Matching +full:llp +full:- +full:based

17 #include <dt-bindings/dma/at91.h>
20 #include <linux/dma-mapping.h>
35 * --------
68 /*----------------------------------------------------------------------*/
87 return list_first_entry(&atchan->active_list, in atc_first_active()
93 return list_first_entry(&atchan->queue, in atc_first_queued()
98 * atc_alloc_descriptor - allocate and return an initialized descriptor
102 * Note: The ack-bit is positioned in the descriptor flag at creation time
111 struct at_dma *atdma = to_at_dma(chan->device); in atc_alloc_descriptor()
114 desc = dma_pool_zalloc(atdma->dma_desc_pool, gfp_flags, &phys); in atc_alloc_descriptor()
116 INIT_LIST_HEAD(&desc->tx_list); in atc_alloc_descriptor()
117 dma_async_tx_descriptor_init(&desc->txd, chan); in atc_alloc_descriptor()
119 desc->txd.flags = DMA_CTRL_ACK; in atc_alloc_descriptor()
120 desc->txd.tx_submit = atc_tx_submit; in atc_alloc_descriptor()
121 desc->txd.phys = phys; in atc_alloc_descriptor()
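Pieced together from the fragments above, the allocation path looks roughly like this; the NULL check around the pool hit is an assumption, since the listing shows only the matching lines:

static struct at_desc *atc_alloc_descriptor(struct dma_chan *chan,
                                            gfp_t gfp_flags)
{
        struct at_dma *atdma = to_at_dma(chan->device);
        struct at_desc *desc;
        dma_addr_t phys;

        desc = dma_pool_zalloc(atdma->dma_desc_pool, gfp_flags, &phys);
        if (desc) {
                INIT_LIST_HEAD(&desc->tx_list);
                dma_async_tx_descriptor_init(&desc->txd, chan);
                /* ack-bit set at creation; prep functions overwrite txd.flags */
                desc->txd.flags = DMA_CTRL_ACK;
                desc->txd.tx_submit = atc_tx_submit;
                desc->txd.phys = phys;
        }
        return desc;
}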
128 * atc_desc_get - get an unused descriptor from free_list
139 spin_lock_irqsave(&atchan->lock, flags); in atc_desc_get()
140 list_for_each_entry_safe(desc, _desc, &atchan->free_list, desc_node) { in atc_desc_get()
142 if (async_tx_test_ack(&desc->txd)) { in atc_desc_get()
143 list_del(&desc->desc_node); in atc_desc_get()
147 dev_dbg(chan2dev(&atchan->chan_common), in atc_desc_get()
150 spin_unlock_irqrestore(&atchan->lock, flags); in atc_desc_get()
151 dev_vdbg(chan2dev(&atchan->chan_common), in atc_desc_get()
156 ret = atc_alloc_descriptor(&atchan->chan_common, GFP_ATOMIC); in atc_desc_get()
158 spin_lock_irqsave(&atchan->lock, flags); in atc_desc_get()
159 atchan->descs_allocated++; in atc_desc_get()
160 spin_unlock_irqrestore(&atchan->lock, flags); in atc_desc_get()
162 dev_err(chan2dev(&atchan->chan_common), in atc_desc_get()
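The get path tries to recycle before it allocates. A minimal sketch of the control flow implied by the fragments, with the diagnostics dropped and the exact branch structure assumed:

static struct at_desc *atc_desc_get(struct at_dma_chan *atchan)
{
        struct at_desc *desc, *_desc, *ret = NULL;
        unsigned long flags;

        /* first pass: reuse any descriptor the client has ACKed */
        spin_lock_irqsave(&atchan->lock, flags);
        list_for_each_entry_safe(desc, _desc, &atchan->free_list, desc_node) {
                if (async_tx_test_ack(&desc->txd)) {
                        list_del(&desc->desc_node);
                        ret = desc;
                        break;
                }
        }
        spin_unlock_irqrestore(&atchan->lock, flags);

        /* fall back to an atomic allocation and account for it */
        if (!ret) {
                ret = atc_alloc_descriptor(&atchan->chan_common, GFP_ATOMIC);
                if (ret) {
                        spin_lock_irqsave(&atchan->lock, flags);
                        atchan->descs_allocated++;
                        spin_unlock_irqrestore(&atchan->lock, flags);
                }
        }
        return ret;
}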
171 * atc_desc_put - move a descriptor, including any children, to the free list
181 spin_lock_irqsave(&atchan->lock, flags); in atc_desc_put()
182 list_for_each_entry(child, &desc->tx_list, desc_node) in atc_desc_put()
183 dev_vdbg(chan2dev(&atchan->chan_common), in atc_desc_put()
186 list_splice_init(&desc->tx_list, &atchan->free_list); in atc_desc_put()
187 dev_vdbg(chan2dev(&atchan->chan_common), in atc_desc_put()
189 list_add(&desc->desc_node, &atchan->free_list); in atc_desc_put()
190 spin_unlock_irqrestore(&atchan->lock, flags); in atc_desc_put()
195 * atc_desc_chain - build chain adding a descriptor
209 (*prev)->lli.dscr = desc->txd.phys; in atc_desc_chain()
211 list_add_tail(&desc->desc_node, in atc_desc_chain()
212 &(*first)->tx_list); in atc_desc_chain()
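The chaining helper is short enough to reconstruct almost verbatim from the two lines above; the first/else split is assumed:

static void atc_desc_chain(struct at_desc **first, struct at_desc **prev,
                           struct at_desc *desc)
{
        if (!(*first)) {
                *first = desc;
        } else {
                /* hardware follows lli.dscr to fetch the next lli */
                (*prev)->lli.dscr = desc->txd.phys;
                /* software keeps the children on the first descriptor */
                list_add_tail(&desc->desc_node, &(*first)->tx_list);
        }
        *prev = desc;
}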
218 * atc_dostart - starts the DMA engine for real
222 * Called with atchan->lock held and bh disabled
226 struct at_dma *atdma = to_at_dma(atchan->chan_common.device); in atc_dostart()
230 dev_err(chan2dev(&atchan->chan_common), in atc_dostart()
231 "BUG: Attempted to start non-idle channel\n"); in atc_dostart()
232 dev_err(chan2dev(&atchan->chan_common), in atc_dostart()
250 channel_writel(atchan, DSCR, first->txd.phys); in atc_dostart()
251 channel_writel(atchan, SPIP, ATC_SPIP_HOLE(first->src_hole) | in atc_dostart()
252 ATC_SPIP_BOUNDARY(first->boundary)); in atc_dostart()
253 channel_writel(atchan, DPIP, ATC_DPIP_HOLE(first->dst_hole) | in atc_dostart()
254 ATC_DPIP_BOUNDARY(first->boundary)); in atc_dostart()
255 dma_writel(atdma, CHER, atchan->mask); in atc_dostart()
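Read together, the start sequence programs the first hardware lli and the picture-in-picture registers before enabling the channel. The initial zeroing of SADDR/DADDR/CTRLA/CTRLB shown here is an assumption about the elided lines:

        /* clear the per-buffer registers: the lli chain supplies them */
        channel_writel(atchan, SADDR, 0);
        channel_writel(atchan, DADDR, 0);
        channel_writel(atchan, CTRLA, 0);
        channel_writel(atchan, CTRLB, 0);
        /* point the channel at the first lli and set src/dst striding */
        channel_writel(atchan, DSCR, first->txd.phys);
        channel_writel(atchan, SPIP, ATC_SPIP_HOLE(first->src_hole) |
                       ATC_SPIP_BOUNDARY(first->boundary));
        channel_writel(atchan, DPIP, ATC_DPIP_HOLE(first->dst_hole) |
                       ATC_DPIP_BOUNDARY(first->boundary));
        /* finally enable the channel */
        dma_writel(atdma, CHER, atchan->mask);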
261 * atc_get_desc_by_cookie - get the descriptor of a cookie
270 list_for_each_entry_safe(desc, _desc, &atchan->queue, desc_node) { in atc_get_desc_by_cookie()
271 if (desc->txd.cookie == cookie) in atc_get_desc_by_cookie()
275 list_for_each_entry_safe(desc, _desc, &atchan->active_list, desc_node) { in atc_get_desc_by_cookie()
276 if (desc->txd.cookie == cookie) in atc_get_desc_by_cookie()
284 * atc_calc_bytes_left - calculates the number of bytes left according to the
301 return current_len - (btsize << src_width); in atc_calc_bytes_left()
305 * atc_get_bytes_left - get the number of bytes residue for a cookie
324 return -EINVAL; in atc_get_bytes_left()
326 return desc->total_len; in atc_get_bytes_left()
329 ret = desc_first->total_len; in atc_get_bytes_left()
331 if (desc_first->lli.dscr) { in atc_get_bytes_left()
358 * - If the DMA transfer is paused, RX overruns or TX underruns in atc_get_bytes_left()
365 * - The atc_pause() function masks interrupts but we'd rather in atc_get_bytes_left()
412 return -ETIMEDOUT; in atc_get_bytes_left()
415 if (desc_first->lli.dscr == dscr) in atc_get_bytes_left()
418 ret -= desc_first->len; in atc_get_bytes_left()
419 list_for_each_entry(desc, &desc_first->tx_list, desc_node) { in atc_get_bytes_left()
420 if (desc->lli.dscr == dscr) in atc_get_bytes_left()
423 ret -= desc->len; in atc_get_bytes_left()
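The residue walk implied by these fragments: start from the full transaction length, retire every lli the controller has already moved past (found by comparing each lli's link pointer against the sampled DSCR register), then apply the atc_calc_bytes_left() correction for the lli in flight. A sketch, where dscr and ctrla are assumed to have been read back from the channel registers and the helper's signature is assumed:

        ret = desc_first->total_len;
        if (desc_first->lli.dscr != dscr) {
                /* hardware is past the first lli: retire it, then every
                 * child whose link does not yet match the sampled DSCR */
                ret -= desc_first->len;
                list_for_each_entry(desc, &desc_first->tx_list, desc_node) {
                        if (desc->lli.dscr == dscr)
                                break;
                        ret -= desc->len;
                }
        }
        /* correct for the in-flight lli, cf. the formula above:
         * bytes_left = current_len - (btsize << src_width) */
        return atc_calc_bytes_left(ret, ctrla);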
441 * atc_chain_complete - finish work for one transaction chain
445 * Called with atchan->lock held and bh disabled */
449 struct dma_async_tx_descriptor *txd = &desc->txd; in atc_chain_complete()
450 struct at_dma *atdma = to_at_dma(atchan->chan_common.device); in atc_chain_complete()
452 dev_vdbg(chan2dev(&atchan->chan_common), in atc_chain_complete()
453 "descriptor %u complete\n", txd->cookie); in atc_chain_complete()
460 if (desc->memset_buffer) { in atc_chain_complete()
461 dma_pool_free(atdma->memset_pool, desc->memset_vaddr, in atc_chain_complete()
462 desc->memset_paddr); in atc_chain_complete()
463 desc->memset_buffer = false; in atc_chain_complete()
467 list_splice_init(&desc->tx_list, &atchan->free_list); in atc_chain_complete()
469 list_move(&desc->desc_node, &atchan->free_list); in atc_chain_complete()
486 * atc_complete_all - finish work for all transactions
492 * Called with atchan->lock held and bh disabled
499 dev_vdbg(chan2dev(&atchan->chan_common), "complete all\n"); in atc_complete_all()
505 if (!list_empty(&atchan->queue)) in atc_complete_all()
508 list_splice_init(&atchan->active_list, &list); in atc_complete_all()
510 list_splice_init(&atchan->queue, &atchan->active_list); in atc_complete_all()
517 * atc_advance_work - at the end of a transaction, move forward
520 * Called with atchan->lock held and bh disabled
524 dev_vdbg(chan2dev(&atchan->chan_common), "advance_work\n"); in atc_advance_work()
529 if (list_empty(&atchan->active_list) || in atc_advance_work()
530 list_is_singular(&atchan->active_list)) { in atc_advance_work()
541 * atc_handle_error - handle errors reported by DMA controller
544 * Called with atchan->lock held and bh disabled
557 list_del_init(&bad_desc->desc_node); in atc_handle_error()
561 list_splice_init(&atchan->queue, atchan->active_list.prev); in atc_handle_error()
564 if (!list_empty(&atchan->active_list)) in atc_handle_error()
574 dev_crit(chan2dev(&atchan->chan_common), in atc_handle_error()
576 dev_crit(chan2dev(&atchan->chan_common), in atc_handle_error()
577 " cookie: %d\n", bad_desc->txd.cookie); in atc_handle_error()
578 atc_dump_lli(atchan, &bad_desc->lli); in atc_handle_error()
579 list_for_each_entry(child, &bad_desc->tx_list, desc_node) in atc_handle_error()
580 atc_dump_lli(atchan, &child->lli); in atc_handle_error()
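The error path's strategy, visible across the fragments: drop the offending descriptor from the active list, promote everything queued, restart the engine on whatever is now first, and only then report the failure. A condensed sketch:

        bad_desc = atc_first_active(atchan);
        list_del_init(&bad_desc->desc_node);

        /* pending transactions get a new chance to run */
        list_splice_init(&atchan->queue, atchan->active_list.prev);

        /* restart immediately if anything is left to do */
        if (!list_empty(&atchan->active_list))
                atc_dostart(atchan, atc_first_active(atchan));

        /* dump the offending lli chain for diagnosis */
        atc_dump_lli(atchan, &bad_desc->lli);
        list_for_each_entry(child, &bad_desc->tx_list, desc_node)
                atc_dump_lli(atchan, &child->lli);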
587 * atc_handle_cyclic - at the end of a period, run callback function
590 * Called with atchan->lock held and bh disabled
595 struct dma_async_tx_descriptor *txd = &first->txd; in atc_handle_cyclic()
597 dev_vdbg(chan2dev(&atchan->chan_common), in atc_handle_cyclic()
598 "new cyclic period llp 0x%08x\n", in atc_handle_cyclic()
604 /*-- IRQ & Tasklet ---------------------------------------------------*/
611 spin_lock_irqsave(&atchan->lock, flags); in atc_tasklet()
612 if (test_and_clear_bit(ATC_IS_ERROR, &atchan->status)) in atc_tasklet()
619 spin_unlock_irqrestore(&atchan->lock, flags); in atc_tasklet()
638 dev_vdbg(atdma->dma_common.dev, in at_dma_interrupt()
642 for (i = 0; i < atdma->dma_common.chancnt; i++) { in at_dma_interrupt()
643 atchan = &atdma->chan[i]; in at_dma_interrupt()
648 AT_DMA_RES(i) | atchan->mask); in at_dma_interrupt()
650 set_bit(ATC_IS_ERROR, &atchan->status); in at_dma_interrupt()
652 tasklet_schedule(&atchan->tasklet); in at_dma_interrupt()
663 /*-- DMA Engine API --------------------------------------------------*/
666 * atc_tx_submit - set the prepared descriptor(s) to be executed by the engine
676 struct at_dma_chan *atchan = to_at_dma_chan(tx->chan); in atc_tx_submit()
680 spin_lock_irqsave(&atchan->lock, flags); in atc_tx_submit()
683 if (list_empty(&atchan->active_list)) { in atc_tx_submit()
684 dev_vdbg(chan2dev(tx->chan), "tx_submit: started %u\n", in atc_tx_submit()
685 desc->txd.cookie); in atc_tx_submit()
687 list_add_tail(&desc->desc_node, &atchan->active_list); in atc_tx_submit()
689 dev_vdbg(chan2dev(tx->chan), "tx_submit: queued %u\n", in atc_tx_submit()
690 desc->txd.cookie); in atc_tx_submit()
691 list_add_tail(&desc->desc_node, &atchan->queue); in atc_tx_submit()
694 spin_unlock_irqrestore(&atchan->lock, flags); in atc_tx_submit()
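Submission is where the active/queue split from the earlier fragments pays off: a cookie is assigned under the channel lock, and the engine is only kicked when the channel was idle. A sketch, where txd_to_at_desc() is assumed to be the driver's container_of helper and dma_cookie_assign() comes from the generic dmaengine cookie helpers:

static dma_cookie_t atc_tx_submit(struct dma_async_tx_descriptor *tx)
{
        struct at_desc *desc = txd_to_at_desc(tx);
        struct at_dma_chan *atchan = to_at_dma_chan(tx->chan);
        dma_cookie_t cookie;
        unsigned long flags;

        spin_lock_irqsave(&atchan->lock, flags);
        cookie = dma_cookie_assign(tx);

        if (list_empty(&atchan->active_list)) {
                /* idle channel: start the transfer right away */
                atc_dostart(atchan, desc);
                list_add_tail(&desc->desc_node, &atchan->active_list);
        } else {
                /* busy: park it until atc_advance_work() picks it up */
                list_add_tail(&desc->desc_node, &atchan->queue);
        }

        spin_unlock_irqrestore(&atchan->lock, flags);
        return cookie;
}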
700 * atc_prep_dma_interleaved - prepare memory to memory interleaved operation
720 if (unlikely(!xt || xt->numf != 1 || !xt->frame_size)) in atc_prep_dma_interleaved()
723 first = xt->sgl; in atc_prep_dma_interleaved()
727 __func__, &xt->src_start, &xt->dst_start, xt->numf, in atc_prep_dma_interleaved()
728 xt->frame_size, flags); in atc_prep_dma_interleaved()
736 for (i = 0; i < xt->frame_size; i++) { in atc_prep_dma_interleaved()
737 struct data_chunk *chunk = xt->sgl + i; in atc_prep_dma_interleaved()
739 if ((chunk->size != xt->sgl->size) || in atc_prep_dma_interleaved()
748 len += chunk->size; in atc_prep_dma_interleaved()
751 dwidth = atc_get_xfer_width(xt->src_start, in atc_prep_dma_interleaved()
752 xt->dst_start, len); in atc_prep_dma_interleaved()
778 desc->lli.saddr = xt->src_start; in atc_prep_dma_interleaved()
779 desc->lli.daddr = xt->dst_start; in atc_prep_dma_interleaved()
780 desc->lli.ctrla = ctrla | xfer_count; in atc_prep_dma_interleaved()
781 desc->lli.ctrlb = ctrlb; in atc_prep_dma_interleaved()
783 desc->boundary = first->size >> dwidth; in atc_prep_dma_interleaved()
784 desc->dst_hole = (dmaengine_get_dst_icg(xt, first) >> dwidth) + 1; in atc_prep_dma_interleaved()
785 desc->src_hole = (dmaengine_get_src_icg(xt, first) >> dwidth) + 1; in atc_prep_dma_interleaved()
787 desc->txd.cookie = -EBUSY; in atc_prep_dma_interleaved()
788 desc->total_len = desc->len = len; in atc_prep_dma_interleaved()
790 /* set end-of-link on the last link descriptor of the list */ in atc_prep_dma_interleaved()
793 desc->txd.flags = flags; /* client is in control of this ack */ in atc_prep_dma_interleaved()
795 return &desc->txd; in atc_prep_dma_interleaved()
799 * atc_prep_dma_memcpy - prepare a memcpy operation
844 xfer_count = min_t(size_t, (len - offset) >> src_width, in atc_prep_dma_memcpy()
851 desc->lli.saddr = src + offset; in atc_prep_dma_memcpy()
852 desc->lli.daddr = dest + offset; in atc_prep_dma_memcpy()
853 desc->lli.ctrla = ctrla | xfer_count; in atc_prep_dma_memcpy()
854 desc->lli.ctrlb = ctrlb; in atc_prep_dma_memcpy()
856 desc->txd.cookie = 0; in atc_prep_dma_memcpy()
857 desc->len = xfer_count << src_width; in atc_prep_dma_memcpy()
863 first->txd.cookie = -EBUSY; in atc_prep_dma_memcpy()
864 first->total_len = len; in atc_prep_dma_memcpy()
866 /* set end-of-link on the last link descriptor of the list */ in atc_prep_dma_memcpy()
869 first->txd.flags = flags; /* client is in control of this ack */ in atc_prep_dma_memcpy()
871 return &first->txd; in atc_prep_dma_memcpy()
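The memcpy prep loop splits the copy into BTSIZE-bounded llis. Reconstructed from the fragments, with the loop header and the error-unwind label assumed:

        for (offset = 0; offset < len; offset += xfer_count << src_width) {
                /* each lli moves at most ATC_BTSIZE_MAX beats */
                xfer_count = min_t(size_t, (len - offset) >> src_width,
                                   ATC_BTSIZE_MAX);

                desc = atc_desc_get(atchan);
                if (!desc)
                        goto err_desc_get;      /* atc_desc_put(first) */

                desc->lli.saddr = src + offset;
                desc->lli.daddr = dest + offset;
                desc->lli.ctrla = ctrla | xfer_count;
                desc->lli.ctrlb = ctrlb;

                desc->txd.cookie = 0;
                desc->len = xfer_count << src_width;

                atc_desc_chain(&first, &prev, desc);
        }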
907 desc->lli.saddr = psrc; in atc_create_memset_desc()
908 desc->lli.daddr = pdst; in atc_create_memset_desc()
909 desc->lli.ctrla = ctrla | xfer_count; in atc_create_memset_desc()
910 desc->lli.ctrlb = ctrlb; in atc_create_memset_desc()
912 desc->txd.cookie = 0; in atc_create_memset_desc()
913 desc->len = len; in atc_create_memset_desc()
919 * atc_prep_dma_memset - prepare a memset operation in atc_prep_dma_memset()
930 struct at_dma *atdma = to_at_dma(chan->device); in atc_prep_dma_memset()
943 if (!is_dma_fill_aligned(chan->device, dest, 0, len)) { in atc_prep_dma_memset()
949 vaddr = dma_pool_alloc(atdma->memset_pool, GFP_ATOMIC, &paddr); in atc_prep_dma_memset()
964 desc->memset_paddr = paddr; in atc_prep_dma_memset()
965 desc->memset_vaddr = vaddr; in atc_prep_dma_memset()
966 desc->memset_buffer = true; in atc_prep_dma_memset()
968 desc->txd.cookie = -EBUSY; in atc_prep_dma_memset()
969 desc->total_len = len; in atc_prep_dma_memset()
971 /* set end-of-link on the descriptor */ in atc_prep_dma_memset()
974 desc->txd.flags = flags; in atc_prep_dma_memset()
976 return &desc->txd; in atc_prep_dma_memset()
979 dma_pool_free(atdma->memset_pool, vaddr, paddr); in atc_prep_dma_memset()
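The memset fragments describe a neat trick: the controller has no fill mode, so the byte pattern is replicated into a small coherent buffer from memset_pool and the engine copies from that fixed source address. A sketch of the setup; the word-replication line and the helper's signature are assumptions based on the atc_create_memset_desc() fragments above:

        vaddr = dma_pool_alloc(atdma->memset_pool, GFP_ATOMIC, &paddr);
        if (!vaddr)
                return NULL;
        /* replicate the fill byte across a 32-bit word */
        *(u32 *)vaddr = (value << 24) | (value << 16) | (value << 8) | value;

        desc = atc_create_memset_desc(chan, paddr, dest, len);
        if (!desc)
                goto err_free_buffer;   /* return the pool buffer */

        /* remember the buffer so atc_chain_complete() can free it */
        desc->memset_paddr = paddr;
        desc->memset_vaddr = vaddr;
        desc->memset_buffer = true;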
990 struct at_dma *atdma = to_at_dma(chan->device); in atc_prep_dma_memset_sg()
1007 vaddr = dma_pool_alloc(atdma->memset_pool, GFP_ATOMIC, &paddr); in atc_prep_dma_memset_sg()
1022 if (!is_dma_fill_aligned(chan->device, dest, 0, len)) { in atc_prep_dma_memset_sg()
1041 desc->memset_paddr = paddr; in atc_prep_dma_memset_sg()
1042 desc->memset_vaddr = vaddr; in atc_prep_dma_memset_sg()
1043 desc->memset_buffer = true; in atc_prep_dma_memset_sg()
1045 first->txd.cookie = -EBUSY; in atc_prep_dma_memset_sg()
1046 first->total_len = total_len; in atc_prep_dma_memset_sg()
1048 /* set end-of-link on the descriptor */ in atc_prep_dma_memset_sg()
1051 first->txd.flags = flags; in atc_prep_dma_memset_sg()
1053 return &first->txd; in atc_prep_dma_memset_sg()
1061 * atc_prep_slave_sg - prepare descriptors for a DMA_SLAVE transaction
1075 struct at_dma_slave *atslave = chan->private; in atc_prep_slave_sg()
1076 struct dma_slave_config *sconfig = &atchan->dma_sconfig; in atc_prep_slave_sg()
1098 ctrla = ATC_SCSIZE(sconfig->src_maxburst) in atc_prep_slave_sg()
1099 | ATC_DCSIZE(sconfig->dst_maxburst); in atc_prep_slave_sg()
1104 reg_width = convert_buswidth(sconfig->dst_addr_width); in atc_prep_slave_sg()
1109 | ATC_SIF(atchan->mem_if) | ATC_DIF(atchan->per_if); in atc_prep_slave_sg()
1110 reg = sconfig->dst_addr; in atc_prep_slave_sg()
1131 desc->lli.saddr = mem; in atc_prep_slave_sg()
1132 desc->lli.daddr = reg; in atc_prep_slave_sg()
1133 desc->lli.ctrla = ctrla in atc_prep_slave_sg()
1136 desc->lli.ctrlb = ctrlb; in atc_prep_slave_sg()
1137 desc->len = len; in atc_prep_slave_sg()
1144 reg_width = convert_buswidth(sconfig->src_addr_width); in atc_prep_slave_sg()
1149 | ATC_SIF(atchan->per_if) | ATC_DIF(atchan->mem_if); in atc_prep_slave_sg()
1151 reg = sconfig->src_addr; in atc_prep_slave_sg()
1172 desc->lli.saddr = reg; in atc_prep_slave_sg()
1173 desc->lli.daddr = mem; in atc_prep_slave_sg()
1174 desc->lli.ctrla = ctrla in atc_prep_slave_sg()
1177 desc->lli.ctrlb = ctrlb; in atc_prep_slave_sg()
1178 desc->len = len; in atc_prep_slave_sg()
1188 /* set end-of-link on the last link descriptor of the list */ in atc_prep_slave_sg()
1192 first->txd.cookie = -EBUSY; in atc_prep_slave_sg()
1193 first->total_len = total_len; in atc_prep_slave_sg()
1196 first->txd.flags = flags; /* client is in control of this ack */ in atc_prep_slave_sg()
1198 return &first->txd; in atc_prep_slave_sg()
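From a client's point of view the slave path above is reached through the generic dmaengine calls, assuming the channel was obtained via the at91 filter/xlate path shown later, which attaches the chan->private data that atc_config() requires. A hypothetical consumer sketch (fifo_phys, sgl and sg_len are placeholders):

        struct dma_slave_config cfg = {
                .direction      = DMA_MEM_TO_DEV,
                .dst_addr       = fifo_phys,    /* peripheral FIFO */
                .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
                .dst_maxburst   = 16,
        };
        struct dma_async_tx_descriptor *txd;

        dmaengine_slave_config(chan, &cfg);     /* lands in atc_config() */
        txd = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_MEM_TO_DEV,
                                      DMA_PREP_INTERRUPT);
        if (txd) {
                dmaengine_submit(txd);          /* atc_tx_submit() */
                dma_async_issue_pending(chan);  /* atc_issue_pending() */
        }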
1217 if (unlikely(period_len & ((1 << reg_width) - 1))) in atc_dma_cyclic_check_values()
1219 if (unlikely(buf_addr & ((1 << reg_width) - 1))) in atc_dma_cyclic_check_values()
1225 return -EINVAL; in atc_dma_cyclic_check_values()
1229 * atc_dma_cyclic_fill_desc - Fill one period descriptor
1238 struct dma_slave_config *sconfig = &atchan->dma_sconfig; in atc_dma_cyclic_fill_desc()
1242 ctrla = ATC_SCSIZE(sconfig->src_maxburst) in atc_dma_cyclic_fill_desc()
1243 | ATC_DCSIZE(sconfig->dst_maxburst) in atc_dma_cyclic_fill_desc()
1250 desc->lli.saddr = buf_addr + (period_len * period_index); in atc_dma_cyclic_fill_desc()
1251 desc->lli.daddr = sconfig->dst_addr; in atc_dma_cyclic_fill_desc()
1252 desc->lli.ctrla = ctrla; in atc_dma_cyclic_fill_desc()
1253 desc->lli.ctrlb = ATC_DST_ADDR_MODE_FIXED in atc_dma_cyclic_fill_desc()
1256 | ATC_SIF(atchan->mem_if) in atc_dma_cyclic_fill_desc()
1257 | ATC_DIF(atchan->per_if); in atc_dma_cyclic_fill_desc()
1258 desc->len = period_len; in atc_dma_cyclic_fill_desc()
1262 desc->lli.saddr = sconfig->src_addr; in atc_dma_cyclic_fill_desc()
1263 desc->lli.daddr = buf_addr + (period_len * period_index); in atc_dma_cyclic_fill_desc()
1264 desc->lli.ctrla = ctrla; in atc_dma_cyclic_fill_desc()
1265 desc->lli.ctrlb = ATC_DST_ADDR_MODE_INCR in atc_dma_cyclic_fill_desc()
1268 | ATC_SIF(atchan->per_if) in atc_dma_cyclic_fill_desc()
1269 | ATC_DIF(atchan->mem_if); in atc_dma_cyclic_fill_desc()
1270 desc->len = period_len; in atc_dma_cyclic_fill_desc()
1274 return -EINVAL; in atc_dma_cyclic_fill_desc()
1281 * atc_prep_dma_cyclic - prepare the cyclic DMA transfer
1295 struct at_dma_slave *atslave = chan->private; in atc_prep_dma_cyclic()
1296 struct dma_slave_config *sconfig = &atchan->dma_sconfig; in atc_prep_dma_cyclic()
1304 dev_vdbg(chan2dev(chan), "prep_dma_cyclic: %s buf@%pad - %d (%d/%d)\n", in atc_prep_dma_cyclic()
1314 was_cyclic = test_and_set_bit(ATC_IS_CYCLIC, &atchan->status); in atc_prep_dma_cyclic()
1323 if (sconfig->direction == DMA_MEM_TO_DEV) in atc_prep_dma_cyclic()
1324 reg_width = convert_buswidth(sconfig->dst_addr_width); in atc_prep_dma_cyclic()
1326 reg_width = convert_buswidth(sconfig->src_addr_width); in atc_prep_dma_cyclic()
1348 prev->lli.dscr = first->txd.phys; in atc_prep_dma_cyclic()
1351 first->txd.cookie = -EBUSY; in atc_prep_dma_cyclic()
1352 first->total_len = buf_len; in atc_prep_dma_cyclic()
1354 return &first->txd; in atc_prep_dma_cyclic()
1360 clear_bit(ATC_IS_CYCLIC, &atchan->status); in atc_prep_dma_cyclic()
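Putting the cyclic fragments together: after validating that period length and buffer address are aligned to the register width (the checks at source lines 1217 and 1219), one descriptor is filled per period and the last lli is pointed back at the first, so the controller loops until terminate_all. A sketch, with periods assumed to be buf_len / period_len:

        for (i = 0; i < periods; i++) {
                desc = atc_desc_get(atchan);
                if (!desc)
                        goto err_desc_get;

                if (atc_dma_cyclic_fill_desc(chan, desc, i, buf_addr,
                                             reg_width, period_len,
                                             direction))
                        goto err_desc_get;

                atc_desc_chain(&first, &prev, desc);
        }

        /* close the ring: the engine never sees an end-of-chain */
        prev->lli.dscr = first->txd.phys;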
1372 if (!chan->private) in atc_config()
1373 return -EINVAL; in atc_config()
1375 memcpy(&atchan->dma_sconfig, sconfig, sizeof(*sconfig)); in atc_config()
1377 convert_burst(&atchan->dma_sconfig.src_maxburst); in atc_config()
1378 convert_burst(&atchan->dma_sconfig.dst_maxburst); in atc_config()
1386 struct at_dma *atdma = to_at_dma(chan->device); in atc_pause()
1387 int chan_id = atchan->chan_common.chan_id; in atc_pause()
1394 spin_lock_irqsave(&atchan->lock, flags); in atc_pause()
1397 set_bit(ATC_IS_PAUSED, &atchan->status); in atc_pause()
1399 spin_unlock_irqrestore(&atchan->lock, flags); in atc_pause()
1407 struct at_dma *atdma = to_at_dma(chan->device); in atc_resume()
1408 int chan_id = atchan->chan_common.chan_id; in atc_resume()
1418 spin_lock_irqsave(&atchan->lock, flags); in atc_resume()
1421 clear_bit(ATC_IS_PAUSED, &atchan->status); in atc_resume()
1423 spin_unlock_irqrestore(&atchan->lock, flags); in atc_resume()
1431 struct at_dma *atdma = to_at_dma(chan->device); in atc_terminate_all()
1432 int chan_id = atchan->chan_common.chan_id; in atc_terminate_all()
1446 spin_lock_irqsave(&atchan->lock, flags); in atc_terminate_all()
1449 dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask); in atc_terminate_all()
1452 while (dma_readl(atdma, CHSR) & atchan->mask) in atc_terminate_all()
1456 list_splice_init(&atchan->queue, &list); in atc_terminate_all()
1457 list_splice_init(&atchan->active_list, &list); in atc_terminate_all()
1463 clear_bit(ATC_IS_PAUSED, &atchan->status); in atc_terminate_all()
1465 clear_bit(ATC_IS_CYCLIC, &atchan->status); in atc_terminate_all()
1467 spin_unlock_irqrestore(&atchan->lock, flags); in atc_terminate_all()
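Terminate is the one place the driver busy-waits on the hardware: the channel is disabled through CHDR and CHSR is polled until the controller confirms, so descriptors can be recycled safely. Condensed from the fragments, with cpu_relax() in the poll loop assumed:

        spin_lock_irqsave(&atchan->lock, flags);

        /* disable the channel and its request lines */
        dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask);

        /* wait until the hardware acknowledges the stop */
        while (dma_readl(atdma, CHSR) & atchan->mask)
                cpu_relax();

        /* everything pending or active goes back to the free list */
        list_splice_init(&atchan->queue, &list);
        list_splice_init(&atchan->active_list, &list);

        clear_bit(ATC_IS_PAUSED, &atchan->status);
        clear_bit(ATC_IS_CYCLIC, &atchan->status);

        spin_unlock_irqrestore(&atchan->lock, flags);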
1473 * atc_tx_status - poll for transaction completion
1480 * the status of multiple cookies without re-checking hardware state.
1502 spin_lock_irqsave(&atchan->lock, flags); in atc_tx_status()
1507 spin_unlock_irqrestore(&atchan->lock, flags); in atc_tx_status()
1523 * atc_issue_pending - try to finish work
1537 spin_lock_irqsave(&atchan->lock, flags); in atc_issue_pending()
1539 spin_unlock_irqrestore(&atchan->lock, flags); in atc_issue_pending()
1543 * atc_alloc_chan_resources - allocate resources for DMA channel
1547 * return - the number of allocated descriptors
1552 struct at_dma *atdma = to_at_dma(chan->device); in atc_alloc_chan_resources()
1565 return -EIO; in atc_alloc_chan_resources()
1570 atslave = chan->private; in atc_alloc_chan_resources()
1573 * We need controller-specific data to set up slave in atc_alloc_chan_resources()
1576 BUG_ON(!atslave->dma_dev || atslave->dma_dev != atdma->dma_common.dev); in atc_alloc_chan_resources()
1579 if (atslave->cfg) in atc_alloc_chan_resources()
1580 cfg = atslave->cfg; in atc_alloc_chan_resources()
1585 if (!list_empty(&atchan->free_list)) in atc_alloc_chan_resources()
1586 return atchan->descs_allocated; in atc_alloc_chan_resources()
1592 dev_err(atdma->dma_common.dev, in atc_alloc_chan_resources()
1596 list_add_tail(&desc->desc_node, &tmp_list); in atc_alloc_chan_resources()
1599 spin_lock_irqsave(&atchan->lock, flags); in atc_alloc_chan_resources()
1600 atchan->descs_allocated = i; in atc_alloc_chan_resources()
1601 list_splice(&tmp_list, &atchan->free_list); in atc_alloc_chan_resources()
1603 spin_unlock_irqrestore(&atchan->lock, flags); in atc_alloc_chan_resources()
1610 atchan->descs_allocated); in atc_alloc_chan_resources()
1612 return atchan->descs_allocated; in atc_alloc_chan_resources()
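Channel setup pre-populates the free list in one burst. A sketch of the loop around the fragments, where init_nr_desc_per_channel names the module parameter the full source uses for the bound and the dma_cookie_init() call is assumed:

        for (i = 0; i < init_nr_desc_per_channel; i++) {
                desc = atc_alloc_descriptor(chan, GFP_KERNEL);
                if (!desc)
                        break;  /* keep whatever we managed to get */
                list_add_tail(&desc->desc_node, &tmp_list);
        }

        spin_lock_irqsave(&atchan->lock, flags);
        atchan->descs_allocated = i;
        list_splice(&tmp_list, &atchan->free_list);
        dma_cookie_init(chan);
        spin_unlock_irqrestore(&atchan->lock, flags);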
1616 * atc_free_chan_resources - free all channel resources
1622 struct at_dma *atdma = to_at_dma(chan->device); in atc_free_chan_resources()
1627 atchan->descs_allocated); in atc_free_chan_resources()
1630 BUG_ON(!list_empty(&atchan->active_list)); in atc_free_chan_resources()
1631 BUG_ON(!list_empty(&atchan->queue)); in atc_free_chan_resources()
1634 list_for_each_entry_safe(desc, _desc, &atchan->free_list, desc_node) { in atc_free_chan_resources()
1636 list_del(&desc->desc_node); in atc_free_chan_resources()
1638 dma_pool_free(atdma->dma_desc_pool, desc, desc->txd.phys); in atc_free_chan_resources()
1640 list_splice_init(&atchan->free_list, &list); in atc_free_chan_resources()
1641 atchan->descs_allocated = 0; in atc_free_chan_resources()
1642 atchan->status = 0; in atc_free_chan_resources()
1647 kfree(chan->private); in atc_free_chan_resources()
1648 chan->private = NULL; in atc_free_chan_resources()
1658 if (atslave->dma_dev == chan->device->dev) { in at_dma_filter()
1659 chan->private = atslave; in at_dma_filter()
1676 if (dma_spec->args_count != 2) in at_dma_xlate()
1679 dmac_pdev = of_find_device_by_node(dma_spec->np); in at_dma_xlate()
1690 atslave->cfg = ATC_DST_H2SEL_HW | ATC_SRC_H2SEL_HW; in at_dma_xlate()
1695 per_id = dma_spec->args[1] & AT91_DMA_CFG_PER_ID_MASK; in at_dma_xlate()
1696 atslave->cfg |= ATC_DST_PER_MSB(per_id) | ATC_DST_PER(per_id) in at_dma_xlate()
1703 switch (dma_spec->args[1] & AT91_DMA_CFG_FIFOCFG_MASK) { in at_dma_xlate()
1705 atslave->cfg |= ATC_FIFOCFG_LARGESTBURST; in at_dma_xlate()
1708 atslave->cfg |= ATC_FIFOCFG_ENOUGHSPACE; in at_dma_xlate()
1712 atslave->cfg |= ATC_FIFOCFG_HALFFIFO; in at_dma_xlate()
1714 atslave->dma_dev = &dmac_pdev->dev; in at_dma_xlate()
1721 atchan->per_if = dma_spec->args[0] & 0xff; in at_dma_xlate()
1722 atchan->mem_if = (dma_spec->args[0] >> 16) & 0xff; in at_dma_xlate()
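The xlate fragments define a two-cell binding: cell 0 packs the peripheral AHB interface in bits 7:0 and the memory interface in bits 23:16, while cell 1 carries the peripheral request ID plus the FIFO configuration flags. A worked decode with purely illustrative cell values:

        /* hypothetical cells: args[0] = 0x00020001, args[1] = 9 */
        u32 arg0 = 0x00020001, arg1 = 9;

        u8  per_if = arg0 & 0xff;               /* = 1 (peripheral side) */
        u8  mem_if = (arg0 >> 16) & 0xff;       /* = 2 (memory side)     */
        u32 per_id = arg1 & AT91_DMA_CFG_PER_ID_MASK;   /* = 9 */
        /* FIFOCFG bits left at zero select the HALFFIFO default above */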
1734 /*-- Module Management -----------------------------------------------*/
1736 /* cap_mask is a multi-u32 bitfield, fill it with proper C code. */
1747 .compatible = "atmel,at91sam9rl-dma",
1750 .compatible = "atmel,at91sam9g45-dma",
1775 if (pdev->dev.of_node) { in at_dma_get_driver_data()
1777 match = of_match_node(atmel_dma_dt_ids, pdev->dev.of_node); in at_dma_get_driver_data()
1780 return match->data; in at_dma_get_driver_data()
1783 platform_get_device_id(pdev)->driver_data; in at_dma_get_driver_data()
1787 * at_dma_off - disable DMA controller
1795 dma_writel(atdma, EBCIDR, -1L); in at_dma_off()
1798 while (dma_readl(atdma, CHSR) & atdma->all_chan_mask) in at_dma_off()
1824 return -ENODEV; in at_dma_probe()
1828 return -EINVAL; in at_dma_probe()
1835 size += plat_dat->nr_channels * sizeof(struct at_dma_chan); in at_dma_probe()
1838 return -ENOMEM; in at_dma_probe()
1841 atdma->dma_common.cap_mask = plat_dat->cap_mask; in at_dma_probe()
1842 atdma->all_chan_mask = (1 << plat_dat->nr_channels) - 1; in at_dma_probe()
1845 if (!request_mem_region(io->start, size, pdev->dev.driver->name)) { in at_dma_probe()
1846 err = -EBUSY; in at_dma_probe()
1850 atdma->regs = ioremap(io->start, size); in at_dma_probe()
1851 if (!atdma->regs) { in at_dma_probe()
1852 err = -ENOMEM; in at_dma_probe()
1856 atdma->clk = clk_get(&pdev->dev, "dma_clk"); in at_dma_probe()
1857 if (IS_ERR(atdma->clk)) { in at_dma_probe()
1858 err = PTR_ERR(atdma->clk); in at_dma_probe()
1861 err = clk_prepare_enable(atdma->clk); in at_dma_probe()
1875 atdma->dma_desc_pool = dma_pool_create("at_hdmac_desc_pool", in at_dma_probe()
1876 &pdev->dev, sizeof(struct at_desc), in at_dma_probe()
1878 if (!atdma->dma_desc_pool) { in at_dma_probe()
1879 dev_err(&pdev->dev, "No memory for descriptors dma pool\n"); in at_dma_probe()
1880 err = -ENOMEM; in at_dma_probe()
1885 atdma->memset_pool = dma_pool_create("at_hdmac_memset_pool", in at_dma_probe()
1886 &pdev->dev, sizeof(int), 4, 0); in at_dma_probe()
1887 if (!atdma->memset_pool) { in at_dma_probe()
1888 dev_err(&pdev->dev, "No memory for memset dma pool\n"); in at_dma_probe()
1889 err = -ENOMEM; in at_dma_probe()
1898 INIT_LIST_HEAD(&atdma->dma_common.channels); in at_dma_probe()
1899 for (i = 0; i < plat_dat->nr_channels; i++) { in at_dma_probe()
1900 struct at_dma_chan *atchan = &atdma->chan[i]; in at_dma_probe()
1902 atchan->mem_if = AT_DMA_MEM_IF; in at_dma_probe()
1903 atchan->per_if = AT_DMA_PER_IF; in at_dma_probe()
1904 atchan->chan_common.device = &atdma->dma_common; in at_dma_probe()
1905 dma_cookie_init(&atchan->chan_common); in at_dma_probe()
1906 list_add_tail(&atchan->chan_common.device_node, in at_dma_probe()
1907 &atdma->dma_common.channels); in at_dma_probe()
1909 atchan->ch_regs = atdma->regs + ch_regs(i); in at_dma_probe()
1910 spin_lock_init(&atchan->lock); in at_dma_probe()
1911 atchan->mask = 1 << i; in at_dma_probe()
1913 INIT_LIST_HEAD(&atchan->active_list); in at_dma_probe()
1914 INIT_LIST_HEAD(&atchan->queue); in at_dma_probe()
1915 INIT_LIST_HEAD(&atchan->free_list); in at_dma_probe()
1917 tasklet_init(&atchan->tasklet, atc_tasklet, in at_dma_probe()
1923 atdma->dma_common.device_alloc_chan_resources = atc_alloc_chan_resources; in at_dma_probe()
1924 atdma->dma_common.device_free_chan_resources = atc_free_chan_resources; in at_dma_probe()
1925 atdma->dma_common.device_tx_status = atc_tx_status; in at_dma_probe()
1926 atdma->dma_common.device_issue_pending = atc_issue_pending; in at_dma_probe()
1927 atdma->dma_common.dev = &pdev->dev; in at_dma_probe()
1929 /* set prep routines based on capability */ in at_dma_probe()
1930 if (dma_has_cap(DMA_INTERLEAVE, atdma->dma_common.cap_mask)) in at_dma_probe()
1931 atdma->dma_common.device_prep_interleaved_dma = atc_prep_dma_interleaved; in at_dma_probe()
1933 if (dma_has_cap(DMA_MEMCPY, atdma->dma_common.cap_mask)) in at_dma_probe()
1934 atdma->dma_common.device_prep_dma_memcpy = atc_prep_dma_memcpy; in at_dma_probe()
1936 if (dma_has_cap(DMA_MEMSET, atdma->dma_common.cap_mask)) { in at_dma_probe()
1937 atdma->dma_common.device_prep_dma_memset = atc_prep_dma_memset; in at_dma_probe()
1938 atdma->dma_common.device_prep_dma_memset_sg = atc_prep_dma_memset_sg; in at_dma_probe()
1939 atdma->dma_common.fill_align = DMAENGINE_ALIGN_4_BYTES; in at_dma_probe()
1942 if (dma_has_cap(DMA_SLAVE, atdma->dma_common.cap_mask)) { in at_dma_probe()
1943 atdma->dma_common.device_prep_slave_sg = atc_prep_slave_sg; in at_dma_probe()
1945 dma_cap_set(DMA_CYCLIC, atdma->dma_common.cap_mask); in at_dma_probe()
1946 atdma->dma_common.device_prep_dma_cyclic = atc_prep_dma_cyclic; in at_dma_probe()
1947 atdma->dma_common.device_config = atc_config; in at_dma_probe()
1948 atdma->dma_common.device_pause = atc_pause; in at_dma_probe()
1949 atdma->dma_common.device_resume = atc_resume; in at_dma_probe()
1950 atdma->dma_common.device_terminate_all = atc_terminate_all; in at_dma_probe()
1951 atdma->dma_common.src_addr_widths = ATC_DMA_BUSWIDTHS; in at_dma_probe()
1952 atdma->dma_common.dst_addr_widths = ATC_DMA_BUSWIDTHS; in at_dma_probe()
1953 atdma->dma_common.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); in at_dma_probe()
1954 atdma->dma_common.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in at_dma_probe()
1959 dev_info(&pdev->dev, "Atmel AHB DMA Controller ( %s%s%s), %d channels\n", in at_dma_probe()
1960 dma_has_cap(DMA_MEMCPY, atdma->dma_common.cap_mask) ? "cpy " : "", in at_dma_probe()
1961 dma_has_cap(DMA_MEMSET, atdma->dma_common.cap_mask) ? "set " : "", in at_dma_probe()
1962 dma_has_cap(DMA_SLAVE, atdma->dma_common.cap_mask) ? "slave " : "", in at_dma_probe()
1963 plat_dat->nr_channels); in at_dma_probe()
1965 dma_async_device_register(&atdma->dma_common); in at_dma_probe()
1972 if (pdev->dev.of_node) { in at_dma_probe()
1973 err = of_dma_controller_register(pdev->dev.of_node, in at_dma_probe()
1976 dev_err(&pdev->dev, "could not register of_dma_controller\n"); in at_dma_probe()
1984 dma_async_device_unregister(&atdma->dma_common); in at_dma_probe()
1985 dma_pool_destroy(atdma->memset_pool); in at_dma_probe()
1987 dma_pool_destroy(atdma->dma_desc_pool); in at_dma_probe()
1991 clk_disable_unprepare(atdma->clk); in at_dma_probe()
1993 clk_put(atdma->clk); in at_dma_probe()
1995 iounmap(atdma->regs); in at_dma_probe()
1996 atdma->regs = NULL; in at_dma_probe()
1998 release_mem_region(io->start, size); in at_dma_probe()
2011 if (pdev->dev.of_node) in at_dma_remove()
2012 of_dma_controller_free(pdev->dev.of_node); in at_dma_remove()
2013 dma_async_device_unregister(&atdma->dma_common); in at_dma_remove()
2015 dma_pool_destroy(atdma->memset_pool); in at_dma_remove()
2016 dma_pool_destroy(atdma->dma_desc_pool); in at_dma_remove()
2019 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels, in at_dma_remove()
2024 atc_disable_chan_irq(atdma, chan->chan_id); in at_dma_remove()
2026 tasklet_kill(&atchan->tasklet); in at_dma_remove()
2027 list_del(&chan->device_node); in at_dma_remove()
2030 clk_disable_unprepare(atdma->clk); in at_dma_remove()
2031 clk_put(atdma->clk); in at_dma_remove()
2033 iounmap(atdma->regs); in at_dma_remove()
2034 atdma->regs = NULL; in at_dma_remove()
2037 release_mem_region(io->start, resource_size(io)); in at_dma_remove()
2049 clk_disable_unprepare(atdma->clk); in at_dma_shutdown()
2057 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels, in at_dma_prepare()
2062 return -EAGAIN; in at_dma_prepare()
2069 struct dma_chan *chan = &atchan->chan_common; in atc_suspend_cyclic()
2081 atchan->save_dscr = channel_readl(atchan, DSCR); in atc_suspend_cyclic()
2092 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels, in at_dma_suspend_noirq()
2098 atchan->save_cfg = channel_readl(atchan, CFG); in at_dma_suspend_noirq()
2100 atdma->save_imr = dma_readl(atdma, EBCIMR); in at_dma_suspend_noirq()
2104 clk_disable_unprepare(atdma->clk); in at_dma_suspend_noirq()
2110 struct at_dma *atdma = to_at_dma(atchan->chan_common.device); in atc_resume_cyclic()
2118 channel_writel(atchan, DSCR, atchan->save_dscr); in atc_resume_cyclic()
2119 dma_writel(atdma, CHER, atchan->mask); in atc_resume_cyclic()
2133 clk_prepare_enable(atdma->clk); in at_dma_resume_noirq()
2141 dma_writel(atdma, EBCIER, atdma->save_imr); in at_dma_resume_noirq()
2142 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels, in at_dma_resume_noirq()
2146 channel_writel(atchan, CFG, atchan->save_cfg); in at_dma_resume_noirq()