Lines matching refs: desc. Each entry gives the source line number in the ep93xx DMA driver, the matching code, and the containing function (with "argument" or "local" indicating how desc is declared there).
238 struct ep93xx_dma_desc *desc) in ep93xx_dma_set_active() argument
242 list_add_tail(&desc->node, &edmac->active); in ep93xx_dma_set_active()
245 while (!list_empty(&desc->tx_list)) { in ep93xx_dma_set_active()
246 struct ep93xx_dma_desc *d = list_first_entry(&desc->tx_list, in ep93xx_dma_set_active()
255 d->txd.callback = desc->txd.callback; in ep93xx_dma_set_active()
256 d->txd.callback_param = desc->txd.callback_param; in ep93xx_dma_set_active()
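
The ep93xx_dma_set_active() hits above show the driver flattening a descriptor chain: the head descriptor goes onto the channel's active list and every sub-descriptor hanging off its tx_list follows it, inheriting the head's callback and callback_param so completion handling never has to walk back to the chain head. Below is a minimal sketch of that pattern; the demo_* types, fields, and pool layout are hypothetical stand-ins (reused by the later sketches in this listing), not the driver's real structures.

#include <linux/list.h>
#include <linux/spinlock.h>
#include <linux/interrupt.h>
#include <linux/dmaengine.h>

/* hypothetical illustration types, not the driver's own */
struct demo_desc {
	struct list_head node;			/* link on free/active/queue lists */
	struct list_head tx_list;		/* chained sub-descriptors */
	struct dma_async_tx_descriptor txd;	/* generic dmaengine descriptor */
	dma_addr_t src_addr;
	dma_addr_t dst_addr;
	size_t size;
	bool complete;
};

struct demo_chan {
	struct dma_chan chan;			/* generic dmaengine channel */
	spinlock_t lock;
	struct tasklet_struct tasklet;
	struct list_head active;		/* descriptors being transferred */
	struct list_head queue;			/* submitted, not yet active */
	struct list_head free_list;		/* reusable descriptors */
};

static void demo_set_active(struct demo_chan *chan, struct demo_desc *desc)
{
	list_add_tail(&desc->node, &chan->active);

	/* flatten the chain: move each sub-descriptor in behind the head */
	while (!list_empty(&desc->tx_list)) {
		struct demo_desc *d = list_first_entry(&desc->tx_list,
						       struct demo_desc, node);

		/* every sub-descriptor inherits the head's completion callback */
		d->txd.callback = desc->txd.callback;
		d->txd.callback_param = desc->txd.callback_param;

		list_move_tail(&d->node, &chan->active);
	}
}
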
284 struct ep93xx_dma_desc *desc; in ep93xx_dma_advance_active() local
291 desc = ep93xx_dma_get_active(edmac); in ep93xx_dma_advance_active()
292 if (!desc) in ep93xx_dma_advance_active()
299 return !desc->txd.cookie; in ep93xx_dma_advance_active()
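
ep93xx_dma_advance_active() at line 299 returns !desc->txd.cookie: only the head of a chain is assigned a cookie at submit time, so once the channel rotates its active list and the front descriptor again carries a nonzero cookie, the chain has gone all the way around. A short sketch of that check, reusing the hypothetical demo_* types from the previous example:

static bool demo_advance_active(struct demo_chan *chan)
{
	struct demo_desc *desc;

	/* rotate: the descriptor that just finished goes to the tail */
	list_rotate_left(&chan->active);

	desc = list_first_entry_or_null(&chan->active, struct demo_desc, node);
	if (!desc)
		return false;

	/*
	 * Sub-descriptors keep cookie == 0; a nonzero cookie means we are
	 * back at the chain head, i.e. the whole chain has completed.
	 */
	return !desc->txd.cookie;
}
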
362 struct ep93xx_dma_desc *desc; in m2p_fill_desc() local
365 desc = ep93xx_dma_get_active(edmac); in m2p_fill_desc()
366 if (!desc) { in m2p_fill_desc()
372 bus_addr = desc->src_addr; in m2p_fill_desc()
374 bus_addr = desc->dst_addr; in m2p_fill_desc()
377 writel(desc->size, edmac->regs + M2P_MAXCNT0); in m2p_fill_desc()
380 writel(desc->size, edmac->regs + M2P_MAXCNT1); in m2p_fill_desc()
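
The m2p_fill_desc() hits show the M2P (memory to/from peripheral) hardware being double-banked: the descriptor's memory-side bus address and size are written into either register bank 0 or bank 1, and only one of src_addr/dst_addr is used because the peripheral side has a fixed address. A hedged sketch of the bank selection follows; the register names and offsets here are invented for illustration, not the M2P_* offsets of the real hardware.

#include <linux/io.h>

#define DEMO_M2P_BASE0		0x08	/* hypothetical offsets */
#define DEMO_M2P_MAXCNT0	0x10
#define DEMO_M2P_BASE1		0x18
#define DEMO_M2P_MAXCNT1	0x20

static void demo_m2p_fill_desc(struct demo_chan *chan, void __iomem *regs,
			       enum dma_transfer_direction dir, bool bank1)
{
	struct demo_desc *desc = list_first_entry_or_null(&chan->active,
							  struct demo_desc, node);
	dma_addr_t bus_addr;

	if (!desc)
		return;

	/* only the memory-side address is programmed; the peripheral is fixed */
	bus_addr = (dir == DMA_MEM_TO_DEV) ? desc->src_addr : desc->dst_addr;

	if (!bank1) {
		writel(bus_addr, regs + DEMO_M2P_BASE0);
		writel(desc->size, regs + DEMO_M2P_MAXCNT0);
	} else {
		writel(bus_addr, regs + DEMO_M2P_BASE1);
		writel(desc->size, regs + DEMO_M2P_MAXCNT1);
	}
}
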
408 struct ep93xx_dma_desc *desc = ep93xx_dma_get_active(edmac); in m2p_hw_interrupt() local
427 desc->txd.cookie, desc->src_addr, desc->dst_addr, in m2p_hw_interrupt()
428 desc->size); in m2p_hw_interrupt()
525 struct ep93xx_dma_desc *desc; in m2m_fill_desc() local
527 desc = ep93xx_dma_get_active(edmac); in m2m_fill_desc()
528 if (!desc) { in m2m_fill_desc()
534 writel(desc->src_addr, edmac->regs + M2M_SAR_BASE0); in m2m_fill_desc()
535 writel(desc->dst_addr, edmac->regs + M2M_DAR_BASE0); in m2m_fill_desc()
536 writel(desc->size, edmac->regs + M2M_BCR0); in m2m_fill_desc()
538 writel(desc->src_addr, edmac->regs + M2M_SAR_BASE1); in m2m_fill_desc()
539 writel(desc->dst_addr, edmac->regs + M2M_DAR_BASE1); in m2m_fill_desc()
540 writel(desc->size, edmac->regs + M2M_BCR1); in m2m_fill_desc()
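
m2m_fill_desc() follows the same double-banked scheme, but because both ends are memory it programs a source address, a destination address, and a byte count (the SAR/DAR/BCR registers in the listing) into whichever bank is free. A compact sketch, again with invented offsets:

#define DEMO_M2M_SAR0		0x00	/* hypothetical offsets; bank 1 sits 0x10 higher */
#define DEMO_M2M_DAR0		0x04
#define DEMO_M2M_BCR0		0x08
#define DEMO_M2M_BANK_STRIDE	0x10

static void demo_m2m_fill_desc(struct demo_desc *desc, void __iomem *regs,
			       bool bank1)
{
	unsigned int off = bank1 ? DEMO_M2M_BANK_STRIDE : 0;

	writel(desc->src_addr, regs + DEMO_M2M_SAR0 + off);	/* source */
	writel(desc->dst_addr, regs + DEMO_M2M_DAR0 + off);	/* destination */
	writel(desc->size, regs + DEMO_M2M_BCR0 + off);		/* byte count */
}
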
602 struct ep93xx_dma_desc *desc; in m2m_hw_interrupt() local
617 desc = ep93xx_dma_get_active(edmac); in m2m_hw_interrupt()
618 last_done = !desc || desc->txd.cookie; in m2m_hw_interrupt()
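
At line 618 the M2M interrupt handler decides whether the whole chain is finished: after advancing, either no active descriptor remains or the front descriptor carries a cookie (the chain head), in which case the completion tasklet can run. Roughly, in terms of the demo_* sketch types:

static bool demo_chain_done(struct demo_chan *chan)
{
	struct demo_desc *desc;

	desc = list_first_entry_or_null(&chan->active, struct demo_desc, node);

	/* nothing left, or we are back at the cookie-carrying chain head */
	return !desc || desc->txd.cookie;
}
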
676 struct ep93xx_dma_desc *desc, *_desc; in ep93xx_dma_desc_get() local
681 list_for_each_entry_safe(desc, _desc, &edmac->free_list, node) { in ep93xx_dma_desc_get()
682 if (async_tx_test_ack(&desc->txd)) { in ep93xx_dma_desc_get()
683 list_del_init(&desc->node); in ep93xx_dma_desc_get()
686 desc->src_addr = 0; in ep93xx_dma_desc_get()
687 desc->dst_addr = 0; in ep93xx_dma_desc_get()
688 desc->size = 0; in ep93xx_dma_desc_get()
689 desc->complete = false; in ep93xx_dma_desc_get()
690 desc->txd.cookie = 0; in ep93xx_dma_desc_get()
691 desc->txd.callback = NULL; in ep93xx_dma_desc_get()
692 desc->txd.callback_param = NULL; in ep93xx_dma_desc_get()
694 ret = desc; in ep93xx_dma_desc_get()
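
ep93xx_dma_desc_get() recycles descriptors from the channel's free list, but only ones whose previous user has acknowledged them (async_tx_test_ack()); the chosen descriptor is unlinked and its per-transfer fields and txd callback state are cleared before reuse. A sketch under the same hypothetical types (the channel lock held by the real driver is omitted here for brevity):

static struct demo_desc *demo_desc_get(struct demo_chan *chan)
{
	struct demo_desc *desc, *tmp;

	list_for_each_entry_safe(desc, tmp, &chan->free_list, node) {
		/* skip descriptors the client has not acked yet */
		if (!async_tx_test_ack(&desc->txd))
			continue;

		list_del_init(&desc->node);

		/* reset per-transfer state before handing it out again */
		desc->src_addr = 0;
		desc->dst_addr = 0;
		desc->size = 0;
		desc->complete = false;
		desc->txd.cookie = 0;
		desc->txd.callback = NULL;
		desc->txd.callback_param = NULL;

		return desc;
	}

	return NULL;
}
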
703 struct ep93xx_dma_desc *desc) in ep93xx_dma_desc_put() argument
705 if (desc) { in ep93xx_dma_desc_put()
709 list_splice_init(&desc->tx_list, &edmac->free_list); in ep93xx_dma_desc_put()
710 list_add(&desc->node, &edmac->free_list); in ep93xx_dma_desc_put()
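
Returning a descriptor (lines 709-710) gives back the whole chain in one go: any sub-descriptors still hanging off tx_list are spliced onto the free list and the head itself is added after them. In sketch form:

static void demo_desc_put(struct demo_chan *chan, struct demo_desc *desc)
{
	if (!desc)
		return;

	/* return any still-chained sub-descriptors along with the head */
	list_splice_init(&desc->tx_list, &chan->free_list);
	list_add(&desc->node, &chan->free_list);
}
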
748 struct ep93xx_dma_desc *desc, *d; in ep93xx_dma_tasklet() local
759 desc = ep93xx_dma_get_active(edmac); in ep93xx_dma_tasklet()
760 if (desc) { in ep93xx_dma_tasklet()
761 if (desc->complete) { in ep93xx_dma_tasklet()
764 dma_cookie_complete(&desc->txd); in ep93xx_dma_tasklet()
767 dmaengine_desc_get_callback(&desc->txd, &cb); in ep93xx_dma_tasklet()
775 list_for_each_entry_safe(desc, d, &list, node) { in ep93xx_dma_tasklet()
776 dma_descriptor_unmap(&desc->txd); in ep93xx_dma_tasklet()
777 ep93xx_dma_desc_put(edmac, desc); in ep93xx_dma_tasklet()
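
The tasklet hits show the completion path: if the active descriptor is marked complete, its cookie is completed, the client callback is fetched under the lock and invoked outside it, and every harvested descriptor is unmapped and recycled. A condensed and simplified sketch; dma_cookie_complete(), dmaengine_desc_get_callback(), dmaengine_desc_callback_invoke() and dma_descriptor_unmap() are dmaengine-core helpers (the first three live in the core's private drivers/dma/dmaengine.h header), while the list handling is reduced compared with the real driver.

#include "../dmaengine.h"	/* dmaengine core private helpers */

static void demo_tasklet(struct tasklet_struct *t)
{
	struct demo_chan *chan = from_tasklet(chan, t, tasklet);
	struct dmaengine_desc_callback cb = { };
	struct demo_desc *desc, *d;
	LIST_HEAD(list);

	spin_lock_irq(&chan->lock);
	desc = list_first_entry_or_null(&chan->active, struct demo_desc, node);
	if (desc && desc->complete) {
		dma_cookie_complete(&desc->txd);	/* mark the cookie done */
		list_splice_init(&chan->active, &list);	/* harvest the chain */
		dmaengine_desc_get_callback(&desc->txd, &cb);
	}
	spin_unlock_irq(&chan->lock);

	/* recycle the finished descriptors, then run the client callback */
	list_for_each_entry_safe(desc, d, &list, node) {
		dma_descriptor_unmap(&desc->txd);
		demo_desc_put(chan, desc);
	}

	dmaengine_desc_callback_invoke(&cb, NULL);
}
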
786 struct ep93xx_dma_desc *desc; in ep93xx_dma_interrupt() local
791 desc = ep93xx_dma_get_active(edmac); in ep93xx_dma_interrupt()
792 if (!desc) { in ep93xx_dma_interrupt()
801 desc->complete = true; in ep93xx_dma_interrupt()
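
In the interrupt handler, the hit at line 801 is the hand-off point: the hardware-specific handler has decided the active descriptor finished, so it is flagged complete and the tasklet above is scheduled to do the heavier completion work outside interrupt context. A sketch with the same demo_* types:

static irqreturn_t demo_interrupt(int irq, void *dev_id)
{
	struct demo_chan *chan = dev_id;
	struct demo_desc *desc;

	spin_lock(&chan->lock);

	desc = list_first_entry_or_null(&chan->active, struct demo_desc, node);
	if (!desc) {
		/* spurious interrupt: no transfer is in flight */
		spin_unlock(&chan->lock);
		return IRQ_NONE;
	}

	desc->complete = true;			/* completion is finalized in the tasklet */
	tasklet_schedule(&chan->tasklet);

	spin_unlock(&chan->lock);
	return IRQ_HANDLED;
}
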
831 struct ep93xx_dma_desc *desc; in ep93xx_dma_tx_submit() local
838 desc = container_of(tx, struct ep93xx_dma_desc, txd); in ep93xx_dma_tx_submit()
846 ep93xx_dma_set_active(edmac, desc); in ep93xx_dma_tx_submit()
849 list_add_tail(&desc->node, &edmac->queue); in ep93xx_dma_tx_submit()
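
ep93xx_dma_tx_submit() recovers the driver descriptor from the generic dma_async_tx_descriptor with container_of() and then either makes it active immediately (idle channel) or parks it on the software queue. The cookie assignment itself comes from the dmaengine core (dma_cookie_assign(), also in the private header referenced in the tasklet sketch). Roughly:

static dma_cookie_t demo_tx_submit(struct dma_async_tx_descriptor *tx)
{
	struct demo_chan *chan = container_of(tx->chan, struct demo_chan, chan);
	struct demo_desc *desc = container_of(tx, struct demo_desc, txd);
	dma_cookie_t cookie;
	unsigned long flags;

	spin_lock_irqsave(&chan->lock, flags);

	cookie = dma_cookie_assign(tx);		/* only the chain head gets a cookie */

	if (list_empty(&chan->active)) {
		demo_set_active(chan, desc);	/* idle channel: start right away */
		/* ... kick the hardware here ... */
	} else {
		list_add_tail(&desc->node, &chan->queue);
	}

	spin_unlock_irqrestore(&chan->lock, flags);
	return cookie;
}
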
914 struct ep93xx_dma_desc *desc; in ep93xx_dma_alloc_chan_resources() local
916 desc = kzalloc(sizeof(*desc), GFP_KERNEL); in ep93xx_dma_alloc_chan_resources()
917 if (!desc) { in ep93xx_dma_alloc_chan_resources()
922 INIT_LIST_HEAD(&desc->tx_list); in ep93xx_dma_alloc_chan_resources()
924 dma_async_tx_descriptor_init(&desc->txd, chan); in ep93xx_dma_alloc_chan_resources()
925 desc->txd.flags = DMA_CTRL_ACK; in ep93xx_dma_alloc_chan_resources()
926 desc->txd.tx_submit = ep93xx_dma_tx_submit; in ep93xx_dma_alloc_chan_resources()
928 ep93xx_dma_desc_put(edmac, desc); in ep93xx_dma_alloc_chan_resources()
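
ep93xx_dma_alloc_chan_resources() pre-allocates a pool of descriptors: each one is zero-allocated, its tx_list initialized, its embedded txd initialized against the channel with dma_async_tx_descriptor_init(), flagged DMA_CTRL_ACK so it is immediately reusable, given the driver's tx_submit hook, and dropped onto the free list. A sketch of that pool setup; the pool size is illustrative only.

#include <linux/slab.h>

static int demo_alloc_chan_resources(struct dma_chan *c)
{
	struct demo_chan *chan = container_of(c, struct demo_chan, chan);
	int i;

	for (i = 0; i < 32; i++) {		/* pool size chosen for illustration */
		struct demo_desc *desc = kzalloc(sizeof(*desc), GFP_KERNEL);

		if (!desc)
			break;

		INIT_LIST_HEAD(&desc->tx_list);

		dma_async_tx_descriptor_init(&desc->txd, c);
		desc->txd.flags = DMA_CTRL_ACK;	/* reusable until a client claims it */
		desc->txd.tx_submit = demo_tx_submit;

		demo_desc_put(chan, desc);
	}

	return i ? i : -ENOMEM;
}
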
951 struct ep93xx_dma_desc *desc, *d; in ep93xx_dma_free_chan_resources() local
966 list_for_each_entry_safe(desc, d, &list, node) in ep93xx_dma_free_chan_resources()
967 kfree(desc); in ep93xx_dma_free_chan_resources()
988 struct ep93xx_dma_desc *desc, *first; in ep93xx_dma_prep_dma_memcpy() local
993 desc = ep93xx_dma_desc_get(edmac); in ep93xx_dma_prep_dma_memcpy()
994 if (!desc) { in ep93xx_dma_prep_dma_memcpy()
1001 desc->src_addr = src + offset; in ep93xx_dma_prep_dma_memcpy()
1002 desc->dst_addr = dest + offset; in ep93xx_dma_prep_dma_memcpy()
1003 desc->size = bytes; in ep93xx_dma_prep_dma_memcpy()
1006 first = desc; in ep93xx_dma_prep_dma_memcpy()
1008 list_add_tail(&desc->node, &first->tx_list); in ep93xx_dma_prep_dma_memcpy()
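
The prep_dma_memcpy() hits show how a large copy is split: the request is walked in hardware-sized chunks, a descriptor is fetched for each chunk, and every descriptor after the first is chained onto first->tx_list so the whole set is submitted and completed as one transaction. A sketch; DEMO_MAX_XFER is an invented per-descriptor limit, not the hardware's real maximum.

#define DEMO_MAX_XFER	65536		/* hypothetical per-descriptor limit */

static struct dma_async_tx_descriptor *
demo_prep_memcpy(struct demo_chan *chan, dma_addr_t dest, dma_addr_t src,
		 size_t len, unsigned long flags)
{
	struct demo_desc *desc, *first = NULL;
	size_t offset, bytes;

	for (offset = 0; offset < len; offset += bytes) {
		desc = demo_desc_get(chan);
		if (!desc)
			goto fail;

		bytes = len - offset;
		if (bytes > DEMO_MAX_XFER)
			bytes = DEMO_MAX_XFER;

		desc->src_addr = src + offset;
		desc->dst_addr = dest + offset;
		desc->size = bytes;

		if (!first)
			first = desc;			/* chain head */
		else
			list_add_tail(&desc->node, &first->tx_list);
	}

	if (!first)
		return NULL;

	first->txd.flags = flags;
	return &first->txd;

fail:
	demo_desc_put(chan, first);	/* also returns the partial chain */
	return NULL;
}
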
1037 struct ep93xx_dma_desc *desc, *first; in ep93xx_dma_prep_slave_sg() local
1063 desc = ep93xx_dma_desc_get(edmac); in ep93xx_dma_prep_slave_sg()
1064 if (!desc) { in ep93xx_dma_prep_slave_sg()
1070 desc->src_addr = sg_dma_address(sg); in ep93xx_dma_prep_slave_sg()
1071 desc->dst_addr = edmac->runtime_addr; in ep93xx_dma_prep_slave_sg()
1073 desc->src_addr = edmac->runtime_addr; in ep93xx_dma_prep_slave_sg()
1074 desc->dst_addr = sg_dma_address(sg); in ep93xx_dma_prep_slave_sg()
1076 desc->size = len; in ep93xx_dma_prep_slave_sg()
1079 first = desc; in ep93xx_dma_prep_slave_sg()
1081 list_add_tail(&desc->node, &first->tx_list); in ep93xx_dma_prep_slave_sg()
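
On the client side, those slave_sg descriptors are what a peripheral driver gets back from the dmaengine API: it configures the channel's device-side address and width, hands over an already-mapped scatterlist, submits the descriptor and kicks the engine. A hedged client-side sketch; the FIFO address, bus width, and burst size are made up, and sgl is assumed to have been mapped with dma_map_sg() already.

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

static int demo_start_tx(struct dma_chan *chan, struct scatterlist *sgl,
			 unsigned int nents, dma_addr_t fifo_addr,
			 void (*done)(void *arg), void *arg)
{
	struct dma_slave_config cfg = {
		.direction	= DMA_MEM_TO_DEV,
		.dst_addr	= fifo_addr,		/* peripheral FIFO (made up) */
		.dst_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
		.dst_maxburst	= 1,
	};
	struct dma_async_tx_descriptor *txd;
	dma_cookie_t cookie;
	int ret;

	ret = dmaengine_slave_config(chan, &cfg);
	if (ret)
		return ret;

	txd = dmaengine_prep_slave_sg(chan, sgl, nents, DMA_MEM_TO_DEV,
				      DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!txd)
		return -ENOMEM;

	txd->callback = done;		/* runs from the driver's tasklet */
	txd->callback_param = arg;

	cookie = dmaengine_submit(txd);
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);	/* actually start the transfer */
	return 0;
}
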
1117 struct ep93xx_dma_desc *desc, *first; in ep93xx_dma_prep_dma_cyclic() local
1141 desc = ep93xx_dma_desc_get(edmac); in ep93xx_dma_prep_dma_cyclic()
1142 if (!desc) { in ep93xx_dma_prep_dma_cyclic()
1148 desc->src_addr = dma_addr + offset; in ep93xx_dma_prep_dma_cyclic()
1149 desc->dst_addr = edmac->runtime_addr; in ep93xx_dma_prep_dma_cyclic()
1151 desc->src_addr = edmac->runtime_addr; in ep93xx_dma_prep_dma_cyclic()
1152 desc->dst_addr = dma_addr + offset; in ep93xx_dma_prep_dma_cyclic()
1155 desc->size = period_len; in ep93xx_dma_prep_dma_cyclic()
1158 first = desc; in ep93xx_dma_prep_dma_cyclic()
1160 list_add_tail(&desc->node, &first->tx_list); in ep93xx_dma_prep_dma_cyclic()
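
The prep_dma_cyclic() hits build one descriptor per period and let the chain wrap around indefinitely, with the client callback firing once per completed period (typical for audio). A hedged client-side sketch of requesting such a transfer; direction and flags are assumptions for the example.

static int demo_start_cyclic(struct dma_chan *chan, dma_addr_t buf,
			     size_t buf_len, size_t period_len,
			     void (*period_done)(void *arg), void *arg)
{
	struct dma_async_tx_descriptor *txd;

	/* one descriptor per period; the driver loops over them endlessly */
	txd = dmaengine_prep_dma_cyclic(chan, buf, buf_len, period_len,
					DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
	if (!txd)
		return -ENOMEM;

	txd->callback = period_done;	/* invoked after each period */
	txd->callback_param = arg;

	dmaengine_submit(txd);
	dma_async_issue_pending(chan);
	return 0;
}
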
1202 struct ep93xx_dma_desc *desc, *_d; in ep93xx_dma_terminate_all() local
1219 list_for_each_entry_safe(desc, _d, &list, node) in ep93xx_dma_terminate_all()
1220 ep93xx_dma_desc_put(edmac, desc); in ep93xx_dma_terminate_all()
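
ep93xx_dma_terminate_all() tears a channel down by collecting everything on the active and queued lists and handing each descriptor back through ep93xx_dma_desc_put(); from the client's point of view this is what runs behind dmaengine_terminate_all()/dmaengine_terminate_sync(). A sketch of the driver-side pattern with the demo_* types; the hardware stop itself is elided.

static int demo_terminate_all(struct dma_chan *c)
{
	struct demo_chan *chan = container_of(c, struct demo_chan, chan);
	struct demo_desc *desc, *d;
	unsigned long flags;
	LIST_HEAD(list);

	spin_lock_irqsave(&chan->lock, flags);
	/* ... stop the hardware here ... */
	list_splice_init(&chan->active, &list);
	list_splice_init(&chan->queue, &list);
	spin_unlock_irqrestore(&chan->lock, flags);

	/* hand every collected descriptor back to the free pool */
	list_for_each_entry_safe(desc, d, &list, node)
		demo_desc_put(chan, desc);

	return 0;
}
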