Lines matching refs: chan (drivers/dma/xilinx/xilinx_dpdma.c)
200 struct xilinx_dpdma_chan *chan; member
249 container_of(_chan, struct xilinx_dpdma_chan, vchan.chan)
268 struct xilinx_dpdma_chan *chan[XILINX_DPDMA_NUM_CHAN]; member
299 static void xilinx_dpdma_debugfs_desc_done_irq(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_debugfs_desc_done_irq() argument
301 if (IS_ENABLED(CONFIG_DEBUG_FS) && chan->id == dpdma_debugfs.chan_id) in xilinx_dpdma_debugfs_desc_done_irq()
554 xilinx_dpdma_chan_alloc_sw_desc(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_chan_alloc_sw_desc() argument
559 sw_desc = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &dma_addr); in xilinx_dpdma_chan_alloc_sw_desc()
576 xilinx_dpdma_chan_free_sw_desc(struct xilinx_dpdma_chan *chan, in xilinx_dpdma_chan_free_sw_desc() argument
579 dma_pool_free(chan->desc_pool, sw_desc, sw_desc->dma_addr); in xilinx_dpdma_chan_free_sw_desc()
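The two sw-descriptor helpers matched above are thin wrappers around a per-channel dma_pool. A minimal sketch of that pairing, assuming the driver's xilinx_dpdma_sw_desc layout (the dma_addr field is implied by the dma_pool_free() call above; the rest of the declarations are filled in as assumptions):

static struct xilinx_dpdma_sw_desc *
xilinx_dpdma_chan_alloc_sw_desc(struct xilinx_dpdma_chan *chan)
{
        struct xilinx_dpdma_sw_desc *sw_desc;
        dma_addr_t dma_addr;

        /* GFP_ATOMIC: this runs from the descriptor-prep path. */
        sw_desc = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &dma_addr);
        if (!sw_desc)
                return NULL;

        /* Remember the bus address so it can be handed back to the pool. */
        sw_desc->dma_addr = dma_addr;

        return sw_desc;
}

static void
xilinx_dpdma_chan_free_sw_desc(struct xilinx_dpdma_chan *chan,
                               struct xilinx_dpdma_sw_desc *sw_desc)
{
        dma_pool_free(chan->desc_pool, sw_desc, sw_desc->dma_addr);
}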
589 static void xilinx_dpdma_chan_dump_tx_desc(struct xilinx_dpdma_chan *chan, in xilinx_dpdma_chan_dump_tx_desc() argument
593 struct device *dev = chan->xdev->dev; in xilinx_dpdma_chan_dump_tx_desc()
597 dev_dbg(dev, "------- channel ID = %d -------\n", chan->id); in xilinx_dpdma_chan_dump_tx_desc()
634 xilinx_dpdma_chan_alloc_tx_desc(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_chan_alloc_tx_desc() argument
643 tx_desc->chan = chan; in xilinx_dpdma_chan_alloc_tx_desc()
667 xilinx_dpdma_chan_free_sw_desc(desc->chan, sw_desc); in xilinx_dpdma_chan_free_tx_desc()
685 xilinx_dpdma_chan_prep_interleaved_dma(struct xilinx_dpdma_chan *chan, in xilinx_dpdma_chan_prep_interleaved_dma() argument
695 dev_err(chan->xdev->dev, in xilinx_dpdma_chan_prep_interleaved_dma()
697 chan->id, XILINX_DPDMA_ALIGN_BYTES); in xilinx_dpdma_chan_prep_interleaved_dma()
701 tx_desc = xilinx_dpdma_chan_alloc_tx_desc(chan); in xilinx_dpdma_chan_prep_interleaved_dma()
705 sw_desc = xilinx_dpdma_chan_alloc_sw_desc(chan); in xilinx_dpdma_chan_prep_interleaved_dma()
711 xilinx_dpdma_sw_desc_set_dma_addrs(chan->xdev, sw_desc, sw_desc, in xilinx_dpdma_chan_prep_interleaved_dma()
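Pieced together from the prep_interleaved_dma matches, the prep path rejects unaligned buffers, allocates a tx descriptor, then one sw descriptor, and points the hardware descriptor at the frame buffer. A trimmed sketch: the alignment check, the allocation calls, and set_dma_addrs come from the matches, while the error-message text, the cleanup call, the list field names, and the omitted hw-descriptor field programming are assumptions:

static struct xilinx_dpdma_tx_desc *
xilinx_dpdma_chan_prep_interleaved_dma(struct xilinx_dpdma_chan *chan,
                                       struct dma_interleaved_template *xt)
{
        struct xilinx_dpdma_tx_desc *tx_desc;
        struct xilinx_dpdma_sw_desc *sw_desc;

        if (!IS_ALIGNED(xt->src_start, XILINX_DPDMA_ALIGN_BYTES)) {
                dev_err(chan->xdev->dev,
                        "chan%u: buffer should be aligned at %d B\n",
                        chan->id, XILINX_DPDMA_ALIGN_BYTES);
                return NULL;
        }

        tx_desc = xilinx_dpdma_chan_alloc_tx_desc(chan);
        if (!tx_desc)
                return NULL;

        sw_desc = xilinx_dpdma_chan_alloc_sw_desc(chan);
        if (!sw_desc) {
                /* Assumed cleanup: release the tx descriptor via its vdesc. */
                xilinx_dpdma_chan_free_tx_desc(&tx_desc->vdesc);
                return NULL;
        }

        /* Single source address; the descriptor chains back to itself. */
        xilinx_dpdma_sw_desc_set_dma_addrs(chan->xdev, sw_desc, sw_desc,
                                           &xt->src_start, 1);

        /* ... hw descriptor size/stride/control programming omitted ... */

        list_add_tail(&sw_desc->node, &tx_desc->descriptors);

        return tx_desc;
}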
741 static void xilinx_dpdma_chan_enable(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_chan_enable() argument
745 reg = (XILINX_DPDMA_INTR_CHAN_MASK << chan->id) in xilinx_dpdma_chan_enable()
747 dpdma_write(chan->xdev->reg, XILINX_DPDMA_IEN, reg); in xilinx_dpdma_chan_enable()
748 reg = (XILINX_DPDMA_EINTR_CHAN_ERR_MASK << chan->id) in xilinx_dpdma_chan_enable()
750 dpdma_write(chan->xdev->reg, XILINX_DPDMA_EIEN, reg); in xilinx_dpdma_chan_enable()
759 dpdma_set(chan->reg, XILINX_DPDMA_CH_CNTL, reg); in xilinx_dpdma_chan_enable()
768 static void xilinx_dpdma_chan_disable(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_chan_disable() argument
772 reg = XILINX_DPDMA_INTR_CHAN_MASK << chan->id; in xilinx_dpdma_chan_disable()
773 dpdma_write(chan->xdev->reg, XILINX_DPDMA_IEN, reg); in xilinx_dpdma_chan_disable()
774 reg = XILINX_DPDMA_EINTR_CHAN_ERR_MASK << chan->id; in xilinx_dpdma_chan_disable()
775 dpdma_write(chan->xdev->reg, XILINX_DPDMA_EIEN, reg); in xilinx_dpdma_chan_disable()
777 dpdma_clr(chan->reg, XILINX_DPDMA_CH_CNTL, XILINX_DPDMA_CH_CNTL_ENABLE); in xilinx_dpdma_chan_disable()
786 static void xilinx_dpdma_chan_pause(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_chan_pause() argument
788 dpdma_set(chan->reg, XILINX_DPDMA_CH_CNTL, XILINX_DPDMA_CH_CNTL_PAUSE); in xilinx_dpdma_chan_pause()
797 static void xilinx_dpdma_chan_unpause(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_chan_unpause() argument
799 dpdma_clr(chan->reg, XILINX_DPDMA_CH_CNTL, XILINX_DPDMA_CH_CNTL_PAUSE); in xilinx_dpdma_chan_unpause()
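The four channel-state helpers above share one pattern: (un)masking interrupts through the global IEN/EIEN registers, then a read-modify-write of the per-channel CNTL register. Pause and unpause are the smallest instance of it, reproduced from the matched lines (dpdma_set()/dpdma_clr() are the driver's set-bits/clear-bits register helpers):

static void xilinx_dpdma_chan_pause(struct xilinx_dpdma_chan *chan)
{
        /* Set the PAUSE bit in the per-channel control register. */
        dpdma_set(chan->reg, XILINX_DPDMA_CH_CNTL, XILINX_DPDMA_CH_CNTL_PAUSE);
}

static void xilinx_dpdma_chan_unpause(struct xilinx_dpdma_chan *chan)
{
        /* Clear the PAUSE bit again before (re)starting transfers. */
        dpdma_clr(chan->reg, XILINX_DPDMA_CH_CNTL, XILINX_DPDMA_CH_CNTL_PAUSE);
}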
802 static u32 xilinx_dpdma_chan_video_group_ready(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_chan_video_group_ready() argument
804 struct xilinx_dpdma_device *xdev = chan->xdev; in xilinx_dpdma_chan_video_group_ready()
809 if (xdev->chan[i]->video_group && !xdev->chan[i]->running) in xilinx_dpdma_chan_video_group_ready()
812 if (xdev->chan[i]->video_group) in xilinx_dpdma_chan_video_group_ready()
826 static void xilinx_dpdma_chan_queue_transfer(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_chan_queue_transfer() argument
828 struct xilinx_dpdma_device *xdev = chan->xdev; in xilinx_dpdma_chan_queue_transfer()
835 lockdep_assert_held(&chan->lock); in xilinx_dpdma_chan_queue_transfer()
837 if (chan->desc.pending) in xilinx_dpdma_chan_queue_transfer()
840 if (!chan->running) { in xilinx_dpdma_chan_queue_transfer()
841 xilinx_dpdma_chan_unpause(chan); in xilinx_dpdma_chan_queue_transfer()
842 xilinx_dpdma_chan_enable(chan); in xilinx_dpdma_chan_queue_transfer()
843 chan->first_frame = true; in xilinx_dpdma_chan_queue_transfer()
844 chan->running = true; in xilinx_dpdma_chan_queue_transfer()
847 vdesc = vchan_next_desc(&chan->vchan); in xilinx_dpdma_chan_queue_transfer()
852 chan->desc.pending = desc; in xilinx_dpdma_chan_queue_transfer()
865 dpdma_write(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDR, in xilinx_dpdma_chan_queue_transfer()
868 dpdma_write(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDRE, in xilinx_dpdma_chan_queue_transfer()
872 first_frame = chan->first_frame; in xilinx_dpdma_chan_queue_transfer()
873 chan->first_frame = false; in xilinx_dpdma_chan_queue_transfer()
875 if (chan->video_group) { in xilinx_dpdma_chan_queue_transfer()
876 channels = xilinx_dpdma_chan_video_group_ready(chan); in xilinx_dpdma_chan_queue_transfer()
884 channels = BIT(chan->id); in xilinx_dpdma_chan_queue_transfer()
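queue_transfer ties most of the references above together. A condensed sketch following the matched lines; the descriptor-ID bookkeeping is omitted, the to_dpdma_tx_desc() helper name and the final global trigger write are assumptions:

static void xilinx_dpdma_chan_queue_transfer(struct xilinx_dpdma_chan *chan)
{
        struct xilinx_dpdma_sw_desc *sw_desc;
        struct xilinx_dpdma_tx_desc *desc;
        struct virt_dma_desc *vdesc;
        u32 channels;
        bool first_frame;

        lockdep_assert_held(&chan->lock);

        /* Only one pending frame descriptor at a time. */
        if (chan->desc.pending)
                return;

        if (!chan->running) {
                xilinx_dpdma_chan_unpause(chan);
                xilinx_dpdma_chan_enable(chan);
                chan->first_frame = true;
                chan->running = true;
        }

        vdesc = vchan_next_desc(&chan->vchan);
        if (!vdesc)
                return;

        desc = to_dpdma_tx_desc(vdesc);  /* container_of() helper, assumed name */
        chan->desc.pending = desc;
        list_del(&desc->vdesc.node);

        /* Point the channel at the first hw descriptor (64-bit address). */
        sw_desc = list_first_entry(&desc->descriptors,
                                   struct xilinx_dpdma_sw_desc, node);
        dpdma_write(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDR,
                    lower_32_bits(sw_desc->dma_addr));
        dpdma_write(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDRE,
                    upper_32_bits(sw_desc->dma_addr)); /* real code masks these bits */

        first_frame = chan->first_frame;
        chan->first_frame = false;

        if (chan->video_group) {
                /* Start only once every channel in the video group is ready. */
                channels = xilinx_dpdma_chan_video_group_ready(chan);
                if (!channels)
                        return;
        } else {
                channels = BIT(chan->id);
        }

        /* Assumed: a global trigger (first frame) or retrigger write starts DMA. */
}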
903 static u32 xilinx_dpdma_chan_ostand(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_chan_ostand() argument
906 dpdma_read(chan->reg, XILINX_DPDMA_CH_STATUS)); in xilinx_dpdma_chan_ostand()
923 static int xilinx_dpdma_chan_notify_no_ostand(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_chan_notify_no_ostand() argument
927 cnt = xilinx_dpdma_chan_ostand(chan); in xilinx_dpdma_chan_notify_no_ostand()
929 dev_dbg(chan->xdev->dev, in xilinx_dpdma_chan_notify_no_ostand()
931 chan->id, cnt); in xilinx_dpdma_chan_notify_no_ostand()
936 dpdma_write(chan->xdev->reg, XILINX_DPDMA_IDS, in xilinx_dpdma_chan_notify_no_ostand()
937 XILINX_DPDMA_INTR_NO_OSTAND(chan->id)); in xilinx_dpdma_chan_notify_no_ostand()
938 wake_up(&chan->wait_to_stop); in xilinx_dpdma_chan_notify_no_ostand()
953 static int xilinx_dpdma_chan_wait_no_ostand(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_chan_wait_no_ostand() argument
958 ret = wait_event_interruptible_timeout(chan->wait_to_stop, in xilinx_dpdma_chan_wait_no_ostand()
959 !xilinx_dpdma_chan_ostand(chan), in xilinx_dpdma_chan_wait_no_ostand()
962 dpdma_write(chan->xdev->reg, XILINX_DPDMA_IEN, in xilinx_dpdma_chan_wait_no_ostand()
963 XILINX_DPDMA_INTR_NO_OSTAND(chan->id)); in xilinx_dpdma_chan_wait_no_ostand()
967 dev_err(chan->xdev->dev, "chan%u: not ready to stop: %d trans\n", in xilinx_dpdma_chan_wait_no_ostand()
968 chan->id, xilinx_dpdma_chan_ostand(chan)); in xilinx_dpdma_chan_wait_no_ostand()
986 static int xilinx_dpdma_chan_poll_no_ostand(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_chan_poll_no_ostand() argument
992 cnt = xilinx_dpdma_chan_ostand(chan); in xilinx_dpdma_chan_poll_no_ostand()
997 dpdma_write(chan->xdev->reg, XILINX_DPDMA_IEN, in xilinx_dpdma_chan_poll_no_ostand()
998 XILINX_DPDMA_INTR_NO_OSTAND(chan->id)); in xilinx_dpdma_chan_poll_no_ostand()
1002 dev_err(chan->xdev->dev, "chan%u: not ready to stop: %d trans\n", in xilinx_dpdma_chan_poll_no_ostand()
1003 chan->id, xilinx_dpdma_chan_ostand(chan)); in xilinx_dpdma_chan_poll_no_ostand()
1017 static int xilinx_dpdma_chan_stop(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_chan_stop() argument
1022 ret = xilinx_dpdma_chan_wait_no_ostand(chan); in xilinx_dpdma_chan_stop()
1026 spin_lock_irqsave(&chan->lock, flags); in xilinx_dpdma_chan_stop()
1027 xilinx_dpdma_chan_disable(chan); in xilinx_dpdma_chan_stop()
1028 chan->running = false; in xilinx_dpdma_chan_stop()
1029 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dpdma_chan_stop()
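The stop sequence is fully visible in the matches: wait for the outstanding-transaction count to drop to zero, then disable the channel under its lock. Reconstructed with only the local declarations and the return values filled in:

static int xilinx_dpdma_chan_stop(struct xilinx_dpdma_chan *chan)
{
        unsigned long flags;
        int ret;

        /* Block until the channel reports no outstanding transactions. */
        ret = xilinx_dpdma_chan_wait_no_ostand(chan);
        if (ret)
                return ret;

        spin_lock_irqsave(&chan->lock, flags);
        xilinx_dpdma_chan_disable(chan);
        chan->running = false;
        spin_unlock_irqrestore(&chan->lock, flags);

        return 0;
}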
1043 static void xilinx_dpdma_chan_done_irq(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_chan_done_irq() argument
1048 spin_lock_irqsave(&chan->lock, flags); in xilinx_dpdma_chan_done_irq()
1050 xilinx_dpdma_debugfs_desc_done_irq(chan); in xilinx_dpdma_chan_done_irq()
1052 active = chan->desc.active; in xilinx_dpdma_chan_done_irq()
1056 dev_warn(chan->xdev->dev, in xilinx_dpdma_chan_done_irq()
1058 chan->id); in xilinx_dpdma_chan_done_irq()
1060 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dpdma_chan_done_irq()
1071 static void xilinx_dpdma_chan_vsync_irq(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_chan_vsync_irq() argument
1078 spin_lock_irqsave(&chan->lock, flags); in xilinx_dpdma_chan_vsync_irq()
1080 pending = chan->desc.pending; in xilinx_dpdma_chan_vsync_irq()
1081 if (!chan->running || !pending) in xilinx_dpdma_chan_vsync_irq()
1084 desc_id = dpdma_read(chan->reg, XILINX_DPDMA_CH_DESC_ID) in xilinx_dpdma_chan_vsync_irq()
1091 dev_dbg(chan->xdev->dev, in xilinx_dpdma_chan_vsync_irq()
1093 chan->id, sw_desc->hw.desc_id, desc_id); in xilinx_dpdma_chan_vsync_irq()
1101 spin_lock(&chan->vchan.lock); in xilinx_dpdma_chan_vsync_irq()
1102 if (chan->desc.active) in xilinx_dpdma_chan_vsync_irq()
1103 vchan_cookie_complete(&chan->desc.active->vdesc); in xilinx_dpdma_chan_vsync_irq()
1104 chan->desc.active = pending; in xilinx_dpdma_chan_vsync_irq()
1105 chan->desc.pending = NULL; in xilinx_dpdma_chan_vsync_irq()
1107 xilinx_dpdma_chan_queue_transfer(chan); in xilinx_dpdma_chan_vsync_irq()
1108 spin_unlock(&chan->vchan.lock); in xilinx_dpdma_chan_vsync_irq()
1111 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dpdma_chan_vsync_irq()
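At VSYNC the driver rotates its descriptor slots: the frame that was pending becomes active, the previously active frame is completed, and the next issued frame (if any) is programmed. A condensed sketch of that rotation from the matched lines; the desc_id consistency check against XILINX_DPDMA_CH_DESC_ID is only summarized in a comment:

static void xilinx_dpdma_chan_vsync_irq(struct xilinx_dpdma_chan *chan)
{
        struct xilinx_dpdma_tx_desc *pending;
        unsigned long flags;

        spin_lock_irqsave(&chan->lock, flags);

        pending = chan->desc.pending;
        if (!chan->running || !pending)
                goto out;

        /*
         * The real handler reads XILINX_DPDMA_CH_DESC_ID and bails out if it
         * does not yet match the pending descriptor's desc_id.
         */

        spin_lock(&chan->vchan.lock);

        /* The previously active frame is done; the pending one becomes active. */
        if (chan->desc.active)
                vchan_cookie_complete(&chan->desc.active->vdesc);
        chan->desc.active = pending;
        chan->desc.pending = NULL;

        /* Queue the next issued frame, if there is one. */
        xilinx_dpdma_chan_queue_transfer(chan);

        spin_unlock(&chan->vchan.lock);

out:
        spin_unlock_irqrestore(&chan->lock, flags);
}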
1123 xilinx_dpdma_chan_err(struct xilinx_dpdma_chan *chan, u32 isr, u32 eisr) in xilinx_dpdma_chan_err() argument
1125 if (!chan) in xilinx_dpdma_chan_err()
1128 if (chan->running && in xilinx_dpdma_chan_err()
1129 ((isr & (XILINX_DPDMA_INTR_CHAN_ERR_MASK << chan->id)) || in xilinx_dpdma_chan_err()
1130 (eisr & (XILINX_DPDMA_EINTR_CHAN_ERR_MASK << chan->id)))) in xilinx_dpdma_chan_err()
1145 static void xilinx_dpdma_chan_handle_err(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_chan_handle_err() argument
1147 struct xilinx_dpdma_device *xdev = chan->xdev; in xilinx_dpdma_chan_handle_err()
1151 spin_lock_irqsave(&chan->lock, flags); in xilinx_dpdma_chan_handle_err()
1154 chan->id, in xilinx_dpdma_chan_handle_err()
1155 dpdma_read(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDRE), in xilinx_dpdma_chan_handle_err()
1156 dpdma_read(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDR)); in xilinx_dpdma_chan_handle_err()
1158 chan->id, in xilinx_dpdma_chan_handle_err()
1159 dpdma_read(chan->reg, XILINX_DPDMA_CH_PYLD_CUR_ADDRE), in xilinx_dpdma_chan_handle_err()
1160 dpdma_read(chan->reg, XILINX_DPDMA_CH_PYLD_CUR_ADDR)); in xilinx_dpdma_chan_handle_err()
1162 xilinx_dpdma_chan_disable(chan); in xilinx_dpdma_chan_handle_err()
1163 chan->running = false; in xilinx_dpdma_chan_handle_err()
1165 if (!chan->desc.active) in xilinx_dpdma_chan_handle_err()
1168 active = chan->desc.active; in xilinx_dpdma_chan_handle_err()
1169 chan->desc.active = NULL; in xilinx_dpdma_chan_handle_err()
1171 xilinx_dpdma_chan_dump_tx_desc(chan, active); in xilinx_dpdma_chan_handle_err()
1175 chan->id); in xilinx_dpdma_chan_handle_err()
1178 if (!chan->desc.pending && in xilinx_dpdma_chan_handle_err()
1179 list_empty(&chan->vchan.desc_issued)) { in xilinx_dpdma_chan_handle_err()
1182 &chan->vchan.desc_issued); in xilinx_dpdma_chan_handle_err()
1188 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dpdma_chan_handle_err()
1200 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan); in xilinx_dpdma_prep_interleaved_dma() local
1212 desc = xilinx_dpdma_chan_prep_interleaved_dma(chan, xt); in xilinx_dpdma_prep_interleaved_dma()
1216 vchan_tx_prep(&chan->vchan, &desc->vdesc, flags | DMA_CTRL_ACK); in xilinx_dpdma_prep_interleaved_dma()
1231 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan); in xilinx_dpdma_alloc_chan_resources() local
1234 chan->desc_pool = dma_pool_create(dev_name(chan->xdev->dev), in xilinx_dpdma_alloc_chan_resources()
1235 chan->xdev->dev, in xilinx_dpdma_alloc_chan_resources()
1238 if (!chan->desc_pool) { in xilinx_dpdma_alloc_chan_resources()
1239 dev_err(chan->xdev->dev, in xilinx_dpdma_alloc_chan_resources()
1241 chan->id); in xilinx_dpdma_alloc_chan_resources()
1257 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan); in xilinx_dpdma_free_chan_resources() local
1259 vchan_free_chan_resources(&chan->vchan); in xilinx_dpdma_free_chan_resources()
1261 dma_pool_destroy(chan->desc_pool); in xilinx_dpdma_free_chan_resources()
1262 chan->desc_pool = NULL; in xilinx_dpdma_free_chan_resources()
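Channel resources amount to the sw-descriptor dma_pool. A sketch of the alloc/free pair based on the matches; the pool element-size and alignment arguments and the exact error message are assumptions:

static int xilinx_dpdma_alloc_chan_resources(struct dma_chan *dchan)
{
        struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);

        chan->desc_pool = dma_pool_create(dev_name(chan->xdev->dev),
                                          chan->xdev->dev,
                                          sizeof(struct xilinx_dpdma_sw_desc),
                                          XILINX_DPDMA_ALIGN_BYTES, 0);
        if (!chan->desc_pool) {
                dev_err(chan->xdev->dev,
                        "chan%u: failed to allocate a descriptor pool\n",
                        chan->id);
                return -ENOMEM;
        }

        return 0;
}

static void xilinx_dpdma_free_chan_resources(struct dma_chan *dchan)
{
        struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);

        /* Release descriptors still held by the virt-dma core... */
        vchan_free_chan_resources(&chan->vchan);

        /* ...then tear down the backing pool. */
        dma_pool_destroy(chan->desc_pool);
        chan->desc_pool = NULL;
}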
1267 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan); in xilinx_dpdma_issue_pending() local
1270 spin_lock_irqsave(&chan->lock, flags); in xilinx_dpdma_issue_pending()
1271 spin_lock(&chan->vchan.lock); in xilinx_dpdma_issue_pending()
1272 if (vchan_issue_pending(&chan->vchan)) in xilinx_dpdma_issue_pending()
1273 xilinx_dpdma_chan_queue_transfer(chan); in xilinx_dpdma_issue_pending()
1274 spin_unlock(&chan->vchan.lock); in xilinx_dpdma_issue_pending()
1275 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dpdma_issue_pending()
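issue_pending is completely covered by the matches above; only the signature and the flags declaration are filled in. Note the lock nesting (channel lock outside the virt-dma lock), which recurs in the VSYNC and error-task paths:

static void xilinx_dpdma_issue_pending(struct dma_chan *dchan)
{
        struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);
        unsigned long flags;

        spin_lock_irqsave(&chan->lock, flags);
        spin_lock(&chan->vchan.lock);
        /* Move issued descriptors over and kick the hardware if it is idle. */
        if (vchan_issue_pending(&chan->vchan))
                xilinx_dpdma_chan_queue_transfer(chan);
        spin_unlock(&chan->vchan.lock);
        spin_unlock_irqrestore(&chan->lock, flags);
}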
1281 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan); in xilinx_dpdma_config() local
1301 spin_lock_irqsave(&chan->lock, flags); in xilinx_dpdma_config()
1302 if (chan->id <= ZYNQMP_DPDMA_VIDEO2 && pconfig) in xilinx_dpdma_config()
1303 chan->video_group = pconfig->video_group; in xilinx_dpdma_config()
1304 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dpdma_config()
1340 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan); in xilinx_dpdma_terminate_all() local
1341 struct xilinx_dpdma_device *xdev = chan->xdev; in xilinx_dpdma_terminate_all()
1347 if (chan->video_group) { in xilinx_dpdma_terminate_all()
1349 if (xdev->chan[i]->video_group && in xilinx_dpdma_terminate_all()
1350 xdev->chan[i]->running) { in xilinx_dpdma_terminate_all()
1351 xilinx_dpdma_chan_pause(xdev->chan[i]); in xilinx_dpdma_terminate_all()
1352 xdev->chan[i]->video_group = false; in xilinx_dpdma_terminate_all()
1356 xilinx_dpdma_chan_pause(chan); in xilinx_dpdma_terminate_all()
1360 spin_lock_irqsave(&chan->vchan.lock, flags); in xilinx_dpdma_terminate_all()
1361 vchan_get_all_descriptors(&chan->vchan, &descriptors); in xilinx_dpdma_terminate_all()
1362 spin_unlock_irqrestore(&chan->vchan.lock, flags); in xilinx_dpdma_terminate_all()
1364 vchan_dma_desc_free_list(&chan->vchan, &descriptors); in xilinx_dpdma_terminate_all()
1384 struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan); in xilinx_dpdma_synchronize() local
1387 xilinx_dpdma_chan_stop(chan); in xilinx_dpdma_synchronize()
1389 spin_lock_irqsave(&chan->vchan.lock, flags); in xilinx_dpdma_synchronize()
1390 if (chan->desc.pending) { in xilinx_dpdma_synchronize()
1391 vchan_terminate_vdesc(&chan->desc.pending->vdesc); in xilinx_dpdma_synchronize()
1392 chan->desc.pending = NULL; in xilinx_dpdma_synchronize()
1394 if (chan->desc.active) { in xilinx_dpdma_synchronize()
1395 vchan_terminate_vdesc(&chan->desc.active->vdesc); in xilinx_dpdma_synchronize()
1396 chan->desc.active = NULL; in xilinx_dpdma_synchronize()
1398 spin_unlock_irqrestore(&chan->vchan.lock, flags); in xilinx_dpdma_synchronize()
1400 vchan_synchronize(&chan->vchan); in xilinx_dpdma_synchronize()
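synchronize stops the channel, then drops whatever is still sitting in the pending/active slots before letting the virt-dma core drain its tasklet. Reconstructed from the matches, with only the declarations filled in:

static void xilinx_dpdma_synchronize(struct dma_chan *dchan)
{
        struct xilinx_dpdma_chan *chan = to_xilinx_chan(dchan);
        unsigned long flags;

        xilinx_dpdma_chan_stop(chan);

        spin_lock_irqsave(&chan->vchan.lock, flags);
        if (chan->desc.pending) {
                vchan_terminate_vdesc(&chan->desc.pending->vdesc);
                chan->desc.pending = NULL;
        }
        if (chan->desc.active) {
                vchan_terminate_vdesc(&chan->desc.active->vdesc);
                chan->desc.active = NULL;
        }
        spin_unlock_irqrestore(&chan->vchan.lock, flags);

        vchan_synchronize(&chan->vchan);
}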
1449 for (i = 0; i < ARRAY_SIZE(xdev->chan); i++) in xilinx_dpdma_handle_err_irq()
1450 if (err || xilinx_dpdma_chan_err(xdev->chan[i], isr, eisr)) in xilinx_dpdma_handle_err_irq()
1451 tasklet_schedule(&xdev->chan[i]->err_task); in xilinx_dpdma_handle_err_irq()
1488 struct xilinx_dpdma_chan *chan = from_tasklet(chan, t, err_task); in xilinx_dpdma_chan_err_task() local
1489 struct xilinx_dpdma_device *xdev = chan->xdev; in xilinx_dpdma_chan_err_task()
1493 xilinx_dpdma_chan_poll_no_ostand(chan); in xilinx_dpdma_chan_err_task()
1495 xilinx_dpdma_chan_handle_err(chan); in xilinx_dpdma_chan_err_task()
1498 XILINX_DPDMA_INTR_CHAN_ERR_MASK << chan->id); in xilinx_dpdma_chan_err_task()
1500 XILINX_DPDMA_EINTR_CHAN_ERR_MASK << chan->id); in xilinx_dpdma_chan_err_task()
1502 spin_lock_irqsave(&chan->lock, flags); in xilinx_dpdma_chan_err_task()
1503 spin_lock(&chan->vchan.lock); in xilinx_dpdma_chan_err_task()
1504 xilinx_dpdma_chan_queue_transfer(chan); in xilinx_dpdma_chan_err_task()
1505 spin_unlock(&chan->vchan.lock); in xilinx_dpdma_chan_err_task()
1506 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dpdma_chan_err_task()
1530 for (i = 0; i < ARRAY_SIZE(xdev->chan); i++) { in xilinx_dpdma_irq_handler()
1531 struct xilinx_dpdma_chan *chan = xdev->chan[i]; in xilinx_dpdma_irq_handler() local
1533 if (chan) in xilinx_dpdma_irq_handler()
1534 xilinx_dpdma_chan_vsync_irq(chan); in xilinx_dpdma_irq_handler()
1540 for_each_set_bit(i, &mask, ARRAY_SIZE(xdev->chan)) in xilinx_dpdma_irq_handler()
1541 xilinx_dpdma_chan_done_irq(xdev->chan[i]); in xilinx_dpdma_irq_handler()
1546 for_each_set_bit(i, &mask, ARRAY_SIZE(xdev->chan)) in xilinx_dpdma_irq_handler()
1547 xilinx_dpdma_chan_notify_no_ostand(xdev->chan[i]); in xilinx_dpdma_irq_handler()
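The interrupt handler dispatches per-channel work from bitmasks in the status register: one bit per channel for descriptor-done and for no-outstanding-transaction, plus a VSYNC event fanned out to every populated channel. A condensed sketch; the loops come from the matches, while the ISR register name, the FIELD_GET() masks, and the error hand-off are assumptions:

static irqreturn_t xilinx_dpdma_irq_handler(int irq, void *data)
{
        struct xilinx_dpdma_device *xdev = data;
        unsigned long mask;
        unsigned int i;
        u32 status;

        status = dpdma_read(xdev->reg, XILINX_DPDMA_ISR);  /* assumed register */
        if (!status)
                return IRQ_NONE;
        dpdma_write(xdev->reg, XILINX_DPDMA_ISR, status);   /* ack */

        /* VSYNC: rotate descriptors on every populated channel. */
        if (status & XILINX_DPDMA_INTR_VSYNC) {
                for (i = 0; i < ARRAY_SIZE(xdev->chan); i++) {
                        struct xilinx_dpdma_chan *chan = xdev->chan[i];

                        if (chan)
                                xilinx_dpdma_chan_vsync_irq(chan);
                }
        }

        /* Per-channel "descriptor done" bits. */
        mask = FIELD_GET(XILINX_DPDMA_INTR_DESC_DONE_MASK, status);
        for_each_set_bit(i, &mask, ARRAY_SIZE(xdev->chan))
                xilinx_dpdma_chan_done_irq(xdev->chan[i]);

        /* Per-channel "no outstanding transaction" bits, awaited by chan_stop(). */
        mask = FIELD_GET(XILINX_DPDMA_INTR_NO_OSTAND_MASK, status);
        for_each_set_bit(i, &mask, ARRAY_SIZE(xdev->chan))
                xilinx_dpdma_chan_notify_no_ostand(xdev->chan[i]);

        /* Error bits are handed to xilinx_dpdma_handle_err_irq() separately. */

        return IRQ_HANDLED;
}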
1564 struct xilinx_dpdma_chan *chan; in xilinx_dpdma_chan_init() local
1566 chan = devm_kzalloc(xdev->dev, sizeof(*chan), GFP_KERNEL); in xilinx_dpdma_chan_init()
1567 if (!chan) in xilinx_dpdma_chan_init()
1570 chan->id = chan_id; in xilinx_dpdma_chan_init()
1571 chan->reg = xdev->reg + XILINX_DPDMA_CH_BASE in xilinx_dpdma_chan_init()
1572 + XILINX_DPDMA_CH_OFFSET * chan->id; in xilinx_dpdma_chan_init()
1573 chan->running = false; in xilinx_dpdma_chan_init()
1574 chan->xdev = xdev; in xilinx_dpdma_chan_init()
1576 spin_lock_init(&chan->lock); in xilinx_dpdma_chan_init()
1577 init_waitqueue_head(&chan->wait_to_stop); in xilinx_dpdma_chan_init()
1579 tasklet_setup(&chan->err_task, xilinx_dpdma_chan_err_task); in xilinx_dpdma_chan_init()
1581 chan->vchan.desc_free = xilinx_dpdma_chan_free_tx_desc; in xilinx_dpdma_chan_init()
1582 vchan_init(&chan->vchan, &xdev->common); in xilinx_dpdma_chan_init()
1584 xdev->chan[chan->id] = chan; in xilinx_dpdma_chan_init()
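Channel setup is almost entirely visible above: allocate, compute the per-channel register window from the channel ID, initialize the lock, wait queue, error tasklet, and virt-dma channel, then register the channel with the device. Reconstructed with only the signature, the -ENOMEM path, and the closing return assumed:

static int xilinx_dpdma_chan_init(struct xilinx_dpdma_device *xdev,
                                  unsigned int chan_id)
{
        struct xilinx_dpdma_chan *chan;

        chan = devm_kzalloc(xdev->dev, sizeof(*chan), GFP_KERNEL);
        if (!chan)
                return -ENOMEM;

        chan->id = chan_id;
        chan->reg = xdev->reg + XILINX_DPDMA_CH_BASE
                  + XILINX_DPDMA_CH_OFFSET * chan->id;
        chan->running = false;
        chan->xdev = xdev;

        spin_lock_init(&chan->lock);
        init_waitqueue_head(&chan->wait_to_stop);

        tasklet_setup(&chan->err_task, xilinx_dpdma_chan_err_task);

        chan->vchan.desc_free = xilinx_dpdma_chan_free_tx_desc;
        vchan_init(&chan->vchan, &xdev->common);

        xdev->chan[chan->id] = chan;

        return 0;
}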
1589 static void xilinx_dpdma_chan_remove(struct xilinx_dpdma_chan *chan) in xilinx_dpdma_chan_remove() argument
1591 if (!chan) in xilinx_dpdma_chan_remove()
1594 tasklet_kill(&chan->err_task); in xilinx_dpdma_chan_remove()
1595 list_del(&chan->vchan.chan.device_node); in xilinx_dpdma_chan_remove()
1604 if (chan_id >= ARRAY_SIZE(xdev->chan)) in of_dma_xilinx_xlate()
1607 if (!xdev->chan[chan_id]) in of_dma_xilinx_xlate()
1610 return dma_get_slave_channel(&xdev->chan[chan_id]->vchan.chan); in of_dma_xilinx_xlate()
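The DT translate callback bounds-checks the requested channel index and hands out the matching virt-dma channel. A sketch from the matched lines; the of_dma plumbing and the NULL error returns are assumptions:

static struct dma_chan *of_dma_xilinx_xlate(struct of_phandle_args *dma_spec,
                                            struct of_dma *ofdma)
{
        struct xilinx_dpdma_device *xdev = ofdma->of_dma_data;
        u32 chan_id = dma_spec->args[0];

        /* Reject out-of-range or unpopulated channel indices. */
        if (chan_id >= ARRAY_SIZE(xdev->chan))
                return NULL;

        if (!xdev->chan[chan_id])
                return NULL;

        return dma_get_slave_channel(&xdev->chan[chan_id]->vchan.chan);
}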
1622 for (i = 0; i < ARRAY_SIZE(xdev->chan); i++) { in dpdma_hw_init()
1697 for (i = 0; i < ARRAY_SIZE(xdev->chan); ++i) { in xilinx_dpdma_probe()
1738 for (i = 0; i < ARRAY_SIZE(xdev->chan); i++) in xilinx_dpdma_probe()
1739 xilinx_dpdma_chan_remove(xdev->chan[i]); in xilinx_dpdma_probe()
1759 for (i = 0; i < ARRAY_SIZE(xdev->chan); i++) in xilinx_dpdma_remove()
1760 xilinx_dpdma_chan_remove(xdev->chan[i]); in xilinx_dpdma_remove()