
Lines matching refs: uc — references to struct udma_chan *uc in the TI K3 UDMA driver (drivers/dma/ti/k3-udma.c). Each entry gives the source line number, the matched line, and the enclosing function; the trailing "argument" or "local" tag notes how uc is bound in that function.

290 static inline u32 udma_tchanrt_read(struct udma_chan *uc, int reg)  in udma_tchanrt_read()  argument
292 if (!uc->tchan) in udma_tchanrt_read()
294 return udma_read(uc->tchan->reg_rt, reg); in udma_tchanrt_read()
297 static inline void udma_tchanrt_write(struct udma_chan *uc, int reg, u32 val) in udma_tchanrt_write() argument
299 if (!uc->tchan) in udma_tchanrt_write()
301 udma_write(uc->tchan->reg_rt, reg, val); in udma_tchanrt_write()
304 static inline void udma_tchanrt_update_bits(struct udma_chan *uc, int reg, in udma_tchanrt_update_bits() argument
307 if (!uc->tchan) in udma_tchanrt_update_bits()
309 udma_update_bits(uc->tchan->reg_rt, reg, mask, val); in udma_tchanrt_update_bits()
313 static inline u32 udma_rchanrt_read(struct udma_chan *uc, int reg) in udma_rchanrt_read() argument
315 if (!uc->rchan) in udma_rchanrt_read()
317 return udma_read(uc->rchan->reg_rt, reg); in udma_rchanrt_read()
320 static inline void udma_rchanrt_write(struct udma_chan *uc, int reg, u32 val) in udma_rchanrt_write() argument
322 if (!uc->rchan) in udma_rchanrt_write()
324 udma_write(uc->rchan->reg_rt, reg, val); in udma_rchanrt_write()
327 static inline void udma_rchanrt_update_bits(struct udma_chan *uc, int reg, in udma_rchanrt_update_bits() argument
330 if (!uc->rchan) in udma_rchanrt_update_bits()
332 udma_update_bits(uc->rchan->reg_rt, reg, mask, val); in udma_rchanrt_update_bits()
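The six helpers above all follow one guarded-accessor pattern: bail out if the relevant channel half is not allocated, otherwise forward to the generic MMIO helper on that half's real-time (reg_rt) register region. A minimal reconstruction of the tchan read/write pair; the early-return lines the cross-reference omits are filled in as assumptions:

    static inline u32 udma_tchanrt_read(struct udma_chan *uc, int reg)
    {
            /* Assumed: reads on a channel without a tchan return 0 */
            if (!uc->tchan)
                    return 0;
            return udma_read(uc->tchan->reg_rt, reg);
    }

    static inline void udma_tchanrt_write(struct udma_chan *uc, int reg, u32 val)
    {
            /* Assumed: writes to a missing tchan are silently dropped */
            if (!uc->tchan)
                    return;
            udma_write(uc->tchan->reg_rt, reg, val);
    }

The rchan variants at lines 313-332 mirror these exactly, substituting uc->rchan.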
356 static void udma_reset_uchan(struct udma_chan *uc) in udma_reset_uchan() argument
358 memset(&uc->config, 0, sizeof(uc->config)); in udma_reset_uchan()
359 uc->config.remote_thread_id = -1; in udma_reset_uchan()
360 uc->state = UDMA_CHAN_IS_IDLE; in udma_reset_uchan()
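udma_reset_uchan() is fully visible in the matches; assembled, it clears the per-channel software state between allocations:

    static void udma_reset_uchan(struct udma_chan *uc)
    {
            memset(&uc->config, 0, sizeof(uc->config));
            uc->config.remote_thread_id = -1;       /* no PSI-L peer yet */
            uc->state = UDMA_CHAN_IS_IDLE;
    }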
363 static void udma_dump_chan_stdata(struct udma_chan *uc) in udma_dump_chan_stdata() argument
365 struct device *dev = uc->ud->dev; in udma_dump_chan_stdata()
369 if (uc->config.dir == DMA_MEM_TO_DEV || uc->config.dir == DMA_MEM_TO_MEM) { in udma_dump_chan_stdata()
374 udma_tchanrt_read(uc, offset)); in udma_dump_chan_stdata()
378 if (uc->config.dir == DMA_DEV_TO_MEM || uc->config.dir == DMA_MEM_TO_MEM) { in udma_dump_chan_stdata()
383 udma_rchanrt_read(uc, offset)); in udma_dump_chan_stdata()
399 static struct udma_desc *udma_udma_desc_from_paddr(struct udma_chan *uc, in udma_udma_desc_from_paddr() argument
402 struct udma_desc *d = uc->terminated_desc; in udma_udma_desc_from_paddr()
413 d = uc->desc; in udma_udma_desc_from_paddr()
426 static void udma_free_hwdesc(struct udma_chan *uc, struct udma_desc *d) in udma_free_hwdesc() argument
428 if (uc->use_dma_pool) { in udma_free_hwdesc()
435 dma_pool_free(uc->hdesc_pool, in udma_free_hwdesc()
442 struct udma_dev *ud = uc->ud; in udma_free_hwdesc()
464 struct udma_chan *uc = to_udma_chan(vd->tx.chan); in udma_purge_desc_work() local
467 udma_free_hwdesc(uc, d); in udma_purge_desc_work()
480 struct udma_chan *uc = to_udma_chan(vd->tx.chan); in udma_desc_free() local
484 if (uc->terminated_desc == d) in udma_desc_free()
485 uc->terminated_desc = NULL; in udma_desc_free()
487 if (uc->use_dma_pool) { in udma_desc_free()
488 udma_free_hwdesc(uc, d); in udma_desc_free()
500 static bool udma_is_chan_running(struct udma_chan *uc) in udma_is_chan_running() argument
505 if (uc->tchan) in udma_is_chan_running()
506 trt_ctl = udma_tchanrt_read(uc, UDMA_CHAN_RT_CTL_REG); in udma_is_chan_running()
507 if (uc->rchan) in udma_is_chan_running()
508 rrt_ctl = udma_rchanrt_read(uc, UDMA_CHAN_RT_CTL_REG); in udma_is_chan_running()
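A channel counts as running when the enable bit is set in either half's real-time control register. A sketch assembled from the matches; the final test against UDMA_CHAN_RT_CTL_EN is reconstructed and should be read as an assumption:

    static bool udma_is_chan_running(struct udma_chan *uc)
    {
            u32 trt_ctl = 0;
            u32 rrt_ctl = 0;

            if (uc->tchan)
                    trt_ctl = udma_tchanrt_read(uc, UDMA_CHAN_RT_CTL_REG);
            if (uc->rchan)
                    rrt_ctl = udma_rchanrt_read(uc, UDMA_CHAN_RT_CTL_REG);

            /* Either half enabled means the channel is live */
            if (trt_ctl & UDMA_CHAN_RT_CTL_EN || rrt_ctl & UDMA_CHAN_RT_CTL_EN)
                    return true;

            return false;
    }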
516 static bool udma_is_chan_paused(struct udma_chan *uc) in udma_is_chan_paused() argument
520 switch (uc->config.dir) { in udma_is_chan_paused()
522 val = udma_rchanrt_read(uc, UDMA_CHAN_RT_PEER_RT_EN_REG); in udma_is_chan_paused()
526 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_PEER_RT_EN_REG); in udma_is_chan_paused()
530 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_CTL_REG); in udma_is_chan_paused()
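Pause state lives in a direction-dependent register: the PSI-L peer RT_EN register for slave transfers, the local channel CTL register for MEM_TO_MEM. A sketch around the matched reads; the mask names (UDMA_PEER_RT_EN_PAUSE, UDMA_CHAN_RT_CTL_PAUSE) are taken from the driver's headers and hedged as assumptions:

    static bool udma_is_chan_paused(struct udma_chan *uc)
    {
            u32 val, pause_mask;

            switch (uc->config.dir) {
            case DMA_DEV_TO_MEM:
                    val = udma_rchanrt_read(uc, UDMA_CHAN_RT_PEER_RT_EN_REG);
                    pause_mask = UDMA_PEER_RT_EN_PAUSE;
                    break;
            case DMA_MEM_TO_DEV:
                    val = udma_tchanrt_read(uc, UDMA_CHAN_RT_PEER_RT_EN_REG);
                    pause_mask = UDMA_PEER_RT_EN_PAUSE;
                    break;
            case DMA_MEM_TO_MEM:
                    val = udma_tchanrt_read(uc, UDMA_CHAN_RT_CTL_REG);
                    pause_mask = UDMA_CHAN_RT_CTL_PAUSE;
                    break;
            default:
                    return false;
            }

            return !!(val & pause_mask);
    }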
543 static inline dma_addr_t udma_get_rx_flush_hwdesc_paddr(struct udma_chan *uc) in udma_get_rx_flush_hwdesc_paddr() argument
545 return uc->ud->rx_flush.hwdescs[uc->config.pkt_mode].cppi5_desc_paddr; in udma_get_rx_flush_hwdesc_paddr()
548 static int udma_push_to_ring(struct udma_chan *uc, int idx) in udma_push_to_ring() argument
550 struct udma_desc *d = uc->desc; in udma_push_to_ring()
554 switch (uc->config.dir) { in udma_push_to_ring()
556 ring = uc->rflow->fd_ring; in udma_push_to_ring()
560 ring = uc->tchan->t_ring; in udma_push_to_ring()
568 paddr = udma_get_rx_flush_hwdesc_paddr(uc); in udma_push_to_ring()
578 static bool udma_desc_is_rx_flush(struct udma_chan *uc, dma_addr_t addr) in udma_desc_is_rx_flush() argument
580 if (uc->config.dir != DMA_DEV_TO_MEM) in udma_desc_is_rx_flush()
583 if (addr == udma_get_rx_flush_hwdesc_paddr(uc)) in udma_desc_is_rx_flush()
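udma_get_rx_flush_hwdesc_paddr() and udma_desc_is_rx_flush() cooperate on DEV_TO_MEM teardown: a dummy flush descriptor (one each for packet and TR mode, kept in ud->rx_flush) is pushed so in-flight hardware state can drain, and its address is filtered out again on completion. The predicate, reconstructed directly from the matched lines:

    static bool udma_desc_is_rx_flush(struct udma_chan *uc, dma_addr_t addr)
    {
            if (uc->config.dir != DMA_DEV_TO_MEM)
                    return false;

            /* Compare against the channel's pkt/TR-mode flush descriptor */
            if (addr == udma_get_rx_flush_hwdesc_paddr(uc))
                    return true;

            return false;
    }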
589 static int udma_pop_from_ring(struct udma_chan *uc, dma_addr_t *addr) in udma_pop_from_ring() argument
594 switch (uc->config.dir) { in udma_pop_from_ring()
596 ring = uc->rflow->r_ring; in udma_pop_from_ring()
600 ring = uc->tchan->tc_ring; in udma_pop_from_ring()
617 if (udma_desc_is_rx_flush(uc, *addr)) in udma_pop_from_ring()
623 static void udma_reset_rings(struct udma_chan *uc) in udma_reset_rings() argument
628 switch (uc->config.dir) { in udma_reset_rings()
630 if (uc->rchan) { in udma_reset_rings()
631 ring1 = uc->rflow->fd_ring; in udma_reset_rings()
632 ring2 = uc->rflow->r_ring; in udma_reset_rings()
637 if (uc->tchan) { in udma_reset_rings()
638 ring1 = uc->tchan->t_ring; in udma_reset_rings()
639 ring2 = uc->tchan->tc_ring; in udma_reset_rings()
653 if (uc->terminated_desc) { in udma_reset_rings()
654 udma_desc_free(&uc->terminated_desc->vd); in udma_reset_rings()
655 uc->terminated_desc = NULL; in udma_reset_rings()
659 static void udma_reset_counters(struct udma_chan *uc) in udma_reset_counters() argument
663 if (uc->tchan) { in udma_reset_counters()
664 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG); in udma_reset_counters()
665 udma_tchanrt_write(uc, UDMA_CHAN_RT_BCNT_REG, val); in udma_reset_counters()
667 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_SBCNT_REG); in udma_reset_counters()
668 udma_tchanrt_write(uc, UDMA_CHAN_RT_SBCNT_REG, val); in udma_reset_counters()
670 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_PCNT_REG); in udma_reset_counters()
671 udma_tchanrt_write(uc, UDMA_CHAN_RT_PCNT_REG, val); in udma_reset_counters()
673 val = udma_tchanrt_read(uc, UDMA_CHAN_RT_PEER_BCNT_REG); in udma_reset_counters()
674 udma_tchanrt_write(uc, UDMA_CHAN_RT_PEER_BCNT_REG, val); in udma_reset_counters()
677 if (uc->rchan) { in udma_reset_counters()
678 val = udma_rchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG); in udma_reset_counters()
679 udma_rchanrt_write(uc, UDMA_CHAN_RT_BCNT_REG, val); in udma_reset_counters()
681 val = udma_rchanrt_read(uc, UDMA_CHAN_RT_SBCNT_REG); in udma_reset_counters()
682 udma_rchanrt_write(uc, UDMA_CHAN_RT_SBCNT_REG, val); in udma_reset_counters()
684 val = udma_rchanrt_read(uc, UDMA_CHAN_RT_PCNT_REG); in udma_reset_counters()
685 udma_rchanrt_write(uc, UDMA_CHAN_RT_PCNT_REG, val); in udma_reset_counters()
687 val = udma_rchanrt_read(uc, UDMA_CHAN_RT_PEER_BCNT_REG); in udma_reset_counters()
688 udma_rchanrt_write(uc, UDMA_CHAN_RT_PEER_BCNT_REG, val); in udma_reset_counters()
691 uc->bcnt = 0; in udma_reset_counters()
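The read-then-write-back pairs in udma_reset_counters() look redundant but are not: the RT byte/packet counters appear to be decrement-on-write, so writing back the value just read clears the counter without losing increments that land in between. The per-register pattern, repeated above for BCNT, SBCNT, PCNT and PEER_BCNT on both halves:

    /* Read-modify-clear: the RT counter subtracts the written value */
    val = udma_tchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG);
    udma_tchanrt_write(uc, UDMA_CHAN_RT_BCNT_REG, val);

The software-side accumulator uc->bcnt is zeroed at the end so residue math restarts from a clean baseline.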
694 static int udma_reset_chan(struct udma_chan *uc, bool hard) in udma_reset_chan() argument
696 switch (uc->config.dir) { in udma_reset_chan()
698 udma_rchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, 0); in udma_reset_chan()
699 udma_rchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, 0); in udma_reset_chan()
702 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, 0); in udma_reset_chan()
703 udma_tchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, 0); in udma_reset_chan()
706 udma_rchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, 0); in udma_reset_chan()
707 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, 0); in udma_reset_chan()
714 udma_reset_counters(uc); in udma_reset_chan()
721 memcpy(&ucc_backup, &uc->config, sizeof(uc->config)); in udma_reset_chan()
722 uc->ud->ddev.device_free_chan_resources(&uc->vc.chan); in udma_reset_chan()
725 memcpy(&uc->config, &ucc_backup, sizeof(uc->config)); in udma_reset_chan()
726 ret = uc->ud->ddev.device_alloc_chan_resources(&uc->vc.chan); in udma_reset_chan()
734 if (uc->config.dir == DMA_DEV_TO_MEM) in udma_reset_chan()
735 udma_rchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_reset_chan()
740 uc->state = UDMA_CHAN_IS_IDLE; in udma_reset_chan()
745 static void udma_start_desc(struct udma_chan *uc) in udma_start_desc() argument
747 struct udma_chan_config *ucc = &uc->config; in udma_start_desc()
749 if (ucc->pkt_mode && (uc->cyclic || ucc->dir == DMA_DEV_TO_MEM)) { in udma_start_desc()
753 for (i = 0; i < uc->desc->sglen; i++) in udma_start_desc()
754 udma_push_to_ring(uc, i); in udma_start_desc()
756 udma_push_to_ring(uc, 0); in udma_start_desc()
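udma_start_desc() encodes the queueing rule: packet-mode RX and cyclic channels prime the free-descriptor ring with every hardware descriptor of the transfer up front; everything else pushes only the first. Assembled from the matches:

    static void udma_start_desc(struct udma_chan *uc)
    {
            struct udma_chan_config *ucc = &uc->config;

            if (ucc->pkt_mode && (uc->cyclic || ucc->dir == DMA_DEV_TO_MEM)) {
                    int i;

                    /* Prime the ring with all descriptors of the transfer */
                    for (i = 0; i < uc->desc->sglen; i++)
                            udma_push_to_ring(uc, i);
            } else {
                    udma_push_to_ring(uc, 0);
            }
    }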
760 static bool udma_chan_needs_reconfiguration(struct udma_chan *uc) in udma_chan_needs_reconfiguration() argument
763 if (uc->config.ep_type == PSIL_EP_NATIVE) in udma_chan_needs_reconfiguration()
767 if (memcmp(&uc->static_tr, &uc->desc->static_tr, sizeof(uc->static_tr))) in udma_chan_needs_reconfiguration()
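Only PDMA endpoints carry static TR parameters, so a running channel needs reprogramming only when the cached static TR differs from the next descriptor's. Reconstruction from the two matched tests:

    static bool udma_chan_needs_reconfiguration(struct udma_chan *uc)
    {
            /* Native PSI-L endpoints have no static TR to reprogram */
            if (uc->config.ep_type == PSIL_EP_NATIVE)
                    return false;

            /* Compare cached vs. per-descriptor static TR parameters */
            if (memcmp(&uc->static_tr, &uc->desc->static_tr,
                       sizeof(uc->static_tr)))
                    return true;

            return false;
    }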
773 static int udma_start(struct udma_chan *uc) in udma_start() argument
775 struct virt_dma_desc *vd = vchan_next_desc(&uc->vc); in udma_start()
778 uc->desc = NULL; in udma_start()
784 uc->desc = to_udma_desc(&vd->tx); in udma_start()
787 if (udma_is_chan_running(uc) && !udma_chan_needs_reconfiguration(uc)) { in udma_start()
788 udma_start_desc(uc); in udma_start()
793 udma_reset_chan(uc, false); in udma_start()
796 udma_start_desc(uc); in udma_start()
798 switch (uc->desc->dir) { in udma_start()
801 if (uc->config.ep_type == PSIL_EP_PDMA_XY) { in udma_start()
802 u32 val = PDMA_STATIC_TR_Y(uc->desc->static_tr.elcnt) | in udma_start()
803 PDMA_STATIC_TR_X(uc->desc->static_tr.elsize); in udma_start()
805 uc->ud->match_data; in udma_start()
807 if (uc->config.enable_acc32) in udma_start()
809 if (uc->config.enable_burst) in udma_start()
812 udma_rchanrt_write(uc, in udma_start()
816 udma_rchanrt_write(uc, in udma_start()
818 PDMA_STATIC_TR_Z(uc->desc->static_tr.bstcnt, in udma_start()
822 memcpy(&uc->static_tr, &uc->desc->static_tr, in udma_start()
823 sizeof(uc->static_tr)); in udma_start()
826 udma_rchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_start()
830 udma_rchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_start()
836 if (uc->config.ep_type == PSIL_EP_PDMA_XY) { in udma_start()
837 u32 val = PDMA_STATIC_TR_Y(uc->desc->static_tr.elcnt) | in udma_start()
838 PDMA_STATIC_TR_X(uc->desc->static_tr.elsize); in udma_start()
840 if (uc->config.enable_acc32) in udma_start()
842 if (uc->config.enable_burst) in udma_start()
845 udma_tchanrt_write(uc, in udma_start()
850 memcpy(&uc->static_tr, &uc->desc->static_tr, in udma_start()
851 sizeof(uc->static_tr)); in udma_start()
855 udma_tchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_start()
858 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_start()
863 udma_rchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_start()
865 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_start()
873 uc->state = UDMA_CHAN_IS_ACTIVE; in udma_start()
879 static int udma_stop(struct udma_chan *uc) in udma_stop() argument
881 enum udma_chan_state old_state = uc->state; in udma_stop()
883 uc->state = UDMA_CHAN_IS_TERMINATING; in udma_stop()
884 reinit_completion(&uc->teardown_completed); in udma_stop()
886 switch (uc->config.dir) { in udma_stop()
888 if (!uc->cyclic && !uc->desc) in udma_stop()
889 udma_push_to_ring(uc, -1); in udma_stop()
891 udma_rchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_stop()
896 udma_tchanrt_write(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_stop()
899 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_stop()
904 udma_tchanrt_write(uc, UDMA_CHAN_RT_CTL_REG, in udma_stop()
909 uc->state = old_state; in udma_stop()
910 complete_all(&uc->teardown_completed); in udma_stop()
917 static void udma_cyclic_packet_elapsed(struct udma_chan *uc) in udma_cyclic_packet_elapsed() argument
919 struct udma_desc *d = uc->desc; in udma_cyclic_packet_elapsed()
924 udma_push_to_ring(uc, d->desc_idx); in udma_cyclic_packet_elapsed()
928 static inline void udma_fetch_epib(struct udma_chan *uc, struct udma_desc *d) in udma_fetch_epib() argument
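Only the signature of udma_fetch_epib() references uc, so its body does not appear above; it copies the Extended Packet Info Block out of the first host descriptor into the udma_desc metadata area. A sketch under that assumption (h_desc->epib, d->metadata and d->metadata_size are taken from the driver and should be treated as assumed names):

    static inline void udma_fetch_epib(struct udma_chan *uc, struct udma_desc *d)
    {
            struct cppi5_host_desc_t *h_desc = d->hwdesc[0].cppi5_desc_vaddr;

            /* Pull the EPIB words out of the completed host descriptor */
            memcpy(d->metadata, h_desc->epib, d->metadata_size);
    }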
935 static bool udma_is_desc_really_done(struct udma_chan *uc, struct udma_desc *d) in udma_is_desc_really_done() argument
940 if (uc->config.ep_type == PSIL_EP_NATIVE || in udma_is_desc_really_done()
941 uc->config.dir != DMA_MEM_TO_DEV) in udma_is_desc_really_done()
944 peer_bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_PEER_BCNT_REG); in udma_is_desc_really_done()
945 bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG); in udma_is_desc_really_done()
949 uc->tx_drain.residue = bcnt - peer_bcnt; in udma_is_desc_really_done()
950 uc->tx_drain.tstamp = ktime_get(); in udma_is_desc_really_done()
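For MEM_TO_DEV on non-native endpoints, UDMA-side completion does not mean the peripheral has drained the data: the descriptor is only "really done" once the peer byte count catches up with the local one. Otherwise the residue and a timestamp are recorded for the tx_drain watchdog below. A sketch with the control flow around the matched lines reconstructed as an assumption:

    static bool udma_is_desc_really_done(struct udma_chan *uc, struct udma_desc *d)
    {
            u32 peer_bcnt, bcnt;

            /* Only TX towards PDMA endpoints can lag behind */
            if (uc->config.ep_type == PSIL_EP_NATIVE ||
                uc->config.dir != DMA_MEM_TO_DEV)
                    return true;

            peer_bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_PEER_BCNT_REG);
            bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG);

            /* Incomplete: remember current residue and time stamp */
            if (peer_bcnt < bcnt) {
                    uc->tx_drain.residue = bcnt - peer_bcnt;
                    uc->tx_drain.tstamp = ktime_get();
                    return false;
            }

            return true;
    }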
959 struct udma_chan *uc = container_of(work, typeof(*uc), in udma_check_tx_completion() local
967 if (uc->desc) { in udma_check_tx_completion()
969 residue_diff = uc->tx_drain.residue; in udma_check_tx_completion()
970 time_diff = uc->tx_drain.tstamp; in udma_check_tx_completion()
975 desc_done = udma_is_desc_really_done(uc, uc->desc); in udma_check_tx_completion()
983 time_diff = ktime_sub(uc->tx_drain.tstamp, in udma_check_tx_completion()
985 residue_diff -= uc->tx_drain.residue; in udma_check_tx_completion()
994 uc->tx_drain.residue; in udma_check_tx_completion()
997 schedule_delayed_work(&uc->tx_drain.work, HZ); in udma_check_tx_completion()
1006 if (uc->desc) { in udma_check_tx_completion()
1007 struct udma_desc *d = uc->desc; in udma_check_tx_completion()
1009 uc->bcnt += d->residue; in udma_check_tx_completion()
1010 udma_start(uc); in udma_check_tx_completion()
1021 struct udma_chan *uc = data; in udma_ring_irq_handler() local
1026 if (udma_pop_from_ring(uc, &paddr) || !paddr) in udma_ring_irq_handler()
1029 spin_lock_irqsave(&uc->vc.lock, flags); in udma_ring_irq_handler()
1033 complete_all(&uc->teardown_completed); in udma_ring_irq_handler()
1035 if (uc->terminated_desc) { in udma_ring_irq_handler()
1036 udma_desc_free(&uc->terminated_desc->vd); in udma_ring_irq_handler()
1037 uc->terminated_desc = NULL; in udma_ring_irq_handler()
1040 if (!uc->desc) in udma_ring_irq_handler()
1041 udma_start(uc); in udma_ring_irq_handler()
1046 d = udma_udma_desc_from_paddr(uc, paddr); in udma_ring_irq_handler()
1052 dev_err(uc->ud->dev, "not matching descriptors!\n"); in udma_ring_irq_handler()
1056 if (d == uc->desc) { in udma_ring_irq_handler()
1058 if (uc->cyclic) { in udma_ring_irq_handler()
1059 udma_cyclic_packet_elapsed(uc); in udma_ring_irq_handler()
1062 if (udma_is_desc_really_done(uc, d)) { in udma_ring_irq_handler()
1063 uc->bcnt += d->residue; in udma_ring_irq_handler()
1064 udma_start(uc); in udma_ring_irq_handler()
1067 schedule_delayed_work(&uc->tx_drain.work, in udma_ring_irq_handler()
1080 spin_unlock_irqrestore(&uc->vc.lock, flags); in udma_ring_irq_handler()
1087 struct udma_chan *uc = data; in udma_udma_irq_handler() local
1091 spin_lock_irqsave(&uc->vc.lock, flags); in udma_udma_irq_handler()
1092 d = uc->desc; in udma_udma_irq_handler()
1096 if (uc->cyclic) { in udma_udma_irq_handler()
1100 uc->bcnt += d->residue; in udma_udma_irq_handler()
1101 udma_start(uc); in udma_udma_irq_handler()
1106 spin_unlock_irqrestore(&uc->vc.lock, flags); in udma_udma_irq_handler()
1237 static int udma_get_tchan(struct udma_chan *uc) in udma_get_tchan() argument
1239 struct udma_dev *ud = uc->ud; in udma_get_tchan()
1241 if (uc->tchan) { in udma_get_tchan()
1243 uc->id, uc->tchan->id); in udma_get_tchan()
1247 uc->tchan = __udma_reserve_tchan(ud, uc->config.channel_tpl, -1); in udma_get_tchan()
1249 return PTR_ERR_OR_ZERO(uc->tchan); in udma_get_tchan()
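udma_get_tchan() and udma_get_rchan() are mirror images: succeed quietly if the half is already held, otherwise reserve one matching the channel's throughput level. The tchan side, with the debug message text reconstructed as an assumption:

    static int udma_get_tchan(struct udma_chan *uc)
    {
            struct udma_dev *ud = uc->ud;

            if (uc->tchan) {
                    dev_dbg(ud->dev, "chan%d: already have tchan%d allocated\n",
                            uc->id, uc->tchan->id);
                    return 0;
            }

            /* -1: any free channel of the requested throughput level */
            uc->tchan = __udma_reserve_tchan(ud, uc->config.channel_tpl, -1);

            return PTR_ERR_OR_ZERO(uc->tchan);
    }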
1252 static int udma_get_rchan(struct udma_chan *uc) in udma_get_rchan() argument
1254 struct udma_dev *ud = uc->ud; in udma_get_rchan()
1256 if (uc->rchan) { in udma_get_rchan()
1258 uc->id, uc->rchan->id); in udma_get_rchan()
1262 uc->rchan = __udma_reserve_rchan(ud, uc->config.channel_tpl, -1); in udma_get_rchan()
1264 return PTR_ERR_OR_ZERO(uc->rchan); in udma_get_rchan()
1267 static int udma_get_chan_pair(struct udma_chan *uc) in udma_get_chan_pair() argument
1269 struct udma_dev *ud = uc->ud; in udma_get_chan_pair()
1272 if ((uc->tchan && uc->rchan) && uc->tchan->id == uc->rchan->id) { in udma_get_chan_pair()
1274 uc->id, uc->tchan->id); in udma_get_chan_pair()
1278 if (uc->tchan) { in udma_get_chan_pair()
1280 uc->id, uc->tchan->id); in udma_get_chan_pair()
1282 } else if (uc->rchan) { in udma_get_chan_pair()
1284 uc->id, uc->rchan->id); in udma_get_chan_pair()
1303 uc->tchan = &ud->tchans[chan_id]; in udma_get_chan_pair()
1304 uc->rchan = &ud->rchans[chan_id]; in udma_get_chan_pair()
1309 static int udma_get_rflow(struct udma_chan *uc, int flow_id) in udma_get_rflow() argument
1311 struct udma_dev *ud = uc->ud; in udma_get_rflow()
1313 if (!uc->rchan) { in udma_get_rflow()
1314 dev_err(ud->dev, "chan%d: does not have rchan??\n", uc->id); in udma_get_rflow()
1318 if (uc->rflow) { in udma_get_rflow()
1320 uc->id, uc->rflow->id); in udma_get_rflow()
1324 uc->rflow = __udma_get_rflow(ud, flow_id); in udma_get_rflow()
1326 return PTR_ERR_OR_ZERO(uc->rflow); in udma_get_rflow()
1329 static void udma_put_rchan(struct udma_chan *uc) in udma_put_rchan() argument
1331 struct udma_dev *ud = uc->ud; in udma_put_rchan()
1333 if (uc->rchan) { in udma_put_rchan()
1334 dev_dbg(ud->dev, "chan%d: put rchan%d\n", uc->id, in udma_put_rchan()
1335 uc->rchan->id); in udma_put_rchan()
1336 clear_bit(uc->rchan->id, ud->rchan_map); in udma_put_rchan()
1337 uc->rchan = NULL; in udma_put_rchan()
1341 static void udma_put_tchan(struct udma_chan *uc) in udma_put_tchan() argument
1343 struct udma_dev *ud = uc->ud; in udma_put_tchan()
1345 if (uc->tchan) { in udma_put_tchan()
1346 dev_dbg(ud->dev, "chan%d: put tchan%d\n", uc->id, in udma_put_tchan()
1347 uc->tchan->id); in udma_put_tchan()
1348 clear_bit(uc->tchan->id, ud->tchan_map); in udma_put_tchan()
1349 uc->tchan = NULL; in udma_put_tchan()
1353 static void udma_put_rflow(struct udma_chan *uc) in udma_put_rflow() argument
1355 struct udma_dev *ud = uc->ud; in udma_put_rflow()
1357 if (uc->rflow) { in udma_put_rflow()
1358 dev_dbg(ud->dev, "chan%d: put rflow%d\n", uc->id, in udma_put_rflow()
1359 uc->rflow->id); in udma_put_rflow()
1360 __udma_put_rflow(ud, uc->rflow); in udma_put_rflow()
1361 uc->rflow = NULL; in udma_put_rflow()
1365 static void udma_free_tx_resources(struct udma_chan *uc) in udma_free_tx_resources() argument
1367 if (!uc->tchan) in udma_free_tx_resources()
1370 k3_ringacc_ring_free(uc->tchan->t_ring); in udma_free_tx_resources()
1371 k3_ringacc_ring_free(uc->tchan->tc_ring); in udma_free_tx_resources()
1372 uc->tchan->t_ring = NULL; in udma_free_tx_resources()
1373 uc->tchan->tc_ring = NULL; in udma_free_tx_resources()
1375 udma_put_tchan(uc); in udma_free_tx_resources()
1378 static int udma_alloc_tx_resources(struct udma_chan *uc) in udma_alloc_tx_resources() argument
1381 struct udma_dev *ud = uc->ud; in udma_alloc_tx_resources()
1384 ret = udma_get_tchan(uc); in udma_alloc_tx_resources()
1388 ret = k3_ringacc_request_rings_pair(ud->ringacc, uc->tchan->id, -1, in udma_alloc_tx_resources()
1389 &uc->tchan->t_ring, in udma_alloc_tx_resources()
1390 &uc->tchan->tc_ring); in udma_alloc_tx_resources()
1401 ret = k3_ringacc_ring_cfg(uc->tchan->t_ring, &ring_cfg); in udma_alloc_tx_resources()
1402 ret |= k3_ringacc_ring_cfg(uc->tchan->tc_ring, &ring_cfg); in udma_alloc_tx_resources()
1410 k3_ringacc_ring_free(uc->tchan->tc_ring); in udma_alloc_tx_resources()
1411 uc->tchan->tc_ring = NULL; in udma_alloc_tx_resources()
1412 k3_ringacc_ring_free(uc->tchan->t_ring); in udma_alloc_tx_resources()
1413 uc->tchan->t_ring = NULL; in udma_alloc_tx_resources()
1415 udma_put_tchan(uc); in udma_alloc_tx_resources()
1420 static void udma_free_rx_resources(struct udma_chan *uc) in udma_free_rx_resources() argument
1422 if (!uc->rchan) in udma_free_rx_resources()
1425 if (uc->rflow) { in udma_free_rx_resources()
1426 struct udma_rflow *rflow = uc->rflow; in udma_free_rx_resources()
1433 udma_put_rflow(uc); in udma_free_rx_resources()
1436 udma_put_rchan(uc); in udma_free_rx_resources()
1439 static int udma_alloc_rx_resources(struct udma_chan *uc) in udma_alloc_rx_resources() argument
1441 struct udma_dev *ud = uc->ud; in udma_alloc_rx_resources()
1447 ret = udma_get_rchan(uc); in udma_alloc_rx_resources()
1452 if (uc->config.dir == DMA_MEM_TO_MEM) in udma_alloc_rx_resources()
1455 ret = udma_get_rflow(uc, uc->rchan->id); in udma_alloc_rx_resources()
1461 rflow = uc->rflow; in udma_alloc_rx_resources()
1462 fd_ring_id = ud->tchan_cnt + ud->echan_cnt + uc->rchan->id; in udma_alloc_rx_resources()
1472 if (uc->config.pkt_mode) in udma_alloc_rx_resources()
1495 udma_put_rflow(uc); in udma_alloc_rx_resources()
1497 udma_put_rchan(uc); in udma_alloc_rx_resources()
1523 static int udma_tisci_m2m_channel_config(struct udma_chan *uc) in udma_tisci_m2m_channel_config() argument
1525 struct udma_dev *ud = uc->ud; in udma_tisci_m2m_channel_config()
1528 struct udma_tchan *tchan = uc->tchan; in udma_tisci_m2m_channel_config()
1529 struct udma_rchan *rchan = uc->rchan; in udma_tisci_m2m_channel_config()
1566 static int udma_tisci_tx_channel_config(struct udma_chan *uc) in udma_tisci_tx_channel_config() argument
1568 struct udma_dev *ud = uc->ud; in udma_tisci_tx_channel_config()
1571 struct udma_tchan *tchan = uc->tchan; in udma_tisci_tx_channel_config()
1577 if (uc->config.pkt_mode) { in udma_tisci_tx_channel_config()
1579 fetch_size = cppi5_hdesc_calc_size(uc->config.needs_epib, in udma_tisci_tx_channel_config()
1580 uc->config.psd_size, 0); in udma_tisci_tx_channel_config()
1590 req_tx.tx_supr_tdpkt = uc->config.notdpkt; in udma_tisci_tx_channel_config()
1593 req_tx.tx_atype = uc->config.atype; in udma_tisci_tx_channel_config()
1602 static int udma_tisci_rx_channel_config(struct udma_chan *uc) in udma_tisci_rx_channel_config() argument
1604 struct udma_dev *ud = uc->ud; in udma_tisci_rx_channel_config()
1607 struct udma_rchan *rchan = uc->rchan; in udma_tisci_rx_channel_config()
1608 int fd_ring = k3_ringacc_get_ring_id(uc->rflow->fd_ring); in udma_tisci_rx_channel_config()
1609 int rx_ring = k3_ringacc_get_ring_id(uc->rflow->r_ring); in udma_tisci_rx_channel_config()
1615 if (uc->config.pkt_mode) { in udma_tisci_rx_channel_config()
1617 fetch_size = cppi5_hdesc_calc_size(uc->config.needs_epib, in udma_tisci_rx_channel_config()
1618 uc->config.psd_size, 0); in udma_tisci_rx_channel_config()
1630 req_rx.rx_atype = uc->config.atype; in udma_tisci_rx_channel_config()
1656 if (uc->config.needs_epib) in udma_tisci_rx_channel_config()
1660 if (uc->config.psd_size) in udma_tisci_rx_channel_config()
1685 struct udma_chan *uc = to_udma_chan(chan); in udma_alloc_chan_resources() local
1692 if (uc->config.pkt_mode || uc->config.dir == DMA_MEM_TO_MEM) { in udma_alloc_chan_resources()
1693 uc->use_dma_pool = true; in udma_alloc_chan_resources()
1695 if (uc->config.dir == DMA_MEM_TO_MEM) { in udma_alloc_chan_resources()
1696 uc->config.hdesc_size = cppi5_trdesc_calc_size( in udma_alloc_chan_resources()
1698 uc->config.pkt_mode = false; in udma_alloc_chan_resources()
1702 if (uc->use_dma_pool) { in udma_alloc_chan_resources()
1703 uc->hdesc_pool = dma_pool_create(uc->name, ud->ddev.dev, in udma_alloc_chan_resources()
1704 uc->config.hdesc_size, in udma_alloc_chan_resources()
1707 if (!uc->hdesc_pool) { in udma_alloc_chan_resources()
1710 uc->use_dma_pool = false; in udma_alloc_chan_resources()
1720 reinit_completion(&uc->teardown_completed); in udma_alloc_chan_resources()
1721 complete_all(&uc->teardown_completed); in udma_alloc_chan_resources()
1722 uc->state = UDMA_CHAN_IS_IDLE; in udma_alloc_chan_resources()
1724 switch (uc->config.dir) { in udma_alloc_chan_resources()
1727 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-MEM\n", __func__, in udma_alloc_chan_resources()
1728 uc->id); in udma_alloc_chan_resources()
1730 ret = udma_get_chan_pair(uc); in udma_alloc_chan_resources()
1734 ret = udma_alloc_tx_resources(uc); in udma_alloc_chan_resources()
1736 udma_put_rchan(uc); in udma_alloc_chan_resources()
1740 ret = udma_alloc_rx_resources(uc); in udma_alloc_chan_resources()
1742 udma_free_tx_resources(uc); in udma_alloc_chan_resources()
1746 uc->config.src_thread = ud->psil_base + uc->tchan->id; in udma_alloc_chan_resources()
1747 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in udma_alloc_chan_resources()
1750 irq_ring = uc->tchan->tc_ring; in udma_alloc_chan_resources()
1751 irq_udma_idx = uc->tchan->id; in udma_alloc_chan_resources()
1753 ret = udma_tisci_m2m_channel_config(uc); in udma_alloc_chan_resources()
1757 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-DEV\n", __func__, in udma_alloc_chan_resources()
1758 uc->id); in udma_alloc_chan_resources()
1760 ret = udma_alloc_tx_resources(uc); in udma_alloc_chan_resources()
1764 uc->config.src_thread = ud->psil_base + uc->tchan->id; in udma_alloc_chan_resources()
1765 uc->config.dst_thread = uc->config.remote_thread_id; in udma_alloc_chan_resources()
1766 uc->config.dst_thread |= K3_PSIL_DST_THREAD_ID_OFFSET; in udma_alloc_chan_resources()
1768 irq_ring = uc->tchan->tc_ring; in udma_alloc_chan_resources()
1769 irq_udma_idx = uc->tchan->id; in udma_alloc_chan_resources()
1771 ret = udma_tisci_tx_channel_config(uc); in udma_alloc_chan_resources()
1775 dev_dbg(uc->ud->dev, "%s: chan%d as DEV-to-MEM\n", __func__, in udma_alloc_chan_resources()
1776 uc->id); in udma_alloc_chan_resources()
1778 ret = udma_alloc_rx_resources(uc); in udma_alloc_chan_resources()
1782 uc->config.src_thread = uc->config.remote_thread_id; in udma_alloc_chan_resources()
1783 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in udma_alloc_chan_resources()
1786 irq_ring = uc->rflow->r_ring; in udma_alloc_chan_resources()
1787 irq_udma_idx = soc_data->rchan_oes_offset + uc->rchan->id; in udma_alloc_chan_resources()
1789 ret = udma_tisci_rx_channel_config(uc); in udma_alloc_chan_resources()
1793 dev_err(uc->ud->dev, "%s: chan%d invalid direction (%u)\n", in udma_alloc_chan_resources()
1794 __func__, uc->id, uc->config.dir); in udma_alloc_chan_resources()
1804 if (udma_is_chan_running(uc)) { in udma_alloc_chan_resources()
1805 dev_warn(ud->dev, "chan%d: is running!\n", uc->id); in udma_alloc_chan_resources()
1806 udma_reset_chan(uc, false); in udma_alloc_chan_resources()
1807 if (udma_is_chan_running(uc)) { in udma_alloc_chan_resources()
1808 dev_err(ud->dev, "chan%d: won't stop!\n", uc->id); in udma_alloc_chan_resources()
1815 ret = navss_psil_pair(ud, uc->config.src_thread, uc->config.dst_thread); in udma_alloc_chan_resources()
1818 uc->config.src_thread, uc->config.dst_thread); in udma_alloc_chan_resources()
1822 uc->psil_paired = true; in udma_alloc_chan_resources()
1824 uc->irq_num_ring = k3_ringacc_get_ring_irq_num(irq_ring); in udma_alloc_chan_resources()
1825 if (uc->irq_num_ring <= 0) { in udma_alloc_chan_resources()
1832 ret = request_irq(uc->irq_num_ring, udma_ring_irq_handler, in udma_alloc_chan_resources()
1833 IRQF_TRIGGER_HIGH, uc->name, uc); in udma_alloc_chan_resources()
1835 dev_err(ud->dev, "chan%d: ring irq request failed\n", uc->id); in udma_alloc_chan_resources()
1840 if (is_slave_direction(uc->config.dir) && !uc->config.pkt_mode) { in udma_alloc_chan_resources()
1841 uc->irq_num_udma = ti_sci_inta_msi_get_virq(ud->dev, in udma_alloc_chan_resources()
1843 if (uc->irq_num_udma <= 0) { in udma_alloc_chan_resources()
1846 free_irq(uc->irq_num_ring, uc); in udma_alloc_chan_resources()
1851 ret = request_irq(uc->irq_num_udma, udma_udma_irq_handler, 0, in udma_alloc_chan_resources()
1852 uc->name, uc); in udma_alloc_chan_resources()
1855 uc->id); in udma_alloc_chan_resources()
1856 free_irq(uc->irq_num_ring, uc); in udma_alloc_chan_resources()
1860 uc->irq_num_udma = 0; in udma_alloc_chan_resources()
1863 udma_reset_rings(uc); in udma_alloc_chan_resources()
1868 uc->irq_num_ring = 0; in udma_alloc_chan_resources()
1869 uc->irq_num_udma = 0; in udma_alloc_chan_resources()
1871 navss_psil_unpair(ud, uc->config.src_thread, uc->config.dst_thread); in udma_alloc_chan_resources()
1872 uc->psil_paired = false; in udma_alloc_chan_resources()
1874 udma_free_tx_resources(uc); in udma_alloc_chan_resources()
1875 udma_free_rx_resources(uc); in udma_alloc_chan_resources()
1877 udma_reset_uchan(uc); in udma_alloc_chan_resources()
1879 if (uc->use_dma_pool) { in udma_alloc_chan_resources()
1880 dma_pool_destroy(uc->hdesc_pool); in udma_alloc_chan_resources()
1881 uc->use_dma_pool = false; in udma_alloc_chan_resources()
1890 struct udma_chan *uc = to_udma_chan(chan); in udma_slave_config() local
1892 memcpy(&uc->cfg, cfg, sizeof(uc->cfg)); in udma_slave_config()
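udma_slave_config() is deliberately trivial: it only caches the dmaengine slave configuration, which the prep callbacks consume later. Completed with an assumed return value:

    static int udma_slave_config(struct dma_chan *chan,
                                 struct dma_slave_config *cfg)
    {
            struct udma_chan *uc = to_udma_chan(chan);

            /* Just stash the config; applied at prepare time */
            memcpy(&uc->cfg, cfg, sizeof(uc->cfg));

            return 0;
    }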
1897 static struct udma_desc *udma_alloc_tr_desc(struct udma_chan *uc, in udma_alloc_tr_desc() argument
1914 dev_err(uc->ud->dev, "Unsupported TR size of %zu\n", tr_size); in udma_alloc_tr_desc()
1929 if (uc->use_dma_pool) { in udma_alloc_tr_desc()
1930 hwdesc->cppi5_desc_size = uc->config.hdesc_size; in udma_alloc_tr_desc()
1931 hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool, in udma_alloc_tr_desc()
1938 uc->ud->desc_align); in udma_alloc_tr_desc()
1939 hwdesc->cppi5_desc_vaddr = dma_alloc_coherent(uc->ud->dev, in udma_alloc_tr_desc()
1957 if (uc->cyclic) in udma_alloc_tr_desc()
1961 ring_id = k3_ringacc_get_ring_id(uc->rflow->r_ring); in udma_alloc_tr_desc()
1963 ring_id = k3_ringacc_get_ring_id(uc->tchan->tc_ring); in udma_alloc_tr_desc()
1966 cppi5_desc_set_pktids(tr_desc, uc->id, in udma_alloc_tr_desc()
2019 udma_prep_slave_sg_tr(struct udma_chan *uc, struct scatterlist *sgl, in udma_prep_slave_sg_tr() argument
2042 d = udma_alloc_tr_desc(uc, tr_size, num_tr, dir); in udma_prep_slave_sg_tr()
2055 dev_err(uc->ud->dev, "size %u is not supported\n", in udma_prep_slave_sg_tr()
2057 udma_free_hwdesc(uc, d); in udma_prep_slave_sg_tr()
2095 static int udma_configure_statictr(struct udma_chan *uc, struct udma_desc *d, in udma_configure_statictr() argument
2099 if (uc->config.ep_type != PSIL_EP_PDMA_XY) in udma_configure_statictr()
2131 if (uc->config.pkt_mode || !uc->cyclic) { in udma_configure_statictr()
2134 if (uc->cyclic) in udma_configure_statictr()
2139 if (uc->config.dir == DMA_DEV_TO_MEM && in udma_configure_statictr()
2140 d->static_tr.bstcnt > uc->ud->match_data->statictr_z_mask) in udma_configure_statictr()
2150 udma_prep_slave_sg_pkt(struct udma_chan *uc, struct scatterlist *sgl, in udma_prep_slave_sg_pkt() argument
2168 ring_id = k3_ringacc_get_ring_id(uc->rflow->r_ring); in udma_prep_slave_sg_pkt()
2170 ring_id = k3_ringacc_get_ring_id(uc->tchan->tc_ring); in udma_prep_slave_sg_pkt()
2178 hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool, in udma_prep_slave_sg_pkt()
2182 dev_err(uc->ud->dev, in udma_prep_slave_sg_pkt()
2185 udma_free_hwdesc(uc, d); in udma_prep_slave_sg_pkt()
2191 hwdesc->cppi5_desc_size = uc->config.hdesc_size; in udma_prep_slave_sg_pkt()
2197 cppi5_desc_set_pktids(&desc->hdr, uc->id, in udma_prep_slave_sg_pkt()
2218 dev_err(uc->ud->dev, in udma_prep_slave_sg_pkt()
2221 udma_free_hwdesc(uc, d); in udma_prep_slave_sg_pkt()
2236 struct udma_chan *uc = to_udma_chan(desc->chan); in udma_attach_metadata() local
2241 if (!uc->config.pkt_mode || !uc->config.metadata_size) in udma_attach_metadata()
2244 if (!data || len > uc->config.metadata_size) in udma_attach_metadata()
2247 if (uc->config.needs_epib && len < CPPI5_INFO0_HDESC_EPIB_SIZE) in udma_attach_metadata()
2254 if (uc->config.needs_epib) in udma_attach_metadata()
2259 if (uc->config.needs_epib) in udma_attach_metadata()
2272 struct udma_chan *uc = to_udma_chan(desc->chan); in udma_get_metadata_ptr() local
2275 if (!uc->config.pkt_mode || !uc->config.metadata_size) in udma_get_metadata_ptr()
2280 *max_len = uc->config.metadata_size; in udma_get_metadata_ptr()
2293 struct udma_chan *uc = to_udma_chan(desc->chan); in udma_set_metadata_len() local
2298 if (!uc->config.pkt_mode || !uc->config.metadata_size) in udma_set_metadata_len()
2301 if (payload_len > uc->config.metadata_size) in udma_set_metadata_len()
2304 if (uc->config.needs_epib && payload_len < CPPI5_INFO0_HDESC_EPIB_SIZE) in udma_set_metadata_len()
2309 if (uc->config.needs_epib) { in udma_set_metadata_len()
2331 struct udma_chan *uc = to_udma_chan(chan); in udma_prep_slave_sg() local
2336 if (dir != uc->config.dir) { in udma_prep_slave_sg()
2339 __func__, uc->id, in udma_prep_slave_sg()
2340 dmaengine_get_direction_text(uc->config.dir), in udma_prep_slave_sg()
2346 dev_width = uc->cfg.src_addr_width; in udma_prep_slave_sg()
2347 burst = uc->cfg.src_maxburst; in udma_prep_slave_sg()
2349 dev_width = uc->cfg.dst_addr_width; in udma_prep_slave_sg()
2350 burst = uc->cfg.dst_maxburst; in udma_prep_slave_sg()
2359 if (uc->config.pkt_mode) in udma_prep_slave_sg()
2360 d = udma_prep_slave_sg_pkt(uc, sgl, sglen, dir, tx_flags, in udma_prep_slave_sg()
2363 d = udma_prep_slave_sg_tr(uc, sgl, sglen, dir, tx_flags, in udma_prep_slave_sg()
2374 if (udma_configure_statictr(uc, d, dev_width, burst)) { in udma_prep_slave_sg()
2375 dev_err(uc->ud->dev, in udma_prep_slave_sg()
2379 udma_free_hwdesc(uc, d); in udma_prep_slave_sg()
2384 if (uc->config.metadata_size) in udma_prep_slave_sg()
2387 return vchan_tx_prep(&uc->vc, &d->vd, tx_flags); in udma_prep_slave_sg()
2391 udma_prep_dma_cyclic_tr(struct udma_chan *uc, dma_addr_t buf_addr, in udma_prep_dma_cyclic_tr() argument
2406 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_dma_cyclic_tr()
2413 d = udma_alloc_tr_desc(uc, tr_size, periods * num_tr, dir); in udma_prep_dma_cyclic_tr()
2456 udma_prep_dma_cyclic_pkt(struct udma_chan *uc, dma_addr_t buf_addr, in udma_prep_dma_cyclic_pkt() argument
2479 ring_id = k3_ringacc_get_ring_id(uc->rflow->r_ring); in udma_prep_dma_cyclic_pkt()
2481 ring_id = k3_ringacc_get_ring_id(uc->tchan->tc_ring); in udma_prep_dma_cyclic_pkt()
2488 hwdesc->cppi5_desc_vaddr = dma_pool_zalloc(uc->hdesc_pool, in udma_prep_dma_cyclic_pkt()
2492 dev_err(uc->ud->dev, in udma_prep_dma_cyclic_pkt()
2495 udma_free_hwdesc(uc, d); in udma_prep_dma_cyclic_pkt()
2500 hwdesc->cppi5_desc_size = uc->config.hdesc_size; in udma_prep_dma_cyclic_pkt()
2507 cppi5_desc_set_pktids(&h_desc->hdr, uc->id, in udma_prep_dma_cyclic_pkt()
2525 struct udma_chan *uc = to_udma_chan(chan); in udma_prep_dma_cyclic() local
2530 if (dir != uc->config.dir) { in udma_prep_dma_cyclic()
2533 __func__, uc->id, in udma_prep_dma_cyclic()
2534 dmaengine_get_direction_text(uc->config.dir), in udma_prep_dma_cyclic()
2539 uc->cyclic = true; in udma_prep_dma_cyclic()
2542 dev_width = uc->cfg.src_addr_width; in udma_prep_dma_cyclic()
2543 burst = uc->cfg.src_maxburst; in udma_prep_dma_cyclic()
2545 dev_width = uc->cfg.dst_addr_width; in udma_prep_dma_cyclic()
2546 burst = uc->cfg.dst_maxburst; in udma_prep_dma_cyclic()
2548 dev_err(uc->ud->dev, "%s: bad direction?\n", __func__); in udma_prep_dma_cyclic()
2555 if (uc->config.pkt_mode) in udma_prep_dma_cyclic()
2556 d = udma_prep_dma_cyclic_pkt(uc, buf_addr, buf_len, period_len, in udma_prep_dma_cyclic()
2559 d = udma_prep_dma_cyclic_tr(uc, buf_addr, buf_len, period_len, in udma_prep_dma_cyclic()
2571 if (udma_configure_statictr(uc, d, dev_width, burst)) { in udma_prep_dma_cyclic()
2572 dev_err(uc->ud->dev, in udma_prep_dma_cyclic()
2576 udma_free_hwdesc(uc, d); in udma_prep_dma_cyclic()
2581 if (uc->config.metadata_size) in udma_prep_dma_cyclic()
2584 return vchan_tx_prep(&uc->vc, &d->vd, flags); in udma_prep_dma_cyclic()
2591 struct udma_chan *uc = to_udma_chan(chan); in udma_prep_dma_memcpy() local
2598 if (uc->config.dir != DMA_MEM_TO_MEM) { in udma_prep_dma_memcpy()
2601 __func__, uc->id, in udma_prep_dma_memcpy()
2602 dmaengine_get_direction_text(uc->config.dir), in udma_prep_dma_memcpy()
2610 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_dma_memcpy()
2615 d = udma_alloc_tr_desc(uc, tr_size, num_tr, DMA_MEM_TO_MEM); in udma_prep_dma_memcpy()
2665 if (uc->config.metadata_size) in udma_prep_dma_memcpy()
2668 return vchan_tx_prep(&uc->vc, &d->vd, tx_flags); in udma_prep_dma_memcpy()
2673 struct udma_chan *uc = to_udma_chan(chan); in udma_issue_pending() local
2676 spin_lock_irqsave(&uc->vc.lock, flags); in udma_issue_pending()
2679 if (vchan_issue_pending(&uc->vc) && !uc->desc) { in udma_issue_pending()
2685 if (!(uc->state == UDMA_CHAN_IS_TERMINATING && in udma_issue_pending()
2686 udma_is_chan_running(uc))) in udma_issue_pending()
2687 udma_start(uc); in udma_issue_pending()
2690 spin_unlock_irqrestore(&uc->vc.lock, flags); in udma_issue_pending()
2697 struct udma_chan *uc = to_udma_chan(chan); in udma_tx_status() local
2701 spin_lock_irqsave(&uc->vc.lock, flags); in udma_tx_status()
2705 if (!udma_is_chan_running(uc)) in udma_tx_status()
2708 if (ret == DMA_IN_PROGRESS && udma_is_chan_paused(uc)) in udma_tx_status()
2714 if (uc->desc && uc->desc->vd.tx.cookie == cookie) { in udma_tx_status()
2717 u32 residue = uc->desc->residue; in udma_tx_status()
2720 if (uc->desc->dir == DMA_MEM_TO_DEV) { in udma_tx_status()
2721 bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_SBCNT_REG); in udma_tx_status()
2723 if (uc->config.ep_type != PSIL_EP_NATIVE) { in udma_tx_status()
2724 peer_bcnt = udma_tchanrt_read(uc, in udma_tx_status()
2730 } else if (uc->desc->dir == DMA_DEV_TO_MEM) { in udma_tx_status()
2731 bcnt = udma_rchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG); in udma_tx_status()
2733 if (uc->config.ep_type != PSIL_EP_NATIVE) { in udma_tx_status()
2734 peer_bcnt = udma_rchanrt_read(uc, in udma_tx_status()
2741 bcnt = udma_tchanrt_read(uc, UDMA_CHAN_RT_BCNT_REG); in udma_tx_status()
2744 bcnt -= uc->bcnt; in udma_tx_status()
2745 if (bcnt && !(bcnt % uc->desc->residue)) in udma_tx_status()
2748 residue -= bcnt % uc->desc->residue; in udma_tx_status()
2750 if (!residue && (uc->config.dir == DMA_DEV_TO_MEM || !delay)) { in udma_tx_status()
2763 spin_unlock_irqrestore(&uc->vc.lock, flags); in udma_tx_status()
2769 struct udma_chan *uc = to_udma_chan(chan); in udma_pause() local
2772 switch (uc->config.dir) { in udma_pause()
2774 udma_rchanrt_update_bits(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_pause()
2779 udma_tchanrt_update_bits(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_pause()
2784 udma_tchanrt_update_bits(uc, UDMA_CHAN_RT_CTL_REG, in udma_pause()
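udma_pause() and udma_resume() share the direction switch above: set or clear the PAUSE bit in the peer RT_EN register for slave directions, or in the local CTL register for MEM_TO_MEM. A sketch of the pause side; the mask names are taken from the driver's headers and hedged as assumptions:

    static int udma_pause(struct dma_chan *chan)
    {
            struct udma_chan *uc = to_udma_chan(chan);

            switch (uc->config.dir) {
            case DMA_DEV_TO_MEM:
                    udma_rchanrt_update_bits(uc, UDMA_CHAN_RT_PEER_RT_EN_REG,
                                             UDMA_PEER_RT_EN_PAUSE,
                                             UDMA_PEER_RT_EN_PAUSE);
                    break;
            case DMA_MEM_TO_DEV:
                    udma_tchanrt_update_bits(uc, UDMA_CHAN_RT_PEER_RT_EN_REG,
                                             UDMA_PEER_RT_EN_PAUSE,
                                             UDMA_PEER_RT_EN_PAUSE);
                    break;
            case DMA_MEM_TO_MEM:
                    udma_tchanrt_update_bits(uc, UDMA_CHAN_RT_CTL_REG,
                                             UDMA_CHAN_RT_CTL_PAUSE,
                                             UDMA_CHAN_RT_CTL_PAUSE);
                    break;
            default:
                    return -EINVAL;
            }

            return 0;
    }

udma_resume() (lines 2797-2811 below) runs the same switch with a zero value to clear the bit.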
2797 struct udma_chan *uc = to_udma_chan(chan); in udma_resume() local
2800 switch (uc->config.dir) { in udma_resume()
2802 udma_rchanrt_update_bits(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_resume()
2807 udma_tchanrt_update_bits(uc, UDMA_CHAN_RT_PEER_RT_EN_REG, in udma_resume()
2811 udma_tchanrt_update_bits(uc, UDMA_CHAN_RT_CTL_REG, in udma_resume()
2823 struct udma_chan *uc = to_udma_chan(chan); in udma_terminate_all() local
2827 spin_lock_irqsave(&uc->vc.lock, flags); in udma_terminate_all()
2829 if (udma_is_chan_running(uc)) in udma_terminate_all()
2830 udma_stop(uc); in udma_terminate_all()
2832 if (uc->desc) { in udma_terminate_all()
2833 uc->terminated_desc = uc->desc; in udma_terminate_all()
2834 uc->desc = NULL; in udma_terminate_all()
2835 uc->terminated_desc->terminated = true; in udma_terminate_all()
2836 cancel_delayed_work(&uc->tx_drain.work); in udma_terminate_all()
2839 uc->paused = false; in udma_terminate_all()
2841 vchan_get_all_descriptors(&uc->vc, &head); in udma_terminate_all()
2842 spin_unlock_irqrestore(&uc->vc.lock, flags); in udma_terminate_all()
2843 vchan_dma_desc_free_list(&uc->vc, &head); in udma_terminate_all()
2850 struct udma_chan *uc = to_udma_chan(chan); in udma_synchronize() local
2853 vchan_synchronize(&uc->vc); in udma_synchronize()
2855 if (uc->state == UDMA_CHAN_IS_TERMINATING) { in udma_synchronize()
2856 timeout = wait_for_completion_timeout(&uc->teardown_completed, in udma_synchronize()
2859 dev_warn(uc->ud->dev, "chan%d teardown timeout!\n", in udma_synchronize()
2860 uc->id); in udma_synchronize()
2861 udma_dump_chan_stdata(uc); in udma_synchronize()
2862 udma_reset_chan(uc, true); in udma_synchronize()
2866 udma_reset_chan(uc, false); in udma_synchronize()
2867 if (udma_is_chan_running(uc)) in udma_synchronize()
2868 dev_warn(uc->ud->dev, "chan%d refused to stop!\n", uc->id); in udma_synchronize()
2870 cancel_delayed_work_sync(&uc->tx_drain.work); in udma_synchronize()
2871 udma_reset_rings(uc); in udma_synchronize()
2878 struct udma_chan *uc = to_udma_chan(&vc->chan); in udma_desc_pre_callback() local
2887 udma_fetch_epib(uc, d); in udma_desc_pre_callback()
2949 struct udma_chan *uc = to_udma_chan(chan); in udma_free_chan_resources() local
2953 if (uc->terminated_desc) { in udma_free_chan_resources()
2954 udma_reset_chan(uc, false); in udma_free_chan_resources()
2955 udma_reset_rings(uc); in udma_free_chan_resources()
2958 cancel_delayed_work_sync(&uc->tx_drain.work); in udma_free_chan_resources()
2960 if (uc->irq_num_ring > 0) { in udma_free_chan_resources()
2961 free_irq(uc->irq_num_ring, uc); in udma_free_chan_resources()
2963 uc->irq_num_ring = 0; in udma_free_chan_resources()
2965 if (uc->irq_num_udma > 0) { in udma_free_chan_resources()
2966 free_irq(uc->irq_num_udma, uc); in udma_free_chan_resources()
2968 uc->irq_num_udma = 0; in udma_free_chan_resources()
2972 if (uc->psil_paired) { in udma_free_chan_resources()
2973 navss_psil_unpair(ud, uc->config.src_thread, in udma_free_chan_resources()
2974 uc->config.dst_thread); in udma_free_chan_resources()
2975 uc->psil_paired = false; in udma_free_chan_resources()
2978 vchan_free_chan_resources(&uc->vc); in udma_free_chan_resources()
2979 tasklet_kill(&uc->vc.task); in udma_free_chan_resources()
2981 udma_free_tx_resources(uc); in udma_free_chan_resources()
2982 udma_free_rx_resources(uc); in udma_free_chan_resources()
2983 udma_reset_uchan(uc); in udma_free_chan_resources()
2985 if (uc->use_dma_pool) { in udma_free_chan_resources()
2986 dma_pool_destroy(uc->hdesc_pool); in udma_free_chan_resources()
2987 uc->use_dma_pool = false; in udma_free_chan_resources()
3003 struct udma_chan *uc; in udma_dma_filter_fn() local
3009 uc = to_udma_chan(chan); in udma_dma_filter_fn()
3010 ucc = &uc->config; in udma_dma_filter_fn()
3011 ud = uc->ud; in udma_dma_filter_fn()
3062 dev_dbg(ud->dev, "chan%d: Remote thread: 0x%04x (%s)\n", uc->id, in udma_dma_filter_fn()
3441 struct udma_chan *uc = to_udma_chan(chan); in udma_dbg_summary_show_chan() local
3442 struct udma_chan_config *ucc = &uc->config; in udma_dbg_summary_show_chan()
3446 seq_printf(s, " (%s, ", dmaengine_get_direction_text(uc->config.dir)); in udma_dbg_summary_show_chan()
3448 switch (uc->config.dir) { in udma_dbg_summary_show_chan()
3450 seq_printf(s, "chan%d pair [0x%04x -> 0x%04x], ", uc->tchan->id, in udma_dbg_summary_show_chan()
3454 seq_printf(s, "rchan%d [0x%04x -> 0x%04x], ", uc->rchan->id, in udma_dbg_summary_show_chan()
3458 seq_printf(s, "tchan%d [0x%04x -> 0x%04x], ", uc->tchan->id, in udma_dbg_summary_show_chan()
3652 struct udma_chan *uc = &ud->channels[i]; in udma_probe() local
3654 uc->ud = ud; in udma_probe()
3655 uc->vc.desc_free = udma_desc_free; in udma_probe()
3656 uc->id = i; in udma_probe()
3657 uc->tchan = NULL; in udma_probe()
3658 uc->rchan = NULL; in udma_probe()
3659 uc->config.remote_thread_id = -1; in udma_probe()
3660 uc->config.dir = DMA_MEM_TO_MEM; in udma_probe()
3661 uc->name = devm_kasprintf(dev, GFP_KERNEL, "%s chan%d", in udma_probe()
3664 vchan_init(&uc->vc, &ud->ddev); in udma_probe()
3666 tasklet_setup(&uc->vc.task, udma_vchan_complete); in udma_probe()
3667 init_completion(&uc->teardown_completed); in udma_probe()
3668 INIT_DELAYED_WORK(&uc->tx_drain.work, udma_check_tx_completion); in udma_probe()