Lines matching full:ud (whole-identifier matches for "ud"):
280 struct udma_dev *ud; member
402 static int navss_psil_pair(struct udma_dev *ud, u32 src_thread, u32 dst_thread) in navss_psil_pair() argument
404 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in navss_psil_pair()
412 static int navss_psil_unpair(struct udma_dev *ud, u32 src_thread, in navss_psil_unpair() argument
415 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in navss_psil_unpair()
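The navss_psil_pair()/navss_psil_unpair() matches above only show the tisci_rm lookup; the remainder of the call goes through the TISCI resource-manager PSI-L ops. A minimal sketch of the pairing path, assuming the rm_psil_ops ->pair interface and the K3_PSIL_DST_THREAD_ID_OFFSET flag bit (neither shown in the matches above):

/*
 * Sketch only: pair a PSI-L source thread with a destination thread
 * through the TISCI resource manager. The ->pair op and the destination
 * flag bit are assumptions based on the k3-psil/ti_sci interfaces, not a
 * verbatim copy of the matched code.
 */
static int navss_psil_pair_sketch(struct udma_dev *ud, u32 src_thread,
				  u32 dst_thread)
{
	struct udma_tisci_rm *tisci_rm = &ud->tisci_rm;

	/* Destination threads carry a flag bit on top of the thread id. */
	dst_thread |= K3_PSIL_DST_THREAD_ID_OFFSET;

	return tisci_rm->tisci_psil_ops->pair(tisci_rm->tisci,
					      tisci_rm->tisci_navss_dev_id,
					      src_thread, dst_thread);
}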
470 struct device *dev = uc->ud->dev; in udma_dump_chan_stdata()
557 struct udma_dev *ud = container_of(work, typeof(*ud), purge_work); in udma_purge_desc_work() local
562 spin_lock_irqsave(&ud->lock, flags); in udma_purge_desc_work()
563 list_splice_tail_init(&ud->desc_to_purge, &head); in udma_purge_desc_work()
564 spin_unlock_irqrestore(&ud->lock, flags); in udma_purge_desc_work()
576 if (!list_empty(&ud->desc_to_purge)) in udma_purge_desc_work()
577 schedule_work(&ud->purge_work); in udma_purge_desc_work()
582 struct udma_dev *ud = to_udma_dev(vd->tx.chan->device); in udma_desc_free() local
596 spin_lock_irqsave(&ud->lock, flags); in udma_desc_free()
597 list_add_tail(&vd->node, &ud->desc_to_purge); in udma_desc_free()
598 spin_unlock_irqrestore(&ud->lock, flags); in udma_desc_free()
600 schedule_work(&ud->purge_work); in udma_desc_free()
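The purge_work/desc_to_purge matches above (lines 557-600) form a deferred-free pattern: udma_desc_free() parks descriptors on ud->desc_to_purge under ud->lock and kicks ud->purge_work, which drains the list outside the caller's context. A hedged sketch of the work handler; udma_release_one_desc() is a hypothetical stand-in for the per-descriptor teardown:

/*
 * Sketch of the deferred-free pattern matched above. Descriptors queued on
 * ud->desc_to_purge are drained from workqueue context; the list is spliced
 * under the lock so new entries can keep arriving while we free.
 */
static void udma_purge_desc_work_sketch(struct work_struct *work)
{
	struct udma_dev *ud = container_of(work, typeof(*ud), purge_work);
	struct virt_dma_desc *vd, *tmp;
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&ud->lock, flags);
	list_splice_tail_init(&ud->desc_to_purge, &head);
	spin_unlock_irqrestore(&ud->lock, flags);

	list_for_each_entry_safe(vd, tmp, &head, node) {
		list_del(&vd->node);
		udma_release_one_desc(vd);	/* hypothetical helper */
	}

	/* Entries queued while we were freeing: run the work once more. */
	if (!list_empty(&ud->desc_to_purge))
		schedule_work(&ud->purge_work);
}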
648 return uc->ud->rx_flush.hwdescs[uc->config.pkt_mode].cppi5_desc_paddr; in udma_get_rx_flush_hwdesc_paddr()
840 uc->ud->ddev.device_free_chan_resources(&uc->vc.chan); in udma_reset_chan()
844 ret = uc->ud->ddev.device_alloc_chan_resources(&uc->vc.chan); in udma_reset_chan()
867 if (uc->ud->match_data->type == DMA_TYPE_UDMA && ucc->pkt_mode && in udma_start_desc()
929 uc->ud->match_data; in udma_start()
1180 dev_err(uc->ud->dev, "not matching descriptors!\n"); in udma_ring_irq_handler()
1240 * @ud: UDMA device
1254 static int __udma_alloc_gp_rflow_range(struct udma_dev *ud, int from, int cnt) in __udma_alloc_gp_rflow_range() argument
1261 tmp_from = ud->rchan_cnt; in __udma_alloc_gp_rflow_range()
1263 if (tmp_from < ud->rchan_cnt) in __udma_alloc_gp_rflow_range()
1266 if (tmp_from + cnt > ud->rflow_cnt) in __udma_alloc_gp_rflow_range()
1269 bitmap_or(tmp, ud->rflow_gp_map, ud->rflow_gp_map_allocated, in __udma_alloc_gp_rflow_range()
1270 ud->rflow_cnt); in __udma_alloc_gp_rflow_range()
1273 ud->rflow_cnt, in __udma_alloc_gp_rflow_range()
1275 if (start >= ud->rflow_cnt) in __udma_alloc_gp_rflow_range()
1281 bitmap_set(ud->rflow_gp_map_allocated, start, cnt); in __udma_alloc_gp_rflow_range()
1285 static int __udma_free_gp_rflow_range(struct udma_dev *ud, int from, int cnt) in __udma_free_gp_rflow_range() argument
1287 if (from < ud->rchan_cnt) in __udma_free_gp_rflow_range()
1289 if (from + cnt > ud->rflow_cnt) in __udma_free_gp_rflow_range()
1292 bitmap_clear(ud->rflow_gp_map_allocated, from, cnt); in __udma_free_gp_rflow_range()
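__udma_alloc_gp_rflow_range() and __udma_free_gp_rflow_range(), matched above, hand out general-purpose receive flows using two bitmaps: rflow_gp_map (flows not usable as GP by this instance) and rflow_gp_map_allocated (flows already handed out). A sketch of the allocation step under those assumptions; MAX_RFLOWS is a placeholder compile-time bound, not the driver's macro:

#define MAX_RFLOWS 1024	/* placeholder bound for the sketch */

/*
 * Sketch: reserve a run of 'cnt' general-purpose rflows at or above 'from'.
 * Flows below rchan_cnt are the per-channel default flows and are never
 * handed out here.
 */
static int __udma_alloc_gp_rflow_range_sketch(struct udma_dev *ud,
					      int from, int cnt)
{
	DECLARE_BITMAP(tmp, MAX_RFLOWS);
	int start, tmp_from = from;

	if (tmp_from < 0)
		tmp_from = ud->rchan_cnt;
	if (tmp_from < ud->rchan_cnt)
		return -EINVAL;
	if (tmp_from + cnt > ud->rflow_cnt)
		return -EINVAL;

	/* A flow is busy if it is either unusable here or already taken. */
	bitmap_or(tmp, ud->rflow_gp_map, ud->rflow_gp_map_allocated,
		  ud->rflow_cnt);

	start = bitmap_find_next_zero_area(tmp, ud->rflow_cnt, tmp_from,
					   cnt, 0);
	if (start >= ud->rflow_cnt)
		return -ENOSPC;

	bitmap_set(ud->rflow_gp_map_allocated, start, cnt);
	return start;
}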
1296 static struct udma_rflow *__udma_get_rflow(struct udma_dev *ud, int id) in __udma_get_rflow() argument
1305 if (id < 0 || id >= ud->rflow_cnt) in __udma_get_rflow()
1308 if (test_bit(id, ud->rflow_in_use)) in __udma_get_rflow()
1311 if (ud->rflow_gp_map) { in __udma_get_rflow()
1313 if (!test_bit(id, ud->rflow_gp_map) && in __udma_get_rflow()
1314 !test_bit(id, ud->rflow_gp_map_allocated)) in __udma_get_rflow()
1318 dev_dbg(ud->dev, "get rflow%d\n", id); in __udma_get_rflow()
1319 set_bit(id, ud->rflow_in_use); in __udma_get_rflow()
1320 return &ud->rflows[id]; in __udma_get_rflow()
1323 static void __udma_put_rflow(struct udma_dev *ud, struct udma_rflow *rflow) in __udma_put_rflow() argument
1325 if (!test_bit(rflow->id, ud->rflow_in_use)) { in __udma_put_rflow()
1326 dev_err(ud->dev, "attempt to put unused rflow%d\n", rflow->id); in __udma_put_rflow()
1330 dev_dbg(ud->dev, "put rflow%d\n", rflow->id); in __udma_put_rflow()
1331 clear_bit(rflow->id, ud->rflow_in_use); in __udma_put_rflow()
1335 static struct udma_##res *__udma_reserve_##res(struct udma_dev *ud, \
1340 if (test_bit(id, ud->res##_map)) { \
1341 dev_err(ud->dev, "res##%d is in use\n", id); \
1347 if (tpl >= ud->res##_tpl.levels) \
1348 tpl = ud->res##_tpl.levels - 1; \
1350 start = ud->res##_tpl.start_idx[tpl]; \
1352 id = find_next_zero_bit(ud->res##_map, ud->res##_cnt, \
1354 if (id == ud->res##_cnt) { \
1359 set_bit(id, ud->res##_map); \
1360 return &ud->res##s[id]; \
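Lines 1335-1360 above come from a single token-pasting macro that stamps out one __udma_reserve_<res>() allocator per channel type (note that the ## in the "res##%d is in use" string is not pasted, since token pasting does not apply inside string literals, so the message prints "res##N" verbatim). A hand-expanded sketch for the tchan case, assuming the driver's enum udma_tp_level throughput-level type:

/*
 * Hand expansion (sketch) of the reservation macro matched above, shown for
 * tchan. 'tpl' selects the throughput level whose start index bounds the
 * search for a free channel.
 */
static struct udma_tchan *__udma_reserve_tchan_sketch(struct udma_dev *ud,
						      enum udma_tp_level tpl,
						      int id)
{
	if (id >= 0) {
		if (test_bit(id, ud->tchan_map)) {
			/* "##" survives: not expanded inside string literals */
			dev_err(ud->dev, "res##%d is in use\n", id);
			return ERR_PTR(-ENOENT);
		}
	} else {
		int start;

		if (tpl >= ud->tchan_tpl.levels)
			tpl = ud->tchan_tpl.levels - 1;

		start = ud->tchan_tpl.start_idx[tpl];

		id = find_next_zero_bit(ud->tchan_map, ud->tchan_cnt, start);
		if (id == ud->tchan_cnt)
			return ERR_PTR(-ENOENT);
	}

	set_bit(id, ud->tchan_map);
	return &ud->tchans[id];
}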
1369 struct udma_dev *ud = uc->ud; in bcdma_get_bchan() local
1374 dev_dbg(ud->dev, "chan%d: already have bchan%d allocated\n", in bcdma_get_bchan()
1386 tpl = ud->bchan_tpl.levels - 1; in bcdma_get_bchan()
1388 uc->bchan = __udma_reserve_bchan(ud, tpl, -1); in bcdma_get_bchan()
1402 struct udma_dev *ud = uc->ud; in udma_get_tchan() local
1406 dev_dbg(ud->dev, "chan%d: already have tchan%d allocated\n", in udma_get_tchan()
1416 uc->tchan = __udma_reserve_tchan(ud, uc->config.channel_tpl, in udma_get_tchan()
1424 if (ud->tflow_cnt) { in udma_get_tchan()
1433 if (test_bit(tflow_id, ud->tflow_map)) { in udma_get_tchan()
1434 dev_err(ud->dev, "tflow%d is in use\n", tflow_id); in udma_get_tchan()
1435 clear_bit(uc->tchan->id, ud->tchan_map); in udma_get_tchan()
1441 set_bit(tflow_id, ud->tflow_map); in udma_get_tchan()
1451 struct udma_dev *ud = uc->ud; in udma_get_rchan() local
1455 dev_dbg(ud->dev, "chan%d: already have rchan%d allocated\n", in udma_get_rchan()
1465 uc->rchan = __udma_reserve_rchan(ud, uc->config.channel_tpl, in udma_get_rchan()
1478 struct udma_dev *ud = uc->ud; in udma_get_chan_pair() local
1482 dev_info(ud->dev, "chan%d: already have %d pair allocated\n", in udma_get_chan_pair()
1488 dev_err(ud->dev, "chan%d: already have tchan%d allocated\n", in udma_get_chan_pair()
1492 dev_err(ud->dev, "chan%d: already have rchan%d allocated\n", in udma_get_chan_pair()
1498 end = min(ud->tchan_cnt, ud->rchan_cnt); in udma_get_chan_pair()
1503 chan_id = ud->tchan_tpl.start_idx[ud->tchan_tpl.levels - 1]; in udma_get_chan_pair()
1505 if (!test_bit(chan_id, ud->tchan_map) && in udma_get_chan_pair()
1506 !test_bit(chan_id, ud->rchan_map)) in udma_get_chan_pair()
1513 set_bit(chan_id, ud->tchan_map); in udma_get_chan_pair()
1514 set_bit(chan_id, ud->rchan_map); in udma_get_chan_pair()
1515 uc->tchan = &ud->tchans[chan_id]; in udma_get_chan_pair()
1516 uc->rchan = &ud->rchans[chan_id]; in udma_get_chan_pair()
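udma_get_chan_pair(), matched above, serves MEM_TO_MEM by reserving a TX and an RX channel with the same index, scanning from the start of the highest throughput level (the UDMAP channel TPL layout is symmetric between tchan and rchan). A compact sketch of that scan, under the same assumption:

/*
 * Sketch of the pairing scan: pick the first index free in both maps,
 * starting from the highest-TPL region, then claim it on both sides.
 */
static int udma_pick_chan_pair_sketch(struct udma_dev *ud)
{
	int end = min(ud->tchan_cnt, ud->rchan_cnt);
	int chan_id = ud->tchan_tpl.start_idx[ud->tchan_tpl.levels - 1];

	for (; chan_id < end; chan_id++)
		if (!test_bit(chan_id, ud->tchan_map) &&
		    !test_bit(chan_id, ud->rchan_map))
			break;

	if (chan_id == end)
		return -ENOENT;

	set_bit(chan_id, ud->tchan_map);
	set_bit(chan_id, ud->rchan_map);
	return chan_id;
}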
1526 struct udma_dev *ud = uc->ud; in udma_get_rflow() local
1530 dev_err(ud->dev, "chan%d: does not have rchan??\n", uc->id); in udma_get_rflow()
1535 dev_dbg(ud->dev, "chan%d: already have rflow%d allocated\n", in udma_get_rflow()
1540 uc->rflow = __udma_get_rflow(ud, flow_id); in udma_get_rflow()
1552 struct udma_dev *ud = uc->ud; in bcdma_put_bchan() local
1555 dev_dbg(ud->dev, "chan%d: put bchan%d\n", uc->id, in bcdma_put_bchan()
1557 clear_bit(uc->bchan->id, ud->bchan_map); in bcdma_put_bchan()
1565 struct udma_dev *ud = uc->ud; in udma_put_rchan() local
1568 dev_dbg(ud->dev, "chan%d: put rchan%d\n", uc->id, in udma_put_rchan()
1570 clear_bit(uc->rchan->id, ud->rchan_map); in udma_put_rchan()
1577 struct udma_dev *ud = uc->ud; in udma_put_tchan() local
1580 dev_dbg(ud->dev, "chan%d: put tchan%d\n", uc->id, in udma_put_tchan()
1582 clear_bit(uc->tchan->id, ud->tchan_map); in udma_put_tchan()
1585 clear_bit(uc->tchan->tflow_id, ud->tflow_map); in udma_put_tchan()
1593 struct udma_dev *ud = uc->ud; in udma_put_rflow() local
1596 dev_dbg(ud->dev, "chan%d: put rflow%d\n", uc->id, in udma_put_rflow()
1598 __udma_put_rflow(ud, uc->rflow); in udma_put_rflow()
1620 struct udma_dev *ud = uc->ud; in bcdma_alloc_bchan_resources() local
1627 ret = k3_ringacc_request_rings_pair(ud->ringacc, uc->bchan->id, -1, in bcdma_alloc_bchan_resources()
1640 k3_configure_chan_coherency(&uc->vc.chan, ud->asel); in bcdma_alloc_bchan_resources()
1641 ring_cfg.asel = ud->asel; in bcdma_alloc_bchan_resources()
1678 struct udma_dev *ud = uc->ud; in udma_alloc_tx_resources() local
1690 ring_idx = ud->bchan_cnt + tchan->id; in udma_alloc_tx_resources()
1692 ret = k3_ringacc_request_rings_pair(ud->ringacc, ring_idx, -1, in udma_alloc_tx_resources()
1703 if (ud->match_data->type == DMA_TYPE_UDMA) { in udma_alloc_tx_resources()
1753 struct udma_dev *ud = uc->ud; in udma_alloc_rx_resources() local
1778 if (ud->tflow_cnt) in udma_alloc_rx_resources()
1779 fd_ring_id = ud->tflow_cnt + rflow->id; in udma_alloc_rx_resources()
1781 fd_ring_id = ud->bchan_cnt + ud->tchan_cnt + ud->echan_cnt + in udma_alloc_rx_resources()
1784 ret = k3_ringacc_request_rings_pair(ud->ringacc, fd_ring_id, -1, in udma_alloc_rx_resources()
1794 if (ud->match_data->type == DMA_TYPE_UDMA) { in udma_alloc_rx_resources()
1867 struct udma_dev *ud = uc->ud; in udma_tisci_m2m_channel_config() local
1868 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in udma_tisci_m2m_channel_config()
1881 if (ud->match_data->flags & UDMA_FLAG_BURST_SIZE) { in udma_tisci_m2m_channel_config()
1882 tpl = udma_get_chan_tpl_index(&ud->tchan_tpl, tchan->id); in udma_tisci_m2m_channel_config()
1884 burst_size = ud->match_data->burst_size[tpl]; in udma_tisci_m2m_channel_config()
1893 req_tx.tx_atype = ud->atype; in udma_tisci_m2m_channel_config()
1901 dev_err(ud->dev, "tchan%d cfg failed %d\n", tchan->id, ret); in udma_tisci_m2m_channel_config()
1911 req_rx.rx_atype = ud->atype; in udma_tisci_m2m_channel_config()
1919 dev_err(ud->dev, "rchan%d alloc failed %d\n", rchan->id, ret); in udma_tisci_m2m_channel_config()
1926 struct udma_dev *ud = uc->ud; in bcdma_tisci_m2m_channel_config() local
1927 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in bcdma_tisci_m2m_channel_config()
1935 if (ud->match_data->flags & UDMA_FLAG_BURST_SIZE) { in bcdma_tisci_m2m_channel_config()
1936 tpl = udma_get_chan_tpl_index(&ud->bchan_tpl, bchan->id); in bcdma_tisci_m2m_channel_config()
1938 burst_size = ud->match_data->burst_size[tpl]; in bcdma_tisci_m2m_channel_config()
1952 dev_err(ud->dev, "bchan%d cfg failed %d\n", bchan->id, ret); in bcdma_tisci_m2m_channel_config()
1959 struct udma_dev *ud = uc->ud; in udma_tisci_tx_channel_config() local
1960 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in udma_tisci_tx_channel_config()
1986 ud->match_data->flags & UDMA_FLAG_TDTYPE) { in udma_tisci_tx_channel_config()
1995 dev_err(ud->dev, "tchan%d cfg failed %d\n", tchan->id, ret); in udma_tisci_tx_channel_config()
2002 struct udma_dev *ud = uc->ud; in bcdma_tisci_tx_channel_config() local
2003 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in bcdma_tisci_tx_channel_config()
2013 if (ud->match_data->flags & UDMA_FLAG_TDTYPE) { in bcdma_tisci_tx_channel_config()
2022 dev_err(ud->dev, "tchan%d cfg failed %d\n", tchan->id, ret); in bcdma_tisci_tx_channel_config()
2031 struct udma_dev *ud = uc->ud; in udma_tisci_rx_channel_config() local
2032 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in udma_tisci_rx_channel_config()
2061 dev_err(ud->dev, "rchan%d cfg failed %d\n", rchan->id, ret); in udma_tisci_rx_channel_config()
2105 dev_err(ud->dev, "flow%d config failed: %d\n", rchan->id, ret); in udma_tisci_rx_channel_config()
2112 struct udma_dev *ud = uc->ud; in bcdma_tisci_rx_channel_config() local
2113 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in bcdma_tisci_rx_channel_config()
2125 dev_err(ud->dev, "rchan%d cfg failed %d\n", rchan->id, ret); in bcdma_tisci_rx_channel_config()
2132 struct udma_dev *ud = uc->ud; in pktdma_tisci_rx_channel_config() local
2133 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in pktdma_tisci_rx_channel_config()
2145 dev_err(ud->dev, "rchan%d cfg failed %d\n", uc->rchan->id, ret); in pktdma_tisci_rx_channel_config()
2170 dev_err(ud->dev, "flow%d config failed: %d\n", uc->rflow->id, in pktdma_tisci_rx_channel_config()
2179 struct udma_dev *ud = to_udma_dev(chan->device); in udma_alloc_chan_resources() local
2180 const struct udma_soc_data *soc_data = ud->soc_data; in udma_alloc_chan_resources()
2185 uc->dma_dev = ud->dev; in udma_alloc_chan_resources()
2198 uc->hdesc_pool = dma_pool_create(uc->name, ud->ddev.dev, in udma_alloc_chan_resources()
2200 ud->desc_align, in udma_alloc_chan_resources()
2203 dev_err(ud->ddev.dev, in udma_alloc_chan_resources()
2222 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-MEM\n", __func__, in udma_alloc_chan_resources()
2241 uc->config.src_thread = ud->psil_base + uc->tchan->id; in udma_alloc_chan_resources()
2242 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in udma_alloc_chan_resources()
2252 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-DEV\n", __func__, in udma_alloc_chan_resources()
2259 uc->config.src_thread = ud->psil_base + uc->tchan->id; in udma_alloc_chan_resources()
2270 dev_dbg(uc->ud->dev, "%s: chan%d as DEV-to-MEM\n", __func__, in udma_alloc_chan_resources()
2278 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in udma_alloc_chan_resources()
2288 dev_err(uc->ud->dev, "%s: chan%d invalid direction (%u)\n", in udma_alloc_chan_resources()
2300 dev_warn(ud->dev, "chan%d: is running!\n", uc->id); in udma_alloc_chan_resources()
2303 dev_err(ud->dev, "chan%d: won't stop!\n", uc->id); in udma_alloc_chan_resources()
2310 ret = navss_psil_pair(ud, uc->config.src_thread, uc->config.dst_thread); in udma_alloc_chan_resources()
2312 dev_err(ud->dev, "PSI-L pairing failed: 0x%04x -> 0x%04x\n", in udma_alloc_chan_resources()
2321 dev_err(ud->dev, "Failed to get ring irq (index: %u)\n", in udma_alloc_chan_resources()
2330 dev_err(ud->dev, "chan%d: ring irq request failed\n", uc->id); in udma_alloc_chan_resources()
2336 uc->irq_num_udma = msi_get_virq(ud->dev, irq_udma_idx); in udma_alloc_chan_resources()
2338 dev_err(ud->dev, "Failed to get udma irq (index: %u)\n", in udma_alloc_chan_resources()
2348 dev_err(ud->dev, "chan%d: UDMA irq request failed\n", in udma_alloc_chan_resources()
2365 navss_psil_unpair(ud, uc->config.src_thread, uc->config.dst_thread); in udma_alloc_chan_resources()
2384 struct udma_dev *ud = to_udma_dev(chan->device); in bcdma_alloc_chan_resources() local
2385 const struct udma_oes_offsets *oes = &ud->soc_data->oes; in bcdma_alloc_chan_resources()
2403 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-MEM\n", __func__, in bcdma_alloc_chan_resources()
2417 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-DEV\n", __func__, in bcdma_alloc_chan_resources()
2426 uc->config.src_thread = ud->psil_base + uc->tchan->id; in bcdma_alloc_chan_resources()
2437 dev_dbg(uc->ud->dev, "%s: chan%d as DEV-to-MEM\n", __func__, in bcdma_alloc_chan_resources()
2447 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in bcdma_alloc_chan_resources()
2457 dev_err(uc->ud->dev, "%s: chan%d invalid direction (%u)\n", in bcdma_alloc_chan_resources()
2467 dev_warn(ud->dev, "chan%d: is running!\n", uc->id); in bcdma_alloc_chan_resources()
2470 dev_err(ud->dev, "chan%d: won't stop!\n", uc->id); in bcdma_alloc_chan_resources()
2481 uc->hdesc_pool = dma_pool_create(uc->name, ud->ddev.dev, in bcdma_alloc_chan_resources()
2483 ud->desc_align, in bcdma_alloc_chan_resources()
2486 dev_err(ud->ddev.dev, in bcdma_alloc_chan_resources()
2496 ret = navss_psil_pair(ud, uc->config.src_thread, in bcdma_alloc_chan_resources()
2499 dev_err(ud->dev, in bcdma_alloc_chan_resources()
2508 uc->irq_num_ring = msi_get_virq(ud->dev, irq_ring_idx); in bcdma_alloc_chan_resources()
2510 dev_err(ud->dev, "Failed to get ring irq (index: %u)\n", in bcdma_alloc_chan_resources()
2519 dev_err(ud->dev, "chan%d: ring irq request failed\n", uc->id); in bcdma_alloc_chan_resources()
2525 uc->irq_num_udma = msi_get_virq(ud->dev, irq_udma_idx); in bcdma_alloc_chan_resources()
2527 dev_err(ud->dev, "Failed to get bcdma irq (index: %u)\n", in bcdma_alloc_chan_resources()
2537 dev_err(ud->dev, "chan%d: BCDMA irq request failed\n", in bcdma_alloc_chan_resources()
2557 navss_psil_unpair(ud, uc->config.src_thread, in bcdma_alloc_chan_resources()
2587 trigger_event = uc->ud->soc_data->bcdma_trigger_event_offset; in bcdma_router_config()
2596 struct udma_dev *ud = to_udma_dev(chan->device); in pktdma_alloc_chan_resources() local
2597 const struct udma_oes_offsets *oes = &ud->soc_data->oes; in pktdma_alloc_chan_resources()
2612 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-DEV\n", __func__, in pktdma_alloc_chan_resources()
2621 uc->config.src_thread = ud->psil_base + uc->tchan->id; in pktdma_alloc_chan_resources()
2631 dev_dbg(uc->ud->dev, "%s: chan%d as DEV-to-MEM\n", __func__, in pktdma_alloc_chan_resources()
2641 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in pktdma_alloc_chan_resources()
2650 dev_err(uc->ud->dev, "%s: chan%d invalid direction (%u)\n", in pktdma_alloc_chan_resources()
2660 dev_warn(ud->dev, "chan%d: is running!\n", uc->id); in pktdma_alloc_chan_resources()
2663 dev_err(ud->dev, "chan%d: won't stop!\n", uc->id); in pktdma_alloc_chan_resources()
2671 uc->config.hdesc_size, ud->desc_align, in pktdma_alloc_chan_resources()
2674 dev_err(ud->ddev.dev, in pktdma_alloc_chan_resources()
2684 ret = navss_psil_pair(ud, uc->config.src_thread, uc->config.dst_thread); in pktdma_alloc_chan_resources()
2686 dev_err(ud->dev, "PSI-L pairing failed: 0x%04x -> 0x%04x\n", in pktdma_alloc_chan_resources()
2693 uc->irq_num_ring = msi_get_virq(ud->dev, irq_ring_idx); in pktdma_alloc_chan_resources()
2695 dev_err(ud->dev, "Failed to get ring irq (index: %u)\n", in pktdma_alloc_chan_resources()
2704 dev_err(ud->dev, "chan%d: ring irq request failed\n", uc->id); in pktdma_alloc_chan_resources()
2716 dev_dbg(ud->dev, in pktdma_alloc_chan_resources()
2721 dev_dbg(ud->dev, in pktdma_alloc_chan_resources()
2730 navss_psil_unpair(ud, uc->config.src_thread, uc->config.dst_thread); in pktdma_alloc_chan_resources()
2771 dev_err(uc->ud->dev, "Unsupported TR size of %zu\n", tr_size); in udma_alloc_tr_desc()
2795 uc->ud->desc_align); in udma_alloc_tr_desc()
2796 hwdesc->cppi5_desc_vaddr = dma_alloc_coherent(uc->ud->dev, in udma_alloc_tr_desc()
2906 if (uc->ud->match_data->type == DMA_TYPE_UDMA) in udma_prep_slave_sg_tr()
2918 dev_err(uc->ud->dev, "size %u is not supported\n", in udma_prep_slave_sg_tr()
2990 dev_err(uc->ud->dev, "%s: bad direction?\n", __func__); in udma_prep_slave_sg_triggered_tr()
2999 dev_err(uc->ud->dev, in udma_prep_slave_sg_triggered_tr()
3017 dev_err(uc->ud->dev, in udma_prep_slave_sg_triggered_tr()
3037 if (uc->ud->match_data->type == DMA_TYPE_UDMA) { in udma_prep_slave_sg_triggered_tr()
3054 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_slave_sg_triggered_tr()
3203 d->static_tr.bstcnt > uc->ud->match_data->statictr_z_mask) in udma_configure_statictr()
3236 if (uc->ud->match_data->type == DMA_TYPE_UDMA) in udma_prep_slave_sg_pkt()
3251 dev_err(uc->ud->dev, in udma_prep_slave_sg_pkt()
3283 if (uc->ud->match_data->type == DMA_TYPE_PKTDMA || in udma_prep_slave_sg_pkt()
3289 dev_err(uc->ud->dev, in udma_prep_slave_sg_pkt()
3452 dev_err(uc->ud->dev, in udma_prep_slave_sg()
3483 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_dma_cyclic_tr()
3495 if (uc->ud->match_data->type == DMA_TYPE_UDMA) in udma_prep_dma_cyclic_tr()
3565 if (uc->ud->match_data->type != DMA_TYPE_UDMA) in udma_prep_dma_cyclic_pkt()
3577 dev_err(uc->ud->dev, in udma_prep_dma_cyclic_pkt()
3633 dev_err(uc->ud->dev, "%s: bad direction?\n", __func__); in udma_prep_dma_cyclic()
3657 dev_err(uc->ud->dev, in udma_prep_dma_cyclic()
3696 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_dma_memcpy()
3710 if (uc->ud->match_data->type != DMA_TYPE_UDMA) { in udma_prep_dma_memcpy()
3711 src |= (u64)uc->ud->asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_dma_memcpy()
3712 dest |= (u64)uc->ud->asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_dma_memcpy()
3950 dev_warn(uc->ud->dev, "chan%d teardown timeout!\n", in udma_synchronize()
3959 dev_warn(uc->ud->dev, "chan%d refused to stop!\n", uc->id); in udma_synchronize()
4047 struct udma_dev *ud = to_udma_dev(chan->device); in udma_free_chan_resources() local
4070 navss_psil_unpair(ud, uc->config.src_thread, in udma_free_chan_resources()
4106 struct udma_dev *ud; in udma_dma_filter_fn() local
4115 ud = uc->ud; in udma_dma_filter_fn()
4119 dev_err(ud->dev, "Invalid channel atype: %u\n", in udma_dma_filter_fn()
4125 dev_err(ud->dev, "Invalid channel asel: %u\n", in udma_dma_filter_fn()
4146 dev_err(ud->dev, "No configuration for psi-l thread 0x%04x\n", in udma_dma_filter_fn()
4155 if (ud->match_data->type == DMA_TYPE_BCDMA && in udma_dma_filter_fn()
4157 dev_err(ud->dev, in udma_dma_filter_fn()
4172 if (ud->match_data->type == DMA_TYPE_PKTDMA && in udma_dma_filter_fn()
4182 const struct udma_match_data *match_data = ud->match_data; in udma_dma_filter_fn()
4198 ucc->metadata_size, ud->desc_align); in udma_dma_filter_fn()
4200 dev_dbg(ud->dev, "chan%d: Remote thread: 0x%04x (%s)\n", uc->id, in udma_dma_filter_fn()
4206 dev_dbg(ud->dev, "chan%d: triggered channel (type: %u)\n", uc->id, in udma_dma_filter_fn()
4216 struct udma_dev *ud = ofdma->of_dma_data; in udma_of_xlate() local
4217 dma_cap_mask_t mask = ud->ddev.cap_mask; in udma_of_xlate()
4221 if (ud->match_data->type == DMA_TYPE_BCDMA) { in udma_of_xlate()
4236 if (ud->match_data->type == DMA_TYPE_UDMA) { in udma_of_xlate()
4252 dev_err(ud->dev, "get channel fail in %s.\n", __func__); in udma_of_xlate()
4453 static int udma_get_mmrs(struct platform_device *pdev, struct udma_dev *ud) in udma_get_mmrs() argument
4458 ud->mmrs[MMR_GCFG] = devm_platform_ioremap_resource_byname(pdev, mmr_names[MMR_GCFG]); in udma_get_mmrs()
4459 if (IS_ERR(ud->mmrs[MMR_GCFG])) in udma_get_mmrs()
4460 return PTR_ERR(ud->mmrs[MMR_GCFG]); in udma_get_mmrs()
4462 cap2 = udma_read(ud->mmrs[MMR_GCFG], 0x28); in udma_get_mmrs()
4463 cap3 = udma_read(ud->mmrs[MMR_GCFG], 0x2c); in udma_get_mmrs()
4465 switch (ud->match_data->type) { in udma_get_mmrs()
4467 ud->rflow_cnt = UDMA_CAP3_RFLOW_CNT(cap3); in udma_get_mmrs()
4468 ud->tchan_cnt = UDMA_CAP2_TCHAN_CNT(cap2); in udma_get_mmrs()
4469 ud->echan_cnt = UDMA_CAP2_ECHAN_CNT(cap2); in udma_get_mmrs()
4470 ud->rchan_cnt = UDMA_CAP2_RCHAN_CNT(cap2); in udma_get_mmrs()
4473 ud->bchan_cnt = BCDMA_CAP2_BCHAN_CNT(cap2); in udma_get_mmrs()
4474 ud->tchan_cnt = BCDMA_CAP2_TCHAN_CNT(cap2); in udma_get_mmrs()
4475 ud->rchan_cnt = BCDMA_CAP2_RCHAN_CNT(cap2); in udma_get_mmrs()
4476 ud->rflow_cnt = ud->rchan_cnt; in udma_get_mmrs()
4479 cap4 = udma_read(ud->mmrs[MMR_GCFG], 0x30); in udma_get_mmrs()
4480 ud->tchan_cnt = UDMA_CAP2_TCHAN_CNT(cap2); in udma_get_mmrs()
4481 ud->rchan_cnt = UDMA_CAP2_RCHAN_CNT(cap2); in udma_get_mmrs()
4482 ud->rflow_cnt = UDMA_CAP3_RFLOW_CNT(cap3); in udma_get_mmrs()
4483 ud->tflow_cnt = PKTDMA_CAP4_TFLOW_CNT(cap4); in udma_get_mmrs()
4490 if (i == MMR_BCHANRT && ud->bchan_cnt == 0) in udma_get_mmrs()
4492 if (i == MMR_TCHANRT && ud->tchan_cnt == 0) in udma_get_mmrs()
4494 if (i == MMR_RCHANRT && ud->rchan_cnt == 0) in udma_get_mmrs()
4497 ud->mmrs[i] = devm_platform_ioremap_resource_byname(pdev, mmr_names[i]); in udma_get_mmrs()
4498 if (IS_ERR(ud->mmrs[i])) in udma_get_mmrs()
4499 return PTR_ERR(ud->mmrs[i]); in udma_get_mmrs()
4505 static void udma_mark_resource_ranges(struct udma_dev *ud, unsigned long *map, in udma_mark_resource_ranges() argument
4511 dev_dbg(ud->dev, "ti_sci resource range for %s: %d:%d | %d:%d\n", name, in udma_mark_resource_ranges()
4524 static int udma_setup_resources(struct udma_dev *ud) in udma_setup_resources() argument
4527 struct device *dev = ud->dev; in udma_setup_resources()
4529 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in udma_setup_resources()
4533 cap3 = udma_read(ud->mmrs[MMR_GCFG], 0x2c); in udma_setup_resources()
4536 ud->tchan_tpl.levels = 2; in udma_setup_resources()
4537 ud->tchan_tpl.start_idx[0] = 8; in udma_setup_resources()
4540 ud->tchan_tpl.levels = 2; in udma_setup_resources()
4541 ud->tchan_tpl.start_idx[0] = 2; in udma_setup_resources()
4543 ud->tchan_tpl.levels = 3; in udma_setup_resources()
4544 ud->tchan_tpl.start_idx[1] = UDMA_CAP3_UCHAN_CNT(cap3); in udma_setup_resources()
4545 ud->tchan_tpl.start_idx[0] = UDMA_CAP3_HCHAN_CNT(cap3); in udma_setup_resources()
4547 ud->tchan_tpl.levels = 2; in udma_setup_resources()
4548 ud->tchan_tpl.start_idx[0] = UDMA_CAP3_HCHAN_CNT(cap3); in udma_setup_resources()
4550 ud->tchan_tpl.levels = 1; in udma_setup_resources()
4553 ud->rchan_tpl.levels = ud->tchan_tpl.levels; in udma_setup_resources()
4554 ud->rchan_tpl.start_idx[0] = ud->tchan_tpl.start_idx[0]; in udma_setup_resources()
4555 ud->rchan_tpl.start_idx[1] = ud->tchan_tpl.start_idx[1]; in udma_setup_resources()
4557 ud->tchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->tchan_cnt), in udma_setup_resources()
4559 ud->tchans = devm_kcalloc(dev, ud->tchan_cnt, sizeof(*ud->tchans), in udma_setup_resources()
4561 ud->rchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->rchan_cnt), in udma_setup_resources()
4563 ud->rchans = devm_kcalloc(dev, ud->rchan_cnt, sizeof(*ud->rchans), in udma_setup_resources()
4565 ud->rflow_gp_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->rflow_cnt), in udma_setup_resources()
4568 ud->rflow_gp_map_allocated = devm_kcalloc(dev, in udma_setup_resources()
4569 BITS_TO_LONGS(ud->rflow_cnt), in udma_setup_resources()
4572 ud->rflow_in_use = devm_kcalloc(dev, BITS_TO_LONGS(ud->rflow_cnt), in udma_setup_resources()
4575 ud->rflows = devm_kcalloc(dev, ud->rflow_cnt, sizeof(*ud->rflows), in udma_setup_resources()
4578 if (!ud->tchan_map || !ud->rchan_map || !ud->rflow_gp_map || in udma_setup_resources()
4579 !ud->rflow_gp_map_allocated || !ud->tchans || !ud->rchans || in udma_setup_resources()
4580 !ud->rflows || !ud->rflow_in_use) in udma_setup_resources()
4588 bitmap_set(ud->rflow_gp_map_allocated, 0, ud->rchan_cnt); in udma_setup_resources()
4591 bitmap_set(ud->rflow_gp_map, 0, ud->rflow_cnt); in udma_setup_resources()
4607 bitmap_zero(ud->tchan_map, ud->tchan_cnt); in udma_setup_resources()
4610 bitmap_fill(ud->tchan_map, ud->tchan_cnt); in udma_setup_resources()
4612 udma_mark_resource_ranges(ud, ud->tchan_map, in udma_setup_resources()
4620 bitmap_zero(ud->rchan_map, ud->rchan_cnt); in udma_setup_resources()
4623 bitmap_fill(ud->rchan_map, ud->rchan_cnt); in udma_setup_resources()
4625 udma_mark_resource_ranges(ud, ud->rchan_map, in udma_setup_resources()
4636 irq_res.desc[0].num = ud->tchan_cnt; in udma_setup_resources()
4649 irq_res.desc[i].num = ud->rchan_cnt; in udma_setup_resources()
4654 ud->soc_data->oes.udma_rchan; in udma_setup_resources()
4659 ud->soc_data->oes.udma_rchan; in udma_setup_resources()
4664 ret = ti_sci_inta_msi_domain_alloc_irqs(ud->dev, &irq_res); in udma_setup_resources()
4667 dev_err(ud->dev, "Failed to allocate MSI interrupts\n"); in udma_setup_resources()
4675 bitmap_clear(ud->rflow_gp_map, ud->rchan_cnt, in udma_setup_resources()
4676 ud->rflow_cnt - ud->rchan_cnt); in udma_setup_resources()
4679 udma_mark_resource_ranges(ud, ud->rflow_gp_map, in udma_setup_resources()
4686 static int bcdma_setup_resources(struct udma_dev *ud) in bcdma_setup_resources() argument
4689 struct device *dev = ud->dev; in bcdma_setup_resources()
4691 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in bcdma_setup_resources()
4692 const struct udma_oes_offsets *oes = &ud->soc_data->oes; in bcdma_setup_resources()
4696 cap = udma_read(ud->mmrs[MMR_GCFG], 0x2c); in bcdma_setup_resources()
4698 ud->bchan_tpl.levels = 3; in bcdma_setup_resources()
4699 ud->bchan_tpl.start_idx[1] = BCDMA_CAP3_UBCHAN_CNT(cap); in bcdma_setup_resources()
4700 ud->bchan_tpl.start_idx[0] = BCDMA_CAP3_HBCHAN_CNT(cap); in bcdma_setup_resources()
4702 ud->bchan_tpl.levels = 2; in bcdma_setup_resources()
4703 ud->bchan_tpl.start_idx[0] = BCDMA_CAP3_HBCHAN_CNT(cap); in bcdma_setup_resources()
4705 ud->bchan_tpl.levels = 1; in bcdma_setup_resources()
4708 cap = udma_read(ud->mmrs[MMR_GCFG], 0x30); in bcdma_setup_resources()
4710 ud->rchan_tpl.levels = 3; in bcdma_setup_resources()
4711 ud->rchan_tpl.start_idx[1] = BCDMA_CAP4_URCHAN_CNT(cap); in bcdma_setup_resources()
4712 ud->rchan_tpl.start_idx[0] = BCDMA_CAP4_HRCHAN_CNT(cap); in bcdma_setup_resources()
4714 ud->rchan_tpl.levels = 2; in bcdma_setup_resources()
4715 ud->rchan_tpl.start_idx[0] = BCDMA_CAP4_HRCHAN_CNT(cap); in bcdma_setup_resources()
4717 ud->rchan_tpl.levels = 1; in bcdma_setup_resources()
4721 ud->tchan_tpl.levels = 3; in bcdma_setup_resources()
4722 ud->tchan_tpl.start_idx[1] = BCDMA_CAP4_UTCHAN_CNT(cap); in bcdma_setup_resources()
4723 ud->tchan_tpl.start_idx[0] = BCDMA_CAP4_HTCHAN_CNT(cap); in bcdma_setup_resources()
4725 ud->tchan_tpl.levels = 2; in bcdma_setup_resources()
4726 ud->tchan_tpl.start_idx[0] = BCDMA_CAP4_HTCHAN_CNT(cap); in bcdma_setup_resources()
4728 ud->tchan_tpl.levels = 1; in bcdma_setup_resources()
4731 ud->bchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->bchan_cnt), in bcdma_setup_resources()
4733 ud->bchans = devm_kcalloc(dev, ud->bchan_cnt, sizeof(*ud->bchans), in bcdma_setup_resources()
4735 ud->tchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->tchan_cnt), in bcdma_setup_resources()
4737 ud->tchans = devm_kcalloc(dev, ud->tchan_cnt, sizeof(*ud->tchans), in bcdma_setup_resources()
4739 ud->rchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->rchan_cnt), in bcdma_setup_resources()
4741 ud->rchans = devm_kcalloc(dev, ud->rchan_cnt, sizeof(*ud->rchans), in bcdma_setup_resources()
4744 ud->rflow_in_use = devm_kcalloc(dev, BITS_TO_LONGS(ud->rchan_cnt), in bcdma_setup_resources()
4747 ud->rflows = devm_kcalloc(dev, ud->rchan_cnt, sizeof(*ud->rflows), in bcdma_setup_resources()
4750 if (!ud->bchan_map || !ud->tchan_map || !ud->rchan_map || in bcdma_setup_resources()
4751 !ud->rflow_in_use || !ud->bchans || !ud->tchans || !ud->rchans || in bcdma_setup_resources()
4752 !ud->rflows) in bcdma_setup_resources()
4759 if (i == RM_RANGE_BCHAN && ud->bchan_cnt == 0) in bcdma_setup_resources()
4761 if (i == RM_RANGE_TCHAN && ud->tchan_cnt == 0) in bcdma_setup_resources()
4763 if (i == RM_RANGE_RCHAN && ud->rchan_cnt == 0) in bcdma_setup_resources()
4775 if (ud->bchan_cnt) { in bcdma_setup_resources()
4778 bitmap_zero(ud->bchan_map, ud->bchan_cnt); in bcdma_setup_resources()
4781 bitmap_fill(ud->bchan_map, ud->bchan_cnt); in bcdma_setup_resources()
4783 udma_mark_resource_ranges(ud, ud->bchan_map, in bcdma_setup_resources()
4791 if (ud->tchan_cnt) { in bcdma_setup_resources()
4794 bitmap_zero(ud->tchan_map, ud->tchan_cnt); in bcdma_setup_resources()
4797 bitmap_fill(ud->tchan_map, ud->tchan_cnt); in bcdma_setup_resources()
4799 udma_mark_resource_ranges(ud, ud->tchan_map, in bcdma_setup_resources()
4807 if (ud->rchan_cnt) { in bcdma_setup_resources()
4810 bitmap_zero(ud->rchan_map, ud->rchan_cnt); in bcdma_setup_resources()
4813 bitmap_fill(ud->rchan_map, ud->rchan_cnt); in bcdma_setup_resources()
4815 udma_mark_resource_ranges(ud, ud->rchan_map, in bcdma_setup_resources()
4825 if (ud->bchan_cnt) { in bcdma_setup_resources()
4829 irq_res.desc[0].num = ud->bchan_cnt; in bcdma_setup_resources()
4842 if (ud->tchan_cnt) { in bcdma_setup_resources()
4846 irq_res.desc[i].num = ud->tchan_cnt; in bcdma_setup_resources()
4848 irq_res.desc[i + 1].num = ud->tchan_cnt; in bcdma_setup_resources()
4862 if (ud->rchan_cnt) { in bcdma_setup_resources()
4866 irq_res.desc[i].num = ud->rchan_cnt; in bcdma_setup_resources()
4868 irq_res.desc[i + 1].num = ud->rchan_cnt; in bcdma_setup_resources()
4883 ret = ti_sci_inta_msi_domain_alloc_irqs(ud->dev, &irq_res); in bcdma_setup_resources()
4886 dev_err(ud->dev, "Failed to allocate MSI interrupts\n"); in bcdma_setup_resources()
4893 static int pktdma_setup_resources(struct udma_dev *ud) in pktdma_setup_resources() argument
4896 struct device *dev = ud->dev; in pktdma_setup_resources()
4898 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in pktdma_setup_resources()
4899 const struct udma_oes_offsets *oes = &ud->soc_data->oes; in pktdma_setup_resources()
4903 cap3 = udma_read(ud->mmrs[MMR_GCFG], 0x2c); in pktdma_setup_resources()
4905 ud->tchan_tpl.levels = 3; in pktdma_setup_resources()
4906 ud->tchan_tpl.start_idx[1] = UDMA_CAP3_UCHAN_CNT(cap3); in pktdma_setup_resources()
4907 ud->tchan_tpl.start_idx[0] = UDMA_CAP3_HCHAN_CNT(cap3); in pktdma_setup_resources()
4909 ud->tchan_tpl.levels = 2; in pktdma_setup_resources()
4910 ud->tchan_tpl.start_idx[0] = UDMA_CAP3_HCHAN_CNT(cap3); in pktdma_setup_resources()
4912 ud->tchan_tpl.levels = 1; in pktdma_setup_resources()
4915 ud->rchan_tpl.levels = ud->tchan_tpl.levels; in pktdma_setup_resources()
4916 ud->rchan_tpl.start_idx[0] = ud->tchan_tpl.start_idx[0]; in pktdma_setup_resources()
4917 ud->rchan_tpl.start_idx[1] = ud->tchan_tpl.start_idx[1]; in pktdma_setup_resources()
4919 ud->tchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->tchan_cnt), in pktdma_setup_resources()
4921 ud->tchans = devm_kcalloc(dev, ud->tchan_cnt, sizeof(*ud->tchans), in pktdma_setup_resources()
4923 ud->rchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->rchan_cnt), in pktdma_setup_resources()
4925 ud->rchans = devm_kcalloc(dev, ud->rchan_cnt, sizeof(*ud->rchans), in pktdma_setup_resources()
4927 ud->rflow_in_use = devm_kcalloc(dev, BITS_TO_LONGS(ud->rflow_cnt), in pktdma_setup_resources()
4930 ud->rflows = devm_kcalloc(dev, ud->rflow_cnt, sizeof(*ud->rflows), in pktdma_setup_resources()
4932 ud->tflow_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->tflow_cnt), in pktdma_setup_resources()
4935 if (!ud->tchan_map || !ud->rchan_map || !ud->tflow_map || !ud->tchans || in pktdma_setup_resources()
4936 !ud->rchans || !ud->rflows || !ud->rflow_in_use) in pktdma_setup_resources()
4953 bitmap_zero(ud->tchan_map, ud->tchan_cnt); in pktdma_setup_resources()
4955 bitmap_fill(ud->tchan_map, ud->tchan_cnt); in pktdma_setup_resources()
4957 udma_mark_resource_ranges(ud, ud->tchan_map, in pktdma_setup_resources()
4964 bitmap_zero(ud->rchan_map, ud->rchan_cnt); in pktdma_setup_resources()
4966 bitmap_fill(ud->rchan_map, ud->rchan_cnt); in pktdma_setup_resources()
4968 udma_mark_resource_ranges(ud, ud->rchan_map, in pktdma_setup_resources()
4976 bitmap_zero(ud->rflow_in_use, ud->rflow_cnt); in pktdma_setup_resources()
4979 bitmap_fill(ud->rflow_in_use, ud->rflow_cnt); in pktdma_setup_resources()
4981 udma_mark_resource_ranges(ud, ud->rflow_in_use, in pktdma_setup_resources()
4990 bitmap_zero(ud->tflow_map, ud->tflow_cnt); in pktdma_setup_resources()
4993 bitmap_fill(ud->tflow_map, ud->tflow_cnt); in pktdma_setup_resources()
4995 udma_mark_resource_ranges(ud, ud->tflow_map, in pktdma_setup_resources()
5006 irq_res.desc[0].num = ud->tflow_cnt; in pktdma_setup_resources()
5018 irq_res.desc[i].num = ud->rflow_cnt; in pktdma_setup_resources()
5026 ret = ti_sci_inta_msi_domain_alloc_irqs(ud->dev, &irq_res); in pktdma_setup_resources()
5029 dev_err(ud->dev, "Failed to allocate MSI interrupts\n"); in pktdma_setup_resources()
5036 static int setup_resources(struct udma_dev *ud) in setup_resources() argument
5038 struct device *dev = ud->dev; in setup_resources()
5041 switch (ud->match_data->type) { in setup_resources()
5043 ret = udma_setup_resources(ud); in setup_resources()
5046 ret = bcdma_setup_resources(ud); in setup_resources()
5049 ret = pktdma_setup_resources(ud); in setup_resources()
5058 ch_count = ud->bchan_cnt + ud->tchan_cnt + ud->rchan_cnt; in setup_resources()
5059 if (ud->bchan_cnt) in setup_resources()
5060 ch_count -= bitmap_weight(ud->bchan_map, ud->bchan_cnt); in setup_resources()
5061 ch_count -= bitmap_weight(ud->tchan_map, ud->tchan_cnt); in setup_resources()
5062 ch_count -= bitmap_weight(ud->rchan_map, ud->rchan_cnt); in setup_resources()
5066 ud->channels = devm_kcalloc(dev, ch_count, sizeof(*ud->channels), in setup_resources()
5068 if (!ud->channels) in setup_resources()
5071 switch (ud->match_data->type) { in setup_resources()
5076 ud->tchan_cnt - bitmap_weight(ud->tchan_map, in setup_resources()
5077 ud->tchan_cnt), in setup_resources()
5078 ud->rchan_cnt - bitmap_weight(ud->rchan_map, in setup_resources()
5079 ud->rchan_cnt), in setup_resources()
5080 ud->rflow_cnt - bitmap_weight(ud->rflow_gp_map, in setup_resources()
5081 ud->rflow_cnt)); in setup_resources()
5087 ud->bchan_cnt - bitmap_weight(ud->bchan_map, in setup_resources()
5088 ud->bchan_cnt), in setup_resources()
5089 ud->tchan_cnt - bitmap_weight(ud->tchan_map, in setup_resources()
5090 ud->tchan_cnt), in setup_resources()
5091 ud->rchan_cnt - bitmap_weight(ud->rchan_map, in setup_resources()
5092 ud->rchan_cnt)); in setup_resources()
5098 ud->tchan_cnt - bitmap_weight(ud->tchan_map, in setup_resources()
5099 ud->tchan_cnt), in setup_resources()
5100 ud->rchan_cnt - bitmap_weight(ud->rchan_map, in setup_resources()
5101 ud->rchan_cnt)); in setup_resources()
5110 static int udma_setup_rx_flush(struct udma_dev *ud) in udma_setup_rx_flush() argument
5112 struct udma_rx_flush *rx_flush = &ud->rx_flush; in udma_setup_rx_flush()
5116 struct device *dev = ud->dev; in udma_setup_rx_flush()
5138 ud->desc_align); in udma_setup_rx_flush()
5178 ud->desc_align); in udma_setup_rx_flush()
5222 if (uc->ud->match_data->type == DMA_TYPE_BCDMA) { in udma_dbg_summary_show_chan()
5233 if (uc->ud->match_data->type == DMA_TYPE_PKTDMA) in udma_dbg_summary_show_chan()
5239 if (uc->ud->match_data->type == DMA_TYPE_PKTDMA) in udma_dbg_summary_show_chan()
5278 static enum dmaengine_alignment udma_get_copy_align(struct udma_dev *ud) in udma_get_copy_align() argument
5280 const struct udma_match_data *match_data = ud->match_data; in udma_get_copy_align()
5287 if (ud->bchan_cnt) in udma_get_copy_align()
5288 tpl = udma_get_chan_tpl_index(&ud->bchan_tpl, 0); in udma_get_copy_align()
5289 else if (ud->tchan_cnt) in udma_get_copy_align()
5290 tpl = udma_get_chan_tpl_index(&ud->tchan_tpl, 0); in udma_get_copy_align()
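udma_get_copy_align() (lines 5278-5290 above) derives the dmaengine copy_align hint from the burst size of the highest-TPL channel available for memcpy. A hedged sketch of the burst-size-to-alignment mapping; the constant names are assumptions taken from the TI-SCI and dmaengine headers, not from the matches above:

/*
 * Sketch: translate the selected channel's burst size into a copy_align
 * value. The burst_size[] table and the TPL index come from the matched
 * lines; the switch arms are an assumed mapping.
 */
static enum dmaengine_alignment udma_copy_align_sketch(struct udma_dev *ud,
						       u8 tpl)
{
	switch (ud->match_data->burst_size[tpl]) {
	case TI_SCI_RM_UDMAP_CHAN_BURST_SIZE_256_BYTES:
		return DMAENGINE_ALIGN_256_BYTES;
	case TI_SCI_RM_UDMAP_CHAN_BURST_SIZE_128_BYTES:
		return DMAENGINE_ALIGN_128_BYTES;
	case TI_SCI_RM_UDMAP_CHAN_BURST_SIZE_64_BYTES:
	default:
		return DMAENGINE_ALIGN_64_BYTES;
	}
}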
5317 struct udma_dev *ud; in udma_probe() local
5326 ud = devm_kzalloc(dev, sizeof(*ud), GFP_KERNEL); in udma_probe()
5327 if (!ud) in udma_probe()
5335 ud->match_data = match->data; in udma_probe()
5337 ud->soc_data = ud->match_data->soc_data; in udma_probe()
5338 if (!ud->soc_data) { in udma_probe()
5344 ud->soc_data = soc->data; in udma_probe()
5347 ret = udma_get_mmrs(pdev, ud); in udma_probe()
5351 ud->tisci_rm.tisci = ti_sci_get_by_phandle(dev->of_node, "ti,sci"); in udma_probe()
5352 if (IS_ERR(ud->tisci_rm.tisci)) in udma_probe()
5353 return PTR_ERR(ud->tisci_rm.tisci); in udma_probe()
5356 &ud->tisci_rm.tisci_dev_id); in udma_probe()
5361 pdev->id = ud->tisci_rm.tisci_dev_id; in udma_probe()
5364 &ud->tisci_rm.tisci_navss_dev_id); in udma_probe()
5370 if (ud->match_data->type == DMA_TYPE_UDMA) { in udma_probe()
5372 &ud->atype); in udma_probe()
5373 if (!ret && ud->atype > 2) { in udma_probe()
5374 dev_err(dev, "Invalid atype: %u\n", ud->atype); in udma_probe()
5379 &ud->asel); in udma_probe()
5380 if (!ret && ud->asel > 15) { in udma_probe()
5381 dev_err(dev, "Invalid asel: %u\n", ud->asel); in udma_probe()
5386 ud->tisci_rm.tisci_udmap_ops = &ud->tisci_rm.tisci->ops.rm_udmap_ops; in udma_probe()
5387 ud->tisci_rm.tisci_psil_ops = &ud->tisci_rm.tisci->ops.rm_psil_ops; in udma_probe()
5389 if (ud->match_data->type == DMA_TYPE_UDMA) { in udma_probe()
5390 ud->ringacc = of_k3_ringacc_get_by_phandle(dev->of_node, "ti,ringacc"); in udma_probe()
5394 ring_init_data.tisci = ud->tisci_rm.tisci; in udma_probe()
5395 ring_init_data.tisci_dev_id = ud->tisci_rm.tisci_dev_id; in udma_probe()
5396 if (ud->match_data->type == DMA_TYPE_BCDMA) { in udma_probe()
5397 ring_init_data.num_rings = ud->bchan_cnt + in udma_probe()
5398 ud->tchan_cnt + in udma_probe()
5399 ud->rchan_cnt; in udma_probe()
5401 ring_init_data.num_rings = ud->rflow_cnt + in udma_probe()
5402 ud->tflow_cnt; in udma_probe()
5405 ud->ringacc = k3_ringacc_dmarings_init(pdev, &ring_init_data); in udma_probe()
5408 if (IS_ERR(ud->ringacc)) in udma_probe()
5409 return PTR_ERR(ud->ringacc); in udma_probe()
5417 dma_cap_set(DMA_SLAVE, ud->ddev.cap_mask); in udma_probe()
5419 if (ud->match_data->type != DMA_TYPE_PKTDMA) { in udma_probe()
5420 dma_cap_set(DMA_CYCLIC, ud->ddev.cap_mask); in udma_probe()
5421 ud->ddev.device_prep_dma_cyclic = udma_prep_dma_cyclic; in udma_probe()
5424 ud->ddev.device_config = udma_slave_config; in udma_probe()
5425 ud->ddev.device_prep_slave_sg = udma_prep_slave_sg; in udma_probe()
5426 ud->ddev.device_issue_pending = udma_issue_pending; in udma_probe()
5427 ud->ddev.device_tx_status = udma_tx_status; in udma_probe()
5428 ud->ddev.device_pause = udma_pause; in udma_probe()
5429 ud->ddev.device_resume = udma_resume; in udma_probe()
5430 ud->ddev.device_terminate_all = udma_terminate_all; in udma_probe()
5431 ud->ddev.device_synchronize = udma_synchronize; in udma_probe()
5433 ud->ddev.dbg_summary_show = udma_dbg_summary_show; in udma_probe()
5436 switch (ud->match_data->type) { in udma_probe()
5438 ud->ddev.device_alloc_chan_resources = in udma_probe()
5442 ud->ddev.device_alloc_chan_resources = in udma_probe()
5444 ud->ddev.device_router_config = bcdma_router_config; in udma_probe()
5447 ud->ddev.device_alloc_chan_resources = in udma_probe()
5453 ud->ddev.device_free_chan_resources = udma_free_chan_resources; in udma_probe()
5455 ud->ddev.src_addr_widths = TI_UDMAC_BUSWIDTHS; in udma_probe()
5456 ud->ddev.dst_addr_widths = TI_UDMAC_BUSWIDTHS; in udma_probe()
5457 ud->ddev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); in udma_probe()
5458 ud->ddev.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in udma_probe()
5459 ud->ddev.desc_metadata_modes = DESC_METADATA_CLIENT | in udma_probe()
5461 if (ud->match_data->enable_memcpy_support && in udma_probe()
5462 !(ud->match_data->type == DMA_TYPE_BCDMA && ud->bchan_cnt == 0)) { in udma_probe()
5463 dma_cap_set(DMA_MEMCPY, ud->ddev.cap_mask); in udma_probe()
5464 ud->ddev.device_prep_dma_memcpy = udma_prep_dma_memcpy; in udma_probe()
5465 ud->ddev.directions |= BIT(DMA_MEM_TO_MEM); in udma_probe()
5468 ud->ddev.dev = dev; in udma_probe()
5469 ud->dev = dev; in udma_probe()
5470 ud->psil_base = ud->match_data->psil_base; in udma_probe()
5472 INIT_LIST_HEAD(&ud->ddev.channels); in udma_probe()
5473 INIT_LIST_HEAD(&ud->desc_to_purge); in udma_probe()
5475 ch_count = setup_resources(ud); in udma_probe()
5479 spin_lock_init(&ud->lock); in udma_probe()
5480 INIT_WORK(&ud->purge_work, udma_purge_desc_work); in udma_probe()
5482 ud->desc_align = 64; in udma_probe()
5483 if (ud->desc_align < dma_get_cache_alignment()) in udma_probe()
5484 ud->desc_align = dma_get_cache_alignment(); in udma_probe()
5486 ret = udma_setup_rx_flush(ud); in udma_probe()
5490 for (i = 0; i < ud->bchan_cnt; i++) { in udma_probe()
5491 struct udma_bchan *bchan = &ud->bchans[i]; in udma_probe()
5494 bchan->reg_rt = ud->mmrs[MMR_BCHANRT] + i * 0x1000; in udma_probe()
5497 for (i = 0; i < ud->tchan_cnt; i++) { in udma_probe()
5498 struct udma_tchan *tchan = &ud->tchans[i]; in udma_probe()
5501 tchan->reg_rt = ud->mmrs[MMR_TCHANRT] + i * 0x1000; in udma_probe()
5504 for (i = 0; i < ud->rchan_cnt; i++) { in udma_probe()
5505 struct udma_rchan *rchan = &ud->rchans[i]; in udma_probe()
5508 rchan->reg_rt = ud->mmrs[MMR_RCHANRT] + i * 0x1000; in udma_probe()
5511 for (i = 0; i < ud->rflow_cnt; i++) { in udma_probe()
5512 struct udma_rflow *rflow = &ud->rflows[i]; in udma_probe()
5518 struct udma_chan *uc = &ud->channels[i]; in udma_probe()
5520 uc->ud = ud; in udma_probe()
5533 vchan_init(&uc->vc, &ud->ddev); in udma_probe()
5541 ud->ddev.copy_align = udma_get_copy_align(ud); in udma_probe()
5543 ret = dma_async_device_register(&ud->ddev); in udma_probe()
5549 platform_set_drvdata(pdev, ud); in udma_probe()
5551 ret = of_dma_controller_register(dev->of_node, udma_of_xlate, ud); in udma_probe()
5554 dma_async_device_unregister(&ud->ddev); in udma_probe()
5562 struct udma_dev *ud = dev_get_drvdata(dev); in udma_pm_suspend() local
5563 struct dma_device *dma_dev = &ud->ddev; in udma_pm_suspend()
5575 ud->ddev.device_free_chan_resources(chan); in udma_pm_suspend()
5584 struct udma_dev *ud = dev_get_drvdata(dev); in udma_pm_resume() local
5585 struct dma_device *dma_dev = &ud->ddev; in udma_pm_resume()
5598 ret = ud->ddev.device_alloc_chan_resources(chan); in udma_pm_resume()