
Lines Matching refs:ud

100 struct udma_dev *ud; member
203 static inline int udma_navss_psil_pair(struct udma_dev *ud, u32 src_thread, in udma_navss_psil_pair() argument
207 return ud->tisci_psil_ops->pair(ud->tisci, in udma_navss_psil_pair()
208 ud->tisci_navss_dev_id, in udma_navss_psil_pair()
212 static inline int udma_navss_psil_unpair(struct udma_dev *ud, u32 src_thread, in udma_navss_psil_unpair() argument
216 return ud->tisci_psil_ops->unpair(ud->tisci, in udma_navss_psil_unpair()
217 ud->tisci_navss_dev_id, in udma_navss_psil_unpair()
275 return uc->ud->is_coherent; in udma_is_coherent()
542 static struct udma_##res *__udma_reserve_##res(struct udma_dev *ud, \
546 if (test_bit(id, ud->res##_map)) { \
547 dev_err(ud->dev, "res##%d is in use\n", id); \
551 id = find_first_zero_bit(ud->res##_map, ud->res##_cnt); \
552 if (id == ud->res##_cnt) { \
557 __set_bit(id, ud->res##_map); \
558 return &ud->res##s[id]; \
567 struct udma_dev *ud = uc->ud; in udma_get_tchan() local
570 dev_dbg(ud->dev, "chan%d: already have tchan%d allocated\n", in udma_get_tchan()
575 uc->tchan = __udma_reserve_tchan(ud, -1); in udma_get_tchan()
586 struct udma_dev *ud = uc->ud; in udma_get_rchan() local
589 dev_dbg(ud->dev, "chan%d: already have rchan%d allocated\n", in udma_get_rchan()
594 uc->rchan = __udma_reserve_rchan(ud, -1); in udma_get_rchan()
605 struct udma_dev *ud = uc->ud; in udma_get_chan_pair() local
609 dev_info(ud->dev, "chan%d: already have %d pair allocated\n", in udma_get_chan_pair()
615 dev_err(ud->dev, "chan%d: already have tchan%d allocated\n", in udma_get_chan_pair()
619 dev_err(ud->dev, "chan%d: already have rchan%d allocated\n", in udma_get_chan_pair()
625 end = min(ud->tchan_cnt, ud->rchan_cnt); in udma_get_chan_pair()
627 if (!test_bit(chan_id, ud->tchan_map) && in udma_get_chan_pair()
628 !test_bit(chan_id, ud->rchan_map)) in udma_get_chan_pair()
635 __set_bit(chan_id, ud->tchan_map); in udma_get_chan_pair()
636 __set_bit(chan_id, ud->rchan_map); in udma_get_chan_pair()
637 uc->tchan = &ud->tchans[chan_id]; in udma_get_chan_pair()
638 uc->rchan = &ud->rchans[chan_id]; in udma_get_chan_pair()
647 struct udma_dev *ud = uc->ud; in udma_get_rflow() local
650 dev_dbg(ud->dev, "chan%d: already have rflow%d allocated\n", in udma_get_rflow()
656 dev_warn(ud->dev, "chan%d: does not have rchan??\n", uc->id); in udma_get_rflow()
658 uc->rflow = __udma_reserve_rflow(ud, flow_id); in udma_get_rflow()
668 struct udma_dev *ud = uc->ud; in udma_put_rchan() local
671 dev_dbg(ud->dev, "chan%d: put rchan%d\n", uc->id, in udma_put_rchan()
673 __clear_bit(uc->rchan->id, ud->rchan_map); in udma_put_rchan()
680 struct udma_dev *ud = uc->ud; in udma_put_tchan() local
683 dev_dbg(ud->dev, "chan%d: put tchan%d\n", uc->id, in udma_put_tchan()
685 __clear_bit(uc->tchan->id, ud->tchan_map); in udma_put_tchan()
692 struct udma_dev *ud = uc->ud; in udma_put_rflow() local
695 dev_dbg(ud->dev, "chan%d: put rflow%d\n", uc->id, in udma_put_rflow()
697 __clear_bit(uc->rflow->id, ud->rflow_map); in udma_put_rflow()
718 struct udma_dev *ud = uc->ud; in udma_alloc_tx_resources() local
726 ud->ringacc, uc->tchan->id, in udma_alloc_tx_resources()
734 ud->ringacc, -1, RINGACC_RING_USE_PROXY); in udma_alloc_tx_resources()
782 struct udma_dev *ud = uc->ud; in udma_alloc_rx_resources() local
800 fd_ring_id = ud->tchan_cnt + ud->echan_cnt + uc->rchan->id; in udma_alloc_rx_resources()
803 ud->ringacc, fd_ring_id, in udma_alloc_rx_resources()
811 ud->ringacc, -1, RINGACC_RING_USE_PROXY); in udma_alloc_rx_resources()
846 struct udma_dev *ud = uc->ud; in udma_alloc_tchan_sci_req() local
860 req.nav_id = ud->tisci_dev_id; in udma_alloc_tchan_sci_req()
871 ret = ud->tisci_udmap_ops->tx_ch_cfg(ud->tisci, &req); in udma_alloc_tchan_sci_req()
873 dev_err(ud->dev, "tisci tx alloc failed %d\n", ret); in udma_alloc_tchan_sci_req()
880 struct udma_dev *ud = uc->ud; in udma_alloc_rchan_sci_req() local
897 req.nav_id = ud->tisci_dev_id; in udma_alloc_rchan_sci_req()
917 ret = ud->tisci_udmap_ops->rx_ch_cfg(ud->tisci, &req); in udma_alloc_rchan_sci_req()
919 dev_err(ud->dev, "tisci rx %u cfg failed %d\n", in udma_alloc_rchan_sci_req()
942 flow_req.nav_id = ud->tisci_dev_id; in udma_alloc_rchan_sci_req()
968 ret = ud->tisci_udmap_ops->rx_flow_cfg(ud->tisci, &flow_req); in udma_alloc_rchan_sci_req()
970 dev_err(ud->dev, "tisci rx %u flow %u cfg failed %d\n", in udma_alloc_rchan_sci_req()
978 struct udma_dev *ud = uc->ud; in udma_alloc_chan_resources() local
999 uc->src_thread = ud->psil_base + uc->tchan->id; in udma_alloc_chan_resources()
1000 uc->dst_thread = (ud->psil_base + uc->rchan->id) | 0x8000; in udma_alloc_chan_resources()
1008 uc->src_thread = ud->psil_base + uc->tchan->id; in udma_alloc_chan_resources()
1021 uc->dst_thread = (ud->psil_base + uc->rchan->id) | 0x8000; in udma_alloc_chan_resources()
1054 dev_warn(ud->dev, "chan%d: is running!\n", uc->id); in udma_alloc_chan_resources()
1057 dev_err(ud->dev, "chan%d: won't stop!\n", uc->id); in udma_alloc_chan_resources()
1063 ret = udma_navss_psil_pair(ud, uc->src_thread, uc->dst_thread); in udma_alloc_chan_resources()
1065 dev_err(ud->dev, "k3_nav_psil_request_link fail\n"); in udma_alloc_chan_resources()
1083 udma_navss_psil_unpair(uc->ud, uc->src_thread, uc->dst_thread); in udma_free_chan_resources()
1096 struct udma_dev *ud = dev_get_priv(dev); in udma_get_mmrs() local
1100 ud->mmrs[i] = (uint32_t *)devfdt_get_addr_name(dev, in udma_get_mmrs()
1102 if (!ud->mmrs[i]) in udma_get_mmrs()
1114 struct udma_dev *ud = dev_get_priv(dev); in udma_probe() local
1126 ud->ringacc = dev_get_priv(tmp); in udma_probe()
1127 if (IS_ERR(ud->ringacc)) in udma_probe()
1128 return PTR_ERR(ud->ringacc); in udma_probe()
1130 ud->psil_base = dev_read_u32_default(dev, "ti,psil-base", 0); in udma_probe()
1131 if (!ud->psil_base) { in udma_probe()
1140 ud->tisci = NULL; in udma_probe()
1143 ud->tisci = (struct ti_sci_handle *) in udma_probe()
1149 ud->tisci = NULL; in udma_probe()
1152 if (ud->tisci) { in udma_probe()
1155 ud->tisci_dev_id = -1; in udma_probe()
1156 ret = dev_read_u32(dev, "ti,sci-dev-id", &ud->tisci_dev_id); in udma_probe()
1162 ud->tisci_navss_dev_id = -1; in udma_probe()
1164 &ud->tisci_navss_dev_id); in udma_probe()
1170 ud->tisci_udmap_ops = &ud->tisci->ops.rm_udmap_ops; in udma_probe()
1171 ud->tisci_psil_ops = &ud->tisci->ops.rm_psil_ops; in udma_probe()
1174 ud->is_coherent = dev_read_bool(dev, "dma-coherent"); in udma_probe()
1176 cap2 = udma_read(ud->mmrs[MMR_GCFG], 0x28); in udma_probe()
1177 cap3 = udma_read(ud->mmrs[MMR_GCFG], 0x2c); in udma_probe()
1179 ud->rflow_cnt = cap3 & 0x3fff; in udma_probe()
1180 ud->tchan_cnt = cap2 & 0x1ff; in udma_probe()
1181 ud->echan_cnt = (cap2 >> 9) & 0x1ff; in udma_probe()
1182 ud->rchan_cnt = (cap2 >> 18) & 0x1ff; in udma_probe()
1183 ud->ch_count = ud->tchan_cnt + ud->rchan_cnt; in udma_probe()
1187 ud->ch_count, ud->tchan_cnt, ud->echan_cnt, ud->rchan_cnt, in udma_probe()
1188 ud->tisci_dev_id); in udma_probe()
1189 dev_info(dev, "Number of rflows: %u\n", ud->rflow_cnt); in udma_probe()
1191 ud->channels = devm_kcalloc(dev, ud->ch_count, sizeof(*ud->channels), in udma_probe()
1193 ud->tchan_map = devm_kcalloc(dev, BITS_TO_LONGS(ud->tchan_cnt), in udma_probe()
1195 ud->tchans = devm_kcalloc(dev, ud->tchan_cnt, in udma_probe()
1196 sizeof(*ud->tchans), GFP_KERNEL); in udma_probe()
1197 ud->rchan_map = devm_kcalloc(dev, BITS_TO_LONGS(ud->rchan_cnt), in udma_probe()
1199 ud->rchans = devm_kcalloc(dev, ud->rchan_cnt, in udma_probe()
1200 sizeof(*ud->rchans), GFP_KERNEL); in udma_probe()
1201 ud->rflow_map = devm_kcalloc(dev, BITS_TO_LONGS(ud->rflow_cnt), in udma_probe()
1203 ud->rflows = devm_kcalloc(dev, ud->rflow_cnt, in udma_probe()
1204 sizeof(*ud->rflows), GFP_KERNEL); in udma_probe()
1206 if (!ud->channels || !ud->tchan_map || !ud->rchan_map || in udma_probe()
1207 !ud->rflow_map || !ud->tchans || !ud->rchans || !ud->rflows) in udma_probe()
1210 for (i = 0; i < ud->tchan_cnt; i++) { in udma_probe()
1211 struct udma_tchan *tchan = &ud->tchans[i]; in udma_probe()
1214 tchan->reg_rt = ud->mmrs[MMR_TCHANRT] + UDMA_CH_1000(i); in udma_probe()
1217 for (i = 0; i < ud->rchan_cnt; i++) { in udma_probe()
1218 struct udma_rchan *rchan = &ud->rchans[i]; in udma_probe()
1221 rchan->reg_rt = ud->mmrs[MMR_RCHANRT] + UDMA_CH_1000(i); in udma_probe()
1224 for (i = 0; i < ud->rflow_cnt; i++) { in udma_probe()
1225 struct udma_rflow *rflow = &ud->rflows[i]; in udma_probe()
1230 for (i = 0; i < ud->ch_count; i++) { in udma_probe()
1231 struct udma_chan *uc = &ud->channels[i]; in udma_probe()
1233 uc->ud = ud; in udma_probe()
1245 udma_read(ud->mmrs[MMR_GCFG], 0), in udma_probe()
1246 udma_read(ud->mmrs[MMR_GCFG], 0x20), in udma_probe()
1247 udma_read(ud->mmrs[MMR_GCFG], 0x24), in udma_probe()
1248 udma_read(ud->mmrs[MMR_GCFG], 0x28), in udma_probe()
1249 udma_read(ud->mmrs[MMR_GCFG], 0x2c)); in udma_probe()
1284 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_dma_memcpy()
1357 struct udma_dev *ud = dev_get_priv(dev); in udma_transfer() local
1359 struct udma_chan *uc = &ud->channels[0]; in udma_transfer()
1378 struct udma_dev *ud = dev_get_priv(dma->dev); in udma_request() local
1383 if (dma->id >= (ud->rchan_cnt + ud->tchan_cnt)) { in udma_request()
1388 uc = &ud->channels[dma->id]; in udma_request()
1422 struct udma_dev *ud = dev_get_priv(dma->dev); in udma_free() local
1425 if (dma->id >= (ud->rchan_cnt + ud->tchan_cnt)) { in udma_free()
1429 uc = &ud->channels[dma->id]; in udma_free()
1442 struct udma_dev *ud = dev_get_priv(dma->dev); in udma_enable() local
1446 if (dma->id >= (ud->rchan_cnt + ud->tchan_cnt)) { in udma_enable()
1450 uc = &ud->channels[dma->id]; in udma_enable()
1459 struct udma_dev *ud = dev_get_priv(dma->dev); in udma_disable() local
1463 if (dma->id >= (ud->rchan_cnt + ud->tchan_cnt)) { in udma_disable()
1467 uc = &ud->channels[dma->id]; in udma_disable()
1479 struct udma_dev *ud = dev_get_priv(dma->dev); in udma_send() local
1491 if (dma->id >= (ud->rchan_cnt + ud->tchan_cnt)) { in udma_send()
1495 uc = &ud->channels[dma->id]; in udma_send()
1540 struct udma_dev *ud = dev_get_priv(dma->dev); in udma_receive() local
1548 if (dma->id >= (ud->rchan_cnt + ud->tchan_cnt)) { in udma_receive()
1552 uc = &ud->channels[dma->id]; in udma_receive()
1592 struct udma_dev *ud = dev_get_priv(dma->dev); in udma_of_xlate() local
1593 struct udma_chan *uc = &ud->channels[0]; in udma_of_xlate()
1598 for (val = 0; val < ud->ch_count; val++) { in udma_of_xlate()
1599 uc = &ud->channels[val]; in udma_of_xlate()
1604 if (val == ud->ch_count) in udma_of_xlate()
1613 dev_err(ud->dev, "slave node is missing\n"); in udma_of_xlate()
1620 dev_err(ud->dev, "Channel configuration node is missing\n"); in udma_of_xlate()
1638 dev_err(ud->dev, "ti,psil-base is missing\n"); in udma_of_xlate()
1655 struct udma_dev *ud = dev_get_priv(dma->dev); in udma_prepare_rcv_buf() local
1661 if (dma->id >= (ud->rchan_cnt + ud->tchan_cnt)) { in udma_prepare_rcv_buf()
1665 uc = &ud->channels[dma->id]; in udma_prepare_rcv_buf()
1701 struct udma_dev *ud = dev_get_priv(dma->dev); in udma_get_cfg() local
1704 if (dma->id >= (ud->rchan_cnt + ud->tchan_cnt)) { in udma_get_cfg()
1711 uc = &ud->channels[dma->id]; in udma_get_cfg()
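The listing above cross-references every line that touches struct udma_dev *ud in the TI K3 NavSS UDMA driver (the udma_* functions shown). The excerpt at source lines 542-558 belongs to a token-pasting macro that generates per-resource reservation helpers; the listing shows the generated __udma_reserve_tchan(), __udma_reserve_rchan() and __udma_reserve_rflow() being called with -1 for "any free index" or with an explicit index such as flow_id (lines 575, 594 and 658). Below is a minimal sketch of how such a macro could expand, reconstructed only from the lines quoted here: the macro name, the exact signature and the ERR_PTR() error paths are assumptions, and the "res##%d" format string from line 547 (where ## has no effect inside a string literal) is written with #res stringification to show the presumed intent.

/* Sketch only; assumes <linux/err.h> for ERR_PTR() and the usual bitmap helpers. */
#define UDMA_RESERVE_RESOURCE(res)					\
static struct udma_##res *__udma_reserve_##res(struct udma_dev *ud,	\
					       int id)			\
{									\
	if (id >= 0) {							\
		/* caller asked for a specific index */			\
		if (test_bit(id, ud->res##_map)) {			\
			dev_err(ud->dev, #res "%d is in use\n", id);	\
			return ERR_PTR(-ENOENT);			\
		}							\
	} else {							\
		/* otherwise pick the first free index from the bitmap */ \
		id = find_first_zero_bit(ud->res##_map, ud->res##_cnt);	\
		if (id == ud->res##_cnt) {				\
			return ERR_PTR(-ENOENT);			\
		}							\
	}								\
									\
	__set_bit(id, ud->res##_map);					\
	return &ud->res##s[id];						\
}

UDMA_RESERVE_RESOURCE(tchan)
UDMA_RESERVE_RESOURCE(rchan)
UDMA_RESERVE_RESOURCE(rflow)

Each expansion operates on the matching ud->tchan_map/rchan_map/rflow_map bitmap and ud->tchans/rchans/rflows array that udma_probe() allocates at lines 1191-1204, which is why the three callers differ only in the resource name.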