Lines Matching +full:max +full:- +full:burst +full:- +full:len

23 #include "virt-dma.h"
29 #define SUN4I_DMA_CFG_DST_BURST_LENGTH(len) ((len) << 23) argument
33 #define SUN4I_DMA_CFG_SRC_BURST_LENGTH(len) ((len) << 7) argument
78 #define SUN4I_DDMA_PARA_DST_DATA_BLK_SIZE(n) (((n) - 1) << 24)
79 #define SUN4I_DDMA_PARA_DST_WAIT_CYCLES(n) (((n) - 1) << 16)
80 #define SUN4I_DDMA_PARA_SRC_DATA_BLK_SIZE(n) (((n) - 1) << 8)
81 #define SUN4I_DDMA_PARA_SRC_WAIT_CYCLES(n) (((n) - 1) << 0)
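The four SUN4I_DDMA_PARA_* macros above (lines 78-81) pack data block sizes and inter-block wait cycles into a single dedicated-DMA parameter word, each field stored as the value minus one so that a register field of 0 means 1. A minimal standalone sketch of assembling such a word; the concrete values 1 and 2 are purely illustrative, not taken from the driver:

#include <stdint.h>
#include <stdio.h>

/* Local mirrors of the field macros listed above (lines 78-81); the
 * "(n) - 1" encoding means a register field of 0 selects 1 block or
 * 1 wait cycle. */
#define SUN4I_DDMA_PARA_DST_DATA_BLK_SIZE(n)	(((n) - 1) << 24)
#define SUN4I_DDMA_PARA_DST_WAIT_CYCLES(n)	(((n) - 1) << 16)
#define SUN4I_DDMA_PARA_SRC_DATA_BLK_SIZE(n)	(((n) - 1) << 8)
#define SUN4I_DDMA_PARA_SRC_WAIT_CYCLES(n)	(((n) - 1) << 0)

int main(void)
{
	/* Illustrative values only: 1-word data blocks with 2 wait cycles
	 * on both the source and destination side. */
	uint32_t para = SUN4I_DDMA_PARA_DST_DATA_BLK_SIZE(1) |
			SUN4I_DDMA_PARA_DST_WAIT_CYCLES(2) |
			SUN4I_DDMA_PARA_SRC_DATA_BLK_SIZE(1) |
			SUN4I_DDMA_PARA_SRC_WAIT_CYCLES(2);

	printf("DDMA para word: 0x%08x\n", (unsigned int)para);	/* 0x00010001 */
	return 0;
}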
116 #define SUN4I_NDMA_NR_MAX_VCHANS (29 * 2 - 1)
153 size_t len; member
193 return &chan->dev->device; in chan2dev()
199 return -EINVAL; in convert_burst()
201 /* 1 -> 0, 4 -> 1, 8 -> 2 */ in convert_burst()
208 return -EINVAL; in convert_buswidth()
210 /* 8 (1 byte) -> 0, 16 (2 bytes) -> 1, 32 (4 bytes) -> 2 */ in convert_buswidth()
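The two comments above spell out the conversion tables used when programming burst length and bus width. A minimal userspace sketch consistent with those comments; the driver's actual helpers and bounds checks may differ slightly:

#include <errno.h>

/* Burst converter consistent with the "1 -> 0, 4 -> 1, 8 -> 2" comment
 * above; anything else is rejected, matching the -EINVAL on line 199. */
static int convert_burst(unsigned int maxburst)
{
	switch (maxburst) {
	case 1: return 0;
	case 4: return 1;
	case 8: return 2;
	default: return -EINVAL;
	}
}

/* Bus-width converter consistent with the "8 (1 byte) -> 0,
 * 16 (2 bytes) -> 1, 32 (4 bytes) -> 2" comment: the register encoding
 * is log2 of the width in bytes. */
static int convert_buswidth(unsigned int addr_width_bytes)
{
	switch (addr_width_bytes) {
	case 1: return 0;
	case 2: return 1;
	case 4: return 2;
	default: return -EINVAL;
	}
}

The converted values are then shifted into the channel configuration word with SUN4I_DMA_CFG_SRC_BURST_LENGTH() / SUN4I_DMA_CFG_DST_BURST_LENGTH() (lines 29 and 33 above) and the corresponding DATA_WIDTH macros, as generate_ndma_promise() and generate_ddma_promise() do further down.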
218 vchan_free_chan_resources(&vchan->vc); in sun4i_dma_free_chan_resources()
224 struct sun4i_dma_pchan *pchan = NULL, *pchans = priv->pchans; in find_and_use_pchan()
226 int i, max; in find_and_use_pchan() local
229 * pchans 0-SUN4I_NDMA_NR_MAX_CHANNELS are normal, and in find_and_use_pchan()
232 if (vchan->is_dedicated) { in find_and_use_pchan()
234 max = SUN4I_DMA_NR_MAX_CHANNELS; in find_and_use_pchan()
237 max = SUN4I_NDMA_NR_MAX_CHANNELS; in find_and_use_pchan()
240 spin_lock_irqsave(&priv->lock, flags); in find_and_use_pchan()
241 for_each_clear_bit_from(i, priv->pchans_used, max) { in find_and_use_pchan()
243 pchan->vchan = vchan; in find_and_use_pchan()
244 set_bit(i, priv->pchans_used); in find_and_use_pchan()
247 spin_unlock_irqrestore(&priv->lock, flags); in find_and_use_pchan()
256 int nr = pchan - priv->pchans; in release_pchan()
258 spin_lock_irqsave(&priv->lock, flags); in release_pchan()
260 pchan->vchan = NULL; in release_pchan()
261 clear_bit(nr, priv->pchans_used); in release_pchan()
263 spin_unlock_irqrestore(&priv->lock, flags); in release_pchan()
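find_and_use_pchan() and release_pchan() above implement a small bitmap allocator: normal channels are scanned from index 0 and dedicated ones from SUN4I_NDMA_NR_MAX_CHANNELS, the first clear bit is claimed under the spinlock, and releasing a channel simply clears its bit again. A rough userspace analogue of that pattern; the pool size and the pthread mutex are illustrative stand-ins for the driver's pchans_used bitmap and priv->lock:

#include <pthread.h>

#define NR_CHANNELS 16	/* hypothetical pool size, for illustration only */

static unsigned long pchans_used;	/* one bit per physical channel */
static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;

/* Scan [start, max) for a clear bit and claim it under the lock, mirroring
 * the for_each_clear_bit_from()/set_bit() pattern in find_and_use_pchan().
 * Returns the claimed index, or -1 when every channel in the range is busy. */
static int claim_pchan(int start, int max)
{
	int i, ret = -1;

	pthread_mutex_lock(&lock);
	for (i = start; i < max; i++) {
		if (!(pchans_used & (1UL << i))) {
			pchans_used |= 1UL << i;
			ret = i;
			break;
		}
	}
	pthread_mutex_unlock(&lock);
	return ret;
}

/* Counterpart of release_pchan(): just drop the bit again. */
static void release_pchan_bit(int i)
{
	pthread_mutex_lock(&lock);
	pchans_used &= ~(1UL << i);
	pthread_mutex_unlock(&lock);
}

int main(void)
{
	/* A dedicated channel would be requested with a start index past the
	 * normal channels, as in the is_dedicated branch above. */
	int chan = claim_pchan(0, NR_CHANNELS);

	if (chan >= 0)
		release_pchan_bit(chan);
	return 0;
}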
273 if (pchan->is_dedicated) { in configure_pchan()
274 writel_relaxed(d->src, pchan->base + SUN4I_DDMA_SRC_ADDR_REG); in configure_pchan()
275 writel_relaxed(d->dst, pchan->base + SUN4I_DDMA_DST_ADDR_REG); in configure_pchan()
276 writel_relaxed(d->len, pchan->base + SUN4I_DDMA_BYTE_COUNT_REG); in configure_pchan()
277 writel_relaxed(d->para, pchan->base + SUN4I_DDMA_PARA_REG); in configure_pchan()
278 writel_relaxed(d->cfg, pchan->base + SUN4I_DDMA_CFG_REG); in configure_pchan()
280 writel_relaxed(d->src, pchan->base + SUN4I_NDMA_SRC_ADDR_REG); in configure_pchan()
281 writel_relaxed(d->dst, pchan->base + SUN4I_NDMA_DST_ADDR_REG); in configure_pchan()
282 writel_relaxed(d->len, pchan->base + SUN4I_NDMA_BYTE_COUNT_REG); in configure_pchan()
283 writel_relaxed(d->cfg, pchan->base + SUN4I_NDMA_CFG_REG); in configure_pchan()
292 int pchan_number = pchan - priv->pchans; in set_pchan_interrupt()
295 spin_lock_irqsave(&priv->lock, flags); in set_pchan_interrupt()
297 reg = readl_relaxed(priv->base + SUN4I_DMA_IRQ_ENABLE_REG); in set_pchan_interrupt()
309 writel_relaxed(reg, priv->base + SUN4I_DMA_IRQ_ENABLE_REG); in set_pchan_interrupt()
311 spin_unlock_irqrestore(&priv->lock, flags); in set_pchan_interrupt()
321 * This function must be called with &vchan->vc.lock held.
332 lockdep_assert_held(&vchan->vc.lock); in __execute_vchan_pending()
337 return -EBUSY; in __execute_vchan_pending()
343 if (vchan->processing) { in __execute_vchan_pending()
344 dev_dbg(chan2dev(&vchan->vc.chan), in __execute_vchan_pending()
346 ret = -EBUSY; in __execute_vchan_pending()
352 vd = vchan_next_desc(&vchan->vc); in __execute_vchan_pending()
354 dev_dbg(chan2dev(&vchan->vc.chan), in __execute_vchan_pending()
361 if (list_empty(&contract->demands)) { in __execute_vchan_pending()
363 list_del(&contract->vd.node); in __execute_vchan_pending()
364 vchan_cookie_complete(&contract->vd); in __execute_vchan_pending()
365 dev_dbg(chan2dev(&vchan->vc.chan), in __execute_vchan_pending()
368 } while (list_empty(&contract->demands)); in __execute_vchan_pending()
371 promise = list_first_entry(&contract->demands, in __execute_vchan_pending()
373 vchan->processing = promise; in __execute_vchan_pending()
377 vchan->contract = contract; in __execute_vchan_pending()
378 vchan->pchan = pchan; in __execute_vchan_pending()
379 set_pchan_interrupt(priv, pchan, contract->is_cyclic, 1); in __execute_vchan_pending()
395 if ((sconfig->dst_addr_width == DMA_SLAVE_BUSWIDTH_UNDEFINED) || in sanitize_config()
396 !sconfig->dst_maxburst) in sanitize_config()
397 return -EINVAL; in sanitize_config()
399 if (sconfig->src_addr_width == DMA_SLAVE_BUSWIDTH_UNDEFINED) in sanitize_config()
400 sconfig->src_addr_width = sconfig->dst_addr_width; in sanitize_config()
402 if (!sconfig->src_maxburst) in sanitize_config()
403 sconfig->src_maxburst = sconfig->dst_maxburst; in sanitize_config()
408 if ((sconfig->src_addr_width == DMA_SLAVE_BUSWIDTH_UNDEFINED) || in sanitize_config()
409 !sconfig->src_maxburst) in sanitize_config()
410 return -EINVAL; in sanitize_config()
412 if (sconfig->dst_addr_width == DMA_SLAVE_BUSWIDTH_UNDEFINED) in sanitize_config()
413 sconfig->dst_addr_width = sconfig->src_addr_width; in sanitize_config()
415 if (!sconfig->dst_maxburst) in sanitize_config()
416 sconfig->dst_maxburst = sconfig->src_maxburst; in sanitize_config()
430 * normal part of the DMA Engine and get data copied. A non-executed
437 size_t len, struct dma_slave_config *sconfig, in generate_ndma_promise() argument
451 promise->src = src; in generate_ndma_promise()
452 promise->dst = dest; in generate_ndma_promise()
453 promise->len = len; in generate_ndma_promise()
454 promise->cfg = SUN4I_DMA_CFG_LOADING | in generate_ndma_promise()
458 "src burst %d, dst burst %d, src buswidth %d, dst buswidth %d", in generate_ndma_promise()
459 sconfig->src_maxburst, sconfig->dst_maxburst, in generate_ndma_promise()
460 sconfig->src_addr_width, sconfig->dst_addr_width); in generate_ndma_promise()
462 /* Source burst */ in generate_ndma_promise()
463 ret = convert_burst(sconfig->src_maxburst); in generate_ndma_promise()
466 promise->cfg |= SUN4I_DMA_CFG_SRC_BURST_LENGTH(ret); in generate_ndma_promise()
468 /* Destination burst */ in generate_ndma_promise()
469 ret = convert_burst(sconfig->dst_maxburst); in generate_ndma_promise()
472 promise->cfg |= SUN4I_DMA_CFG_DST_BURST_LENGTH(ret); in generate_ndma_promise()
475 ret = convert_buswidth(sconfig->src_addr_width); in generate_ndma_promise()
478 promise->cfg |= SUN4I_DMA_CFG_SRC_DATA_WIDTH(ret); in generate_ndma_promise()
481 ret = convert_buswidth(sconfig->dst_addr_width); in generate_ndma_promise()
484 promise->cfg |= SUN4I_DMA_CFG_DST_DATA_WIDTH(ret); in generate_ndma_promise()
497 * Dedicated part of the DMA Engine and get data copied. A non-executed
504 size_t len, struct dma_slave_config *sconfig) in generate_ddma_promise() argument
513 promise->src = src; in generate_ddma_promise()
514 promise->dst = dest; in generate_ddma_promise()
515 promise->len = len; in generate_ddma_promise()
516 promise->cfg = SUN4I_DMA_CFG_LOADING | in generate_ddma_promise()
519 /* Source burst */ in generate_ddma_promise()
520 ret = convert_burst(sconfig->src_maxburst); in generate_ddma_promise()
523 promise->cfg |= SUN4I_DMA_CFG_SRC_BURST_LENGTH(ret); in generate_ddma_promise()
525 /* Destination burst */ in generate_ddma_promise()
526 ret = convert_burst(sconfig->dst_maxburst); in generate_ddma_promise()
529 promise->cfg |= SUN4I_DMA_CFG_DST_BURST_LENGTH(ret); in generate_ddma_promise()
532 ret = convert_buswidth(sconfig->src_addr_width); in generate_ddma_promise()
535 promise->cfg |= SUN4I_DMA_CFG_SRC_DATA_WIDTH(ret); in generate_ddma_promise()
538 ret = convert_buswidth(sconfig->dst_addr_width); in generate_ddma_promise()
541 promise->cfg |= SUN4I_DMA_CFG_DST_DATA_WIDTH(ret); in generate_ddma_promise()
566 INIT_LIST_HEAD(&contract->demands); in generate_dma_contract()
567 INIT_LIST_HEAD(&contract->completed_demands); in generate_dma_contract()
584 promise = list_first_entry_or_null(&contract->demands, in get_next_cyclic_promise()
587 list_splice_init(&contract->completed_demands, in get_next_cyclic_promise()
588 &contract->demands); in get_next_cyclic_promise()
589 promise = list_first_entry(&contract->demands, in get_next_cyclic_promise()
605 list_for_each_entry_safe(promise, tmp, &contract->demands, list) in sun4i_dma_free_contract()
608 list_for_each_entry_safe(promise, tmp, &contract->completed_demands, list) in sun4i_dma_free_contract()
616 dma_addr_t src, size_t len, unsigned long flags) in sun4i_dma_prep_dma_memcpy() argument
619 struct dma_slave_config *sconfig = &vchan->cfg; in sun4i_dma_prep_dma_memcpy()
630 * maximize the burst size for this same reason. in sun4i_dma_prep_dma_memcpy()
632 sconfig->src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES; in sun4i_dma_prep_dma_memcpy()
633 sconfig->dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES; in sun4i_dma_prep_dma_memcpy()
634 sconfig->src_maxburst = 8; in sun4i_dma_prep_dma_memcpy()
635 sconfig->dst_maxburst = 8; in sun4i_dma_prep_dma_memcpy()
637 if (vchan->is_dedicated) in sun4i_dma_prep_dma_memcpy()
638 promise = generate_ddma_promise(chan, src, dest, len, sconfig); in sun4i_dma_prep_dma_memcpy()
640 promise = generate_ndma_promise(chan, src, dest, len, sconfig, in sun4i_dma_prep_dma_memcpy()
649 if (vchan->is_dedicated) { in sun4i_dma_prep_dma_memcpy()
650 promise->cfg |= SUN4I_DMA_CFG_SRC_DRQ_TYPE(SUN4I_DDMA_DRQ_TYPE_SDRAM) | in sun4i_dma_prep_dma_memcpy()
653 promise->cfg |= SUN4I_DMA_CFG_SRC_DRQ_TYPE(SUN4I_NDMA_DRQ_TYPE_SDRAM) | in sun4i_dma_prep_dma_memcpy()
658 list_add_tail(&promise->list, &contract->demands); in sun4i_dma_prep_dma_memcpy()
661 return vchan_tx_prep(&vchan->vc, &contract->vd, flags); in sun4i_dma_prep_dma_memcpy()
665 sun4i_dma_prep_dma_cyclic(struct dma_chan *chan, dma_addr_t buf, size_t len, in sun4i_dma_prep_dma_cyclic() argument
670 struct dma_slave_config *sconfig = &vchan->cfg; in sun4i_dma_prep_dma_cyclic()
682 if (vchan->is_dedicated) { in sun4i_dma_prep_dma_cyclic()
698 contract->is_cyclic = 1; in sun4i_dma_prep_dma_cyclic()
703 dest = sconfig->dst_addr; in sun4i_dma_prep_dma_cyclic()
705 SUN4I_DMA_CFG_DST_DRQ_TYPE(vchan->endpoint) | in sun4i_dma_prep_dma_cyclic()
708 src = sconfig->src_addr; in sun4i_dma_prep_dma_cyclic()
710 endpoints = SUN4I_DMA_CFG_SRC_DRQ_TYPE(vchan->endpoint) | in sun4i_dma_prep_dma_cyclic()
722 * The engine can interrupt on half-transfer, so we can use in sun4i_dma_prep_dma_cyclic()
731 * |---|---|---|---| (periods / promises) in sun4i_dma_prep_dma_cyclic()
736 * |-------|-------| (promises as configured on hw) in sun4i_dma_prep_dma_cyclic()
737 * |---|---|---|---| (periods) in sun4i_dma_prep_dma_cyclic()
743 nr_periods = DIV_ROUND_UP(len / period_len, 2); in sun4i_dma_prep_dma_cyclic()
747 plength = min((len - offset), (period_len * 2)); in sun4i_dma_prep_dma_cyclic()
760 promise->cfg |= endpoints; in sun4i_dma_prep_dma_cyclic()
763 list_add_tail(&promise->list, &contract->demands); in sun4i_dma_prep_dma_cyclic()
767 return vchan_tx_prep(&vchan->vc, &contract->vd, flags); in sun4i_dma_prep_dma_cyclic()
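The diagram in the comments above (lines 731-737) explains why sun4i_dma_prep_dma_cyclic() programs one hardware promise per two requested periods: the engine can interrupt at the half-transfer point, so period callbacks still fire at every period boundary while the number of hardware reprogramming steps is halved. A small standalone illustration of the arithmetic on lines 743 and 747; the buffer and period sizes are hypothetical:

#include <stdio.h>

#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))
#define min(a, b)		((a) < (b) ? (a) : (b))

int main(void)
{
	/* Hypothetical cyclic buffer: 4 periods of 4096 bytes each. */
	size_t len = 4 * 4096, period_len = 4096;
	size_t nr_periods = DIV_ROUND_UP(len / period_len, 2);	/* line 743 */
	size_t offset, plength;

	printf("%zu hardware promises cover %zu periods\n",
	       nr_periods, len / period_len);			/* 2 cover 4 */

	for (offset = 0; offset < len; offset += plength) {
		/* Each promise spans up to two periods, as on line 747. */
		plength = min(len - offset, period_len * 2);
		printf("promise at offset %zu, length %zu\n", offset, plength);
	}
	return 0;
}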
776 struct dma_slave_config *sconfig = &vchan->cfg; in sun4i_dma_prep_slave_sg()
797 if (vchan->is_dedicated) { in sun4i_dma_prep_slave_sg()
808 endpoints = SUN4I_DMA_CFG_DST_DRQ_TYPE(vchan->endpoint) | in sun4i_dma_prep_slave_sg()
815 SUN4I_DMA_CFG_SRC_DRQ_TYPE(vchan->endpoint) | in sun4i_dma_prep_slave_sg()
822 dstaddr = sconfig->dst_addr; in sun4i_dma_prep_slave_sg()
824 srcaddr = sconfig->src_addr; in sun4i_dma_prep_slave_sg()
840 if (vchan->is_dedicated) in sun4i_dma_prep_slave_sg()
852 promise->cfg |= endpoints; in sun4i_dma_prep_slave_sg()
853 promise->para = para; in sun4i_dma_prep_slave_sg()
856 list_add_tail(&promise->list, &contract->demands); in sun4i_dma_prep_slave_sg()
863 return vchan_tx_prep(&vchan->vc, &contract->vd, flags); in sun4i_dma_prep_slave_sg()
868 struct sun4i_dma_dev *priv = to_sun4i_dma_dev(chan->device); in sun4i_dma_terminate_all()
870 struct sun4i_dma_pchan *pchan = vchan->pchan; in sun4i_dma_terminate_all()
874 spin_lock_irqsave(&vchan->vc.lock, flags); in sun4i_dma_terminate_all()
875 vchan_get_all_descriptors(&vchan->vc, &head); in sun4i_dma_terminate_all()
876 spin_unlock_irqrestore(&vchan->vc.lock, flags); in sun4i_dma_terminate_all()
883 if (pchan->is_dedicated) in sun4i_dma_terminate_all()
884 writel(0, pchan->base + SUN4I_DDMA_CFG_REG); in sun4i_dma_terminate_all()
886 writel(0, pchan->base + SUN4I_NDMA_CFG_REG); in sun4i_dma_terminate_all()
891 spin_lock_irqsave(&vchan->vc.lock, flags); in sun4i_dma_terminate_all()
892 vchan_dma_desc_free_list(&vchan->vc, &head); in sun4i_dma_terminate_all()
894 vchan->processing = NULL; in sun4i_dma_terminate_all()
895 vchan->pchan = NULL; in sun4i_dma_terminate_all()
896 spin_unlock_irqrestore(&vchan->vc.lock, flags); in sun4i_dma_terminate_all()
906 memcpy(&vchan->cfg, config, sizeof(*config)); in sun4i_dma_config()
914 struct sun4i_dma_dev *priv = ofdma->of_dma_data; in sun4i_dma_of_xlate()
917 u8 is_dedicated = dma_spec->args[0]; in sun4i_dma_of_xlate()
918 u8 endpoint = dma_spec->args[1]; in sun4i_dma_of_xlate()
929 chan = dma_get_any_slave_channel(&priv->slave); in sun4i_dma_of_xlate()
935 vchan->is_dedicated = is_dedicated; in sun4i_dma_of_xlate()
936 vchan->endpoint = endpoint; in sun4i_dma_of_xlate()
946 struct sun4i_dma_pchan *pchan = vchan->pchan; in sun4i_dma_tx_status()
958 spin_lock_irqsave(&vchan->vc.lock, flags); in sun4i_dma_tx_status()
959 vd = vchan_find_desc(&vchan->vc, cookie); in sun4i_dma_tx_status()
964 list_for_each_entry(promise, &contract->demands, list) in sun4i_dma_tx_status()
965 bytes += promise->len; in sun4i_dma_tx_status()
972 promise = list_first_entry_or_null(&contract->demands, in sun4i_dma_tx_status()
975 bytes -= promise->len; in sun4i_dma_tx_status()
976 if (pchan->is_dedicated) in sun4i_dma_tx_status()
977 bytes += readl(pchan->base + SUN4I_DDMA_BYTE_COUNT_REG); in sun4i_dma_tx_status()
979 bytes += readl(pchan->base + SUN4I_NDMA_BYTE_COUNT_REG); in sun4i_dma_tx_status()
985 spin_unlock_irqrestore(&vchan->vc.lock, flags); in sun4i_dma_tx_status()
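sun4i_dma_tx_status() above computes the residue by summing the lengths of all promises still queued on the contract, then correcting the promise currently in flight with the hardware byte counter (SUN4I_DDMA_BYTE_COUNT_REG or SUN4I_NDMA_BYTE_COUNT_REG). A standalone numeric illustration of that arithmetic; all values are hypothetical:

#include <stdio.h>

int main(void)
{
	/* Hypothetical contract: three 4096-byte promises still queued, the
	 * first of which is currently executing on the physical channel. */
	unsigned int promise_len[] = { 4096, 4096, 4096 };
	unsigned int hw_byte_count = 1024;	/* bytes the engine has left */
	unsigned int bytes = 0;
	unsigned int i;

	for (i = 0; i < 3; i++)		/* sum of everything still demanded */
		bytes += promise_len[i];

	/* Swap the in-flight promise's full length for the live counter, as
	 * done with the *_BYTE_COUNT_REG reads in sun4i_dma_tx_status(). */
	bytes -= promise_len[0];
	bytes += hw_byte_count;

	printf("residue: %u bytes\n", bytes);	/* 9216 */
	return 0;
}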
992 struct sun4i_dma_dev *priv = to_sun4i_dma_dev(chan->device); in sun4i_dma_issue_pending()
996 spin_lock_irqsave(&vchan->vc.lock, flags); in sun4i_dma_issue_pending()
1002 if (vchan_issue_pending(&vchan->vc)) in sun4i_dma_issue_pending()
1005 spin_unlock_irqrestore(&vchan->vc.lock, flags); in sun4i_dma_issue_pending()
1011 struct sun4i_dma_pchan *pchans = priv->pchans, *pchan; in sun4i_dma_interrupt()
1018 pendirq = readl_relaxed(priv->base + SUN4I_DMA_IRQ_PENDING_STATUS_REG); in sun4i_dma_interrupt()
1027 vchan = pchan->vchan; in sun4i_dma_interrupt()
1030 contract = vchan->contract; in sun4i_dma_interrupt()
1037 spin_lock(&vchan->vc.lock); in sun4i_dma_interrupt()
1043 list_del(&vchan->processing->list); in sun4i_dma_interrupt()
1044 list_add_tail(&vchan->processing->list, in sun4i_dma_interrupt()
1045 &contract->completed_demands); in sun4i_dma_interrupt()
1049 * - There's always something we can dispatch in sun4i_dma_interrupt()
1050 * - We need to run the callback in sun4i_dma_interrupt()
1051 * - Latency is very important, as this is used by audio in sun4i_dma_interrupt()
1056 * For non-cyclic transfers we need to look around, in sun4i_dma_interrupt()
1060 if (contract->is_cyclic) { in sun4i_dma_interrupt()
1062 vchan->processing = promise; in sun4i_dma_interrupt()
1064 vchan_cyclic_callback(&contract->vd); in sun4i_dma_interrupt()
1066 vchan->processing = NULL; in sun4i_dma_interrupt()
1067 vchan->pchan = NULL; in sun4i_dma_interrupt()
1074 spin_unlock(&vchan->vc.lock); in sun4i_dma_interrupt()
1077 if (contract->is_cyclic) in sun4i_dma_interrupt()
1078 vchan_cyclic_callback(&contract->vd); in sun4i_dma_interrupt()
1085 spin_lock(&priv->lock); in sun4i_dma_interrupt()
1086 irqs = readl_relaxed(priv->base + SUN4I_DMA_IRQ_ENABLE_REG); in sun4i_dma_interrupt()
1088 priv->base + SUN4I_DMA_IRQ_ENABLE_REG); in sun4i_dma_interrupt()
1089 spin_unlock(&priv->lock); in sun4i_dma_interrupt()
1092 writel_relaxed(pendirq, priv->base + SUN4I_DMA_IRQ_PENDING_STATUS_REG); in sun4i_dma_interrupt()
1100 vchan = &priv->vchans[i]; in sun4i_dma_interrupt()
1101 spin_lock(&vchan->vc.lock); in sun4i_dma_interrupt()
1103 spin_unlock(&vchan->vc.lock); in sun4i_dma_interrupt()
1112 pendirq = readl_relaxed(priv->base + in sun4i_dma_interrupt()
1129 priv = devm_kzalloc(&pdev->dev, sizeof(*priv), GFP_KERNEL); in sun4i_dma_probe()
1131 return -ENOMEM; in sun4i_dma_probe()
1134 priv->base = devm_ioremap_resource(&pdev->dev, res); in sun4i_dma_probe()
1135 if (IS_ERR(priv->base)) in sun4i_dma_probe()
1136 return PTR_ERR(priv->base); in sun4i_dma_probe()
1138 priv->irq = platform_get_irq(pdev, 0); in sun4i_dma_probe()
1139 if (priv->irq < 0) { in sun4i_dma_probe()
1140 dev_err(&pdev->dev, "Cannot claim IRQ\n"); in sun4i_dma_probe()
1141 return priv->irq; in sun4i_dma_probe()
1144 priv->clk = devm_clk_get(&pdev->dev, NULL); in sun4i_dma_probe()
1145 if (IS_ERR(priv->clk)) { in sun4i_dma_probe()
1146 dev_err(&pdev->dev, "No clock specified\n"); in sun4i_dma_probe()
1147 return PTR_ERR(priv->clk); in sun4i_dma_probe()
1151 spin_lock_init(&priv->lock); in sun4i_dma_probe()
1153 dma_cap_zero(priv->slave.cap_mask); in sun4i_dma_probe()
1154 dma_cap_set(DMA_PRIVATE, priv->slave.cap_mask); in sun4i_dma_probe()
1155 dma_cap_set(DMA_MEMCPY, priv->slave.cap_mask); in sun4i_dma_probe()
1156 dma_cap_set(DMA_CYCLIC, priv->slave.cap_mask); in sun4i_dma_probe()
1157 dma_cap_set(DMA_SLAVE, priv->slave.cap_mask); in sun4i_dma_probe()
1159 INIT_LIST_HEAD(&priv->slave.channels); in sun4i_dma_probe()
1160 priv->slave.device_free_chan_resources = sun4i_dma_free_chan_resources; in sun4i_dma_probe()
1161 priv->slave.device_tx_status = sun4i_dma_tx_status; in sun4i_dma_probe()
1162 priv->slave.device_issue_pending = sun4i_dma_issue_pending; in sun4i_dma_probe()
1163 priv->slave.device_prep_slave_sg = sun4i_dma_prep_slave_sg; in sun4i_dma_probe()
1164 priv->slave.device_prep_dma_memcpy = sun4i_dma_prep_dma_memcpy; in sun4i_dma_probe()
1165 priv->slave.device_prep_dma_cyclic = sun4i_dma_prep_dma_cyclic; in sun4i_dma_probe()
1166 priv->slave.device_config = sun4i_dma_config; in sun4i_dma_probe()
1167 priv->slave.device_terminate_all = sun4i_dma_terminate_all; in sun4i_dma_probe()
1168 priv->slave.copy_align = 2; in sun4i_dma_probe()
1169 priv->slave.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) | in sun4i_dma_probe()
1172 priv->slave.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) | in sun4i_dma_probe()
1175 priv->slave.directions = BIT(DMA_DEV_TO_MEM) | in sun4i_dma_probe()
1177 priv->slave.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in sun4i_dma_probe()
1179 priv->slave.dev = &pdev->dev; in sun4i_dma_probe()
1181 priv->pchans = devm_kcalloc(&pdev->dev, SUN4I_DMA_NR_MAX_CHANNELS, in sun4i_dma_probe()
1183 priv->vchans = devm_kcalloc(&pdev->dev, SUN4I_DMA_NR_MAX_VCHANS, in sun4i_dma_probe()
1185 if (!priv->vchans || !priv->pchans) in sun4i_dma_probe()
1186 return -ENOMEM; in sun4i_dma_probe()
1194 priv->pchans[i].base = priv->base + in sun4i_dma_probe()
1198 priv->pchans[i].base = priv->base + in sun4i_dma_probe()
1200 priv->pchans[i].is_dedicated = 1; in sun4i_dma_probe()
1204 struct sun4i_dma_vchan *vchan = &priv->vchans[i]; in sun4i_dma_probe()
1206 spin_lock_init(&vchan->vc.lock); in sun4i_dma_probe()
1207 vchan->vc.desc_free = sun4i_dma_free_contract; in sun4i_dma_probe()
1208 vchan_init(&vchan->vc, &priv->slave); in sun4i_dma_probe()
1211 ret = clk_prepare_enable(priv->clk); in sun4i_dma_probe()
1213 dev_err(&pdev->dev, "Couldn't enable the clock\n"); in sun4i_dma_probe()
1221 writel(0, priv->base + SUN4I_DMA_IRQ_ENABLE_REG); in sun4i_dma_probe()
1222 writel(0xFFFFFFFF, priv->base + SUN4I_DMA_IRQ_PENDING_STATUS_REG); in sun4i_dma_probe()
1224 ret = devm_request_irq(&pdev->dev, priv->irq, sun4i_dma_interrupt, in sun4i_dma_probe()
1225 0, dev_name(&pdev->dev), priv); in sun4i_dma_probe()
1227 dev_err(&pdev->dev, "Cannot request IRQ\n"); in sun4i_dma_probe()
1231 ret = dma_async_device_register(&priv->slave); in sun4i_dma_probe()
1233 dev_warn(&pdev->dev, "Failed to register DMA engine device\n"); in sun4i_dma_probe()
1237 ret = of_dma_controller_register(pdev->dev.of_node, sun4i_dma_of_xlate, in sun4i_dma_probe()
1240 dev_err(&pdev->dev, "of_dma_controller_register failed\n"); in sun4i_dma_probe()
1244 dev_dbg(&pdev->dev, "Successfully probed SUN4I_DMA\n"); in sun4i_dma_probe()
1249 dma_async_device_unregister(&priv->slave); in sun4i_dma_probe()
1251 clk_disable_unprepare(priv->clk); in sun4i_dma_probe()
1260 disable_irq(priv->irq); in sun4i_dma_remove()
1262 of_dma_controller_free(pdev->dev.of_node); in sun4i_dma_remove()
1263 dma_async_device_unregister(&priv->slave); in sun4i_dma_remove()
1265 clk_disable_unprepare(priv->clk); in sun4i_dma_remove()
1271 { .compatible = "allwinner,sun4i-a10-dma" },
1280 .name = "sun4i-dma",