Lines matching "tegra20" and "ahb" in drivers/dma/tegra20-apb-dma.c (Tegra20 APB DMA driver):
// SPDX-License-Identifier: GPL-2.0-only
/*
 * DMA driver for Nvidia's Tegra20 APB DMA controller.
 *
 * Copyright (c) 2012-2013, NVIDIA CORPORATION. All rights reserved.
 */

#include <linux/dma-mapping.h>
/* AHB memory address */
/* AHB sequence register */
/* ... time to complete the in-flight burst and update the DMA status register. */
/* ... broken into sub-transfers as per requester details and hw support. */
/* Channel-slave specific configuration */
/* Register accessor one-liners */
writel(val, tdma->base_addr + reg);	/* tdma_write() */
return readl(tdma->base_addr + reg);	/* tdma_read() */
writel(val, tdc->chan_addr + reg);	/* tdc_write() */
return readl(tdc->chan_addr + reg);	/* tdc_read() */
return &tdc->dma_chan.dev->device;	/* tdc2dev() */
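The accessor lines above are the bodies of small static inline MMIO helpers. A minimal sketch of their likely shape, reconstructed around the statements in the listing (the exact signatures are an assumption):

    #include <linux/io.h>

    /* Per-channel register access, relative to the channel's MMIO window */
    static inline void tdc_write(struct tegra_dma_channel *tdc, u32 reg, u32 val)
    {
        writel(val, tdc->chan_addr + reg);
    }

    static inline u32 tdc_read(struct tegra_dma_channel *tdc, u32 reg)
    {
        return readl(tdc->chan_addr + reg);
    }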
/* tegra_dma_desc_get() */
spin_lock_irqsave(&tdc->lock, flags);

/* Do not allocate if desc are waiting for ack */
list_for_each_entry(dma_desc, &tdc->free_dma_desc, node) {
    if (async_tx_test_ack(&dma_desc->txd) && !dma_desc->cb_count) {
        list_del(&dma_desc->node);
        spin_unlock_irqrestore(&tdc->lock, flags);
        dma_desc->txd.flags = 0;
        return dma_desc;
    }
}

spin_unlock_irqrestore(&tdc->lock, flags);
...
dma_async_tx_descriptor_init(&dma_desc->txd, &tdc->dma_chan);
dma_desc->txd.tx_submit = tegra_dma_tx_submit;
dma_desc->txd.flags = 0;
/* tegra_dma_desc_put() */
spin_lock_irqsave(&tdc->lock, flags);
if (!list_empty(&dma_desc->tx_list))
    list_splice_init(&dma_desc->tx_list, &tdc->free_sg_req);
list_add_tail(&dma_desc->node, &tdc->free_dma_desc);
spin_unlock_irqrestore(&tdc->lock, flags);
/* tegra_dma_sg_req_get() */
spin_lock_irqsave(&tdc->lock, flags);
if (!list_empty(&tdc->free_sg_req)) {
    sg_req = list_first_entry(&tdc->free_sg_req, typeof(*sg_req),
                              node);
    list_del(&sg_req->node);
    spin_unlock_irqrestore(&tdc->lock, flags);
    ...
}
spin_unlock_irqrestore(&tdc->lock, flags);
/* tegra_dma_slave_config() */
if (!list_empty(&tdc->pending_sg_req)) {
    ...
    return -EBUSY;
}

memcpy(&tdc->dma_sconfig, sconfig, sizeof(*sconfig));
if (tdc->slave_id == TEGRA_APBDMA_SLAVE_ID_INVALID &&
    sconfig->device_fc) {
    if (sconfig->slave_id > TEGRA_APBDMA_CSR_REQ_SEL_MASK)
        return -EINVAL;
    tdc->slave_id = sconfig->slave_id;
}
tdc->config_init = true;
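For orientation, here is a minimal, hypothetical client-side sketch of how a peripheral driver would reach tegra_dma_slave_config() through the standard dmaengine API; dev, fifo_phys, and the "rx" channel name are illustrative assumptions, not taken from this driver:

    #include <linux/dmaengine.h>
    #include <linux/err.h>

    static int request_and_configure_rx(struct device *dev, dma_addr_t fifo_phys)
    {
        struct dma_slave_config cfg = { };
        struct dma_chan *chan;
        int ret;

        /* Channel lookup goes through tegra_dma_of_xlate() on DT platforms */
        chan = dma_request_chan(dev, "rx");
        if (IS_ERR(chan))
            return PTR_ERR(chan);

        cfg.direction = DMA_DEV_TO_MEM;
        cfg.src_addr = fifo_phys;                 /* device FIFO on the APB side */
        cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
        cfg.src_maxburst = 4;

        /* Dispatched to tegra_dma_slave_config() via device_config */
        ret = dmaengine_slave_config(chan, &cfg);
        if (ret)
            dma_release_channel(chan);
        return ret;
    }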
/* tegra_dma_global_pause() */
struct tegra_dma *tdma = tdc->tdma;

spin_lock(&tdma->global_lock);

if (tdc->tdma->global_pause_count == 0) {
    ...
}

tdc->tdma->global_pause_count++;

spin_unlock(&tdma->global_lock);

/* tegra_dma_global_resume() */
struct tegra_dma *tdma = tdc->tdma;

spin_lock(&tdma->global_lock);

if (WARN_ON(tdc->tdma->global_pause_count == 0))
    ...

if (--tdc->tdma->global_pause_count == 0)
    ...

spin_unlock(&tdma->global_lock);

/* tegra_dma_pause(): per-channel pause on hardware that supports it */
struct tegra_dma *tdma = tdc->tdma;

if (tdma->chip_data->support_channel_pause) {
    ...
}

/* tegra_dma_resume() */
struct tegra_dma *tdma = tdc->tdma;

if (tdma->chip_data->support_channel_pause)
    ...
/* tegra_dma_stop() */
tdc->busy = false;

/* tegra_dma_start() */
struct tegra_dma_channel_regs *ch_regs = &sg_req->ch_regs;

tdc_write(tdc, TEGRA_APBDMA_CHAN_CSR, ch_regs->csr);
tdc_write(tdc, TEGRA_APBDMA_CHAN_APBSEQ, ch_regs->apb_seq);
tdc_write(tdc, TEGRA_APBDMA_CHAN_APBPTR, ch_regs->apb_ptr);
tdc_write(tdc, TEGRA_APBDMA_CHAN_AHBSEQ, ch_regs->ahb_seq);
tdc_write(tdc, TEGRA_APBDMA_CHAN_AHBPTR, ch_regs->ahb_ptr);
if (tdc->tdma->chip_data->support_separate_wcount_reg)
    tdc_write(tdc, TEGRA_APBDMA_CHAN_WCOUNT, ch_regs->wcount);

/* Start DMA */
tdc_write(tdc, TEGRA_APBDMA_CHAN_CSR,
          ch_regs->csr | TEGRA_APBDMA_CSR_ENB);
/* tegra_dma_configure_for_next() */
tdc_write(tdc, TEGRA_APBDMA_CHAN_APBPTR, nsg_req->ch_regs.apb_ptr);
tdc_write(tdc, TEGRA_APBDMA_CHAN_AHBPTR, nsg_req->ch_regs.ahb_ptr);
if (tdc->tdma->chip_data->support_separate_wcount_reg)
    tdc_write(tdc, TEGRA_APBDMA_CHAN_WCOUNT,
              nsg_req->ch_regs.wcount);
tdc_write(tdc, TEGRA_APBDMA_CHAN_CSR,
          nsg_req->ch_regs.csr | TEGRA_APBDMA_CSR_ENB);
nsg_req->configured = true;
nsg_req->words_xferred = 0;
/* tdc_start_head_req() */
sg_req = list_first_entry(&tdc->pending_sg_req, typeof(*sg_req), node);
...
sg_req->configured = true;
sg_req->words_xferred = 0;
tdc->busy = true;

/* tdc_configure_next_head_desc() */
hsgreq = list_first_entry(&tdc->pending_sg_req, typeof(*hsgreq), node);
if (!list_is_last(&hsgreq->node, &tdc->pending_sg_req)) {
    hnsgreq = list_first_entry(&hsgreq->node, typeof(*hnsgreq),
                               node);
    ...
}
/* get_current_xferred_count() */
return sg_req->req_len - (status & TEGRA_APBDMA_STATUS_COUNT_MASK) - 4;
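A worked example of the formula above, assuming the hardware counts the programmed word count of (len - 4) down in the status register's COUNT field (see tegra_dma_prep_wcount() further below): for a 64-byte sub-transfer with a raw status count of 28, the bytes already transferred are 64 - 28 - 4 = 32, i.e. 28 + 4 = 32 bytes still remain.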
/* tegra_dma_abort_all() */
while (!list_empty(&tdc->pending_sg_req)) {
    sgreq = list_first_entry(&tdc->pending_sg_req, typeof(*sgreq),
                             node);
    list_move_tail(&sgreq->node, &tdc->free_sg_req);
    if (sgreq->last_sg) {
        dma_desc = sgreq->dma_desc;
        dma_desc->dma_status = DMA_ERROR;
        list_add_tail(&dma_desc->node, &tdc->free_dma_desc);
        ...
        if (!dma_desc->cb_count)
            list_add_tail(&dma_desc->cb_node,
                          &tdc->cb_desc);
        dma_desc->cb_count++;
    }
}
tdc->isr_handler = NULL;
/* handle_continuous_head_request() */
hsgreq = list_first_entry(&tdc->pending_sg_req, typeof(*hsgreq), node);
if (!hsgreq->configured) {
    ...
    pm_runtime_put(tdc->tdma->dev);
    ...
}
/* handle_once_dma_done() */
tdc->busy = false;
sgreq = list_first_entry(&tdc->pending_sg_req, typeof(*sgreq), node);
dma_desc = sgreq->dma_desc;
dma_desc->bytes_transferred += sgreq->req_len;

list_del(&sgreq->node);
if (sgreq->last_sg) {
    dma_desc->dma_status = DMA_COMPLETE;
    dma_cookie_complete(&dma_desc->txd);
    if (!dma_desc->cb_count)
        list_add_tail(&dma_desc->cb_node, &tdc->cb_desc);
    dma_desc->cb_count++;
    list_add_tail(&dma_desc->node, &tdc->free_dma_desc);
}
list_add_tail(&sgreq->node, &tdc->free_sg_req);
...
if (list_empty(&tdc->pending_sg_req)) {
    pm_runtime_put(tdc->tdma->dev);
    ...
}
/* handle_cont_sngl_cycle_dma_done() */
sgreq = list_first_entry(&tdc->pending_sg_req, typeof(*sgreq), node);
dma_desc = sgreq->dma_desc;
/* the accumulated count wraps at the ring-buffer size */
dma_desc->bytes_transferred =
    (dma_desc->bytes_transferred + sgreq->req_len) %
    dma_desc->bytes_requested;
...
if (!dma_desc->cb_count)
    list_add_tail(&dma_desc->cb_node, &tdc->cb_desc);
dma_desc->cb_count++;

sgreq->words_xferred = 0;

/* If not the last request, put it at the end of the pending list */
if (!list_is_last(&sgreq->node, &tdc->pending_sg_req)) {
    list_move_tail(&sgreq->node, &tdc->pending_sg_req);
    sgreq->configured = false;
    ...
}
...
dma_desc->dma_status = DMA_ERROR;
/* tegra_dma_tasklet() */
spin_lock_irqsave(&tdc->lock, flags);
while (!list_empty(&tdc->cb_desc)) {
    dma_desc = list_first_entry(&tdc->cb_desc, typeof(*dma_desc),
                                cb_node);
    list_del(&dma_desc->cb_node);
    dmaengine_desc_get_callback(&dma_desc->txd, &cb);
    cb_count = dma_desc->cb_count;
    dma_desc->cb_count = 0;
    trace_tegra_dma_complete_cb(&tdc->dma_chan, cb_count,
                                cb.callback);
    spin_unlock_irqrestore(&tdc->lock, flags);
    while (cb_count--)
        dmaengine_desc_callback_invoke(&cb, NULL);
    spin_lock_irqsave(&tdc->lock, flags);
}
spin_unlock_irqrestore(&tdc->lock, flags);
/* tegra_dma_isr() */
spin_lock(&tdc->lock);

trace_tegra_dma_isr(&tdc->dma_chan, irq);
...
tdc->isr_handler(tdc, false);
tasklet_schedule(&tdc->tasklet);
wake_up_all(&tdc->wq);
spin_unlock(&tdc->lock);
...
spin_unlock(&tdc->lock);
/* tegra_dma_tx_submit() */
struct tegra_dma_channel *tdc = to_tegra_dma_chan(txd->chan);
...
spin_lock_irqsave(&tdc->lock, flags);
dma_desc->dma_status = DMA_IN_PROGRESS;
cookie = dma_cookie_assign(&dma_desc->txd);
list_splice_tail_init(&dma_desc->tx_list, &tdc->pending_sg_req);
spin_unlock_irqrestore(&tdc->lock, flags);
/* tegra_dma_issue_pending() */
spin_lock_irqsave(&tdc->lock, flags);
if (list_empty(&tdc->pending_sg_req)) {
    ...
}
if (!tdc->busy) {
    err = pm_runtime_resume_and_get(tdc->tdma->dev);
    ...
    /* Continuous single mode: configure the next request */
    if (tdc->cyclic) {
        ...
    }
}
spin_unlock_irqrestore(&tdc->lock, flags);
/* tegra_dma_terminate_all() */
spin_lock_irqsave(&tdc->lock, flags);

if (!tdc->busy)
    ...
...
tdc->isr_handler(tdc, true);
...
if (tdc->tdma->chip_data->support_separate_wcount_reg)
    ...
...
was_busy = tdc->busy;
...
if (!list_empty(&tdc->pending_sg_req) && was_busy) {
    sgreq = list_first_entry(&tdc->pending_sg_req, typeof(*sgreq),
                             node);
    sgreq->dma_desc->bytes_transferred +=
        get_current_xferred_count(tdc, sgreq, wcount);
}
...
pm_runtime_put(tdc->tdma->dev);
wake_up_all(&tdc->wq);
...
while (!list_empty(&tdc->cb_desc)) {
    dma_desc = list_first_entry(&tdc->cb_desc, typeof(*dma_desc),
                                cb_node);
    list_del(&dma_desc->cb_node);
    dma_desc->cb_count = 0;
}
spin_unlock_irqrestore(&tdc->lock, flags);
/* tegra_dma_eoc_interrupt_deasserted() */
spin_lock_irqsave(&tdc->lock, flags);
...
spin_unlock_irqrestore(&tdc->lock, flags);

/* tegra_dma_synchronize() */
err = pm_runtime_resume_and_get(tdc->tdma->dev);
...
wait_event(tdc->wq, tegra_dma_eoc_interrupt_deasserted(tdc));

tasklet_kill(&tdc->tasklet);

pm_runtime_put(tdc->tdma->dev);
/* tegra_dma_sg_bytes_xferred() */
if (!list_is_first(&sg_req->node, &tdc->pending_sg_req))
    ...

if (tdc->tdma->chip_data->support_separate_wcount_reg)
    ...
...
if (!tdc->tdma->chip_data->support_separate_wcount_reg)
    wcount = status;

if (status & TEGRA_APBDMA_STATUS_ISE_EOC)
    return sg_req->req_len;
...
    if (sg_req->words_xferred)
        wcount = sg_req->req_len - 4;

} else if (wcount < sg_req->words_xferred) {
    /*
     * This case will never happen for a non-cyclic transfer.
     */
    ...
    wcount = sg_req->req_len - 4;
}

sg_req->words_xferred = wcount;
/* tegra_dma_tx_status() */
spin_lock_irqsave(&tdc->lock, flags);

/* Check on wait_ack desc status */
list_for_each_entry(dma_desc, &tdc->free_dma_desc, node) {
    if (dma_desc->txd.cookie == cookie) {
        ret = dma_desc->dma_status;
        ...
    }
}

/* Check in pending list */
list_for_each_entry(sg_req, &tdc->pending_sg_req, node) {
    dma_desc = sg_req->dma_desc;
    if (dma_desc->txd.cookie == cookie) {
        ...
        ret = dma_desc->dma_status;
        ...
    }
}
...
residual = dma_desc->bytes_requested -
           ((dma_desc->bytes_transferred + bytes) %
            dma_desc->bytes_requested);
...
trace_tegra_dma_tx_status(&tdc->dma_chan, cookie, txstate);
spin_unlock_irqrestore(&tdc->lock, flags);
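A worked example of the residue arithmetic above: with bytes_requested = 4096, an accumulated bytes_transferred = 8192 and bytes = 1024 completed in the current segment, the residue is 4096 - ((8192 + 1024) % 4096) = 4096 - 1024 = 3072. The modulo keeps the result within one buffer period even after the counter of a cyclic transfer wraps.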
/* get_burst_size(): the burst size from the client is given in terms of the slave bus width; convert it into the AHB memory width, which is 4 bytes. */
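As a worked example of that conversion (assuming get_burst_size() multiplies the client's maxburst by the slave bus width and divides by the 4-byte AHB word): a maxburst of 8 at a 2-byte bus width is 16 bytes, i.e. 4 AHB words, so the channel would use an AHB burst of 4 words.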
/* get_transfer_param() */
*apb_addr = tdc->dma_sconfig.dst_addr;
*apb_seq = get_bus_width(tdc, tdc->dma_sconfig.dst_addr_width);
*burst_size = tdc->dma_sconfig.dst_maxburst;
*slave_bw = tdc->dma_sconfig.dst_addr_width;
...
*apb_addr = tdc->dma_sconfig.src_addr;
*apb_seq = get_bus_width(tdc, tdc->dma_sconfig.src_addr_width);
*burst_size = tdc->dma_sconfig.src_maxburst;
*slave_bw = tdc->dma_sconfig.src_addr_width;
...
return -EINVAL;

/* tegra_dma_prep_wcount() */
u32 len_field = (len - 4) & 0xFFFC;

if (tdc->tdma->chip_data->support_separate_wcount_reg)
    ch_regs->wcount = len_field;
else
    ch_regs->csr |= len_field;
/* tegra_dma_prep_slave_sg() */
if (!tdc->config_init) {
    ...
}
...
if (tdc->slave_id != TEGRA_APBDMA_SLAVE_ID_INVALID) {
    ...
    csr |= tdc->slave_id << TEGRA_APBDMA_CSR_REQ_SEL_SHIFT;
}
...
INIT_LIST_HEAD(&dma_desc->tx_list);
INIT_LIST_HEAD(&dma_desc->cb_node);
dma_desc->cb_count = 0;
dma_desc->bytes_requested = 0;
dma_desc->bytes_transferred = 0;
dma_desc->dma_status = DMA_IN_PROGRESS;

/* Make transfer requests */
...
if (... ||
    len > tdc->tdma->chip_data->max_dma_count) {
    ...
}
...
if (!sg_req) {
    dev_err(tdc2dev(tdc), "DMA sg-req not available\n");
    ...
}
...
dma_desc->bytes_requested += len;

sg_req->ch_regs.apb_ptr = apb_ptr;
sg_req->ch_regs.ahb_ptr = mem;
sg_req->ch_regs.csr = csr;
tegra_dma_prep_wcount(tdc, &sg_req->ch_regs, len);
sg_req->ch_regs.apb_seq = apb_seq;
sg_req->ch_regs.ahb_seq = ahb_seq;
sg_req->configured = false;
sg_req->last_sg = false;
sg_req->dma_desc = dma_desc;
sg_req->req_len = len;

list_add_tail(&sg_req->node, &dma_desc->tx_list);
...
sg_req->last_sg = true;
...
dma_desc->txd.flags = DMA_CTRL_ACK;

/* Make sure the mode does not conflict with the currently configured mode */
if (!tdc->isr_handler) {
    tdc->isr_handler = handle_once_dma_done;
    tdc->cyclic = false;
} else {
    if (tdc->cyclic) {
        ...
    }
}
...
return &dma_desc->txd;
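A hypothetical continuation of the client sketch above, showing how a slave driver would submit a scatter-gather receive that ends up in device_prep_slave_sg; buf, len, my_rx_complete, and ctx are assumptions:

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    static int start_rx(struct dma_chan *chan, void *buf, size_t len,
                        void (*my_rx_complete)(void *param), void *ctx)
    {
        struct dma_async_tx_descriptor *desc;
        struct scatterlist sg;
        dma_cookie_t cookie;
        int nents;

        sg_init_one(&sg, buf, len);
        nents = dma_map_sg(chan->device->dev, &sg, 1, DMA_FROM_DEVICE);
        if (!nents)
            return -ENOMEM;

        /* Dispatched to tegra_dma_prep_slave_sg() via device_prep_slave_sg */
        desc = dmaengine_prep_slave_sg(chan, &sg, nents, DMA_DEV_TO_MEM,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc)
            return -ENOMEM;

        desc->callback = my_rx_complete;    /* runs from tegra_dma_tasklet() */
        desc->callback_param = ctx;

        cookie = dmaengine_submit(desc);    /* -> tegra_dma_tx_submit() */
        dma_async_issue_pending(chan);      /* -> tegra_dma_issue_pending() */
        return dma_submit_error(cookie);
    }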
/* tegra_dma_prep_dma_cyclic() */
if (!tdc->config_init) {
    ...
}
...
if (tdc->busy) {
    ...
}
...
if (... ||
    len > tdc->tdma->chip_data->max_dma_count) {
    ...
}
...
if (tdc->slave_id != TEGRA_APBDMA_SLAVE_ID_INVALID) {
    ...
    csr |= tdc->slave_id << TEGRA_APBDMA_CSR_REQ_SEL_SHIFT;
}
...
INIT_LIST_HEAD(&dma_desc->tx_list);
INIT_LIST_HEAD(&dma_desc->cb_node);
dma_desc->cb_count = 0;

dma_desc->bytes_transferred = 0;
dma_desc->bytes_requested = buf_len;
...
if (!sg_req) {
    dev_err(tdc2dev(tdc), "DMA sg-req not available\n");
    ...
}

sg_req->ch_regs.apb_ptr = apb_ptr;
sg_req->ch_regs.ahb_ptr = mem;
sg_req->ch_regs.csr = csr;
tegra_dma_prep_wcount(tdc, &sg_req->ch_regs, len);
sg_req->ch_regs.apb_seq = apb_seq;
sg_req->ch_regs.ahb_seq = ahb_seq;
sg_req->configured = false;
sg_req->last_sg = false;
sg_req->dma_desc = dma_desc;
sg_req->req_len = len;

list_add_tail(&sg_req->node, &dma_desc->tx_list);
remain_len -= len;
...
sg_req->last_sg = true;
...
dma_desc->txd.flags = DMA_CTRL_ACK;

/* Make sure the mode does not conflict with the currently configured mode */
if (!tdc->isr_handler) {
    tdc->isr_handler = handle_cont_sngl_cycle_dma_done;
    tdc->cyclic = true;
} else {
    if (!tdc->cyclic) {
        ...
    }
}
...
return &dma_desc->txd;
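A matching hypothetical client sketch for the cyclic path; buf_phys, period_elapsed, and ctx are assumptions. For this driver, buf_len must be a multiple of period_len, and each completed period raises one interrupt:

    #include <linux/dmaengine.h>

    static int start_cyclic_rx(struct dma_chan *chan, dma_addr_t buf_phys,
                               size_t buf_len, size_t period_len,
                               void (*period_elapsed)(void *param), void *ctx)
    {
        struct dma_async_tx_descriptor *desc;

        /* Dispatched to tegra_dma_prep_dma_cyclic() via device_prep_dma_cyclic */
        desc = dmaengine_prep_dma_cyclic(chan, buf_phys, buf_len, period_len,
                                         DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
        if (!desc)
            return -ENOMEM;

        desc->callback = period_elapsed;    /* invoked once per period */
        desc->callback_param = ctx;

        dmaengine_submit(desc);
        dma_async_issue_pending(chan);
        return 0;
    }

Stopping such a transfer would go through dmaengine_terminate_sync(chan), which lands in tegra_dma_terminate_all() and tegra_dma_synchronize() above.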
/* tegra_dma_alloc_chan_resources() */
dma_cookie_init(&tdc->dma_chan);
...

/* tegra_dma_free_chan_resources() */
dev_dbg(tdc2dev(tdc), "Freeing channel %d\n", tdc->id);

tasklet_kill(&tdc->tasklet);
...
list_splice_init(&tdc->pending_sg_req, &sg_req_list);
list_splice_init(&tdc->free_sg_req, &sg_req_list);
list_splice_init(&tdc->free_dma_desc, &dma_desc_list);
INIT_LIST_HEAD(&tdc->cb_desc);
tdc->config_init = false;
tdc->isr_handler = NULL;
...
list_del(&dma_desc->node);
...
list_del(&sg_req->node);
...
tdc->slave_id = TEGRA_APBDMA_SLAVE_ID_INVALID;
/* tegra_dma_of_xlate() */
struct tegra_dma *tdma = ofdma->of_dma_data;
...
if (dma_spec->args[0] > TEGRA_APBDMA_CSR_REQ_SEL_MASK) {
    dev_err(tdma->dev, "Invalid slave id: %d\n", dma_spec->args[0]);
    ...
}

chan = dma_get_any_slave_channel(&tdma->dma_dev);
...
tdc->slave_id = dma_spec->args[0];
/* Tegra20 specific DMA controller information */
/* tegra_dma_init_hw() */
err = reset_control_assert(tdma->rst);
if (err) {
    dev_err(tdma->dev, "failed to assert reset: %d\n", err);
    ...
}

err = clk_enable(tdma->dma_clk);
if (err) {
    dev_err(tdma->dev, "failed to enable clk: %d\n", err);
    ...
}

/* reset the DMA controller */
reset_control_deassert(tdma->rst);
...
clk_disable(tdma->dma_clk);
/* tegra_dma_probe() */
cdata = of_device_get_match_data(&pdev->dev);
size = struct_size(tdma, channels, cdata->nr_channels);

tdma = devm_kzalloc(&pdev->dev, size, GFP_KERNEL);
if (!tdma)
    return -ENOMEM;

tdma->dev = &pdev->dev;
tdma->chip_data = cdata;
...
tdma->base_addr = devm_platform_ioremap_resource(pdev, 0);
if (IS_ERR(tdma->base_addr))
    return PTR_ERR(tdma->base_addr);

tdma->dma_clk = devm_clk_get(&pdev->dev, NULL);
if (IS_ERR(tdma->dma_clk)) {
    dev_err(&pdev->dev, "Error: Missing controller clock\n");
    return PTR_ERR(tdma->dma_clk);
}

tdma->rst = devm_reset_control_get(&pdev->dev, "dma");
if (IS_ERR(tdma->rst)) {
    dev_err(&pdev->dev, "Error: Missing reset\n");
    return PTR_ERR(tdma->rst);
}

spin_lock_init(&tdma->global_lock);

ret = clk_prepare(tdma->dma_clk);
...

pm_runtime_irq_safe(&pdev->dev);
pm_runtime_enable(&pdev->dev);
...
INIT_LIST_HEAD(&tdma->dma_dev.channels);
for (i = 0; i < cdata->nr_channels; i++) {
    struct tegra_dma_channel *tdc = &tdma->channels[i];
    ...
    tdc->chan_addr = tdma->base_addr +
                     TEGRA_APBDMA_CHANNEL_BASE_ADD_OFFSET +
                     (i * cdata->channel_reg_size);
    ...
    snprintf(tdc->name, sizeof(tdc->name), "apbdma.%d", i);
    ret = devm_request_irq(&pdev->dev, irq, tegra_dma_isr, 0,
                           tdc->name, tdc);
    if (ret) {
        dev_err(&pdev->dev, ...);    /* error message elided in the listing */
        ...
    }

    tdc->dma_chan.device = &tdma->dma_dev;
    dma_cookie_init(&tdc->dma_chan);
    list_add_tail(&tdc->dma_chan.device_node,
                  &tdma->dma_dev.channels);
    tdc->tdma = tdma;
    tdc->id = i;
    tdc->slave_id = TEGRA_APBDMA_SLAVE_ID_INVALID;

    tasklet_setup(&tdc->tasklet, tegra_dma_tasklet);
    spin_lock_init(&tdc->lock);
    init_waitqueue_head(&tdc->wq);

    INIT_LIST_HEAD(&tdc->pending_sg_req);
    INIT_LIST_HEAD(&tdc->free_sg_req);
    INIT_LIST_HEAD(&tdc->free_dma_desc);
    INIT_LIST_HEAD(&tdc->cb_desc);
}

dma_cap_set(DMA_SLAVE, tdma->dma_dev.cap_mask);
dma_cap_set(DMA_PRIVATE, tdma->dma_dev.cap_mask);
dma_cap_set(DMA_CYCLIC, tdma->dma_dev.cap_mask);

tdma->global_pause_count = 0;
tdma->dma_dev.dev = &pdev->dev;
tdma->dma_dev.device_alloc_chan_resources =
            tegra_dma_alloc_chan_resources;
tdma->dma_dev.device_free_chan_resources =
            tegra_dma_free_chan_resources;
tdma->dma_dev.device_prep_slave_sg = tegra_dma_prep_slave_sg;
tdma->dma_dev.device_prep_dma_cyclic = tegra_dma_prep_dma_cyclic;
tdma->dma_dev.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
                                ...;
tdma->dma_dev.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
                                ...;
tdma->dma_dev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
tdma->dma_dev.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
tdma->dma_dev.device_config = tegra_dma_slave_config;
tdma->dma_dev.device_terminate_all = tegra_dma_terminate_all;
tdma->dma_dev.device_synchronize = tegra_dma_synchronize;
tdma->dma_dev.device_tx_status = tegra_dma_tx_status;
tdma->dma_dev.device_issue_pending = tegra_dma_issue_pending;

ret = dma_async_device_register(&tdma->dma_dev);
if (ret < 0) {
    dev_err(&pdev->dev,
            "Tegra20 APB DMA driver registration failed %d\n", ret);
    ...
}

ret = of_dma_controller_register(pdev->dev.of_node,
                                 tegra_dma_of_xlate, tdma);
if (ret < 0) {
    dev_err(&pdev->dev,
            "Tegra20 APB DMA OF registration failed %d\n", ret);
    ...
}

dev_info(&pdev->dev, "Tegra20 APB DMA driver registered %u channels\n",
         cdata->nr_channels);
...
/* error unwind */
dma_async_device_unregister(&tdma->dma_dev);
...
pm_runtime_disable(&pdev->dev);
...
clk_unprepare(tdma->dma_clk);
/* tegra_dma_remove() */
of_dma_controller_free(pdev->dev.of_node);
dma_async_device_unregister(&tdma->dma_dev);
pm_runtime_disable(&pdev->dev);
clk_unprepare(tdma->dma_clk);

/* tegra_dma_runtime_suspend() */
clk_disable(tdma->dma_clk);

/* tegra_dma_runtime_resume() */
return clk_enable(tdma->dma_clk);
/* tegra_dma_dev_suspend() */
for (i = 0; i < tdma->chip_data->nr_channels; i++) {
    struct tegra_dma_channel *tdc = &tdma->channels[i];

    tasklet_kill(&tdc->tasklet);

    spin_lock_irqsave(&tdc->lock, flags);
    busy = tdc->busy;
    spin_unlock_irqrestore(&tdc->lock, flags);

    if (busy) {
        dev_err(tdma->dev, "channel %u busy\n", i);
        return -EBUSY;
    }
}
/* of_device_id match table */
.compatible = "nvidia,tegra148-apbdma",
...
.compatible = "nvidia,tegra114-apbdma",
...
.compatible = "nvidia,tegra30-apbdma",
...
.compatible = "nvidia,tegra20-apbdma",
...

/* platform driver */
.name = "tegra-apbdma",