Lines Matching refs:chan

233 	struct stm32_dma_chan chan[STM32_DMA_MAX_CHANNELS];  member
236 static struct stm32_dma_device *stm32_dma_get_dev(struct stm32_dma_chan *chan) in stm32_dma_get_dev() argument
238 return container_of(chan->vchan.chan.device, struct stm32_dma_device, in stm32_dma_get_dev()
244 return container_of(c, struct stm32_dma_chan, vchan.chan); in to_stm32_dma_chan()
252 static struct device *chan2dev(struct stm32_dma_chan *chan) in chan2dev() argument
254 return &chan->vchan.chan.dev->device; in chan2dev()
267 static int stm32_dma_get_width(struct stm32_dma_chan *chan, in stm32_dma_get_width() argument
278 dev_err(chan2dev(chan), "Dma bus width not supported\n"); in stm32_dma_get_width()
365 static int stm32_dma_get_burst(struct stm32_dma_chan *chan, u32 maxburst) in stm32_dma_get_burst() argument
378 dev_err(chan2dev(chan), "Dma burst size not supported\n"); in stm32_dma_get_burst()
383 static void stm32_dma_set_fifo_config(struct stm32_dma_chan *chan, in stm32_dma_set_fifo_config() argument
386 chan->chan_reg.dma_sfcr &= ~STM32_DMA_SFCR_MASK; in stm32_dma_set_fifo_config()
387 chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_DMEIE; in stm32_dma_set_fifo_config()
391 chan->chan_reg.dma_scr |= STM32_DMA_SCR_DMEIE; in stm32_dma_set_fifo_config()
394 chan->chan_reg.dma_sfcr |= STM32_DMA_SFCR_MASK; in stm32_dma_set_fifo_config()
401 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_slave_config() local
403 memcpy(&chan->dma_sconfig, config, sizeof(*config)); in stm32_dma_slave_config()
407 config->peripheral_config = &chan->mdma_config; in stm32_dma_slave_config()
408 config->peripheral_size = sizeof(chan->mdma_config); in stm32_dma_slave_config()
409 chan->trig_mdma = true; in stm32_dma_slave_config()
412 chan->config_init = true; in stm32_dma_slave_config()
417 static u32 stm32_dma_irq_status(struct stm32_dma_chan *chan) in stm32_dma_irq_status() argument
419 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_irq_status()
427 dma_isr = stm32_dma_read(dmadev, STM32_DMA_ISR(chan->id)); in stm32_dma_irq_status()
428 flags = dma_isr >> STM32_DMA_FLAGS_SHIFT(chan->id); in stm32_dma_irq_status()
433 static void stm32_dma_irq_clear(struct stm32_dma_chan *chan, u32 flags) in stm32_dma_irq_clear() argument
435 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_irq_clear()
443 dma_ifcr = flags << STM32_DMA_FLAGS_SHIFT(chan->id); in stm32_dma_irq_clear()
445 stm32_dma_write(dmadev, STM32_DMA_IFCR(chan->id), dma_ifcr); in stm32_dma_irq_clear()
448 static int stm32_dma_disable_chan(struct stm32_dma_chan *chan) in stm32_dma_disable_chan() argument
450 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_disable_chan()
453 id = chan->id; in stm32_dma_disable_chan()
469 static void stm32_dma_stop(struct stm32_dma_chan *chan) in stm32_dma_stop() argument
471 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_stop()
476 dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id)); in stm32_dma_stop()
478 stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), dma_scr); in stm32_dma_stop()
479 dma_sfcr = stm32_dma_read(dmadev, STM32_DMA_SFCR(chan->id)); in stm32_dma_stop()
481 stm32_dma_write(dmadev, STM32_DMA_SFCR(chan->id), dma_sfcr); in stm32_dma_stop()
484 ret = stm32_dma_disable_chan(chan); in stm32_dma_stop()
489 status = stm32_dma_irq_status(chan); in stm32_dma_stop()
491 dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n", in stm32_dma_stop()
493 stm32_dma_irq_clear(chan, status); in stm32_dma_stop()
496 chan->busy = false; in stm32_dma_stop()
497 chan->status = DMA_COMPLETE; in stm32_dma_stop()
502 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_terminate_all() local
506 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_terminate_all()
508 if (chan->desc) { in stm32_dma_terminate_all()
509 dma_cookie_complete(&chan->desc->vdesc.tx); in stm32_dma_terminate_all()
510 vchan_terminate_vdesc(&chan->desc->vdesc); in stm32_dma_terminate_all()
511 if (chan->busy) in stm32_dma_terminate_all()
512 stm32_dma_stop(chan); in stm32_dma_terminate_all()
513 chan->desc = NULL; in stm32_dma_terminate_all()
516 vchan_get_all_descriptors(&chan->vchan, &head); in stm32_dma_terminate_all()
517 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma_terminate_all()
518 vchan_dma_desc_free_list(&chan->vchan, &head); in stm32_dma_terminate_all()
525 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_synchronize() local
527 vchan_synchronize(&chan->vchan); in stm32_dma_synchronize()
530 static void stm32_dma_dump_reg(struct stm32_dma_chan *chan) in stm32_dma_dump_reg() argument
532 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_dump_reg()
533 u32 scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id)); in stm32_dma_dump_reg()
534 u32 ndtr = stm32_dma_read(dmadev, STM32_DMA_SNDTR(chan->id)); in stm32_dma_dump_reg()
535 u32 spar = stm32_dma_read(dmadev, STM32_DMA_SPAR(chan->id)); in stm32_dma_dump_reg()
536 u32 sm0ar = stm32_dma_read(dmadev, STM32_DMA_SM0AR(chan->id)); in stm32_dma_dump_reg()
537 u32 sm1ar = stm32_dma_read(dmadev, STM32_DMA_SM1AR(chan->id)); in stm32_dma_dump_reg()
538 u32 sfcr = stm32_dma_read(dmadev, STM32_DMA_SFCR(chan->id)); in stm32_dma_dump_reg()
540 dev_dbg(chan2dev(chan), "SCR: 0x%08x\n", scr); in stm32_dma_dump_reg()
541 dev_dbg(chan2dev(chan), "NDTR: 0x%08x\n", ndtr); in stm32_dma_dump_reg()
542 dev_dbg(chan2dev(chan), "SPAR: 0x%08x\n", spar); in stm32_dma_dump_reg()
543 dev_dbg(chan2dev(chan), "SM0AR: 0x%08x\n", sm0ar); in stm32_dma_dump_reg()
544 dev_dbg(chan2dev(chan), "SM1AR: 0x%08x\n", sm1ar); in stm32_dma_dump_reg()
545 dev_dbg(chan2dev(chan), "SFCR: 0x%08x\n", sfcr); in stm32_dma_dump_reg()
548 static void stm32_dma_sg_inc(struct stm32_dma_chan *chan) in stm32_dma_sg_inc() argument
550 chan->next_sg++; in stm32_dma_sg_inc()
551 if (chan->desc->cyclic && (chan->next_sg == chan->desc->num_sgs)) in stm32_dma_sg_inc()
552 chan->next_sg = 0; in stm32_dma_sg_inc()
555 static void stm32_dma_configure_next_sg(struct stm32_dma_chan *chan);
557 static void stm32_dma_start_transfer(struct stm32_dma_chan *chan) in stm32_dma_start_transfer() argument
559 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_start_transfer()
566 ret = stm32_dma_disable_chan(chan); in stm32_dma_start_transfer()
570 if (!chan->desc) { in stm32_dma_start_transfer()
571 vdesc = vchan_next_desc(&chan->vchan); in stm32_dma_start_transfer()
577 chan->desc = to_stm32_dma_desc(vdesc); in stm32_dma_start_transfer()
578 chan->next_sg = 0; in stm32_dma_start_transfer()
581 if (chan->next_sg == chan->desc->num_sgs) in stm32_dma_start_transfer()
582 chan->next_sg = 0; in stm32_dma_start_transfer()
584 sg_req = &chan->desc->sg_req[chan->next_sg]; in stm32_dma_start_transfer()
588 if (chan->trig_mdma && chan->dma_sconfig.direction != DMA_MEM_TO_DEV) in stm32_dma_start_transfer()
592 stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), reg->dma_scr); in stm32_dma_start_transfer()
593 stm32_dma_write(dmadev, STM32_DMA_SPAR(chan->id), reg->dma_spar); in stm32_dma_start_transfer()
594 stm32_dma_write(dmadev, STM32_DMA_SM0AR(chan->id), reg->dma_sm0ar); in stm32_dma_start_transfer()
595 stm32_dma_write(dmadev, STM32_DMA_SFCR(chan->id), reg->dma_sfcr); in stm32_dma_start_transfer()
596 stm32_dma_write(dmadev, STM32_DMA_SM1AR(chan->id), reg->dma_sm1ar); in stm32_dma_start_transfer()
597 stm32_dma_write(dmadev, STM32_DMA_SNDTR(chan->id), reg->dma_sndtr); in stm32_dma_start_transfer()
599 stm32_dma_sg_inc(chan); in stm32_dma_start_transfer()
602 status = stm32_dma_irq_status(chan); in stm32_dma_start_transfer()
604 stm32_dma_irq_clear(chan, status); in stm32_dma_start_transfer()
606 if (chan->desc->cyclic) in stm32_dma_start_transfer()
607 stm32_dma_configure_next_sg(chan); in stm32_dma_start_transfer()
609 stm32_dma_dump_reg(chan); in stm32_dma_start_transfer()
612 chan->busy = true; in stm32_dma_start_transfer()
613 chan->status = DMA_IN_PROGRESS; in stm32_dma_start_transfer()
615 stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), reg->dma_scr); in stm32_dma_start_transfer()
617 dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan); in stm32_dma_start_transfer()
620 static void stm32_dma_configure_next_sg(struct stm32_dma_chan *chan) in stm32_dma_configure_next_sg() argument
622 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_configure_next_sg()
626 id = chan->id; in stm32_dma_configure_next_sg()
629 sg_req = &chan->desc->sg_req[chan->next_sg]; in stm32_dma_configure_next_sg()
634 dev_dbg(chan2dev(chan), "CT=1 <=> SM0AR: 0x%08x\n", in stm32_dma_configure_next_sg()
639 dev_dbg(chan2dev(chan), "CT=0 <=> SM1AR: 0x%08x\n", in stm32_dma_configure_next_sg()
644 static void stm32_dma_handle_chan_paused(struct stm32_dma_chan *chan) in stm32_dma_handle_chan_paused() argument
646 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_handle_chan_paused()
653 dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id)); in stm32_dma_handle_chan_paused()
659 if (chan->desc && chan->desc->cyclic) { in stm32_dma_handle_chan_paused()
660 if (chan->desc->num_sgs == 1) in stm32_dma_handle_chan_paused()
665 chan->chan_reg.dma_scr = dma_scr; in stm32_dma_handle_chan_paused()
671 if (chan->desc && chan->desc->cyclic) { in stm32_dma_handle_chan_paused()
673 stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), dma_scr); in stm32_dma_handle_chan_paused()
676 chan->chan_reg.dma_sndtr = stm32_dma_read(dmadev, STM32_DMA_SNDTR(chan->id)); in stm32_dma_handle_chan_paused()
678 chan->status = DMA_PAUSED; in stm32_dma_handle_chan_paused()
680 dev_dbg(chan2dev(chan), "vchan %pK: paused\n", &chan->vchan); in stm32_dma_handle_chan_paused()
683 static void stm32_dma_post_resume_reconfigure(struct stm32_dma_chan *chan) in stm32_dma_post_resume_reconfigure() argument
685 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_post_resume_reconfigure()
689 id = chan->id; in stm32_dma_post_resume_reconfigure()
693 status = stm32_dma_irq_status(chan); in stm32_dma_post_resume_reconfigure()
695 stm32_dma_irq_clear(chan, status); in stm32_dma_post_resume_reconfigure()
697 if (!chan->next_sg) in stm32_dma_post_resume_reconfigure()
698 sg_req = &chan->desc->sg_req[chan->desc->num_sgs - 1]; in stm32_dma_post_resume_reconfigure()
700 sg_req = &chan->desc->sg_req[chan->next_sg - 1]; in stm32_dma_post_resume_reconfigure()
703 stm32_dma_write(dmadev, STM32_DMA_SNDTR(chan->id), sg_req->chan_reg.dma_sndtr); in stm32_dma_post_resume_reconfigure()
713 if (chan->chan_reg.dma_scr & STM32_DMA_SCR_DBM) { in stm32_dma_post_resume_reconfigure()
716 if (chan->chan_reg.dma_scr & STM32_DMA_SCR_CT) in stm32_dma_post_resume_reconfigure()
720 } else if (chan->chan_reg.dma_scr & STM32_DMA_SCR_CIRC) { in stm32_dma_post_resume_reconfigure()
723 stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), dma_scr); in stm32_dma_post_resume_reconfigure()
725 stm32_dma_configure_next_sg(chan); in stm32_dma_post_resume_reconfigure()
727 stm32_dma_dump_reg(chan); in stm32_dma_post_resume_reconfigure()
730 stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), dma_scr); in stm32_dma_post_resume_reconfigure()
732 dev_dbg(chan2dev(chan), "vchan %pK: reconfigured after pause/resume\n", &chan->vchan); in stm32_dma_post_resume_reconfigure()
735 static void stm32_dma_handle_chan_done(struct stm32_dma_chan *chan, u32 scr) in stm32_dma_handle_chan_done() argument
737 if (!chan->desc) in stm32_dma_handle_chan_done()
740 if (chan->desc->cyclic) { in stm32_dma_handle_chan_done()
741 vchan_cyclic_callback(&chan->desc->vdesc); in stm32_dma_handle_chan_done()
742 if (chan->trig_mdma) in stm32_dma_handle_chan_done()
744 stm32_dma_sg_inc(chan); in stm32_dma_handle_chan_done()
747 stm32_dma_post_resume_reconfigure(chan); in stm32_dma_handle_chan_done()
749 stm32_dma_configure_next_sg(chan); in stm32_dma_handle_chan_done()
751 chan->busy = false; in stm32_dma_handle_chan_done()
752 chan->status = DMA_COMPLETE; in stm32_dma_handle_chan_done()
753 if (chan->next_sg == chan->desc->num_sgs) { in stm32_dma_handle_chan_done()
754 vchan_cookie_complete(&chan->desc->vdesc); in stm32_dma_handle_chan_done()
755 chan->desc = NULL; in stm32_dma_handle_chan_done()
757 stm32_dma_start_transfer(chan); in stm32_dma_handle_chan_done()
763 struct stm32_dma_chan *chan = devid; in stm32_dma_chan_irq() local
764 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_chan_irq()
767 spin_lock(&chan->vchan.lock); in stm32_dma_chan_irq()
769 status = stm32_dma_irq_status(chan); in stm32_dma_chan_irq()
770 scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id)); in stm32_dma_chan_irq()
771 sfcr = stm32_dma_read(dmadev, STM32_DMA_SFCR(chan->id)); in stm32_dma_chan_irq()
774 stm32_dma_irq_clear(chan, STM32_DMA_FEI); in stm32_dma_chan_irq()
779 dev_err(chan2dev(chan), "FIFO Error\n"); in stm32_dma_chan_irq()
781 dev_dbg(chan2dev(chan), "FIFO over/underrun\n"); in stm32_dma_chan_irq()
785 stm32_dma_irq_clear(chan, STM32_DMA_DMEI); in stm32_dma_chan_irq()
788 dev_dbg(chan2dev(chan), "Direct mode overrun\n"); in stm32_dma_chan_irq()
792 stm32_dma_irq_clear(chan, STM32_DMA_TCI); in stm32_dma_chan_irq()
794 if (chan->status != DMA_PAUSED) in stm32_dma_chan_irq()
795 stm32_dma_handle_chan_done(chan, scr); in stm32_dma_chan_irq()
801 stm32_dma_irq_clear(chan, STM32_DMA_HTI); in stm32_dma_chan_irq()
806 stm32_dma_irq_clear(chan, status); in stm32_dma_chan_irq()
807 dev_err(chan2dev(chan), "DMA error: status=0x%08x\n", status); in stm32_dma_chan_irq()
809 dev_err(chan2dev(chan), "chan disabled by HW\n"); in stm32_dma_chan_irq()
812 spin_unlock(&chan->vchan.lock); in stm32_dma_chan_irq()
819 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_issue_pending() local
822 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_issue_pending()
823 if (vchan_issue_pending(&chan->vchan) && !chan->desc && !chan->busy) { in stm32_dma_issue_pending()
824 dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan); in stm32_dma_issue_pending()
825 stm32_dma_start_transfer(chan); in stm32_dma_issue_pending()
828 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma_issue_pending()
833 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_pause() local
837 if (chan->status != DMA_IN_PROGRESS) in stm32_dma_pause()
840 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_pause()
842 ret = stm32_dma_disable_chan(chan); in stm32_dma_pause()
844 stm32_dma_handle_chan_paused(chan); in stm32_dma_pause()
846 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma_pause()
853 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_resume() local
854 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_resume()
855 struct stm32_dma_chan_reg chan_reg = chan->chan_reg; in stm32_dma_resume()
856 u32 id = chan->id, scr, ndtr, offset, spar, sm0ar, sm1ar; in stm32_dma_resume()
860 if (chan->status != DMA_PAUSED) in stm32_dma_resume()
867 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_resume()
870 if (!chan->next_sg) in stm32_dma_resume()
871 sg_req = &chan->desc->sg_req[chan->desc->num_sgs - 1]; in stm32_dma_resume()
873 sg_req = &chan->desc->sg_req[chan->next_sg - 1]; in stm32_dma_resume()
915 stm32_dma_configure_next_sg(chan); in stm32_dma_resume()
917 stm32_dma_dump_reg(chan); in stm32_dma_resume()
920 chan->status = DMA_IN_PROGRESS; in stm32_dma_resume()
924 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma_resume()
926 dev_dbg(chan2dev(chan), "vchan %pK: resumed\n", &chan->vchan); in stm32_dma_resume()
931 static int stm32_dma_set_xfer_param(struct stm32_dma_chan *chan, in stm32_dma_set_xfer_param() argument
942 src_addr_width = chan->dma_sconfig.src_addr_width; in stm32_dma_set_xfer_param()
943 dst_addr_width = chan->dma_sconfig.dst_addr_width; in stm32_dma_set_xfer_param()
944 src_maxburst = chan->dma_sconfig.src_maxburst; in stm32_dma_set_xfer_param()
945 dst_maxburst = chan->dma_sconfig.dst_maxburst; in stm32_dma_set_xfer_param()
946 fifoth = chan->threshold; in stm32_dma_set_xfer_param()
951 dst_bus_width = stm32_dma_get_width(chan, dst_addr_width); in stm32_dma_set_xfer_param()
961 dst_burst_size = stm32_dma_get_burst(chan, dst_best_burst); in stm32_dma_set_xfer_param()
968 chan->mem_width = src_addr_width; in stm32_dma_set_xfer_param()
969 src_bus_width = stm32_dma_get_width(chan, src_addr_width); in stm32_dma_set_xfer_param()
985 src_burst_size = stm32_dma_get_burst(chan, src_best_burst); in stm32_dma_set_xfer_param()
996 chan->chan_reg.dma_sfcr &= ~STM32_DMA_SFCR_FTH_MASK; in stm32_dma_set_xfer_param()
998 chan->chan_reg.dma_sfcr |= FIELD_PREP(STM32_DMA_SFCR_FTH_MASK, fifoth); in stm32_dma_set_xfer_param()
1001 chan->chan_reg.dma_spar = chan->dma_sconfig.dst_addr; in stm32_dma_set_xfer_param()
1007 src_bus_width = stm32_dma_get_width(chan, src_addr_width); in stm32_dma_set_xfer_param()
1016 chan->mem_burst = src_best_burst; in stm32_dma_set_xfer_param()
1017 src_burst_size = stm32_dma_get_burst(chan, src_best_burst); in stm32_dma_set_xfer_param()
1024 chan->mem_width = dst_addr_width; in stm32_dma_set_xfer_param()
1025 dst_bus_width = stm32_dma_get_width(chan, dst_addr_width); in stm32_dma_set_xfer_param()
1041 chan->mem_burst = dst_best_burst; in stm32_dma_set_xfer_param()
1042 dst_burst_size = stm32_dma_get_burst(chan, dst_best_burst); in stm32_dma_set_xfer_param()
1053 chan->chan_reg.dma_sfcr &= ~STM32_DMA_SFCR_FTH_MASK; in stm32_dma_set_xfer_param()
1055 chan->chan_reg.dma_sfcr |= FIELD_PREP(STM32_DMA_SFCR_FTH_MASK, fifoth); in stm32_dma_set_xfer_param()
1058 chan->chan_reg.dma_spar = chan->dma_sconfig.src_addr; in stm32_dma_set_xfer_param()
1059 *buswidth = chan->dma_sconfig.src_addr_width; in stm32_dma_set_xfer_param()
1063 dev_err(chan2dev(chan), "Dma direction is not supported\n"); in stm32_dma_set_xfer_param()
1067 stm32_dma_set_fifo_config(chan, src_best_burst, dst_best_burst); in stm32_dma_set_xfer_param()
1070 chan->chan_reg.dma_scr &= ~(STM32_DMA_SCR_DIR_MASK | in stm32_dma_set_xfer_param()
1073 chan->chan_reg.dma_scr |= dma_scr; in stm32_dma_set_xfer_param()
1088 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_prep_slave_sg() local
1095 if (!chan->config_init) { in stm32_dma_prep_slave_sg()
1096 dev_err(chan2dev(chan), "dma channel is not configured\n"); in stm32_dma_prep_slave_sg()
1101 dev_err(chan2dev(chan), "Invalid segment length %d\n", sg_len); in stm32_dma_prep_slave_sg()
1110 if (chan->dma_sconfig.device_fc) in stm32_dma_prep_slave_sg()
1111 chan->chan_reg.dma_scr |= STM32_DMA_SCR_PFCTRL; in stm32_dma_prep_slave_sg()
1113 chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_PFCTRL; in stm32_dma_prep_slave_sg()
1116 if (chan->trig_mdma && sg_len > 1) { in stm32_dma_prep_slave_sg()
1117 chan->chan_reg.dma_scr |= STM32_DMA_SCR_DBM; in stm32_dma_prep_slave_sg()
1118 chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_CT; in stm32_dma_prep_slave_sg()
1122 ret = stm32_dma_set_xfer_param(chan, direction, &buswidth, in stm32_dma_prep_slave_sg()
1132 dev_err(chan2dev(chan), "nb items not supported\n"); in stm32_dma_prep_slave_sg()
1137 desc->sg_req[i].chan_reg.dma_scr = chan->chan_reg.dma_scr; in stm32_dma_prep_slave_sg()
1138 desc->sg_req[i].chan_reg.dma_sfcr = chan->chan_reg.dma_sfcr; in stm32_dma_prep_slave_sg()
1139 desc->sg_req[i].chan_reg.dma_spar = chan->chan_reg.dma_spar; in stm32_dma_prep_slave_sg()
1142 if (chan->trig_mdma) in stm32_dma_prep_slave_sg()
1150 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_dma_prep_slave_sg()
1162 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_prep_dma_cyclic() local
1169 dev_err(chan2dev(chan), "Invalid buffer/period len\n"); in stm32_dma_prep_dma_cyclic()
1173 if (!chan->config_init) { in stm32_dma_prep_dma_cyclic()
1174 dev_err(chan2dev(chan), "dma channel is not configured\n"); in stm32_dma_prep_dma_cyclic()
1179 dev_err(chan2dev(chan), "buf_len not multiple of period_len\n"); in stm32_dma_prep_dma_cyclic()
1189 if (chan->busy) { in stm32_dma_prep_dma_cyclic()
1190 dev_err(chan2dev(chan), "Request not allowed when dma busy\n"); in stm32_dma_prep_dma_cyclic()
1194 ret = stm32_dma_set_xfer_param(chan, direction, &buswidth, period_len, in stm32_dma_prep_dma_cyclic()
1201 dev_err(chan2dev(chan), "number of items not supported\n"); in stm32_dma_prep_dma_cyclic()
1207 chan->chan_reg.dma_scr |= STM32_DMA_SCR_CIRC; in stm32_dma_prep_dma_cyclic()
1209 chan->chan_reg.dma_scr |= STM32_DMA_SCR_DBM; in stm32_dma_prep_dma_cyclic()
1210 chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_CT; in stm32_dma_prep_dma_cyclic()
1214 chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_PFCTRL; in stm32_dma_prep_dma_cyclic()
1226 desc->sg_req[i].chan_reg.dma_scr = chan->chan_reg.dma_scr; in stm32_dma_prep_dma_cyclic()
1227 desc->sg_req[i].chan_reg.dma_sfcr = chan->chan_reg.dma_sfcr; in stm32_dma_prep_dma_cyclic()
1228 desc->sg_req[i].chan_reg.dma_spar = chan->chan_reg.dma_spar; in stm32_dma_prep_dma_cyclic()
1231 if (chan->trig_mdma) in stm32_dma_prep_dma_cyclic()
1234 if (!chan->trig_mdma) in stm32_dma_prep_dma_cyclic()
1241 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_dma_prep_dma_cyclic()
1248 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_prep_dma_memcpy() local
1260 threshold = chan->threshold; in stm32_dma_prep_dma_memcpy()
1270 dma_burst = stm32_dma_get_burst(chan, best_burst); in stm32_dma_prep_dma_memcpy()
1296 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_dma_prep_dma_memcpy()
1299 static u32 stm32_dma_get_remaining_bytes(struct stm32_dma_chan *chan) in stm32_dma_get_remaining_bytes() argument
1302 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_get_remaining_bytes()
1304 dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id)); in stm32_dma_get_remaining_bytes()
1306 ndtr = stm32_dma_read(dmadev, STM32_DMA_SNDTR(chan->id)); in stm32_dma_get_remaining_bytes()
1323 static bool stm32_dma_is_current_sg(struct stm32_dma_chan *chan) in stm32_dma_is_current_sg() argument
1325 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_is_current_sg()
1329 id = chan->id; in stm32_dma_is_current_sg()
1336 sg_req = &chan->desc->sg_req[chan->next_sg]; in stm32_dma_is_current_sg()
1359 static size_t stm32_dma_desc_residue(struct stm32_dma_chan *chan, in stm32_dma_desc_residue() argument
1366 struct stm32_dma_sg_req *sg_req = &chan->desc->sg_req[chan->next_sg]; in stm32_dma_desc_residue()
1394 residue = stm32_dma_get_remaining_bytes(chan); in stm32_dma_desc_residue()
1396 if ((chan->desc->cyclic || chan->trig_mdma) && !stm32_dma_is_current_sg(chan)) { in stm32_dma_desc_residue()
1398 if (n_sg == chan->desc->num_sgs) in stm32_dma_desc_residue()
1400 if (!chan->trig_mdma) in stm32_dma_desc_residue()
1411 if ((!chan->desc->cyclic && !chan->trig_mdma) || n_sg != 0) in stm32_dma_desc_residue()
1415 if (!chan->mem_burst) in stm32_dma_desc_residue()
1418 burst_size = chan->mem_burst * chan->mem_width; in stm32_dma_desc_residue()
1430 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_tx_status() local
1440 status = chan->status; in stm32_dma_tx_status()
1445 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_tx_status()
1446 vdesc = vchan_find_desc(&chan->vchan, cookie); in stm32_dma_tx_status()
1447 if (chan->desc && cookie == chan->desc->vdesc.tx.cookie) in stm32_dma_tx_status()
1448 residue = stm32_dma_desc_residue(chan, chan->desc, in stm32_dma_tx_status()
1449 chan->next_sg); in stm32_dma_tx_status()
1451 residue = stm32_dma_desc_residue(chan, in stm32_dma_tx_status()
1455 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma_tx_status()
1462 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_alloc_chan_resources() local
1463 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_alloc_chan_resources()
1466 chan->config_init = false; in stm32_dma_alloc_chan_resources()
1472 ret = stm32_dma_disable_chan(chan); in stm32_dma_alloc_chan_resources()
1481 struct stm32_dma_chan *chan = to_stm32_dma_chan(c); in stm32_dma_free_chan_resources() local
1482 struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan); in stm32_dma_free_chan_resources()
1485 dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id); in stm32_dma_free_chan_resources()
1487 if (chan->busy) { in stm32_dma_free_chan_resources()
1488 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_free_chan_resources()
1489 stm32_dma_stop(chan); in stm32_dma_free_chan_resources()
1490 chan->desc = NULL; in stm32_dma_free_chan_resources()
1491 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma_free_chan_resources()
1497 stm32_dma_clear_reg(&chan->chan_reg); in stm32_dma_free_chan_resources()
1498 chan->threshold = 0; in stm32_dma_free_chan_resources()
1506 static void stm32_dma_set_config(struct stm32_dma_chan *chan, in stm32_dma_set_config() argument
1509 stm32_dma_clear_reg(&chan->chan_reg); in stm32_dma_set_config()
1511 chan->chan_reg.dma_scr = cfg->stream_config & STM32_DMA_SCR_CFG_MASK; in stm32_dma_set_config()
1512 chan->chan_reg.dma_scr |= FIELD_PREP(STM32_DMA_SCR_REQ_MASK, cfg->request_line); in stm32_dma_set_config()
1515 chan->chan_reg.dma_scr |= STM32_DMA_SCR_TEIE | STM32_DMA_SCR_TCIE; in stm32_dma_set_config()
1517 chan->threshold = FIELD_GET(STM32_DMA_THRESHOLD_FTR_MASK, cfg->features); in stm32_dma_set_config()
1519 chan->threshold = STM32_DMA_FIFO_THRESHOLD_NONE; in stm32_dma_set_config()
1521 chan->chan_reg.dma_scr |= STM32_DMA_SCR_TRBUFF; in stm32_dma_set_config()
1522 chan->mdma_config.stream_id = FIELD_GET(STM32_DMA_MDMA_STREAM_ID_MASK, cfg->features); in stm32_dma_set_config()
1531 struct stm32_dma_chan *chan; in stm32_dma_of_xlate() local
1550 chan = &dmadev->chan[cfg.channel_id]; in stm32_dma_of_xlate()
1552 c = dma_get_slave_channel(&chan->vchan.chan); in stm32_dma_of_xlate()
1558 stm32_dma_set_config(chan, &cfg); in stm32_dma_of_xlate()
1571 struct stm32_dma_chan *chan; in stm32_dma_probe() local
1657 chan = &dmadev->chan[i]; in stm32_dma_probe()
1658 chan->id = i; in stm32_dma_probe()
1659 chan->vchan.desc_free = stm32_dma_desc_free; in stm32_dma_probe()
1660 vchan_init(&chan->vchan, dd); in stm32_dma_probe()
1662 chan->mdma_config.ifcr = res->start; in stm32_dma_probe()
1663 chan->mdma_config.ifcr += STM32_DMA_IFCR(chan->id); in stm32_dma_probe()
1665 chan->mdma_config.tcf = STM32_DMA_TCI; in stm32_dma_probe()
1666 chan->mdma_config.tcf <<= STM32_DMA_FLAGS_SHIFT(chan->id); in stm32_dma_probe()
1674 chan = &dmadev->chan[i]; in stm32_dma_probe()
1678 chan->irq = ret; in stm32_dma_probe()
1680 ret = devm_request_irq(&pdev->dev, chan->irq, in stm32_dma_probe()
1682 dev_name(chan2dev(chan)), chan); in stm32_dma_probe()