Lines matching refs:schan (uses of the per-channel struct sirfsoc_dma_chan pointer in the SiRFSoC DMA driver, drivers/dma/sirf-dma.c)

159 	struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(c);  in dma_chan_to_sirfsoc_dma()  local
160 return container_of(schan, struct sirfsoc_dma, channels[c->chan_id]); in dma_chan_to_sirfsoc_dma()
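These two matches are the complete body of dma_chan_to_sirfsoc_dma(). A minimal reconstruction of the helper pair; the companion dma_chan_to_sirfsoc_dma_chan() is not among the matches and is assumed to be the usual container_of() accessor:

    static inline struct sirfsoc_dma_chan *
    dma_chan_to_sirfsoc_dma_chan(struct dma_chan *c)
    {
            /* assumed: struct sirfsoc_dma_chan embeds its dma_chan as .chan */
            return container_of(c, struct sirfsoc_dma_chan, chan);
    }

    static inline struct sirfsoc_dma *dma_chan_to_sirfsoc_dma(struct dma_chan *c)
    {
            struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(c);

            /* step from the per-channel struct back to the controller
             * that embeds it in its channels[] array */
            return container_of(schan, struct sirfsoc_dma, channels[c->chan_id]);
    }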
237 static void sirfsoc_dma_execute(struct sirfsoc_dma_chan *schan) in sirfsoc_dma_execute() argument
239 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); in sirfsoc_dma_execute()
240 int cid = schan->chan.chan_id; in sirfsoc_dma_execute()
249 sdesc = list_first_entry(&schan->queued, struct sirfsoc_dma_desc, in sirfsoc_dma_execute()
252 list_move_tail(&sdesc->node, &schan->active); in sirfsoc_dma_execute()
258 sdma->exec_desc(sdesc, cid, schan->mode, base); in sirfsoc_dma_execute()
261 schan->happened_cyclic = schan->completed_cyclic = 0; in sirfsoc_dma_execute()
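The matches for sirfsoc_dma_execute() outline the whole dispatch path: pop the first queued descriptor, move it to the active list, and hand it to the controller-specific exec_desc() hook. A sketch consistent with those lines; the base pointer and the cyclic flag are assumptions filled in from context:

    static void sirfsoc_dma_execute(struct sirfsoc_dma_chan *schan)
    {
            struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan);
            int cid = schan->chan.chan_id;
            struct sirfsoc_dma_desc *sdesc;

            /* caller holds schan->lock; queued is known to be non-empty */
            sdesc = list_first_entry(&schan->queued, struct sirfsoc_dma_desc,
                                     node);
            list_move_tail(&sdesc->node, &schan->active);

            /* kick the hardware via the per-variant hook */
            sdma->exec_desc(sdesc, cid, schan->mode, sdma->base);

            if (sdesc->cyclic)
                    schan->happened_cyclic = schan->completed_cyclic = 0;
    }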
268 struct sirfsoc_dma_chan *schan; in sirfsoc_dma_irq() local
283 schan = &sdma->channels[ch]; in sirfsoc_dma_irq()
284 spin_lock(&schan->lock); in sirfsoc_dma_irq()
285 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_irq()
289 list_splice_tail_init(&schan->active, in sirfsoc_dma_irq()
290 &schan->completed); in sirfsoc_dma_irq()
292 if (!list_empty(&schan->queued)) in sirfsoc_dma_irq()
293 sirfsoc_dma_execute(schan); in sirfsoc_dma_irq()
295 schan->happened_cyclic++; in sirfsoc_dma_irq()
296 spin_unlock(&schan->lock); in sirfsoc_dma_irq()
305 schan = &sdma->channels[0]; in sirfsoc_dma_irq()
306 spin_lock(&schan->lock); in sirfsoc_dma_irq()
307 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_irq()
315 list_splice_tail_init(&schan->active, in sirfsoc_dma_irq()
316 &schan->completed); in sirfsoc_dma_irq()
318 if (!list_empty(&schan->queued)) in sirfsoc_dma_irq()
319 sirfsoc_dma_execute(schan); in sirfsoc_dma_irq()
323 schan->happened_cyclic++; in sirfsoc_dma_irq()
325 spin_unlock(&schan->lock); in sirfsoc_dma_irq()
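The interrupt handler appears twice above because the driver serves two register layouts: one raises a per-channel interrupt bit (lines 283-296), the other funnels everything through channel 0 (lines 305-325). Both branches follow the same pattern. A simplified sketch of the per-channel case; the interrupt-status register access and its acking are assumptions, since those lines do not reference schan and are not listed:

    static irqreturn_t sirfsoc_dma_irq(int irq, void *data)
    {
            struct sirfsoc_dma *sdma = data;
            struct sirfsoc_dma_chan *schan;
            struct sirfsoc_dma_desc *sdesc;
            u32 is;
            int ch;

            /* assumed register name; acking the bit is elided */
            is = readl(sdma->base + SIRFSOC_DMA_CH_INT);
            while ((ch = fls(is) - 1) >= 0) {
                    is &= ~(1 << ch);
                    schan = &sdma->channels[ch];

                    spin_lock(&schan->lock);
                    sdesc = list_first_entry(&schan->active,
                                             struct sirfsoc_dma_desc, node);
                    if (!sdesc->cyclic) {
                            /* retire the transfer and start the next one */
                            list_splice_tail_init(&schan->active,
                                                  &schan->completed);
                            if (!list_empty(&schan->queued))
                                    sirfsoc_dma_execute(schan);
                    } else {
                            /* cyclic: just count the period; the tasklet
                             * runs the callback */
                            schan->happened_cyclic++;
                    }
                    spin_unlock(&schan->lock);
            }

            /* defer callback execution to the tasklet */
            tasklet_schedule(&sdma->tasklet);

            return IRQ_HANDLED;
    }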
342 struct sirfsoc_dma_chan *schan; in sirfsoc_dma_process_completed() local
351 schan = &sdma->channels[i]; in sirfsoc_dma_process_completed()
354 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_process_completed()
355 if (!list_empty(&schan->completed)) { in sirfsoc_dma_process_completed()
356 list_splice_tail_init(&schan->completed, &list); in sirfsoc_dma_process_completed()
357 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_process_completed()
371 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_process_completed()
372 list_splice_tail_init(&list, &schan->free); in sirfsoc_dma_process_completed()
373 schan->chan.completed_cookie = last_cookie; in sirfsoc_dma_process_completed()
374 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_process_completed()
376 if (list_empty(&schan->active)) { in sirfsoc_dma_process_completed()
377 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_process_completed()
382 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_process_completed()
386 happened_cyclic = schan->happened_cyclic; in sirfsoc_dma_process_completed()
387 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_process_completed()
390 while (happened_cyclic != schan->completed_cyclic) { in sirfsoc_dma_process_completed()
393 schan->completed_cyclic++; in sirfsoc_dma_process_completed()
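sirfsoc_dma_process_completed() is the tasklet body. For ordinary transfers it splices the completed list onto a private list, runs the callbacks with the lock dropped, then returns the descriptors to the free list (lines 354-374). For cyclic transfers the descriptor stays on the active list and the callback fires once per elapsed period until completed_cyclic catches up with happened_cyclic (lines 376-393). A trimmed sketch of the per-channel body; the callback invocation details are assumptions:

    /* inside the for-each-channel loop; sdesc, list, flags,
     * last_cookie and happened_cyclic declared by the caller */
    spin_lock_irqsave(&schan->lock, flags);
    if (!list_empty(&schan->completed)) {
            list_splice_tail_init(&schan->completed, &list);
            spin_unlock_irqrestore(&schan->lock, flags);

            /* callbacks run unlocked */
            list_for_each_entry(sdesc, &list, node) {
                    if (sdesc->desc.callback)
                            sdesc->desc.callback(sdesc->desc.callback_param);
                    last_cookie = sdesc->desc.cookie;
            }

            spin_lock_irqsave(&schan->lock, flags);
            list_splice_tail_init(&list, &schan->free);
            schan->chan.completed_cookie = last_cookie;
            spin_unlock_irqrestore(&schan->lock, flags);
    } else {
            if (list_empty(&schan->active)) {
                    spin_unlock_irqrestore(&schan->lock, flags);
                    continue;
            }
            /* cyclic descriptor stays on the active list */
            sdesc = list_first_entry(&schan->active,
                                     struct sirfsoc_dma_desc, node);
            happened_cyclic = schan->happened_cyclic;
            spin_unlock_irqrestore(&schan->lock, flags);

            /* one callback per elapsed hardware period */
            while (happened_cyclic != schan->completed_cyclic) {
                    if (sdesc->desc.callback)
                            sdesc->desc.callback(sdesc->desc.callback_param);
                    schan->completed_cyclic++;
            }
    }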
410 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(txd->chan); in sirfsoc_dma_tx_submit() local
417 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_tx_submit()
420 list_move_tail(&sdesc->node, &schan->queued); in sirfsoc_dma_tx_submit()
424 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_tx_submit()
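tx_submit is where a prepared descriptor becomes pending work: under the channel lock it moves from the prepared list to the queued list and a cookie is assigned. A sketch assuming the standard dma_cookie_assign() helper (the cookie lines are not among the matches):

    static dma_cookie_t sirfsoc_dma_tx_submit(struct dma_async_tx_descriptor *txd)
    {
            struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(txd->chan);
            struct sirfsoc_dma_desc *sdesc = container_of(txd,
                            struct sirfsoc_dma_desc, desc);
            unsigned long flags;
            dma_cookie_t cookie;

            spin_lock_irqsave(&schan->lock, flags);

            /* prepared -> queued; sirfsoc_dma_issue_pending() starts it */
            list_move_tail(&sdesc->node, &schan->queued);
            cookie = dma_cookie_assign(txd);

            spin_unlock_irqrestore(&schan->lock, flags);

            return cookie;
    }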
432 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_slave_config() local
439 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_slave_config()
440 schan->mode = (config->src_maxburst == 4 ? 1 : 0); in sirfsoc_dma_slave_config()
441 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_slave_config()
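Line 440 is the only state the slave configuration touches: a burst of 4 words selects mode 1, anything else mode 0. A sketch; the up-front validation is an assumption, since any checks before line 439 do not reference schan and are not listed:

    static int sirfsoc_dma_slave_config(struct dma_chan *chan,
                                        struct dma_slave_config *config)
    {
            struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
            unsigned long flags;

            /* assumed validation: bursts of 1 or 4 words only */
            if ((config->src_maxburst != 4 && config->src_maxburst != 1) ||
                (config->dst_maxburst != 4 && config->dst_maxburst != 1))
                    return -EINVAL;

            spin_lock_irqsave(&schan->lock, flags);
            schan->mode = (config->src_maxburst == 4 ? 1 : 0);
            spin_unlock_irqrestore(&schan->lock, flags);

            return 0;
    }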
448 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_terminate_all() local
449 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); in sirfsoc_dma_terminate_all()
450 int cid = schan->chan.chan_id; in sirfsoc_dma_terminate_all()
453 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_terminate_all()
484 list_splice_tail_init(&schan->active, &schan->free); in sirfsoc_dma_terminate_all()
485 list_splice_tail_init(&schan->queued, &schan->free); in sirfsoc_dma_terminate_all()
487 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_terminate_all()
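terminate_all shows only its cleanup half here: the hardware stop sequence between lines 453 and 484 never references schan, so it is not listed. What remains is that every active and queued descriptor is recycled straight onto the free list, with no completion callbacks. In outline:

    spin_lock_irqsave(&schan->lock, flags);

    /* hardware stop/flush for channel cid: register writes elided,
     * they do not appear in the reference listing */

    list_splice_tail_init(&schan->active, &schan->free);
    list_splice_tail_init(&schan->queued, &schan->free);

    spin_unlock_irqrestore(&schan->lock, flags);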
494 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_pause_chan() local
495 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); in sirfsoc_dma_pause_chan()
496 int cid = schan->chan.chan_id; in sirfsoc_dma_pause_chan()
499 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_pause_chan()
521 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_pause_chan()
528 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_resume_chan() local
529 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); in sirfsoc_dma_resume_chan()
530 int cid = schan->chan.chan_id; in sirfsoc_dma_resume_chan()
533 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_resume_chan()
554 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_resume_chan()
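Pause and resume are mirror images: both derive the controller and channel id from schan, then poke the per-channel enable state under schan->lock. Only the locking skeleton references schan; the register manipulation (lines 499-521 and 533-554) is elided from the listing. The shared shape:

    struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
    struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan);
    int cid = schan->chan.chan_id;
    unsigned long flags;

    spin_lock_irqsave(&schan->lock, flags);
    /* disable (pause) or re-enable (resume) channel cid's loop and
     * interrupt bits; the exact registers are variant-specific */
    spin_unlock_irqrestore(&schan->lock, flags);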
563 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_alloc_chan_resources() local
591 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_alloc_chan_resources()
593 list_splice_tail_init(&descs, &schan->free); in sirfsoc_dma_alloc_chan_resources()
594 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_alloc_chan_resources()
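alloc_chan_resources builds descriptors on a temporary list and only touches schan at the very end, splicing the whole batch onto the free list in one locked operation (lines 591-594). A sketch; the pool size constant and the per-descriptor initialization are assumptions:

    LIST_HEAD(descs);
    int i;

    /* build descriptors off-lock; SIRFSOC_DMA_DESCRIPTORS is an
     * assumed name for the pool size */
    for (i = 0; i < SIRFSOC_DMA_DESCRIPTORS; i++) {
            sdesc = kzalloc(sizeof(*sdesc), GFP_KERNEL);
            if (!sdesc)
                    break;
            dma_async_tx_descriptor_init(&sdesc->desc, chan);
            sdesc->desc.flags = DMA_CTRL_ACK;
            sdesc->desc.tx_submit = sirfsoc_dma_tx_submit;
            list_add_tail(&sdesc->node, &descs);
    }
    if (i == 0)
            return -ENOMEM;

    /* publish the whole batch in one locked splice */
    spin_lock_irqsave(&schan->lock, flags);
    list_splice_tail_init(&descs, &schan->free);
    spin_unlock_irqrestore(&schan->lock, flags);

    return i;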
602 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_free_chan_resources() local
608 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_free_chan_resources()
611 BUG_ON(!list_empty(&schan->prepared)); in sirfsoc_dma_free_chan_resources()
612 BUG_ON(!list_empty(&schan->queued)); in sirfsoc_dma_free_chan_resources()
613 BUG_ON(!list_empty(&schan->active)); in sirfsoc_dma_free_chan_resources()
614 BUG_ON(!list_empty(&schan->completed)); in sirfsoc_dma_free_chan_resources()
617 list_splice_tail_init(&schan->free, &descs); in sirfsoc_dma_free_chan_resources()
619 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_free_chan_resources()
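free_chan_resources inverts that: it asserts that all four in-flight lists are empty (the BUG_ON()s on lines 611-614), splices the free list off under the lock, then frees each descriptor outside it. In outline:

    struct sirfsoc_dma_desc *sdesc, *tmp;
    LIST_HEAD(descs);

    spin_lock_irqsave(&schan->lock, flags);

    /* the channel must be idle before its descriptors are destroyed */
    BUG_ON(!list_empty(&schan->prepared));
    BUG_ON(!list_empty(&schan->queued));
    BUG_ON(!list_empty(&schan->active));
    BUG_ON(!list_empty(&schan->completed));

    list_splice_tail_init(&schan->free, &descs);

    spin_unlock_irqrestore(&schan->lock, flags);

    /* kfree() outside the spinlock */
    list_for_each_entry_safe(sdesc, tmp, &descs, node)
            kfree(sdesc);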
631 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_issue_pending() local
634 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_issue_pending()
636 if (list_empty(&schan->active) && !list_empty(&schan->queued)) in sirfsoc_dma_issue_pending()
637 sirfsoc_dma_execute(schan); in sirfsoc_dma_issue_pending()
639 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_issue_pending()
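issue_pending is fully recoverable from its matches: it only starts the engine when nothing is already active and something is queued, so an in-progress transfer is never disturbed:

    static void sirfsoc_dma_issue_pending(struct dma_chan *chan)
    {
            struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan);
            unsigned long flags;

            spin_lock_irqsave(&schan->lock, flags);

            if (list_empty(&schan->active) && !list_empty(&schan->queued))
                    sirfsoc_dma_execute(schan);

            spin_unlock_irqrestore(&schan->lock, flags);
    }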
648 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_tx_status() local
652 int cid = schan->chan.chan_id; in sirfsoc_dma_tx_status()
657 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_tx_status()
659 if (list_empty(&schan->active)) { in sirfsoc_dma_tx_status()
662 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_tx_status()
665 sdesc = list_first_entry(&schan->active, struct sirfsoc_dma_desc, node); in sirfsoc_dma_tx_status()
687 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_tx_status()
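tx_status distinguishes an idle channel (nothing active, line 659) from a running one, where it inspects the first active descriptor and the channel id (line 652) to compute the residue. A skeleton, with the hardware residue computation elided and the standard dma_cookie_status()/dma_set_residue() helpers assumed:

    spin_lock_irqsave(&schan->lock, flags);

    if (list_empty(&schan->active)) {
            ret = dma_cookie_status(chan, cookie, txstate);
            dma_set_residue(txstate, 0);
            spin_unlock_irqrestore(&schan->lock, flags);
            return ret;
    }
    sdesc = list_first_entry(&schan->active, struct sirfsoc_dma_desc, node);

    /* residue from the channel cid's hardware transfer counter and
     * sdesc's programmed length; those lines are not in the listing */
    ret = dma_cookie_status(chan, cookie, txstate);
    dma_set_residue(txstate, residue);

    spin_unlock_irqrestore(&schan->lock, flags);
    return ret;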
697 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_prep_interleaved() local
708 spin_lock_irqsave(&schan->lock, iflags); in sirfsoc_dma_prep_interleaved()
709 if (!list_empty(&schan->free)) { in sirfsoc_dma_prep_interleaved()
710 sdesc = list_first_entry(&schan->free, struct sirfsoc_dma_desc, in sirfsoc_dma_prep_interleaved()
714 spin_unlock_irqrestore(&schan->lock, iflags); in sirfsoc_dma_prep_interleaved()
724 spin_lock_irqsave(&schan->lock, iflags); in sirfsoc_dma_prep_interleaved()
744 list_add_tail(&sdesc->node, &schan->prepared); in sirfsoc_dma_prep_interleaved()
750 spin_unlock_irqrestore(&schan->lock, iflags); in sirfsoc_dma_prep_interleaved()
754 spin_unlock_irqrestore(&schan->lock, iflags); in sirfsoc_dma_prep_interleaved()
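Both prep routines share the descriptor-recycling pattern visible here: take the channel lock, pop a descriptor from the free list (lines 709-710), drop the lock while filling it in, then retake the lock to add it to the prepared list (line 744), with separate unlocks for the success and error paths (lines 750 and 754). Sketched for the interleaved case, with the geometry fields hedged:

    spin_lock_irqsave(&schan->lock, iflags);
    if (list_empty(&schan->free)) {
            /* no descriptor available; the caller may recycle
             * completed ones and retry */
            spin_unlock_irqrestore(&schan->lock, iflags);
            return NULL;
    }
    sdesc = list_first_entry(&schan->free, struct sirfsoc_dma_desc, node);
    list_del(&sdesc->node);
    spin_unlock_irqrestore(&schan->lock, iflags);

    /* fill the descriptor from the dma_interleaved_template:
     * addresses and x/y geometry; the field names are assumptions,
     * the listing only shows the list handling */

    spin_lock_irqsave(&schan->lock, iflags);
    list_add_tail(&sdesc->node, &schan->prepared);
    spin_unlock_irqrestore(&schan->lock, iflags);

    return &sdesc->desc;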
765 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_prep_cyclic() local
784 spin_lock_irqsave(&schan->lock, iflags); in sirfsoc_dma_prep_cyclic()
785 if (!list_empty(&schan->free)) { in sirfsoc_dma_prep_cyclic()
786 sdesc = list_first_entry(&schan->free, struct sirfsoc_dma_desc, in sirfsoc_dma_prep_cyclic()
790 spin_unlock_irqrestore(&schan->lock, iflags); in sirfsoc_dma_prep_cyclic()
796 spin_lock_irqsave(&schan->lock, iflags); in sirfsoc_dma_prep_cyclic()
802 list_add_tail(&sdesc->node, &schan->prepared); in sirfsoc_dma_prep_cyclic()
803 spin_unlock_irqrestore(&schan->lock, iflags); in sirfsoc_dma_prep_cyclic()
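prep_cyclic follows the same recycle-from-free pattern but keeps the lock while filling the descriptor and adding it to the prepared list (lines 796-803). The cyclic marker it sets is what sirfsoc_dma_execute() and the interrupt handler test later. In outline; the cyclic field name is an assumption:

    spin_lock_irqsave(&schan->lock, iflags);
    sdesc = NULL;
    if (!list_empty(&schan->free)) {
            sdesc = list_first_entry(&schan->free, struct sirfsoc_dma_desc,
                                     node);
            list_del(&sdesc->node);
    }
    spin_unlock_irqrestore(&schan->lock, iflags);

    if (!sdesc)
            return NULL;

    /* validate buf_len/period_len against hardware limits (elided) */

    spin_lock_irqsave(&schan->lock, iflags);
    sdesc->cyclic = 1;        /* assumed field, tested by the irq path */
    /* record addresses and period geometry (elided) */
    list_add_tail(&sdesc->node, &schan->prepared);
    spin_unlock_irqrestore(&schan->lock, iflags);

    return &sdesc->desc;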
849 struct sirfsoc_dma_chan *schan; in sirfsoc_dma_probe() local
931 schan = &sdma->channels[i]; in sirfsoc_dma_probe()
933 schan->chan.device = dma; in sirfsoc_dma_probe()
934 dma_cookie_init(&schan->chan); in sirfsoc_dma_probe()
936 INIT_LIST_HEAD(&schan->free); in sirfsoc_dma_probe()
937 INIT_LIST_HEAD(&schan->prepared); in sirfsoc_dma_probe()
938 INIT_LIST_HEAD(&schan->queued); in sirfsoc_dma_probe()
939 INIT_LIST_HEAD(&schan->active); in sirfsoc_dma_probe()
940 INIT_LIST_HEAD(&schan->completed); in sirfsoc_dma_probe()
942 spin_lock_init(&schan->lock); in sirfsoc_dma_probe()
943 list_add_tail(&schan->chan.device_node, &dma->channels); in sirfsoc_dma_probe()
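The probe-time loop is fully visible in the listing: each channel gets its cookie state, its five descriptor lists, its lock, and a node on the dmaengine device's channel list. Reassembled (the loop bound is an assumption):

    for (i = 0; i < dma->chancnt; i++) {
            schan = &sdma->channels[i];

            schan->chan.device = dma;
            dma_cookie_init(&schan->chan);

            /* descriptor lifecycle: free -> prepared -> queued ->
             * active -> completed -> free */
            INIT_LIST_HEAD(&schan->free);
            INIT_LIST_HEAD(&schan->prepared);
            INIT_LIST_HEAD(&schan->queued);
            INIT_LIST_HEAD(&schan->active);
            INIT_LIST_HEAD(&schan->completed);

            spin_lock_init(&schan->lock);
            list_add_tail(&schan->chan.device_node, &dma->channels);
    }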
1019 struct sirfsoc_dma_chan *schan; in sirfsoc_dma_pm_suspend() local
1048 schan = &sdma->channels[ch]; in sirfsoc_dma_pm_suspend()
1049 if (list_empty(&schan->active)) in sirfsoc_dma_pm_suspend()
1051 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_pm_suspend()
1070 struct sirfsoc_dma_chan *schan; in sirfsoc_dma_pm_resume() local
1094 schan = &sdma->channels[ch]; in sirfsoc_dma_pm_resume()
1095 if (list_empty(&schan->active)) in sirfsoc_dma_pm_resume()
1097 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_pm_resume()
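The suspend/resume pair walk the channels the same way: skip idle channels (list_empty(&schan->active)), otherwise take the first active descriptor, whose register image is saved on suspend and written back on resume. The common skeleton; the channel-count constant and the save area are assumed names:

    for (ch = 0; ch < SIRFSOC_DMA_CHANNELS; ch++) {
            schan = &sdma->channels[ch];
            if (list_empty(&schan->active))
                    continue;
            sdesc = list_first_entry(&schan->active,
                                     struct sirfsoc_dma_desc, node);
            /* suspend: save sdesc's address/length/control words into
             * the controller's save area; resume: write them back and
             * re-enable the channel. Registers are elided above. */
    }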