Lines matching full:bchan (Qualcomm BAM DMA engine driver, bam_dma.c)

Each entry below gives the source line number, the matched line, and the function or macro the match occurs in; source lines that do not contain "bchan" are not listed.

344 #define IS_BUSY(chan)	(CIRC_SPACE(bchan->tail, bchan->head,\
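The match at line 344 stops at the line-continuation backslash, because the continuation line does not itself contain "bchan" and is therefore not listed. A hedged reconstruction of the complete macro, with the second line assumed from the CIRC_SPACE() call that does appear at line 1012:

    /* Sketch of the full macro.  In this driver, tail is the producer index
     * (where software writes new hardware descriptors) and head is the
     * consumer index (the last slot the hardware completed), so a channel
     * counts as busy when its descriptor ring has no free slots.  Note that
     * the body dereferences a local named bchan rather than the chan
     * parameter, so the macro only works inside functions declaring one. */
    #define IS_BUSY(chan)	(CIRC_SPACE(bchan->tail, bchan->head,\
    				MAX_DESCRIPTORS + 1) == 0)

CIRC_SPACE() comes from <linux/circ_buf.h>; the head and tail indices wrap modulo MAX_DESCRIPTORS, as lines 831 and 1058 below show.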
420 * @bchan: bam channel
424 static void bam_reset_channel(struct bam_chan *bchan) in bam_reset_channel() argument
426 struct bam_device *bdev = bchan->bdev; in bam_reset_channel()
428 lockdep_assert_held(&bchan->vc.lock); in bam_reset_channel()
431 writel_relaxed(1, bam_addr(bdev, bchan->id, BAM_P_RST)); in bam_reset_channel()
432 writel_relaxed(0, bam_addr(bdev, bchan->id, BAM_P_RST)); in bam_reset_channel()
438 bchan->initialized = 0; in bam_reset_channel()
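Lines 424 through 438 between them give the whole shape of bam_reset_channel(); a hedged reconstruction of just the listed lines (whatever sits on the unmatched lines in between, such as comments or a write barrier, is omitted):

    /* Sketch assembled from the matches above. */
    static void bam_reset_channel(struct bam_chan *bchan)
    {
        struct bam_device *bdev = bchan->bdev;

        /* must be called with the channel's virt-dma lock held */
        lockdep_assert_held(&bchan->vc.lock);

        /* pulse the per-pipe reset bit: assert, then deassert */
        writel_relaxed(1, bam_addr(bdev, bchan->id, BAM_P_RST));
        writel_relaxed(0, bam_addr(bdev, bchan->id, BAM_P_RST));

        /* force bam_chan_init_hw() to run before the pipe is next used */
        bchan->initialized = 0;
    }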
443 * @bchan: bam channel
448 static void bam_chan_init_hw(struct bam_chan *bchan, in bam_chan_init_hw() argument
451 struct bam_device *bdev = bchan->bdev; in bam_chan_init_hw()
455 bam_reset_channel(bchan); in bam_chan_init_hw()
461 writel_relaxed(ALIGN(bchan->fifo_phys, sizeof(struct bam_desc_hw)), in bam_chan_init_hw()
462 bam_addr(bdev, bchan->id, BAM_P_DESC_FIFO_ADDR)); in bam_chan_init_hw()
464 bam_addr(bdev, bchan->id, BAM_P_FIFO_SIZES)); in bam_chan_init_hw()
468 bam_addr(bdev, bchan->id, BAM_P_IRQ_EN)); in bam_chan_init_hw()
472 val |= BIT(bchan->id); in bam_chan_init_hw()
483 writel_relaxed(val, bam_addr(bdev, bchan->id, BAM_P_CTRL)); in bam_chan_init_hw()
485 bchan->initialized = 1; in bam_chan_init_hw()
488 bchan->head = 0; in bam_chan_init_hw()
489 bchan->tail = 0; in bam_chan_init_hw()
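The bam_chan_init_hw() matches (lines 448 to 489) spell out the per-pipe bring-up order: reset, program the descriptor FIFO address and size, unmask the pipe interrupts, enable the pipe, and finally zero the software head and tail. The sketch below fills in what the search omits; the second parameter, the size constant, the interrupt mask, the EE source-mask register and the pipe control bits are all assumptions (the direction parameter is at least consistent with the call at line 712):

    /* Bring-up order sketch; identifiers marked "assumed" are not visible
     * in the listing and are named after their likely counterparts. */
    static void bam_chan_init_hw(struct bam_chan *bchan,
                                 enum dma_transfer_direction dir) /* assumed */
    {
        struct bam_device *bdev = bchan->bdev;
        u32 val;

        /* start from a known state */
        bam_reset_channel(bchan);

        /* point the pipe at the descriptor FIFO, aligned to one hardware
         * descriptor, and program the ring size */
        writel_relaxed(ALIGN(bchan->fifo_phys, sizeof(struct bam_desc_hw)),
                       bam_addr(bdev, bchan->id, BAM_P_DESC_FIFO_ADDR));
        writel_relaxed(BAM_DESC_FIFO_SIZE,      /* size constant assumed */
                       bam_addr(bdev, bchan->id, BAM_P_FIFO_SIZES));

        /* unmask the per-pipe interrupts (mask value assumed) */
        writel_relaxed(P_DEFAULT_IRQS_EN,
                       bam_addr(bdev, bchan->id, BAM_P_IRQ_EN));

        /* route this pipe's interrupt to the execution environment
         * (register name assumed; only the BIT(bchan->id) line is listed) */
        val = readl_relaxed(bam_addr(bdev, 0, BAM_IRQ_SRCS_MSK_EE));
        val |= BIT(bchan->id);
        writel_relaxed(val, bam_addr(bdev, 0, BAM_IRQ_SRCS_MSK_EE));

        /* enable the pipe in system mode with the requested direction
         * (control bits assumed) */
        val = P_EN | P_SYS_MODE;
        if (dir == DMA_DEV_TO_MEM)
            val |= P_DIRECTION;
        writel_relaxed(val, bam_addr(bdev, bchan->id, BAM_P_CTRL));

        bchan->initialized = 1;

        /* reset the software view of the descriptor ring */
        bchan->head = 0;
        bchan->tail = 0;
    }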
500 struct bam_chan *bchan = to_bam_chan(chan); in bam_alloc_chan() local
501 struct bam_device *bdev = bchan->bdev; in bam_alloc_chan()
503 if (bchan->fifo_virt) in bam_alloc_chan()
507 bchan->fifo_virt = dma_alloc_wc(bdev->dev, BAM_DESC_FIFO_SIZE, in bam_alloc_chan()
508 &bchan->fifo_phys, GFP_KERNEL); in bam_alloc_chan()
510 if (!bchan->fifo_virt) { in bam_alloc_chan()
535 struct bam_chan *bchan = to_bam_chan(chan); in bam_free_chan() local
536 struct bam_device *bdev = bchan->bdev; in bam_free_chan()
547 if (!list_empty(&bchan->desc_list)) { in bam_free_chan()
548 dev_err(bchan->bdev->dev, "Cannot free busy channel\n"); in bam_free_chan()
552 spin_lock_irqsave(&bchan->vc.lock, flags); in bam_free_chan()
553 bam_reset_channel(bchan); in bam_free_chan()
554 spin_unlock_irqrestore(&bchan->vc.lock, flags); in bam_free_chan()
556 dma_free_wc(bdev->dev, BAM_DESC_FIFO_SIZE, bchan->fifo_virt, in bam_free_chan()
557 bchan->fifo_phys); in bam_free_chan()
558 bchan->fifo_virt = NULL; in bam_free_chan()
562 val &= ~BIT(bchan->id); in bam_free_chan()
566 writel_relaxed(0, bam_addr(bdev, bchan->id, BAM_P_IRQ_EN)); in bam_free_chan()
584 struct bam_chan *bchan = to_bam_chan(chan); in bam_slave_config() local
587 spin_lock_irqsave(&bchan->vc.lock, flag); in bam_slave_config()
588 memcpy(&bchan->slave, cfg, sizeof(*cfg)); in bam_slave_config()
589 bchan->reconfigure = 1; in bam_slave_config()
590 spin_unlock_irqrestore(&bchan->vc.lock, flag); in bam_slave_config()
610 struct bam_chan *bchan = to_bam_chan(chan); in bam_prep_slave_sg() local
611 struct bam_device *bdev = bchan->bdev; in bam_prep_slave_sg()
672 return vchan_tx_prep(&bchan->vc, &async_desc->vd, flags); in bam_prep_slave_sg()
689 struct bam_chan *bchan = to_bam_chan(chan); in bam_dma_terminate_all() local
695 spin_lock_irqsave(&bchan->vc.lock, flag); in bam_dma_terminate_all()
709 if (!list_empty(&bchan->desc_list)) { in bam_dma_terminate_all()
710 async_desc = list_first_entry(&bchan->desc_list, in bam_dma_terminate_all()
712 bam_chan_init_hw(bchan, async_desc->dir); in bam_dma_terminate_all()
716 &bchan->desc_list, desc_node) { in bam_dma_terminate_all()
717 list_add(&async_desc->vd.node, &bchan->vc.desc_issued); in bam_dma_terminate_all()
721 vchan_get_all_descriptors(&bchan->vc, &head); in bam_dma_terminate_all()
722 spin_unlock_irqrestore(&bchan->vc.lock, flag); in bam_dma_terminate_all()
724 vchan_dma_desc_free_list(&bchan->vc, &head); in bam_dma_terminate_all()
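The bam_dma_terminate_all() matches (lines 689 to 724) show the interesting part of channel teardown: if descriptors are still in flight, the pipe is re-initialised to stop it; every descriptor on the channel's own desc_list is then handed back to the virt-dma issued list so that vchan_get_all_descriptors() collects it along with the never-started ones, and freeing happens only after the lock is dropped. A hedged sketch of that core (declarations added, unmatched arguments such as the list_first_entry() type filled in as assumptions):

    /* Core of the teardown path, as suggested by the listing. */
    struct bam_async_desc *async_desc, *tmp;
    unsigned long flag;
    LIST_HEAD(head);

    spin_lock_irqsave(&bchan->vc.lock, flag);

    /* something is still in flight: re-initialise the pipe to stop it */
    if (!list_empty(&bchan->desc_list)) {
        async_desc = list_first_entry(&bchan->desc_list,
                                      struct bam_async_desc, desc_node);
        bam_chan_init_hw(bchan, async_desc->dir);
    }

    /* hand every in-flight descriptor back to virt-dma ... */
    list_for_each_entry_safe(async_desc, tmp,
                             &bchan->desc_list, desc_node) {
        list_add(&async_desc->vd.node, &bchan->vc.desc_issued);
        list_del(&async_desc->desc_node);
    }

    /* ... so this picks them up together with the not-yet-started ones */
    vchan_get_all_descriptors(&bchan->vc, &head);
    spin_unlock_irqrestore(&bchan->vc.lock, flag);

    /* free outside the spinlock */
    vchan_dma_desc_free_list(&bchan->vc, &head);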
736 struct bam_chan *bchan = to_bam_chan(chan); in bam_pause() local
737 struct bam_device *bdev = bchan->bdev; in bam_pause()
745 spin_lock_irqsave(&bchan->vc.lock, flag); in bam_pause()
746 writel_relaxed(1, bam_addr(bdev, bchan->id, BAM_P_HALT)); in bam_pause()
747 bchan->paused = 1; in bam_pause()
748 spin_unlock_irqrestore(&bchan->vc.lock, flag); in bam_pause()
762 struct bam_chan *bchan = to_bam_chan(chan); in bam_resume() local
763 struct bam_device *bdev = bchan->bdev; in bam_resume()
771 spin_lock_irqsave(&bchan->vc.lock, flag); in bam_resume()
772 writel_relaxed(0, bam_addr(bdev, bchan->id, BAM_P_HALT)); in bam_resume()
773 bchan->paused = 0; in bam_resume()
774 spin_unlock_irqrestore(&bchan->vc.lock, flag); in bam_resume()
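bam_pause() (lines 736 to 748) and bam_resume() (lines 762 to 774) are mirror images: take the channel lock, write the per-pipe halt register with 1 to halt or 0 to release, and track the state in bchan->paused so that bam_tx_status() can report DMA_PAUSED. A minimal sketch of the pair; any runtime-PM handling on the unmatched lines is left out:

    static int bam_pause(struct dma_chan *chan)
    {
        struct bam_chan *bchan = to_bam_chan(chan);
        struct bam_device *bdev = bchan->bdev;
        unsigned long flag;

        spin_lock_irqsave(&bchan->vc.lock, flag);
        writel_relaxed(1, bam_addr(bdev, bchan->id, BAM_P_HALT));
        bchan->paused = 1;
        spin_unlock_irqrestore(&bchan->vc.lock, flag);

        return 0;
    }

    static int bam_resume(struct dma_chan *chan)
    {
        struct bam_chan *bchan = to_bam_chan(chan);
        struct bam_device *bdev = bchan->bdev;
        unsigned long flag;

        spin_lock_irqsave(&bchan->vc.lock, flag);
        writel_relaxed(0, bam_addr(bdev, bchan->id, BAM_P_HALT));
        bchan->paused = 0;
        spin_unlock_irqrestore(&bchan->vc.lock, flag);

        return 0;
    }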
801 struct bam_chan *bchan = &bdev->channels[i]; in process_channel_irqs() local
811 spin_lock_irqsave(&bchan->vc.lock, flags); in process_channel_irqs()
818 avail = CIRC_CNT(offset, bchan->head, MAX_DESCRIPTORS + 1); in process_channel_irqs()
820 if (offset < bchan->head) in process_channel_irqs()
824 &bchan->desc_list, desc_node) { in process_channel_irqs()
830 bchan->head += async_desc->xfer_len; in process_channel_irqs()
831 bchan->head %= MAX_DESCRIPTORS; in process_channel_irqs()
846 &bchan->vc.desc_issued); in process_channel_irqs()
851 spin_unlock_irqrestore(&bchan->vc.lock, flags); in process_channel_irqs()
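The process_channel_irqs() matches (lines 801 to 851) show the consumer side of the descriptor ring: the hardware's current offset (read from a pipe status register on an unmatched line) is compared against the software head with CIRC_CNT(), and each completed descriptor advances the head modulo MAX_DESCRIPTORS. A sketch of that bookkeeping; the offset variable, the wrap correction at line 820, and the completion handling hinted at by line 846 are assumptions beyond what the listing shows:

    /* Consumer-side ring bookkeeping (sketch).  `offset' is assumed to be
     * the hardware's processed-descriptor index. */
    unsigned int avail;
    struct bam_async_desc *async_desc, *tmp;

    avail = CIRC_CNT(offset, bchan->head, MAX_DESCRIPTORS + 1);

    /* presumably a correction for the ring having wrapped */
    if (offset < bchan->head)
        avail--;

    list_for_each_entry_safe(async_desc, tmp,
                             &bchan->desc_list, desc_node) {
        /* stop at the first descriptor the hardware has not finished */
        if (avail < async_desc->xfer_len)
            break;

        /* retire this descriptor's slots */
        bchan->head += async_desc->xfer_len;
        bchan->head %= MAX_DESCRIPTORS;
        avail -= async_desc->xfer_len;

        /* completed or partially completed descriptors are then either
         * signalled or re-queued on bchan->vc.desc_issued (line 846) */
    }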
909 struct bam_chan *bchan = to_bam_chan(chan); in bam_tx_status() local
922 return bchan->paused ? DMA_PAUSED : ret; in bam_tx_status()
924 spin_lock_irqsave(&bchan->vc.lock, flags); in bam_tx_status()
925 vd = vchan_find_desc(&bchan->vc, cookie); in bam_tx_status()
929 list_for_each_entry(async_desc, &bchan->desc_list, desc_node) { in bam_tx_status()
939 spin_unlock_irqrestore(&bchan->vc.lock, flags); in bam_tx_status()
943 if (ret == DMA_IN_PROGRESS && bchan->paused) in bam_tx_status()
951 * @bchan: bam dma channel
954 static void bam_apply_new_config(struct bam_chan *bchan, in bam_apply_new_config() argument
957 struct bam_device *bdev = bchan->bdev; in bam_apply_new_config()
962 maxburst = bchan->slave.src_maxburst; in bam_apply_new_config()
964 maxburst = bchan->slave.dst_maxburst; in bam_apply_new_config()
970 bchan->reconfigure = 0; in bam_apply_new_config()
975 * @bchan: bam dma channel
977 static void bam_start_dma(struct bam_chan *bchan) in bam_start_dma() argument
979 struct virt_dma_desc *vd = vchan_next_desc(&bchan->vc); in bam_start_dma()
980 struct bam_device *bdev = bchan->bdev; in bam_start_dma()
983 struct bam_desc_hw *fifo = PTR_ALIGN(bchan->fifo_virt, in bam_start_dma()
989 lockdep_assert_held(&bchan->vc.lock); in bam_start_dma()
998 while (vd && !IS_BUSY(bchan)) { in bam_start_dma()
1004 if (!bchan->initialized) in bam_start_dma()
1005 bam_chan_init_hw(bchan, async_desc->dir); in bam_start_dma()
1008 if (bchan->reconfigure) in bam_start_dma()
1009 bam_apply_new_config(bchan, async_desc->dir); in bam_start_dma()
1012 avail = CIRC_SPACE(bchan->tail, bchan->head, in bam_start_dma()
1025 vd = vchan_next_desc(&bchan->vc); in bam_start_dma()
1043 if (bchan->tail + async_desc->xfer_len > MAX_DESCRIPTORS) { in bam_start_dma()
1044 u32 partial = MAX_DESCRIPTORS - bchan->tail; in bam_start_dma()
1046 memcpy(&fifo[bchan->tail], desc, in bam_start_dma()
1052 memcpy(&fifo[bchan->tail], desc, in bam_start_dma()
1057 bchan->tail += async_desc->xfer_len; in bam_start_dma()
1058 bchan->tail %= MAX_DESCRIPTORS; in bam_start_dma()
1059 list_add_tail(&async_desc->desc_node, &bchan->desc_list); in bam_start_dma()
1064 writel_relaxed(bchan->tail * sizeof(struct bam_desc_hw), in bam_start_dma()
1065 bam_addr(bdev, bchan->id, BAM_P_EVNT_REG)); in bam_start_dma()
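The bam_start_dma() matches (lines 977 to 1065) cover the producer side: descriptors are copied into the write-combined FIFO at the software tail, a transfer that would run past the end of the ring is split into two memcpy() calls, the tail advances modulo MAX_DESCRIPTORS, and the pipe's event register is finally written with the new tail in bytes to kick the hardware. A sketch of the copy-and-kick step; fifo and desc come from the surrounding function (line 983 and unmatched setup code), and the continuation lines of each memcpy() are assumptions:

    /* Producer-side sketch: body of the submit loop, then the kick. */
    if (bchan->tail + async_desc->xfer_len > MAX_DESCRIPTORS) {
        /* transfer wraps: copy up to the end, then the rest from slot 0 */
        u32 partial = MAX_DESCRIPTORS - bchan->tail;

        memcpy(&fifo[bchan->tail], desc,
               partial * sizeof(struct bam_desc_hw));
        memcpy(fifo, &desc[partial],
               (async_desc->xfer_len - partial) *
               sizeof(struct bam_desc_hw));
    } else {
        memcpy(&fifo[bchan->tail], desc,
               async_desc->xfer_len * sizeof(struct bam_desc_hw));
    }

    bchan->tail += async_desc->xfer_len;
    bchan->tail %= MAX_DESCRIPTORS;
    list_add_tail(&async_desc->desc_node, &bchan->desc_list);

    /* after the loop: kick the pipe.  The event register takes the new
     * tail as a byte offset; a barrier ordering the FIFO writes before
     * this store is assumed but not visible in the listing. */
    writel_relaxed(bchan->tail * sizeof(struct bam_desc_hw),
                   bam_addr(bdev, bchan->id, BAM_P_EVNT_REG));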
1080 struct bam_chan *bchan; in dma_tasklet() local
1086 bchan = &bdev->channels[i]; in dma_tasklet()
1087 spin_lock_irqsave(&bchan->vc.lock, flags); in dma_tasklet()
1089 if (!list_empty(&bchan->vc.desc_issued) && !IS_BUSY(bchan)) in dma_tasklet()
1090 bam_start_dma(bchan); in dma_tasklet()
1091 spin_unlock_irqrestore(&bchan->vc.lock, flags); in dma_tasklet()
1104 struct bam_chan *bchan = to_bam_chan(chan); in bam_issue_pending() local
1107 spin_lock_irqsave(&bchan->vc.lock, flags); in bam_issue_pending()
1110 if (vchan_issue_pending(&bchan->vc) && !IS_BUSY(bchan)) in bam_issue_pending()
1111 bam_start_dma(bchan); in bam_issue_pending()
1113 spin_unlock_irqrestore(&bchan->vc.lock, flags); in bam_issue_pending()
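bam_issue_pending() (lines 1104 to 1113) and the tasklet just above it (lines 1080 to 1091) share the same gate: move freshly submitted descriptors onto the issued list with vchan_issue_pending(), then start the hardware only if IS_BUSY() says the ring still has room. A minimal sketch of the callback:

    static void bam_issue_pending(struct dma_chan *chan)
    {
        struct bam_chan *bchan = to_bam_chan(chan);
        unsigned long flags;

        spin_lock_irqsave(&bchan->vc.lock, flags);

        /* if work is pending and the ring has room, feed the hardware */
        if (vchan_issue_pending(&bchan->vc) && !IS_BUSY(bchan))
            bam_start_dma(bchan);

        spin_unlock_irqrestore(&bchan->vc.lock, flags);
    }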
1206 static void bam_channel_init(struct bam_device *bdev, struct bam_chan *bchan, in bam_channel_init() argument
1209 bchan->id = index; in bam_channel_init()
1210 bchan->bdev = bdev; in bam_channel_init()
1212 vchan_init(&bchan->vc, &bdev->common); in bam_channel_init()
1213 bchan->vc.desc_free = bam_dma_free_desc; in bam_channel_init()
1214 INIT_LIST_HEAD(&bchan->desc_list); in bam_channel_init()