Lines matching refs:pd (every use of the struct pch_dma pointer pd in drivers/dma/pch_dma.c; each entry shows the source line number, the code, and the enclosing function)

140 #define dma_readl(pd, name) \ argument
141 readl((pd)->membase + PCH_DMA_##name)
142 #define dma_writel(pd, name, val) \ argument
143 writel((val), (pd)->membase + PCH_DMA_##name)
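
The two accessors above rely on token pasting: dma_readl(pd, CTL0) expands to readl((pd)->membase + PCH_DMA_CTL0), so every register name becomes an offset from one base pointer. A minimal userspace sketch of the same trick (the offsets and the demo struct are made up for illustration; the kernel version works on a void __iomem * via readl/writel):

    #include <stdint.h>
    #include <stdio.h>

    /* hypothetical register offsets, standing in for PCH_DMA_CTL0 etc. */
    #define DEMO_CTL0 0x00
    #define DEMO_STS0 0x10

    struct demo_dev {
        volatile uint32_t *membase;    /* kernel: void __iomem * */
    };

    /* DEMO_##name glues the caller's token onto the common prefix */
    #define demo_readl(pd, name)       ((pd)->membase[DEMO_##name / 4])
    #define demo_writel(pd, name, val) ((pd)->membase[DEMO_##name / 4] = (val))

    int main(void)
    {
        uint32_t fake_regs[8] = { 0 };
        struct demo_dev pd = { .membase = fake_regs };

        demo_writel(&pd, CTL0, 0x5);
        printf("CTL0 = 0x%x\n", (unsigned)demo_readl(&pd, CTL0));
        return 0;
    }
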
187 struct pch_dma *pd = to_pd(chan->device); in pdc_enable_irq() local
196 val = dma_readl(pd, CTL2); in pdc_enable_irq()
203 dma_writel(pd, CTL2, val); in pdc_enable_irq()
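
pdc_enable_irq() is a classic read-modify-write on CTL2. A sketch of that shape, reusing the driver's dma_readl/dma_writel and to_pd helpers from the listing; the one-enable-bit-per-channel layout is an assumption, not taken from the datasheet:

    static void pdc_enable_irq_sketch(struct dma_chan *chan, int enable)
    {
        struct pch_dma *pd = to_pd(chan->device);
        u32 val = dma_readl(pd, CTL2);

        if (enable)
            val |= BIT(chan->chan_id);     /* assumed: one enable bit per channel */
        else
            val &= ~BIT(chan->chan_id);

        dma_writel(pd, CTL2, val);
    }
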
212 struct pch_dma *pd = to_pd(chan->device); in pdc_set_dir() local
218 val = dma_readl(pd, CTL0); in pdc_set_dir()
233 dma_writel(pd, CTL0, val); in pdc_set_dir()
236 val = dma_readl(pd, CTL3); in pdc_set_dir()
250 dma_writel(pd, CTL3, val); in pdc_set_dir()
259 struct pch_dma *pd = to_pd(chan->device); in pdc_set_mode() local
269 val = dma_readl(pd, CTL0); in pdc_set_mode()
273 dma_writel(pd, CTL0, val); in pdc_set_mode()
280 val = dma_readl(pd, CTL3); in pdc_set_mode()
284 dma_writel(pd, CTL3, val); in pdc_set_mode()
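
pdc_set_dir() and pdc_set_mode() share one pattern: each channel owns a small control field, with the low channels packed into CTL0 and the higher ones spilling into CTL3 (hence the paired reads and writes above). A sketch of that split; the 4-bit field width, the 0xf mask, and the channel-8 cutover are illustrative assumptions:

    static void pdc_set_mode_sketch(struct dma_chan *chan, u32 mode)
    {
        struct pch_dma *pd = to_pd(chan->device);
        int shift;
        u32 val;

        if (chan->chan_id < 8) {                 /* low channels live in CTL0 */
            shift = chan->chan_id * 4;           /* assumed 4 bits per channel */
            val = dma_readl(pd, CTL0);
            val &= ~(0xfU << shift);
            val |= mode << shift;
            dma_writel(pd, CTL0, val);
        } else {                                 /* the rest live in CTL3 */
            shift = (chan->chan_id - 8) * 4;
            val = dma_readl(pd, CTL3);
            val &= ~(0xfU << shift);
            val |= mode << shift;
            dma_writel(pd, CTL3, val);
        }
    }
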
293 struct pch_dma *pd = to_pd(pd_chan->chan.device); in pdc_get_status0() local
296 val = dma_readl(pd, STS0); in pdc_get_status0()
303 struct pch_dma *pd = to_pd(pd_chan->chan.device); in pdc_get_status2() local
306 val = dma_readl(pd, STS2); in pdc_get_status2()
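
The status readers are the passive counterpart: STS0 carries per-channel state for the low channels, STS2 for the high ones. A sketch with an assumed two-bit status field per channel:

    static u32 pdc_get_status_sketch(struct pch_dma_chan *pd_chan)
    {
        struct pch_dma *pd = to_pd(pd_chan->chan.device);
        u32 val = dma_readl(pd, STS0);    /* STS2 for the higher channels */

        /* assumed packing: two status bits per channel, starting at bit 0 */
        return (val >> (pd_chan->chan.chan_id * 2)) & 0x3;
    }
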
439 struct pch_dma *pd = to_pd(chan->device); in pdc_alloc_desc() local
442 desc = pci_pool_alloc(pd->pool, flags, &addr); in pdc_alloc_desc()
541 struct pch_dma *pd = to_pd(chan->device); in pd_free_chan_resources() local
555 pci_pool_free(pd->pool, desc, desc->txd.phys); in pd_free_chan_resources()
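
pdc_alloc_desc() and pd_free_chan_resources() bracket a descriptor's lifetime in the PCI pool created at probe time: pci_pool_alloc() hands back both a CPU pointer and the bus address the engine chains through, which the driver stashes in desc->txd.phys (the free call above passes it back). Note that pci_pool_* are thin wrappers over dma_pool_* and were removed from later kernels. A sketch of the pairing, with the descriptor internals assumed:

    static struct pch_dma_desc *pdc_alloc_desc_sketch(struct dma_chan *chan,
                                                      gfp_t flags)
    {
        struct pch_dma *pd = to_pd(chan->device);
        struct pch_dma_desc *desc;
        dma_addr_t addr;

        desc = pci_pool_alloc(pd->pool, flags, &addr);
        if (desc) {
            memset(desc, 0, sizeof(*desc));
            desc->txd.phys = addr;    /* bus address, needed again at free time */
        }
        return desc;

        /* release path is the mirror image:
         *     pci_pool_free(pd->pool, desc, desc->txd.phys);
         */
    }
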
705 struct pch_dma *pd = (struct pch_dma *)devid; in pd_irq() local
713 sts0 = dma_readl(pd, STS0); in pd_irq()
714 sts2 = dma_readl(pd, STS2); in pd_irq()
716 dev_dbg(pd->dma.dev, "pd_irq sts0: %x\n", sts0); in pd_irq()
718 for (i = 0; i < pd->dma.chancnt; i++) { in pd_irq()
719 pd_chan = &pd->channels[i]; in pd_irq()
742 dma_writel(pd, STS0, sts0); in pd_irq()
744 dma_writel(pd, STS2, sts2); in pd_irq()
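
pd_irq() snapshots both status registers, walks every channel, then writes the snapshots back, which presumably clears the latched bits. A skeleton of that flow; pdc_handle_channel() is a hypothetical stand-in for the per-channel dispatch the listing elides:

    static irqreturn_t pd_irq_sketch(int irq, void *devid)
    {
        struct pch_dma *pd = devid;
        u32 sts0 = dma_readl(pd, STS0);
        u32 sts2 = dma_readl(pd, STS2);
        int i, handled = 0;

        for (i = 0; i < pd->dma.chancnt; i++) {
            struct pch_dma_chan *pd_chan = &pd->channels[i];

            /* hypothetical helper: test this channel's bits in sts0/sts2
             * and schedule its tasklet if it fired */
            handled |= pdc_handle_channel(pd_chan, sts0, sts2);
        }

        if (sts0)
            dma_writel(pd, STS0, sts0);    /* write back to acknowledge */
        if (sts2)
            dma_writel(pd, STS2, sts2);

        return handled ? IRQ_HANDLED : IRQ_NONE;
    }
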
750 static void pch_dma_save_regs(struct pch_dma *pd) in pch_dma_save_regs() argument
756 pd->regs.dma_ctl0 = dma_readl(pd, CTL0); in pch_dma_save_regs()
757 pd->regs.dma_ctl1 = dma_readl(pd, CTL1); in pch_dma_save_regs()
758 pd->regs.dma_ctl2 = dma_readl(pd, CTL2); in pch_dma_save_regs()
759 pd->regs.dma_ctl3 = dma_readl(pd, CTL3); in pch_dma_save_regs()
761 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { in pch_dma_save_regs()
764 pd->ch_regs[i].dev_addr = channel_readl(pd_chan, DEV_ADDR); in pch_dma_save_regs()
765 pd->ch_regs[i].mem_addr = channel_readl(pd_chan, MEM_ADDR); in pch_dma_save_regs()
766 pd->ch_regs[i].size = channel_readl(pd_chan, SIZE); in pch_dma_save_regs()
767 pd->ch_regs[i].next = channel_readl(pd_chan, NEXT); in pch_dma_save_regs()
773 static void pch_dma_restore_regs(struct pch_dma *pd) in pch_dma_restore_regs() argument
779 dma_writel(pd, CTL0, pd->regs.dma_ctl0); in pch_dma_restore_regs()
780 dma_writel(pd, CTL1, pd->regs.dma_ctl1); in pch_dma_restore_regs()
781 dma_writel(pd, CTL2, pd->regs.dma_ctl2); in pch_dma_restore_regs()
782 dma_writel(pd, CTL3, pd->regs.dma_ctl3); in pch_dma_restore_regs()
784 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { in pch_dma_restore_regs()
787 channel_writel(pd_chan, DEV_ADDR, pd->ch_regs[i].dev_addr); in pch_dma_restore_regs()
788 channel_writel(pd_chan, MEM_ADDR, pd->ch_regs[i].mem_addr); in pch_dma_restore_regs()
789 channel_writel(pd_chan, SIZE, pd->ch_regs[i].size); in pch_dma_restore_regs()
790 channel_writel(pd_chan, NEXT, pd->ch_regs[i].next); in pch_dma_restore_regs()
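
The listing elides the loop bookkeeping in both functions (the conversion from struct dma_chan to the driver's channel type, and the index increment). A plausible reconstruction of the save half, assuming the driver's usual to_pd_chan() container_of helper; the restore half mirrors it with channel_writel():

    static void pch_dma_save_regs_sketch(struct pch_dma *pd)
    {
        struct dma_chan *chan, *_c;
        int i = 0;

        pd->regs.dma_ctl0 = dma_readl(pd, CTL0);
        pd->regs.dma_ctl1 = dma_readl(pd, CTL1);
        pd->regs.dma_ctl2 = dma_readl(pd, CTL2);
        pd->regs.dma_ctl3 = dma_readl(pd, CTL3);

        list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) {
            struct pch_dma_chan *pd_chan = to_pd_chan(chan);

            pd->ch_regs[i].dev_addr = channel_readl(pd_chan, DEV_ADDR);
            pd->ch_regs[i].mem_addr = channel_readl(pd_chan, MEM_ADDR);
            pd->ch_regs[i].size     = channel_readl(pd_chan, SIZE);
            pd->ch_regs[i].next     = channel_readl(pd_chan, NEXT);
            i++;
        }
    }
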
798 struct pch_dma *pd = pci_get_drvdata(pdev); in pch_dma_suspend() local
800 if (pd) in pch_dma_suspend()
801 pch_dma_save_regs(pd); in pch_dma_suspend()
812 struct pch_dma *pd = pci_get_drvdata(pdev); in pch_dma_resume() local
824 if (pd) in pch_dma_resume()
825 pch_dma_restore_regs(pd); in pch_dma_resume()
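
Both PM hooks take struct pci_dev directly, i.e. the legacy pci_driver suspend/resume callbacks (consistent with the pci_pool era of this code). How the suspend side likely wires up, with the pci_save_state()/power-state calls assumed as the usual boilerplate rather than read off the listing:

    static int pch_dma_suspend_sketch(struct pci_dev *pdev, pm_message_t state)
    {
        struct pch_dma *pd = pci_get_drvdata(pdev);

        if (pd)
            pch_dma_save_regs(pd);    /* stash CTL0-3 plus per-channel regs */

        /* assumed boilerplate for legacy PCI suspend */
        pci_save_state(pdev);
        pci_disable_device(pdev);
        pci_set_power_state(pdev, pci_choose_state(pdev, state));

        return 0;
    }
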
834 struct pch_dma *pd; in pch_dma_probe() local
841 pd = kzalloc(sizeof(*pd), GFP_KERNEL); in pch_dma_probe()
842 if (!pd) in pch_dma_probe()
845 pci_set_drvdata(pdev, pd); in pch_dma_probe()
871 regs = pd->membase = pci_iomap(pdev, 1, 0); in pch_dma_probe()
872 if (!pd->membase) { in pch_dma_probe()
880 err = request_irq(pdev->irq, pd_irq, IRQF_SHARED, DRV_NAME, pd); in pch_dma_probe()
886 pd->pool = pci_pool_create("pch_dma_desc_pool", pdev, in pch_dma_probe()
888 if (!pd->pool) { in pch_dma_probe()
894 pd->dma.dev = &pdev->dev; in pch_dma_probe()
896 INIT_LIST_HEAD(&pd->dma.channels); in pch_dma_probe()
899 struct pch_dma_chan *pd_chan = &pd->channels[i]; in pch_dma_probe()
901 pd_chan->chan.device = &pd->dma; in pch_dma_probe()
914 list_add_tail(&pd_chan->chan.device_node, &pd->dma.channels); in pch_dma_probe()
917 dma_cap_zero(pd->dma.cap_mask); in pch_dma_probe()
918 dma_cap_set(DMA_PRIVATE, pd->dma.cap_mask); in pch_dma_probe()
919 dma_cap_set(DMA_SLAVE, pd->dma.cap_mask); in pch_dma_probe()
921 pd->dma.device_alloc_chan_resources = pd_alloc_chan_resources; in pch_dma_probe()
922 pd->dma.device_free_chan_resources = pd_free_chan_resources; in pch_dma_probe()
923 pd->dma.device_tx_status = pd_tx_status; in pch_dma_probe()
924 pd->dma.device_issue_pending = pd_issue_pending; in pch_dma_probe()
925 pd->dma.device_prep_slave_sg = pd_prep_slave_sg; in pch_dma_probe()
926 pd->dma.device_terminate_all = pd_device_terminate_all; in pch_dma_probe()
928 err = dma_async_device_register(&pd->dma); in pch_dma_probe()
937 pci_pool_destroy(pd->pool); in pch_dma_probe()
939 free_irq(pdev->irq, pd); in pch_dma_probe()
941 pci_iounmap(pdev, pd->membase); in pch_dma_probe()
947 kfree(pd); in pch_dma_probe()
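
pch_dma_probe() is a goto-unwind ladder: each acquisition gets a matching release, so the error exits at lines 937-947 free exactly what was set up, in reverse order. A condensed sketch of that shape; the label names and the pci_pool_create() size/align arguments (elided at line 887) are assumptions:

    static int pch_dma_probe_sketch(struct pci_dev *pdev,
                                    const struct pci_device_id *id)
    {
        struct pch_dma *pd;
        int err;

        pd = kzalloc(sizeof(*pd), GFP_KERNEL);
        if (!pd)
            return -ENOMEM;
        pci_set_drvdata(pdev, pd);

        pd->membase = pci_iomap(pdev, 1, 0);    /* BAR 1 holds the registers */
        if (!pd->membase) {
            err = -ENOMEM;
            goto err_free_mem;
        }

        err = request_irq(pdev->irq, pd_irq, IRQF_SHARED, DRV_NAME, pd);
        if (err)
            goto err_iounmap;

        pd->pool = pci_pool_create("pch_dma_desc_pool", pdev,
                                   sizeof(struct pch_dma_desc) /* assumed */,
                                   4 /* assumed align */, 0);
        if (!pd->pool) {
            err = -ENOMEM;
            goto err_free_irq;
        }

        /* channel setup and dma_device callback wiring go here (lines 894-926) */

        err = dma_async_device_register(&pd->dma);
        if (err)
            goto err_free_pool;
        return 0;

    err_free_pool:
        pci_pool_destroy(pd->pool);
    err_free_irq:
        free_irq(pdev->irq, pd);
    err_iounmap:
        pci_iounmap(pdev, pd->membase);
    err_free_mem:
        kfree(pd);
        return err;
    }
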
953 struct pch_dma *pd = pci_get_drvdata(pdev); in pch_dma_remove() local
957 if (pd) { in pch_dma_remove()
958 dma_async_device_unregister(&pd->dma); in pch_dma_remove()
960 free_irq(pdev->irq, pd); in pch_dma_remove()
962 list_for_each_entry_safe(chan, _c, &pd->dma.channels, in pch_dma_remove()
969 pci_pool_destroy(pd->pool); in pch_dma_remove()
970 pci_iounmap(pdev, pd->membase); in pch_dma_remove()
973 kfree(pd); in pch_dma_remove()
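
pch_dma_remove() runs the same teardown in reverse once the DMA device is unregistered. A sketch; the body of the channel loop (lines 962ff) is elided in the listing, so the tasklet_kill() step is a guess at typical per-channel cleanup:

    static void pch_dma_remove_sketch(struct pci_dev *pdev)
    {
        struct pch_dma *pd = pci_get_drvdata(pdev);
        struct dma_chan *chan, *_c;

        if (!pd)
            return;

        dma_async_device_unregister(&pd->dma);
        free_irq(pdev->irq, pd);

        list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) {
            struct pch_dma_chan *pd_chan = to_pd_chan(chan);

            tasklet_kill(&pd_chan->tasklet);    /* assumed channel teardown */
        }

        pci_pool_destroy(pd->pool);
        pci_iounmap(pdev, pd->membase);
        kfree(pd);
    }
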