
Lines Matching full:pd (whole-token occurrences of the identifier pd)

140 #define dma_readl(pd, name) \  argument
141 readl((pd)->membase + PCH_DMA_##name)
142 #define dma_writel(pd, name, val) \ argument
143 writel((val), (pd)->membase + PCH_DMA_##name)
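These two macros (lines 140-143) hide the MMIO arithmetic: the token paste PCH_DMA_##name turns the register name into an offset constant, so dma_readl(pd, CTL2) expands to readl((pd)->membase + PCH_DMA_CTL2). A minimal self-contained sketch of the same pattern; the offset values below are placeholders, not the real PCH register map:

#include <linux/io.h>

/* placeholder offsets, for illustration only */
#define PCH_DMA_CTL0    0x00
#define PCH_DMA_CTL2    0x08
#define PCH_DMA_STS0    0x10

struct pch_dma_sketch {
        void __iomem *membase;  /* BAR mapping from pci_iomap() */
};

#define dma_readl(pd, name) \
        readl((pd)->membase + PCH_DMA_##name)
#define dma_writel(pd, name, val) \
        writel((val), (pd)->membase + PCH_DMA_##name)

/* dma_readl(pd, STS0) -> readl((pd)->membase + PCH_DMA_STS0) */

Taking pd as a macro argument rather than a global keeps the accessors usable from any context that holds the device state.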
187 struct pch_dma *pd = to_pd(chan->device); in pdc_enable_irq() local
196 val = dma_readl(pd, CTL2); in pdc_enable_irq()
203 dma_writel(pd, CTL2, val); in pdc_enable_irq()
212 struct pch_dma *pd = to_pd(chan->device); in pdc_set_dir() local
218 val = dma_readl(pd, CTL0); in pdc_set_dir()
233 dma_writel(pd, CTL0, val); in pdc_set_dir()
236 val = dma_readl(pd, CTL3); in pdc_set_dir()
250 dma_writel(pd, CTL3, val); in pdc_set_dir()
259 struct pch_dma *pd = to_pd(chan->device); in pdc_set_mode() local
269 val = dma_readl(pd, CTL0); in pdc_set_mode()
273 dma_writel(pd, CTL0, val); in pdc_set_mode()
280 val = dma_readl(pd, CTL3); in pdc_set_mode()
284 dma_writel(pd, CTL3, val); in pdc_set_mode()
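pdc_enable_irq(), pdc_set_dir() and pdc_set_mode() all share one read-modify-write shape: read CTL0 or CTL2 (CTL3 for the upper channels, per lines 236-250 and 280-284), clear the bits owned by one channel, or in the new value, and write the register back. A hedged sketch of that shape; the 4-bit per-channel field width is an assumption for illustration, not the driver's documented layout:

/* Illustrative read-modify-write of a per-channel field in CTL0. */
static void pdc_set_mode_sketch(struct pch_dma *pd, int ch, u32 mode)
{
        u32 val = dma_readl(pd, CTL0);

        val &= ~(0xfU << (ch * 4));             /* clear this channel's field */
        val |= (mode & 0xfU) << (ch * 4);       /* install the new mode */
        dma_writel(pd, CTL0, val);
}

Because the sequence is not atomic, callers of such an update have to be serialized so two channels cannot interleave their writes to the shared register.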
293 struct pch_dma *pd = to_pd(pd_chan->chan.device); in pdc_get_status0() local
296 val = dma_readl(pd, STS0); in pdc_get_status0()
303 struct pch_dma *pd = to_pd(pd_chan->chan.device); in pdc_get_status2() local
306 val = dma_readl(pd, STS2); in pdc_get_status2()
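to_pd(), used throughout these functions, recovers the driver's wrapper structure from the generic struct dma_device that the dmaengine core hands back; this is the standard container_of() idiom, and the per-channel to_pd_chan() conversion works the same way. A sketch of how such helpers are typically defined, with field names matching the listing:

#include <linux/kernel.h>       /* container_of() */
#include <linux/dmaengine.h>

static inline struct pch_dma *to_pd(struct dma_device *ddev)
{
        /* recover the wrapper from its embedded dma_device */
        return container_of(ddev, struct pch_dma, dma);
}

static inline struct pch_dma_chan *to_pd_chan(struct dma_chan *chan)
{
        return container_of(chan, struct pch_dma_chan, chan);
}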
437 struct pch_dma *pd = to_pd(chan->device); in pdc_alloc_desc() local
440 desc = dma_pool_zalloc(pd->pool, flags, &addr); in pdc_alloc_desc()
538 struct pch_dma *pd = to_pd(chan->device); in pd_free_chan_resources() local
552 dma_pool_free(pd->pool, desc, desc->txd.phys); in pd_free_chan_resources()
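pdc_alloc_desc() (line 440) draws descriptors from a dma_pool, which hands back a zeroed, coherently mapped block plus the bus address the controller will dereference, and pd_free_chan_resources() (line 552) returns each one using the bus address recorded in txd.phys. A condensed sketch of that lifecycle, with the descriptor layout abbreviated:

#include <linux/dmapool.h>

struct desc_sketch {
        u32 dev_addr, mem_addr, size, next;     /* hardware-visible words */
        dma_addr_t phys;        /* bus address, needed again at free time */
};

static struct desc_sketch *alloc_desc_sketch(struct dma_pool *pool, gfp_t flags)
{
        dma_addr_t addr;
        struct desc_sketch *desc = dma_pool_zalloc(pool, flags, &addr);

        if (desc)
                desc->phys = addr;      /* remember it for dma_pool_free() */
        return desc;
}

static void free_desc_sketch(struct dma_pool *pool, struct desc_sketch *desc)
{
        dma_pool_free(pool, desc, desc->phys);
}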
702 struct pch_dma *pd = (struct pch_dma *)devid; in pd_irq() local
710 sts0 = dma_readl(pd, STS0); in pd_irq()
711 sts2 = dma_readl(pd, STS2); in pd_irq()
713 dev_dbg(pd->dma.dev, "pd_irq sts0: %x\n", sts0); in pd_irq()
715 for (i = 0; i < pd->dma.chancnt; i++) { in pd_irq()
716 pd_chan = &pd->channels[i]; in pd_irq()
739 dma_writel(pd, STS0, sts0); in pd_irq()
741 dma_writel(pd, STS2, sts2); in pd_irq()
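Because the line is requested with IRQF_SHARED (line 878), pd_irq() must decide whether the interrupt belongs to this device: it snapshots STS0/STS2, services any flagged channel, and writes the status values back to acknowledge them. A skeleton of that shape; the one-bit-per-channel layout and the tasklet handoff are assumptions, since the listing only shows the reads and the acking writes:

#include <linux/interrupt.h>
#include <linux/bits.h>

static irqreturn_t pd_irq_sketch(int irq, void *devid)
{
        struct pch_dma *pd = devid;
        u32 sts0 = dma_readl(pd, STS0);
        unsigned int i;
        int handled = 0;

        for (i = 0; i < pd->dma.chancnt; i++) {
                if (sts0 & BIT(i)) {    /* assumed: one status bit per channel */
                        tasklet_schedule(&pd->channels[i].tasklet);     /* assumed field */
                        handled = 1;
                }
        }

        if (handled)
                dma_writel(pd, STS0, sts0);     /* assumed write-to-clear ack */

        return IRQ_RETVAL(handled);     /* IRQ_HANDLED only if it was ours */
}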
747 static void pch_dma_save_regs(struct pch_dma *pd) in pch_dma_save_regs() argument
753 pd->regs.dma_ctl0 = dma_readl(pd, CTL0); in pch_dma_save_regs()
754 pd->regs.dma_ctl1 = dma_readl(pd, CTL1); in pch_dma_save_regs()
755 pd->regs.dma_ctl2 = dma_readl(pd, CTL2); in pch_dma_save_regs()
756 pd->regs.dma_ctl3 = dma_readl(pd, CTL3); in pch_dma_save_regs()
758 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { in pch_dma_save_regs()
761 pd->ch_regs[i].dev_addr = channel_readl(pd_chan, DEV_ADDR); in pch_dma_save_regs()
762 pd->ch_regs[i].mem_addr = channel_readl(pd_chan, MEM_ADDR); in pch_dma_save_regs()
763 pd->ch_regs[i].size = channel_readl(pd_chan, SIZE); in pch_dma_save_regs()
764 pd->ch_regs[i].next = channel_readl(pd_chan, NEXT); in pch_dma_save_regs()
770 static void pch_dma_restore_regs(struct pch_dma *pd) in pch_dma_restore_regs() argument
776 dma_writel(pd, CTL0, pd->regs.dma_ctl0); in pch_dma_restore_regs()
777 dma_writel(pd, CTL1, pd->regs.dma_ctl1); in pch_dma_restore_regs()
778 dma_writel(pd, CTL2, pd->regs.dma_ctl2); in pch_dma_restore_regs()
779 dma_writel(pd, CTL3, pd->regs.dma_ctl3); in pch_dma_restore_regs()
781 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { in pch_dma_restore_regs()
784 channel_writel(pd_chan, DEV_ADDR, pd->ch_regs[i].dev_addr); in pch_dma_restore_regs()
785 channel_writel(pd_chan, MEM_ADDR, pd->ch_regs[i].mem_addr); in pch_dma_restore_regs()
786 channel_writel(pd_chan, SIZE, pd->ch_regs[i].size); in pch_dma_restore_regs()
787 channel_writel(pd_chan, NEXT, pd->ch_regs[i].next); in pch_dma_restore_regs()
795 struct pch_dma *pd = pci_get_drvdata(pdev); in pch_dma_suspend() local
797 if (pd) in pch_dma_suspend()
798 pch_dma_save_regs(pd); in pch_dma_suspend()
809 struct pch_dma *pd = pci_get_drvdata(pdev); in pch_dma_resume() local
821 if (pd) in pch_dma_resume()
822 pch_dma_restore_regs(pd); in pch_dma_resume()
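pch_dma_save_regs() and pch_dma_restore_regs() snapshot the four global CTL registers into pd->regs and each channel's DEV_ADDR/MEM_ADDR/SIZE/NEXT into pd->ch_regs[], because the controller loses that state across a power transition; the callbacks guard both calls with an if (pd) check on the drvdata. A sketch of how the legacy PCI callbacks plausibly wrap those helpers (the PCI power-state calls are typical boilerplate, not quoted from this driver):

#include <linux/pci.h>

static int pch_dma_suspend_sketch(struct pci_dev *pdev, pm_message_t state)
{
        struct pch_dma *pd = pci_get_drvdata(pdev);

        if (pd)
                pch_dma_save_regs(pd);  /* CTL0..CTL3 + per-channel regs */

        pci_save_state(pdev);
        pci_disable_device(pdev);
        return pci_set_power_state(pdev, pci_choose_state(pdev, state));
}

static int pch_dma_resume_sketch(struct pci_dev *pdev)
{
        struct pch_dma *pd = pci_get_drvdata(pdev);
        int err;

        pci_set_power_state(pdev, PCI_D0);
        pci_restore_state(pdev);

        err = pci_enable_device(pdev);
        if (err)
                return err;

        if (pd)
                pch_dma_restore_regs(pd);       /* rewrite the saved state */
        return 0;
}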
831 struct pch_dma *pd; in pch_dma_probe() local
838 pd = kzalloc(sizeof(*pd), GFP_KERNEL); in pch_dma_probe()
839 if (!pd) in pch_dma_probe()
842 pci_set_drvdata(pdev, pd); in pch_dma_probe()
868 regs = pd->membase = pci_iomap(pdev, 1, 0); in pch_dma_probe()
869 if (!pd->membase) { in pch_dma_probe()
876 pd->dma.dev = &pdev->dev; in pch_dma_probe()
878 err = request_irq(pdev->irq, pd_irq, IRQF_SHARED, DRV_NAME, pd); in pch_dma_probe()
884 pd->pool = dma_pool_create("pch_dma_desc_pool", &pdev->dev, in pch_dma_probe()
886 if (!pd->pool) { in pch_dma_probe()
893 INIT_LIST_HEAD(&pd->dma.channels); in pch_dma_probe()
896 struct pch_dma_chan *pd_chan = &pd->channels[i]; in pch_dma_probe()
898 pd_chan->chan.device = &pd->dma; in pch_dma_probe()
911 list_add_tail(&pd_chan->chan.device_node, &pd->dma.channels); in pch_dma_probe()
914 dma_cap_zero(pd->dma.cap_mask); in pch_dma_probe()
915 dma_cap_set(DMA_PRIVATE, pd->dma.cap_mask); in pch_dma_probe()
916 dma_cap_set(DMA_SLAVE, pd->dma.cap_mask); in pch_dma_probe()
918 pd->dma.device_alloc_chan_resources = pd_alloc_chan_resources; in pch_dma_probe()
919 pd->dma.device_free_chan_resources = pd_free_chan_resources; in pch_dma_probe()
920 pd->dma.device_tx_status = pd_tx_status; in pch_dma_probe()
921 pd->dma.device_issue_pending = pd_issue_pending; in pch_dma_probe()
922 pd->dma.device_prep_slave_sg = pd_prep_slave_sg; in pch_dma_probe()
923 pd->dma.device_terminate_all = pd_device_terminate_all; in pch_dma_probe()
925 err = dma_async_device_register(&pd->dma); in pch_dma_probe()
934 dma_pool_destroy(pd->pool); in pch_dma_probe()
936 free_irq(pdev->irq, pd); in pch_dma_probe()
938 pci_iounmap(pdev, pd->membase); in pch_dma_probe()
944 kfree(pd); in pch_dma_probe()
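pch_dma_probe() acquires resources in a fixed order (kzalloc, drvdata, iomap of BAR 1 at line 868, shared IRQ, descriptor pool, per-channel init, capability mask and callbacks, dmaengine registration), and the error labels around lines 934-944 release them in exact reverse order. A compressed sketch of that goto-unwind shape, with the pool and channel-setup steps elided:

static int probe_sketch(struct pci_dev *pdev)
{
        struct pch_dma *pd;
        int err;

        pd = kzalloc(sizeof(*pd), GFP_KERNEL);
        if (!pd)
                return -ENOMEM;
        pci_set_drvdata(pdev, pd);

        pd->membase = pci_iomap(pdev, 1, 0);    /* registers live in BAR 1 */
        if (!pd->membase) {
                err = -ENOMEM;
                goto err_free_mem;
        }

        err = request_irq(pdev->irq, pd_irq, IRQF_SHARED, DRV_NAME, pd);
        if (err)
                goto err_iounmap;

        err = dma_async_device_register(&pd->dma);
        if (err)
                goto err_free_irq;
        return 0;

err_free_irq:
        free_irq(pdev->irq, pd);
err_iounmap:
        pci_iounmap(pdev, pd->membase);
err_free_mem:
        kfree(pd);
        return err;
}

Each goto target undoes everything acquired before the failure point and nothing after it, which is why the labels appear in the reverse of the acquisition order.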
950 struct pch_dma *pd = pci_get_drvdata(pdev); in pch_dma_remove() local
954 if (pd) { in pch_dma_remove()
955 dma_async_device_unregister(&pd->dma); in pch_dma_remove()
957 free_irq(pdev->irq, pd); in pch_dma_remove()
959 list_for_each_entry_safe(chan, _c, &pd->dma.channels, in pch_dma_remove()
966 dma_pool_destroy(pd->pool); in pch_dma_remove()
967 pci_iounmap(pdev, pd->membase); in pch_dma_remove()
970 kfree(pd); in pch_dma_remove()
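pch_dma_remove() tears down in roughly the reverse of probe: unregister from the dmaengine core first so no new work can arrive, release the IRQ so pd_irq() can no longer run, quiesce each channel, and only then destroy the pool, unmap BAR 1, and free the structure. A compressed sketch; the per-channel tasklet_kill() is an assumption about the quiescing step, which the listing does not show:

static void remove_sketch(struct pci_dev *pdev)
{
        struct pch_dma *pd = pci_get_drvdata(pdev);
        struct dma_chan *chan, *_c;

        if (!pd)
                return;

        dma_async_device_unregister(&pd->dma);  /* stop new clients first */
        free_irq(pdev->irq, pd);                /* no further pd_irq() calls */

        list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) {
                struct pch_dma_chan *pd_chan = to_pd_chan(chan);

                tasklet_kill(&pd_chan->tasklet);        /* assumed quiescing step */
        }

        dma_pool_destroy(pd->pool);
        pci_iounmap(pdev, pd->membase);
        kfree(pd);
}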