Lines Matching full:mdma

121 	struct mdc_dma *mdma;  member
152 static inline u32 mdc_readl(struct mdc_dma *mdma, u32 reg) in mdc_readl() argument
154 return readl(mdma->regs + reg); in mdc_readl()
157 static inline void mdc_writel(struct mdc_dma *mdma, u32 val, u32 reg) in mdc_writel() argument
159 writel(val, mdma->regs + reg); in mdc_writel()
164 return mdc_readl(mchan->mdma, mchan->chan_nr * 0x040 + reg); in mdc_chan_readl()
169 mdc_writel(mchan->mdma, val, mchan->chan_nr * 0x040 + reg); in mdc_chan_writel()
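The accessor matches above show the driver's two-level register model: mdc_readl()/mdc_writel() wrap plain readl()/writel() on the mapped register base, and the per-channel variants add a fixed 0x040-byte stride per channel number. A minimal sketch of that pattern, reconstructed from the matches (mdma->regs and mchan->chan_nr are from the source; the trimmed struct layouts are assumptions):

#include <linux/io.h>
#include <linux/types.h>

/* Trimmed-down containers; the real structs carry much more state. */
struct mdc_dma {
	void __iomem *regs;		/* mapped MDC register block */
};

struct mdc_chan {
	struct mdc_dma *mdma;
	unsigned int chan_nr;
};

static inline u32 mdc_readl(struct mdc_dma *mdma, u32 reg)
{
	return readl(mdma->regs + reg);
}

static inline void mdc_writel(struct mdc_dma *mdma, u32 val, u32 reg)
{
	writel(val, mdma->regs + reg);
}

/* Each channel owns a 0x040-byte register window, so channel-relative
 * offsets become global ones by adding chan_nr * 0x040. */
static inline u32 mdc_chan_readl(struct mdc_chan *mchan, u32 reg)
{
	return mdc_readl(mchan->mdma, mchan->chan_nr * 0x040 + reg);
}

static inline void mdc_chan_writel(struct mdc_chan *mchan, u32 val, u32 reg)
{
	mdc_writel(mchan->mdma, val, mchan->chan_nr * 0x040 + reg);
}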
184 static inline struct device *mdma2dev(struct mdc_dma *mdma) in mdma2dev() argument
186 return mdma->dma_dev.dev; in mdma2dev()
213 struct mdc_dma *mdma = mchan->mdma; in mdc_list_desc_config() local
232 if (IS_ALIGNED(dst, mdma->bus_width) && in mdc_list_desc_config()
233 IS_ALIGNED(src, mdma->bus_width)) in mdc_list_desc_config()
234 max_burst = mdma->bus_width * mdma->max_burst_mult; in mdc_list_desc_config()
236 max_burst = mdma->bus_width * (mdma->max_burst_mult - 1); in mdc_list_desc_config()
241 mdc_set_read_width(ldesc, mdma->bus_width); in mdc_list_desc_config()
249 mdc_set_write_width(ldesc, mdma->bus_width); in mdc_list_desc_config()
255 mdc_set_read_width(ldesc, mdma->bus_width); in mdc_list_desc_config()
256 mdc_set_write_width(ldesc, mdma->bus_width); in mdc_list_desc_config()
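Lines 232-256 are the width/burst policy in mdc_list_desc_config(): every list descriptor's read and write widths are pinned to the controller's bus width, and the burst size depends on whether both endpoints are bus-width aligned. A sketch isolating just that decision; bus_width and max_burst_mult are driver-state fields visible in the matches, while the helper name is invented:

#include <linux/kernel.h>
#include <linux/types.h>

/* Hypothetical helper: aligned endpoints get the full burst
 * multiplier, unaligned ones are backed off by one multiple. */
static unsigned int mdc_pick_max_burst(struct mdc_dma *mdma,
				       dma_addr_t src, dma_addr_t dst)
{
	if (IS_ALIGNED(dst, mdma->bus_width) &&
	    IS_ALIGNED(src, mdma->bus_width))
		return mdma->bus_width * mdma->max_burst_mult;

	return mdma->bus_width * (mdma->max_burst_mult - 1);
}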
265 struct mdc_dma *mdma = mdesc->chan->mdma; in mdc_list_desc_free() local
274 dma_pool_free(mdma->desc_pool, curr, curr_phys); in mdc_list_desc_free()
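mdc_list_desc_free() (matches at 265 and 274) walks the chained hardware descriptors of one software descriptor and hands each node back to the device's dma_pool. A hedged sketch of that walk; next_desc and node_addr are assumed field names for the CPU-side successor pointer and the successor's DMA address:

#include <linux/dmapool.h>
#include <linux/types.h>

struct mdc_hw_list_desc {
	u32 node_addr;				/* DMA address of the next node */
	struct mdc_hw_list_desc *next_desc;	/* CPU pointer to the next node */
	/* ... transfer parameters elided ... */
};

static void mdc_list_free(struct dma_pool *pool,
			  struct mdc_hw_list_desc *head, dma_addr_t head_phys)
{
	struct mdc_hw_list_desc *curr = head;
	dma_addr_t curr_phys = head_phys;

	while (curr) {
		/* Save the successor before freeing the current node. */
		struct mdc_hw_list_desc *next = curr->next_desc;
		dma_addr_t next_phys = curr->node_addr;

		dma_pool_free(pool, curr, curr_phys);
		curr = next;
		curr_phys = next_phys;
	}
}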
293 struct mdc_dma *mdma = mchan->mdma; in mdc_prep_dma_memcpy() local
310 curr = dma_pool_alloc(mdma->desc_pool, GFP_NOWAIT, &curr_phys); in mdc_prep_dma_memcpy()
322 xfer_size = min_t(size_t, mdma->max_xfer_size, len); in mdc_prep_dma_memcpy()
363 if (width > mchan->mdma->bus_width) in mdc_check_slave_width()
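Line 363 is the guard in mdc_check_slave_width(): a slave bus width wider than the MDC's own bus width is rejected, which works because the dmaengine DMA_SLAVE_BUSWIDTH_* constants equal the width in bytes. A sketch of such a check; the exact set of accepted widths here is an assumption:

#include <linux/dmaengine.h>
#include <linux/errno.h>

static int mdc_check_slave_width(struct mdc_chan *mchan,
				 enum dma_slave_buswidth width)
{
	switch (width) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		break;
	default:
		return -EINVAL;
	}

	/* The enum value is the width in bytes, so it compares
	 * directly against the controller's bus width. */
	if (width > mchan->mdma->bus_width)
		return -EINVAL;

	return 0;
}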
375 struct mdc_dma *mdma = mchan->mdma; in mdc_prep_dma_cyclic() local
396 mdma->max_xfer_size); in mdc_prep_dma_cyclic()
404 curr = dma_pool_alloc(mdma->desc_pool, GFP_NOWAIT, in mdc_prep_dma_cyclic()
417 xfer_size = min_t(size_t, mdma->max_xfer_size, in mdc_prep_dma_cyclic()
456 struct mdc_dma *mdma = mchan->mdma; in mdc_prep_slave_sg() local
484 curr = dma_pool_alloc(mdma->desc_pool, GFP_NOWAIT, in mdc_prep_slave_sg()
497 xfer_size = min_t(size_t, mdma->max_xfer_size, in mdc_prep_slave_sg()
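The three prep callbacks matched above (memcpy at 293-322, cyclic at 375-417, slave_sg at 456-497) share one loop shape: allocate a hardware descriptor per chunk from the dma_pool with GFP_NOWAIT, since prep callbacks may run in atomic context, and clamp every chunk to the controller's max_xfer_size. A condensed sketch of that shared loop, reusing the descriptor type sketched earlier, with chaining and address programming elided:

#include <linux/dmapool.h>
#include <linux/kernel.h>

/* Hypothetical helper: carve 'len' bytes into pool-allocated hardware
 * descriptors no larger than the controller's max_xfer_size. */
static int mdc_build_chunks(struct mdc_dma *mdma, size_t len)
{
	while (len > 0) {
		struct mdc_hw_list_desc *curr;
		dma_addr_t curr_phys;
		size_t xfer_size;

		/* GFP_NOWAIT: prep callbacks must not sleep. */
		curr = dma_pool_alloc(mdma->desc_pool, GFP_NOWAIT, &curr_phys);
		if (!curr)
			return -ENOMEM;	/* caller unwinds nodes built so far */

		xfer_size = min_t(size_t, mdma->max_xfer_size, len);
		/* ... fill curr with src/dst addresses and xfer_size,
		 * then link it after the previous node ... */
		len -= xfer_size;
	}

	return 0;
}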
529 struct mdc_dma *mdma = mchan->mdma; in mdc_issue_desc() local
543 dev_dbg(mdma2dev(mdma), "Issuing descriptor on channel %d\n", in mdc_issue_desc()
546 mdma->soc->enable_chan(mchan); in mdc_issue_desc()
743 struct device *dev = mdma2dev(mchan->mdma); in mdc_alloc_chan_resources()
751 struct mdc_dma *mdma = mchan->mdma; in mdc_free_chan_resources() local
752 struct device *dev = mdma2dev(mdma); in mdc_free_chan_resources()
755 mdma->soc->disable_chan(mchan); in mdc_free_chan_resources()
767 dev_dbg(mdma2dev(mchan->mdma), "IRQ on channel %d\n", mchan->chan_nr); in mdc_chan_irq()
776 dev_warn(mdma2dev(mchan->mdma), in mdc_chan_irq()
814 struct mdc_dma *mdma = ofdma->of_dma_data; in mdc_of_xlate() local
820 list_for_each_entry(chan, &mdma->dma_dev.channels, device_node) { in mdc_of_xlate()
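mdc_of_xlate() (matches at 814-820) recovers the mdc_dma pointer handed to of_dma_controller_register() and scans the dma_device's channel list for the channel requested by the device-tree cells. A minimal sketch of such a translate callback; to_mdc_chan() and the use of the first DT cell as a channel selector are assumptions here:

#include <linux/dmaengine.h>
#include <linux/of_dma.h>

static struct dma_chan *mdc_of_xlate(struct of_phandle_args *dma_spec,
				     struct of_dma *ofdma)
{
	struct mdc_dma *mdma = ofdma->of_dma_data;
	struct dma_chan *chan;

	/* Walk every channel registered on this dma_device and claim
	 * the one whose number matches the requested DT cell. */
	list_for_each_entry(chan, &mdma->dma_dev.channels, device_node) {
		struct mdc_chan *mchan = to_mdc_chan(chan);

		if (mchan->chan_nr == dma_spec->args[0])
			return dma_get_slave_channel(chan);
	}

	return NULL;
}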
841 struct mdc_dma *mdma = mchan->mdma; in pistachio_mdc_enable_chan() local
843 regmap_update_bits(mdma->periph_regs, in pistachio_mdc_enable_chan()
853 struct mdc_dma *mdma = mchan->mdma; in pistachio_mdc_disable_chan() local
855 regmap_update_bits(mdma->periph_regs, in pistachio_mdc_disable_chan()
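The Pistachio enable/disable hooks (matches at 841-855) never touch the MDC block itself; they flip per-channel routing bits in a separate system controller via the syscon regmap looked up at probe time (line 909). A sketch of the read-modify-write pattern; the register offset and bit names are invented for illustration:

#include <linux/bits.h>
#include <linux/regmap.h>
#include <linux/types.h>

#define SOC_DMA_ROUTE_REG(ch)	(0x120 + (ch) * 4)	/* hypothetical */
#define SOC_DMA_ENABLE		BIT(0)			/* hypothetical */

static void soc_mdc_set_chan(struct mdc_chan *mchan, bool enable)
{
	struct mdc_dma *mdma = mchan->mdma;

	/* Update only the enable bit, leaving any other routing
	 * fields in the syscon register untouched. */
	regmap_update_bits(mdma->periph_regs,
			   SOC_DMA_ROUTE_REG(mchan->chan_nr),
			   SOC_DMA_ENABLE,
			   enable ? SOC_DMA_ENABLE : 0);
}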
875 struct mdc_dma *mdma = dev_get_drvdata(dev); in img_mdc_runtime_suspend() local
877 clk_disable_unprepare(mdma->clk); in img_mdc_runtime_suspend()
884 struct mdc_dma *mdma = dev_get_drvdata(dev); in img_mdc_runtime_resume() local
886 return clk_prepare_enable(mdma->clk); in img_mdc_runtime_resume()
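The runtime-PM pair (matches at 875-886) simply gates the controller's "sys" clock acquired at probe (line 914): suspend disables and unprepares it, resume reverses that and propagates any failure to the PM core. A sketch of the pair and how such callbacks are typically wired into a dev_pm_ops with SET_RUNTIME_PM_OPS():

#include <linux/clk.h>
#include <linux/device.h>
#include <linux/pm.h>

static int mdc_runtime_suspend(struct device *dev)
{
	struct mdc_dma *mdma = dev_get_drvdata(dev);

	/* With the clock gated, the register block must not be touched. */
	clk_disable_unprepare(mdma->clk);

	return 0;
}

static int mdc_runtime_resume(struct device *dev)
{
	struct mdc_dma *mdma = dev_get_drvdata(dev);

	/* Returns 0 or a negative errno straight to the PM core. */
	return clk_prepare_enable(mdma->clk);
}

static const struct dev_pm_ops mdc_pm_ops = {
	SET_RUNTIME_PM_OPS(mdc_runtime_suspend, mdc_runtime_resume, NULL)
};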
891 struct mdc_dma *mdma; in mdc_dma_probe() local
897 mdma = devm_kzalloc(&pdev->dev, sizeof(*mdma), GFP_KERNEL); in mdc_dma_probe()
898 if (!mdma) in mdc_dma_probe()
900 platform_set_drvdata(pdev, mdma); in mdc_dma_probe()
902 mdma->soc = of_device_get_match_data(&pdev->dev); in mdc_dma_probe()
905 mdma->regs = devm_ioremap_resource(&pdev->dev, res); in mdc_dma_probe()
906 if (IS_ERR(mdma->regs)) in mdc_dma_probe()
907 return PTR_ERR(mdma->regs); in mdc_dma_probe()
909 mdma->periph_regs = syscon_regmap_lookup_by_phandle(pdev->dev.of_node, in mdc_dma_probe()
911 if (IS_ERR(mdma->periph_regs)) in mdc_dma_probe()
912 return PTR_ERR(mdma->periph_regs); in mdc_dma_probe()
914 mdma->clk = devm_clk_get(&pdev->dev, "sys"); in mdc_dma_probe()
915 if (IS_ERR(mdma->clk)) in mdc_dma_probe()
916 return PTR_ERR(mdma->clk); in mdc_dma_probe()
918 dma_cap_zero(mdma->dma_dev.cap_mask); in mdc_dma_probe()
919 dma_cap_set(DMA_SLAVE, mdma->dma_dev.cap_mask); in mdc_dma_probe()
920 dma_cap_set(DMA_PRIVATE, mdma->dma_dev.cap_mask); in mdc_dma_probe()
921 dma_cap_set(DMA_CYCLIC, mdma->dma_dev.cap_mask); in mdc_dma_probe()
922 dma_cap_set(DMA_MEMCPY, mdma->dma_dev.cap_mask); in mdc_dma_probe()
924 val = mdc_readl(mdma, MDC_GLOBAL_CONFIG_A); in mdc_dma_probe()
925 mdma->nr_channels = (val >> MDC_GLOBAL_CONFIG_A_DMA_CONTEXTS_SHIFT) & in mdc_dma_probe()
927 mdma->nr_threads = in mdc_dma_probe()
930 mdma->bus_width = in mdc_dma_probe()
942 mdma->max_xfer_size = MDC_TRANSFER_SIZE_MASK + 1 - mdma->bus_width; in mdc_dma_probe()
945 &mdma->nr_channels); in mdc_dma_probe()
948 &mdma->max_burst_mult); in mdc_dma_probe()
952 mdma->dma_dev.dev = &pdev->dev; in mdc_dma_probe()
953 mdma->dma_dev.device_prep_slave_sg = mdc_prep_slave_sg; in mdc_dma_probe()
954 mdma->dma_dev.device_prep_dma_cyclic = mdc_prep_dma_cyclic; in mdc_dma_probe()
955 mdma->dma_dev.device_prep_dma_memcpy = mdc_prep_dma_memcpy; in mdc_dma_probe()
956 mdma->dma_dev.device_alloc_chan_resources = mdc_alloc_chan_resources; in mdc_dma_probe()
957 mdma->dma_dev.device_free_chan_resources = mdc_free_chan_resources; in mdc_dma_probe()
958 mdma->dma_dev.device_tx_status = mdc_tx_status; in mdc_dma_probe()
959 mdma->dma_dev.device_issue_pending = mdc_issue_pending; in mdc_dma_probe()
960 mdma->dma_dev.device_terminate_all = mdc_terminate_all; in mdc_dma_probe()
961 mdma->dma_dev.device_synchronize = mdc_synchronize; in mdc_dma_probe()
962 mdma->dma_dev.device_config = mdc_slave_config; in mdc_dma_probe()
964 mdma->dma_dev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); in mdc_dma_probe()
965 mdma->dma_dev.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in mdc_dma_probe()
966 for (i = 1; i <= mdma->bus_width; i <<= 1) { in mdc_dma_probe()
967 mdma->dma_dev.src_addr_widths |= BIT(i); in mdc_dma_probe()
968 mdma->dma_dev.dst_addr_widths |= BIT(i); in mdc_dma_probe()
971 INIT_LIST_HEAD(&mdma->dma_dev.channels); in mdc_dma_probe()
972 for (i = 0; i < mdma->nr_channels; i++) { in mdc_dma_probe()
973 struct mdc_chan *mchan = &mdma->channels[i]; in mdc_dma_probe()
975 mchan->mdma = mdma; in mdc_dma_probe()
988 vchan_init(&mchan->vc, &mdma->dma_dev); in mdc_dma_probe()
991 mdma->desc_pool = dmam_pool_create(dev_name(&pdev->dev), &pdev->dev, in mdc_dma_probe()
994 if (!mdma->desc_pool) in mdc_dma_probe()
1004 ret = dma_async_device_register(&mdma->dma_dev); in mdc_dma_probe()
1008 ret = of_dma_controller_register(pdev->dev.of_node, mdc_of_xlate, mdma); in mdc_dma_probe()
1013 mdma->nr_channels, mdma->nr_threads); in mdc_dma_probe()
1018 dma_async_device_unregister(&mdma->dma_dev); in mdc_dma_probe()
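The probe matches (891-1018) trace the standard dmaengine bring-up: allocate driver state, map registers, look up the syscon and "sys" clock, read MDC_GLOBAL_CONFIG_A for channel/thread counts and bus width, fill in the dma_device callbacks and capability masks, init each virtual channel, create the descriptor pool, then register with the dmaengine core and the OF translation layer. The tail of that sequence fixes the teardown order: because the OF translator is registered last, a failure there only has to undo the dmaengine registration. A fragment sketch of that tail (the earlier-unwind label is hypothetical):

	ret = dma_async_device_register(&mdma->dma_dev);
	if (ret)
		goto suspend;	/* hypothetical: undo the probe steps above */

	ret = of_dma_controller_register(pdev->dev.of_node, mdc_of_xlate, mdma);
	if (ret)
		goto unregister;

	return 0;

unregister:
	dma_async_device_unregister(&mdma->dma_dev);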
1028 struct mdc_dma *mdma = platform_get_drvdata(pdev); in mdc_dma_remove() local
1032 dma_async_device_unregister(&mdma->dma_dev); in mdc_dma_remove()
1034 list_for_each_entry_safe(mchan, next, &mdma->dma_dev.channels, in mdc_dma_remove()
1053 struct mdc_dma *mdma = dev_get_drvdata(dev); in img_mdc_suspend_late() local
1057 for (i = 0; i < mdma->nr_channels; i++) { in img_mdc_suspend_late()
1058 struct mdc_chan *mchan = &mdma->channels[i]; in img_mdc_suspend_late()
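img_mdc_suspend_late() (matches at 1053-1058; the listing is cut off inside the loop) iterates every channel before allowing late suspend. A plausible reading, sketched under the assumption that the loop is an idle check on a per-channel in-flight descriptor pointer (mchan->desc is an assumed field name):

#include <linux/errno.h>
#include <linux/pm_runtime.h>

static int img_mdc_suspend_late(struct device *dev)
{
	struct mdc_dma *mdma = dev_get_drvdata(dev);
	int i;

	/* Refuse late suspend while any channel still has work queued. */
	for (i = 0; i < mdma->nr_channels; i++) {
		struct mdc_chan *mchan = &mdma->channels[i];

		if (mchan->desc)	/* assumed in-flight descriptor field */
			return -EBUSY;
	}

	return pm_runtime_force_suspend(dev);
}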