Searched refs:dma_cfg (Results 1 – 25 of 32) sorted by relevance

/drivers/net/ethernet/stmicro/stmmac/
dwmac1000_dma.c
74 struct stmmac_dma_cfg *dma_cfg, int atds) in dwmac1000_dma_init() argument
77 int txpbl = dma_cfg->txpbl ?: dma_cfg->pbl; in dwmac1000_dma_init()
78 int rxpbl = dma_cfg->rxpbl ?: dma_cfg->pbl; in dwmac1000_dma_init()
86 if (dma_cfg->pblx8) in dwmac1000_dma_init()
94 if (dma_cfg->fixed_burst) in dwmac1000_dma_init()
98 if (dma_cfg->mixed_burst) in dwmac1000_dma_init()
104 if (dma_cfg->aal) in dwmac1000_dma_init()
114 struct stmmac_dma_cfg *dma_cfg, in dwmac1000_dma_init_rx() argument
122 struct stmmac_dma_cfg *dma_cfg, in dwmac1000_dma_init_tx() argument
dwmac4_dma.c
72 struct stmmac_dma_cfg *dma_cfg, in dwmac4_dma_init_rx_chan() argument
76 u32 rxpbl = dma_cfg->rxpbl ?: dma_cfg->pbl; in dwmac4_dma_init_rx_chan()
82 if (IS_ENABLED(CONFIG_ARCH_DMA_ADDR_T_64BIT) && likely(dma_cfg->eame)) in dwmac4_dma_init_rx_chan()
90 struct stmmac_dma_cfg *dma_cfg, in dwmac4_dma_init_tx_chan() argument
94 u32 txpbl = dma_cfg->txpbl ?: dma_cfg->pbl; in dwmac4_dma_init_tx_chan()
104 if (IS_ENABLED(CONFIG_ARCH_DMA_ADDR_T_64BIT) && likely(dma_cfg->eame)) in dwmac4_dma_init_tx_chan()
112 struct stmmac_dma_cfg *dma_cfg, u32 chan) in dwmac4_dma_init_channel() argument
118 if (dma_cfg->pblx8) in dwmac4_dma_init_channel()
128 struct stmmac_dma_cfg *dma_cfg, u32 chan) in dwmac410_dma_init_channel() argument
134 if (dma_cfg->pblx8) in dwmac410_dma_init_channel()
[all …]
stmmac_platform.c
400 struct stmmac_dma_cfg *dma_cfg; in stmmac_probe_config_dt() local
541 dma_cfg = devm_kzalloc(&pdev->dev, sizeof(*dma_cfg), in stmmac_probe_config_dt()
543 if (!dma_cfg) { in stmmac_probe_config_dt()
547 plat->dma_cfg = dma_cfg; in stmmac_probe_config_dt()
549 of_property_read_u32(np, "snps,pbl", &dma_cfg->pbl); in stmmac_probe_config_dt()
550 if (!dma_cfg->pbl) in stmmac_probe_config_dt()
551 dma_cfg->pbl = DEFAULT_DMA_PBL; in stmmac_probe_config_dt()
552 of_property_read_u32(np, "snps,txpbl", &dma_cfg->txpbl); in stmmac_probe_config_dt()
553 of_property_read_u32(np, "snps,rxpbl", &dma_cfg->rxpbl); in stmmac_probe_config_dt()
554 dma_cfg->pblx8 = !of_property_read_bool(np, "snps,no-pbl-x8"); in stmmac_probe_config_dt()
[all …]
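
The stmmac_platform.c hits above trace the usual pattern for plat->dma_cfg: allocate it with devm_kzalloc(), then fill it from the "snps,*" device-tree properties. A condensed sketch of that pattern, reconstructed from the hits (the function name example_parse_dma_cfg is a placeholder; DEFAULT_DMA_PBL comes from the driver's own headers):

    /*
     * Sketch reconstructed from the stmmac_platform.c hits above; not the
     * driver's literal code. DEFAULT_DMA_PBL is defined in the stmmac headers.
     */
    #include <linux/of.h>
    #include <linux/platform_device.h>
    #include <linux/slab.h>
    #include <linux/stmmac.h>

    static int example_parse_dma_cfg(struct platform_device *pdev,
                                     struct device_node *np,
                                     struct plat_stmmacenet_data *plat)
    {
        struct stmmac_dma_cfg *dma_cfg;

        dma_cfg = devm_kzalloc(&pdev->dev, sizeof(*dma_cfg), GFP_KERNEL);
        if (!dma_cfg)
            return -ENOMEM;
        plat->dma_cfg = dma_cfg;

        /* Programmable burst length, with a driver default when DT omits it */
        of_property_read_u32(np, "snps,pbl", &dma_cfg->pbl);
        if (!dma_cfg->pbl)
            dma_cfg->pbl = DEFAULT_DMA_PBL;

        /* Optional per-direction overrides */
        of_property_read_u32(np, "snps,txpbl", &dma_cfg->txpbl);
        of_property_read_u32(np, "snps,rxpbl", &dma_cfg->rxpbl);

        /* PBL x8 mode stays on unless the DT opts out */
        dma_cfg->pblx8 = !of_property_read_bool(np, "snps,no-pbl-x8");

        return 0;
    }
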
stmmac_pci.c
61 plat->dma_cfg->pbl = 32; in stmmac_default_data()
62 plat->dma_cfg->pblx8 = true; in stmmac_default_data()
117 plat->dma_cfg->pbl = 32; in snps_gmac5_default_data()
118 plat->dma_cfg->pblx8 = true; in snps_gmac5_default_data()
172 plat->dma_cfg = devm_kzalloc(&pdev->dev, sizeof(*plat->dma_cfg), in stmmac_pci_probe()
174 if (!plat->dma_cfg) in stmmac_pci_probe()
dwmac100_dma.c
22 struct stmmac_dma_cfg *dma_cfg, int atds) in dwmac100_dma_init() argument
25 writel(DMA_BUS_MODE_DEFAULT | (dma_cfg->pbl << DMA_BUS_MODE_PBL_SHIFT), in dwmac100_dma_init()
33 struct stmmac_dma_cfg *dma_cfg, in dwmac100_dma_init_rx() argument
41 struct stmmac_dma_cfg *dma_cfg, in dwmac100_dma_init_tx() argument
dwmac-loongson.c
41 plat->dma_cfg->pbl = 32; in loongson_default_data()
42 plat->dma_cfg->pblx8 = true; in loongson_default_data()
78 plat->dma_cfg = devm_kzalloc(&pdev->dev, sizeof(*plat->dma_cfg), GFP_KERNEL); in loongson_dwmac_probe()
79 if (!plat->dma_cfg) { in loongson_dwmac_probe()
dwxgmac2_dma.c
23 struct stmmac_dma_cfg *dma_cfg, int atds) in dwxgmac2_dma_init() argument
27 if (dma_cfg->aal) in dwxgmac2_dma_init()
30 if (dma_cfg->eame) in dwxgmac2_dma_init()
37 struct stmmac_dma_cfg *dma_cfg, u32 chan) in dwxgmac2_dma_init_chan() argument
41 if (dma_cfg->pblx8) in dwxgmac2_dma_init_chan()
49 struct stmmac_dma_cfg *dma_cfg, in dwxgmac2_dma_init_rx_chan() argument
52 u32 rxpbl = dma_cfg->rxpbl ?: dma_cfg->pbl; in dwxgmac2_dma_init_rx_chan()
65 struct stmmac_dma_cfg *dma_cfg, in dwxgmac2_dma_init_tx_chan() argument
68 u32 txpbl = dma_cfg->txpbl ?: dma_cfg->pbl; in dwxgmac2_dma_init_tx_chan()
dwmac-intel.c
513 plat->dma_cfg->pbl = 32; in intel_mgbe_common_data()
514 plat->dma_cfg->pblx8 = true; in intel_mgbe_common_data()
515 plat->dma_cfg->fixed_burst = 0; in intel_mgbe_common_data()
516 plat->dma_cfg->mixed_burst = 0; in intel_mgbe_common_data()
517 plat->dma_cfg->aal = 0; in intel_mgbe_common_data()
518 plat->dma_cfg->dche = true; in intel_mgbe_common_data()
902 plat->dma_cfg->pbl = 16; in quark_default_data()
903 plat->dma_cfg->pblx8 = true; in quark_default_data()
904 plat->dma_cfg->fixed_burst = 1; in quark_default_data()
1022 plat->dma_cfg = devm_kzalloc(&pdev->dev, sizeof(*plat->dma_cfg), in intel_eth_pci_probe()
[all …]
hwif.h
173 void (*init)(void __iomem *ioaddr, struct stmmac_dma_cfg *dma_cfg,
176 struct stmmac_dma_cfg *dma_cfg, u32 chan);
178 struct stmmac_dma_cfg *dma_cfg,
181 struct stmmac_dma_cfg *dma_cfg,
/drivers/net/ethernet/samsung/sxgbe/
sxgbe_platform.c
31 struct sxgbe_dma_cfg *dma_cfg; in sxgbe_probe_config_dt() local
51 dma_cfg = devm_kzalloc(&pdev->dev, sizeof(*dma_cfg), GFP_KERNEL); in sxgbe_probe_config_dt()
52 if (!dma_cfg) in sxgbe_probe_config_dt()
55 plat->dma_cfg = dma_cfg; in sxgbe_probe_config_dt()
56 of_property_read_u32(np, "samsung,pbl", &dma_cfg->pbl); in sxgbe_probe_config_dt()
57 if (of_property_read_u32(np, "samsung,burst-map", &dma_cfg->burst_map) == 0) in sxgbe_probe_config_dt()
58 dma_cfg->fixed_burst = true; in sxgbe_probe_config_dt()
/drivers/usb/musb/
tusb6010_omap.c
197 struct dma_slave_config dma_cfg; in tusb_omap_dma_program() local
270 memset(&dma_cfg, 0, sizeof(dma_cfg)); in tusb_omap_dma_program()
274 dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES; in tusb_omap_dma_program()
275 dma_cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES; in tusb_omap_dma_program()
278 dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES; in tusb_omap_dma_program()
279 dma_cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES; in tusb_omap_dma_program()
290 dma_cfg.src_addr = fifo_addr; in tusb_omap_dma_program()
291 dma_cfg.dst_addr = fifo_addr; in tusb_omap_dma_program()
292 dma_cfg.src_port_window_size = port_window; in tusb_omap_dma_program()
293 dma_cfg.src_maxburst = port_window; in tusb_omap_dma_program()
[all …]
/drivers/dma/
ste_dma40.c
472 struct stedma40_chan_cfg dma_cfg; member
858 chan->dma_cfg.dir == DMA_DEV_TO_MEM)) in d40_log_lli_to_lcxa()
1241 u32 event = D40_TYPE_TO_EVENT(d40c->dma_cfg.dev_type); in d40_config_set_event()
1244 if ((d40c->dma_cfg.dir == DMA_DEV_TO_MEM) || in d40_config_set_event()
1245 (d40c->dma_cfg.dir == DMA_DEV_TO_DEV)) in d40_config_set_event()
1249 if (d40c->dma_cfg.dir != DMA_DEV_TO_MEM) in d40_config_set_event()
1343 return phy_map[d40c->dma_cfg.mode_opt]; in d40_get_prmo()
1345 return log_map[d40c->dma_cfg.mode_opt]; in d40_get_prmo()
1397 return num_elt * d40c->dma_cfg.dst_info.data_width; in d40_residue()
1833 int dev_type = d40c->dma_cfg.dev_type; in d40_allocate_channel()
[all …]
/drivers/mmc/host/
owl-mmc.c
111 struct dma_slave_config dma_cfg; member
310 owl_host->dma_cfg.direction = DMA_MEM_TO_DEV; in owl_mmc_prepare_data()
313 owl_host->dma_cfg.direction = DMA_DEV_TO_MEM; in owl_mmc_prepare_data()
319 dmaengine_slave_config(owl_host->dma, &owl_host->dma_cfg); in owl_mmc_prepare_data()
322 owl_host->dma_cfg.direction, in owl_mmc_prepare_data()
633 owl_host->dma_cfg.src_addr = res->start + OWL_REG_SD_DAT; in owl_mmc_probe()
634 owl_host->dma_cfg.dst_addr = res->start + OWL_REG_SD_DAT; in owl_mmc_probe()
635 owl_host->dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES; in owl_mmc_probe()
636 owl_host->dma_cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES; in owl_mmc_probe()
637 owl_host->dma_cfg.device_fc = false; in owl_mmc_probe()
cavium-thunderx.c
173 u64 dma_cfg; in thunder_mmc_remove() local
180 dma_cfg = readq(host->dma_base + MIO_EMM_DMA_CFG(host)); in thunder_mmc_remove()
181 dma_cfg &= ~MIO_EMM_DMA_CFG_EN; in thunder_mmc_remove()
182 writeq(dma_cfg, host->dma_base + MIO_EMM_DMA_CFG(host)); in thunder_mmc_remove()
cavium-octeon.c
300 u64 dma_cfg; in octeon_mmc_remove() local
307 dma_cfg = readq(host->dma_base + MIO_EMM_DMA_CFG(host)); in octeon_mmc_remove()
308 dma_cfg &= ~MIO_EMM_DMA_CFG_EN; in octeon_mmc_remove()
309 writeq(dma_cfg, host->dma_base + MIO_EMM_DMA_CFG(host)); in octeon_mmc_remove()
cavium.c
518 u64 dma_cfg, addr; in prepare_dma_single() local
527 dma_cfg = FIELD_PREP(MIO_EMM_DMA_CFG_EN, 1) | in prepare_dma_single()
530 dma_cfg |= FIELD_PREP(MIO_EMM_DMA_CFG_ENDIAN, 1); in prepare_dma_single()
532 dma_cfg |= FIELD_PREP(MIO_EMM_DMA_CFG_SIZE, in prepare_dma_single()
537 dma_cfg |= FIELD_PREP(MIO_EMM_DMA_CFG_ADR, addr); in prepare_dma_single()
538 writeq(dma_cfg, host->dma_base + MIO_EMM_DMA_CFG(host)); in prepare_dma_single()
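
The cavium.c and cavium-*.c hits show the same register handled from both ends: prepare_dma_single() assembles MIO_EMM_DMA_CFG with FIELD_PREP() and the remove paths clear its enable bit. A rough sketch of that enable/disable pairing (the MIO_EMM_DMA_CFG_* field masks live in the driver's private header and are assumed visible here; the helper names are placeholders):

    /*
     * Sketch of the MIO_EMM_DMA_CFG enable/disable pattern seen in the hits
     * above; the MIO_EMM_DMA_CFG_* masks come from the driver's own header.
     */
    #include <linux/bitfield.h>
    #include <linux/io.h>

    static void example_emm_dma_start(void __iomem *dma_cfg_reg, u64 addr, u64 size)
    {
        u64 dma_cfg;

        dma_cfg = FIELD_PREP(MIO_EMM_DMA_CFG_EN, 1) |      /* enable the engine */
                  FIELD_PREP(MIO_EMM_DMA_CFG_SIZE, size) | /* transfer size field */
                  FIELD_PREP(MIO_EMM_DMA_CFG_ADR, addr);   /* DMA base address */
        writeq(dma_cfg, dma_cfg_reg);
    }

    static void example_emm_dma_stop(void __iomem *dma_cfg_reg)
    {
        u64 dma_cfg = readq(dma_cfg_reg);

        dma_cfg &= ~MIO_EMM_DMA_CFG_EN;     /* as in the *_mmc_remove() hits */
        writeq(dma_cfg, dma_cfg_reg);
    }
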
/drivers/ata/
pata_octeon_cf.c
626 union cvmx_mio_boot_dma_cfgx dma_cfg; in octeon_cf_dma_finished() local
637 dma_cfg.u64 = cvmx_read_csr(cf_port->dma_base + DMA_CFG); in octeon_cf_dma_finished()
638 if (dma_cfg.s.size != 0xfffff) { in octeon_cf_dma_finished()
645 dma_cfg.u64 = 0; in octeon_cf_dma_finished()
646 dma_cfg.s.size = -1; in octeon_cf_dma_finished()
647 cvmx_write_csr(cf_port->dma_base + DMA_CFG, dma_cfg.u64); in octeon_cf_dma_finished()
687 union cvmx_mio_boot_dma_cfgx dma_cfg; in octeon_cf_interrupt() local
693 dma_cfg.u64 = cvmx_read_csr(cf_port->dma_base + DMA_CFG); in octeon_cf_interrupt()
700 if (dma_int.s.done && !dma_cfg.s.en) { in octeon_cf_interrupt()
1015 union cvmx_mio_boot_dma_cfgx dma_cfg; in octeon_cf_shutdown() local
[all …]
/drivers/spi/
spi-stm32-qspi.c
616 struct dma_slave_config dma_cfg; in stm32_qspi_dma_setup() local
620 memset(&dma_cfg, 0, sizeof(dma_cfg)); in stm32_qspi_dma_setup()
622 dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE; in stm32_qspi_dma_setup()
623 dma_cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE; in stm32_qspi_dma_setup()
624 dma_cfg.src_addr = qspi->phys_base + QSPI_DR; in stm32_qspi_dma_setup()
625 dma_cfg.dst_addr = qspi->phys_base + QSPI_DR; in stm32_qspi_dma_setup()
626 dma_cfg.src_maxburst = 4; in stm32_qspi_dma_setup()
627 dma_cfg.dst_maxburst = 4; in stm32_qspi_dma_setup()
636 if (dmaengine_slave_config(qspi->dma_chrx, &dma_cfg)) { in stm32_qspi_dma_setup()
648 if (dmaengine_slave_config(qspi->dma_chtx, &dma_cfg)) { in stm32_qspi_dma_setup()
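
Several of these hits (tusb6010_omap.c, owl-mmc.c, spi-stm32-qspi.c, stm32_fmc2_nand.c, rcar_drif.c) follow the same dmaengine idiom: zero a struct dma_slave_config, point it at the peripheral data register, and apply it with dmaengine_slave_config(). A minimal sketch of that idiom, with the FIFO address, bus width, and burst size as caller-supplied placeholders:

    /* Sketch of the dma_slave_config idiom shared by the drivers above. */
    #include <linux/dmaengine.h>
    #include <linux/string.h>

    static int example_setup_slave_dma(struct dma_chan *chan, phys_addr_t fifo,
                                       enum dma_slave_buswidth width, u32 maxburst)
    {
        struct dma_slave_config dma_cfg;

        memset(&dma_cfg, 0, sizeof(dma_cfg));   /* unused fields must be zero */
        dma_cfg.src_addr = fifo;                /* device FIFO for DEV_TO_MEM */
        dma_cfg.dst_addr = fifo;                /* device FIFO for MEM_TO_DEV */
        dma_cfg.src_addr_width = width;
        dma_cfg.dst_addr_width = width;
        dma_cfg.src_maxburst = maxburst;
        dma_cfg.dst_maxburst = maxburst;

        return dmaengine_slave_config(chan, &dma_cfg);  /* 0 on success */
    }
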
/drivers/comedi/drivers/
ni_660x.c
265 unsigned int dma_cfg[NI660X_MAX_CHIPS]; member
317 devpriv->dma_cfg[chip] &= ~NI660X_DMA_CFG_SEL_MASK(mite_channel); in ni_660x_set_dma_channel()
318 devpriv->dma_cfg[chip] |= NI660X_DMA_CFG_SEL(mite_channel, in ni_660x_set_dma_channel()
320 ni_660x_write(dev, chip, devpriv->dma_cfg[chip] | in ni_660x_set_dma_channel()
332 devpriv->dma_cfg[chip] &= ~NI660X_DMA_CFG_SEL_MASK(mite_channel); in ni_660x_unset_dma_channel()
333 devpriv->dma_cfg[chip] |= NI660X_DMA_CFG_SEL_NONE(mite_channel); in ni_660x_unset_dma_channel()
334 ni_660x_write(dev, chip, devpriv->dma_cfg[chip], NI660X_DMA_CFG); in ni_660x_unset_dma_channel()
985 devpriv->dma_cfg[chip] = 0; in ni_660x_init_tio_chips()
987 devpriv->dma_cfg[chip] |= NI660X_DMA_CFG_SEL_NONE(chan); in ni_660x_init_tio_chips()
988 ni_660x_write(dev, chip, devpriv->dma_cfg[chip], in ni_660x_init_tio_chips()
/drivers/mtd/nand/raw/
stm32_fmc2_nand.c
338 struct dma_slave_config dma_cfg; in stm32_fmc2_nfc_select_chip() local
349 memset(&dma_cfg, 0, sizeof(dma_cfg)); in stm32_fmc2_nfc_select_chip()
350 dma_cfg.src_addr = nfc->data_phys_addr[nfc->cs_sel]; in stm32_fmc2_nfc_select_chip()
351 dma_cfg.dst_addr = nfc->data_phys_addr[nfc->cs_sel]; in stm32_fmc2_nfc_select_chip()
352 dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES; in stm32_fmc2_nfc_select_chip()
353 dma_cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES; in stm32_fmc2_nfc_select_chip()
354 dma_cfg.src_maxburst = 32; in stm32_fmc2_nfc_select_chip()
355 dma_cfg.dst_maxburst = 32; in stm32_fmc2_nfc_select_chip()
357 ret = dmaengine_slave_config(nfc->dma_tx_ch, &dma_cfg); in stm32_fmc2_nfc_select_chip()
363 ret = dmaengine_slave_config(nfc->dma_rx_ch, &dma_cfg); in stm32_fmc2_nfc_select_chip()
[all …]
/drivers/staging/media/atomisp/pci/runtime/isys/src/
virtual_isys.c
485 &channel_cfg->dma_cfg); in calculate_input_system_channel_cfg()
746 cfg->dma_cfg.channel = channel->dma_channel; in calculate_ibuf_ctrl_cfg()
747 cfg->dma_cfg.cmd = _DMA_V2_MOVE_A2B_NO_SYNC_CHK_COMMAND; in calculate_ibuf_ctrl_cfg()
749 cfg->dma_cfg.shift_returned_items = 0; in calculate_ibuf_ctrl_cfg()
750 cfg->dma_cfg.elems_per_word_in_ibuf = 0; in calculate_ibuf_ctrl_cfg()
751 cfg->dma_cfg.elems_per_word_in_dest = 0; in calculate_ibuf_ctrl_cfg()
/drivers/gpu/drm/kmb/
kmb_plane.c
355 unsigned int dma_cfg; in kmb_plane_atomic_update() local
523 dma_cfg = LCD_DMA_LAYER_ENABLE | LCD_DMA_LAYER_VSTRIDE_EN | in kmb_plane_atomic_update()
527 kmb_write_lcd(kmb, LCD_LAYERn_DMA_CFG(plane_id), dma_cfg); in kmb_plane_atomic_update()
538 drm_dbg(&kmb->drm, "dma_cfg=0x%x LCD_DMA_CFG=0x%x\n", dma_cfg, in kmb_plane_atomic_update()
/drivers/staging/media/atomisp/pci/css_2401_system/
ibuf_ctrl_global.h
59 } dma_cfg; member
/drivers/media/platform/
rcar_drif.c
270 struct dma_slave_config dma_cfg; in rcar_drif_alloc_dmachannels() local
289 memset(&dma_cfg, 0, sizeof(dma_cfg)); in rcar_drif_alloc_dmachannels()
290 dma_cfg.src_addr = (phys_addr_t)(ch->start + RCAR_DRIF_SIRFDR); in rcar_drif_alloc_dmachannels()
291 dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES; in rcar_drif_alloc_dmachannels()
292 ret = dmaengine_slave_config(ch->dmach, &dma_cfg); in rcar_drif_alloc_dmachannels()
/drivers/staging/media/atomisp/pci/
isp2401_input_system_global.h
68 isys2401_dma_cfg_t dma_cfg; member
