Searched refs:dma_base (results 1 – 25 of 80), sorted by relevance

/kernel/linux/linux-5.10/drivers/ide/
pdc202xx_new.c
71 outb(index, hwif->dma_base + 1); in get_indexed_reg()
72 value = inb(hwif->dma_base + 3); in get_indexed_reg()
85 outb(index, hwif->dma_base + 1); in set_indexed_reg()
86 outb(value, hwif->dma_base + 3); in set_indexed_reg()
200 static long read_counter(u32 dma_base) in read_counter() argument
202 u32 pri_dma_base = dma_base, sec_dma_base = dma_base + 0x08; in read_counter()
240 static long detect_pll_input_clock(unsigned long dma_base) in detect_pll_input_clock() argument
247 start_count = read_counter(dma_base); in detect_pll_input_clock()
251 outb(0x01, dma_base + 0x01); in detect_pll_input_clock()
252 scr1 = inb(dma_base + 0x03); in detect_pll_input_clock()
[all …]
ide-dma-sff.c
56 unsigned long addr = hwif->dma_base + ATA_DMA_STATUS; in ide_dma_sff_read_status()
67 unsigned long addr = hwif->dma_base + ATA_DMA_STATUS; in ide_dma_sff_write_status()
204 (void __iomem *)(hwif->dma_base + ATA_DMA_TABLE_OFS)); in ide_dma_setup()
206 outl(hwif->dmatable_dma, hwif->dma_base + ATA_DMA_TABLE_OFS); in ide_dma_setup()
210 writeb(rw, (void __iomem *)(hwif->dma_base + ATA_DMA_CMD)); in ide_dma_setup()
212 outb(rw, hwif->dma_base + ATA_DMA_CMD); in ide_dma_setup()
275 dma_cmd = readb((void __iomem *)(hwif->dma_base + ATA_DMA_CMD)); in ide_dma_start()
277 (void __iomem *)(hwif->dma_base + ATA_DMA_CMD)); in ide_dma_start()
279 dma_cmd = inb(hwif->dma_base + ATA_DMA_CMD); in ide_dma_start()
280 outb(dma_cmd | ATA_DMA_START, hwif->dma_base + ATA_DMA_CMD); in ide_dma_start()
[all …]
setup-pci.c
62 static int ide_pci_clear_simplex(unsigned long dma_base, const char *name) in ide_pci_clear_simplex() argument
64 u8 dma_stat = inb(dma_base + 2); in ide_pci_clear_simplex()
66 outb(dma_stat & 0x60, dma_base + 2); in ide_pci_clear_simplex()
67 dma_stat = inb(dma_base + 2); in ide_pci_clear_simplex()
83 unsigned long dma_base = 0; in ide_pci_dma_base() local
86 return hwif->dma_base; in ide_pci_dma_base()
88 if (hwif->mate && hwif->mate->dma_base) { in ide_pci_dma_base()
89 dma_base = hwif->mate->dma_base - (hwif->channel ? 0 : 8); in ide_pci_dma_base()
93 dma_base = pci_resource_start(dev, baridx); in ide_pci_dma_base()
95 if (dma_base == 0) { in ide_pci_dma_base()
[all …]
trm290.c
202 outl(hwif->dmatable_dma | rw, hwif->dma_base); in trm290_dma_setup()
204 outw(count * 2 - 1, hwif->dma_base + 2); in trm290_dma_setup()
216 u16 status = inw(drive->hwif->dma_base + 2); in trm290_dma_end()
225 u16 status = inw(drive->hwif->dma_base + 2); in trm290_dma_test_irq()
249 hwif->dma_base = (cfg_base + 4) ^ (hwif->channel ? 0x80 : 0); in init_hwif_trm290()
252 hwif->name, hwif->dma_base, hwif->dma_base + 3); in init_hwif_trm290()
tc86c001.c
69 u8 dma_stat = inb(hwif->dma_base + ATA_DMA_STATUS); in tc86c001_timer_expiry()
77 u8 dma_cmd = inb(hwif->dma_base + ATA_DMA_CMD); in tc86c001_timer_expiry()
83 outb(dma_cmd & ~0x01, hwif->dma_base + ATA_DMA_CMD); in tc86c001_timer_expiry()
92 outb(0x00, hwif->dma_base + ATA_DMA_CMD); in tc86c001_timer_expiry()
94 outb(0x01, hwif->dma_base + ATA_DMA_CMD); in tc86c001_timer_expiry()
165 if (!hwif->dma_base) in init_hwif_tc86c001()
ns87415.c
62 return superio_ide_inb(hwif->dma_base + ATA_DMA_STATUS); in superio_dma_sff_read_status()
192 dma_cmd = inb(hwif->dma_base + ATA_DMA_CMD); in ns87415_dma_end()
194 outb(dma_cmd & ~1, hwif->dma_base + ATA_DMA_CMD); in ns87415_dma_end()
196 dma_cmd = inb(hwif->dma_base + ATA_DMA_CMD); in ns87415_dma_end()
197 outb(dma_cmd | 6, hwif->dma_base + ATA_DMA_CMD); in ns87415_dma_end()
266 if (!hwif->dma_base) in init_hwif_ns87415()
269 outb(0x60, hwif->dma_base + ATA_DMA_STATUS); in init_hwif_ns87415()
sc1200.c
174 unsigned long dma_base = hwif->dma_base; in sc1200_dma_end() local
177 dma_stat = inb(dma_base+2); /* get DMA status */ in sc1200_dma_end()
183 outb(dma_stat|0x1b, dma_base+2); /* clear the INTR & ERROR bits */ in sc1200_dma_end()
184 outb(inb(dma_base)&~1, dma_base); /* !! DO THIS HERE !! stop DMA */ in sc1200_dma_end()
piix.c
235 if (drive->waiting_for_dma || hwif->dma_base == 0) in ich_clear_irq()
239 dma_stat = inb(hwif->dma_base + ATA_DMA_STATUS); in ich_clear_irq()
241 outb(dma_stat, hwif->dma_base + ATA_DMA_STATUS); in ich_clear_irq()
302 if (!hwif->dma_base) in init_hwif_piix()
cmd64x.c
259 dma_stat = inb(hwif->dma_base + ATA_DMA_STATUS); in cmd646_1_dma_end()
261 dma_cmd = inb(hwif->dma_base + ATA_DMA_CMD); in cmd646_1_dma_end()
263 outb(dma_cmd & ~1, hwif->dma_base + ATA_DMA_CMD); in cmd646_1_dma_end()
265 outb(dma_stat | 6, hwif->dma_base + ATA_DMA_STATUS); in cmd646_1_dma_end()
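Note: most of the drivers/ide hits above touch the same SFF-8038i bus-master (BMIDE) register block that starts at hwif->dma_base; ide-dma-sff.c is the generic form and the chip drivers (piix.c, cmd64x.c, ns87415.c, sc1200.c, ...) repeat it. Below is a minimal sketch of that layout and start sequence, assuming the standard offsets from include/linux/ata.h (command at +0, status at +2, PRD table pointer at +4) and using port I/O only; the helper name and its arguments are illustrative, not taken from these drivers.

    #include <linux/io.h>
    #include <linux/types.h>

    /* Sketch only: standard SFF-8038i register block at dma_base.
     * Offsets mirror ATA_DMA_CMD (0), ATA_DMA_STATUS (2) and
     * ATA_DMA_TABLE_OFS (4); locking and error handling are omitted. */
    static void sff_dma_start_sketch(unsigned long dma_base,
                                     u32 prd_table_dma, bool to_device)
    {
        u8 dma_cmd;

        /* Point the engine at the PRD (scatter/gather) table. */
        outl(prd_table_dma, dma_base + 4);

        /* Direction: bit 3 set means the controller writes to memory,
         * i.e. a read from the device (ATA_DMA_WR). */
        outb(to_device ? 0 : (1 << 3), dma_base + 0);

        /* Clear the write-1-to-clear interrupt/error status bits. */
        outb(inb(dma_base + 2) | 0x06, dma_base + 2);

        /* Finally set ATA_DMA_START (bit 0) in the command register. */
        dma_cmd = inb(dma_base + 0);
        outb(dma_cmd | 0x01, dma_base + 0);
    }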
/kernel/linux/linux-5.10/drivers/dma/
mv_xor_v2.c
157 void __iomem *dma_base; member
231 writel(num_of_desc, xor_dev->dma_base + MV_XOR_V2_DMA_DESQ_ADD_OFF); in mv_xor_v2_add_desc_to_desq()
241 writel(num_of_desc, xor_dev->dma_base + MV_XOR_V2_DMA_DESQ_DEALLOC_OFF); in mv_xor_v2_free_desc_from_desq()
251 xor_dev->dma_base + MV_XOR_V2_DMA_DESQ_CTRL_OFF); in mv_xor_v2_set_desc_size()
265 reg = readl(xor_dev->dma_base + MV_XOR_V2_DMA_IMSG_THRD_OFF); in mv_xor_v2_enable_imsg_thrd()
269 writel(reg, xor_dev->dma_base + MV_XOR_V2_DMA_IMSG_THRD_OFF); in mv_xor_v2_enable_imsg_thrd()
272 reg = readl(xor_dev->dma_base + MV_XOR_V2_DMA_IMSG_TMOT); in mv_xor_v2_enable_imsg_thrd()
275 writel(reg, xor_dev->dma_base + MV_XOR_V2_DMA_IMSG_TMOT); in mv_xor_v2_enable_imsg_thrd()
284 reg = readl(xor_dev->dma_base + MV_XOR_V2_DMA_DESQ_DONE_OFF); in mv_xor_v2_interrupt_handler()
542 reg = readl(xor_dev->dma_base + MV_XOR_V2_DMA_DESQ_DONE_OFF); in mv_xor_v2_get_pending_params()
[all …]
/kernel/linux/linux-5.10/drivers/net/ethernet/8390/
etherh.c
67 void __iomem *dma_base; member
309 void __iomem *dma_base, *addr; in etherh_block_output() local
327 dma_base = etherh_priv(dev)->dma_base; in etherh_block_output()
348 writesw (dma_base, buf, count >> 1); in etherh_block_output()
350 writesb (dma_base, buf, count); in etherh_block_output()
374 void __iomem *dma_base, *addr; in etherh_block_input() local
386 dma_base = etherh_priv(dev)->dma_base; in etherh_block_input()
397 readsw (dma_base, buf, count >> 1); in etherh_block_input()
399 buf[count - 1] = readb (dma_base); in etherh_block_input()
401 readsb (dma_base, buf, count); in etherh_block_input()
[all …]
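Note: in the etherh.c hits, dma_base is an ioremap()ed FIFO window rather than a register block: etherh_block_output()/etherh_block_input() stream the packet through it with string I/O, word-wide on 16-bit cards and byte-wide otherwise. A small sketch of the receive side follows; the helper and its arguments are illustrative, not the driver's.

    #include <linux/io.h>
    #include <linux/types.h>

    /* Sketch of the copy split visible in etherh_block_input(): whole
     * 16-bit words via readsw(), then a trailing odd byte via readb();
     * byte-wide cards fall back to readsb(). */
    static void etherh_fifo_read_sketch(void __iomem *dma_base, u8 *buf,
                                        unsigned int count, bool word_wide)
    {
        if (word_wide) {
            readsw(dma_base, buf, count >> 1);
            if (count & 1)
                buf[count - 1] = readb(dma_base);
        } else {
            readsb(dma_base, buf, count);
        }
    }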
/kernel/linux/linux-5.10/arch/c6x/mm/
dma-coherent.c
31 static phys_addr_t dma_base; variable
52 return dma_base + (pos << PAGE_SHIFT); in __alloc_dma_pages()
58 u32 pos = (addr - dma_base) >> PAGE_SHIFT; in __free_dma_pages()
60 if (addr < dma_base || (pos + (1 << order)) >= dma_pages) { in __free_dma_pages()
127 dma_base = start; in coherent_mem_init()
/kernel/linux/linux-5.10/drivers/ata/
pata_octeon_cf.c
58 u64 dma_base; member
252 c = (cf_port->dma_base & 8) >> 3; in octeon_cf_set_dmamode()
282 cvmx_write_csr(cf_port->dma_base + DMA_TIM, dma_tim.u64); in octeon_cf_set_dmamode()
577 cvmx_write_csr(cf_port->dma_base + DMA_INT, mio_boot_dma_int.u64); in octeon_cf_dma_start()
580 cvmx_write_csr(cf_port->dma_base + DMA_INT_EN, mio_boot_dma_int.u64); in octeon_cf_dma_start()
612 cvmx_write_csr(cf_port->dma_base + DMA_CFG, mio_boot_dma_cfg.u64); in octeon_cf_dma_start()
637 dma_cfg.u64 = cvmx_read_csr(cf_port->dma_base + DMA_CFG); in octeon_cf_dma_finished()
647 cvmx_write_csr(cf_port->dma_base + DMA_CFG, dma_cfg.u64); in octeon_cf_dma_finished()
651 cvmx_write_csr(cf_port->dma_base + DMA_INT_EN, dma_int.u64); in octeon_cf_dma_finished()
655 cvmx_write_csr(cf_port->dma_base + DMA_INT, dma_int.u64); in octeon_cf_dma_finished()
[all …]
/kernel/linux/linux-5.10/arch/alpha/kernel/
pci_iommu.c
114 arena->dma_base = base; in iommu_arena_new_node()
143 base = arena->dma_base >> PAGE_SHIFT; in iommu_arena_find_pages()
305 if (!arena || arena->dma_base + arena->size - 1 > max_dma) in pci_map_single_1()
324 ret = arena->dma_base + dma_ofs * PAGE_SIZE; in pci_map_single_1()
407 if (!arena || dma_addr < arena->dma_base) in alpha_pci_unmap_page()
410 dma_ofs = (dma_addr - arena->dma_base) >> PAGE_SHIFT; in alpha_pci_unmap_page()
414 dma_addr, arena->dma_base, arena->size); in alpha_pci_unmap_page()
616 out->dma_address = arena->dma_base + dma_ofs*PAGE_SIZE + paddr; in sg_fill()
695 if (!arena || arena->dma_base + arena->size - 1 > max_dma) in alpha_pci_map_sg()
758 if (!arena || arena->dma_base + arena->size - 1 > max_dma) in alpha_pci_unmap_sg()
[all …]
core_titan.c
326 port->wsba[0].csr = hose->sg_isa->dma_base | 3; in titan_init_one_pachip_port()
334 port->wsba[2].csr = hose->sg_pci->dma_base | 3; in titan_init_one_pachip_port()
498 baddr >= (unsigned long)hose->sg_pci->dma_base && in titan_ioremap()
499 last < (unsigned long)hose->sg_pci->dma_base + hose->sg_pci->size){ in titan_ioremap()
504 baddr -= hose->sg_pci->dma_base; in titan_ioremap()
505 last -= hose->sg_pci->dma_base; in titan_ioremap()
612 aper->arena->dma_base + aper->pg_start * PAGE_SIZE; in titan_agp_setup()
702 unsigned long baddr = addr - aper->arena->dma_base; in titan_agp_translate()
core_marvel.c
294 hose->sg_isa->dma_base | wbase_m_ena | wbase_m_sg; in io7_init_hose()
312 hose->sg_pci->dma_base | wbase_m_ena | wbase_m_sg; in io7_init_hose()
729 baddr >= (unsigned long)hose->sg_pci->dma_base && in marvel_ioremap()
730 last < (unsigned long)hose->sg_pci->dma_base + hose->sg_pci->size) { in marvel_ioremap()
735 baddr -= hose->sg_pci->dma_base; in marvel_ioremap()
736 last -= hose->sg_pci->dma_base; in marvel_ioremap()
929 aper->arena->dma_base + aper->pg_start * PAGE_SIZE; in marvel_agp_setup()
1041 unsigned long baddr = addr - aper->arena->dma_base; in marvel_agp_translate()
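Note: in the Alpha IOMMU code, dma_base is the bus address where a scatter-gather arena's window starts: pci_map_single_1() refuses an arena whose window ends above the device's DMA limit, and the map/unmap paths convert between bus addresses and page offsets relative to that base. A sketch of those two pieces of arithmetic, with illustrative helper names:

    #include <linux/types.h>
    #include <asm/page.h>

    /* The arena window covers bus addresses [dma_base, dma_base + size).
     * An arena is only usable if the whole window sits at or below the
     * device's DMA limit, as checked in pci_map_single_1(). */
    static bool arena_usable_sketch(u64 dma_base, u64 size, u64 max_dma)
    {
        return dma_base + size - 1 <= max_dma;
    }

    /* Bus address handed back for a mapping that landed at page index
     * dma_ofs, keeping the sub-page offset of the CPU address. */
    static u64 arena_dma_addr_sketch(u64 dma_base, unsigned long dma_ofs,
                                     unsigned long cpu_addr)
    {
        return dma_base + (u64)dma_ofs * PAGE_SIZE + (cpu_addr & ~PAGE_MASK);
    }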
/kernel/linux/linux-5.10/arch/arm/mach-omap1/
dma.c
174 static void __iomem *dma_base; variable
177 void __iomem *addr = dma_base; in dma_write()
189 void __iomem *addr = dma_base; in dma_read()
324 dma_base = ioremap(res[0].start, resource_size(&res[0])); in omap1_system_dma_init()
325 if (!dma_base) { in omap1_system_dma_init()
409 iounmap(dma_base); in omap1_system_dma_init()
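Note: the OMAP1 hits show the usual lifetime of an MMIO dma_base: ioremap() the register resource at init, bail out if that fails, access registers as dma_base + offset, and iounmap() on the error/exit path. A compressed sketch of that pattern, with resource handling trimmed and illustrative function names:

    #include <linux/io.h>
    #include <linux/errno.h>
    #include <linux/ioport.h>

    static void __iomem *dma_base;

    /* Map the DMA register window described by res. */
    static int omap1_dma_map_sketch(struct resource *res)
    {
        dma_base = ioremap(res->start, resource_size(res));
        if (!dma_base)
            return -ENOMEM;
        return 0;
    }

    /* Tear the mapping down again on the error/exit path. */
    static void omap1_dma_unmap_sketch(void)
    {
        iounmap(dma_base);
        dma_base = NULL;
    }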
/kernel/linux/linux-5.10/drivers/media/platform/s5p-mfc/
s5p_mfc_ctrl.c
176 mfc_write(dev, dev->dma_base[BANK_L_CTX], in s5p_mfc_init_memctrl()
179 &dev->dma_base[BANK_L_CTX]); in s5p_mfc_init_memctrl()
181 mfc_write(dev, dev->dma_base[BANK_L_CTX], in s5p_mfc_init_memctrl()
183 mfc_write(dev, dev->dma_base[BANK_R_CTX], in s5p_mfc_init_memctrl()
186 &dev->dma_base[BANK_L_CTX], in s5p_mfc_init_memctrl()
187 &dev->dma_base[BANK_R_CTX]); in s5p_mfc_init_memctrl()
/kernel/linux/linux-5.10/include/linux/
dma-iommu.h
22 void iommu_setup_dma_ops(struct device *dev, u64 dma_base, u64 size);
47 static inline void iommu_setup_dma_ops(struct device *dev, u64 dma_base, in iommu_setup_dma_ops() argument
/kernel/linux/linux-5.10/arch/arm64/mm/
dma-mapping.c
40 void arch_setup_dma_ops(struct device *dev, u64 dma_base, u64 size, in arch_setup_dma_ops() argument
53 iommu_setup_dma_ops(dev, dma_base, size); in arch_setup_dma_ops()
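Note: the include/linux/dma-iommu.h and arch/arm64/mm hits fit together: arch_setup_dma_ops() passes the device's DMA window (dma_base, size) straight to iommu_setup_dma_ops(), and the header pairs the real prototype with an empty inline stub so the call compiles even when the IOMMU DMA layer is configured out. A sketch of that header pattern; the CONFIG_IOMMU_DMA guard is assumed from context rather than shown in the hits.

    #include <linux/types.h>

    struct device;

    /* Sketch of the declaration/stub pair visible in dma-iommu.h. */
    #ifdef CONFIG_IOMMU_DMA
    void iommu_setup_dma_ops(struct device *dev, u64 dma_base, u64 size);
    #else
    static inline void iommu_setup_dma_ops(struct device *dev, u64 dma_base,
                                           u64 size)
    {
    }
    #endif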
/kernel/linux/linux-5.10/arch/arm/mach-davinci/
sram.c
24 dma_addr_t dma_base = davinci_soc_info.sram_dma; in sram_alloc() local
28 if (!sram_pool || (dma && !dma_base)) in sram_alloc()
/kernel/linux/linux-5.10/drivers/net/ethernet/cortina/
gemini.c
111 void __iomem *dma_base; member
526 readl(port->dma_base + GMAC_AHB_WEIGHT_REG); in gmac_init()
527 writel(ahb_weight.bits32, port->dma_base + GMAC_AHB_WEIGHT_REG); in gmac_init()
530 port->dma_base + GMAC_TX_WEIGHTING_CTRL_0_REG); in gmac_init()
532 port->dma_base + GMAC_TX_WEIGHTING_CTRL_1_REG); in gmac_init()
560 rwptr_reg = port->dma_base + GMAC_SW_TX_QUEUE0_PTR_REG; in gmac_setup_txqs()
583 port->dma_base + GMAC_SW_TX_QUEUE_BASE_REG); in gmac_setup_txqs()
684 rwptr_reg = port->dma_base + GMAC_SW_TX_QUEUE0_PTR_REG; in gmac_cleanup_txqs()
694 writel(0, port->dma_base + GMAC_SW_TX_QUEUE_BASE_REG); in gmac_cleanup_txqs()
1241 ptr_reg = port->dma_base + GMAC_SW_TX_QUEUE_PTR_REG(txq_num); in gmac_start_xmit()
[all …]
/kernel/linux/linux-5.10/drivers/mmc/host/
cavium-thunderx.c
85 host->dma_base = host->base; in thunder_mmc_probe()
180 dma_cfg = readq(host->dma_base + MIO_EMM_DMA_CFG(host)); in thunder_mmc_remove()
182 writeq(dma_cfg, host->dma_base + MIO_EMM_DMA_CFG(host)); in thunder_mmc_remove()
cavium-octeon.c
216 host->dma_base = base; in octeon_mmc_probe()
307 dma_cfg = readq(host->dma_base + MIO_EMM_DMA_CFG(host)); in octeon_mmc_remove()
309 writeq(dma_cfg, host->dma_base + MIO_EMM_DMA_CFG(host)); in octeon_mmc_remove()
cavium.c
387 fifo_cfg = readq(host->dma_base + MIO_EMM_DMA_FIFO_CFG(host)); in finish_dma_sg()
396 writeq(BIT_ULL(16), host->dma_base + MIO_EMM_DMA_FIFO_CFG(host)); in finish_dma_sg()
539 writeq(dma_cfg, host->dma_base + MIO_EMM_DMA_CFG(host)); in prepare_dma_single()
545 writeq(addr, host->dma_base + MIO_EMM_DMA_ADR(host)); in prepare_dma_single()
567 writeq(0, host->dma_base + MIO_EMM_DMA_FIFO_CFG(host)); in prepare_dma_sg()
574 writeq(addr, host->dma_base + MIO_EMM_DMA_FIFO_ADR(host)); in prepare_dma_sg()
597 writeq(fifo_cmd, host->dma_base + MIO_EMM_DMA_FIFO_CMD(host)); in prepare_dma_sg()
614 writeq(BIT_ULL(16), host->dma_base + MIO_EMM_DMA_FIFO_CFG(host)); in prepare_dma_sg()
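Note: the Cavium MMC hits treat dma_base as a 64-bit register window (on ThunderX it simply aliases the main register base), and teardown reads MIO_EMM_DMA_CFG and writes it back; the modification itself sits on a line not shown above. A generic sketch of that 64-bit read-modify-write, where the offset and mask parameters are purely illustrative:

    #include <linux/io.h>
    #include <linux/types.h>

    /* Read a 64-bit register at dma_base + cfg_offset, clear the bits in
     * clear_mask, and write it back. The actual bit the driver changes is
     * elided in the hits, so the mask is a stand-in. */
    static void emm_dma_cfg_rmw_sketch(void __iomem *dma_base,
                                       unsigned long cfg_offset, u64 clear_mask)
    {
        u64 dma_cfg = readq(dma_base + cfg_offset);

        dma_cfg &= ~clear_mask;
        writeq(dma_cfg, dma_base + cfg_offset);
    }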
