/arch/tile/kernel/pci-dma.c
    215  sg->dma_address = sg_phys(sg);  in tile_dma_map_sg()
    216  __dma_prep_pa_range(sg->dma_address, sg->length, direction);  in tile_dma_map_sg()
    234  sg->dma_address = sg_phys(sg);  in tile_dma_unmap_sg()
    235  __dma_complete_pa_range(sg->dma_address, sg->length,  in tile_dma_unmap_sg()
    253  static void tile_dma_unmap_page(struct device *dev, dma_addr_t dma_address,  in tile_dma_unmap_page() argument
    259  __dma_complete_page(pfn_to_page(PFN_DOWN(dma_address)),  in tile_dma_unmap_page()
    260  dma_address & (PAGE_SIZE - 1), size, direction);  in tile_dma_unmap_page()
    291  dma_sync_single_for_cpu(dev, sg->dma_address,  in tile_dma_sync_sg_for_cpu()
    307  dma_sync_single_for_device(dev, sg->dma_address,  in tile_dma_sync_sg_for_device()
    388  sg->dma_address = sg_phys(sg);  in tile_pci_dma_map_sg()
    [all …]
/arch/microblaze/kernel/dma.c
    63   sg->dma_address = sg_phys(sg);  in dma_direct_map_sg()
    88   dma_addr_t dma_address,  in dma_direct_unmap_page() argument
    98   __dma_sync(dma_address, size, direction);  in dma_direct_unmap_page()
    140  __dma_sync(sg->dma_address, sg->length, direction);  in dma_direct_sync_sg_for_cpu()
    154  __dma_sync(sg->dma_address, sg->length, direction);  in dma_direct_sync_sg_for_device()
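The tile and microblaze entries above (and the hexagon, metag, x86 pci-nommu, m68k, sh, and powerpc direct-DMA entries below) all repeat the same direct-mapping idiom: walk the scatterlist, publish each segment's physical address as its bus address, and do per-segment cache maintenance. A minimal sketch of that pattern against the dma_map_ops-era interface; arch_sync_for_device() is a hypothetical stand-in for the per-arch helper (__dma_prep_pa_range, __dma_sync, and so on):

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Direct-mapped map_sg: bus address == physical address, no IOMMU. */
    static int direct_map_sg(struct device *dev, struct scatterlist *sgl,
                             int nents, enum dma_data_direction dir)
    {
            struct scatterlist *sg;
            int i;

            for_each_sg(sgl, sg, nents, i) {
                    sg->dma_address = sg_phys(sg);
                    /* make the segment visible to the device */
                    arch_sync_for_device(sg->dma_address, sg->length, dir);
            }
            return nents;   /* no coalescing: one DMA segment per sg entry */
    }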
/arch/alpha/kernel/pci_iommu.c
    528  sg->dma_address = -1;  in sg_classify()
    531  sg->dma_address = -2;  in sg_classify()
    536  leader->dma_address = leader_flag;  in sg_classify()
    546  leader->dma_address = leader_flag;  in sg_classify()
    567  if (leader->dma_address == 0  in sg_fill()
    570  out->dma_address = paddr + __direct_map_base;  in sg_fill()
    574  __va(paddr), size, out->dma_address);  in sg_fill()
    581  if (leader->dma_address == 0 && dac_allowed) {  in sg_fill()
    582  out->dma_address = paddr + alpha_mv.pci_dac_offset;  in sg_fill()
    586  __va(paddr), size, out->dma_address);  in sg_fill()
    [all …]
/arch/hexagon/kernel/dma.c
    127  s->dma_address = sg_phys(s);  in hexagon_map_sg()
    128  if (!check_addr("map_sg", hwdev, s->dma_address, s->length))  in hexagon_map_sg()
    133  flush_dcache_range(dma_addr_to_virt(s->dma_address),  in hexagon_map_sg()
    134  dma_addr_to_virt(s->dma_address + s->length));  in hexagon_map_sg()
/arch/arm/mach-rpc/dma.c
    61   sg->dma_address = idma->dma_addr;  in iomd_get_next_sg()
    62   offset = sg->dma_address & ~PAGE_MASK;  in iomd_get_next_sg()
    80   idma->dma_addr = idma->dma.sg->dma_address;  in iomd_get_next_sg()
    90   sg->dma_address = 0;  in iomd_get_next_sg()
    115  iomd_writel(idma->cur_sg.dma_address, base + CURA);  in iomd_dma_handle()
    122  iomd_writel(idma->cur_sg.dma_address, base + CURB);  in iomd_dma_handle()
    171  idma->dma.buf.dma_address = dma_map_single(NULL,  in iomd_enable_dma()
/arch/metag/include/asm/dma-mapping.h
    60   sg->dma_address = sg_phys(sg);  in dma_map_sg()
    78   dma_unmap_page(struct device *dev, dma_addr_t dma_address, size_t size,  in dma_unmap_page() argument
    82   dma_sync_for_cpu(phys_to_virt(dma_address), size, direction);  in dma_unmap_page()
    99   sg->dma_address = sg_phys(sg);  in dma_unmap_sg()
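In the header-only ports (metag here; avr32, mn10300, and xtensa below) dma_unmap_page() is the mirror image of the mapping step: because the mapping is direct, unmapping reduces to a CPU-side cache sync. A sketch modeled on the metag lines 78-82, where dma_sync_for_cpu() is that arch's private helper rather than a generic kernel API:

    static inline void direct_unmap_page(struct device *dev,
                                         dma_addr_t dma_address, size_t size,
                                         enum dma_data_direction direction)
    {
            /* direct mapping: the bus address is the physical address */
            dma_sync_for_cpu(phys_to_virt(dma_address), size, direction);
    }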
/arch/x86/kernel/pci-nommu.c
    67   s->dma_address = sg_phys(s);  in nommu_map_sg()
    68   if (!check_addr("map_sg", hwdev, s->dma_address, s->length))  in nommu_map_sg()
/arch/x86/kernel/amd_gart_64.c
    297  gart_unmap_page(dev, s->dma_address, s->dma_length, dir, NULL);  in gart_unmap_sg()
    325  s->dma_address = addr;  in dma_map_sg_nonforce()
    348  unsigned long phys_addr = s->dma_address;  in __dma_map_cont()
    352  sout->dma_address = iommu_bus_base;  in __dma_map_cont()
    353  sout->dma_address += iommu_page*PAGE_SIZE + s->offset;  in __dma_map_cont()
    378  sout->dma_address = start->dma_address;  in dma_map_cont()
    415  s->dma_address = addr;  in gart_map_sg()
    473  s->dma_address = bad_dma_addr;  in gart_map_sg()
/arch/m68k/kernel/dma.c
    129  dma_sync_single_for_device(dev, sg->dma_address, sg->length, dir);  in dma_sync_sg_for_device()
    160  sg->dma_address = sg_phys(sg);  in dma_map_sg()
    161  dma_sync_single_for_device(dev, sg->dma_address, sg->length, dir);  in dma_map_sg()
/arch/s390/pci/pci_dma.c
    388  s->dma_address = s390_dma_map_pages(dev, page, s->offset,  in s390_dma_map_sg()
    390  if (!dma_mapping_error(dev, s->dma_address)) {  in s390_dma_map_sg()
    401  if (s->dma_address)  in s390_dma_map_sg()
    402  s390_dma_unmap_pages(dev, s->dma_address, s->dma_length,  in s390_dma_map_sg()
    404  s->dma_address = 0;  in s390_dma_map_sg()
    419  s390_dma_unmap_pages(dev, s->dma_address, s->dma_length, dir, NULL);  in s390_dma_unmap_sg()
    420  s->dma_address = 0;  in s390_dma_unmap_sg()
/arch/avr32/include/asm/dma-mapping.h
    183  dma_unmap_page(struct device *dev, dma_addr_t dma_address, size_t size,  in dma_unmap_page() argument
    186  dma_unmap_single(dev, dma_address, size, direction);  in dma_unmap_page()
    220  sg[i].dma_address = page_to_bus(sg_page(&sg[i])) + sg[i].offset;  in dma_map_sg()
/arch/blackfin/kernel/dma-mapping.c
    123  sg->dma_address = (dma_addr_t) sg_virt(sg);  in dma_map_sg()
    138  sg->dma_address = (dma_addr_t) sg_virt(sg);  in dma_sync_sg_for_device()
/arch/sparc/kernel/iommu.c
    629  outs->dma_address = dma_addr;  in dma_4u_map_sg()
    642  outs->dma_address = DMA_ERROR_CODE;  in dma_4u_map_sg()
    654  vaddr = s->dma_address & IO_PAGE_MASK;  in dma_4u_map_sg()
    655  npages = iommu_num_pages(s->dma_address, s->dma_length,  in dma_4u_map_sg()
    666  s->dma_address = DMA_ERROR_CODE;  in dma_4u_map_sg()
    688  bus_addr = sg->dma_address & IO_PAGE_MASK;  in fetch_sg_ctx()
    717  dma_addr_t dma_handle = sg->dma_address;  in dma_4u_unmap_sg()
    810  ((sglist[0].dma_address - iommu->page_table_map_base) >> IO_PAGE_SHIFT);  in dma_4u_sync_sg_for_cpu()
    815  bus_addr = sglist[0].dma_address & IO_PAGE_MASK;  in dma_4u_sync_sg_for_cpu()
    823  npages = (IO_PAGE_ALIGN(sgprv->dma_address + sgprv->dma_length)  in dma_4u_sync_sg_for_cpu()
/arch/sparc/kernel/pci_sun4v.c
    440  outs->dma_address = dma_addr;  in dma_4v_map_sg()
    458  outs->dma_address = DMA_ERROR_CODE;  in dma_4v_map_sg()
    469  vaddr = s->dma_address & IO_PAGE_MASK;  in dma_4v_map_sg()
    470  npages = iommu_num_pages(s->dma_address, s->dma_length,  in dma_4v_map_sg()
    474  s->dma_address = DMA_ERROR_CODE;  in dma_4v_map_sg()
    505  dma_addr_t dma_handle = sg->dma_address;  in dma_4v_unmap_sg()
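Both sparc IOMMU variants above (and powerpc's iommu_map_sg below) follow the same failure protocol in map_sg: the terminating entry and every entry mapped so far are poisoned with DMA_ERROR_CODE while the IO pages backing them are released. A condensed sketch of that unwind loop, with iommu_free_range() as a hypothetical stand-in for the arch-specific release path:

    /* Unwind a partially mapped scatterlist after an allocation failure. */
    static void unwind_partial_sg(struct scatterlist *sgl,
                                  struct scatterlist *failed)
    {
            struct scatterlist *s;

            for (s = sgl; s != failed; s = sg_next(s)) {
                    if (s->dma_length == 0)
                            continue;
                    /* hypothetical helper: release the IO pages backing s */
                    iommu_free_range(s->dma_address & IO_PAGE_MASK,
                                     iommu_num_pages(s->dma_address,
                                                     s->dma_length,
                                                     IO_PAGE_SIZE));
                    s->dma_address = DMA_ERROR_CODE;   /* poison the entry */
                    s->dma_length = 0;
            }
    }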
/arch/ia64/sn/pci/pci_dma.c
    253  provider->dma_unmap(pdev, sg->dma_address, dir);  in sn_dma_unmap_sg()
    254  sg->dma_address = (dma_addr_t) NULL;  in sn_dma_unmap_sg()
    305  sg->dma_address = dma_addr;  in sn_dma_map_sg()
    306  if (!sg->dma_address) {  in sn_dma_map_sg()
/arch/arm/kernel/dma-isa.c
    92   dma->buf.dma_address = dma_map_single(NULL,  in isa_enable_dma()
    97   address = dma->buf.dma_address;  in isa_enable_dma()
/arch/mn10300/include/asm/dma-mapping.h
    63   sg->dma_address = sg_phys(sg);  in dma_map_sg()
    87   void dma_unmap_page(struct device *dev, dma_addr_t dma_address, size_t size,  in dma_unmap_page() argument
/arch/sparc/mm/iommu.c
    246  sg->dma_address = iommu_get_one(dev, sg_page(sg), n) + sg->offset;  in iommu_get_scsi_sgl_gflush()
    277  sg->dma_address = iommu_get_one(dev, sg_page(sg), n) + sg->offset;  in iommu_get_scsi_sgl_pflush()
    317  iommu_release_one(dev, sg->dma_address & PAGE_MASK, n);  in iommu_release_scsi_sgl()
    318  sg->dma_address = 0x21212121;  in iommu_release_scsi_sgl()
/arch/sparc/mm/io-unit.c
    162  sg->dma_address = iounit_get_area(iounit, (unsigned long) sg_virt(sg), sg->length);  in iounit_get_scsi_sgl()
    192  len = ((sg->dma_address & ~PAGE_MASK) + sg->length + (PAGE_SIZE-1)) >> PAGE_SHIFT;  in iounit_release_scsi_sgl()
    193  vaddr = (sg->dma_address - IOUNIT_DMA_BASE) >> PAGE_SHIFT;  in iounit_release_scsi_sgl()
/arch/xtensa/include/asm/dma-mapping.h
    65   sg->dma_address = sg_phys(sg);  in dma_map_sg()
    81   dma_unmap_page(struct device *dev, dma_addr_t dma_address, size_t size,  in dma_unmap_page() argument
/arch/ia64/hp/common/sba_iommu.c
    399   startsg->dma_address, startsg->dma_length,  in sba_dump_sg()
    1245  nents, startsg->dma_address, cnt,  in sba_fill_pdir()
    1249  nents, startsg->dma_address, cnt,  in sba_fill_pdir()
    1255  if (startsg->dma_address & PIDE_FLAG) {  in sba_fill_pdir()
    1256  u32 pide = startsg->dma_address & ~PIDE_FLAG;  in sba_fill_pdir()
    1258  startsg->dma_address = 0;  in sba_fill_pdir()
    1261  dma_sg->dma_address = pide | ioc->ibase;  in sba_fill_pdir()
    1349  startsg->dma_address = startsg->dma_length = 0;  in sba_coalesce_chunks()
    1361  startsg->dma_address = startsg->dma_length = 0;  in sba_coalesce_chunks()
    1439  dma_sg->dma_address = (dma_addr_t)(PIDE_FLAG | (idx << iovp_shift)  in sba_coalesce_chunks()
    [all …]
/arch/arm64/mm/dma-mapping.c
    229  __dma_map_area(phys_to_virt(dma_to_phys(dev, sg->dma_address)),  in __swiotlb_map_sg_attrs()
    244  __dma_unmap_area(phys_to_virt(dma_to_phys(dev, sg->dma_address)),  in __swiotlb_unmap_sg_attrs()
    273  __dma_unmap_area(phys_to_virt(dma_to_phys(dev, sg->dma_address)),  in __swiotlb_sync_sg_for_cpu()
    287  __dma_map_area(phys_to_virt(dma_to_phys(dev, sg->dma_address)),  in __swiotlb_sync_sg_for_device()
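The arm64 entries show swiotlb glue layering cache maintenance over the generic swiotlb ops: each sg sync converts the bus address back through dma_to_phys()/phys_to_virt() and then cleans or invalidates that virtual range. A sketch of the for_device direction, assuming a non-coherent device (__dma_map_area() is arm64's low-level cache-clean routine):

    static void sync_sg_for_device(struct device *dev, struct scatterlist *sgl,
                                   int nelems, enum dma_data_direction dir)
    {
            struct scatterlist *sg;
            int i;

            /* bus address -> physical -> kernel virtual, then clean caches */
            for_each_sg(sgl, sg, nelems, i)
                    __dma_map_area(phys_to_virt(dma_to_phys(dev, sg->dma_address)),
                                   sg->length, dir);
    }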
/arch/sh/kernel/dma-nommu.c
    40   s->dma_address = sg_phys(s);  in nommu_map_sg()
/arch/powerpc/kernel/dma.c
    136  sg->dma_address = sg_phys(sg) + get_dma_offset(dev);  in dma_direct_map_sg()
    187  dma_addr_t dma_address,  in dma_direct_unmap_page() argument
/arch/powerpc/kernel/iommu.c
    526  outs->dma_address = dma_addr;  in iommu_map_sg()
    547  outs->dma_address = DMA_ERROR_CODE;  in iommu_map_sg()
    561  vaddr = s->dma_address & IOMMU_PAGE_MASK(tbl);  in iommu_map_sg()
    562  npages = iommu_num_pages(s->dma_address, s->dma_length,  in iommu_map_sg()
    565  s->dma_address = DMA_ERROR_CODE;  in iommu_map_sg()
    589  dma_addr_t dma_handle = sg->dma_address;  in iommu_unmap_sg()
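Everything in this listing exists to fill in sg->dma_address (and dma_length); drivers on the other side of the API are expected to read the results back through the sg_dma_address()/sg_dma_len() accessors rather than poke the fields directly. A typical consumer, sketched with a hypothetical hw_queue_segment() in place of real descriptor programming:

    static int drive_dma(struct device *dev, struct scatterlist *sgl, int nents)
    {
            struct scatterlist *sg;
            int i, count;

            count = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);
            if (count == 0)
                    return -ENOMEM;

            /* count may be smaller than nents if entries were coalesced */
            for_each_sg(sgl, sg, count, i)
                    hw_queue_segment(sg_dma_address(sg), sg_dma_len(sg));

            /* ... wait for the device to finish ... */

            dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
            return 0;
    }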