/kernel/linux/linux-5.10/arch/alpha/kernel/ |
D | pci_iommu.c |
    536   sg->dma_address = -1;  in sg_classify()
    539   sg->dma_address = -2;  in sg_classify()
    544   leader->dma_address = leader_flag;  in sg_classify()
    554   leader->dma_address = leader_flag;  in sg_classify()
    575   if (leader->dma_address == 0  in sg_fill()
    578   out->dma_address = paddr + __direct_map_base;  in sg_fill()
    582   __va(paddr), size, out->dma_address);  in sg_fill()
    589   if (leader->dma_address == 0 && dac_allowed) {  in sg_fill()
    590   out->dma_address = paddr + alpha_mv.pci_dac_offset;  in sg_fill()
    594   __va(paddr), size, out->dma_address);  in sg_fill()
    [all …]
|
/kernel/linux/linux-5.10/drivers/staging/gasket/ |
D | gasket_page_table.h |
    225   dma_addr_t *dma_address, uint64_t index);
    228   dma_addr_t dma_address, uint64_t index);
    247   dma_addr_t dma_address, ulong vma);
|
D | gasket_ioctl.c |
    196   ibuf.dma_address);  in gasket_config_coherent_allocator()
    206   ibuf.dma_address,  in gasket_config_coherent_allocator()
    210   &ibuf.dma_address,  in gasket_config_coherent_allocator()
|
D | gasket.h | 49 u64 dma_address; member
|
/kernel/linux/linux-5.10/drivers/gpu/drm/ttm/ |
D | ttm_tt.c |
    100   sizeof(*ttm->dma_address),  in ttm_dma_tt_alloc_page_directory()
    104   ttm->dma_address = (void *) (ttm->ttm.pages + ttm->ttm.num_pages);  in ttm_dma_tt_alloc_page_directory()
    110   ttm->dma_address = kvmalloc_array(ttm->ttm.num_pages,  in ttm_sg_tt_alloc_page_directory()
    111   sizeof(*ttm->dma_address),  in ttm_sg_tt_alloc_page_directory()
    113   if (!ttm->dma_address)  in ttm_sg_tt_alloc_page_directory()
    303   kvfree(ttm_dma->dma_address);  in ttm_dma_tt_fini()
    305   ttm_dma->dma_address = NULL;  in ttm_dma_tt_fini()
|
D | ttm_page_alloc.c |
    1118  tt->dma_address[i] = dma_map_page(dev, tt->ttm.pages[i],  in ttm_populate_and_map_pages()
    1121  if (dma_mapping_error(dev, tt->dma_address[i])) {  in ttm_populate_and_map_pages()
    1123  dma_unmap_page(dev, tt->dma_address[i],  in ttm_populate_and_map_pages()
    1125  tt->dma_address[i] = 0;  in ttm_populate_and_map_pages()
    1132  tt->dma_address[i + 1] = tt->dma_address[i] + PAGE_SIZE;  in ttm_populate_and_map_pages()
    1148  if (!tt->dma_address[i] || !tt->ttm.pages[i]) {  in ttm_unmap_and_unpopulate_pages()
    1160  dma_unmap_page(dev, tt->dma_address[i], num_pages * PAGE_SIZE,  in ttm_unmap_and_unpopulate_pages()
|
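The ttm_populate_and_map_pages() hits above show the usual pattern for an array of pages: map each page with dma_map_page(), check dma_mapping_error() immediately, and record the bus address in a dma_address[] array so it can be unmapped later. A minimal sketch of that pattern outside TTM, assuming a hypothetical helper and caller-provided device, page array, and count:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/mm.h>

/*
 * Hypothetical helper (not TTM code): map @num_pages pages for streaming
 * DMA and store each bus address in @dma_address[]. On failure, unwind
 * the pages already mapped, mirroring the hits above.
 */
static int map_page_array(struct device *dev, struct page **pages,
			  dma_addr_t *dma_address, unsigned int num_pages)
{
	unsigned int i;

	for (i = 0; i < num_pages; i++) {
		dma_address[i] = dma_map_page(dev, pages[i], 0, PAGE_SIZE,
					      DMA_BIDIRECTIONAL);
		if (dma_mapping_error(dev, dma_address[i]))
			goto unwind;
	}
	return 0;

unwind:
	while (i--)
		dma_unmap_page(dev, dma_address[i], PAGE_SIZE,
			       DMA_BIDIRECTIONAL);
	return -ENOMEM;
}

Keeping the per-page addresses around, as TTM does in tt->dma_address[], is what makes the later unmap and sync calls possible.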
/kernel/linux/linux-5.10/arch/x86/kernel/ |
D | amd_gart_64.c |
    287   gart_unmap_page(dev, s->dma_address, s->dma_length, dir, 0);  in gart_unmap_sg()
    315   s->dma_address = addr;  in dma_map_sg_nonforce()
    338   unsigned long phys_addr = s->dma_address;  in __dma_map_cont()
    342   sout->dma_address = iommu_bus_base;  in __dma_map_cont()
    343   sout->dma_address += iommu_page*PAGE_SIZE + s->offset;  in __dma_map_cont()
    368   sout->dma_address = start->dma_address;  in dma_map_cont()
    402   s->dma_address = addr;  in gart_map_sg()
    460   s->dma_address = DMA_MAPPING_ERROR;  in gart_map_sg()
|
/kernel/linux/linux-5.10/arch/sparc/kernel/ |
D | iommu.c |
    536   outs->dma_address = dma_addr;  in dma_4u_map_sg()
    549   outs->dma_address = DMA_MAPPING_ERROR;  in dma_4u_map_sg()
    561   vaddr = s->dma_address & IO_PAGE_MASK;  in dma_4u_map_sg()
    562   npages = iommu_num_pages(s->dma_address, s->dma_length,  in dma_4u_map_sg()
    575   s->dma_address = DMA_MAPPING_ERROR;  in dma_4u_map_sg()
    598   bus_addr = sg->dma_address & IO_PAGE_MASK;  in fetch_sg_ctx()
    627   dma_addr_t dma_handle = sg->dma_address;  in dma_4u_unmap_sg()
    722   iopte = iommu->page_table + ((sglist[0].dma_address -  in dma_4u_sync_sg_for_cpu()
    728   bus_addr = sglist[0].dma_address & IO_PAGE_MASK;  in dma_4u_sync_sg_for_cpu()
    736   npages = (IO_PAGE_ALIGN(sgprv->dma_address + sgprv->dma_length)  in dma_4u_sync_sg_for_cpu()
|
/kernel/linux/linux-5.10/arch/s390/pci/ |
D | pci_dma.c |
    495   s->dma_address = DMA_MAPPING_ERROR;  in s390_dma_map_sg()
    501   &dma->dma_address, dir))  in s390_dma_map_sg()
    504   dma->dma_address += offset;  in s390_dma_map_sg()
    514   if (__s390_dma_map_sg(dev, start, size, &dma->dma_address, dir))  in s390_dma_map_sg()
    517   dma->dma_address += offset;  in s390_dma_map_sg()
    538   s390_dma_unmap_pages(dev, s->dma_address, s->dma_length,  in s390_dma_unmap_sg()
    540   s->dma_address = 0;  in s390_dma_unmap_sg()
|
/kernel/linux/linux-5.10/arch/ia64/hp/common/ |
D | sba_iommu.c |
    391   startsg->dma_address, startsg->dma_length,  in sba_dump_sg()
    1214  nents, startsg->dma_address, cnt,  in sba_fill_pdir()
    1218  nents, startsg->dma_address, cnt,  in sba_fill_pdir()
    1224  if (startsg->dma_address & PIDE_FLAG) {  in sba_fill_pdir()
    1225  u32 pide = startsg->dma_address & ~PIDE_FLAG;  in sba_fill_pdir()
    1227  startsg->dma_address = 0;  in sba_fill_pdir()
    1230  dma_sg->dma_address = pide | ioc->ibase;  in sba_fill_pdir()
    1318  startsg->dma_address = startsg->dma_length = 0;  in sba_coalesce_chunks()
    1330  startsg->dma_address = startsg->dma_length = 0;  in sba_coalesce_chunks()
    1408  dma_sg->dma_address = (dma_addr_t)(PIDE_FLAG | (idx << iovp_shift)  in sba_coalesce_chunks()
    [all …]
|
/kernel/linux/linux-5.10/drivers/xen/ |
D | swiotlb-xen.c |
    499   xen_swiotlb_unmap_page(hwdev, sg->dma_address, sg_dma_len(sg),  in xen_swiotlb_unmap_sg()
    514   sg->dma_address = xen_swiotlb_map_page(dev, sg_page(sg),  in xen_swiotlb_map_sg()
    516   if (sg->dma_address == DMA_MAPPING_ERROR)  in xen_swiotlb_map_sg()
    536   xen_swiotlb_sync_single_for_cpu(dev, sg->dma_address,  in xen_swiotlb_sync_sg_for_cpu()
    549   xen_swiotlb_sync_single_for_device(dev, sg->dma_address,  in xen_swiotlb_sync_sg_for_device()
|
/kernel/linux/linux-5.10/arch/arm/kernel/ |
D | dma-isa.c |
    95    dma->buf.dma_address = dma_map_single(&isa_dma_dev,  in isa_enable_dma()
    100   address = dma->buf.dma_address;  in isa_enable_dma()
|
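The isa_enable_dma() hit uses dma_map_single() to turn one virtually contiguous kernel buffer into a bus address before programming the DMA controller; the mach-rpc dma.c hits further down follow the same shape. A minimal, hypothetical sketch of that single-buffer streaming pattern, assuming the device and a kmalloc'd buffer are supplied by the caller:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/printk.h>

/*
 * Hypothetical example, not the dma-isa.c code: map one kernel buffer
 * (must be kmalloc'd, not stack or vmalloc memory) for a device-bound
 * transfer, report the bus address, then unmap it again.
 */
static int start_one_transfer(struct device *dev, void *buf, size_t len)
{
	dma_addr_t dma_address;

	dma_address = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, dma_address))
		return -ENOMEM;

	/* in a real driver the hardware would be programmed with this address */
	pr_info("buffer mapped at bus address %pad\n", &dma_address);

	dma_unmap_single(dev, dma_address, len, DMA_TO_DEVICE);
	return 0;
}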
/kernel/linux/linux-5.10/include/linux/ |
D | pci-dma-compat.h |
    58    pci_unmap_page(struct pci_dev *hwdev, dma_addr_t dma_address,  in pci_unmap_page() argument
    61    dma_unmap_page(&hwdev->dev, dma_address, size, (enum dma_data_direction)direction);  in pci_unmap_page()
|
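As the hits show, the legacy pci_unmap_page() wrapper simply forwards to the generic dma_unmap_page() on &hwdev->dev. A short, hedged sketch of how a driver still written against the compat API in 5.10 would pair the map and unmap calls (the helper name and placeholder usage are hypothetical):

#include <linux/errno.h>
#include <linux/pci.h>

/*
 * Hypothetical example of the legacy pci_* DMA wrappers; each call
 * forwards to the generic DMA API on &pdev->dev, as pci-dma-compat.h shows.
 */
static int do_pci_page_dma(struct pci_dev *pdev, struct page *page, size_t size)
{
	dma_addr_t dma_address;

	/* forwards to dma_map_page(&pdev->dev, ...) */
	dma_address = pci_map_page(pdev, page, 0, size, PCI_DMA_TODEVICE);
	if (pci_dma_mapping_error(pdev, dma_address))
		return -ENOMEM;

	/* ... hardware would consume dma_address here ... */

	/* forwards to dma_unmap_page(&pdev->dev, ...) */
	pci_unmap_page(pdev, dma_address, size, PCI_DMA_TODEVICE);
	return 0;
}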
D | scatterlist.h |
    15    dma_addr_t dma_address;  member
    34    #define sg_dma_address(sg) ((sg)->dma_address)
|
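scatterlist.h is where the field itself lives: dma_address is a member of struct scatterlist, and sg_dma_address() is the accessor drivers use to read back the bus address that every map_sg implementation in this listing fills in. A minimal sketch of the consuming side, assuming a caller-prepared device and scatterlist (the function name is hypothetical):

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/printk.h>
#include <linux/scatterlist.h>

/*
 * Hypothetical example: map a scatterlist, then walk the mapped entries
 * and read the bus addresses the backend stored in sg->dma_address.
 */
static int map_and_show(struct device *dev, struct scatterlist *sgl, int nents)
{
	struct scatterlist *sg;
	int mapped, i;

	mapped = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);
	if (!mapped)
		return -ENOMEM;

	/* iterate the mapped count, which may be smaller than nents */
	for_each_sg(sgl, sg, mapped, i)
		pr_info("segment %d: bus %pad, len %u\n",
			i, &sg_dma_address(sg), sg_dma_len(sg));

	/* unmap with the original nents, per the DMA API documentation */
	dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
	return 0;
}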
/kernel/linux/linux-5.10/arch/sparc/mm/ |
D | iommu.c |
    256   sg->dma_address =__sbus_iommu_map_page(dev, sg_page(sg),  in __sbus_iommu_map_sg()
    258   if (sg->dma_address == DMA_MAPPING_ERROR)  in __sbus_iommu_map_sg()
    305   sbus_iommu_unmap_page(dev, sg->dma_address, sg->length, dir,  in sbus_iommu_unmap_sg()
    307   sg->dma_address = 0x21212121;  in sbus_iommu_unmap_sg()
|
D | io-unit.c |
    173   sg->dma_address = iounit_get_area(iounit, (unsigned long) sg_virt(sg), sg->length);  in iounit_map_sg()
    205   len = ((sg->dma_address & ~PAGE_MASK) + sg->length + (PAGE_SIZE-1)) >> PAGE_SHIFT;  in iounit_unmap_sg()
    206   vaddr = (sg->dma_address - IOUNIT_DMA_BASE) >> PAGE_SHIFT;  in iounit_unmap_sg()
|
/kernel/linux/linux-5.10/arch/arm/mach-rpc/ |
D | dma.c |
    78    idma->dma_addr = idma->dma.sg->dma_address;  in iomd_get_next_sg()
    172   idma->dma.buf.dma_address = dma_map_single(&isa_dma_dev,  in iomd_enable_dma()
    178   idma->dma_addr = idma->dma.sg->dma_address;  in iomd_enable_dma()
|
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_ttm.c |
    296   dma_addr_t *dma_address;  in amdgpu_ttm_map_buffer() local
    299   dma_address = &dma->dma_address[offset >> PAGE_SHIFT];  in amdgpu_ttm_map_buffer()
    300   r = amdgpu_gart_map(adev, 0, num_pages, dma_address, flags,  in amdgpu_ttm_map_buffer()
    305   dma_addr_t dma_address;  in amdgpu_ttm_map_buffer() local
    307   dma_address = (mm_node->start << PAGE_SHIFT) + offset;  in amdgpu_ttm_map_buffer()
    308   dma_address += adev->vm_manager.vram_base_offset;  in amdgpu_ttm_map_buffer()
    312   &dma_address, flags, cpu_addr);  in amdgpu_ttm_map_buffer()
    316   dma_address += PAGE_SIZE;  in amdgpu_ttm_map_buffer()
    1013  gtt->ttm.dma_address, ttm->num_pages);  in amdgpu_ttm_tt_pin_userptr()
    1075  ttm->pages, gtt->ttm.dma_address, flags);  in amdgpu_ttm_gart_bind()
    [all …]
|
D | amdgpu_gmc.c |
    53    *addr = ttm->dma_address[0];  in amdgpu_gmc_get_pde_for_bo()
    131   if (ttm->dma_address[0] + PAGE_SIZE >= adev->gmc.agp_size)  in amdgpu_gmc_agp_addr()
    134   return adev->gmc.agp_start + ttm->dma_address[0];  in amdgpu_gmc_agp_addr()
|
/kernel/linux/linux-5.10/arch/powerpc/kernel/ |
D | iommu.c |
    507   outs->dma_address = dma_addr;  in ppc_iommu_map_sg()
    528   outs->dma_address = DMA_MAPPING_ERROR;  in ppc_iommu_map_sg()
    542   vaddr = s->dma_address & IOMMU_PAGE_MASK(tbl);  in ppc_iommu_map_sg()
    543   npages = iommu_num_pages(s->dma_address, s->dma_length,  in ppc_iommu_map_sg()
    546   s->dma_address = DMA_MAPPING_ERROR;  in ppc_iommu_map_sg()
    570   dma_addr_t dma_handle = sg->dma_address;  in ppc_iommu_unmap_sg()
|
/kernel/linux/linux-5.10/arch/mips/jazz/ |
D | jazzdma.c |
    553   sg->dma_address = vdma_alloc(sg_phys(sg), sg->length);  in jazz_dma_map_sg()
    554   if (sg->dma_address == DMA_MAPPING_ERROR)  in jazz_dma_map_sg()
    571   vdma_free(sg->dma_address);  in jazz_dma_unmap_sg()
|
/kernel/linux/linux-5.10/drivers/gpu/drm/vmwgfx/ |
D | vmwgfx_ttm_buffer.c |
    356   viter->dma_address = &__vmw_piter_phys_addr;  in vmw_piter_start()
    360   viter->dma_address = &__vmw_piter_dma_addr;  in vmw_piter_start()
    366   viter->dma_address = &__vmw_piter_sg_addr;  in vmw_piter_start()
    443   vsgt->addrs = vmw_tt->dma_ttm.dma_address;  in vmw_ttm_map_dma()
|
/kernel/linux/linux-5.10/include/linux/platform_data/ |
D | dma-ste-dma40.h | 186 sg.dma_address = addr; in stedma40_slave_mem()
|
/kernel/linux/linux-5.10/kernel/dma/ |
D | direct.c |
    391   dma_direct_unmap_page(dev, sg->dma_address, sg_dma_len(sg), dir,  in dma_direct_unmap_sg()
    403   sg->dma_address = dma_direct_map_page(dev, sg_page(sg),  in dma_direct_map_sg()
    405   if (sg->dma_address == DMA_MAPPING_ERROR)  in dma_direct_map_sg()
|
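The dma_direct_map_sg() hits follow the same shape as the Xen swiotlb, sparc, and jazz ones above: map each entry with a single-page primitive, store the result in sg->dma_address, and unwind the already-mapped entries when a mapping fails. A hedged sketch of that shape, written here in terms of the generic dma_map_page()/dma_unmap_page() helpers purely for illustration; the real backends use their own primitives (dma_direct_map_page(), xen_swiotlb_map_page(), vdma_alloc(), ...), and drivers should call dma_map_sg() rather than open-code this:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/*
 * Illustrative sketch only: the common map_sg structure shared by the
 * implementations listed above. Not any one backend's actual code.
 */
static int sketch_map_sg(struct device *dev, struct scatterlist *sgl,
			 int nents, enum dma_data_direction dir)
{
	struct scatterlist *sg, *s;
	int i, j;

	for_each_sg(sgl, sg, nents, i) {
		/* map one segment and record its bus address in the entry */
		sg->dma_address = dma_map_page(dev, sg_page(sg), sg->offset,
					       sg->length, dir);
		if (dma_mapping_error(dev, sg->dma_address))
			goto out_unmap;
		sg_dma_len(sg) = sg->length;
	}
	return nents;

out_unmap:
	/* roll back the i entries that were already mapped */
	for_each_sg(sgl, s, i, j)
		dma_unmap_page(dev, s->dma_address, sg_dma_len(s), dir);
	return 0;
}

The internal implementations compare the returned address against DMA_MAPPING_ERROR directly, as the hits show; dma_mapping_error() performs the same check behind the documented interface.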
/kernel/linux/linux-5.10/include/drm/ttm/ |
D | ttm_tt.h | 105 dma_addr_t *dma_address; member
|