/drivers/gpu/drm/ttm/ |
D | ttm_tt.c |
  106  sizeof(*ttm->dma_address),  in ttm_dma_tt_alloc_page_directory()
  111  ttm->dma_address = (void *)(ttm->pages + ttm->num_pages);  in ttm_dma_tt_alloc_page_directory()
  117  ttm->dma_address = kvmalloc_array(ttm->num_pages,  in ttm_sg_tt_alloc_page_directory()
  118  sizeof(*ttm->dma_address),  in ttm_sg_tt_alloc_page_directory()
  120  if (!ttm->dma_address)  in ttm_sg_tt_alloc_page_directory()
  149  ttm->dma_address = NULL;  in ttm_tt_init_fields()
  173  kvfree(ttm->dma_address);  in ttm_tt_fini()
  175  ttm->dma_address = NULL;  in ttm_tt_fini()
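
The ttm_tt.c matches show TTM allocating one dma_addr_t per backing page with kvmalloc_array() and releasing the array with kvfree(). A minimal sketch of that allocation pattern, assuming an illustrative struct (the names below are not TTM's):

    #include <linux/mm.h>
    #include <linux/slab.h>
    #include <linux/types.h>

    struct example_tt {
            unsigned long num_pages;
            dma_addr_t *dma_address;        /* one bus address per page */
    };

    static int example_tt_alloc_dma_array(struct example_tt *tt)
    {
            /* kvmalloc_array() falls back to vmalloc for large page counts. */
            tt->dma_address = kvmalloc_array(tt->num_pages,
                                             sizeof(*tt->dma_address),
                                             GFP_KERNEL | __GFP_ZERO);
            if (!tt->dma_address)
                    return -ENOMEM;
            return 0;
    }

    static void example_tt_free_dma_array(struct example_tt *tt)
    {
            kvfree(tt->dma_address);
            tt->dma_address = NULL;
    }
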
|
D | ttm_pool.c |
  396  if (tt->dma_address)  in ttm_pool_free_range()
  397  ttm_pool_unmap(pool, tt->dma_address[i], nr);  in ttm_pool_free_range()
  423  dma_addr_t *dma_addr = tt->dma_address;  in ttm_pool_alloc()
|
/drivers/xen/ |
D | swiotlb-xen.c |
  484  xen_swiotlb_unmap_page(hwdev, sg->dma_address, sg_dma_len(sg),  in xen_swiotlb_unmap_sg()
  499  sg->dma_address = xen_swiotlb_map_page(dev, sg_page(sg),  in xen_swiotlb_map_sg()
  501  if (sg->dma_address == DMA_MAPPING_ERROR)  in xen_swiotlb_map_sg()
  521  xen_swiotlb_sync_single_for_cpu(dev, sg->dma_address,  in xen_swiotlb_sync_sg_for_cpu()
  534  xen_swiotlb_sync_single_for_device(dev, sg->dma_address,  in xen_swiotlb_sync_sg_for_device()
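
xen_swiotlb_map_sg() above walks a scatterlist, maps each entry, stores the result in sg->dma_address, and checks it against DMA_MAPPING_ERROR. A hedged sketch of that generic pattern using the standard DMA API (simplified error handling, illustrative function name):

    #include <linux/scatterlist.h>
    #include <linux/dma-mapping.h>

    static int example_map_sg(struct device *dev, struct scatterlist *sgl,
                              int nents, enum dma_data_direction dir)
    {
            struct scatterlist *sg;
            int i, j;

            for_each_sg(sgl, sg, nents, i) {
                    sg->dma_address = dma_map_page(dev, sg_page(sg), sg->offset,
                                                   sg->length, dir);
                    if (dma_mapping_error(dev, sg->dma_address))
                            goto out_unmap;
                    sg_dma_len(sg) = sg->length;
            }
            return nents;

    out_unmap:
            /* Undo the i entries mapped so far; 0 mapped entries signals failure. */
            for_each_sg(sgl, sg, i, j)
                    dma_unmap_page(dev, sg->dma_address, sg_dma_len(sg), dir);
            return 0;
    }
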
|
/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_ttm.c |
  248  dma_addr = &bo->ttm->dma_address[mm_cur->start >> PAGE_SHIFT];  in amdgpu_ttm_map_buffer()
  254  dma_addr_t dma_address;  in amdgpu_ttm_map_buffer()  local
  256  dma_address = mm_cur->start;  in amdgpu_ttm_map_buffer()
  257  dma_address += adev->vm_manager.vram_base_offset;  in amdgpu_ttm_map_buffer()
  261  &dma_address, flags, cpu_addr);  in amdgpu_ttm_map_buffer()
  265  dma_address += PAGE_SIZE;  in amdgpu_ttm_map_buffer()
  781  drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address,  in amdgpu_ttm_tt_pin_userptr()
  843  gtt->ttm.dma_address, flags);  in amdgpu_ttm_gart_bind()
  857  &(gtt->ttm.dma_address[page_idx]), flags);  in amdgpu_ttm_gart_bind()
  860  gtt->ttm.dma_address, flags);  in amdgpu_ttm_gart_bind()
  [all …]
|
D | amdgpu_gmc.c |
  106  *addr = bo->tbo.ttm->dma_address[0];  in amdgpu_gmc_get_pde_for_bo()
  188  if (bo->ttm->dma_address[0] + PAGE_SIZE >= adev->gmc.agp_size)  in amdgpu_gmc_agp_addr()
  191  return adev->gmc.agp_start + bo->ttm->dma_address[0];  in amdgpu_gmc_agp_addr()
|
D | amdgpu_vram_mgr.c |
  597  dma_unmap_resource(dev, sg->dma_address,  in amdgpu_vram_mgr_alloc_sgt()
  625  dma_unmap_resource(dev, sg->dma_address,  in amdgpu_vram_mgr_free_sgt()
|
/drivers/hid/amd-sfh-hid/ |
D | amd_sfh_pcie.c |
  62   writeq(info.dma_address, privdata->mmio + AMD_C2P_MSG1);  in amd_start_sensor_v2()
  151  writeq(info.dma_address, privdata->mmio + AMD_C2P_MSG2);  in amd_start_sensor()
  374  info.dma_address = cl_data->sensor_dma_addr[i];  in amd_mp2_pci_resume()
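
amd_start_sensor_v2() above hands a DMA address to the device through an MMIO message register. A minimal sketch of that handshake, assuming a coherent buffer and a hypothetical register offset (EXAMPLE_DMA_ADDR_REG is illustrative, not a real AMD SFH register; writeq assumes a 64-bit capable build):

    #include <linux/dma-mapping.h>
    #include <linux/io.h>

    #define EXAMPLE_DMA_ADDR_REG 0x10008     /* hypothetical doorbell offset */

    static int example_share_buffer(struct device *dev, void __iomem *mmio,
                                    size_t size, void **cpu_addr)
    {
            dma_addr_t dma_address;

            /* Coherent memory: CPU and device see each other's writes without syncs. */
            *cpu_addr = dma_alloc_coherent(dev, size, &dma_address, GFP_KERNEL);
            if (!*cpu_addr)
                    return -ENOMEM;

            /* Publish the bus address to the device. */
            writeq(dma_address, mmio + EXAMPLE_DMA_ADDR_REG);
            return 0;
    }
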
|
D | amd_sfh_pcie.h | 111 dma_addr_t dma_address; member
|
D | amd_sfh_client.c | 204 info.dma_address = cl_data->sensor_dma_addr[i]; in amd_sfh_hid_client_init()
|
/drivers/gpu/drm/vmwgfx/ |
D | vmwgfx_ttm_buffer.c |
  239  viter->dma_address = &__vmw_piter_dma_addr;  in vmw_piter_start()
  245  viter->dma_address = &__vmw_piter_sg_addr;  in vmw_piter_start()
  321  vsgt->addrs = vmw_tt->dma_ttm.dma_address;  in vmw_ttm_map_dma()
|
/drivers/mmc/host/ |
D | wmt-sdmmc.c |
  573  u32 dma_address;  in wmt_mci_request()  local
  633  dma_address = priv->dma_desc_device_addr + 16;  in wmt_mci_request()
  641  dma_address, 0);  in wmt_mci_request()
  645  dma_address += 16;  in wmt_mci_request()
|
/drivers/dma/ |
D | imx-dma.c |
  273  imx_dmav1_writel(imxdma, sg->dma_address,  in imxdma_sg_next()
  276  imx_dmav1_writel(imxdma, sg->dma_address,  in imxdma_sg_next()
  819  if (sg_dma_len(sgl) & 3 || sgl->dma_address & 3)  in imxdma_prep_slave_sg()
  823  if (sg_dma_len(sgl) & 1 || sgl->dma_address & 1)  in imxdma_prep_slave_sg()
  880  imxdmac->sg_list[i].dma_address = dma_addr;  in imxdma_prep_dma_cyclic()
|
D | imx-sdma.c |
  1524  bd->buffer_addr = sg->dma_address;  in sdma_prep_slave_sg()
  1543  if (count & 3 || sg->dma_address & 3)  in sdma_prep_slave_sg()
  1548  if (count & 1 || sg->dma_address & 1)  in sdma_prep_slave_sg()
  1567  i, count, (u64)sg->dma_address,  in sdma_prep_slave_sg()
|
/drivers/scsi/lpfc/ |
D | lpfc_hw4.h |
  63    struct dma_address {  struct
  1194  struct dma_address page[LPFC_MAX_EQ_PAGE];
  1267  struct dma_address lwpd;
  1268  struct dma_address buff_fwlog[LPFC_MAX_FWLOG_PAGE];
  1318  struct dma_address page[LPFC_MAX_CQ_PAGE];
  1428  struct dma_address page[1];
  1478  struct dma_address page[LPFC_MAX_WQ_PAGE_V0];
  1512  struct dma_address page[LPFC_MAX_WQ_PAGE-1];
  1636  struct dma_address page[LPFC_MAX_RQ_PAGE];
  1688  struct dma_address page[1];
  [all …]
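
lpfc_hw4.h defines a dma_address structure that the queue-create mailbox commands use for their page arrays. A sketch of the general idea, splitting a 64-bit bus address into two 32-bit words; the field names and endianness below are illustrative, not lpfc's exact layout:

    #include <linux/kernel.h>
    #include <linux/types.h>
    #include <asm/byteorder.h>

    struct example_dma_address {
            __le32 addr_lo;
            __le32 addr_hi;
    };

    /* Fill one page entry of a queue-create command with a bus address. */
    static void example_set_page(struct example_dma_address *page, dma_addr_t addr)
    {
            page->addr_lo = cpu_to_le32(lower_32_bits(addr));
            page->addr_hi = cpu_to_le32(upper_32_bits(addr));
    }
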
|
D | lpfc_sli4.h | 427 struct dma_address dma_address; member
|
/drivers/atm/ |
D | nicstar.h |
  309  u32 dma_address;  member
  346  u32 dma_address;  member
|
D | idt77252.h | 236 u32 dma_address; member
|
/drivers/gpu/drm/nouveau/ |
D | nouveau_bo.c |
  557   if (!ttm_dma || !ttm_dma->dma_address)  in nouveau_bo_sync_for_device()
  580   ttm_dma->dma_address[i],  in nouveau_bo_sync_for_device()
  593   if (!ttm_dma || !ttm_dma->dma_address)  in nouveau_bo_sync_for_cpu()
  616   dma_sync_single_for_cpu(drm->dev->dev, ttm_dma->dma_address[i],  in nouveau_bo_sync_for_cpu()
  1270  drm_prime_sg_to_dma_addr_array(ttm->sg, ttm_dma->dma_address,  in nouveau_ttm_tt_populate()
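
nouveau_bo_sync_for_device()/for_cpu() above walk the per-page dma_address array and sync each page of a streaming mapping. A sketch of the device-direction half of that pattern (illustrative names):

    #include <linux/dma-mapping.h>
    #include <linux/mm.h>

    static void example_sync_pages_for_device(struct device *dev,
                                              const dma_addr_t *dma_address,
                                              unsigned long num_pages)
    {
            unsigned long i;

            /* Flush CPU writes so the device sees up-to-date page contents. */
            for (i = 0; i < num_pages; i++)
                    dma_sync_single_for_device(dev, dma_address[i], PAGE_SIZE,
                                               DMA_TO_DEVICE);
    }
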
|
D | nouveau_mem.c | 114 args.dma = tt->dma_address; in nouveau_mem_host()
|
/drivers/net/fddi/skfp/ |
D | skfddi.c |
  1112  dma_addr_t dma_address;  in send_queued_packets()  local
  1177  dma_address = dma_map_single(&(&bp->pdev)->dev, skb->data,  in send_queued_packets()
  1181  txd->txd_os.dma_addr = dma_address; // save dma mapping  in send_queued_packets()
  1183  hwm_tx_frag(smc, skb->data, dma_address, skb->len,  in send_queued_packets()
  1187  dma_unmap_single(&(&bp->pdev)->dev, dma_address,  in send_queued_packets()
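
send_queued_packets() above maps each outgoing frame with dma_map_single() and stores the handle in a TX descriptor. A sketch of that streaming-DMA pattern (descriptor handling omitted, names illustrative):

    #include <linux/dma-mapping.h>
    #include <linux/skbuff.h>

    static int example_map_tx_frame(struct device *dev, struct sk_buff *skb,
                                    dma_addr_t *dma_address)
    {
            *dma_address = dma_map_single(dev, skb->data, skb->len, DMA_TO_DEVICE);
            if (dma_mapping_error(dev, *dma_address))
                    return -ENOMEM;

            /* Hand *dma_address to the hardware descriptor here; call
             * dma_unmap_single() only after the device has sent the frame. */
            return 0;
    }
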
|
/drivers/gpu/drm/radeon/ |
D | radeon_ttm.c |
  386  drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address,  in radeon_ttm_tt_pin_userptr()
  461  ttm->pages, gtt->ttm.dma_address, flags);  in radeon_ttm_backend_bind()
  563  drm_prime_sg_to_dma_addr_array(ttm->sg, gtt->ttm.dma_address,  in radeon_ttm_tt_populate()
|
/drivers/usb/serial/ |
D | io_ti.c |
  91    u16 dma_address;  member
  544   status = read_ram(port->port->serial->dev, port->dma_address,  in tx_active()
  2559  edge_port->dma_address = UMPD_OEDB1_ADDRESS;  in edge_port_probe()
  2563  edge_port->dma_address = UMPD_OEDB2_ADDRESS;  in edge_port_probe()
  2574  edge_port->dma_address);  in edge_port_probe()
|
/drivers/iommu/ |
D | dma-iommu.c |
  709  sgt->sgl->dma_address = iova;  in __iommu_dma_alloc_noncontiguous()
  734  *dma_handle = sgt.sgl->dma_address;  in iommu_dma_alloc_remap()
  773  __iommu_dma_unmap(dev, sgt->sgl->dma_address, size);  in iommu_dma_free_noncontiguous()
|
/drivers/crypto/ |
D | sahara.c |
  503  dev->hw_link[i]->p = sg->dma_address;  in sahara_hw_descriptor_create()
  519  dev->hw_link[j]->p = sg->dma_address;  in sahara_hw_descriptor_create()
  805  dev->hw_link[i]->p = sg->dma_address;  in sahara_sha_hw_links_create()
|
/drivers/scsi/bnx2fc/ |
D | bnx2fc_hwi.c |
  1995  dma_addr_t dma_address;  in bnx2fc_free_hash_table()  local
  1997  dma_address = le32_to_cpu(*pbl);  in bnx2fc_free_hash_table()
  1999  dma_address += ((u64)le32_to_cpu(*pbl)) << 32;  in bnx2fc_free_hash_table()
  2004  dma_address);  in bnx2fc_free_hash_table()
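
bnx2fc_free_hash_table() above rebuilds each 64-bit DMA address from two consecutive 32-bit little-endian page-buffer-list words before freeing the page. A sketch of that reassembly (the low/high word ordering is assumed from the snippet):

    #include <linux/types.h>
    #include <asm/byteorder.h>

    /* Each PBL entry stores a bus address as two le32 words: low, then high. */
    static dma_addr_t example_read_pbl_entry(const __le32 *pbl)
    {
            dma_addr_t dma_address;

            dma_address = le32_to_cpu(pbl[0]);
            dma_address += ((u64)le32_to_cpu(pbl[1])) << 32;
            return dma_address;
    }
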
|