/drivers/staging/android/ion/
ion_cma_heap.c
    37  void *cpu_addr;  member
    62  info->cpu_addr = dma_alloc_coherent(dev, len, &(info->handle),  in ion_cma_allocate()
    65  if (!info->cpu_addr) {  in ion_cma_allocate()
    74  if (dma_get_sgtable(dev, info->table, info->cpu_addr, info->handle,  in ion_cma_allocate()
    85  dma_free_coherent(dev, len, info->cpu_addr, info->handle);  in ion_cma_allocate()
    98  dma_free_coherent(dev, buffer->size, info->cpu_addr, info->handle);  in ion_cma_free()
   112  return dma_mmap_coherent(dev, vma, info->cpu_addr, info->handle,  in ion_cma_mmap()
   121  return info->cpu_addr;  in ion_cma_map_kernel()

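Taken together, the ion hits trace the full coherent-buffer lifecycle: allocate, describe the allocation as an sg_table so other subsystems can map it, and free on any error path. A minimal sketch of that lifecycle, with hypothetical my_buf/my_buf_alloc names, assuming this era's dma_alloc_coherent()/dma_get_sgtable() signatures:

```c
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/errno.h>

/* Hypothetical holder mirroring ion's per-buffer info. */
struct my_buf {
	void *cpu_addr;		/* kernel virtual address */
	dma_addr_t handle;	/* device-visible address */
	struct sg_table table;
};

static int my_buf_alloc(struct device *dev, struct my_buf *buf, size_t len)
{
	int ret;

	buf->cpu_addr = dma_alloc_coherent(dev, len, &buf->handle, GFP_KERNEL);
	if (!buf->cpu_addr)
		return -ENOMEM;

	/* Describe the allocation as an sg_table so it can be shared. */
	ret = dma_get_sgtable(dev, &buf->table, buf->cpu_addr,
			      buf->handle, len);
	if (ret) {
		dma_free_coherent(dev, len, buf->cpu_addr, buf->handle);
		return ret;
	}
	return 0;
}
```
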
/drivers/base/
dma-mapping.c
   231  void *cpu_addr, dma_addr_t handle, size_t size)  in dma_common_get_sgtable() argument
   233  struct page *page = virt_to_page(cpu_addr);  in dma_common_get_sgtable()
   249  void *cpu_addr, dma_addr_t dma_addr, size_t size)  in dma_common_mmap() argument
   255  unsigned long pfn = page_to_pfn(virt_to_page(cpu_addr));  in dma_common_mmap()
   260  if (dma_mmap_from_coherent(dev, vma, cpu_addr, size, &ret))  in dma_common_mmap()
   331  void dma_common_free_remap(void *cpu_addr, size_t size, unsigned long vm_flags)  in dma_common_free_remap() argument
   333  struct vm_struct *area = find_vm_area(cpu_addr);  in dma_common_free_remap()
   336  WARN(1, "trying to free invalid coherent area: %p\n", cpu_addr);  in dma_common_free_remap()
   340  unmap_kernel_range((unsigned long)cpu_addr, PAGE_ALIGN(size));  in dma_common_free_remap()
   341  vunmap(cpu_addr);  in dma_common_free_remap()

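dma_common_mmap() is the generic fallback behind dma_mmap_coherent(): it derives the pfn from cpu_addr via virt_to_page() and remaps it into the vma. A hypothetical char-device mmap hook showing how a driver reaches it (struct my_dev and its fields are illustrative):

```c
#include <linux/dma-mapping.h>
#include <linux/fs.h>
#include <linux/mm.h>

struct my_dev {			/* hypothetical driver state */
	struct device *dev;
	void *cpu_addr;
	dma_addr_t handle;
	size_t size;
};

/* Hand the coherent buffer to user space; on most architectures this
 * ends up in dma_common_mmap() above. */
static int my_mmap(struct file *file, struct vm_area_struct *vma)
{
	struct my_dev *md = file->private_data;

	return dma_mmap_coherent(md->dev, vma, md->cpu_addr,
				 md->handle, md->size);
}
```
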
/drivers/media/common/saa7146/
saa7146_core.c
   427  dev->d_rps0.cpu_addr = pci_zalloc_consistent(pci, SAA7146_RPS_MEM,  in saa7146_init_one()
   429  if (!dev->d_rps0.cpu_addr)  in saa7146_init_one()
   432  dev->d_rps1.cpu_addr = pci_zalloc_consistent(pci, SAA7146_RPS_MEM,  in saa7146_init_one()
   434  if (!dev->d_rps1.cpu_addr)  in saa7146_init_one()
   437  dev->d_i2c.cpu_addr = pci_zalloc_consistent(pci, SAA7146_RPS_MEM,  in saa7146_init_one()
   439  if (!dev->d_i2c.cpu_addr)  in saa7146_init_one()
   486  pci_free_consistent(pci, SAA7146_RPS_MEM, dev->d_i2c.cpu_addr,  in saa7146_init_one()
   489  pci_free_consistent(pci, SAA7146_RPS_MEM, dev->d_rps1.cpu_addr,  in saa7146_init_one()
   492  pci_free_consistent(pci, SAA7146_RPS_MEM, dev->d_rps0.cpu_addr,  in saa7146_init_one()
   515  { dev->d_i2c.cpu_addr, dev->d_i2c.dma_handle },  in saa7146_remove_one()
  [all …]

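saa7146_init_one() shows the classic multi-buffer pattern with the legacy PCI DMA-compat API: several pci_zalloc_consistent() calls, unwound in reverse order when a later one fails. A two-buffer sketch of that unwinding (my_init and its parameters are illustrative):

```c
#include <linux/pci.h>
#include <linux/errno.h>

static int my_init(struct pci_dev *pci, size_t len,
		   void **a, dma_addr_t *a_dma,
		   void **b, dma_addr_t *b_dma)
{
	*a = pci_zalloc_consistent(pci, len, a_dma);
	if (!*a)
		return -ENOMEM;

	*b = pci_zalloc_consistent(pci, len, b_dma);
	if (!*b) {
		/* Unwind in reverse order of allocation. */
		pci_free_consistent(pci, len, *a, *a_dma);
		return -ENOMEM;
	}
	return 0;
}
```
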
/drivers/net/wireless/ath/wcn36xx/
dxe.c
   183  wcn_ch->cpu_addr = dma_alloc_coherent(dev, size, &wcn_ch->dma_addr,  in wcn36xx_dxe_init_descs()
   185  if (!wcn_ch->cpu_addr)  in wcn36xx_dxe_init_descs()
   188  memset(wcn_ch->cpu_addr, 0, size);  in wcn36xx_dxe_init_descs()
   190  cur_dxe = (struct wcn36xx_dxe_desc *)wcn_ch->cpu_addr;  in wcn36xx_dxe_init_descs()
   540  void *cpu_addr;  in wcn36xx_dxe_allocate_mem_pools() local
   549  cpu_addr = dma_alloc_coherent(wcn->dev, s, &wcn->mgmt_mem_pool.phy_addr,  in wcn36xx_dxe_allocate_mem_pools()
   551  if (!cpu_addr)  in wcn36xx_dxe_allocate_mem_pools()
   554  wcn->mgmt_mem_pool.virt_addr = cpu_addr;  in wcn36xx_dxe_allocate_mem_pools()
   555  memset(cpu_addr, 0, s);  in wcn36xx_dxe_allocate_mem_pools()
   564  cpu_addr = dma_alloc_coherent(wcn->dev, s, &wcn->data_mem_pool.phy_addr,  in wcn36xx_dxe_allocate_mem_pools()
  [all …]

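The repeated dma_alloc_coherent() + memset() pairs in dxe.c can be collapsed: on kernels of this vintage, dma_zalloc_coherent() returns already-zeroed memory. A sketch only, with an illustrative wrapper name:

```c
#include <linux/dma-mapping.h>

/* One call instead of alloc + memset; the descriptor ring comes back
 * zeroed, ready to be cast to the hardware descriptor layout. */
static void *my_alloc_descs(struct device *dev, size_t size,
			    dma_addr_t *dma_addr)
{
	return dma_zalloc_coherent(dev, size, dma_addr, GFP_KERNEL);
}
```
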
/drivers/gpu/drm/amd/amdgpu/
amdgpu_fence.c
   100  if (drv->cpu_addr)  in amdgpu_fence_write()
   101  *drv->cpu_addr = cpu_to_le32(seq);  in amdgpu_fence_write()
   117  if (drv->cpu_addr)  in amdgpu_fence_read()
   118  seq = le32_to_cpu(*drv->cpu_addr);  in amdgpu_fence_read()
   328  ring->fence_drv.cpu_addr = &adev->wb.wb[ring->fence_offs];  in amdgpu_fence_driver_start_ring()
   333  ring->fence_drv.cpu_addr = adev->uvd.cpu_addr + index;  in amdgpu_fence_driver_start_ring()
   345  ring->fence_drv.gpu_addr, ring->fence_drv.cpu_addr);  in amdgpu_fence_driver_start_ring()
   369  ring->fence_drv.cpu_addr = NULL;  in amdgpu_fence_driver_init_ring()

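Here cpu_addr points at a fence word that both CPU and GPU observe, so it is kept little-endian and every CPU access goes through cpu_to_le32()/le32_to_cpu(), guarded against rings whose fence slot is not yet set up. A sketch of that read/write pair (function names are illustrative):

```c
#include <linux/types.h>
#include <asm/byteorder.h>

static void my_fence_write(__le32 *cpu_addr, u32 seq)
{
	if (cpu_addr)
		*cpu_addr = cpu_to_le32(seq);
}

static u32 my_fence_read(const __le32 *cpu_addr)
{
	/* 0 stands in for the driver's "no fence slot yet" fallback. */
	return cpu_addr ? le32_to_cpu(*cpu_addr) : 0;
}
```
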
amdgpu_object.c
   245  u64 *gpu_addr, void **cpu_addr)  in amdgpu_bo_create_kernel() argument
   269  if (cpu_addr) {  in amdgpu_bo_create_kernel()
   270  r = amdgpu_bo_kmap(*bo_ptr, cpu_addr);  in amdgpu_bo_create_kernel()
   298  void **cpu_addr)  in amdgpu_bo_free_kernel() argument
   304  if (cpu_addr)  in amdgpu_bo_free_kernel()
   315  if (cpu_addr)  in amdgpu_bo_free_kernel()
   316  *cpu_addr = NULL;  in amdgpu_bo_free_kernel()

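amdgpu_bo_create_kernel() hands the same buffer back under two names: gpu_addr for command submission and cpu_addr (kmapped) for CPU writes. A hedged usage sketch, assuming amdgpu's internal headers and this era's signature; my_alloc_wb() and the size/domain choice are illustrative:

```c
/* Assumes amdgpu's internal "amdgpu.h"; not a standalone module. */
static int my_alloc_wb(struct amdgpu_device *adev, struct amdgpu_bo **bo,
		       u64 *gpu_addr, void **cpu_addr)
{
	int r;

	r = amdgpu_bo_create_kernel(adev, PAGE_SIZE, PAGE_SIZE,
				    AMDGPU_GEM_DOMAIN_GTT, bo,
				    gpu_addr, cpu_addr);
	if (r)
		return r;

	memset(*cpu_addr, 0, PAGE_SIZE);	/* CPU side is now mappable */
	return 0;
	/* teardown: amdgpu_bo_free_kernel(bo, gpu_addr, cpu_addr); */
}
```
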
amdgpu_object.h
   132  u64 *gpu_addr, void **cpu_addr);
   134  void **cpu_addr);

/drivers/pci/host/
pci-xgene.c
   275  u64 cpu_addr, u64 pci_addr)  in xgene_pcie_setup_ob_reg() argument
   297  xgene_pcie_writel(port, offset, lower_32_bits(cpu_addr));  in xgene_pcie_setup_ob_reg()
   298  xgene_pcie_writel(port, offset + 0x04, upper_32_bits(cpu_addr));  in xgene_pcie_setup_ob_reg()
   400  u64 cpu_addr = range->cpu_addr;  in xgene_pcie_setup_ib_reg() local
   417  bar_low = pcie_bar_low_val((u32)cpu_addr, flags);  in xgene_pcie_setup_ib_reg()
   423  writel(upper_32_bits(cpu_addr), bar_addr + 0x4);  in xgene_pcie_setup_ib_reg()
   433  xgene_pcie_writel(port, IBAR3L + 0x4, upper_32_bits(cpu_addr));  in xgene_pcie_setup_ib_reg()
   476  u64 end = range.cpu_addr + range.size - 1;  in xgene_pcie_parse_map_dma_ranges()
   479  range.flags, range.cpu_addr, end, range.pci_addr);  in xgene_pcie_parse_map_dma_ranges()

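In PCI host controllers, a 64-bit CPU address is programmed as a lo/hi register pair, split with lower_32_bits()/upper_32_bits(). A generic sketch of that split (my_setup_window() and the offset layout are illustrative):

```c
#include <linux/io.h>
#include <linux/kernel.h>

static void my_setup_window(void __iomem *base, u32 offset, u64 cpu_addr)
{
	writel(lower_32_bits(cpu_addr), base + offset);		/* low half  */
	writel(upper_32_bits(cpu_addr), base + offset + 0x04);	/* high half */
}
```
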
pcie-designware.c
   194  int type, u64 cpu_addr, u64 pci_addr, u32 size)  in dw_pcie_prog_outbound_atu() argument
   200  lower_32_bits(cpu_addr));  in dw_pcie_prog_outbound_atu()
   202  upper_32_bits(cpu_addr));  in dw_pcie_prog_outbound_atu()
   204  lower_32_bits(cpu_addr + size - 1));  in dw_pcie_prog_outbound_atu()
   217  lower_32_bits(cpu_addr));  in dw_pcie_prog_outbound_atu()
   219  upper_32_bits(cpu_addr));  in dw_pcie_prog_outbound_atu()
   221  lower_32_bits(cpu_addr + size - 1));  in dw_pcie_prog_outbound_atu()
   684  u64 cpu_addr;  in dw_pcie_rd_other_conf() local
   695  cpu_addr = pp->cfg0_base;  in dw_pcie_rd_other_conf()
   700  cpu_addr = pp->cfg1_base;  in dw_pcie_rd_other_conf()
  [all …]

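dw_pcie_prog_outbound_atu() programs one iATU window: base (lo/hi), limit (base + size - 1), then the PCI-side target. A self-contained sketch of that ordering; my_atu_write() and the MY_ATU_* offsets are hypothetical stand-ins in the DesignWare style:

```c
#include <linux/io.h>
#include <linux/kernel.h>

#define MY_ATU_LOWER_BASE	0x90c
#define MY_ATU_UPPER_BASE	0x910
#define MY_ATU_LIMIT		0x914
#define MY_ATU_LOWER_TARGET	0x918
#define MY_ATU_UPPER_TARGET	0x91c

static void my_atu_write(void __iomem *dbi, u32 reg, u32 val)
{
	writel(val, dbi + reg);
}

static void my_prog_atu(void __iomem *dbi, u64 cpu_addr, u64 pci_addr,
			u32 size)
{
	my_atu_write(dbi, MY_ATU_LOWER_BASE, lower_32_bits(cpu_addr));
	my_atu_write(dbi, MY_ATU_UPPER_BASE, upper_32_bits(cpu_addr));
	my_atu_write(dbi, MY_ATU_LIMIT, lower_32_bits(cpu_addr + size - 1));
	my_atu_write(dbi, MY_ATU_LOWER_TARGET, lower_32_bits(pci_addr));
	my_atu_write(dbi, MY_ATU_UPPER_TARGET, upper_32_bits(pci_addr));
}
```
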
pcie-rcar.c
   968  u64 cpu_addr = range->cpu_addr;  in rcar_pcie_inbound_ranges() local
   969  u64 cpu_end = range->cpu_addr + range->size;  in rcar_pcie_inbound_ranges()
   983  if (cpu_addr > 0) {  in rcar_pcie_inbound_ranges()
   984  unsigned long nr_zeros = __ffs64(cpu_addr);  in rcar_pcie_inbound_ranges()
   997  while (cpu_addr < cpu_end) {  in rcar_pcie_inbound_ranges()
  1004  rcar_pci_write_reg(pcie, lower_32_bits(cpu_addr), PCIELAR(idx));  in rcar_pcie_inbound_ranges()
  1010  rcar_pci_write_reg(pcie, upper_32_bits(cpu_addr),  in rcar_pcie_inbound_ranges()
  1015  cpu_addr += size;  in rcar_pcie_inbound_ranges()
  1059  u64 end = range.cpu_addr + range.size - 1;  in rcar_pcie_parse_map_dma_ranges()
  1062  range.flags, range.cpu_addr, end, range.pci_addr);  in rcar_pcie_parse_map_dma_ranges()

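rcar's inbound windows must be power-of-two sized and naturally aligned, so the loop splits the dma-range: __ffs64(cpu_addr) counts the trailing zero bits, i.e. the largest alignment the current address supports. A sketch of that sizing rule, assuming a non-zero remaining length (my_next_window() is illustrative):

```c
#include <linux/bitops.h>
#include <linux/kernel.h>

static u64 my_next_window(u64 cpu_addr, u64 remaining)
{
	/* Largest power of two not exceeding the remaining length. */
	u64 size = 1ULL << (fls64(remaining) - 1);

	/* Cap it so the window stays naturally aligned on cpu_addr. */
	if (cpu_addr > 0) {
		unsigned long nr_zeros = __ffs64(cpu_addr);

		size = min_t(u64, size, 1ULL << nr_zeros);
	}
	return size;	/* caller programs the window, then cpu_addr += size */
}
```
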
/drivers/infiniband/hw/hfi1/
dma.c
    68  static u64 hfi1_dma_map_single(struct ib_device *dev, void *cpu_addr,  in hfi1_dma_map_single() argument
    74  return (u64)cpu_addr;  in hfi1_dma_map_single()
   166  void *cpu_addr, u64 dma_handle)  in hfi1_dma_free_coherent() argument
   168  free_pages((unsigned long)cpu_addr, get_order(size));  in hfi1_dma_free_coherent()

/drivers/infiniband/hw/qib/
qib_dma.c
    53  static u64 qib_dma_map_single(struct ib_device *dev, void *cpu_addr,  in qib_dma_map_single() argument
    57  return (u64) cpu_addr;  in qib_dma_map_single()
   152  void *cpu_addr, u64 dma_handle)  in qib_dma_free_coherent() argument
   154  free_pages((unsigned long) cpu_addr, get_order(size));  in qib_dma_free_coherent()

/drivers/infiniband/sw/rdmavt/
dma.c
    69  static u64 rvt_dma_map_single(struct ib_device *dev, void *cpu_addr,  in rvt_dma_map_single() argument
    75  return (u64)cpu_addr;  in rvt_dma_map_single()
   179  void *cpu_addr, u64 dma_handle)  in rvt_dma_free_coherent() argument
   181  free_pages((unsigned long)cpu_addr, get_order(size));  in rvt_dma_free_coherent()

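hfi1 and qib above, rdmavt here, and rxe below all implement the same no-op DMA layer: nothing is ever handed to real hardware, so "mapping" just casts the kernel virtual address to u64 and freeing releases the pages. A minimal pair in that style (my_* names are illustrative):

```c
#include <linux/mm.h>
#include <linux/types.h>
#include <linux/dma-direction.h>
#include <rdma/ib_verbs.h>

static u64 my_dma_map_single(struct ib_device *dev, void *cpu_addr,
			     size_t size, enum dma_data_direction dir)
{
	/* Identity "mapping": no device will ever consume this value. */
	return (u64)(uintptr_t)cpu_addr;
}

static void my_dma_free_coherent(struct ib_device *dev, size_t size,
				 void *cpu_addr, u64 dma_handle)
{
	free_pages((unsigned long)cpu_addr, get_order(size));
}
```
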
/drivers/remoteproc/
wkup_m3_rproc.c
    43  void __iomem *cpu_addr;  member
   106  va = (__force void *)(wkupm3->mem[i].cpu_addr + offset);  in wkup_m3_rproc_da_to_va()
   179  wkupm3->mem[i].cpu_addr = devm_ioremap_resource(dev, res);  in wkup_m3_rproc_probe()
   180  if (IS_ERR(wkupm3->mem[i].cpu_addr)) {  in wkup_m3_rproc_probe()
   183  ret = PTR_ERR(wkupm3->mem[i].cpu_addr);  in wkup_m3_rproc_probe()

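In this driver cpu_addr is an ioremap cookie rather than DMA memory: probe maps each memory resource, and da_to_va later returns cookie + offset, force-cast to a plain pointer for the remoteproc core. A sketch with illustrative names:

```c
#include <linux/device.h>
#include <linux/io.h>
#include <linux/platform_device.h>

static void __iomem *my_map_mem(struct platform_device *pdev, int idx)
{
	struct resource *res;

	res = platform_get_resource(pdev, IORESOURCE_MEM, idx);
	return devm_ioremap_resource(&pdev->dev, res); /* ERR_PTR on failure */
}

static void *my_da_to_va(void __iomem *cpu_addr, unsigned long offset)
{
	/* __force: the remoteproc core wants an ordinary pointer. */
	return (__force void *)(cpu_addr + offset);
}
```
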
/drivers/infiniband/sw/rxe/
rxe_dma.c
    45  void *cpu_addr, size_t size,  in rxe_dma_map_single() argument
    49  return (uintptr_t)cpu_addr;  in rxe_dma_map_single()
   164  void *cpu_addr, u64 dma_handle)  in rxe_dma_free_coherent() argument
   166  free_pages((unsigned long)cpu_addr, get_order(size));  in rxe_dma_free_coherent()

/drivers/isdn/hardware/eicon/
divasmain.c
   358  void *cpu_addr;  in diva_init_dma_map() local
   362  if (!(cpu_addr = diva_pci_alloc_consistent(pdev,  in diva_init_dma_map()
   369  diva_init_dma_map_entry(pmap, i, cpu_addr,  in diva_init_dma_map()
   373  i, (unsigned long) cpu_addr,  in diva_init_dma_map()
   390  void *cpu_addr;  in diva_free_dma_map() local
   395  diva_get_dma_map_entry(pmap, i, &cpu_addr, &phys_addr);  in diva_free_dma_map()
   396  if (!cpu_addr) {  in diva_free_dma_map()
   404  (unsigned long) cpu_addr, (dword) dma_handle,  in diva_free_dma_map()

/drivers/gpu/drm/radeon/
radeon_fence.c
    66  if (drv->cpu_addr) {  in radeon_fence_write()
    67  *drv->cpu_addr = cpu_to_le32(seq);  in radeon_fence_write()
    89  if (drv->cpu_addr) {  in radeon_fence_read()
    90  seq = le32_to_cpu(*drv->cpu_addr);  in radeon_fence_read()
   835  rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4];  in radeon_fence_driver_start_ring()
   842  rdev->fence_drv[ring].cpu_addr = rdev->uvd.cpu_addr + index;  in radeon_fence_driver_start_ring()
   855  rdev->fence_drv[ring].cpu_addr = &rdev->wb.wb[index/4];  in radeon_fence_driver_start_ring()
   861  ring, rdev->fence_drv[ring].gpu_addr, rdev->fence_drv[ring].cpu_addr);  in radeon_fence_driver_start_ring()
   880  rdev->fence_drv[ring].cpu_addr = NULL;  in radeon_fence_driver_init_ring()

radeon_vce.c
   223  void *cpu_addr;  in radeon_vce_resume() local
   235  r = radeon_bo_kmap(rdev->vce.vcpu_bo, &cpu_addr);  in radeon_vce_resume()
   242  memset(cpu_addr, 0, radeon_bo_size(rdev->vce.vcpu_bo));  in radeon_vce_resume()
   244  r = vce_v1_0_load_fw(rdev, cpu_addr);  in radeon_vce_resume()
   246  memcpy(cpu_addr, rdev->vce_fw->data, rdev->vce_fw->size);  in radeon_vce_resume()

/drivers/of/
address.c
   269  range->cpu_addr = of_translate_address(parser->node,  in of_pci_range_parser_one()
   278  u64 pci_addr, cpu_addr, size;  in of_pci_range_parser_one() local
   283  cpu_addr = of_translate_address(parser->node,  in of_pci_range_parser_one()
   290  cpu_addr != range->cpu_addr + range->size)  in of_pci_range_parser_one()
   326  err = pci_register_io_range(range->cpu_addr, range->size);  in of_pci_range_to_resource()
   329  port = pci_address_to_pio(range->cpu_addr);  in of_pci_range_to_resource()
   337  upper_32_bits(range->cpu_addr)) {  in of_pci_range_to_resource()
   342  res->start = range->cpu_addr;  in of_pci_range_to_resource()

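of_pci_range_parser_one() fills range.cpu_addr with the address translated through the node's ranges property, merging adjacent entries. A typical consumer walks the ranges with for_each_of_pci_range(); a sketch (my_dump_ranges() is illustrative):

```c
#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/printk.h>

static void my_dump_ranges(struct device_node *np)
{
	struct of_pci_range_parser parser;
	struct of_pci_range range;

	if (of_pci_range_parser_init(&parser, np))
		return;

	for_each_of_pci_range(&parser, &range)
		pr_info("flags %#x cpu %#llx..%#llx pci %#llx\n",
			range.flags, range.cpu_addr,
			range.cpu_addr + range.size - 1, range.pci_addr);
}
```
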
of_pci.c
   216  range.cpu_addr, range.cpu_addr + range.size - 1,  in of_pci_get_host_bridge_resources()
   223  if (range.cpu_addr == OF_BAD_ADDR || range.size == 0)  in of_pci_get_host_bridge_resources()
   248  *io_base = range.cpu_addr;  in of_pci_get_host_bridge_resources()

/drivers/media/pci/smipcie/
smipcie-main.c
   323  port->cpu_addr[0], (finishedData / 188));  in smi_dma_xfer()
   346  port->cpu_addr[1], (finishedData / 188));  in smi_dma_xfer()
   362  if (port->cpu_addr[0]) {  in smi_port_dma_free()
   364  port->cpu_addr[0], port->dma_addr[0]);  in smi_port_dma_free()
   365  port->cpu_addr[0] = NULL;  in smi_port_dma_free()
   367  if (port->cpu_addr[1]) {  in smi_port_dma_free()
   369  port->cpu_addr[1], port->dma_addr[1]);  in smi_port_dma_free()
   370  port->cpu_addr[1] = NULL;  in smi_port_dma_free()
   410  port->cpu_addr[0] = pci_alloc_consistent(port->dev->pci_dev,  in smi_port_init()
   413  if (!port->cpu_addr[0]) {  in smi_port_init()
  [all …]

/drivers/acpi/
pci_root.c
   737  resource_size_t cpu_addr = res->start;  in acpi_pci_root_remap_iospace() local
   738  resource_size_t pci_addr = cpu_addr - entry->offset;  in acpi_pci_root_remap_iospace()
   742  if (pci_register_io_range(cpu_addr, length))  in acpi_pci_root_remap_iospace()
   745  port = pci_address_to_pio(cpu_addr);  in acpi_pci_root_remap_iospace()
   753  if (pci_remap_iospace(res, cpu_addr) < 0)  in acpi_pci_root_remap_iospace()
   756  pr_info("Remapped I/O %pa to %pR\n", &cpu_addr, res);  in acpi_pci_root_remap_iospace()

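acpi_pci_root_remap_iospace() shows the three-step I/O window dance: reserve the CPU range in the io-range tree, convert it to a logical port number, then map it into the kernel's PCI I/O space. A sketch assuming this era's signatures (my_remap_io() is illustrative):

```c
#include <linux/pci.h>
#include <linux/ioport.h>
#include <linux/errno.h>

static int my_remap_io(struct resource *res, resource_size_t cpu_addr)
{
	resource_size_t length = resource_size(res);
	unsigned long port;
	int ret;

	ret = pci_register_io_range(cpu_addr, length);
	if (ret)
		return ret;

	port = pci_address_to_pio(cpu_addr);
	if (port == (unsigned long)-1)
		return -EINVAL;

	/* The resource now describes logical ports, not CPU addresses. */
	res->start = port;
	res->end = port + length - 1;

	return pci_remap_iospace(res, cpu_addr);
}
```
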
/drivers/scsi/
3w-xxxx.c
   834  unsigned long *cpu_addr = NULL;  in tw_allocate_memory() local
   838  cpu_addr = pci_alloc_consistent(tw_dev->tw_pci_dev, size*TW_Q_LENGTH, &dma_handle);  in tw_allocate_memory()
   839  if (cpu_addr == NULL) {  in tw_allocate_memory()
   844  if ((unsigned long)cpu_addr % (tw_dev->tw_pci_dev->device == TW_DEVICE_ID ? TW_ALIGNMENT_6000 : TW…  in tw_allocate_memory()
   846  pci_free_consistent(tw_dev->tw_pci_dev, size*TW_Q_LENGTH, cpu_addr, dma_handle);  in tw_allocate_memory()
   850  memset(cpu_addr, 0, size*TW_Q_LENGTH);  in tw_allocate_memory()
   856  tw_dev->command_packet_virtual_address[i] = (unsigned long *)((unsigned char *)cpu_addr + (i*size)…  in tw_allocate_memory()
   860  tw_dev->alignment_virtual_address[i] = (unsigned long *)((unsigned char *)cpu_addr + (i*size));  in tw_allocate_memory()
   881  unsigned long *cpu_addr;  in tw_chrdev_ioctl() local
   912  cpu_addr = dma_alloc_coherent(&tw_dev->tw_pci_dev->dev, data_buffer_length_adjusted+sizeof(TW_New_…  in tw_chrdev_ioctl()
  [all …]

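The 3ware drivers allocate one big consistent region, verify the hardware's alignment requirement actually holds, give it back if not, and otherwise carve it into fixed-size slots. A sketch of that allocate-check-carve pattern (my_alloc_aligned() and the align parameter stand in for the TW_ALIGNMENT_* constants):

```c
#include <linux/pci.h>
#include <linux/string.h>

static void *my_alloc_aligned(struct pci_dev *pdev, size_t len,
			      dma_addr_t *dma, unsigned long align)
{
	void *cpu_addr = pci_alloc_consistent(pdev, len, dma);

	if (!cpu_addr)
		return NULL;

	/* Defensive: the API gives no alignment promise this strong. */
	if ((unsigned long)cpu_addr % align) {
		pci_free_consistent(pdev, len, cpu_addr, *dma);
		return NULL;
	}
	memset(cpu_addr, 0, len);
	return cpu_addr;	/* caller slices it: cpu_addr + i * slot_size */
}
```
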
3w-sas.c
   650  unsigned long *cpu_addr;  in twl_allocate_memory() local
   653  cpu_addr = pci_zalloc_consistent(tw_dev->tw_pci_dev, size * TW_Q_LENGTH,  in twl_allocate_memory()
   655  if (!cpu_addr) {  in twl_allocate_memory()
   664  tw_dev->command_packet_virt[i] = (TW_Command_Full *)((unsigned char *)cpu_addr + (i*size));  in twl_allocate_memory()
   668  tw_dev->generic_buffer_virt[i] = (unsigned long *)((unsigned char *)cpu_addr + (i*size));  in twl_allocate_memory()
   672  tw_dev->sense_buffer_virt[i] = (TW_Command_Apache_Header *)((unsigned char *)cpu_addr + (i*size));  in twl_allocate_memory()
   722  unsigned long *cpu_addr, data_buffer_length_adjusted = 0, flags = 0;  in twl_chrdev_ioctl() local
   755  cpu_addr = dma_alloc_coherent(&tw_dev->tw_pci_dev->dev, data_buffer_length_adjusted+sizeof(TW_Ioct…  in twl_chrdev_ioctl()
   756  if (!cpu_addr) {  in twl_chrdev_ioctl()
   761  tw_ioctl = (TW_Ioctl_Buf_Apache *)cpu_addr;  in twl_chrdev_ioctl()
  [all …]

3w-9xxx.c
   524  unsigned long *cpu_addr;  in twa_allocate_memory() local
   527  cpu_addr = pci_alloc_consistent(tw_dev->tw_pci_dev, size*TW_Q_LENGTH, &dma_handle);  in twa_allocate_memory()
   528  if (!cpu_addr) {  in twa_allocate_memory()
   533  if ((unsigned long)cpu_addr % (TW_ALIGNMENT_9000)) {  in twa_allocate_memory()
   535  pci_free_consistent(tw_dev->tw_pci_dev, size*TW_Q_LENGTH, cpu_addr, dma_handle);  in twa_allocate_memory()
   539  memset(cpu_addr, 0, size*TW_Q_LENGTH);  in twa_allocate_memory()
   545  tw_dev->command_packet_virt[i] = (TW_Command_Full *)((unsigned char *)cpu_addr + (i*size));  in twa_allocate_memory()
   549  tw_dev->generic_buffer_virt[i] = (unsigned long *)((unsigned char *)cpu_addr + (i*size));  in twa_allocate_memory()
   643  unsigned long *cpu_addr, data_buffer_length_adjusted = 0, flags = 0;  in twa_chrdev_ioctl() local
   682  cpu_addr = dma_alloc_coherent(&tw_dev->tw_pci_dev->dev, data_buffer_length_adjusted+sizeof(TW_Ioct…  in twa_chrdev_ioctl()
  [all …]