/drivers/media/common/videobuf2/

videobuf2-vmalloc.c
   26  void *vaddr;    member
   48  buf->vaddr = vmalloc_user(buf->size);    in vb2_vmalloc_alloc()
   49  if (!buf->vaddr) {    in vb2_vmalloc_alloc()
   69  vfree(buf->vaddr);    in vb2_vmalloc_put()
   74  static void *vb2_vmalloc_get_userptr(struct device *dev, unsigned long vaddr,    in vb2_vmalloc_get_userptr() argument
   88  offset = vaddr & ~PAGE_MASK;    in vb2_vmalloc_get_userptr()
   90  vec = vb2_create_framevec(vaddr, size);    in vb2_vmalloc_get_userptr()
  107  buf->vaddr = (__force void *)    in vb2_vmalloc_get_userptr()
  110  buf->vaddr = vm_map_ram(frame_vector_pages(vec), n_pages, -1,    in vb2_vmalloc_get_userptr()
  114  if (!buf->vaddr)    in vb2_vmalloc_get_userptr()
  [all …]
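
The videobuf2-vmalloc hits outline the backend's allocation contract: buffers
come from vmalloc_user() (zeroed and safe to map into userspace later) and go
back via vfree(). A minimal sketch of that pairing, with a hypothetical
container struct (the real vb2_vmalloc_buf carries refcounting and more):

    #include <linux/slab.h>
    #include <linux/vmalloc.h>

    struct my_buf {                 /* hypothetical; cf. vb2_vmalloc_buf */
        void *vaddr;
        unsigned long size;
    };

    static struct my_buf *my_buf_alloc(unsigned long size)
    {
        struct my_buf *buf = kzalloc(sizeof(*buf), GFP_KERNEL);

        if (!buf)
            return NULL;

        buf->size = size;
        buf->vaddr = vmalloc_user(buf->size);   /* zeroed, user-mappable */
        if (!buf->vaddr) {
            kfree(buf);
            return NULL;
        }
        return buf;
    }

    static void my_buf_put(struct my_buf *buf)
    {
        vfree(buf->vaddr);
        kfree(buf);
    }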

videobuf2-dma-sg.c
   36  void *vaddr;    member
  115  buf->vaddr = NULL;    in vb2_dma_sg_alloc()
  185  if (buf->vaddr)    in vb2_dma_sg_put()
  186  vm_unmap_ram(buf->vaddr, buf->num_pages);    in vb2_dma_sg_put()
  221  static void *vb2_dma_sg_get_userptr(struct device *dev, unsigned long vaddr,    in vb2_dma_sg_get_userptr() argument
  236  buf->vaddr = NULL;    in vb2_dma_sg_get_userptr()
  239  buf->offset = vaddr & ~PAGE_MASK;    in vb2_dma_sg_get_userptr()
  242  vec = vb2_create_framevec(vaddr, size);    in vb2_dma_sg_get_userptr()
  291  if (buf->vaddr)    in vb2_dma_sg_put_userptr()
  292  vm_unmap_ram(buf->vaddr, buf->num_pages);    in vb2_dma_sg_put_userptr()
  [all …]

videobuf2-dma-contig.c
   27  void *vaddr;    member
   80  if (!buf->vaddr && buf->db_attach)    in vb2_dc_vaddr()
   81  buf->vaddr = dma_buf_vmap(buf->db_attach->dmabuf);    in vb2_dc_vaddr()
   83  return buf->vaddr;    in vb2_dc_vaddr()
  163  buf->vaddr = buf->cookie;    in vb2_dc_alloc()
  342  return buf->vaddr ? buf->vaddr + pgnum * PAGE_SIZE : NULL;    in vb2_dc_dmabuf_ops_kmap()
  349  return buf->vaddr;    in vb2_dc_dmabuf_ops_vmap()
  453  static void *vb2_dc_get_userptr(struct device *dev, unsigned long vaddr,    in vb2_dc_get_userptr() argument
  466  if (!IS_ALIGNED(vaddr | size, dma_align)) {    in vb2_dc_get_userptr()
  486  offset = lower_32_bits(offset_in_page(vaddr));    in vb2_dc_get_userptr()
  [all …]
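
A detail worth calling out in vb2_dc_get_userptr(): OR-ing the user address
and the buffer size lets a single IS_ALIGNED() test reject both a misaligned
start and a misaligned length, because a low bit set in either operand
survives the OR. A sketch of the guard (dma_align is whatever the device
requires):

    /* Both vaddr and size must be dma_align aligned; a stray low bit in
     * either one shows up in the OR and fails the one test. */
    if (!IS_ALIGNED(vaddr | size, dma_align))
        return ERR_PTR(-EINVAL);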

/drivers/net/ethernet/freescale/fman/

fman_muram.c
   47  unsigned long vaddr)    in fman_muram_vbase_to_offset() argument
   49  return vaddr - (unsigned long)muram->vbase;    in fman_muram_vbase_to_offset()
   68  void __iomem *vaddr;    in fman_muram_init() local
   81  vaddr = ioremap(base, size);    in fman_muram_init()
   82  if (!vaddr) {    in fman_muram_init()
   87  ret = gen_pool_add_virt(muram->pool, (unsigned long)vaddr,    in fman_muram_init()
   91  iounmap(vaddr);    in fman_muram_init()
   95  memset_io(vaddr, 0, (int)size);    in fman_muram_init()
   97  muram->vbase = vaddr;    in fman_muram_init()
  134  unsigned long vaddr;    in fman_muram_alloc() local
  [all …]
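
The fman_muram hits show device SRAM being handed to a gen_pool allocator
layered over an ioremap() mapping, so gen_pool_alloc() later returns kernel
virtual addresses inside the SRAM. A sketch of that setup, assuming a 64-byte
allocation granule (the function name and granule are illustrative, not the
driver's exact values):

    #include <linux/genalloc.h>
    #include <linux/io.h>

    static struct gen_pool *sram_pool_init(phys_addr_t base, size_t size)
    {
        struct gen_pool *pool;
        void __iomem *vaddr;

        pool = gen_pool_create(6, -1);      /* order 6 = 64-byte granules */
        if (!pool)
            return NULL;

        vaddr = ioremap(base, size);
        if (!vaddr)
            goto err_pool;

        /* Register the virtual range so allocations come back as vaddrs. */
        if (gen_pool_add_virt(pool, (unsigned long)vaddr, base, size, -1)) {
            iounmap(vaddr);
            goto err_pool;
        }

        memset_io(vaddr, 0, size);          /* device memory: io accessor */
        return pool;

    err_pool:
        gen_pool_destroy(pool);
        return NULL;
    }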

/drivers/gpu/drm/

drm_format_helper.c
   36  void drm_fb_memcpy(void *dst, void *vaddr, struct drm_framebuffer *fb,    in drm_fb_memcpy() argument
   43  vaddr += clip_offset(clip, fb->pitches[0], cpp);    in drm_fb_memcpy()
   45  memcpy(dst, vaddr, len);    in drm_fb_memcpy()
   46  vaddr += fb->pitches[0];    in drm_fb_memcpy()
   62  void drm_fb_memcpy_dstclip(void __iomem *dst, void *vaddr,    in drm_fb_memcpy_dstclip() argument
   71  vaddr += offset;    in drm_fb_memcpy_dstclip()
   74  memcpy_toio(dst, vaddr, len);    in drm_fb_memcpy_dstclip()
   75  vaddr += fb->pitches[0];    in drm_fb_memcpy_dstclip()
   88  void drm_fb_swab16(u16 *dst, void *vaddr, struct drm_framebuffer *fb,    in drm_fb_swab16() argument
  104  src = vaddr + (y * fb->pitches[0]);    in drm_fb_swab16()
  [all …]
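
All three helpers share the pitched-copy loop: a scanline carries width * cpp
payload bytes, but successive lines in the framebuffer start fb->pitches[0]
bytes apart, so the source pointer advances by the pitch while a packed
destination advances by the line length. A sketch with illustrative names:

    #include <linux/string.h>

    static void copy_clip(void *dst, void *vaddr, unsigned int pitch,
                          unsigned int cpp, unsigned int x1, unsigned int y1,
                          unsigned int width, unsigned int lines)
    {
        size_t len = width * cpp;
        unsigned int y;

        vaddr += y1 * pitch + x1 * cpp;     /* top-left of the clip rect */
        for (y = 0; y < lines; y++) {
            memcpy(dst, vaddr, len);
            vaddr += pitch;                 /* next source scanline */
            dst += len;                     /* destination is packed */
        }
    }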

drm_gem_cma_helper.c
  105  cma_obj->vaddr = dma_alloc_wc(drm->dev, size, &cma_obj->paddr,    in drm_gem_cma_create()
  107  if (!cma_obj->vaddr) {    in drm_gem_cma_create()
  183  if (cma_obj->vaddr)    in drm_gem_cma_free_object()
  184  dma_buf_vunmap(gem_obj->import_attach->dmabuf, cma_obj->vaddr);    in drm_gem_cma_free_object()
  186  } else if (cma_obj->vaddr) {    in drm_gem_cma_free_object()
  188  cma_obj->vaddr, cma_obj->paddr);    in drm_gem_cma_free_object()
  282  ret = dma_mmap_wc(cma_obj->base.dev->dev, vma, cma_obj->vaddr,    in drm_gem_cma_mmap_obj()
  391  return cma_obj->vaddr ? (unsigned long)cma_obj->vaddr : -EINVAL;    in drm_gem_cma_get_unmapped_area()
  411  drm_printf_indent(p, indent, "vaddr=%p\n", cma_obj->vaddr);    in drm_gem_cma_print_info()
  437  ret = dma_get_sgtable(obj->dev->dev, sgt, cma_obj->vaddr,    in drm_gem_cma_prime_get_sg_table()
  [all …]
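
The CMA helper pairs dma_alloc_wc() with dma_free_wc(): the allocation hands
back a kernel vaddr plus a DMA address, and the same triple must be passed to
the free. A sketch with a hypothetical holder struct:

    #include <linux/dma-mapping.h>

    struct cma_buf {               /* hypothetical; cf. drm_gem_cma_object */
        void *vaddr;
        dma_addr_t paddr;
        size_t size;
    };

    static int cma_buf_alloc(struct device *dev, struct cma_buf *buf,
                             size_t size)
    {
        buf->size = size;
        buf->vaddr = dma_alloc_wc(dev, size, &buf->paddr,
                                  GFP_KERNEL | __GFP_NOWARN);
        return buf->vaddr ? 0 : -ENOMEM;
    }

    static void cma_buf_free(struct device *dev, struct cma_buf *buf)
    {
        if (buf->vaddr)
            dma_free_wc(dev, buf->size, buf->vaddr, buf->paddr);
        buf->vaddr = NULL;
    }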

drm_client.c
  237  drm_gem_vunmap(buffer->gem, buffer->vaddr);    in drm_client_buffer_delete()
  307  void *vaddr;    in drm_client_buffer_vmap() local
  309  if (buffer->vaddr)    in drm_client_buffer_vmap()
  310  return buffer->vaddr;    in drm_client_buffer_vmap()
  320  vaddr = drm_gem_vmap(buffer->gem);    in drm_client_buffer_vmap()
  321  if (IS_ERR(vaddr))    in drm_client_buffer_vmap()
  322  return vaddr;    in drm_client_buffer_vmap()
  324  buffer->vaddr = vaddr;    in drm_client_buffer_vmap()
  326  return vaddr;    in drm_client_buffer_vmap()
  340  drm_gem_vunmap(buffer->gem, buffer->vaddr);    in drm_client_buffer_vunmap()
  [all …]
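
drm_client_buffer_vmap() is a lazy, cached mapping: the first caller maps the
GEM object and stashes the pointer; later callers get the cached value, and
vunmap/delete tear it down. Reconstructed from the hits above (surrounding
locking and the rest of the function omitted):

    void *client_buffer_vmap(struct drm_client_buffer *buffer)
    {
        void *vaddr;

        if (buffer->vaddr)              /* already mapped: reuse */
            return buffer->vaddr;

        vaddr = drm_gem_vmap(buffer->gem);
        if (IS_ERR(vaddr))
            return vaddr;

        buffer->vaddr = vaddr;          /* cache for subsequent callers */
        return vaddr;
    }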

/drivers/misc/sgi-gru/

grufault.c
   48  struct vm_area_struct *gru_find_vma(unsigned long vaddr)    in gru_find_vma() argument
   52  vma = find_vma(current->mm, vaddr);    in gru_find_vma()
   53  if (vma && vma->vm_start <= vaddr && vma->vm_ops == &gru_vm_ops)    in gru_find_vma()
   66  static struct gru_thread_state *gru_find_lock_gts(unsigned long vaddr)    in gru_find_lock_gts() argument
   73  vma = gru_find_vma(vaddr);    in gru_find_lock_gts()
   75  gts = gru_find_thread_state(vma, TSID(vaddr, vma));    in gru_find_lock_gts()
   83  static struct gru_thread_state *gru_alloc_locked_gts(unsigned long vaddr)    in gru_alloc_locked_gts() argument
   90  vma = gru_find_vma(vaddr);    in gru_alloc_locked_gts()
   94  gts = gru_alloc_thread_state(vma, TSID(vaddr, vma));    in gru_alloc_locked_gts()
  178  unsigned long vaddr, int write,    in non_atomic_pte_lookup() argument
  [all …]
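
gru_find_vma() is the canonical way to validate that a user address belongs
to this driver: find_vma() returns the first VMA ending above the address, so
the start bound still has to be checked, and comparing vm_ops proves the VMA
was created by the driver's own mmap. Reconstructed from the hits (the caller
must hold the mm's mmap semaphore for read):

    static struct vm_area_struct *gru_find_vma(unsigned long vaddr)
    {
        struct vm_area_struct *vma;

        vma = find_vma(current->mm, vaddr);
        /* find_vma() only guarantees vaddr < vm_end; check the start
         * bound, then ownership via the driver's vm_ops. */
        if (vma && vma->vm_start <= vaddr && vma->vm_ops == &gru_vm_ops)
            return vma;
        return NULL;
    }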

/drivers/gpu/drm/i915/gem/selftests/

i915_gem_object_blt.c
   22  u32 *vaddr;    in igt_fill_blt() local
   52  vaddr = i915_gem_object_pin_map(obj, I915_MAP_WB);    in igt_fill_blt()
   53  if (IS_ERR(vaddr)) {    in igt_fill_blt()
   54  err = PTR_ERR(vaddr);    in igt_fill_blt()
   62  memset32(vaddr, val ^ 0xdeadbeaf,    in igt_fill_blt()
   81  if (vaddr[i] != val) {    in igt_fill_blt()
   83  vaddr[i], val);    in igt_fill_blt()
  113  u32 *vaddr;    in igt_copy_blt() local
  137  vaddr = i915_gem_object_pin_map(src, I915_MAP_WB);    in igt_copy_blt()
  138  if (IS_ERR(vaddr)) {    in igt_copy_blt()
  [all …]
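
The selftest pattern behind these hits: pin a CPU write-back mapping of the
object, seed it with a scrambled value so stale data cannot pass, run the
blit, then verify every 32-bit word. A condensed sketch (the size parameter
and error handling are simplified relative to the real test):

    static int fill_and_check(struct drm_i915_gem_object *obj,
                              u32 val, u64 size)
    {
        u32 *vaddr;
        u64 i;

        vaddr = i915_gem_object_pin_map(obj, I915_MAP_WB);
        if (IS_ERR(vaddr))
            return PTR_ERR(vaddr);

        /* Scrambled seed: the blit must overwrite every word with val. */
        memset32(vaddr, val ^ 0xdeadbeaf, size / sizeof(u32));

        /* ... submit the fill blt to write 'val' and wait for it ... */

        for (i = 0; i < size / sizeof(u32); i++) {
            if (vaddr[i] != val) {
                pr_err("vaddr[%llu]=%x, expected=%x\n", i, vaddr[i], val);
                i915_gem_object_unpin_map(obj);
                return -EINVAL;
            }
        }
        i915_gem_object_unpin_map(obj);
        return 0;
    }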

i915_gem_client_blt.c
   22  u32 *vaddr;    in igt_client_fill() local
   46  vaddr = i915_gem_object_pin_map(obj, I915_MAP_WB);    in igt_client_fill()
   47  if (IS_ERR(vaddr)) {    in igt_client_fill()
   48  err = PTR_ERR(vaddr);    in igt_client_fill()
   62  memset32(vaddr, val ^ 0xdeadbeaf,    in igt_client_fill()
   81  if (vaddr[i] != val) {    in igt_client_fill()
   83  vaddr[i], val);    in igt_client_fill()

/drivers/scsi/

hpsa.h
  171  void __iomem *vaddr;    member
  422  writel(c->busaddr, h->vaddr + SA5_REQUEST_PORT_OFFSET);    in SA5_submit_command()
  423  (void) readl(h->vaddr + SA5_SCRATCHPAD_OFFSET);    in SA5_submit_command()
  429  writel(c->busaddr, h->vaddr + SA5_REQUEST_PORT_OFFSET);    in SA5_submit_command_no_read()
  435  writel(c->busaddr, h->vaddr + SA5_REQUEST_PORT_OFFSET);    in SA5_submit_command_ioaccel2()
  447  writel(0, h->vaddr + SA5_REPLY_INTR_MASK_OFFSET);    in SA5_intr_mask()
  448  (void) readl(h->vaddr + SA5_REPLY_INTR_MASK_OFFSET);    in SA5_intr_mask()
  452  h->vaddr + SA5_REPLY_INTR_MASK_OFFSET);    in SA5_intr_mask()
  453  (void) readl(h->vaddr + SA5_REPLY_INTR_MASK_OFFSET);    in SA5_intr_mask()
  464  writel(0, h->vaddr + SA5_REPLY_INTR_MASK_OFFSET);    in SA5B_intr_mask()
  [all …]
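
The hpsa hits show a classic MMIO idiom: the command's bus address is written
to a doorbell register, then a throwaway readl() flushes the posted write so
it actually reaches the controller before the function returns. Essentially
what SA5_submit_command() does (names taken from the hits):

    static void submit_command(struct ctlr_info *h, struct CommandList *c)
    {
        writel(c->busaddr, h->vaddr + SA5_REQUEST_PORT_OFFSET);
        (void)readl(h->vaddr + SA5_SCRATCHPAD_OFFSET);  /* flush posting */
    }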

sun3_scsi.c
  389  unsigned char *vaddr;    in sun3scsi_dma_finish() local
  391  vaddr = (unsigned char *)dvma_vmetov(sun3_dma_orig_addr);    in sun3scsi_dma_finish()
  393  vaddr += (sun3_dma_orig_count - fifo);    in sun3scsi_dma_finish()
  394  vaddr--;    in sun3scsi_dma_finish()
  398  *vaddr = (dregs->bpack_lo & 0xff00) >> 8;    in sun3scsi_dma_finish()
  399  vaddr--;    in sun3scsi_dma_finish()
  403  *vaddr = (dregs->bpack_hi & 0x00ff);    in sun3scsi_dma_finish()
  404  vaddr--;    in sun3scsi_dma_finish()
  408  *vaddr = (dregs->bpack_hi & 0xff00) >> 8;    in sun3scsi_dma_finish()
  440  unsigned char *vaddr;    in sun3scsi_dma_finish() local
  [all …]

/drivers/media/v4l2-core/

videobuf-vmalloc.c
  102  __func__, i, mem->vaddr);    in videobuf_vm_close()
  104  vfree(mem->vaddr);    in videobuf_vm_close()
  105  mem->vaddr = NULL;    in videobuf_vm_close()
  170  if (!mem->vaddr) {    in __videobuf_iolock()
  189  mem->vaddr = vmalloc_user(pages);    in __videobuf_iolock()
  190  if (!mem->vaddr) {    in __videobuf_iolock()
  195  mem->vaddr, pages);    in __videobuf_iolock()
  234  mem->vaddr = vmalloc_user(pages);    in __videobuf_mmap_mapper()
  235  if (!mem->vaddr) {    in __videobuf_mmap_mapper()
  239  dprintk(1, "vmalloc is at addr %p (%d pages)\n", mem->vaddr, pages);    in __videobuf_mmap_mapper()
  [all …]

videobuf-dma-contig.c
   25  void *vaddr;    member
   42  mem->vaddr = dma_alloc_coherent(dev, mem->size,    in __videobuf_dc_alloc()
   45  if (!mem->vaddr) {    in __videobuf_dc_alloc()
   50  dev_dbg(dev, "dma mapped data is at %p (%ld)\n", mem->vaddr, mem->size);    in __videobuf_dc_alloc()
   58  dma_free_coherent(dev, mem->size, mem->vaddr, mem->dma_handle);    in __videobuf_dc_free()
   60  mem->vaddr = NULL;    in __videobuf_dc_free()
  114  i, mem->vaddr);    in videobuf_vm_close()
  117  mem->vaddr = NULL;    in videobuf_vm_close()
  231  return mem->vaddr;    in __videobuf_to_vaddr()
  248  if (!mem->vaddr) {    in __videobuf_iolock()
  [all …]

/drivers/xen/xenbus/

xenbus_client.c
   78  void **vaddr);
   79  int (*unmap)(struct xenbus_device *dev, void *vaddr);
  359  int xenbus_grant_ring(struct xenbus_device *dev, void *vaddr,    in xenbus_grant_ring() argument
  367  virt_to_gfn(vaddr), 0);    in xenbus_grant_ring()
  375  vaddr = vaddr + XEN_PAGE_SIZE;    in xenbus_grant_ring()
  449  unsigned int nr_grefs, void **vaddr)    in xenbus_map_ring_valloc() argument
  451  return ring_ops->map(dev, gnt_refs, nr_grefs, vaddr);    in xenbus_map_ring_valloc()
  535  unsigned long vaddr = (unsigned long)gfn_to_virt(gfn);    in xenbus_map_ring_setup_grant_hvm() local
  537  info->phys_addrs[info->idx] = vaddr;    in xenbus_map_ring_setup_grant_hvm()
  538  info->addrs[info->idx] = vaddr;    in xenbus_map_ring_setup_grant_hvm()
  [all …]
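
xenbus_grant_ring() walks the ring one XEN_PAGE_SIZE chunk at a time and
grants each frame to the peer domain. A simplified sketch of that loop; the
real function also revokes the already-granted references when a later grant
fails:

    #include <xen/grant_table.h>
    #include <xen/page.h>

    static int grant_ring(struct xenbus_device *dev, void *vaddr,
                          unsigned int nr_pages, grant_ref_t *grefs)
    {
        unsigned int i;

        for (i = 0; i < nr_pages; i++) {
            int err = gnttab_grant_foreign_access(dev->otherend_id,
                                                  virt_to_gfn(vaddr), 0);
            if (err < 0)
                return err;     /* caller must clean up earlier grants */
            grefs[i] = err;
            vaddr += XEN_PAGE_SIZE;
        }
        return 0;
    }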

/drivers/gpu/drm/i915/gt/

intel_timeline.c
   29  void *vaddr;    member
  133  __idle_hwsp_free(cl->hwsp, ptr_unmask_bits(cl->vaddr, CACHELINE_BITS));    in __idle_cacheline_free()
  145  if (ptr_test_bit(cl->vaddr, CACHELINE_FREE))    in __cacheline_retire()
  162  void *vaddr;    in cacheline_alloc() local
  170  vaddr = i915_gem_object_pin_map(hwsp->vma->obj, I915_MAP_WB);    in cacheline_alloc()
  171  if (IS_ERR(vaddr)) {    in cacheline_alloc()
  173  return ERR_CAST(vaddr);    in cacheline_alloc()
  178  cl->vaddr = page_pack_bits(vaddr, cacheline);    in cacheline_alloc()
  200  GEM_BUG_ON(ptr_test_bit(cl->vaddr, CACHELINE_FREE));    in cacheline_free()
  201  cl->vaddr = ptr_set_bit(cl->vaddr, CACHELINE_FREE);    in cacheline_free()
  [all …]
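
cl->vaddr here is a tagged pointer: the pinned mapping is page aligned, so
its low bits are free to carry the cacheline index plus a FREE flag, which is
what page_pack_bits(), ptr_unmask_bits(), ptr_test_bit() and ptr_set_bit()
manipulate. A generic sketch of the trick (bit widths assumed, helpers
renamed):

    #include <linux/bits.h>

    #define CACHELINE_BITS  6               /* assumed: 64 lines per page */
    #define CACHELINE_FREE  CACHELINE_BITS  /* next low bit used as a flag */

    static inline void *pack(void *page_vaddr, unsigned long cacheline)
    {
        /* page_vaddr is page aligned, so bits 0..PAGE_SHIFT-1 are zero */
        return (void *)((unsigned long)page_vaddr | cacheline);
    }

    static inline unsigned long unpack_bits(void *packed, unsigned int bits)
    {
        return (unsigned long)packed & (BIT(bits) - 1);
    }

    static inline bool test_tag(void *packed, unsigned int bit)
    {
        return (unsigned long)packed & BIT(bit);
    }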

/drivers/gpu/drm/i915/gem/

i915_gem_phys.c
   27  char *vaddr;    in i915_gem_object_get_pages_phys() local
   44  vaddr = phys->vaddr;    in i915_gem_object_get_pages_phys()
   56  memcpy(vaddr, src, PAGE_SIZE);    in i915_gem_object_get_pages_phys()
   57  drm_clflush_virt_range(vaddr, PAGE_SIZE);    in i915_gem_object_get_pages_phys()
   61  vaddr += PAGE_SIZE;    in i915_gem_object_get_pages_phys()
  105  char *vaddr = obj->phys_handle->vaddr;    in i915_gem_object_put_pages_phys() local
  117  drm_clflush_virt_range(vaddr, PAGE_SIZE);    in i915_gem_object_put_pages_phys()
  118  memcpy(dst, vaddr, PAGE_SIZE);    in i915_gem_object_put_pages_phys()
  125  vaddr += PAGE_SIZE;    in i915_gem_object_put_pages_phys()

/drivers/misc/mei/

dma-ring.c
   30  if (dscr->vaddr)    in mei_dmam_dscr_alloc()
   33  dscr->vaddr = dmam_alloc_coherent(dev->dev, dscr->size, &dscr->daddr,    in mei_dmam_dscr_alloc()
   35  if (!dscr->vaddr)    in mei_dmam_dscr_alloc()
   50  if (!dscr->vaddr)    in mei_dmam_dscr_free()
   53  dmam_free_coherent(dev->dev, dscr->size, dscr->vaddr, dscr->daddr);    in mei_dmam_dscr_free()
   54  dscr->vaddr = NULL;    in mei_dmam_dscr_free()
   98  return !!dev->dr_dscr[DMA_DSCR_HOST].vaddr;    in mei_dma_ring_is_allocated()
  104  return (struct hbm_dma_ring_ctrl *)dev->dr_dscr[DMA_DSCR_CTRL].vaddr;    in mei_dma_ring_ctrl()
  131  unsigned char *dbuf = dev->dr_dscr[DMA_DSCR_DEVICE].vaddr;    in mei_dma_copy_from()
  151  unsigned char *hbuf = dev->dr_dscr[DMA_DSCR_HOST].vaddr;    in mei_dma_copy_to()
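
mei's descriptors use the managed allocator: dmam_alloc_coherent() ties the
buffer's lifetime to the struct device, so the error paths need no explicit
free. A sketch with a hypothetical descriptor struct (field names follow the
hits):

    #include <linux/dma-mapping.h>

    struct dma_dscr {           /* hypothetical */
        void *vaddr;
        dma_addr_t daddr;
        size_t size;
    };

    static int dscr_alloc(struct device *dev, struct dma_dscr *dscr)
    {
        if (dscr->vaddr)        /* idempotent: already allocated */
            return 0;

        /* Managed: freed automatically when 'dev' goes away. */
        dscr->vaddr = dmam_alloc_coherent(dev, dscr->size, &dscr->daddr,
                                          GFP_KERNEL);
        return dscr->vaddr ? 0 : -ENOMEM;
    }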

/drivers/gpu/drm/tegra/

falcon.c
   61  u32 *firmware_vaddr = falcon->firmware.vaddr;    in falcon_copy_firmware_image()
   86  struct falcon_fw_bin_header_v1 *bin = (void *)falcon->firmware.vaddr;    in falcon_parse_firmware_image()
  107  os = falcon->firmware.vaddr + bin->os_header_offset;    in falcon_parse_firmware_image()
  139  falcon->firmware.vaddr = falcon->ops->alloc(falcon, firmware->size,    in falcon_load_firmware()
  141  if (IS_ERR(falcon->firmware.vaddr)) {    in falcon_load_firmware()
  143  return PTR_ERR(falcon->firmware.vaddr);    in falcon_load_firmware()
  163  falcon->firmware.paddr, falcon->firmware.vaddr);    in falcon_load_firmware()
  174  falcon->firmware.vaddr = NULL;    in falcon_init()
  186  if (falcon->firmware.vaddr) {    in falcon_exit()
  189  falcon->firmware.vaddr);    in falcon_exit()
  [all …]

/drivers/staging/android/ion/

ion_buffer.c
  229  void *vaddr;    in ion_buffer_kmap_get() local
  233  return buffer->vaddr;    in ion_buffer_kmap_get()
  235  vaddr = ion_heap_map_kernel(buffer->heap, buffer);    in ion_buffer_kmap_get()
  236  if (WARN_ONCE(!vaddr,    in ion_buffer_kmap_get()
  239  if (IS_ERR(vaddr))    in ion_buffer_kmap_get()
  240  return vaddr;    in ion_buffer_kmap_get()
  241  buffer->vaddr = vaddr;    in ion_buffer_kmap_get()
  243  return vaddr;    in ion_buffer_kmap_get()
  251  buffer->vaddr = NULL;    in ion_buffer_kmap_put()

/drivers/gpu/drm/i915/selftests/

igt_spinner.c
   15  void *vaddr;    in igt_spinner_init() local
   36  vaddr = i915_gem_object_pin_map(spin->hws, I915_MAP_WB);    in igt_spinner_init()
   37  if (IS_ERR(vaddr)) {    in igt_spinner_init()
   38  err = PTR_ERR(vaddr);    in igt_spinner_init()
   41  spin->seqno = memset(vaddr, 0xff, PAGE_SIZE);    in igt_spinner_init()
   44  vaddr = i915_gem_object_pin_map(spin->obj, mode);    in igt_spinner_init()
   45  if (IS_ERR(vaddr)) {    in igt_spinner_init()
   46  err = PTR_ERR(vaddr);    in igt_spinner_init()
   49  spin->batch = vaddr;    in igt_spinner_init()

/drivers/gpu/drm/vkms/

vkms_gem.c
   36  WARN_ON(gem->vaddr);    in vkms_gem_free_object()
   47  unsigned long vaddr = vmf->address;    in vkms_gem_fault() local
   52  page_offset = (vaddr - vma->vm_start) >> PAGE_SHIFT;    in vkms_gem_fault()
  171  WARN_ON(vkms_obj->vaddr);    in vkms_gem_vunmap()
  180  vunmap(vkms_obj->vaddr);    in vkms_gem_vunmap()
  181  vkms_obj->vaddr = NULL;    in vkms_gem_vunmap()
  196  if (!vkms_obj->vaddr) {    in vkms_gem_vmap()
  205  vkms_obj->vaddr = vmap(pages, n_pages, VM_MAP, PAGE_KERNEL);    in vkms_gem_vmap()
  206  if (!vkms_obj->vaddr)    in vkms_gem_vmap()
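
vkms_gem_vmap() builds a contiguous kernel view over the object's scattered
shmem pages with vmap(), checking the cached pointer first; the teardown is
vunmap() followed by clearing the cached field, as the hits show. A sketch of
the core call (the page array is assumed to come from the object):

    #include <linux/vmalloc.h>

    static void *map_object_pages(struct page **pages, unsigned int n_pages)
    {
        /* VM_MAP + PAGE_KERNEL: ordinary cached kernel mapping;
         * returns NULL on failure. */
        return vmap(pages, n_pages, VM_MAP, PAGE_KERNEL);
    }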

/drivers/dma/dw-edma/

dw-edma-pcie.c
  133  dw->rg_region.vaddr = pcim_iomap_table(pdev)[pdata->rg_bar];    in dw_edma_pcie_probe()
  134  dw->rg_region.vaddr += pdata->rg_off;    in dw_edma_pcie_probe()
  139  dw->ll_region.vaddr = pcim_iomap_table(pdev)[pdata->ll_bar];    in dw_edma_pcie_probe()
  140  dw->ll_region.vaddr += pdata->ll_off;    in dw_edma_pcie_probe()
  145  dw->dt_region.vaddr = pcim_iomap_table(pdev)[pdata->dt_bar];    in dw_edma_pcie_probe()
  146  dw->dt_region.vaddr += pdata->dt_off;    in dw_edma_pcie_probe()
  163  dw->rg_region.vaddr, &dw->rg_region.paddr);    in dw_edma_pcie_probe()
  167  dw->ll_region.vaddr, &dw->ll_region.paddr);    in dw_edma_pcie_probe()
  171  dw->dt_region.vaddr, &dw->dt_region.paddr);    in dw_edma_pcie_probe()
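
dw-edma resolves each register region by indexing the managed BAR table and
adding a per-region offset. A sketch, assuming the BARs were mapped earlier
with pcim_iomap_regions() (function name is illustrative):

    static void __iomem *map_region(struct pci_dev *pdev, unsigned int bar,
                                    resource_size_t off)
    {
        void __iomem * const *tbl = pcim_iomap_table(pdev);

        return tbl[bar] + off;  /* base of the BAR plus region offset */
    }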

/drivers/net/ethernet/freescale/dpaa2/

dpaa2-eth-trace.h
   98  void *vaddr,
  107  TP_ARGS(netdev, vaddr, size, dma_addr, map_size, bpid),
  115  __field(void *, vaddr)
  127  __entry->vaddr = vaddr;
  140  __entry->vaddr,
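
The dpaa2 hits are the skeleton of a tracepoint that records a buffer's
vaddr: TP_STRUCT__entry declares the ring-buffer fields, TP_fast_assign
captures them, and TP_printk formats them. A reduced sketch with fewer
arguments than the driver's event class (the usual TRACE_SYSTEM and
trace/define_trace.h scaffolding around such headers is elided):

    TRACE_EVENT(my_buf_event,
        TP_PROTO(struct net_device *netdev, void *vaddr, u32 size),
        TP_ARGS(netdev, vaddr, size),
        TP_STRUCT__entry(
            __string(name, netdev->name)
            __field(void *, vaddr)
            __field(u32, size)
        ),
        TP_fast_assign(
            __assign_str(name, netdev->name);
            __entry->vaddr = vaddr;
            __entry->size = size;
        ),
        TP_printk("%s: vaddr=%p size=%u",
                  __get_str(name), __entry->vaddr, __entry->size)
    );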

/drivers/i2c/busses/

i2c-ibm_iic.c
   82  volatile struct iic_regs __iomem *iic = dev->vaddr;    in dump_iic_regs()
  126  out_8(&dev->vaddr->intmsk, enable ? INTRMSK_EIMTC : 0);    in iic_interrupt_mode()
  134  volatile struct iic_regs __iomem *iic = dev->vaddr;    in iic_dev_init()
  179  volatile struct iic_regs __iomem *iic = dev->vaddr;    in iic_dev_reset()
  240  volatile struct iic_regs __iomem *iic = dev->vaddr;    in iic_smbus_quick()
  326  volatile struct iic_regs __iomem *iic = dev->vaddr;    in iic_handler()
  344  volatile struct iic_regs __iomem *iic = dev->vaddr;    in iic_xfer_result()
  377  volatile struct iic_regs __iomem *iic = dev->vaddr;    in iic_abort_xfer()
  409  volatile struct iic_regs __iomem *iic = dev->vaddr;    in iic_wait_for_tc()
  460  volatile struct iic_regs __iomem *iic = dev->vaddr;    in iic_xfer_bytes()
  [all …]