
Searched refs:vaddr (Results 1 – 25 of 420) sorted by relevance


/drivers/media/common/videobuf2/
videobuf2-vmalloc.c
26 void *vaddr; member
47 buf->vaddr = vmalloc_user(buf->size); in vb2_vmalloc_alloc()
48 if (!buf->vaddr) { in vb2_vmalloc_alloc()
68 vfree(buf->vaddr); in vb2_vmalloc_put()
74 unsigned long vaddr, unsigned long size) in vb2_vmalloc_get_userptr() argument
86 offset = vaddr & ~PAGE_MASK; in vb2_vmalloc_get_userptr()
88 vec = vb2_create_framevec(vaddr, size); in vb2_vmalloc_get_userptr()
105 buf->vaddr = (__force void *) in vb2_vmalloc_get_userptr()
108 buf->vaddr = vm_map_ram(frame_vector_pages(vec), n_pages, -1); in vb2_vmalloc_get_userptr()
111 if (!buf->vaddr) in vb2_vmalloc_get_userptr()
[all …]
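
The vmalloc hits above follow one allocate/free pairing throughout: vmalloc_user() for a zeroed buffer that can later be mapped to userspace, vfree() when the buffer is put. A minimal sketch of that pairing, using hypothetical demo_* names rather than the videobuf2 ones:

/* A minimal sketch, not videobuf2 code; demo_* names are hypothetical. */
#include <linux/vmalloc.h>
#include <linux/slab.h>

struct demo_buf {
	void *vaddr;
	unsigned long size;
};

static struct demo_buf *demo_buf_alloc(unsigned long size)
{
	struct demo_buf *buf = kzalloc(sizeof(*buf), GFP_KERNEL);

	if (!buf)
		return NULL;
	buf->size = size;
	buf->vaddr = vmalloc_user(size);	/* zeroed, safe to mmap later */
	if (!buf->vaddr) {
		kfree(buf);
		return NULL;
	}
	return buf;
}

static void demo_buf_put(struct demo_buf *buf)
{
	vfree(buf->vaddr);	/* vfree(NULL) would be a no-op */
	kfree(buf);
}
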
videobuf2-dma-contig.c
28 void *vaddr; member
98 if (buf->vaddr) in vb2_dc_vaddr()
99 return buf->vaddr; in vb2_dc_vaddr()
105 buf->vaddr = map.vaddr; in vb2_dc_vaddr()
107 return buf->vaddr; in vb2_dc_vaddr()
111 buf->vaddr = dma_vmap_noncontiguous(buf->dev, buf->size, in vb2_dc_vaddr()
113 return buf->vaddr; in vb2_dc_vaddr()
136 if (buf->vaddr) in vb2_dc_prepare()
137 flush_kernel_vmap_range(buf->vaddr, buf->size); in vb2_dc_prepare()
156 if (buf->vaddr) in vb2_dc_finish()
[all …]
videobuf2-dma-sg.c
36 void *vaddr; member
116 buf->vaddr = NULL; in vb2_dma_sg_alloc()
190 if (buf->vaddr) in vb2_dma_sg_put()
191 vm_unmap_ram(buf->vaddr, buf->num_pages); in vb2_dma_sg_put()
224 unsigned long vaddr, unsigned long size) in vb2_dma_sg_get_userptr() argument
237 buf->vaddr = NULL; in vb2_dma_sg_get_userptr()
240 buf->offset = vaddr & ~PAGE_MASK; in vb2_dma_sg_get_userptr()
244 vec = vb2_create_framevec(vaddr, size); in vb2_dma_sg_get_userptr()
291 if (buf->vaddr) in vb2_dma_sg_put_userptr()
292 vm_unmap_ram(buf->vaddr, buf->num_pages); in vb2_dma_sg_put_userptr()
[all …]
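
The userptr paths in both videobuf2 allocators map a caller-pinned page array into one contiguous kernel range with vm_map_ram() and tear it down with vm_unmap_ram() using the same page count. A sketch of just that mapping step (demo_* names are illustrative):

/* Sketch only: map/unmap a pinned page array; not the videobuf2 code. */
#include <linux/vmalloc.h>
#include <linux/mm_types.h>

static void *demo_map_pages(struct page **pages, unsigned int n_pages)
{
	/* node -1 = any node; returns NULL if no virtual space is free */
	return vm_map_ram(pages, n_pages, -1);
}

static void demo_unmap_pages(void *vaddr, unsigned int n_pages)
{
	if (vaddr)
		vm_unmap_ram(vaddr, n_pages);	/* count must match the map */
}
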
/drivers/net/ethernet/freescale/fman/
fman_muram.c
20 unsigned long vaddr) in fman_muram_vbase_to_offset() argument
22 return vaddr - (unsigned long)muram->vbase; in fman_muram_vbase_to_offset()
41 void __iomem *vaddr; in fman_muram_init() local
54 vaddr = ioremap(base, size); in fman_muram_init()
55 if (!vaddr) { in fman_muram_init()
60 ret = gen_pool_add_virt(muram->pool, (unsigned long)vaddr, in fman_muram_init()
64 iounmap(vaddr); in fman_muram_init()
68 memset_io(vaddr, 0, (int)size); in fman_muram_init()
70 muram->vbase = vaddr; in fman_muram_init()
107 unsigned long vaddr; in fman_muram_alloc() local
[all …]
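
fman_muram.c shows MURAM being ioremap()'d and then handed to genalloc so callers can sub-allocate from it; gen_pool_add_virt() records both the virtual and physical base, which is why fman_muram_vbase_to_offset() above is a simple subtraction. A hedged sketch of that init sequence, with illustrative names:

/* Hedged sketch of the fman_muram init pattern; names are illustrative. */
#include <linux/io.h>
#include <linux/genalloc.h>
#include <linux/errno.h>

static int demo_muram_init(struct gen_pool *pool, phys_addr_t base,
			   size_t size)
{
	void __iomem *vaddr;
	int ret;

	vaddr = ioremap(base, size);
	if (!vaddr)
		return -ENOMEM;

	/* record the virt/phys pair so offset math stays a subtraction */
	ret = gen_pool_add_virt(pool, (unsigned long)vaddr, base, size, -1);
	if (ret < 0) {
		iounmap(vaddr);
		return ret;
	}

	memset_io(vaddr, 0, size);	/* device memory: use the *_io helpers */
	return 0;
}
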
/drivers/misc/sgi-gru/
grufault.c
48 struct vm_area_struct *gru_find_vma(unsigned long vaddr) in gru_find_vma() argument
52 vma = vma_lookup(current->mm, vaddr); in gru_find_vma()
66 static struct gru_thread_state *gru_find_lock_gts(unsigned long vaddr) in gru_find_lock_gts() argument
73 vma = gru_find_vma(vaddr); in gru_find_lock_gts()
75 gts = gru_find_thread_state(vma, TSID(vaddr, vma)); in gru_find_lock_gts()
83 static struct gru_thread_state *gru_alloc_locked_gts(unsigned long vaddr) in gru_alloc_locked_gts() argument
90 vma = gru_find_vma(vaddr); in gru_alloc_locked_gts()
94 gts = gru_alloc_thread_state(vma, TSID(vaddr, vma)); in gru_alloc_locked_gts()
178 unsigned long vaddr, int write, in non_atomic_pte_lookup() argument
188 if (get_user_pages(vaddr, 1, write ? FOLL_WRITE : 0, &page, NULL) <= 0) in non_atomic_pte_lookup()
[all …]
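
The non-atomic lookup in grufault.c resolves a faulting user vaddr to a page the slow way, via get_user_pages(). A sketch matching the five-argument call form visible above (kernels from 6.5 onward drop the trailing vmas argument):

/* Sketch of the slow-path lookup; matches the 5-arg call form above. */
#include <linux/mm.h>

static int demo_user_page(unsigned long vaddr, int write, struct page **page)
{
	/* caller is assumed to hold mmap_read_lock(current->mm) */
	if (get_user_pages(vaddr, 1, write ? FOLL_WRITE : 0, page, NULL) <= 0)
		return -EFAULT;
	return 0;	/* caller must put_page(*page) when done */
}
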
/drivers/scsi/
hpsa.h
172 void __iomem *vaddr; member
423 writel(c->busaddr, h->vaddr + SA5_REQUEST_PORT_OFFSET); in SA5_submit_command()
424 (void) readl(h->vaddr + SA5_SCRATCHPAD_OFFSET); in SA5_submit_command()
430 writel(c->busaddr, h->vaddr + SA5_REQUEST_PORT_OFFSET); in SA5_submit_command_no_read()
436 writel(c->busaddr, h->vaddr + SA5_REQUEST_PORT_OFFSET); in SA5_submit_command_ioaccel2()
448 writel(0, h->vaddr + SA5_REPLY_INTR_MASK_OFFSET); in SA5_intr_mask()
449 (void) readl(h->vaddr + SA5_REPLY_INTR_MASK_OFFSET); in SA5_intr_mask()
453 h->vaddr + SA5_REPLY_INTR_MASK_OFFSET); in SA5_intr_mask()
454 (void) readl(h->vaddr + SA5_REPLY_INTR_MASK_OFFSET); in SA5_intr_mask()
465 writel(0, h->vaddr + SA5_REPLY_INTR_MASK_OFFSET); in SA5B_intr_mask()
[all …]
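
The hpsa accessors illustrate the classic posted-write idiom: each writel() to the BAR mapping is followed by a throwaway readl() so the write is flushed to the controller before the driver proceeds. A sketch with hypothetical offsets (the real SA5_* values live in hpsa.h):

/* Sketch with hypothetical offsets; the real SA5_* values are in hpsa.h. */
#include <linux/io.h>

#define DEMO_REQUEST_PORT_OFFSET	0x40
#define DEMO_SCRATCHPAD_OFFSET		0xB0

static void demo_submit_command(void __iomem *vaddr, u32 busaddr)
{
	writel(busaddr, vaddr + DEMO_REQUEST_PORT_OFFSET);
	(void)readl(vaddr + DEMO_SCRATCHPAD_OFFSET);	/* flush posted write */
}
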
sun3_scsi.c
390 unsigned char *vaddr; in sun3scsi_dma_finish() local
392 vaddr = (unsigned char *)dvma_vmetov(sun3_dma_orig_addr); in sun3scsi_dma_finish()
394 vaddr += (sun3_dma_orig_count - fifo); in sun3scsi_dma_finish()
395 vaddr--; in sun3scsi_dma_finish()
399 *vaddr = (dregs->bpack_lo & 0xff00) >> 8; in sun3scsi_dma_finish()
400 vaddr--; in sun3scsi_dma_finish()
404 *vaddr = (dregs->bpack_hi & 0x00ff); in sun3scsi_dma_finish()
405 vaddr--; in sun3scsi_dma_finish()
409 *vaddr = (dregs->bpack_hi & 0xff00) >> 8; in sun3scsi_dma_finish()
441 unsigned char *vaddr; in sun3scsi_dma_finish() local
[all …]
/drivers/dma-buf/heaps/
cma_heap.c
40 void *vaddr; member
132 invalidate_kernel_vmap_range(buffer->vaddr, buffer->len); in cma_heap_dma_buf_begin_cpu_access()
153 flush_kernel_vmap_range(buffer->vaddr, buffer->len); in cma_heap_dma_buf_end_cpu_access()
198 void *vaddr; in cma_heap_do_vmap() local
200 vaddr = vmap(buffer->pages, buffer->pagecount, VM_MAP, PAGE_KERNEL); in cma_heap_do_vmap()
201 if (!vaddr) in cma_heap_do_vmap()
204 return vaddr; in cma_heap_do_vmap()
210 void *vaddr; in cma_heap_vmap() local
216 iosys_map_set_vaddr(map, buffer->vaddr); in cma_heap_vmap()
220 vaddr = cma_heap_do_vmap(buffer); in cma_heap_vmap()
[all …]
system_heap.c
36 void *vaddr; member
178 invalidate_kernel_vmap_range(buffer->vaddr, buffer->len); in system_heap_dma_buf_begin_cpu_access()
201 flush_kernel_vmap_range(buffer->vaddr, buffer->len); in system_heap_dma_buf_end_cpu_access()
248 void *vaddr; in system_heap_do_vmap() local
261 vaddr = vmap(pages, npages, VM_MAP, pgprot); in system_heap_do_vmap()
264 if (!vaddr) in system_heap_do_vmap()
267 return vaddr; in system_heap_do_vmap()
273 void *vaddr; in system_heap_vmap() local
279 iosys_map_set_vaddr(map, buffer->vaddr); in system_heap_vmap()
283 vaddr = system_heap_do_vmap(buffer); in system_heap_vmap()
[all …]
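
Both heaps vmap their page array once, cache the result in buffer->vaddr, and publish it through an iosys_map. A simplified sketch of that hook, with the heaps' reference counting and locking omitted:

/* Simplified sketch of the heaps' vmap hook; locking/refcounts omitted. */
#include <linux/vmalloc.h>
#include <linux/iosys-map.h>

struct demo_heap_buffer {		/* modeled on the fields above */
	struct page **pages;
	unsigned long pagecount;
	void *vaddr;
};

static int demo_heap_vmap(struct demo_heap_buffer *buffer,
			  struct iosys_map *map)
{
	if (!buffer->vaddr) {
		buffer->vaddr = vmap(buffer->pages, buffer->pagecount,
				     VM_MAP, PAGE_KERNEL);
		if (!buffer->vaddr)
			return -ENOMEM;
	}
	iosys_map_set_vaddr(map, buffer->vaddr);
	return 0;
}
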
/drivers/xen/xenbus/
xenbus_client.c
91 void **vaddr);
92 int (*unmap)(struct xenbus_device *dev, void *vaddr);
379 int xenbus_setup_ring(struct xenbus_device *dev, gfp_t gfp, void **vaddr, in xenbus_setup_ring() argument
388 addr = *vaddr = alloc_pages_exact(ring_size, gfp | __GFP_ZERO); in xenbus_setup_ring()
389 if (!*vaddr) { in xenbus_setup_ring()
404 if (is_vmalloc_addr(*vaddr)) in xenbus_setup_ring()
419 if (*vaddr) in xenbus_setup_ring()
420 free_pages_exact(*vaddr, ring_size); in xenbus_setup_ring()
423 *vaddr = NULL; in xenbus_setup_ring()
438 void xenbus_teardown_ring(void **vaddr, unsigned int nr_pages, in xenbus_teardown_ring() argument
[all …]
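
xenbus_setup_ring() obtains the shared ring as a physically contiguous, zeroed block from alloc_pages_exact(), freed with free_pages_exact() of the same size on the error path. A minimal sketch of that pairing, with the grant-table plumbing left out:

/* Sketch of the ring alloc/free pairing; grant-table setup is omitted. */
#include <linux/gfp.h>

static void *demo_ring_alloc(size_t ring_size, gfp_t gfp)
{
	/* physically contiguous and zeroed, as in xenbus_setup_ring() */
	return alloc_pages_exact(ring_size, gfp | __GFP_ZERO);
}

static void demo_ring_free(void *vaddr, size_t ring_size)
{
	if (vaddr)
		free_pages_exact(vaddr, ring_size);	/* same size as alloc */
}
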
/drivers/media/v4l2-core/
videobuf-vmalloc.c
102 __func__, i, mem->vaddr); in videobuf_vm_close()
104 vfree(mem->vaddr); in videobuf_vm_close()
105 mem->vaddr = NULL; in videobuf_vm_close()
170 if (!mem->vaddr) { in __videobuf_iolock()
189 mem->vaddr = vmalloc_user(pages); in __videobuf_iolock()
190 if (!mem->vaddr) { in __videobuf_iolock()
195 mem->vaddr, pages); in __videobuf_iolock()
234 mem->vaddr = vmalloc_user(pages); in __videobuf_mmap_mapper()
235 if (!mem->vaddr) { in __videobuf_mmap_mapper()
239 dprintk(1, "vmalloc is at addr %p (%d pages)\n", mem->vaddr, pages); in __videobuf_mmap_mapper()
[all …]
videobuf-dma-contig.c
25 void *vaddr; member
42 mem->vaddr = dma_alloc_coherent(dev, mem->size, &mem->dma_handle, in __videobuf_dc_alloc()
44 if (!mem->vaddr) { in __videobuf_dc_alloc()
49 dev_dbg(dev, "dma mapped data is at %p (%ld)\n", mem->vaddr, mem->size); in __videobuf_dc_alloc()
57 dma_free_coherent(dev, mem->size, mem->vaddr, mem->dma_handle); in __videobuf_dc_free()
59 mem->vaddr = NULL; in __videobuf_dc_free()
113 i, mem->vaddr); in videobuf_vm_close()
116 mem->vaddr = NULL; in videobuf_vm_close()
230 return mem->vaddr; in __videobuf_to_vaddr()
247 if (!mem->vaddr) { in __videobuf_iolock()
[all …]
/drivers/gpu/drm/i915/gt/
selftest_mocs.c
20 void *vaddr; member
83 arg->vaddr = i915_gem_object_pin_map_unlocked(arg->scratch->obj, I915_MAP_WB); in live_mocs_init()
84 if (IS_ERR(arg->vaddr)) { in live_mocs_init()
85 err = PTR_ERR(arg->vaddr); in live_mocs_init()
160 u32 **vaddr) in check_mocs_table() argument
169 if (**vaddr != expect) { in check_mocs_table()
171 engine->name, i, **vaddr, expect); in check_mocs_table()
174 ++*vaddr; in check_mocs_table()
192 u32 **vaddr) in check_l3cc_table() argument
203 if (!mcr_range(engine->i915, reg) && **vaddr != expect) { in check_l3cc_table()
[all …]
shmem_utils.c
59 void *vaddr; in shmem_pin_map() local
73 vaddr = vmap(pages, n_pages, VM_MAP_PUT_PAGES, PAGE_KERNEL); in shmem_pin_map()
74 if (!vaddr) in shmem_pin_map()
77 return vaddr; in shmem_pin_map()
101 void *vaddr; in __shmem_rw() local
108 vaddr = kmap(page); in __shmem_rw()
110 memcpy(vaddr + offset_in_page(off), ptr, this); in __shmem_rw()
113 memcpy(ptr, vaddr + offset_in_page(off), this); in __shmem_rw()
136 void *vaddr; in shmem_read_to_iosys_map() local
143 vaddr = kmap(page); in shmem_read_to_iosys_map()
[all …]
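
__shmem_rw() in shmem_utils.c walks the object page by page: kmap() the page, copy the chunk at offset_in_page(), release it. A sketch of one read iteration (kmap_local_page() would be the modern choice; kmap() matches the hits above):

/* One read iteration of the copy loop; kmap() used to match the hits. */
#include <linux/highmem.h>
#include <linux/mm.h>
#include <linux/string.h>

static void demo_copy_from_page(struct page *page, loff_t off,
				void *ptr, size_t len)
{
	void *vaddr = kmap(page);

	memcpy(ptr, vaddr + offset_in_page(off), len);
	kunmap(page);
}
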
gen8_ppgtt.c
241 u64 *vaddr; in __gen8_ppgtt_clear() local
258 vaddr = px_vaddr(pt); in __gen8_ppgtt_clear()
259 memset64(vaddr + pte, in __gen8_ppgtt_clear()
431 gen8_pte_t *vaddr; in gen8_ppgtt_insert_pte() local
434 vaddr = px_vaddr(i915_pt_entry(pd, gen8_pd_index(idx, 1))); in gen8_ppgtt_insert_pte()
437 vaddr[gen8_pd_index(idx, 0)] = pte_encode | iter->dma; in gen8_ppgtt_insert_pte()
460 drm_clflush_virt_range(vaddr, PAGE_SIZE); in gen8_ppgtt_insert_pte()
461 vaddr = px_vaddr(i915_pt_entry(pd, gen8_pd_index(idx, 1))); in gen8_ppgtt_insert_pte()
464 drm_clflush_virt_range(vaddr, PAGE_SIZE); in gen8_ppgtt_insert_pte()
491 gen8_pte_t *vaddr; in xehpsdv_ppgtt_insert_huge() local
[all …]
intel_gtt.c
87 void *vaddr; in map_pt_dma() local
90 vaddr = i915_gem_object_pin_map_unlocked(obj, type); in map_pt_dma()
91 if (IS_ERR(vaddr)) in map_pt_dma()
92 return PTR_ERR(vaddr); in map_pt_dma()
101 void *vaddr; in map_pt_dma_locked() local
104 vaddr = i915_gem_object_pin_map(obj, type); in map_pt_dma_locked()
105 if (IS_ERR(vaddr)) in map_pt_dma_locked()
106 return PTR_ERR(vaddr); in map_pt_dma_locked()
310 void *vaddr = __px_vaddr(p); in fill_page_dma() local
312 memset64(vaddr, val, count); in fill_page_dma()
[all …]
/drivers/gpu/drm/i915/gem/
i915_gem_phys.c
27 void *vaddr; in i915_gem_object_get_pages_phys() local
39 vaddr = dma_alloc_coherent(obj->base.dev->dev, in i915_gem_object_get_pages_phys()
42 if (!vaddr) in i915_gem_object_get_pages_phys()
56 sg_assign_page(sg, (struct page *)vaddr); in i915_gem_object_get_pages_phys()
60 dst = vaddr; in i915_gem_object_get_pages_phys()
91 vaddr, dma); in i915_gem_object_get_pages_phys()
100 void *vaddr = sg_page(pages->sgl); in i915_gem_object_put_pages_phys() local
106 void *src = vaddr; in i915_gem_object_put_pages_phys()
137 vaddr, dma); in i915_gem_object_put_pages_phys()
143 void *vaddr = sg_page(obj->mm.pages->sgl) + args->offset; in i915_gem_object_pwrite_phys() local
[all …]
/drivers/gpu/drm/
drm_gem_dma_helper.c
147 dma_obj->vaddr = dma_alloc_noncoherent(drm->dev, size, in drm_gem_dma_create()
152 dma_obj->vaddr = dma_alloc_wc(drm->dev, size, in drm_gem_dma_create()
156 if (!dma_obj->vaddr) { in drm_gem_dma_create()
229 struct iosys_map map = IOSYS_MAP_INIT_VADDR(dma_obj->vaddr); in drm_gem_dma_free()
232 if (dma_obj->vaddr) in drm_gem_dma_free()
235 } else if (dma_obj->vaddr) { in drm_gem_dma_free()
238 dma_obj->vaddr, dma_obj->dma_addr, in drm_gem_dma_free()
242 dma_obj->vaddr, dma_obj->dma_addr); in drm_gem_dma_free()
389 return dma_obj->vaddr ? (unsigned long)dma_obj->vaddr : -EINVAL; in drm_gem_dma_get_unmapped_area()
406 drm_printf_indent(p, indent, "vaddr=%p\n", dma_obj->vaddr); in drm_gem_dma_print_info()
[all …]
drm_format_helper.c
46 const void *vaddr, const struct drm_framebuffer *fb, in __drm_fb_xfrm() argument
70 vaddr += clip_offset(clip, fb->pitches[0], fb->format->cpp[0]); in __drm_fb_xfrm()
74 sbuf = memcpy(stmp, vaddr, sbuf_len); in __drm_fb_xfrm()
76 sbuf = vaddr; in __drm_fb_xfrm()
78 vaddr += fb->pitches[0]; in __drm_fb_xfrm()
89 const void *vaddr, const struct drm_framebuffer *fb, in __drm_fb_xfrm_toio() argument
114 vaddr += clip_offset(clip, fb->pitches[0], fb->format->cpp[0]); in __drm_fb_xfrm_toio()
118 sbuf = memcpy(stmp, vaddr, sbuf_len); in __drm_fb_xfrm_toio()
120 sbuf = vaddr; in __drm_fb_xfrm_toio()
123 vaddr += fb->pitches[0]; in __drm_fb_xfrm_toio()
[all …]
/drivers/gpu/drm/i915/selftests/
igt_spinner.c
47 void *vaddr; in igt_spinner_pin_obj() local
58 vaddr = i915_gem_object_pin_map(obj, mode); in igt_spinner_pin_obj()
63 if (IS_ERR(vaddr)) in igt_spinner_pin_obj()
64 return vaddr; in igt_spinner_pin_obj()
76 return vaddr; in igt_spinner_pin_obj()
83 void *vaddr; in igt_spinner_pin() local
90 vaddr = igt_spinner_pin_obj(ce, ww, spin->hws, I915_MAP_WB, &spin->hws_vma); in igt_spinner_pin()
91 if (IS_ERR(vaddr)) in igt_spinner_pin()
92 return PTR_ERR(vaddr); in igt_spinner_pin()
94 spin->seqno = memset(vaddr, 0xff, PAGE_SIZE); in igt_spinner_pin()
[all …]
/drivers/dma/dw-edma/
dw-edma-pcie.c
229 ll_region->vaddr = pcim_iomap_table(pdev)[ll_block->bar]; in dw_edma_pcie_probe()
230 if (!ll_region->vaddr) in dw_edma_pcie_probe()
233 ll_region->vaddr += ll_block->off; in dw_edma_pcie_probe()
238 dt_region->vaddr = pcim_iomap_table(pdev)[dt_block->bar]; in dw_edma_pcie_probe()
239 if (!dt_region->vaddr) in dw_edma_pcie_probe()
242 dt_region->vaddr += dt_block->off; in dw_edma_pcie_probe()
254 ll_region->vaddr = pcim_iomap_table(pdev)[ll_block->bar]; in dw_edma_pcie_probe()
255 if (!ll_region->vaddr) in dw_edma_pcie_probe()
258 ll_region->vaddr += ll_block->off; in dw_edma_pcie_probe()
263 dt_region->vaddr = pcim_iomap_table(pdev)[dt_block->bar]; in dw_edma_pcie_probe()
[all …]
/drivers/misc/mei/
dma-ring.c
30 if (dscr->vaddr) in mei_dmam_dscr_alloc()
33 dscr->vaddr = dmam_alloc_coherent(dev->dev, dscr->size, &dscr->daddr, in mei_dmam_dscr_alloc()
35 if (!dscr->vaddr) in mei_dmam_dscr_alloc()
50 if (!dscr->vaddr) in mei_dmam_dscr_free()
53 dmam_free_coherent(dev->dev, dscr->size, dscr->vaddr, dscr->daddr); in mei_dmam_dscr_free()
54 dscr->vaddr = NULL; in mei_dmam_dscr_free()
98 return !!dev->dr_dscr[DMA_DSCR_HOST].vaddr; in mei_dma_ring_is_allocated()
104 return (struct hbm_dma_ring_ctrl *)dev->dr_dscr[DMA_DSCR_CTRL].vaddr; in mei_dma_ring_ctrl()
131 unsigned char *dbuf = dev->dr_dscr[DMA_DSCR_DEVICE].vaddr; in mei_dma_copy_from()
151 unsigned char *hbuf = dev->dr_dscr[DMA_DSCR_HOST].vaddr; in mei_dma_copy_to()
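
The mei descriptors use device-managed coherent DMA: dmam_alloc_coherent() ties the buffer's lifetime to the struct device, so error paths need no explicit free. A sketch modeled on, but not copied from, the fields above:

/* Sketch modeled on the mei fields above; not copied from dma-ring.c. */
#include <linux/dma-mapping.h>

struct demo_dscr {
	void *vaddr;
	dma_addr_t daddr;
	size_t size;
};

static int demo_dscr_alloc(struct device *dev, struct demo_dscr *dscr)
{
	if (dscr->vaddr)
		return 0;	/* already allocated */

	/* managed: freed automatically when the device is released */
	dscr->vaddr = dmam_alloc_coherent(dev, dscr->size, &dscr->daddr,
					  GFP_KERNEL);
	return dscr->vaddr ? 0 : -ENOMEM;
}
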
/drivers/mailbox/
pcc.c
76 void __iomem *vaddr; member
117 static void read_register(void __iomem *vaddr, u64 *val, unsigned int bit_width) in read_register() argument
121 *val = readb(vaddr); in read_register()
124 *val = readw(vaddr); in read_register()
127 *val = readl(vaddr); in read_register()
130 *val = readq(vaddr); in read_register()
135 static void write_register(void __iomem *vaddr, u64 val, unsigned int bit_width) in write_register() argument
139 writeb(val, vaddr); in write_register()
142 writew(val, vaddr); in write_register()
145 writel(val, vaddr); in write_register()
[all …]
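
read_register()/write_register() in pcc.c dispatch on the access width declared by the ACPI PCC subspace. The write side can be reconstructed almost verbatim from the hits (the 64-bit case assumes the platform provides writeq()):

/* Reconstructed from the hits above; writeq() assumes 64-bit MMIO works. */
#include <linux/io.h>

static void demo_write_register(void __iomem *vaddr, u64 val,
				unsigned int bit_width)
{
	switch (bit_width) {
	case 8:
		writeb(val, vaddr);
		break;
	case 16:
		writew(val, vaddr);
		break;
	case 32:
		writel(val, vaddr);
		break;
	case 64:
		writeq(val, vaddr);
		break;
	}
}
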
/drivers/gpu/drm/etnaviv/
etnaviv_gem_prime.c
30 void *vaddr; in etnaviv_gem_prime_vmap() local
32 vaddr = etnaviv_gem_vmap(obj); in etnaviv_gem_prime_vmap()
33 if (!vaddr) in etnaviv_gem_prime_vmap()
35 iosys_map_set_vaddr(map, vaddr); in etnaviv_gem_prime_vmap()
65 struct iosys_map map = IOSYS_MAP_INIT_VADDR(etnaviv_obj->vaddr); in etnaviv_gem_prime_release()
67 if (etnaviv_obj->vaddr) in etnaviv_gem_prime_release()
88 return map.vaddr; in etnaviv_gem_prime_vmap_impl()
/drivers/gpu/drm/msm/
msm_gem_prime.c
42 void *vaddr; in msm_gem_prime_vmap() local
44 vaddr = msm_gem_get_vaddr(obj); in msm_gem_prime_vmap()
45 if (IS_ERR(vaddr)) in msm_gem_prime_vmap()
46 return PTR_ERR(vaddr); in msm_gem_prime_vmap()
47 iosys_map_set_vaddr(map, vaddr); in msm_gem_prime_vmap()
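
The two prime vmap hooks above differ only in error style: etnaviv's helper returns NULL on failure, msm's returns an ERR_PTR. A sketch of the msm-style hook, where demo_gem_get_vaddr() is a hypothetical stand-in for the driver's real mapping helper:

/* Sketch of an msm-style hook; demo_gem_get_vaddr() is hypothetical. */
#include <linux/iosys-map.h>
#include <linux/err.h>
#include <drm/drm_gem.h>

void *demo_gem_get_vaddr(struct drm_gem_object *obj);	/* stand-in helper */

static int demo_gem_prime_vmap(struct drm_gem_object *obj,
			       struct iosys_map *map)
{
	void *vaddr = demo_gem_get_vaddr(obj);

	if (IS_ERR(vaddr))		/* msm style: ERR_PTR on failure */
		return PTR_ERR(vaddr);

	iosys_map_set_vaddr(map, vaddr);
	return 0;
}
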
