Home
last modified time | relevance | path

Searched refs:vma (Results 1 – 25 of 262) sorted by relevance

Pages: 1 2 3 4 5 6 7 8 9 10 11

/drivers/gpu/drm/
Ddrm_vm.c43 static void drm_vm_open(struct vm_area_struct *vma);
44 static void drm_vm_close(struct vm_area_struct *vma);
46 static pgprot_t drm_io_prot(uint32_t map_type, struct vm_area_struct *vma) in drm_io_prot() argument
48 pgprot_t tmp = vm_get_page_prot(vma->vm_flags); in drm_io_prot()
60 if (efi_range_is_wc(vma->vm_start, vma->vm_end - in drm_io_prot()
61 vma->vm_start)) in drm_io_prot()
71 static pgprot_t drm_dma_prot(uint32_t map_type, struct vm_area_struct *vma) in drm_dma_prot() argument
73 pgprot_t tmp = vm_get_page_prot(vma->vm_flags); in drm_dma_prot()
92 static int drm_do_vm_fault(struct vm_area_struct *vma, struct vm_fault *vmf) in drm_do_vm_fault() argument
94 struct drm_file *priv = vma->vm_file->private_data; in drm_do_vm_fault()
[all …]
Ddrm_info.c232 struct vm_area_struct *vma; in drm_vma_info() local
243 vma = pt->vma; in drm_vma_info()
244 if (!vma) in drm_vma_info()
249 (void *)vma->vm_start, (void *)vma->vm_end, in drm_vma_info()
250 vma->vm_flags & VM_READ ? 'r' : '-', in drm_vma_info()
251 vma->vm_flags & VM_WRITE ? 'w' : '-', in drm_vma_info()
252 vma->vm_flags & VM_EXEC ? 'x' : '-', in drm_vma_info()
253 vma->vm_flags & VM_MAYSHARE ? 's' : 'p', in drm_vma_info()
254 vma->vm_flags & VM_LOCKED ? 'l' : '-', in drm_vma_info()
255 vma->vm_flags & VM_IO ? 'i' : '-', in drm_vma_info()
[all …]
/drivers/media/v4l2-core/
Dvideobuf2-memops.c36 struct vm_area_struct *vb2_get_vma(struct vm_area_struct *vma) in vb2_get_vma() argument
44 if (vma->vm_ops && vma->vm_ops->open) in vb2_get_vma()
45 vma->vm_ops->open(vma); in vb2_get_vma()
47 if (vma->vm_file) in vb2_get_vma()
48 get_file(vma->vm_file); in vb2_get_vma()
50 memcpy(vma_copy, vma, sizeof(*vma)); in vb2_get_vma()
67 void vb2_put_vma(struct vm_area_struct *vma) in vb2_put_vma() argument
69 if (!vma) in vb2_put_vma()
72 if (vma->vm_ops && vma->vm_ops->close) in vb2_put_vma()
73 vma->vm_ops->close(vma); in vb2_put_vma()
[all …]
Dvideobuf-dma-contig.c66 static void videobuf_vm_open(struct vm_area_struct *vma) in videobuf_vm_open() argument
68 struct videobuf_mapping *map = vma->vm_private_data; in videobuf_vm_open()
71 map, map->count, vma->vm_start, vma->vm_end); in videobuf_vm_open()
76 static void videobuf_vm_close(struct vm_area_struct *vma) in videobuf_vm_close() argument
78 struct videobuf_mapping *map = vma->vm_private_data; in videobuf_vm_close()
83 map, map->count, vma->vm_start, vma->vm_end); in videobuf_vm_close()
164 struct vm_area_struct *vma; in videobuf_dma_contig_user_get() local
176 vma = find_vma(mm, vb->baddr); in videobuf_dma_contig_user_get()
177 if (!vma) in videobuf_dma_contig_user_get()
180 if ((vb->baddr + mem->size) > vma->vm_end) in videobuf_dma_contig_user_get()
[all …]
Dvideobuf-vmalloc.c54 static void videobuf_vm_open(struct vm_area_struct *vma) in videobuf_vm_open() argument
56 struct videobuf_mapping *map = vma->vm_private_data; in videobuf_vm_open()
59 map->count, vma->vm_start, vma->vm_end); in videobuf_vm_open()
64 static void videobuf_vm_close(struct vm_area_struct *vma) in videobuf_vm_close() argument
66 struct videobuf_mapping *map = vma->vm_private_data; in videobuf_vm_close()
71 map->count, vma->vm_start, vma->vm_end); in videobuf_vm_close()
212 rc = remap_vmalloc_range(mem->vma, (void *)vb->baddr, 0); in __videobuf_iolock()
234 struct vm_area_struct *vma) in __videobuf_mmap_mapper() argument
250 buf->baddr = vma->vm_start; in __videobuf_mmap_mapper()
256 pages = PAGE_ALIGN(vma->vm_end - vma->vm_start); in __videobuf_mmap_mapper()
[all …]
Dvideobuf2-vmalloc.c27 struct vm_area_struct *vma; member
78 struct vm_area_struct *vma; in vb2_vmalloc_get_userptr() local
90 vma = find_vma(current->mm, vaddr); in vb2_vmalloc_get_userptr()
91 if (vma && (vma->vm_flags & VM_PFNMAP) && (vma->vm_pgoff)) { in vb2_vmalloc_get_userptr()
92 if (vb2_get_contig_userptr(vaddr, size, &vma, &physp)) in vb2_vmalloc_get_userptr()
94 buf->vma = vma; in vb2_vmalloc_get_userptr()
153 if (buf->vma) in vb2_vmalloc_put_userptr()
154 vb2_put_vma(buf->vma); in vb2_vmalloc_put_userptr()
179 static int vb2_vmalloc_mmap(void *buf_priv, struct vm_area_struct *vma) in vb2_vmalloc_mmap() argument
189 ret = remap_vmalloc_range(vma, buf->vaddr, 0); in vb2_vmalloc_mmap()
[all …]
Dvideobuf2-dma-contig.c42 struct vm_area_struct *vma; member
186 static int vb2_dc_mmap(void *buf_priv, struct vm_area_struct *vma) in vb2_dc_mmap() argument
200 vma->vm_pgoff = 0; in vb2_dc_mmap()
202 ret = dma_mmap_coherent(buf->dev, vma, buf->vaddr, in vb2_dc_mmap()
210 vma->vm_flags |= VM_DONTEXPAND | VM_DONTDUMP; in vb2_dc_mmap()
211 vma->vm_private_data = &buf->handler; in vb2_dc_mmap()
212 vma->vm_ops = &vb2_common_vm_ops; in vb2_dc_mmap()
214 vma->vm_ops->open(vma); in vb2_dc_mmap()
217 __func__, (unsigned long)buf->dma_addr, vma->vm_start, in vb2_dc_mmap()
357 struct vm_area_struct *vma) in vb2_dc_dmabuf_ops_mmap() argument
[all …]
/drivers/infiniband/hw/ehca/
Dehca_uverbs.c71 static void ehca_mm_open(struct vm_area_struct *vma) in ehca_mm_open() argument
73 u32 *count = (u32 *)vma->vm_private_data; in ehca_mm_open()
76 vma->vm_start, vma->vm_end); in ehca_mm_open()
82 vma->vm_start, vma->vm_end); in ehca_mm_open()
84 vma->vm_start, vma->vm_end, *count); in ehca_mm_open()
87 static void ehca_mm_close(struct vm_area_struct *vma) in ehca_mm_close() argument
89 u32 *count = (u32 *)vma->vm_private_data; in ehca_mm_close()
92 vma->vm_start, vma->vm_end); in ehca_mm_close()
97 vma->vm_start, vma->vm_end, *count); in ehca_mm_close()
105 static int ehca_mmap_fw(struct vm_area_struct *vma, struct h_galpas *galpas, in ehca_mmap_fw() argument
[all …]
/drivers/xen/
Dprivcmd.c45 static int privcmd_enforce_singleshot_mapping(struct vm_area_struct *vma);
159 struct vm_area_struct *vma; member
167 struct vm_area_struct *vma = st->vma; in mmap_mfn_range() local
177 ((msg->va+(msg->npages<<PAGE_SHIFT)) > vma->vm_end)) in mmap_mfn_range()
180 rc = xen_remap_domain_mfn_range(vma, in mmap_mfn_range()
183 vma->vm_page_prot, in mmap_mfn_range()
197 struct vm_area_struct *vma; in privcmd_ioctl_mmap() local
223 vma = find_vma(mm, msg->va); in privcmd_ioctl_mmap()
226 if (!vma || (msg->va != vma->vm_start) || in privcmd_ioctl_mmap()
227 !privcmd_enforce_singleshot_mapping(vma)) in privcmd_ioctl_mmap()
[all …]
Dgntdev.c82 struct vm_area_struct *vma; member
233 unsigned int pgnr = (addr - map->vma->vm_start) >> PAGE_SHIFT; in find_grant_ptes()
372 static void gntdev_vma_open(struct vm_area_struct *vma) in gntdev_vma_open() argument
374 struct grant_map *map = vma->vm_private_data; in gntdev_vma_open()
376 pr_debug("gntdev_vma_open %p\n", vma); in gntdev_vma_open()
380 static void gntdev_vma_close(struct vm_area_struct *vma) in gntdev_vma_close() argument
382 struct grant_map *map = vma->vm_private_data; in gntdev_vma_close()
383 struct file *file = vma->vm_file; in gntdev_vma_close()
386 pr_debug("gntdev_vma_close %p\n", vma); in gntdev_vma_close()
396 map->vma = NULL; in gntdev_vma_close()
[all …]
/drivers/gpu/drm/nouveau/core/subdev/vm/
Dbase.c32 nouveau_vm_map_at(struct nouveau_vma *vma, u64 delta, struct nouveau_mem *node) in nouveau_vm_map_at() argument
34 struct nouveau_vm *vm = vma->vm; in nouveau_vm_map_at()
37 int big = vma->node->type != vmm->spg_shift; in nouveau_vm_map_at()
38 u32 offset = vma->node->offset + (delta >> 12); in nouveau_vm_map_at()
39 u32 bits = vma->node->type - 12; in nouveau_vm_map_at()
58 vmm->map(vma, pgt, node, pte, len, phys, delta); in nouveau_vm_map_at()
68 delta += (u64)len << vma->node->type; in nouveau_vm_map_at()
76 nouveau_vm_map(struct nouveau_vma *vma, struct nouveau_mem *node) in nouveau_vm_map() argument
78 nouveau_vm_map_at(vma, 0, node); in nouveau_vm_map()
82 nouveau_vm_map_sg_table(struct nouveau_vma *vma, u64 delta, u64 length, in nouveau_vm_map_sg_table() argument
[all …]
/drivers/gpu/drm/exynos/
Dexynos_drm_gem.c55 struct vm_area_struct *vma) in update_vm_cache_attr() argument
61 vma->vm_page_prot = vm_get_page_prot(vma->vm_flags); in update_vm_cache_attr()
63 vma->vm_page_prot = in update_vm_cache_attr()
64 pgprot_writecombine(vm_get_page_prot(vma->vm_flags)); in update_vm_cache_attr()
66 vma->vm_page_prot = in update_vm_cache_attr()
67 pgprot_noncached(vm_get_page_prot(vma->vm_flags)); in update_vm_cache_attr()
78 struct vm_area_struct *vma, in exynos_drm_gem_map_buf() argument
105 return vm_insert_mixed(vma, f_vaddr, pfn); in exynos_drm_gem_map_buf()
364 struct vm_area_struct *vma) in exynos_drm_gem_mmap_buffer() argument
376 vma->vm_flags |= VM_IO | VM_DONTEXPAND | VM_DONTDUMP; in exynos_drm_gem_mmap_buffer()
[all …]
Dexynos_drm_gem.h70 struct vm_area_struct *vma; member
164 int exynos_drm_gem_fault(struct vm_area_struct *vma, struct vm_fault *vmf);
167 int exynos_drm_gem_mmap(struct file *filp, struct vm_area_struct *vma);
169 static inline int vma_is_io(struct vm_area_struct *vma) in vma_is_io() argument
171 return !!(vma->vm_flags & (VM_IO | VM_PFNMAP)); in vma_is_io()
175 struct vm_area_struct *exynos_gem_get_vma(struct vm_area_struct *vma);
178 void exynos_gem_put_vma(struct vm_area_struct *vma);
184 struct vm_area_struct *vma);
189 struct vm_area_struct *vma);
/drivers/char/
Dmspec.c145 mspec_open(struct vm_area_struct *vma) in mspec_open() argument
149 vdata = vma->vm_private_data; in mspec_open()
160 mspec_close(struct vm_area_struct *vma) in mspec_close() argument
166 vdata = vma->vm_private_data; in mspec_close()
200 mspec_fault(struct vm_area_struct *vma, struct vm_fault *vmf) in mspec_fault() argument
205 struct vma_data *vdata = vma->vm_private_data; in mspec_fault()
236 vm_insert_pfn(vma, (unsigned long)vmf->virtual_address, pfn); in mspec_fault()
255 mspec_mmap(struct file *file, struct vm_area_struct *vma, in mspec_mmap() argument
261 if (vma->vm_pgoff != 0) in mspec_mmap()
264 if ((vma->vm_flags & VM_SHARED) == 0) in mspec_mmap()
[all …]
Duv_mmtimer.c43 static int uv_mmtimer_mmap(struct file *file, struct vm_area_struct *vma);
147 static int uv_mmtimer_mmap(struct file *file, struct vm_area_struct *vma) in uv_mmtimer_mmap() argument
151 if (vma->vm_end - vma->vm_start != PAGE_SIZE) in uv_mmtimer_mmap()
154 if (vma->vm_flags & VM_WRITE) in uv_mmtimer_mmap()
160 vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot); in uv_mmtimer_mmap()
166 if (remap_pfn_range(vma, vma->vm_start, uv_mmtimer_addr >> PAGE_SHIFT, in uv_mmtimer_mmap()
167 PAGE_SIZE, vma->vm_page_prot)) { in uv_mmtimer_mmap()
/drivers/sbus/char/
Dflash.c37 flash_mmap(struct file *file, struct vm_area_struct *vma) in flash_mmap() argument
47 if ((vma->vm_flags & VM_READ) && in flash_mmap()
48 (vma->vm_flags & VM_WRITE)) { in flash_mmap()
52 if (vma->vm_flags & VM_READ) { in flash_mmap()
55 } else if (vma->vm_flags & VM_WRITE) { in flash_mmap()
65 if ((vma->vm_pgoff << PAGE_SHIFT) > size) in flash_mmap()
67 addr = vma->vm_pgoff + (addr >> PAGE_SHIFT); in flash_mmap()
69 if (vma->vm_end - (vma->vm_start + (vma->vm_pgoff << PAGE_SHIFT)) > size) in flash_mmap()
70 size = vma->vm_end - (vma->vm_start + (vma->vm_pgoff << PAGE_SHIFT)); in flash_mmap()
72 vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot); in flash_mmap()
[all …]
/drivers/gpu/drm/ttm/
Dttm_bo_vm.c74 static int ttm_bo_vm_fault(struct vm_area_struct *vma, struct vm_fault *vmf) in ttm_bo_vm_fault() argument
77 vma->vm_private_data; in ttm_bo_vm_fault()
148 page_offset = ((address - vma->vm_start) >> PAGE_SHIFT) + in ttm_bo_vm_fault()
149 bo->vm_node->start - vma->vm_pgoff; in ttm_bo_vm_fault()
150 page_last = vma_pages(vma) + in ttm_bo_vm_fault()
151 bo->vm_node->start - vma->vm_pgoff; in ttm_bo_vm_fault()
172 vma->vm_page_prot = ttm_io_prot(bo->mem.placement, in ttm_bo_vm_fault()
173 vma->vm_page_prot); in ttm_bo_vm_fault()
176 vma->vm_page_prot = (bo->mem.placement & TTM_PL_FLAG_CACHED) ? in ttm_bo_vm_fault()
177 vm_get_page_prot(vma->vm_flags) : in ttm_bo_vm_fault()
[all …]
/drivers/infiniband/hw/ipath/
Dipath_mmap.c64 static void ipath_vma_open(struct vm_area_struct *vma) in ipath_vma_open() argument
66 struct ipath_mmap_info *ip = vma->vm_private_data; in ipath_vma_open()
71 static void ipath_vma_close(struct vm_area_struct *vma) in ipath_vma_close() argument
73 struct ipath_mmap_info *ip = vma->vm_private_data; in ipath_vma_close()
89 int ipath_mmap(struct ib_ucontext *context, struct vm_area_struct *vma) in ipath_mmap() argument
92 unsigned long offset = vma->vm_pgoff << PAGE_SHIFT; in ipath_mmap()
93 unsigned long size = vma->vm_end - vma->vm_start; in ipath_mmap()
115 ret = remap_vmalloc_range(vma, ip->obj, 0); in ipath_mmap()
118 vma->vm_ops = &ipath_vm_ops; in ipath_mmap()
119 vma->vm_private_data = ip; in ipath_mmap()
[all …]
/drivers/infiniband/hw/qib/
Dqib_mmap.c64 static void qib_vma_open(struct vm_area_struct *vma) in qib_vma_open() argument
66 struct qib_mmap_info *ip = vma->vm_private_data; in qib_vma_open()
71 static void qib_vma_close(struct vm_area_struct *vma) in qib_vma_close() argument
73 struct qib_mmap_info *ip = vma->vm_private_data; in qib_vma_close()
89 int qib_mmap(struct ib_ucontext *context, struct vm_area_struct *vma) in qib_mmap() argument
92 unsigned long offset = vma->vm_pgoff << PAGE_SHIFT; in qib_mmap()
93 unsigned long size = vma->vm_end - vma->vm_start; in qib_mmap()
115 ret = remap_vmalloc_range(vma, ip->obj, 0); in qib_mmap()
118 vma->vm_ops = &qib_vm_ops; in qib_mmap()
119 vma->vm_private_data = ip; in qib_mmap()
[all …]
Dqib_file_ops.c722 static int qib_mmap_mem(struct vm_area_struct *vma, struct qib_ctxtdata *rcd, in qib_mmap_mem() argument
729 if ((vma->vm_end - vma->vm_start) > len) { in qib_mmap_mem()
732 vma->vm_end - vma->vm_start, len); in qib_mmap_mem()
742 if (vma->vm_flags & VM_WRITE) { in qib_mmap_mem()
750 vma->vm_flags &= ~VM_MAYWRITE; in qib_mmap_mem()
754 ret = remap_pfn_range(vma, vma->vm_start, pfn, in qib_mmap_mem()
755 len, vma->vm_page_prot); in qib_mmap_mem()
764 static int mmap_ureg(struct vm_area_struct *vma, struct qib_devdata *dd, in mmap_ureg() argument
777 if ((vma->vm_end - vma->vm_start) > sz) { in mmap_ureg()
780 vma->vm_end - vma->vm_start); in mmap_ureg()
[all …]
/drivers/misc/sgi-gru/
Dgrufile.c68 static void gru_vma_close(struct vm_area_struct *vma) in gru_vma_close() argument
74 if (!vma->vm_private_data) in gru_vma_close()
77 vdata = vma->vm_private_data; in gru_vma_close()
78 vma->vm_private_data = NULL; in gru_vma_close()
79 gru_dbg(grudev, "vma %p, file %p, vdata %p\n", vma, vma->vm_file, in gru_vma_close()
102 static int gru_file_mmap(struct file *file, struct vm_area_struct *vma) in gru_file_mmap() argument
104 if ((vma->vm_flags & (VM_SHARED | VM_WRITE)) != (VM_SHARED | VM_WRITE)) in gru_file_mmap()
107 if (vma->vm_start & (GRU_GSEG_PAGESIZE - 1) || in gru_file_mmap()
108 vma->vm_end & (GRU_GSEG_PAGESIZE - 1)) in gru_file_mmap()
111 vma->vm_flags |= VM_IO | VM_PFNMAP | VM_LOCKED | in gru_file_mmap()
[all …]
/drivers/uio/
Duio.c584 static int uio_find_mem_index(struct vm_area_struct *vma) in uio_find_mem_index() argument
586 struct uio_device *idev = vma->vm_private_data; in uio_find_mem_index()
588 if (vma->vm_pgoff < MAX_UIO_MAPS) { in uio_find_mem_index()
589 if (idev->info->mem[vma->vm_pgoff].size == 0) in uio_find_mem_index()
591 return (int)vma->vm_pgoff; in uio_find_mem_index()
596 static void uio_vma_open(struct vm_area_struct *vma) in uio_vma_open() argument
598 struct uio_device *idev = vma->vm_private_data; in uio_vma_open()
602 static void uio_vma_close(struct vm_area_struct *vma) in uio_vma_close() argument
604 struct uio_device *idev = vma->vm_private_data; in uio_vma_close()
608 static int uio_vma_fault(struct vm_area_struct *vma, struct vm_fault *vmf) in uio_vma_fault() argument
[all …]
/drivers/media/platform/omap3isp/
Dispqueue.c99 struct vm_area_struct *vma; in isp_video_buffer_lock_vma() local
122 vma = find_vma(current->mm, start); in isp_video_buffer_lock_vma()
123 if (vma == NULL) { in isp_video_buffer_lock_vma()
129 vma->vm_flags |= VM_LOCKED; in isp_video_buffer_lock_vma()
131 vma->vm_flags &= ~VM_LOCKED; in isp_video_buffer_lock_vma()
133 start = vma->vm_end + 1; in isp_video_buffer_lock_vma()
134 } while (vma->vm_end < end); in isp_video_buffer_lock_vma()
364 struct vm_area_struct *vma; in isp_video_buffer_prepare_pfnmap() local
380 vma = find_vma(current->mm, start); in isp_video_buffer_prepare_pfnmap()
381 if (vma == NULL || vma->vm_end < end) in isp_video_buffer_prepare_pfnmap()
[all …]
/drivers/gpu/drm/omapdrm/
Domap_gem_dmabuf.c144 struct vm_area_struct *vma) in omap_gem_dmabuf_mmap() argument
153 if (omap_gem_mmap_size(obj) < vma->vm_end - vma->vm_start) { in omap_gem_dmabuf_mmap()
163 vma->vm_flags |= VM_IO | VM_PFNMAP | VM_DONTEXPAND | VM_DONTDUMP; in omap_gem_dmabuf_mmap()
164 vma->vm_ops = obj->dev->driver->gem_vm_ops; in omap_gem_dmabuf_mmap()
165 vma->vm_private_data = obj; in omap_gem_dmabuf_mmap()
166 vma->vm_page_prot = pgprot_writecombine(vm_get_page_prot(vma->vm_flags)); in omap_gem_dmabuf_mmap()
174 vma->vm_ops->open(vma); in omap_gem_dmabuf_mmap()
178 return omap_gem_mmap_obj(obj, vma); in omap_gem_dmabuf_mmap()
/drivers/video/
Dfb_defio.c40 static int fb_deferred_io_fault(struct vm_area_struct *vma, in fb_deferred_io_fault() argument
45 struct fb_info *info = vma->vm_private_data; in fb_deferred_io_fault()
57 if (vma->vm_file) in fb_deferred_io_fault()
58 page->mapping = vma->vm_file->f_mapping; in fb_deferred_io_fault()
93 static int fb_deferred_io_mkwrite(struct vm_area_struct *vma, in fb_deferred_io_mkwrite() argument
97 struct fb_info *info = vma->vm_private_data; in fb_deferred_io_mkwrite()
107 file_update_time(vma->vm_file); in fb_deferred_io_mkwrite()
166 static int fb_deferred_io_mmap(struct fb_info *info, struct vm_area_struct *vma) in fb_deferred_io_mmap() argument
168 vma->vm_ops = &fb_deferred_io_vm_ops; in fb_deferred_io_mmap()
169 vma->vm_flags |= VM_DONTEXPAND | VM_DONTDUMP; in fb_deferred_io_mmap()
[all …]

Pages: 1 2 3 4 5 6 7 8 9 10 11