Searched refs:vma (Results 1 – 25 of 406) sorted by relevance

/drivers/gpu/drm/i915/
i915_vma.h
51 static inline bool i915_vma_is_active(const struct i915_vma *vma) in i915_vma_is_active() argument
53 return !i915_active_is_idle(&vma->active); in i915_vma_is_active()
60 int __must_check _i915_vma_move_to_active(struct i915_vma *vma,
65 i915_vma_move_to_active(struct i915_vma *vma, struct i915_request *rq, in i915_vma_move_to_active() argument
68 return _i915_vma_move_to_active(vma, rq, &rq->fence, flags); in i915_vma_move_to_active()
73 static inline bool i915_vma_is_ggtt(const struct i915_vma *vma) in i915_vma_is_ggtt() argument
75 return test_bit(I915_VMA_GGTT_BIT, __i915_vma_flags(vma)); in i915_vma_is_ggtt()
78 static inline bool i915_vma_is_dpt(const struct i915_vma *vma) in i915_vma_is_dpt() argument
80 return i915_is_dpt(vma->vm); in i915_vma_is_dpt()
83 static inline bool i915_vma_has_ggtt_write(const struct i915_vma *vma) in i915_vma_has_ggtt_write() argument
[all …]
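
The helpers above are thin wrappers around test_bit() on the VMA's flags word, which i915 keeps behind __i915_vma_flags(). A minimal sketch of that flag-helper pattern, using a hypothetical struct my_vma rather than the real i915 types:

#include <linux/bitops.h>
#include <linux/types.h>

#define MY_VMA_GGTT_BIT 0	/* illustrative flag bit */

struct my_vma {
	unsigned long flags;
};

static inline bool my_vma_is_ggtt(const struct my_vma *vma)
{
	return test_bit(MY_VMA_GGTT_BIT, &vma->flags);
}

static inline void my_vma_set_ggtt(struct my_vma *vma)
{
	set_bit(MY_VMA_GGTT_BIT, &vma->flags);	/* atomic on the flags word */
}
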
i915_vma.c
47 static inline void assert_vma_held_evict(const struct i915_vma *vma) in assert_vma_held_evict() argument
54 if (kref_read(&vma->vm->ref)) in assert_vma_held_evict()
55 assert_object_held_shared(vma->obj); in assert_vma_held_evict()
65 static void i915_vma_free(struct i915_vma *vma) in i915_vma_free() argument
67 return kmem_cache_free(slab_vmas, vma); in i915_vma_free()
74 static void vma_print_allocator(struct i915_vma *vma, const char *reason) in vma_print_allocator() argument
78 if (!vma->node.stack) { in vma_print_allocator()
79 drm_dbg(vma->obj->base.dev, in vma_print_allocator()
81 vma->node.start, vma->node.size, reason); in vma_print_allocator()
85 stack_depot_snprint(vma->node.stack, buf, sizeof(buf), 0); in vma_print_allocator()
[all …]
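
i915_vma_free() returns VMAs to a dedicated slab cache (slab_vmas) rather than kfree(). A sketch of that slab-cache lifecycle under assumed names (slab_my_vmas, struct my_vma), not the actual i915 setup code:

#include <linux/errno.h>
#include <linux/init.h>
#include <linux/slab.h>

struct my_vma {
	unsigned long addr;
	unsigned long size;
};

static struct kmem_cache *slab_my_vmas;

static int __init my_vma_cache_init(void)
{
	/* one cache per object type keeps allocations dense and typed */
	slab_my_vmas = KMEM_CACHE(my_vma, SLAB_HWCACHE_ALIGN);
	return slab_my_vmas ? 0 : -ENOMEM;
}

static struct my_vma *my_vma_alloc(void)
{
	return kmem_cache_zalloc(slab_my_vmas, GFP_KERNEL);
}

static void my_vma_free(struct my_vma *vma)
{
	kmem_cache_free(slab_my_vmas, vma);
}
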
i915_gem_evict.c
41 static bool dying_vma(struct i915_vma *vma)
43 return !kref_read(&vma->obj->base.refcount);
67 static bool grab_vma(struct i915_vma *vma, struct i915_gem_ww_ctx *ww) in grab_vma() argument
73 if (i915_gem_object_get_rcu(vma->obj)) { in grab_vma()
74 if (!i915_gem_object_trylock(vma->obj, ww)) { in grab_vma()
75 i915_gem_object_put(vma->obj); in grab_vma()
80 atomic_and(~I915_VMA_PIN_MASK, &vma->flags); in grab_vma()
86 static void ungrab_vma(struct i915_vma *vma) in ungrab_vma() argument
88 if (dying_vma(vma)) in ungrab_vma()
91 i915_gem_object_unlock(vma->obj); in ungrab_vma()
[all …]
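
grab_vma() pairs a conditional reference grab (taken only while the object's refcount is still non-zero) with a trylock, dropping the reference again if the lock is contended; ungrab_vma() undoes both. A generic sketch of that pattern with kref and a mutex; struct my_obj is hypothetical, and i915's real locking is ww-mutex based:

#include <linux/kref.h>
#include <linux/mutex.h>

struct my_obj {
	struct kref ref;
	struct mutex lock;
};

static void my_obj_release(struct kref *ref)
{
	/* real code would kfree(container_of(ref, struct my_obj, ref)) */
}

static bool grab_obj(struct my_obj *obj)
{
	if (!kref_get_unless_zero(&obj->ref))
		return false;			/* object already dying */
	if (!mutex_trylock(&obj->lock)) {
		kref_put(&obj->ref, my_obj_release);
		return false;			/* contended, back off */
	}
	return true;
}

static void ungrab_obj(struct my_obj *obj)
{
	mutex_unlock(&obj->lock);
	kref_put(&obj->ref, my_obj_release);
}
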
i915_gem.c
98 struct i915_vma *vma; in i915_gem_get_aperture_ioctl() local
105 list_for_each_entry(vma, &ggtt->vm.bound_list, vm_link) in i915_gem_get_aperture_ioctl()
106 if (i915_vma_is_pinned(vma)) in i915_gem_get_aperture_ioctl()
107 pinned += vma->node.size; in i915_gem_get_aperture_ioctl()
124 struct i915_vma *vma; in i915_gem_object_unbind() local
129 if (list_empty(&obj->vma.list)) in i915_gem_object_unbind()
142 spin_lock(&obj->vma.lock); in i915_gem_object_unbind()
143 while (!ret && (vma = list_first_entry_or_null(&obj->vma.list, in i915_gem_object_unbind()
146 list_move_tail(&vma->obj_link, &still_in_list); in i915_gem_object_unbind()
147 if (!i915_vma_is_bound(vma, I915_VMA_BIND_MASK)) in i915_gem_object_unbind()
[all …]
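
i915_gem_object_unbind() consumes obj->vma.list by detaching the first entry onto a private still_in_list each iteration, so the walk keeps its place even though the spinlock is dropped while each VMA is processed. A condensed sketch of that idiom, assuming a plain spinlock-protected list and a hypothetical my_unbind():

#include <linux/list.h>
#include <linux/spinlock.h>

struct my_vma {
	struct list_head obj_link;
};

static int my_unbind(struct my_vma *vma)
{
	return 0;	/* placeholder for the real (possibly sleeping) unbind */
}

static int unbind_all(struct list_head *vma_list, spinlock_t *lock)
{
	LIST_HEAD(still_in_list);
	struct my_vma *vma;
	int ret = 0;

	spin_lock(lock);
	while (!ret && (vma = list_first_entry_or_null(vma_list,
						       struct my_vma,
						       obj_link))) {
		/* park the entry so it is not revisited after relocking */
		list_move_tail(&vma->obj_link, &still_in_list);
		spin_unlock(lock);

		ret = my_unbind(vma);

		spin_lock(lock);
	}
	list_splice_tail(&still_in_list, vma_list);	/* restore the list */
	spin_unlock(lock);
	return ret;
}
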
/drivers/gpu/drm/
drm_vm.c
59 struct vm_area_struct *vma; member
63 static void drm_vm_open(struct vm_area_struct *vma);
64 static void drm_vm_close(struct vm_area_struct *vma);
67 struct vm_area_struct *vma) in drm_io_prot() argument
69 pgprot_t tmp = vm_get_page_prot(vma->vm_flags); in drm_io_prot()
78 if (efi_range_is_wc(vma->vm_start, vma->vm_end - in drm_io_prot()
79 vma->vm_start)) in drm_io_prot()
89 static pgprot_t drm_dma_prot(uint32_t map_type, struct vm_area_struct *vma) in drm_dma_prot() argument
91 pgprot_t tmp = vm_get_page_prot(vma->vm_flags); in drm_dma_prot()
112 struct vm_area_struct *vma = vmf->vma; in drm_vm_fault() local
[all …]
/drivers/gpu/drm/i915/display/
intel_fb_pin.c
30 struct i915_vma *vma; in intel_pin_fb_obj_dpt() local
73 vma = i915_vma_instance(obj, vm, view); in intel_pin_fb_obj_dpt()
74 if (IS_ERR(vma)) { in intel_pin_fb_obj_dpt()
75 ret = PTR_ERR(vma); in intel_pin_fb_obj_dpt()
79 if (i915_vma_misplaced(vma, 0, alignment, 0)) { in intel_pin_fb_obj_dpt()
80 ret = i915_vma_unbind(vma); in intel_pin_fb_obj_dpt()
85 ret = i915_vma_pin_ww(vma, &ww, 0, alignment, PIN_GLOBAL); in intel_pin_fb_obj_dpt()
90 vma = ERR_PTR(ret); in intel_pin_fb_obj_dpt()
94 vma->display_alignment = max(vma->display_alignment, alignment); in intel_pin_fb_obj_dpt()
98 i915_vma_get(vma); in intel_pin_fb_obj_dpt()
[all …]
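
intel_pin_fb_obj_dpt() shows the kernel's ERR_PTR convention: i915_vma_instance() encodes an errno into the returned pointer, and callers unpack it with IS_ERR()/PTR_ERR(). A self-contained sketch with hypothetical my_vma_* names:

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/slab.h>

struct my_vma {
	unsigned long addr;
};

static struct my_vma *my_vma_create(void)
{
	struct my_vma *vma = kzalloc(sizeof(*vma), GFP_KERNEL);

	if (!vma)
		return ERR_PTR(-ENOMEM);	/* errno hidden in the pointer */
	return vma;
}

static int my_vma_get(struct my_vma **out)
{
	struct my_vma *vma = my_vma_create();

	if (IS_ERR(vma))
		return PTR_ERR(vma);		/* decode it back to an int */
	*out = vma;
	return 0;
}
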
intel_plane_initial.c
18 struct i915_vma **vma) in intel_reuse_initial_plane_obj() argument
38 *vma = plane_state->ggtt_vma; in intel_reuse_initial_plane_obj()
52 struct i915_vma *vma; in initial_plane_vma() local
141 vma = i915_vma_instance(obj, &to_gt(i915)->ggtt->vm, NULL); in initial_plane_vma()
142 if (IS_ERR(vma)) in initial_plane_vma()
148 if (i915_vma_pin(vma, 0, 0, pinctl)) in initial_plane_vma()
152 !i915_vma_is_map_and_fenceable(vma)) in initial_plane_vma()
155 return vma; in initial_plane_vma()
170 struct i915_vma *vma; in intel_alloc_initial_plane_obj() local
185 vma = initial_plane_vma(dev_priv, plane_config); in intel_alloc_initial_plane_obj()
[all …]
/drivers/gpu/drm/nouveau/
nouveau_vmm.c
29 nouveau_vma_unmap(struct nouveau_vma *vma) in nouveau_vma_unmap() argument
31 if (vma->mem) { in nouveau_vma_unmap()
32 nvif_vmm_unmap(&vma->vmm->vmm, vma->addr); in nouveau_vma_unmap()
33 vma->mem = NULL; in nouveau_vma_unmap()
38 nouveau_vma_map(struct nouveau_vma *vma, struct nouveau_mem *mem) in nouveau_vma_map() argument
40 struct nvif_vma tmp = { .addr = vma->addr }; in nouveau_vma_map()
41 int ret = nouveau_mem_map(mem, &vma->vmm->vmm, &tmp); in nouveau_vma_map()
44 vma->mem = mem; in nouveau_vma_map()
51 struct nouveau_vma *vma; in nouveau_vma_find() local
53 list_for_each_entry(vma, &nvbo->vma_list, head) { in nouveau_vma_find()
[all …]
/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
vmm.c
802 struct nvkm_vma *vma = kzalloc(sizeof(*vma), GFP_KERNEL); in nvkm_vma_new() local
803 if (vma) { in nvkm_vma_new()
804 vma->addr = addr; in nvkm_vma_new()
805 vma->size = size; in nvkm_vma_new()
806 vma->page = NVKM_VMA_PAGE_NONE; in nvkm_vma_new()
807 vma->refd = NVKM_VMA_PAGE_NONE; in nvkm_vma_new()
809 return vma; in nvkm_vma_new()
813 nvkm_vma_tail(struct nvkm_vma *vma, u64 tail) in nvkm_vma_tail() argument
817 BUG_ON(vma->size == tail); in nvkm_vma_tail()
819 if (!(new = nvkm_vma_new(vma->addr + (vma->size - tail), tail))) in nvkm_vma_tail()
[all …]
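
nvkm_vma_tail() carves the last tail bytes off an allocation into a fresh node, shrinking the original in place. A simplified sketch of that split with stand-in types; the real function also transfers page/refd state and list linkage:

#include <linux/bug.h>
#include <linux/slab.h>
#include <linux/types.h>

struct my_vma {
	u64 addr;
	u64 size;
};

static struct my_vma *my_vma_new(u64 addr, u64 size)
{
	struct my_vma *vma = kzalloc(sizeof(*vma), GFP_KERNEL);

	if (vma) {
		vma->addr = addr;
		vma->size = size;
	}
	return vma;
}

static struct my_vma *my_vma_tail(struct my_vma *vma, u64 tail)
{
	struct my_vma *new;

	BUG_ON(vma->size <= tail);	/* the head must stay non-empty */

	new = my_vma_new(vma->addr + (vma->size - tail), tail);
	if (!new)
		return NULL;

	vma->size -= tail;		/* original keeps the front portion */
	return new;
}
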
uvmm.c
113 struct nvkm_vma *vma; in nvkm_uvmm_mthd_unmap() local
126 vma = nvkm_vmm_node_search(vmm, addr); in nvkm_uvmm_mthd_unmap()
127 if (ret = -ENOENT, !vma || vma->addr != addr) { in nvkm_uvmm_mthd_unmap()
129 addr, vma ? vma->addr : ~0ULL); in nvkm_uvmm_mthd_unmap()
133 if (ret = -ENOENT, vma->busy) { in nvkm_uvmm_mthd_unmap()
134 VMM_DEBUG(vmm, "denied %016llx: %d", addr, vma->busy); in nvkm_uvmm_mthd_unmap()
138 if (ret = -EINVAL, !vma->memory) { in nvkm_uvmm_mthd_unmap()
143 nvkm_vmm_unmap_locked(vmm, vma, false); in nvkm_uvmm_mthd_unmap()
159 struct nvkm_vma *vma; in nvkm_uvmm_mthd_map() local
181 if (ret = -ENOENT, !(vma = nvkm_vmm_node_search(vmm, addr))) { in nvkm_uvmm_mthd_map()
[all …]
/drivers/gpu/drm/i915/selftests/
i915_gem_gtt.c
397 struct i915_vma *vma; in close_object_list() local
399 vma = i915_vma_instance(obj, vm, NULL); in close_object_list()
400 if (!IS_ERR(vma)) in close_object_list()
401 ignored = i915_vma_unbind_unlocked(vma); in close_object_list()
420 struct i915_vma *vma; in fill_hole() local
461 vma = i915_vma_instance(obj, vm, NULL); in fill_hole()
462 if (IS_ERR(vma)) in fill_hole()
471 err = i915_vma_pin(vma, 0, 0, offset | flags); in fill_hole()
478 if (!drm_mm_node_allocated(&vma->node) || in fill_hole()
479 i915_vma_misplaced(vma, 0, 0, offset | flags)) { in fill_hole()
[all …]
i915_vma.c
37 static bool assert_vma(struct i915_vma *vma, in assert_vma() argument
43 if (vma->vm != ctx->vm) { in assert_vma()
48 if (vma->size != obj->base.size) { in assert_vma()
50 vma->size, obj->base.size); in assert_vma()
54 if (vma->gtt_view.type != I915_GTT_VIEW_NORMAL) { in assert_vma()
56 vma->gtt_view.type); in assert_vma()
68 struct i915_vma *vma; in checked_vma_instance() local
71 vma = i915_vma_instance(obj, vm, view); in checked_vma_instance()
72 if (IS_ERR(vma)) in checked_vma_instance()
73 return vma; in checked_vma_instance()
[all …]
/drivers/gpu/drm/msm/
msm_gem_vma.c
42 void msm_gem_vma_purge(struct msm_gem_vma *vma) in msm_gem_vma_purge() argument
44 struct msm_gem_address_space *aspace = vma->aspace; in msm_gem_vma_purge()
45 unsigned size = vma->node.size; in msm_gem_vma_purge()
48 if (!vma->mapped) in msm_gem_vma_purge()
51 aspace->mmu->funcs->unmap(aspace->mmu, vma->iova, size); in msm_gem_vma_purge()
53 vma->mapped = false; in msm_gem_vma_purge()
58 msm_gem_vma_map(struct msm_gem_vma *vma, int prot, in msm_gem_vma_map() argument
61 struct msm_gem_address_space *aspace = vma->aspace; in msm_gem_vma_map()
64 if (GEM_WARN_ON(!vma->iova)) in msm_gem_vma_map()
67 if (vma->mapped) in msm_gem_vma_map()
[all …]
msm_gem.c
289 struct vm_area_struct *vma = vmf->vma; in msm_gem_fault() local
290 struct drm_gem_object *obj = vma->vm_private_data; in msm_gem_fault()
321 pgoff = (vmf->address - vma->vm_start) >> PAGE_SHIFT; in msm_gem_fault()
328 ret = vmf_insert_pfn(vma, vmf->address, pfn); in msm_gem_fault()
369 struct msm_gem_vma *vma; in add_vma() local
373 vma = msm_gem_vma_new(aspace); in add_vma()
374 if (!vma) in add_vma()
377 list_add_tail(&vma->list, &msm_obj->vmas); in add_vma()
379 return vma; in add_vma()
386 struct msm_gem_vma *vma; in lookup_vma() local
[all …]
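
msm_gem_fault() turns the faulting address into a page offset within the object and installs a single PFN with vmf_insert_pfn(). A skeletal version of that fault handler; my_obj_pfn() is a hypothetical placeholder for the driver's real page lookup:

#include <linux/mm.h>

/* hypothetical: resolve page index @pgoff of the object to a frame number */
static unsigned long my_obj_pfn(void *obj, unsigned long pgoff)
{
	return 0;	/* placeholder */
}

static vm_fault_t my_fault(struct vm_fault *vmf)
{
	struct vm_area_struct *vma = vmf->vma;
	unsigned long pgoff, pfn;

	/* page index of the faulting address within this mapping */
	pgoff = (vmf->address - vma->vm_start) >> PAGE_SHIFT;

	pfn = my_obj_pfn(vma->vm_private_data, pgoff);

	/* installs a VM_PFNMAP pte and reports the result to the core */
	return vmf_insert_pfn(vma, vmf->address, pfn);
}
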
/drivers/pci/
mmap.c
23 struct vm_area_struct *vma, in pci_mmap_resource_range() argument
30 if (vma->vm_pgoff + vma_pages(vma) > size) in pci_mmap_resource_range()
34 vma->vm_page_prot = pgprot_writecombine(vma->vm_page_prot); in pci_mmap_resource_range()
36 vma->vm_page_prot = pgprot_device(vma->vm_page_prot); in pci_mmap_resource_range()
39 ret = pci_iobar_pfn(pdev, bar, vma); in pci_mmap_resource_range()
43 vma->vm_pgoff += (pci_resource_start(pdev, bar) >> PAGE_SHIFT); in pci_mmap_resource_range()
45 vma->vm_ops = &pci_phys_vm_ops; in pci_mmap_resource_range()
47 return io_remap_pfn_range(vma, vma->vm_start, vma->vm_pgoff, in pci_mmap_resource_range()
48 vma->vm_end - vma->vm_start, in pci_mmap_resource_range()
49 vma->vm_page_prot); in pci_mmap_resource_range()
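
pci_mmap_resource_range() bounds-checks the request against the BAR, picks a page protection, and defers to io_remap_pfn_range() for the whole span. A condensed sketch under assumed bar_start/bar_size parameters in place of the real pci_resource_*() lookups:

#include <linux/errno.h>
#include <linux/mm.h>
#include <linux/types.h>

static int my_mmap_bar(struct vm_area_struct *vma,
		       resource_size_t bar_start, resource_size_t bar_size)
{
	unsigned long pfn;

	/* reject mappings that would run past the end of the BAR */
	if (vma->vm_pgoff + vma_pages(vma) > (bar_size >> PAGE_SHIFT))
		return -EINVAL;

	pfn = (unsigned long)(bar_start >> PAGE_SHIFT) + vma->vm_pgoff;
	vma->vm_page_prot = pgprot_writecombine(vma->vm_page_prot);

	return io_remap_pfn_range(vma, vma->vm_start, pfn,
				  vma->vm_end - vma->vm_start,
				  vma->vm_page_prot);
}
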
/drivers/gpu/drm/i915/gt/
intel_ring.c
37 struct i915_vma *vma = ring->vma; in intel_ring_pin() local
46 flags = PIN_OFFSET_BIAS | i915_ggtt_pin_bias(vma); in intel_ring_pin()
48 if (i915_gem_object_is_stolen(vma->obj)) in intel_ring_pin()
53 ret = i915_ggtt_pin(vma, ww, 0, flags); in intel_ring_pin()
57 if (i915_vma_is_map_and_fenceable(vma) && !HAS_LLC(vma->vm->i915)) { in intel_ring_pin()
58 addr = (void __force *)i915_vma_pin_iomap(vma); in intel_ring_pin()
60 int type = intel_gt_coherent_map_type(vma->vm->gt, vma->obj, false); in intel_ring_pin()
62 addr = i915_gem_object_pin_map(vma->obj, type); in intel_ring_pin()
70 i915_vma_make_unshrinkable(vma); in intel_ring_pin()
79 i915_vma_unpin(vma); in intel_ring_pin()
[all …]
intel_ggtt_fencing.c
201 struct i915_vma *vma) in fence_update() argument
210 if (vma) { in fence_update()
211 GEM_BUG_ON(!i915_gem_object_get_stride(vma->obj) || in fence_update()
212 !i915_gem_object_get_tiling(vma->obj)); in fence_update()
214 if (!i915_vma_is_map_and_fenceable(vma)) in fence_update()
219 ret = i915_vma_sync(vma); in fence_update()
224 GEM_BUG_ON(vma->fence_size > i915_vma_size(vma)); in fence_update()
225 fence->start = i915_ggtt_offset(vma); in fence_update()
226 fence->size = vma->fence_size; in fence_update()
227 fence->stride = i915_gem_object_get_stride(vma->obj); in fence_update()
[all …]
/drivers/gpu/drm/i915/gem/
i915_gem_mman.c
30 __vma_matches(struct vm_area_struct *vma, struct file *filp, in __vma_matches() argument
33 if (vma->vm_file != filp) in __vma_matches()
36 return vma->vm_start == addr && in __vma_matches()
37 (vma->vm_end - vma->vm_start) == PAGE_ALIGN(size); in __vma_matches()
107 struct vm_area_struct *vma; in i915_gem_mmap_ioctl() local
113 vma = find_vma(mm, addr); in i915_gem_mmap_ioctl()
114 if (vma && __vma_matches(vma, obj->base.filp, addr, args->size)) in i915_gem_mmap_ioctl()
115 vma->vm_page_prot = in i915_gem_mmap_ioctl()
116 pgprot_writecombine(vm_get_page_prot(vma->vm_flags)); in i915_gem_mmap_ioctl()
252 struct vm_area_struct *area = vmf->vma; in vm_fault_cpu()
[all …]
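
i915_gem_mmap_ioctl() calls find_vma() with the mmap lock held; find_vma() only guarantees addr < vma->vm_end, which is why __vma_matches() re-checks the start and length. A minimal sketch of that lookup discipline:

#include <linux/mm.h>

static bool addr_is_mapped(struct mm_struct *mm, unsigned long addr)
{
	struct vm_area_struct *vma;
	bool mapped;

	mmap_read_lock(mm);	/* the VMA tree may not be walked unlocked */
	vma = find_vma(mm, addr);
	mapped = vma && vma->vm_start <= addr;	/* else addr is in a hole */
	mmap_read_unlock(mm);

	return mapped;
}
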
i915_gem_tiling.c
161 static bool i915_vma_fence_prepare(struct i915_vma *vma, in i915_vma_fence_prepare() argument
164 struct drm_i915_private *i915 = vma->vm->i915; in i915_vma_fence_prepare()
167 if (!i915_vma_is_map_and_fenceable(vma)) in i915_vma_fence_prepare()
170 size = i915_gem_fence_size(i915, vma->size, tiling_mode, stride); in i915_vma_fence_prepare()
171 if (i915_vma_size(vma) < size) in i915_vma_fence_prepare()
174 alignment = i915_gem_fence_alignment(i915, vma->size, tiling_mode, stride); in i915_vma_fence_prepare()
175 if (!IS_ALIGNED(i915_ggtt_offset(vma), alignment)) in i915_vma_fence_prepare()
188 struct i915_vma *vma, *vn; in i915_gem_object_fence_prepare() local
197 spin_lock(&obj->vma.lock); in i915_gem_object_fence_prepare()
198 for_each_ggtt_vma(vma, obj) { in i915_gem_object_fence_prepare()
[all …]
i915_gem_execbuffer.c
38 struct i915_vma *vma; member
251 struct eb_vma *vma; member
379 const struct i915_vma *vma, in eb_vma_misplaced() argument
382 const u64 start = i915_vma_offset(vma); in eb_vma_misplaced()
383 const u64 size = i915_vma_size(vma); in eb_vma_misplaced()
404 !i915_vma_is_map_and_fenceable(vma)) in eb_vma_misplaced()
441 struct i915_vma *vma = ev->vma; in eb_pin_vma() local
445 if (vma->node.size) in eb_pin_vma()
446 pin_flags = __i915_vma_offset(vma); in eb_pin_vma()
455 err = i915_vma_pin_ww(vma, &eb->ww, 0, 0, pin_flags); in eb_pin_vma()
[all …]
/drivers/gpu/drm/ttm/
ttm_bo_vm.c
62 mmap_read_unlock(vmf->vma->vm_mm); in ttm_bo_vm_fault_idle()
134 mmap_read_unlock(vmf->vma->vm_mm); in ttm_bo_vm_reserve()
185 struct vm_area_struct *vma = vmf->vma; in ttm_bo_vm_fault_reserved() local
186 struct ttm_buffer_object *bo = vma->vm_private_data; in ttm_bo_vm_fault_reserved()
210 page_offset = ((address - vma->vm_start) >> PAGE_SHIFT) + in ttm_bo_vm_fault_reserved()
211 vma->vm_pgoff - drm_vma_node_start(&bo->base.vma_node); in ttm_bo_vm_fault_reserved()
212 page_last = vma_pages(vma) + vma->vm_pgoff - in ttm_bo_vm_fault_reserved()
266 ret = vmf_insert_pfn_prot(vma, address, pfn, prot); in ttm_bo_vm_fault_reserved()
293 struct vm_area_struct *vma = vmf->vma; in ttm_bo_vm_dummy_page() local
294 struct ttm_buffer_object *bo = vma->vm_private_data; in ttm_bo_vm_dummy_page()
[all …]
/drivers/char/
mspec.c
88 mspec_open(struct vm_area_struct *vma) in mspec_open() argument
92 vdata = vma->vm_private_data; in mspec_open()
103 mspec_close(struct vm_area_struct *vma) in mspec_close() argument
109 vdata = vma->vm_private_data; in mspec_close()
142 struct vma_data *vdata = vmf->vma->vm_private_data; in mspec_fault()
164 return vmf_insert_pfn(vmf->vma, vmf->address, pfn); in mspec_fault()
181 mspec_mmap(struct file *file, struct vm_area_struct *vma, in mspec_mmap() argument
187 if (vma->vm_pgoff != 0) in mspec_mmap()
190 if ((vma->vm_flags & VM_SHARED) == 0) in mspec_mmap()
193 if ((vma->vm_flags & VM_WRITE) == 0) in mspec_mmap()
[all …]
/drivers/gpu/drm/i915/gem/selftests/
huge_pages.c
362 static int igt_check_page_sizes(struct i915_vma *vma) in igt_check_page_sizes() argument
364 struct drm_i915_private *i915 = vma->vm->i915; in igt_check_page_sizes()
366 struct drm_i915_gem_object *obj = vma->obj; in igt_check_page_sizes()
370 err = i915_vma_sync(vma); in igt_check_page_sizes()
374 if (!HAS_PAGE_SIZES(i915, vma->page_sizes.sg)) { in igt_check_page_sizes()
376 vma->page_sizes.sg & ~supported, supported); in igt_check_page_sizes()
380 if (!HAS_PAGE_SIZES(i915, vma->resource->page_sizes_gtt)) { in igt_check_page_sizes()
382 vma->resource->page_sizes_gtt & ~supported, supported); in igt_check_page_sizes()
386 if (vma->page_sizes.phys != obj->mm.page_sizes.phys) { in igt_check_page_sizes()
388 vma->page_sizes.phys, obj->mm.page_sizes.phys); in igt_check_page_sizes()
[all …]
/drivers/misc/ocxl/
context.c
98 static vm_fault_t map_afu_irq(struct vm_area_struct *vma, unsigned long address, in map_afu_irq() argument
108 return vmf_insert_pfn(vma, address, trigger_addr >> PAGE_SHIFT); in map_afu_irq()
111 static vm_fault_t map_pp_mmio(struct vm_area_struct *vma, unsigned long address, in map_pp_mmio() argument
134 ret = vmf_insert_pfn(vma, address, pp_mmio_addr >> PAGE_SHIFT); in map_pp_mmio()
141 struct vm_area_struct *vma = vmf->vma; in ocxl_mmap_fault() local
142 struct ocxl_context *ctx = vma->vm_file->private_data; in ocxl_mmap_fault()
151 ret = map_pp_mmio(vma, vmf->address, offset, ctx); in ocxl_mmap_fault()
153 ret = map_afu_irq(vma, vmf->address, offset, ctx); in ocxl_mmap_fault()
162 struct vm_area_struct *vma) in check_mmap_afu_irq() argument
164 int irq_id = ocxl_irq_offset_to_id(ctx, vma->vm_pgoff << PAGE_SHIFT); in check_mmap_afu_irq()
[all …]
/drivers/xen/
privcmd.c
68 struct vm_area_struct *vma,
221 struct vm_area_struct *vma; member
229 struct vm_area_struct *vma = st->vma; in mmap_gfn_range() local
239 ((msg->va+(msg->npages<<PAGE_SHIFT)) > vma->vm_end)) in mmap_gfn_range()
242 rc = xen_remap_domain_gfn_range(vma, in mmap_gfn_range()
245 vma->vm_page_prot, in mmap_gfn_range()
260 struct vm_area_struct *vma; in privcmd_ioctl_mmap() local
290 vma = vma_lookup(mm, msg->va); in privcmd_ioctl_mmap()
293 if (!vma || (msg->va != vma->vm_start) || vma->vm_private_data) in privcmd_ioctl_mmap()
295 vma->vm_private_data = PRIV_VMA_LOCKED; in privcmd_ioctl_mmap()
[all …]
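
privcmd_ioctl_mmap() uses vma_lookup(), which, unlike find_vma(), returns a VMA only when the address actually falls inside it; the caller then insists the VMA starts exactly at the requested address and has not been claimed before. A sketch of that check, assuming the caller already holds the mmap lock:

#include <linux/errno.h>
#include <linux/mm.h>

static int claim_exact_vma(struct mm_struct *mm, unsigned long va)
{
	struct vm_area_struct *vma = vma_lookup(mm, va);

	if (!vma || va != vma->vm_start || vma->vm_private_data)
		return -EINVAL;

	vma->vm_private_data = (void *)1;	/* illustrative "claimed" marker */
	return 0;
}
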
