
Lines Matching refs:obj

These fragments appear to be cross-reference results for the identifier `obj` in the i915 GEM object implementation (drivers/gpu/drm/i915/gem/i915_gem_object.c in the Linux kernel). Only lines containing the identifier are shown, so the file line numbers are non-contiguous; each line is annotated with its enclosing function.

42 struct drm_i915_gem_object *obj; in i915_gem_object_alloc() local
44 obj = kmem_cache_zalloc(slab_objects, GFP_KERNEL); in i915_gem_object_alloc()
45 if (!obj) in i915_gem_object_alloc()
47 obj->base.funcs = &i915_gem_object_funcs; in i915_gem_object_alloc()
49 return obj; in i915_gem_object_alloc()
52 void i915_gem_object_free(struct drm_i915_gem_object *obj) in i915_gem_object_free() argument
54 return kmem_cache_free(slab_objects, obj); in i915_gem_object_free()
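Taken together, the alloc/free fragments above are the standard dedicated-slab pattern: objects come zeroed out of a private kmem_cache and go back to it on free. A minimal reconstruction of the elided lines, assuming slab_objects is the file-local cache set up at module init:

	struct drm_i915_gem_object *i915_gem_object_alloc(void)
	{
		struct drm_i915_gem_object *obj;

		/* Zeroed allocation from the driver's private slab cache. */
		obj = kmem_cache_zalloc(slab_objects, GFP_KERNEL);
		if (!obj)
			return NULL;

		/* Hook up the common DRM GEM vtable before handing it out. */
		obj->base.funcs = &i915_gem_object_funcs;

		return obj;
	}

	void i915_gem_object_free(struct drm_i915_gem_object *obj)
	{
		return kmem_cache_free(slab_objects, obj);
	}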
57 void i915_gem_object_init(struct drm_i915_gem_object *obj, in i915_gem_object_init() argument
65 BUILD_BUG_ON(offsetof(typeof(*obj), base) != in i915_gem_object_init()
66 offsetof(typeof(*obj), __do_not_access.base)); in i915_gem_object_init()
68 spin_lock_init(&obj->vma.lock); in i915_gem_object_init()
69 INIT_LIST_HEAD(&obj->vma.list); in i915_gem_object_init()
71 INIT_LIST_HEAD(&obj->mm.link); in i915_gem_object_init()
73 INIT_LIST_HEAD(&obj->lut_list); in i915_gem_object_init()
74 spin_lock_init(&obj->lut_lock); in i915_gem_object_init()
76 spin_lock_init(&obj->mmo.lock); in i915_gem_object_init()
77 obj->mmo.offsets = RB_ROOT; in i915_gem_object_init()
79 init_rcu_head(&obj->rcu); in i915_gem_object_init()
81 obj->ops = ops; in i915_gem_object_init()
83 obj->flags = flags; in i915_gem_object_init()
85 obj->mm.madv = I915_MADV_WILLNEED; in i915_gem_object_init()
86 INIT_RADIX_TREE(&obj->mm.get_page.radix, GFP_KERNEL | __GFP_NOWARN); in i915_gem_object_init()
87 mutex_init(&obj->mm.get_page.lock); in i915_gem_object_init()
88 INIT_RADIX_TREE(&obj->mm.get_dma_page.radix, GFP_KERNEL | __GFP_NOWARN); in i915_gem_object_init()
89 mutex_init(&obj->mm.get_dma_page.lock); in i915_gem_object_init()
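The init fragment wires up the object's VMA list and lock, the handle-lookup (LUT) list, the mmap-offset rbtree, the RCU head used for deferred freeing, and two radix trees that cache page and DMA-page lookups. Backends typically chain i915_gem_object_alloc() with drm_gem_object_init() and this initializer; a hedged sketch of that sequence (my_backend_ops, dev, size and flags are placeholders, and the lock-class argument is assumed from the usual i915 signature, not shown in this listing):

	static struct lock_class_key lock_class;	/* placeholder key */

	struct drm_i915_gem_object *obj;
	int ret;

	obj = i915_gem_object_alloc();
	if (!obj)
		return ERR_PTR(-ENOMEM);

	/* Initialise the embedded DRM GEM object (size, handle bookkeeping). */
	ret = drm_gem_object_init(dev, &obj->base, size);
	if (ret) {
		i915_gem_object_free(obj);
		return ERR_PTR(ret);
	}

	/* Install backend ops and creation flags, as seen at lines 81/83. */
	i915_gem_object_init(obj, &my_backend_ops, &lock_class, flags);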
97 void i915_gem_object_set_cache_coherency(struct drm_i915_gem_object *obj, in i915_gem_object_set_cache_coherency() argument
100 obj->cache_level = cache_level; in i915_gem_object_set_cache_coherency()
103 obj->cache_coherent = (I915_BO_CACHE_COHERENT_FOR_READ | in i915_gem_object_set_cache_coherency()
105 else if (HAS_LLC(to_i915(obj->base.dev))) in i915_gem_object_set_cache_coherency()
106 obj->cache_coherent = I915_BO_CACHE_COHERENT_FOR_READ; in i915_gem_object_set_cache_coherency()
108 obj->cache_coherent = 0; in i915_gem_object_set_cache_coherency()
110 obj->cache_dirty = in i915_gem_object_set_cache_coherency()
111 !(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE); in i915_gem_object_set_cache_coherency()
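Reading the surviving branches in order, the coherency policy is recoverable: any real cache level is coherent for both reads and writes, a platform with a shared last-level cache (HAS_LLC) is still read-coherent even for uncached objects, and anything else is not coherent at all. cache_dirty then flags objects whose writes may sit in the CPU cache and need flushing before GPU use. A sketch of the whole function under that reading (the I915_CACHE_NONE comparison on the elided line 102 is an assumption):

	void i915_gem_object_set_cache_coherency(struct drm_i915_gem_object *obj,
						 unsigned int cache_level)
	{
		obj->cache_level = cache_level;

		if (cache_level != I915_CACHE_NONE)	/* assumed condition */
			obj->cache_coherent = (I915_BO_CACHE_COHERENT_FOR_READ |
					       I915_BO_CACHE_COHERENT_FOR_WRITE);
		else if (HAS_LLC(to_i915(obj->base.dev)))
			obj->cache_coherent = I915_BO_CACHE_COHERENT_FOR_READ;
		else
			obj->cache_coherent = 0;

		/* Writes that bypass coherency must be flushed explicitly. */
		obj->cache_dirty =
			!(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE);
	}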
116 struct drm_i915_gem_object *obj = to_intel_bo(gem); in i915_gem_close_object() local
123 spin_lock(&obj->lut_lock); in i915_gem_close_object()
124 list_for_each_entry_safe(lut, ln, &obj->lut_list, obj_link) { in i915_gem_close_object()
133 if (&ln->obj_link != &obj->lut_list) { in i915_gem_close_object()
135 if (cond_resched_lock(&obj->lut_lock)) in i915_gem_close_object()
140 spin_unlock(&obj->lut_lock); in i915_gem_close_object()
142 spin_lock(&obj->mmo.lock); in i915_gem_close_object()
143 rbtree_postorder_for_each_entry_safe(mmo, mn, &obj->mmo.offsets, offset) in i915_gem_close_object()
145 spin_unlock(&obj->mmo.lock); in i915_gem_close_object()
159 GEM_BUG_ON(vma->obj != obj); in i915_gem_close_object()
167 i915_gem_object_put(obj); in i915_gem_close_object()
173 struct drm_i915_gem_object *obj = in __i915_gem_free_object_rcu() local
174 container_of(head, typeof(*obj), rcu); in __i915_gem_free_object_rcu()
175 struct drm_i915_private *i915 = to_i915(obj->base.dev); in __i915_gem_free_object_rcu()
177 dma_resv_fini(&obj->base._resv); in __i915_gem_free_object_rcu()
178 i915_gem_object_free(obj); in __i915_gem_free_object_rcu()
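Freeing is RCU-deferred: the object embeds an rcu_head (initialised at line 79 above), and the callback recovers the object with container_of() before tearing down the reservation object and returning the memory to the slab. This lets lockless readers inside an RCU read-side critical section keep dereferencing the object until a grace period has elapsed. The fragment recovers to the standard call_rcu callback shape:

	static void __i915_gem_free_object_rcu(struct rcu_head *head)
	{
		/* Recover the object from its embedded rcu_head. */
		struct drm_i915_gem_object *obj =
			container_of(head, typeof(*obj), rcu);
		struct drm_i915_private *i915 = to_i915(obj->base.dev);

		dma_resv_fini(&obj->base._resv);
		i915_gem_object_free(obj);

		/* i915 is used on the elided lines, presumably for
		 * pending-free accounting — assumption, not shown here. */
	}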
184 static void __i915_gem_object_free_mmaps(struct drm_i915_gem_object *obj) in __i915_gem_object_free_mmaps() argument
188 if (obj->userfault_count) in __i915_gem_object_free_mmaps()
189 i915_gem_object_release_mmap_gtt(obj); in __i915_gem_object_free_mmaps()
191 if (!RB_EMPTY_ROOT(&obj->mmo.offsets)) { in __i915_gem_object_free_mmaps()
194 i915_gem_object_release_mmap_offset(obj); in __i915_gem_object_free_mmaps()
197 &obj->mmo.offsets, in __i915_gem_object_free_mmaps()
199 drm_vma_offset_remove(obj->base.dev->vma_offset_manager, in __i915_gem_object_free_mmaps()
203 obj->mmo.offsets = RB_ROOT; in __i915_gem_object_free_mmaps()
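Teardown of the mmap state mirrors its two halves: any live GTT userfaults are revoked first, then every cached mmap offset is unlinked from the DRM vma-offset manager and freed via a postorder walk (safe because nodes are not rebalanced while being deleted). A hedged reconstruction of the elided lines (the kfree(mmo) and the mmo/mn declarations are assumptions consistent with the walk at line 197 and the identical iterator at line 143):

	static void __i915_gem_object_free_mmaps(struct drm_i915_gem_object *obj)
	{
		if (obj->userfault_count)
			i915_gem_object_release_mmap_gtt(obj);

		if (!RB_EMPTY_ROOT(&obj->mmo.offsets)) {
			struct i915_mmap_offset *mmo, *mn;

			i915_gem_object_release_mmap_offset(obj);

			rbtree_postorder_for_each_entry_safe(mmo, mn,
							     &obj->mmo.offsets,
							     offset) {
				drm_vma_offset_remove(obj->base.dev->vma_offset_manager,
						      &mmo->vma_node);
				kfree(mmo);	/* assumed */
			}
			obj->mmo.offsets = RB_ROOT;
		}
	}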
207 void __i915_gem_free_object(struct drm_i915_gem_object *obj) in __i915_gem_free_object() argument
209 trace_i915_gem_object_destroy(obj); in __i915_gem_free_object()
211 if (!list_empty(&obj->vma.list)) { in __i915_gem_free_object()
220 spin_lock(&obj->vma.lock); in __i915_gem_free_object()
221 while ((vma = list_first_entry_or_null(&obj->vma.list, in __i915_gem_free_object()
224 GEM_BUG_ON(vma->obj != obj); in __i915_gem_free_object()
225 spin_unlock(&obj->vma.lock); in __i915_gem_free_object()
235 spin_lock(&obj->vma.lock); in __i915_gem_free_object()
237 spin_unlock(&obj->vma.lock); in __i915_gem_free_object()
240 __i915_gem_object_free_mmaps(obj); in __i915_gem_free_object()
242 GEM_BUG_ON(!list_empty(&obj->lut_list)); in __i915_gem_free_object()
244 atomic_set(&obj->mm.pages_pin_count, 0); in __i915_gem_free_object()
245 __i915_gem_object_put_pages(obj); in __i915_gem_free_object()
246 GEM_BUG_ON(i915_gem_object_has_pages(obj)); in __i915_gem_free_object()
247 bitmap_free(obj->bit_17); in __i915_gem_free_object()
249 if (obj->base.import_attach) in __i915_gem_free_object()
250 drm_prime_gem_destroy(&obj->base, NULL); in __i915_gem_free_object()
252 drm_gem_free_mmap_offset(&obj->base); in __i915_gem_free_object()
254 if (obj->ops->release) in __i915_gem_free_object()
255 obj->ops->release(obj); in __i915_gem_free_object()
257 if (obj->mm.n_placements > 1) in __i915_gem_free_object()
258 kfree(obj->mm.placements); in __i915_gem_free_object()
260 if (obj->shares_resv_from) in __i915_gem_free_object()
261 i915_vm_resv_put(obj->shares_resv_from); in __i915_gem_free_object()
267 struct drm_i915_gem_object *obj, *on; in __i915_gem_free_objects() local
269 llist_for_each_entry_safe(obj, on, freed, freed) { in __i915_gem_free_objects()
271 if (obj->ops->delayed_free) { in __i915_gem_free_objects()
272 obj->ops->delayed_free(obj); in __i915_gem_free_objects()
275 __i915_gem_free_object(obj); in __i915_gem_free_objects()
278 call_rcu(&obj->rcu, __i915_gem_free_object_rcu); in __i915_gem_free_objects()
301 struct drm_i915_gem_object *obj = to_intel_bo(gem_obj); in i915_gem_free_object() local
302 struct drm_i915_private *i915 = to_i915(obj->base.dev); in i915_gem_free_object()
304 GEM_BUG_ON(i915_gem_object_is_framebuffer(obj)); in i915_gem_free_object()
321 i915_gem_object_make_unshrinkable(obj); in i915_gem_free_object()
334 if (llist_add(&obj->freed, &i915->mm.free_list)) in i915_gem_free_object()
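The last two fragments are the producer and consumer halves of a lock-free deferred-free queue. llist_add() returns true only when it adds to an empty list, so exactly one producer schedules the free worker per batch; the worker drains the list with llist_for_each_entry_safe() and either hands each object to its backend's delayed_free hook or frees it directly, deferring the final release through call_rcu(). A sketch of the idiom (the worker name and the llist_del_all() drain are assumptions about the elided glue):

	/* Producer (i915_gem_free_object): the first add on an empty list
	 * kicks the worker; later adds piggy-back on the pending pass. */
	if (llist_add(&obj->freed, &i915->mm.free_list))
		queue_work(i915->wq, &i915->mm.free_work);	/* assumed */

	/* Consumer (__i915_gem_free_objects): atomically detach the whole
	 * list, then walk it without any locking. */
	struct llist_node *freed = llist_del_all(&i915->mm.free_list);
	struct drm_i915_gem_object *obj, *on;

	llist_for_each_entry_safe(obj, on, freed, freed) {
		if (obj->ops->delayed_free) {
			obj->ops->delayed_free(obj);
			continue;
		}
		__i915_gem_free_object(obj);
		call_rcu(&obj->rcu, __i915_gem_free_object_rcu);
	}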
338 void __i915_gem_object_flush_frontbuffer(struct drm_i915_gem_object *obj, in __i915_gem_object_flush_frontbuffer() argument
343 front = __intel_frontbuffer_get(obj); in __i915_gem_object_flush_frontbuffer()
350 void __i915_gem_object_invalidate_frontbuffer(struct drm_i915_gem_object *obj, in __i915_gem_object_invalidate_frontbuffer() argument
355 front = __intel_frontbuffer_get(obj); in __i915_gem_object_invalidate_frontbuffer()
363 i915_gem_object_read_from_page_kmap(struct drm_i915_gem_object *obj, u64 offset, void *dst, int size) in i915_gem_object_read_from_page_kmap() argument
368 src_map = kmap_atomic(i915_gem_object_get_page(obj, offset >> PAGE_SHIFT)); in i915_gem_object_read_from_page_kmap()
371 if (!(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_READ)) in i915_gem_object_read_from_page_kmap()
379 i915_gem_object_read_from_page_iomap(struct drm_i915_gem_object *obj, u64 offset, void *dst, int size) in i915_gem_object_read_from_page_iomap() argument
383 dma_addr_t dma = i915_gem_object_get_dma_address(obj, offset >> PAGE_SHIFT); in i915_gem_object_read_from_page_iomap()
385 src_map = io_mapping_map_wc(&obj->mm.region->iomap, in i915_gem_object_read_from_page_iomap()
386 dma - obj->mm.region->region.start, in i915_gem_object_read_from_page_iomap()
410 int i915_gem_object_read_from_page(struct drm_i915_gem_object *obj, u64 offset, void *dst, int size) in i915_gem_object_read_from_page() argument
412 GEM_BUG_ON(offset >= obj->base.size); in i915_gem_object_read_from_page()
414 GEM_BUG_ON(!i915_gem_object_has_pinned_pages(obj)); in i915_gem_object_read_from_page()
416 if (i915_gem_object_has_struct_page(obj)) in i915_gem_object_read_from_page()
417 i915_gem_object_read_from_page_kmap(obj, offset, dst, size); in i915_gem_object_read_from_page()
418 else if (i915_gem_object_has_iomem(obj)) in i915_gem_object_read_from_page()
419 i915_gem_object_read_from_page_iomap(obj, offset, dst, size); in i915_gem_object_read_from_page()
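The public entry point sanity-checks the read against the object's size and requires the backing pages to be pinned, then dispatches on the backing-store type probed by the two predicates below: kmap for struct-page (shmem-style) memory, io_mapping for device-local memory. A hedged reconstruction (the -ENODEV fallback is an assumption about the elided lines):

	int i915_gem_object_read_from_page(struct drm_i915_gem_object *obj,
					   u64 offset, void *dst, int size)
	{
		GEM_BUG_ON(offset >= obj->base.size);
		GEM_BUG_ON(!i915_gem_object_has_pinned_pages(obj));

		if (i915_gem_object_has_struct_page(obj))
			i915_gem_object_read_from_page_kmap(obj, offset, dst, size);
		else if (i915_gem_object_has_iomem(obj))
			i915_gem_object_read_from_page_iomap(obj, offset, dst, size);
		else
			return -ENODEV;	/* assumed fallback */

		return 0;
	}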
439 bool i915_gem_object_evictable(struct drm_i915_gem_object *obj) in i915_gem_object_evictable() argument
442 int pin_count = atomic_read(&obj->mm.pages_pin_count); in i915_gem_object_evictable()
447 spin_lock(&obj->vma.lock); in i915_gem_object_evictable()
448 list_for_each_entry(vma, &obj->vma.list, obj_link) { in i915_gem_object_evictable()
450 spin_unlock(&obj->vma.lock); in i915_gem_object_evictable()
456 spin_unlock(&obj->vma.lock); in i915_gem_object_evictable()
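Evictability is a two-stage test: a zero mm.pages_pin_count means nothing holds the backing store and the object is trivially evictable; otherwise every VMA on the object is walked under vma.lock, and the object is evictable only if the VMAs account for all outstanding pins. A sketch under that reading (the listing shows only the lock/unlock and the early unlock on failure; the per-VMA accounting in the loop body is an assumption):

	bool i915_gem_object_evictable(struct drm_i915_gem_object *obj)
	{
		struct i915_vma *vma;
		int pin_count = atomic_read(&obj->mm.pages_pin_count);

		if (!pin_count)
			return true;	/* no pins anywhere */

		spin_lock(&obj->vma.lock);
		list_for_each_entry(vma, &obj->vma.list, obj_link) {
			if (i915_vma_is_pinned(vma)) {
				/* A pinned binding blocks eviction outright. */
				spin_unlock(&obj->vma.lock);
				return false;
			}
			/* Assumed: each VMA holding the pages owns one pin. */
			if (atomic_read(&vma->pages_count))
				pin_count--;
		}
		spin_unlock(&obj->vma.lock);

		return pin_count == 0;
	}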
470 bool i915_gem_object_migratable(struct drm_i915_gem_object *obj) in i915_gem_object_migratable() argument
472 struct intel_memory_region *mr = READ_ONCE(obj->mm.region); in i915_gem_object_migratable()
477 return obj->mm.n_placements > 1; in i915_gem_object_migratable()
489 bool i915_gem_object_has_struct_page(const struct drm_i915_gem_object *obj) in i915_gem_object_has_struct_page() argument
492 if (IS_DGFX(to_i915(obj->base.dev)) && in i915_gem_object_has_struct_page()
493 i915_gem_object_evictable((void __force *)obj)) in i915_gem_object_has_struct_page()
494 assert_object_held_shared(obj); in i915_gem_object_has_struct_page()
496 return obj->mem_flags & I915_BO_FLAG_STRUCT_PAGE; in i915_gem_object_has_struct_page()
508 bool i915_gem_object_has_iomem(const struct drm_i915_gem_object *obj) in i915_gem_object_has_iomem() argument
511 if (IS_DGFX(to_i915(obj->base.dev)) && in i915_gem_object_has_iomem()
512 i915_gem_object_evictable((void __force *)obj)) in i915_gem_object_has_iomem()
513 assert_object_held_shared(obj); in i915_gem_object_has_iomem()
515 return obj->mem_flags & I915_BO_FLAG_IOMEM; in i915_gem_object_has_iomem()
535 bool i915_gem_object_can_migrate(struct drm_i915_gem_object *obj, in i915_gem_object_can_migrate() argument
538 struct drm_i915_private *i915 = to_i915(obj->base.dev); in i915_gem_object_can_migrate()
539 unsigned int num_allowed = obj->mm.n_placements; in i915_gem_object_can_migrate()
544 GEM_BUG_ON(obj->mm.madv != I915_MADV_WILLNEED); in i915_gem_object_can_migrate()
550 if (obj->mm.region == mr) in i915_gem_object_can_migrate()
553 if (!i915_gem_object_evictable(obj)) in i915_gem_object_can_migrate()
556 if (!obj->ops->migrate) in i915_gem_object_can_migrate()
559 if (!(obj->flags & I915_BO_ALLOC_USER)) in i915_gem_object_can_migrate()
566 if (mr == obj->mm.placements[i]) in i915_gem_object_can_migrate()
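The migratability checks compose from cheapest to most specific: already resident in the target region is a trivial yes; a non-evictable object or a backend without a migrate hook is a hard no; objects not allocated on behalf of userspace (I915_BO_ALLOC_USER) get special-cased, though the branch body at line 559 is elided in this listing; and finally the requested region must appear in the object's allowed placement list. A sketch of the closing placement scan:

	/* mr is the intel_memory_region resolved from the requested id;
	 * only a region in the user-supplied placement list is allowed. */
	for (i = 0; i < num_allowed; ++i) {
		if (mr == obj->mm.placements[i])
			return true;
	}

	return false;	/* assumed tail: target not in the placement list */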
598 int i915_gem_object_migrate(struct drm_i915_gem_object *obj, in i915_gem_object_migrate() argument
602 struct drm_i915_private *i915 = to_i915(obj->base.dev); in i915_gem_object_migrate()
606 GEM_BUG_ON(obj->mm.madv != I915_MADV_WILLNEED); in i915_gem_object_migrate()
607 assert_object_held(obj); in i915_gem_object_migrate()
612 if (!i915_gem_object_can_migrate(obj, id)) in i915_gem_object_migrate()
615 if (!obj->ops->migrate) { in i915_gem_object_migrate()
616 if (GEM_WARN_ON(obj->mm.region != mr)) in i915_gem_object_migrate()
621 return obj->ops->migrate(obj, mr); in i915_gem_object_migrate()
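i915_gem_object_migrate() itself is thin: under the held object lock (assert_object_held() at line 607) it revalidates with i915_gem_object_can_migrate(), treats a missing migrate hook as success only when the object already sits in the target region, and otherwise delegates to the backend. A hedged usage sketch, assuming the usual i915 ww-lock helpers and that the function's middle parameter (elided above) is the ww context:

	struct i915_gem_ww_ctx ww;
	int err;

	/* Take the object lock with deadlock-avoidance retries, then ask
	 * for a move to system memory. Names beyond those in the listing
	 * are assumptions. */
	for_i915_gem_ww(&ww, err, true) {
		err = i915_gem_object_lock(obj, &ww);
		if (err)
			continue;

		err = i915_gem_object_migrate(obj, &ww, INTEL_REGION_SMEM);
	}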
632 bool i915_gem_object_placement_possible(struct drm_i915_gem_object *obj, in i915_gem_object_placement_possible() argument
637 if (!obj->mm.n_placements) { in i915_gem_object_placement_possible()
640 return i915_gem_object_has_iomem(obj); in i915_gem_object_placement_possible()
642 return i915_gem_object_has_pages(obj); in i915_gem_object_placement_possible()
650 for (i = 0; i < obj->mm.n_placements; i++) { in i915_gem_object_placement_possible()
651 if (obj->mm.placements[i]->type == type) in i915_gem_object_placement_possible()