/drivers/gpu/drm/ttm/
D | ttm_bo.c |
    98   dma_resv_assert_held(bo->base.resv);  in ttm_bo_move_to_lru_tail()
    139  dma_resv_assert_held(pos->first->base.resv);  in ttm_bo_bulk_move_lru_tail()
    140  dma_resv_assert_held(pos->last->base.resv);  in ttm_bo_bulk_move_lru_tail()
    154  dma_resv_assert_held(pos->first->base.resv);  in ttm_bo_bulk_move_lru_tail()
    155  dma_resv_assert_held(pos->last->base.resv);  in ttm_bo_bulk_move_lru_tail()
    236  if (bo->base.resv == &bo->base._resv)  in ttm_bo_individualize_resv()
    241  r = dma_resv_copy_fences(&bo->base._resv, bo->base.resv);  in ttm_bo_individualize_resv()
    252  bo->base.resv = &bo->base._resv;  in ttm_bo_individualize_resv()
    261  struct dma_resv *resv = &bo->base._resv;  in ttm_bo_flush_all_fences()  local
    267  fobj = dma_resv_shared_list(resv);  in ttm_bo_flush_all_fences()
    [all …]
D | ttm_execbuf_util.c |
    42   dma_resv_unlock(bo->base.resv);  in ttm_eu_backoff_reservation_reverse()
    58   dma_resv_unlock(bo->base.resv);  in ttm_eu_backoff_reservation()
    107  ret = dma_resv_reserve_shared(bo->base.resv,  in ttm_eu_reserve_buffers()
    124  ret = dma_resv_reserve_shared(bo->base.resv,  in ttm_eu_reserve_buffers()
    159  dma_resv_add_shared_fence(bo->base.resv, fence);  in ttm_eu_fence_buffer_objects()
    161  dma_resv_add_excl_fence(bo->base.resv, fence);  in ttm_eu_fence_buffer_objects()
    163  dma_resv_unlock(bo->base.resv);  in ttm_eu_fence_buffer_objects()
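
The ttm_eu_* hits above follow the canonical reservation pattern: take the lock, pre-allocate a shared-fence slot with dma_resv_reserve_shared(), then publish the fence as shared or exclusive before unlocking. A minimal sketch of that order for a single buffer object; the helper name and the write flag are invented for illustration and error handling is trimmed:

#include <linux/dma-resv.h>
#include <linux/dma-fence.h>
#include <drm/ttm/ttm_bo_api.h>

/* Hedged sketch only: one BO, one fence, in the lock -> reserve ->
 * add -> unlock order used by ttm_eu_reserve_buffers() and
 * ttm_eu_fence_buffer_objects() above.
 */
static int example_fence_single_bo(struct ttm_buffer_object *bo,
				   struct dma_fence *fence, bool write)
{
	int ret = dma_resv_lock(bo->base.resv, NULL);

	if (ret)
		return ret;

	if (write) {
		/* Exclusive fence: later readers must wait for it. */
		dma_resv_add_excl_fence(bo->base.resv, fence);
	} else {
		/* A shared slot must be reserved before adding the fence. */
		ret = dma_resv_reserve_shared(bo->base.resv, 1);
		if (!ret)
			dma_resv_add_shared_fence(bo->base.resv, fence);
	}

	dma_resv_unlock(bo->base.resv);
	return ret;
}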
D | ttm_bo_vm.c |
    74   dma_resv_unlock(bo->base.resv);  in ttm_bo_vm_fault_idle()
    138  if (unlikely(!dma_resv_trylock(bo->base.resv))) {  in ttm_bo_vm_reserve()
    148  if (!dma_resv_lock_interruptible(bo->base.resv,  in ttm_bo_vm_reserve()
    150  dma_resv_unlock(bo->base.resv);  in ttm_bo_vm_reserve()
    157  if (dma_resv_lock_interruptible(bo->base.resv, NULL))  in ttm_bo_vm_reserve()
    166  dma_resv_unlock(bo->base.resv);  in ttm_bo_vm_reserve()
    350  dma_resv_unlock(bo->base.resv);  in ttm_bo_vm_fault()
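
ttm_bo_vm_reserve() shows the fault-path variant: the page-fault handler first attempts the cheap dma_resv_trylock() and only falls back to a sleeping, interruptible lock when the object is contended. A simplified, hedged sketch; the real function additionally cooperates with FAULT_FLAG_ALLOW_RETRY and mmap_lock, which is omitted here:

#include <linux/mm.h>
#include <linux/dma-resv.h>
#include <drm/ttm/ttm_bo_api.h>

/* Illustrative only; the mmap_lock/retry interplay of the real
 * ttm_bo_vm_reserve() is left out.
 */
static vm_fault_t example_fault_reserve(struct ttm_buffer_object *bo)
{
	if (unlikely(!dma_resv_trylock(bo->base.resv))) {
		if (dma_resv_lock_interruptible(bo->base.resv, NULL))
			return VM_FAULT_NOPAGE;
	}
	return 0;
}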
/drivers/dma-buf/
D | dma-buf.c |
    104  if (dmabuf->resv == (struct dma_resv *)&dmabuf[1])  in dma_buf_release()
    105  dma_resv_fini(dmabuf->resv);  in dma_buf_release()
    237  static bool dma_buf_poll_shared(struct dma_resv *resv,  in dma_buf_poll_shared()  argument
    240  struct dma_resv_list *fobj = dma_resv_shared_list(resv);  in dma_buf_poll_shared()
    249  dma_resv_held(resv));  in dma_buf_poll_shared()
    260  static bool dma_buf_poll_excl(struct dma_resv *resv,  in dma_buf_poll_excl()  argument
    263  struct dma_fence *fence = dma_resv_excl_fence(resv);  in dma_buf_poll_excl()
    281  struct dma_resv *resv;  in dma_buf_poll()  local
    285  if (!dmabuf || !dmabuf->resv)  in dma_buf_poll()
    288  resv = dmabuf->resv;  in dma_buf_poll()
    [all …]
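
dma_buf_poll_shared() walks the shared fence list; the dma_resv_held(resv) hit at line 249 is the lockdep expression that justifies dereferencing the RCU-protected fence pointers. A hedged sketch of that access pattern, assuming the caller already holds the reservation lock; the helper name is hypothetical:

#include <linux/dma-resv.h>
#include <linux/dma-fence.h>

/* Sketch: check whether every shared fence has signaled.  The
 * rcu_dereference_protected() is legal because dma_resv_held()
 * proves the reservation lock is held.
 */
static bool example_all_shared_signaled(struct dma_resv *resv)
{
	struct dma_resv_list *list = dma_resv_shared_list(resv);
	unsigned int i;

	for (i = 0; list && i < list->shared_count; ++i) {
		struct dma_fence *fence =
			rcu_dereference_protected(list->shared[i],
						  dma_resv_held(resv));

		if (!dma_fence_is_signaled(fence))
			return false;
	}
	return true;
}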
/drivers/gpu/drm/i915/
D | dma_resv_utils.c |
    10  void dma_resv_prune(struct dma_resv *resv)  in dma_resv_prune()  argument
    12  if (dma_resv_trylock(resv)) {  in dma_resv_prune()
    13  if (dma_resv_test_signaled(resv, true))  in dma_resv_prune()
    14  dma_resv_add_excl_fence(resv, NULL);  in dma_resv_prune()
    15  dma_resv_unlock(resv);  in dma_resv_prune()
/drivers/gpu/drm/vgem/
D | vgem_fence.c |
    131  struct dma_resv *resv;  in vgem_fence_attach_ioctl()  local
    153  resv = obj->resv;  in vgem_fence_attach_ioctl()
    154  if (!dma_resv_test_signaled(resv, arg->flags & VGEM_FENCE_WRITE)) {  in vgem_fence_attach_ioctl()
    161  dma_resv_lock(resv, NULL);  in vgem_fence_attach_ioctl()
    163  dma_resv_add_excl_fence(resv, fence);  in vgem_fence_attach_ioctl()
    164  else if ((ret = dma_resv_reserve_shared(resv, 1)) == 0)  in vgem_fence_attach_ioctl()
    165  dma_resv_add_shared_fence(resv, fence);  in vgem_fence_attach_ioctl()
    166  dma_resv_unlock(resv);  in vgem_fence_attach_ioctl()
/drivers/gpu/drm/amd/amdgpu/
D | amdgpu_dma_buf.c |
    334  struct dma_resv *resv = dma_buf->resv;  in amdgpu_dma_buf_create_obj()  local
    341  dma_resv_lock(resv, NULL);  in amdgpu_dma_buf_create_obj()
    351  ttm_bo_type_sg, resv, &gobj);  in amdgpu_dma_buf_create_obj()
    359  dma_resv_unlock(resv);  in amdgpu_dma_buf_create_obj()
    363  dma_resv_unlock(resv);  in amdgpu_dma_buf_create_obj()
    379  struct ww_acquire_ctx *ticket = dma_resv_locking_ctx(obj->resv);  in amdgpu_dma_buf_move_notify()
    398  struct dma_resv *resv = vm->root.bo->tbo.base.resv;  in amdgpu_dma_buf_move_notify()  local
    405  r = dma_resv_lock(resv, ticket);  in amdgpu_dma_buf_move_notify()
    414  if (!dma_resv_trylock(resv))  in amdgpu_dma_buf_move_notify()
    426  dma_resv_unlock(resv);  in amdgpu_dma_buf_move_notify()
D | amdgpu_vm.c |
    375  if (bo->tbo.base.resv != vm->root.bo->tbo.base.resv)  in amdgpu_vm_bo_base_init()
    665  if (abo->tbo.base.resv == vm->root.bo->tbo.base.resv)  in amdgpu_vm_del_from_lru_notify()
    926  struct dma_resv *resv;  in amdgpu_vm_pt_create()  local
    952  bp.resv = vm->root.bo->tbo.base.resv;  in amdgpu_vm_pt_create()
    964  if (!bp.resv)  in amdgpu_vm_pt_create()
    965  WARN_ON(dma_resv_lock(bo->tbo.base.resv,  in amdgpu_vm_pt_create()
    967  resv = bp.resv;  in amdgpu_vm_pt_create()
    973  bp.resv = bo->tbo.base.resv;  in amdgpu_vm_pt_create()
    978  if (!resv)  in amdgpu_vm_pt_create()
    979  dma_resv_unlock(bo->tbo.base.resv);  in amdgpu_vm_pt_create()
    [all …]
D | amdgpu_object.c |
    250   bp.resv = NULL;  in amdgpu_bo_create_reserved()
    534   .resv = bp->resv  in amdgpu_bo_create()
    592   bp->resv, bp->destroy);  in amdgpu_bo_create()
    608   r = amdgpu_fill_buffer(bo, 0, bo->tbo.base.resv, &fence);  in amdgpu_bo_create()
    617   if (!bp->resv)  in amdgpu_bo_create()
    630   if (!bp->resv)  in amdgpu_bo_create()
    631   dma_resv_unlock(bo->tbo.base.resv);  in amdgpu_bo_create()
    802   r = dma_resv_wait_timeout(bo->tbo.base.resv, false, false,  in amdgpu_bo_kmap()
    1140  dma_resv_assert_held(bo->tbo.base.resv);  in amdgpu_bo_get_tiling_flags()
    1309  && bo->base.resv != &bo->base._resv);  in amdgpu_bo_release_notify()
    [all …]
D | amdgpu_vm_cpu.c |
    48  struct dma_resv *resv,  in amdgpu_vm_cpu_prepare()  argument
    51  if (!resv)  in amdgpu_vm_cpu_prepare()
    54  return amdgpu_bo_sync_wait_resv(p->adev, resv, sync_mode, p->vm, true);  in amdgpu_vm_cpu_prepare()
D | amdgpu_sync.c |
    253  struct dma_resv *resv, enum amdgpu_sync_mode mode,  in amdgpu_sync_resv()  argument
    261  if (resv == NULL)  in amdgpu_sync_resv()
    265  f = dma_resv_excl_fence(resv);  in amdgpu_sync_resv()
    279  flist = dma_resv_shared_list(resv);  in amdgpu_sync_resv()
    285  dma_resv_held(resv));  in amdgpu_sync_resv()
D | amdgpu_gem.c |
    74   dma_resv_unlock(bo->base.resv);  in amdgpu_gem_fault()
    98   struct dma_resv *resv,  in amdgpu_gem_object_create()  argument
    112  bp.resv = resv;  in amdgpu_gem_object_create()
    173  abo->tbo.base.resv != vm->root.bo->tbo.base.resv)  in amdgpu_gem_object_open()
    229  fence = dma_resv_excl_fence(bo->tbo.base.resv);  in amdgpu_gem_object_close()
    293  struct dma_resv *resv = NULL;  in amdgpu_gem_create_ioctl()  local
    336  resv = vm->root.bo->tbo.base.resv;  in amdgpu_gem_create_ioctl()
    343  flags, ttm_bo_type_device, resv, &gobj);  in amdgpu_gem_create_ioctl()
    533  ret = dma_resv_wait_timeout(robj->tbo.base.resv, true, true, timeout);  in amdgpu_gem_wait_idle_ioctl()
/drivers/gpu/drm/i915/gem/
D | i915_gem_wait.c |
    36   i915_gem_object_wait_reservation(struct dma_resv *resv,  in i915_gem_object_wait_reservation()  argument
    48   ret = dma_resv_get_fences(resv, &excl, &count, &shared);  in i915_gem_object_wait_reservation()
    76   excl = dma_resv_get_excl_unlocked(resv);  in i915_gem_object_wait_reservation()
    89   dma_resv_prune(resv);  in i915_gem_object_wait_reservation()
    161  ret = dma_resv_get_fences(obj->base.resv, &excl, &count,  in i915_gem_object_wait_priority()
    173  excl = dma_resv_get_excl_unlocked(obj->base.resv);  in i915_gem_object_wait_priority()
    197  timeout = i915_gem_object_wait_reservation(obj->base.resv,  in i915_gem_object_wait()
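
i915_gem_object_wait_reservation() is built on the snapshot-then-wait idiom: dma_resv_get_fences() takes references to the exclusive and shared fences without holding the lock, and the caller then waits on the copies. A hedged sketch of that idiom, assuming the 5.14-era signature seen in the hit at line 48; the helper name is hypothetical:

#include <linux/dma-resv.h>
#include <linux/dma-fence.h>
#include <linux/slab.h>

static long example_wait_reservation(struct dma_resv *resv, long timeout)
{
	struct dma_fence *excl, **shared;
	unsigned int count, i;
	int ret;

	/* Snapshot: grab references, no lock held afterwards. */
	ret = dma_resv_get_fences(resv, &excl, &count, &shared);
	if (ret)
		return ret;

	for (i = 0; i < count; i++) {
		if (timeout > 0)
			timeout = dma_fence_wait_timeout(shared[i], true,
							 timeout);
		dma_fence_put(shared[i]);
	}
	kfree(shared);

	if (excl && timeout > 0)
		timeout = dma_fence_wait_timeout(excl, true, timeout);
	dma_fence_put(excl);	/* dma_fence_put() tolerates NULL */

	return timeout;
}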
D | i915_gem_busy.c |
    113  seq = raw_read_seqcount(&obj->base.resv->seq);  in i915_gem_busy_ioctl()
    116  args->busy = busy_check_writer(dma_resv_excl_fence(obj->base.resv));  in i915_gem_busy_ioctl()
    119  list = dma_resv_shared_list(obj->base.resv);  in i915_gem_busy_ioctl()
    131  if (args->busy && read_seqcount_retry(&obj->base.resv->seq, seq))  in i915_gem_busy_ioctl()
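
The busy ioctl reads the fences locklessly: it samples the reservation object's seqcount, inspects the exclusive fence and the shared list under RCU, and retries if the seqcount moved. A hedged sketch of the retry loop; the busy/idle result is simplified (the real code classifies fences per engine), and the helper name is invented:

#include <linux/dma-resv.h>
#include <linux/dma-fence.h>
#include <linux/rcupdate.h>

static bool example_object_busy(struct dma_resv *resv)
{
	struct dma_resv_list *list;
	struct dma_fence *excl;
	unsigned int seq;
	bool busy;

	rcu_read_lock();
retry:
	seq = raw_read_seqcount(&resv->seq);

	/* An unsignaled exclusive fence means a writer is outstanding. */
	excl = dma_resv_excl_fence(resv);
	busy = excl && !dma_fence_is_signaled(excl);

	/* Any shared fence counts as a busy reader here (simplified). */
	list = dma_resv_shared_list(resv);
	if (list && list->shared_count)
		busy = true;

	/* A concurrent update invalidates the snapshot; sample again. */
	if (read_seqcount_retry(&resv->seq, seq))
		goto retry;
	rcu_read_unlock();

	return busy;
}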
D | i915_gem_object.h |
    156  #define assert_object_held(obj) dma_resv_assert_held((obj)->base.resv)
    179  ret = dma_resv_lock_interruptible(obj->base.resv, ww ? &ww->ctx : NULL);  in __i915_gem_object_lock()
    181  ret = dma_resv_lock(obj->base.resv, ww ? &ww->ctx : NULL);  in __i915_gem_object_lock()
    213  return dma_resv_trylock(obj->base.resv);  in i915_gem_object_trylock()
    221  dma_resv_unlock(obj->base.resv);  in i915_gem_object_unlock()
    513  fence = dma_resv_get_excl_unlocked(obj->base.resv);  in i915_gem_object_last_write_engine()
/drivers/gpu/drm/radeon/
D | radeon_prime.c |
    47  struct dma_resv *resv = attach->dmabuf->resv;  in radeon_gem_prime_import_sg_table()  local
    52  dma_resv_lock(resv, NULL);  in radeon_gem_prime_import_sg_table()
    54  RADEON_GEM_DOMAIN_GTT, 0, sg, resv, &bo);  in radeon_gem_prime_import_sg_table()
    55  dma_resv_unlock(resv);  in radeon_gem_prime_import_sg_table()
D | radeon_benchmark.c |
    38   struct dma_resv *resv)  in radeon_benchmark_do_move()  argument
    51   resv);  in radeon_benchmark_do_move()
    56   resv);  in radeon_benchmark_do_move()
    125  dobj->tbo.base.resv);  in radeon_benchmark_move()
    136  dobj->tbo.base.resv);  in radeon_benchmark_move()
D | radeon_sync.c |
    91   struct dma_resv *resv,  in radeon_sync_resv()  argument
    101  f = dma_resv_excl_fence(resv);  in radeon_sync_resv()
    108  flist = dma_resv_shared_list(resv);  in radeon_sync_resv()
    114  dma_resv_held(resv));  in radeon_sync_resv()
D | radeon_object.c |
    154  struct dma_resv *resv,  in radeon_bo_create()  argument
    227  &bo->placement, page_align, !kernel, sg, resv,  in radeon_bo_create()
    564  dma_resv_assert_held(bo->tbo.base.resv);  in radeon_bo_get_surface_reg()
    690  dma_resv_assert_held(bo->tbo.base.resv);  in radeon_bo_get_tiling_flags()
    702  dma_resv_assert_held(bo->tbo.base.resv);  in radeon_bo_check_tiling()
    810  struct dma_resv *resv = bo->tbo.base.resv;  in radeon_bo_fence()  local
    813  dma_resv_add_shared_fence(resv, &fence->base);  in radeon_bo_fence()
    815  dma_resv_add_excl_fence(resv, &fence->base);  in radeon_bo_fence()
/drivers/gpu/drm/
D | drm_gem_ttm_helper.c |
    69  dma_resv_lock(gem->resv, NULL);  in drm_gem_ttm_vmap()
    71  dma_resv_unlock(gem->resv);  in drm_gem_ttm_vmap()
    90  dma_resv_lock(gem->resv, NULL);  in drm_gem_ttm_vunmap()
    92  dma_resv_unlock(gem->resv);  in drm_gem_ttm_vunmap()
D | drm_gem.c |
    163   if (!obj->resv)  in drm_gem_private_object_init()
    164   obj->resv = &obj->_resv;  in drm_gem_private_object_init()
    758   ret = dma_resv_wait_timeout(obj->resv, wait_all, true, timeout);  in drm_gem_dma_resv_wait()
    1209  ret = dma_resv_lock_slow_interruptible(obj->resv,  in drm_gem_lock_reservations()
    1221  ret = dma_resv_lock_interruptible(objs[i]->resv,  in drm_gem_lock_reservations()
    1227  dma_resv_unlock(objs[j]->resv);  in drm_gem_lock_reservations()
    1230  dma_resv_unlock(objs[contended]->resv);  in drm_gem_lock_reservations()
    1255  dma_resv_unlock(objs[i]->resv);  in drm_gem_unlock_reservations()
    1334  dma_resv_get_excl_unlocked(obj->resv);  in drm_gem_fence_array_add_implicit()
    1339  ret = dma_resv_get_fences(obj->resv, NULL,  in drm_gem_fence_array_add_implicit()
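
drm_gem_lock_reservations() (hits at 1209-1230) is the standard wound/wait loop for taking many reservation locks at once: on -EDEADLK everything already held is dropped, the contended lock is re-taken on the sleeping slow path, and the loop restarts. A hedged, lightly condensed sketch of its shape:

#include <linux/dma-resv.h>
#include <linux/ww_mutex.h>
#include <drm/drm_gem.h>

static int example_lock_all(struct drm_gem_object **objs, int count,
			    struct ww_acquire_ctx *ctx)
{
	int contended = -1;
	int i, ret;

	ww_acquire_init(ctx, &reservation_ww_class);

retry:
	if (contended != -1) {
		/* Slow path: sleep until the contended lock is free. */
		ret = dma_resv_lock_slow_interruptible(objs[contended]->resv,
						       ctx);
		if (ret) {
			ww_acquire_fini(ctx);
			return ret;
		}
	}

	for (i = 0; i < count; i++) {
		if (i == contended)
			continue;

		ret = dma_resv_lock_interruptible(objs[i]->resv, ctx);
		if (ret) {
			int j;

			/* Back off: drop everything taken so far. */
			for (j = 0; j < i; j++)
				dma_resv_unlock(objs[j]->resv);
			if (contended != -1 && contended >= i)
				dma_resv_unlock(objs[contended]->resv);

			if (ret == -EDEADLK) {
				contended = i;
				goto retry;
			}
			ww_acquire_fini(ctx);
			return ret;
		}
	}

	ww_acquire_done(ctx);
	return 0;
}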
/drivers/infiniband/core/
D | umem_dmabuf.c |
    21   dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv);  in ib_umem_dmabuf_map_pages()
    68   fence = dma_resv_excl_fence(umem_dmabuf->attach->dmabuf->resv);  in ib_umem_dmabuf_map_pages()
    78   dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv);  in ib_umem_dmabuf_unmap_pages()
    170  dma_resv_lock(dmabuf->resv, NULL);  in ib_umem_dmabuf_release()
    172  dma_resv_unlock(dmabuf->resv);  in ib_umem_dmabuf_release()
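
ib_umem_dmabuf_map_pages() (line 68) waits on the exclusive fence before the new mapping is used, so a pending writer, for instance a migration blit, finishes first. A hedged sketch of that wait, assuming the caller holds the dma-buf's reservation lock as the surrounding dma-buf attachment API requires; the helper name is hypothetical:

#include <linux/dma-buf.h>
#include <linux/dma-resv.h>
#include <linux/dma-fence.h>

static int example_wait_for_writer(struct dma_buf_attachment *attach)
{
	struct dma_fence *fence;

	dma_resv_assert_held(attach->dmabuf->resv);

	/* The exclusive fence, if any, gates use of the pages. */
	fence = dma_resv_excl_fence(attach->dmabuf->resv);
	if (fence)
		return dma_fence_wait(fence, false);
	return 0;
}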
/drivers/net/ethernet/netronome/nfp/crypto/
D | fw.h |
    17  u8 resv[2];  member
    37  u8 resv[3];  member
    86  u8 resv[3];  member
/drivers/gpu/drm/msm/
D | msm_gem.h |
    172  dma_resv_lock(obj->resv, NULL);  in msm_gem_lock()
    178  return dma_resv_trylock(obj->resv);  in msm_gem_trylock()
    184  return dma_resv_lock_interruptible(obj->resv, NULL);  in msm_gem_lock_interruptible()
    190  dma_resv_unlock(obj->resv);  in msm_gem_unlock()
    196  return dma_resv_is_locked(obj->resv);  in msm_gem_is_locked()
/drivers/gpu/drm/nouveau/
D | nouveau_fence.c |
    346  struct dma_resv *resv = nvbo->bo.base.resv;  in nouveau_fence_sync()  local
    352  ret = dma_resv_reserve_shared(resv, 1);  in nouveau_fence_sync()
    359  fobj = dma_resv_shared_list(resv);  in nouveau_fence_sync()
    370  dma_resv_held(resv));  in nouveau_fence_sync()
    385  fence = dma_resv_excl_fence(resv);  in nouveau_fence_sync()