
Searched refs:resv (Results 1 – 25 of 179) sorted by relevance


/drivers/dma-buf/
st-dma-resv.c
40 struct dma_resv resv; in sanitycheck() local
53 dma_resv_init(&resv); in sanitycheck()
54 r = dma_resv_lock(&resv, NULL); in sanitycheck()
58 dma_resv_unlock(&resv); in sanitycheck()
59 dma_resv_fini(&resv); in sanitycheck()
66 struct dma_resv resv; in test_signaling() local
76 dma_resv_init(&resv); in test_signaling()
77 r = dma_resv_lock(&resv, NULL); in test_signaling()
83 r = dma_resv_reserve_fences(&resv, 1); in test_signaling()
89 dma_resv_add_fence(&resv, f, usage); in test_signaling()
[all …]
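
The selftest above walks the entire dma_resv lifecycle: init, lock, slot reservation, fence addition, unlock, fini. A minimal sketch of that sequence, assuming f is a valid dma_fence obtained elsewhere:

    #include <linux/dma-resv.h>
    #include <linux/dma-fence.h>

    static int resv_lifecycle_sketch(struct dma_fence *f)
    {
            struct dma_resv resv;
            int r;

            dma_resv_init(&resv);

            r = dma_resv_lock(&resv, NULL); /* no ww ctx: single object */
            if (r)
                    goto out_fini;

            /* Reserve a slot first; dma_resv_add_fence() itself cannot fail. */
            r = dma_resv_reserve_fences(&resv, 1);
            if (!r)
                    dma_resv_add_fence(&resv, f, DMA_RESV_USAGE_WRITE);

            dma_resv_unlock(&resv);
    out_fini:
            dma_resv_fini(&resv);
            return r;
    }
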
dma-buf.c
112 if (dmabuf->resv == (struct dma_resv *)&dmabuf[1]) in dma_buf_release()
113 dma_resv_fini(dmabuf->resv); in dma_buf_release()
249 static bool dma_buf_poll_add_cb(struct dma_resv *resv, bool write, in dma_buf_poll_add_cb() argument
256 dma_resv_for_each_fence(&cursor, resv, dma_resv_usage_rw(write), in dma_buf_poll_add_cb()
271 struct dma_resv *resv; in dma_buf_poll() local
275 if (!dmabuf || !dmabuf->resv) in dma_buf_poll()
278 resv = dmabuf->resv; in dma_buf_poll()
286 dma_resv_lock(resv, NULL); in dma_buf_poll()
303 if (!dma_buf_poll_add_cb(resv, true, dcb)) in dma_buf_poll()
326 if (!dma_buf_poll_add_cb(resv, false, dcb)) in dma_buf_poll()
[all …]
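
dma_buf_poll() above takes the reservation lock and then visits every fence relevant for a read or a write via dma_resv_for_each_fence(), with dma_resv_usage_rw() translating the r/w intent into a usage filter. A hedged sketch of that locked iteration (the loop body here is a placeholder):

    #include <linux/dma-resv.h>
    #include <linux/dma-fence.h>

    /* Caller must hold the reservation lock, as dma_buf_poll() does. */
    static void visit_rw_fences(struct dma_resv *resv, bool write)
    {
            struct dma_resv_iter cursor;
            struct dma_fence *fence;

            dma_resv_assert_held(resv);

            dma_resv_for_each_fence(&cursor, resv, dma_resv_usage_rw(write),
                                    fence) {
                    /* e.g. hook up a callback, as dma_buf_poll_add_cb() does */
            }
    }
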
/drivers/gpu/drm/ttm/
ttm_bo.c
76 dma_resv_assert_held(bo->base.resv); in ttm_bo_move_to_lru_tail()
100 dma_resv_assert_held(bo->base.resv); in ttm_bo_set_bulk_move()
148 ret = dma_resv_reserve_fences(bo->base.resv, 1); in ttm_bo_handle_move_mem()
190 if (bo->base.resv == &bo->base._resv) in ttm_bo_individualize_resv()
195 r = dma_resv_copy_fences(&bo->base._resv, bo->base.resv); in ttm_bo_individualize_resv()
206 bo->base.resv = &bo->base._resv; in ttm_bo_individualize_resv()
215 struct dma_resv *resv = &bo->base._resv; in ttm_bo_flush_all_fences() local
219 dma_resv_iter_begin(&cursor, resv, DMA_RESV_USAGE_BOOKKEEP); in ttm_bo_flush_all_fences()
245 struct dma_resv *resv = &bo->base._resv; in ttm_bo_cleanup_refs() local
248 if (dma_resv_test_signaled(resv, DMA_RESV_USAGE_BOOKKEEP)) in ttm_bo_cleanup_refs()
[all …]
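
ttm_bo_flush_all_fences() above uses the unlocked iterator instead, which is safe without the reservation lock but may restart if the fence list changes mid-walk. A sketch of the begin/iterate/end pattern, under that caveat:

    #include <linux/dma-resv.h>
    #include <linux/dma-fence.h>

    /* Walk all fences, including bookkeeping ones, without holding the
     * lock; the body must tolerate seeing a fence more than once. */
    static void flush_all_fences_sketch(struct dma_resv *resv)
    {
            struct dma_resv_iter cursor;
            struct dma_fence *fence;

            dma_resv_iter_begin(&cursor, resv, DMA_RESV_USAGE_BOOKKEEP);
            dma_resv_for_each_fence_unlocked(&cursor, fence) {
                    dma_fence_enable_sw_signaling(fence); /* as TTM does here */
            }
            dma_resv_iter_end(&cursor);
    }
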
ttm_execbuf_util.c
38 dma_resv_unlock(bo->base.resv); in ttm_eu_backoff_reservation_reverse()
54 dma_resv_unlock(bo->base.resv); in ttm_eu_backoff_reservation()
102 ret = dma_resv_reserve_fences(bo->base.resv, in ttm_eu_reserve_buffers()
119 ret = dma_resv_reserve_fences(bo->base.resv, in ttm_eu_reserve_buffers()
153 dma_resv_add_fence(bo->base.resv, fence, entry->num_shared ? in ttm_eu_fence_buffer_objects()
156 dma_resv_unlock(bo->base.resv); in ttm_eu_fence_buffer_objects()
ttm_bo_vm.c
49 if (dma_resv_test_signaled(bo->base.resv, DMA_RESV_USAGE_KERNEL)) in ttm_bo_vm_fault_idle()
63 (void)dma_resv_wait_timeout(bo->base.resv, in ttm_bo_vm_fault_idle()
66 dma_resv_unlock(bo->base.resv); in ttm_bo_vm_fault_idle()
74 err = dma_resv_wait_timeout(bo->base.resv, DMA_RESV_USAGE_KERNEL, true, in ttm_bo_vm_fault_idle()
125 if (unlikely(!dma_resv_trylock(bo->base.resv))) { in ttm_bo_vm_reserve()
135 if (!dma_resv_lock_interruptible(bo->base.resv, in ttm_bo_vm_reserve()
137 dma_resv_unlock(bo->base.resv); in ttm_bo_vm_reserve()
144 if (dma_resv_lock_interruptible(bo->base.resv, NULL)) in ttm_bo_vm_reserve()
154 dma_resv_unlock(bo->base.resv); in ttm_bo_vm_reserve()
344 dma_resv_unlock(bo->base.resv); in ttm_bo_vm_fault()
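
ttm_bo_vm_reserve() above shows the page-fault locking convention: try a non-blocking dma_resv_trylock() first, and only fall back to a sleeping, interruptible lock when the fault may be retried. A condensed sketch with simplified return codes (the real function returns vm_fault_t values):

    #include <linux/dma-resv.h>

    /* 0 = locked, -EBUSY = caller should retry the fault,
     * -ERESTARTSYS = interrupted by a signal. */
    static int fault_lock_sketch(struct dma_resv *resv, bool can_retry)
    {
            if (dma_resv_trylock(resv))
                    return 0;

            if (can_retry) {
                    /* Sleep outside the fast path, then ask for a retry. */
                    if (!dma_resv_lock_interruptible(resv, NULL))
                            dma_resv_unlock(resv);
                    return -EBUSY;
            }

            return dma_resv_lock_interruptible(resv, NULL);
    }
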
ttm_bo_util.c
250 fbo->base.base.resv = &fbo->base.base._resv; in ttm_buffer_object_transfer()
474 dma_resv_assert_held(bo->base.resv); in ttm_bo_vmap()
542 dma_resv_assert_held(bo->base.resv); in ttm_bo_vunmap()
562 ret = dma_resv_wait_timeout(bo->base.resv, DMA_RESV_USAGE_BOOKKEEP, in ttm_bo_wait_free_node()
662 dma_resv_add_fence(bo->base.resv, fence, DMA_RESV_USAGE_KERNEL); in ttm_bo_move_accel_cleanup()
721 if (dma_resv_test_signaled(bo->base.resv, DMA_RESV_USAGE_BOOKKEEP)) { in ttm_bo_pipeline_gutting()
755 ret = dma_resv_copy_fences(&ghost->base._resv, bo->base.resv); in ttm_bo_pipeline_gutting()
758 dma_resv_wait_timeout(bo->base.resv, DMA_RESV_USAGE_BOOKKEEP, in ttm_bo_pipeline_gutting()
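
Several hits above block on the object with dma_resv_wait_timeout(), which returns the remaining timeout on success, 0 on timeout, or a negative error. A sketch of the usual call-and-check, with an illustrative 30-second timeout:

    #include <linux/dma-resv.h>
    #include <linux/jiffies.h>

    static int wait_idle_sketch(struct dma_resv *resv)
    {
            long ret;

            /* true = interruptible wait; BOOKKEEP covers all fences. */
            ret = dma_resv_wait_timeout(resv, DMA_RESV_USAGE_BOOKKEEP,
                                        true, 30 * HZ);
            if (ret < 0)
                    return ret;             /* e.g. -ERESTARTSYS */
            return ret ? 0 : -EBUSY;        /* 0 jiffies left == timed out */
    }
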
/drivers/gpu/drm/
drm_gem_shmem_helper.c
145 dma_resv_lock(shmem->base.resv, NULL); in drm_gem_shmem_free()
160 dma_resv_unlock(shmem->base.resv); in drm_gem_shmem_free()
173 dma_resv_assert_held(shmem->base.resv); in drm_gem_shmem_get_pages()
211 dma_resv_assert_held(shmem->base.resv); in drm_gem_shmem_put_pages()
235 dma_resv_assert_held(shmem->base.resv); in drm_gem_shmem_pin_locked()
244 dma_resv_assert_held(shmem->base.resv); in drm_gem_shmem_unpin_locked()
266 ret = dma_resv_lock_interruptible(shmem->base.resv, NULL); in drm_gem_shmem_pin()
270 dma_resv_unlock(shmem->base.resv); in drm_gem_shmem_pin()
289 dma_resv_lock(shmem->base.resv, NULL); in drm_gem_shmem_unpin()
291 dma_resv_unlock(shmem->base.resv); in drm_gem_shmem_unpin()
[all …]
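
The shmem helpers above come in paired flavours: drm_gem_shmem_pin() takes shmem->base.resv itself, while the _locked variants merely document their precondition with dma_resv_assert_held(). A sketch of that convention with a hypothetical helper pair:

    #include <linux/dma-resv.h>

    static void frob_locked(struct dma_resv *resv)
    {
            dma_resv_assert_held(resv); /* lockdep-only check */
            /* ... touch state protected by the reservation lock ... */
    }

    static int frob(struct dma_resv *resv)
    {
            int ret;

            ret = dma_resv_lock_interruptible(resv, NULL);
            if (ret)
                    return ret;
            frob_locked(resv);
            dma_resv_unlock(resv);
            return 0;
    }
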
drm_exec.c
60 dma_resv_unlock(obj->resv); in drm_exec_unlock_all()
167 ret = dma_resv_lock_slow_interruptible(obj->resv, in drm_exec_lock_contended()
172 dma_resv_lock_slow(obj->resv, &exec->ticket); in drm_exec_lock_contended()
183 dma_resv_unlock(obj->resv); in drm_exec_lock_contended()
216 ret = dma_resv_lock_interruptible(obj->resv, &exec->ticket); in drm_exec_lock_obj()
218 ret = dma_resv_lock(obj->resv, &exec->ticket); in drm_exec_lock_obj()
240 dma_resv_unlock(obj->resv); in drm_exec_lock_obj()
260 dma_resv_unlock(obj->resv); in drm_exec_unlock_obj()
292 ret = dma_resv_reserve_fences(obj->resv, num_fences); in drm_exec_prepare_obj()
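
drm_exec wraps the ww-mutex dance visible above: on contention it unlocks everything already held, sleeps on the contended reservation, and replays the loop. A hedged usage sketch; note the three-argument drm_exec_init() is the newer form, earlier kernels took only two:

    #include <drm/drm_exec.h>

    /* Lock one GEM object and reserve fence slots, retrying on
     * contention as the helper intends. */
    static int exec_lock_sketch(struct drm_gem_object *obj,
                                unsigned int num_fences)
    {
            struct drm_exec exec;
            int ret = 0;

            drm_exec_init(&exec, DRM_EXEC_INTERRUPTIBLE_WAIT, 0);
            drm_exec_until_all_locked(&exec) {
                    ret = drm_exec_prepare_obj(&exec, obj, num_fences);
                    drm_exec_retry_on_contention(&exec);
                    if (ret)
                            break;
            }
            /* ... submit work and add fences while locked ... */
            drm_exec_fini(&exec);
            return ret;
    }
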
drm_gem.c
164 if (!obj->resv) in drm_gem_private_object_init()
165 obj->resv = &obj->_resv; in drm_gem_private_object_init()
782 ret = dma_resv_wait_timeout(obj->resv, dma_resv_usage_rw(wait_all), in drm_gem_dma_resv_wait()
1182 dma_resv_assert_held(obj->resv); in drm_gem_vmap()
1199 dma_resv_assert_held(obj->resv); in drm_gem_vunmap()
1216 dma_resv_lock(obj->resv, NULL); in drm_gem_vmap_unlocked()
1218 dma_resv_unlock(obj->resv); in drm_gem_vmap_unlocked()
1226 dma_resv_lock(obj->resv, NULL); in drm_gem_vunmap_unlocked()
1228 dma_resv_unlock(obj->resv); in drm_gem_vunmap_unlocked()
1258 ret = dma_resv_lock_slow_interruptible(obj->resv, in drm_gem_lock_reservations()
[all …]
/drivers/gpu/drm/amd/amdgpu/
amdgpu_dma_buf.c
320 struct dma_resv *resv = dma_buf->resv; in amdgpu_dma_buf_create_obj() local
327 dma_resv_lock(resv, NULL); in amdgpu_dma_buf_create_obj()
339 ttm_bo_type_sg, resv, &gobj, 0); in amdgpu_dma_buf_create_obj()
347 dma_resv_unlock(resv); in amdgpu_dma_buf_create_obj()
351 dma_resv_unlock(resv); in amdgpu_dma_buf_create_obj()
367 struct ww_acquire_ctx *ticket = dma_resv_locking_ctx(obj->resv); in amdgpu_dma_buf_move_notify()
386 struct dma_resv *resv = vm->root.bo->tbo.base.resv; in amdgpu_dma_buf_move_notify() local
393 r = dma_resv_lock(resv, ticket); in amdgpu_dma_buf_move_notify()
402 if (!dma_resv_trylock(resv)) in amdgpu_dma_buf_move_notify()
407 r = dma_resv_reserve_fences(resv, 2); in amdgpu_dma_buf_move_notify()
[all …]
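
amdgpu_dma_buf_move_notify() above recovers the ww_acquire_ctx of the already-locked buffer with dma_resv_locking_ctx() so the VM reservation can join the same acquire context; without a context it falls back to trylock. A sketch of that pattern:

    #include <linux/dma-resv.h>

    static int lock_second_resv_sketch(struct dma_resv *locked,
                                       struct dma_resv *other)
    {
            struct ww_acquire_ctx *ticket = dma_resv_locking_ctx(locked);

            if (ticket) /* join the caller's acquire context */
                    return dma_resv_lock(other, ticket);

            /* No context available: best effort only. */
            return dma_resv_trylock(other) ? 0 : -EBUSY;
    }
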
amdgpu_vm.c
320 if (bo->tbo.base.resv != vm->root.bo->tbo.base.resv) in amdgpu_vm_bo_base_init()
323 dma_resv_assert_held(vm->root.bo->tbo.base.resv); in amdgpu_vm_bo_base_init()
866 struct dma_resv *resv, uint64_t start, uint64_t last, in amdgpu_vm_update_range() argument
926 r = vm->update_funcs->prepare(&params, resv, sync_mode); in amdgpu_vm_update_range()
1021 if (bo->tbo.base.resv != vm->root.bo->tbo.base.resv && in amdgpu_vm_bo_get_memory()
1022 !dma_resv_trylock(bo->tbo.base.resv)) in amdgpu_vm_bo_get_memory()
1026 if (bo->tbo.base.resv != vm->root.bo->tbo.base.resv) in amdgpu_vm_bo_get_memory()
1027 dma_resv_unlock(bo->tbo.base.resv); in amdgpu_vm_bo_get_memory()
1078 struct dma_resv *resv; in amdgpu_vm_bo_update() local
1085 resv = vm->root.bo->tbo.base.resv; in amdgpu_vm_bo_update()
[all …]
amdgpu_object.c
269 bp.resv = NULL; in amdgpu_bo_create_reserved()
553 .resv = bp->resv in amdgpu_bo_create()
625 bp->resv, bp->destroy); in amdgpu_bo_create()
640 r = amdgpu_fill_buffer(bo, 0, bo->tbo.base.resv, &fence, true); in amdgpu_bo_create()
644 dma_resv_add_fence(bo->tbo.base.resv, fence, in amdgpu_bo_create()
648 if (!bp->resv) in amdgpu_bo_create()
661 if (!bp->resv) in amdgpu_bo_create()
662 dma_resv_unlock(bo->tbo.base.resv); in amdgpu_bo_create()
792 r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_KERNEL, in amdgpu_bo_kmap()
1155 dma_resv_assert_held(bo->tbo.base.resv); in amdgpu_bo_get_tiling_flags()
[all …]
amdgpu_vm_cpu.c
49 struct dma_resv *resv, in amdgpu_vm_cpu_prepare() argument
52 if (!resv) in amdgpu_vm_cpu_prepare()
55 return amdgpu_bo_sync_wait_resv(p->adev, resv, sync_mode, p->vm, true); in amdgpu_vm_cpu_prepare()
80 r = dma_resv_wait_timeout(vmbo->bo.tbo.base.resv, DMA_RESV_USAGE_KERNEL, in amdgpu_vm_cpu_update()
amdgpu_vm_sdma.c
87 struct dma_resv *resv, in amdgpu_vm_sdma_prepare() argument
97 if (!resv) in amdgpu_vm_sdma_prepare()
101 r = amdgpu_sync_resv(p->adev, &sync, resv, sync_mode, p->vm); in amdgpu_vm_sdma_prepare()
143 dma_resv_add_fence(p->vm->root.bo->tbo.base.resv, f, in amdgpu_vm_sdma_commit()
244 dma_resv_iter_begin(&cursor, bo->tbo.base.resv, DMA_RESV_USAGE_KERNEL); in amdgpu_vm_sdma_update()
amdgpu_vm_pt.c
509 struct dma_resv *resv; in amdgpu_vm_pt_create() local
542 bp.resv = vm->root.bo->tbo.base.resv; in amdgpu_vm_pt_create()
554 if (!bp.resv) in amdgpu_vm_pt_create()
555 WARN_ON(dma_resv_lock(bo->tbo.base.resv, in amdgpu_vm_pt_create()
557 resv = bp.resv; in amdgpu_vm_pt_create()
563 bp.resv = bo->tbo.base.resv; in amdgpu_vm_pt_create()
569 if (!resv) in amdgpu_vm_pt_create()
570 dma_resv_unlock(bo->tbo.base.resv); in amdgpu_vm_pt_create()
/drivers/gpu/drm/vgem/
vgem_fence.c
131 struct dma_resv *resv; in vgem_fence_attach_ioctl() local
154 resv = obj->resv; in vgem_fence_attach_ioctl()
156 if (!dma_resv_test_signaled(resv, usage)) { in vgem_fence_attach_ioctl()
162 dma_resv_lock(resv, NULL); in vgem_fence_attach_ioctl()
163 ret = dma_resv_reserve_fences(resv, 1); in vgem_fence_attach_ioctl()
165 dma_resv_add_fence(resv, fence, arg->flags & VGEM_FENCE_WRITE ? in vgem_fence_attach_ioctl()
167 dma_resv_unlock(resv); in vgem_fence_attach_ioctl()
/drivers/gpu/drm/i915/gem/
i915_gem_wait.c
36 i915_gem_object_boost(struct dma_resv *resv, unsigned int flags) in i915_gem_object_boost() argument
57 dma_resv_iter_begin(&cursor, resv, in i915_gem_object_boost()
67 i915_gem_object_wait_reservation(struct dma_resv *resv, in i915_gem_object_wait_reservation() argument
75 i915_gem_object_boost(resv, flags); in i915_gem_object_wait_reservation()
77 dma_resv_iter_begin(&cursor, resv, in i915_gem_object_wait_reservation()
155 dma_resv_iter_begin(&cursor, obj->base.resv, in i915_gem_object_wait_priority()
177 timeout = i915_gem_object_wait_reservation(obj->base.resv, in i915_gem_object_wait()
i915_gem_clflush.c
113 dma_resv_reserve_fences(obj->base.resv, 1) == 0) in i915_gem_clflush_object()
117 obj->base.resv, true, in i915_gem_clflush_object()
120 dma_resv_add_fence(obj->base.resv, &clflush->base.dma, in i915_gem_clflush_object()
/drivers/infiniband/core/
umem_dmabuf.c
24 dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv); in ib_umem_dmabuf_map_pages()
72 ret = dma_resv_wait_timeout(umem_dmabuf->attach->dmabuf->resv, in ib_umem_dmabuf_map_pages()
85 dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv); in ib_umem_dmabuf_unmap_pages()
200 dma_resv_lock(umem_dmabuf->attach->dmabuf->resv, NULL); in ib_umem_dmabuf_get_pinned()
209 dma_resv_unlock(umem_dmabuf->attach->dmabuf->resv); in ib_umem_dmabuf_get_pinned()
216 dma_resv_unlock(umem_dmabuf->attach->dmabuf->resv); in ib_umem_dmabuf_get_pinned()
226 dma_resv_lock(dmabuf->resv, NULL); in ib_umem_dmabuf_release()
230 dma_resv_unlock(dmabuf->resv); in ib_umem_dmabuf_release()
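
The infiniband importer above illustrates the dma-buf side of the contract: pages may only be mapped or unmapped while the exporter's dmabuf->resv is held, and kernel fences must have signaled before the memory is touched. A condensed sketch, with the actual page mapping elided:

    #include <linux/dma-buf.h>
    #include <linux/dma-resv.h>
    #include <linux/sched.h>

    static long importer_map_sketch(struct dma_buf *dmabuf)
    {
            long ret;

            dma_resv_lock(dmabuf->resv, NULL);

            /* KERNEL-usage fences gate access to the buffer contents. */
            ret = dma_resv_wait_timeout(dmabuf->resv, DMA_RESV_USAGE_KERNEL,
                                        false, MAX_SCHEDULE_TIMEOUT);
            if (ret >= 0)
                    ret = 0; /* page mapping would happen here */

            dma_resv_unlock(dmabuf->resv);
            return ret;
    }
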
/drivers/gpu/drm/radeon/
radeon_prime.c
49 struct dma_resv *resv = attach->dmabuf->resv; in radeon_gem_prime_import_sg_table() local
54 dma_resv_lock(resv, NULL); in radeon_gem_prime_import_sg_table()
56 RADEON_GEM_DOMAIN_GTT, 0, sg, resv, &bo); in radeon_gem_prime_import_sg_table()
57 dma_resv_unlock(resv); in radeon_gem_prime_import_sg_table()
radeon_benchmark.c
38 struct dma_resv *resv) in radeon_benchmark_do_move() argument
51 resv); in radeon_benchmark_do_move()
56 resv); in radeon_benchmark_do_move()
125 dobj->tbo.base.resv); in radeon_benchmark_move()
136 dobj->tbo.base.resv); in radeon_benchmark_move()
radeon_object.c
133 struct dma_resv *resv, in radeon_bo_create() argument
206 &bo->placement, page_align, !kernel, sg, resv, in radeon_bo_create()
224 r = dma_resv_wait_timeout(bo->tbo.base.resv, DMA_RESV_USAGE_KERNEL, in radeon_bo_kmap()
550 dma_resv_assert_held(bo->tbo.base.resv); in radeon_bo_get_surface_reg()
675 dma_resv_assert_held(bo->tbo.base.resv); in radeon_bo_get_tiling_flags()
687 dma_resv_assert_held(bo->tbo.base.resv); in radeon_bo_check_tiling()
789 struct dma_resv *resv = bo->tbo.base.resv; in radeon_bo_fence() local
792 r = dma_resv_reserve_fences(resv, 1); in radeon_bo_fence()
799 dma_resv_add_fence(resv, &fence->base, shared ? in radeon_bo_fence()
/drivers/gpu/drm/loongson/
lsdc_gem.c
154 struct dma_resv *resv) in lsdc_gem_object_create() argument
161 lbo = lsdc_bo_create(ddev, domain, size, kerenl, sg, resv); in lsdc_gem_object_create()
188 struct dma_resv *resv = attach->dmabuf->resv; in lsdc_prime_import_sg_table() local
193 dma_resv_lock(resv, NULL); in lsdc_prime_import_sg_table()
195 sg, resv); in lsdc_prime_import_sg_table()
196 dma_resv_unlock(resv); in lsdc_prime_import_sg_table()
/drivers/nvme/host/
pr.c
246 struct pr_held_reservation *resv) in nvme_pr_read_reservation() argument
264 resv->generation = le32_to_cpu(tmp_rse.gen); in nvme_pr_read_reservation()
282 resv->generation = le32_to_cpu(rse->gen); in nvme_pr_read_reservation()
283 resv->type = block_pr_type_from_nvme(rse->rtype); in nvme_pr_read_reservation()
288 resv->key = le64_to_cpu(rse->regctl_eds[i].rkey); in nvme_pr_read_reservation()
296 resv->key = le64_to_cpu(rs->regctl_ds[i].rkey); in nvme_pr_read_reservation()
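
nvme_pr_read_reservation() above is a different "resv" entirely: it decodes the little-endian NVMe Reservation Status page into the block layer's pr_held_reservation. A sketch of the endian handling it performs, using a hypothetical abbreviated layout (the real structs live in the NVMe driver):

    #include <linux/types.h>
    #include <asm/byteorder.h>

    struct rse_hdr {        /* hypothetical; wire (little-endian) order */
            __le32 gen;     /* reservation generation counter */
            u8     rtype;   /* NVMe reservation type */
    };

    static void decode_rse_sketch(const struct rse_hdr *rse,
                                  u32 *generation, u8 *type)
    {
            *generation = le32_to_cpu(rse->gen); /* byte-swap on BE CPUs */
            *type = rse->rtype;                  /* single byte, no swap */
    }
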
/drivers/net/ethernet/netronome/nfp/crypto/
fw.h
17 u8 resv[2]; member
37 u8 resv[3]; member
86 u8 resv[3]; member
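
Here "resv" is plain ABI padding: the resv[] members hold the NFP firmware structures at their fixed wire size and are expected to be zeroed on write and ignored on read. A hypothetical struct in the same style:

    #include <linux/types.h>

    struct fw_req_hdr {             /* hypothetical; mirrors the fw.h hits above */
            u8     opcode;
            u8     flags;
            u8     resv[2];         /* reserved: write as zero, ignore on read */
            __be32 length;          /* endianness chosen for illustration */
    };
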
