Lines Matching refs:resv
375 if (bo->tbo.base.resv != vm->root.bo->tbo.base.resv) in amdgpu_vm_bo_base_init()
665 if (abo->tbo.base.resv == vm->root.bo->tbo.base.resv) in amdgpu_vm_del_from_lru_notify()
926 struct dma_resv *resv; in amdgpu_vm_pt_create() local
952 bp.resv = vm->root.bo->tbo.base.resv; in amdgpu_vm_pt_create()
964 if (!bp.resv) in amdgpu_vm_pt_create()
965 WARN_ON(dma_resv_lock(bo->tbo.base.resv, in amdgpu_vm_pt_create()
967 resv = bp.resv; in amdgpu_vm_pt_create()
973 bp.resv = bo->tbo.base.resv; in amdgpu_vm_pt_create()
978 if (!resv) in amdgpu_vm_pt_create()
979 dma_resv_unlock(bo->tbo.base.resv); in amdgpu_vm_pt_create()
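The amdgpu_vm_pt_create() hits above form one locking pattern: a new page-table BO normally inherits the reservation object of the VM root BO (the 952 hit), but when no root resv is available yet, the freshly created BO's own resv is locked before a second BO is created that shares it, and unlocked again afterwards (the 964-979 hits). A minimal sketch of that pattern, with the surrounding bp/bo context assumed from the driver rather than shown in the hits:

	if (!bp.resv)
		/* creating the root itself: take the new BO's own lock so the
		 * second BO below can be created against the same resv */
		WARN_ON(dma_resv_lock(bo->tbo.base.resv, NULL));
	resv = bp.resv;

	bp.resv = bo->tbo.base.resv;	/* the second BO shares the page-table resv */
	/* ... create that BO here ... */

	if (!resv)
		dma_resv_unlock(bo->tbo.base.resv);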
1701 bool unlocked, struct dma_resv *resv, in amdgpu_vm_bo_update_mapping() argument
1746 r = vm->update_funcs->prepare(&params, resv, sync_mode); in amdgpu_vm_bo_update_mapping()
1880 struct dma_resv *resv; in amdgpu_vm_bo_update() local
1887 resv = vm->root.bo->tbo.base.resv; in amdgpu_vm_bo_update()
1891 resv = bo->tbo.base.resv; in amdgpu_vm_bo_update()
1917 if (clear || (bo && bo->tbo.base.resv == in amdgpu_vm_bo_update()
1918 vm->root.bo->tbo.base.resv)) in amdgpu_vm_bo_update()
1948 resv, mapping->start, in amdgpu_vm_bo_update()
1960 if (bo && bo->tbo.base.resv == vm->root.bo->tbo.base.resv) { in amdgpu_vm_bo_update()
2097 struct dma_resv *resv = vm->root.bo->tbo.base.resv; in amdgpu_vm_prt_fini() local
2102 r = dma_resv_get_fences(resv, &excl, &shared_count, &shared); in amdgpu_vm_prt_fini()
2107 dma_resv_wait_timeout(resv, true, false, in amdgpu_vm_prt_fini()
2143 struct dma_resv *resv = vm->root.bo->tbo.base.resv; in amdgpu_vm_clear_freed() local
2159 resv, mapping->start, in amdgpu_vm_clear_freed()
2197 struct dma_resv *resv; in amdgpu_vm_handle_moved() local
2212 resv = bo_va->base.bo->tbo.base.resv; in amdgpu_vm_handle_moved()
2216 if (!amdgpu_vm_debug && dma_resv_trylock(resv)) in amdgpu_vm_handle_moved()
2227 dma_resv_unlock(resv); in amdgpu_vm_handle_moved()
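The amdgpu_vm_handle_moved() hits show the opportunistic path for BOs that do not share the root resv: the BO's reservation is taken with dma_resv_trylock(), and only if that succeeds can its page-table entries be updated in place; otherwise the mappings have to be cleared instead. A hedged reconstruction of that flow (the clear flag and the amdgpu_vm_bo_update() call are assumptions about the lines elided between the hits):

	resv = bo_va->base.bo->tbo.base.resv;

	/* Try to reserve the BO so its PTEs can be updated in place;
	 * if somebody else holds the lock, clear the mappings instead. */
	if (!amdgpu_vm_debug && dma_resv_trylock(resv))
		clear = false;
	else
		clear = true;

	r = amdgpu_vm_bo_update(adev, bo_va, clear);

	if (!clear)
		dma_resv_unlock(resv);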
2302 if (bo && bo->tbo.base.resv == vm->root.bo->tbo.base.resv && in amdgpu_vm_bo_insert_map()
2584 if (bo && bo->tbo.base.resv == vm->root.bo->tbo.base.resv && in amdgpu_vm_bo_clear_mappings()
2599 if (bo && bo->tbo.base.resv == vm->root.bo->tbo.base.resv && in amdgpu_vm_bo_clear_mappings()
2648 if (dma_resv_locking_ctx(bo->tbo.base.resv) != in amdgpu_vm_bo_trace_cs()
2676 if (bo->tbo.base.resv == vm->root.bo->tbo.base.resv) in amdgpu_vm_bo_rmv()
2731 if (!dma_resv_test_signaled(bo->tbo.base.resv, true)) in amdgpu_vm_evictable()
2770 if (evicted && bo->tbo.base.resv == vm->root.bo->tbo.base.resv) { in amdgpu_vm_bo_invalidate()
2781 else if (bo->tbo.base.resv == vm->root.bo->tbo.base.resv) in amdgpu_vm_bo_invalidate()
2911 timeout = dma_resv_wait_timeout(vm->root.bo->tbo.base.resv, true, in amdgpu_vm_wait_idle()
2992 r = dma_resv_reserve_shared(root_bo->tbo.base.resv, 1); in amdgpu_vm_init()
3443 r = dma_resv_reserve_shared(root->tbo.base.resv, 1); in amdgpu_vm_handle_fault()
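Most of the remaining hits are variations of a single test: whether a BO's reservation object is the very same dma_resv as that of the VM root page-table BO, which is how the driver tells per-VM BOs (validated and fenced together with the page tables) apart from independently locked BOs. A hypothetical helper, not present in the driver, that just names this recurring check:

/* Illustration only: true when @bo is a per-VM BO sharing the
 * reservation object of @vm's root page-table BO. */
static bool amdgpu_vm_bo_shares_root_resv(struct amdgpu_bo *bo,
					  struct amdgpu_vm *vm)
{
	return bo->tbo.base.resv == vm->root.bo->tbo.base.resv;
}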