Lines matching references to the fence pointer f in the amdgpu driver's sync code (amdgpu_sync.c). Each line gives the source line number, the matching code, and the function containing the reference; a trailing "argument" or "local" marks whether f is a function parameter or a local variable there.
66 struct dma_fence *f) in amdgpu_sync_same_dev() argument
68 struct drm_sched_fence *s_fence = to_drm_sched_fence(f); in amdgpu_sync_same_dev()
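The two references above fall in amdgpu_sync_same_dev(), which checks whether a fence was produced by the given device. A sketch of how the full helper plausibly reads; only the two lines above are confirmed by this listing, and the container_of() ring lookup is filled in from surrounding kernel context:

static bool amdgpu_sync_same_dev(struct amdgpu_device *adev,
                                 struct dma_fence *f)
{
        struct drm_sched_fence *s_fence = to_drm_sched_fence(f);

        if (s_fence) {
                struct amdgpu_ring *ring;

                /* Scheduler fences embed their scheduler; map it back
                 * to the ring and compare the owning device.
                 */
                ring = container_of(s_fence->sched, struct amdgpu_ring,
                                    sched);
                return ring->adev == adev;
        }

        return false;
}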
87 static void *amdgpu_sync_get_owner(struct dma_fence *f) in amdgpu_sync_get_owner() argument
92 if (!f) in amdgpu_sync_get_owner()
95 s_fence = to_drm_sched_fence(f); in amdgpu_sync_get_owner()
99 kfd_fence = to_amdgpu_amdkfd_fence(f); in amdgpu_sync_get_owner()
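amdgpu_sync_get_owner() classifies a fence by who submitted it. A sketch consistent with the references above; the AMDGPU_FENCE_OWNER_* values and the s_fence->owner field come from context, not from the listing:

static void *amdgpu_sync_get_owner(struct dma_fence *f)
{
        struct drm_sched_fence *s_fence;
        struct amdgpu_amdkfd_fence *kfd_fence;

        if (!f)
                return AMDGPU_FENCE_OWNER_UNDEFINED;

        /* GPU scheduler fences carry the submitting owner directly. */
        s_fence = to_drm_sched_fence(f);
        if (s_fence)
                return s_fence->owner;

        /* KFD eviction fences are attributed to the KFD as a whole. */
        kfd_fence = to_amdgpu_amdkfd_fence(f);
        if (kfd_fence)
                return AMDGPU_FENCE_OWNER_KFD;

        return AMDGPU_FENCE_OWNER_UNDEFINED;
}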
133 static bool amdgpu_sync_add_later(struct amdgpu_sync *sync, struct dma_fence *f) in amdgpu_sync_add_later() argument
137 hash_for_each_possible(sync->fences, e, node, f->context) { in amdgpu_sync_add_later()
138 if (unlikely(e->fence->context != f->context)) in amdgpu_sync_add_later()
141 amdgpu_sync_keep_later(&e->fence, f); in amdgpu_sync_add_later()
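amdgpu_sync_add_later() deduplicates by dma_fence context: the sync object keeps at most one fence per context, and amdgpu_sync_keep_later() retains whichever is later. A sketch; the amdgpu_sync_entry walk and the return values are assumptions from context:

static bool amdgpu_sync_add_later(struct amdgpu_sync *sync,
                                  struct dma_fence *f)
{
        struct amdgpu_sync_entry *e;

        /* The hash is keyed on the fence context, so one bucket walk
         * finds any existing entry for this context.
         */
        hash_for_each_possible(sync->fences, e, node, f->context) {
                if (unlikely(e->fence->context != f->context))
                        continue;

                /* Same context: keep only the later of the two fences. */
                amdgpu_sync_keep_later(&e->fence, f);
                return true;
        }
        return false;
}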
155 int amdgpu_sync_fence(struct amdgpu_sync *sync, struct dma_fence *f) in amdgpu_sync_fence() argument
159 if (!f) in amdgpu_sync_fence()
162 if (amdgpu_sync_add_later(sync, f)) in amdgpu_sync_fence()
169 hash_add(sync->fences, &e->node, f->context); in amdgpu_sync_fence()
170 e->fence = dma_fence_get(f); in amdgpu_sync_fence()
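amdgpu_sync_fence() is the public entry point: it first tries the dedup fast path above, then allocates a fresh hash entry. A sketch, with the kmem_cache allocation from an assumed amdgpu_sync_slab filled in from context:

int amdgpu_sync_fence(struct amdgpu_sync *sync, struct dma_fence *f)
{
        struct amdgpu_sync_entry *e;

        if (!f)
                return 0;

        /* Fast path: an entry for this fence context already exists. */
        if (amdgpu_sync_add_later(sync, f))
                return 0;

        e = kmem_cache_alloc(amdgpu_sync_slab, GFP_KERNEL);
        if (!e)
                return -ENOMEM;

        /* New context: insert keyed on the context and hold a reference. */
        hash_add(sync->fences, &e->node, f->context);
        e->fence = dma_fence_get(f);
        return 0;
}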
194 void *owner, struct dma_fence *f) in amdgpu_sync_test_fence() argument
196 void *fence_owner = amdgpu_sync_get_owner(f); in amdgpu_sync_test_fence()
221 if (amdgpu_sync_same_dev(adev, f) && in amdgpu_sync_test_fence()
227 if (amdgpu_sync_same_dev(adev, f) && in amdgpu_sync_test_fence()
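amdgpu_sync_test_fence() decides whether a reservation-object fence must be waited on. The two amdgpu_sync_same_dev() references above are the owner comparisons inside its sync-mode switch. An abridged sketch; the enum values, the elided owner checks, and the surrounding control flow are assumptions from context:

static bool amdgpu_sync_test_fence(struct amdgpu_device *adev,
                                   enum amdgpu_sync_mode mode,
                                   void *owner, struct dma_fence *f)
{
        void *fence_owner = amdgpu_sync_get_owner(f);

        /* ... owner-specific early exits elided ... */

        switch (mode) {
        case AMDGPU_SYNC_ALWAYS:
                return true;

        case AMDGPU_SYNC_NE_OWNER:
                /* Skip fences from the same owner on the same device. */
                if (amdgpu_sync_same_dev(adev, f) &&
                    fence_owner == owner)
                        return false;
                break;

        case AMDGPU_SYNC_EQ_OWNER:
                /* Only wait for fences from the same owner. */
                if (amdgpu_sync_same_dev(adev, f) &&
                    fence_owner != owner)
                        return false;
                break;

        case AMDGPU_SYNC_EXPLICIT:
                return false;
        }

        return true;
}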
257 struct dma_fence *f; in amdgpu_sync_resv() local
265 f = dma_resv_excl_fence(resv); in amdgpu_sync_resv()
266 dma_fence_chain_for_each(f, f) { in amdgpu_sync_resv()
267 struct dma_fence_chain *chain = to_dma_fence_chain(f); in amdgpu_sync_resv()
270 chain->fence : f)) { in amdgpu_sync_resv()
271 r = amdgpu_sync_fence(sync, f); in amdgpu_sync_resv()
272 dma_fence_put(f); in amdgpu_sync_resv()
284 f = rcu_dereference_protected(flist->shared[i], in amdgpu_sync_resv()
287 if (amdgpu_sync_test_fence(adev, mode, owner, f)) { in amdgpu_sync_resv()
288 r = amdgpu_sync_fence(sync, f); in amdgpu_sync_resv()
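amdgpu_sync_resv() walks a dma_resv object: first the exclusive fence, unwrapping any dma_fence_chain links, then the shared fence list under the reservation lock. A sketch of the whole function for this era of the dma_resv API; the flist bookkeeping and the dma_resv_shared_list() call are filled in from context:

int amdgpu_sync_resv(struct amdgpu_device *adev, struct amdgpu_sync *sync,
                     struct dma_resv *resv, enum amdgpu_sync_mode mode,
                     void *owner)
{
        struct dma_resv_list *flist;
        struct dma_fence *f;
        unsigned int i;
        int r;

        if (resv == NULL)
                return -EINVAL;

        /* Always consider the exclusive fence, chain link by chain link. */
        f = dma_resv_excl_fence(resv);
        dma_fence_chain_for_each(f, f) {
                struct dma_fence_chain *chain = to_dma_fence_chain(f);

                /* Test the chain element's inner fence, but add the
                 * chain link itself to the sync object.
                 */
                if (amdgpu_sync_test_fence(adev, mode, owner, chain ?
                                           chain->fence : f)) {
                        r = amdgpu_sync_fence(sync, f);
                        dma_fence_put(f);
                        if (r)
                                return r;
                        break;
                }
        }

        /* Then every shared fence, protected by the reservation lock. */
        flist = dma_resv_shared_list(resv);
        if (!flist)
                return 0;

        for (i = 0; i < flist->shared_count; ++i) {
                f = rcu_dereference_protected(flist->shared[i],
                                              dma_resv_held(resv));

                if (amdgpu_sync_test_fence(adev, mode, owner, f)) {
                        r = amdgpu_sync_fence(sync, f);
                        if (r)
                                return r;
                }
        }
        return 0;
}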
313 struct dma_fence *f = e->fence; in amdgpu_sync_peek_fence() local
314 struct drm_sched_fence *s_fence = to_drm_sched_fence(f); in amdgpu_sync_peek_fence()
316 if (dma_fence_is_signaled(f)) { in amdgpu_sync_peek_fence()
318 dma_fence_put(f); in amdgpu_sync_peek_fence()
334 return f; in amdgpu_sync_peek_fence()
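amdgpu_sync_peek_fence() returns the next fence a given ring still has to wait for, garbage-collecting signaled entries as it goes. A sketch; the hash_for_each_safe() walk, the scheduled-fence shortcut, and the slab free are assumptions from context:

struct dma_fence *amdgpu_sync_peek_fence(struct amdgpu_sync *sync,
                                         struct amdgpu_ring *ring)
{
        struct amdgpu_sync_entry *e;
        struct hlist_node *tmp;
        int i;

        hash_for_each_safe(sync->fences, i, tmp, e, node) {
                struct dma_fence *f = e->fence;
                struct drm_sched_fence *s_fence = to_drm_sched_fence(f);

                /* Reap entries that have already signaled. */
                if (dma_fence_is_signaled(f)) {
                        hash_del(&e->node);
                        dma_fence_put(f);
                        kmem_cache_free(amdgpu_sync_slab, e);
                        continue;
                }
                if (ring && s_fence) {
                        /* For fences from the same ring it is enough
                         * that they have been scheduled.
                         */
                        if (s_fence->sched == &ring->sched) {
                                if (dma_fence_is_signaled(&s_fence->scheduled))
                                        continue;

                                return &s_fence->scheduled;
                        }
                }

                return f;
        }

        return NULL;
}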
351 struct dma_fence *f; in amdgpu_sync_get_fence() local
355 f = e->fence; in amdgpu_sync_get_fence()
360 if (!dma_fence_is_signaled(f)) in amdgpu_sync_get_fence()
361 return f; in amdgpu_sync_get_fence()
363 dma_fence_put(f); in amdgpu_sync_get_fence()
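amdgpu_sync_get_fence() pops fences out of the container one at a time, handing the reference for the first unsignaled one to the caller. A sketch, again with the hash walk and slab free filled in from context:

struct dma_fence *amdgpu_sync_get_fence(struct amdgpu_sync *sync)
{
        struct amdgpu_sync_entry *e;
        struct hlist_node *tmp;
        struct dma_fence *f;
        int i;

        hash_for_each_safe(sync->fences, i, tmp, e, node) {
                f = e->fence;

                hash_del(&e->node);
                kmem_cache_free(amdgpu_sync_slab, e);

                /* Hand the first unsignaled fence, with its reference,
                 * to the caller; drop signaled ones.
                 */
                if (!dma_fence_is_signaled(f))
                        return f;

                dma_fence_put(f);
        }
        return NULL;
}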
381 struct dma_fence *f; in amdgpu_sync_clone() local
385 f = e->fence; in amdgpu_sync_clone()
386 if (!dma_fence_is_signaled(f)) { in amdgpu_sync_clone()
387 r = amdgpu_sync_fence(clone, f); in amdgpu_sync_clone()
392 dma_fence_put(f); in amdgpu_sync_clone()
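Finally, amdgpu_sync_clone() copies the still-pending fences into another sync object while reaping the signaled ones from the source. A sketch; only the five listed lines are confirmed, the rest follows the same entry bookkeeping as above:

int amdgpu_sync_clone(struct amdgpu_sync *source, struct amdgpu_sync *clone)
{
        struct amdgpu_sync_entry *e;
        struct hlist_node *tmp;
        struct dma_fence *f;
        int i, r;

        hash_for_each_safe(source->fences, i, tmp, e, node) {
                f = e->fence;
                if (!dma_fence_is_signaled(f)) {
                        /* Still pending: add to the clone, which takes
                         * its own reference via amdgpu_sync_fence().
                         */
                        r = amdgpu_sync_fence(clone, f);
                        if (r)
                                return r;
                } else {
                        /* Signaled: drop it from the source entirely. */
                        hash_del(&e->node);
                        dma_fence_put(f);
                        kmem_cache_free(amdgpu_sync_slab, e);
                }
        }
        return 0;
}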