
Searched refs: in_fence (Results 1 – 7 of 7) sorted by relevance

/drivers/gpu/drm/virtio/
virtgpu_submit.c
    49   struct dma_fence *in_fence) in virtio_gpu_do_fence_wait() argument
    53   if (dma_fence_match_context(in_fence, context)) in virtio_gpu_do_fence_wait()
    56   return dma_fence_wait(in_fence, true); in virtio_gpu_do_fence_wait()
    440  struct dma_fence *in_fence = in virtio_gpu_wait_in_fence() local
    442  if (!in_fence) in virtio_gpu_wait_in_fence()
    449  ret = virtio_gpu_dma_fence_wait(submit, in_fence); in virtio_gpu_wait_in_fence()
    451  dma_fence_put(in_fence); in virtio_gpu_wait_in_fence()
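
The virtio-gpu hunks above wait for the userspace-supplied in-fence before queuing the submission, but skip the wait when the fence comes from the driver's own fence context (vc4 makes the same check at vc4_gem.c:1195 below). A minimal sketch of that check using the generic dma-fence helpers; example_wait_in_fence() and its context parameter are illustrative stand-ins, not the driver's actual entry point:

#include <linux/dma-fence.h>
#include <linux/dma-fence-array.h>

/*
 * Illustrative stand-in for the virtio_gpu_do_fence_wait() pattern: if
 * @fence was created on @context (the timeline this submission will itself
 * signal), there is nothing to wait for; otherwise block interruptibly
 * until it signals.  dma_fence_wait() returns 0 or -ERESTARTSYS here.
 */
static int example_wait_in_fence(struct dma_fence *fence, u64 context)
{
        if (dma_fence_match_context(fence, context))
                return 0;

        return dma_fence_wait(fence, true);
}
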
/drivers/gpu/drm/nouveau/
nouveau_sched.c
    155  struct dma_fence *in_fence = NULL; in nouveau_job_add_deps() local
    161  ret = sync_find_fence(job, sync, &in_fence); in nouveau_job_add_deps()
    169  ret = drm_sched_job_add_dependency(&job->base, in_fence); in nouveau_job_add_deps()
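
nouveau resolves each sync entry to a dma_fence with its internal sync_find_fence() helper and hands the result to the DRM scheduler as a job dependency. A rough sketch of the same idea using the generic drm_syncobj lookup in place of nouveau's wrapper; example_add_syncobj_dep() and its parameters are invented for illustration:

#include <drm/drm_file.h>
#include <drm/drm_syncobj.h>
#include <drm/gpu_scheduler.h>

/*
 * Look up the fence behind a userspace syncobj handle and record it as a
 * dependency of @job.  drm_sched_job_add_dependency() consumes the fence
 * reference in both the success and error cases, so no dma_fence_put()
 * is needed here.
 */
static int example_add_syncobj_dep(struct drm_sched_job *job,
                                   struct drm_file *file_priv,
                                   u32 handle, u64 point)
{
        struct dma_fence *in_fence = NULL;
        int ret;

        ret = drm_syncobj_find_fence(file_priv, handle, point, 0, &in_fence);
        if (ret)
                return ret;

        return drm_sched_job_add_dependency(job, in_fence);
}
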
/drivers/gpu/drm/vc4/
vc4_gem.c
    1127  struct dma_fence *in_fence; in vc4_submit_cl_ioctl() local
    1186  0, 0, &in_fence); in vc4_submit_cl_ioctl()
    1195  if (!dma_fence_match_context(in_fence, in vc4_submit_cl_ioctl()
    1197  ret = dma_fence_wait(in_fence, true); in vc4_submit_cl_ioctl()
    1199  dma_fence_put(in_fence); in vc4_submit_cl_ioctl()
    1204  dma_fence_put(in_fence); in vc4_submit_cl_ioctl()
/drivers/gpu/drm/i915/gem/
i915_gem_execbuffer.c
    3253  struct dma_fence *in_fence, int out_fence_fd) in eb_fences_add() argument
    3268  if (in_fence) { in eb_fences_add()
    3270  err = i915_request_await_execution(rq, in_fence); in eb_fences_add()
    3272  err = i915_request_await_dma_fence(rq, in_fence); in eb_fences_add()
    3314  eb_requests_create(struct i915_execbuffer *eb, struct dma_fence *in_fence, in eb_requests_create() argument
    3336  in_fence, out_fence_fd); in eb_requests_create()
    3367  struct dma_fence *in_fence = NULL; in i915_gem_do_execbuffer() local
    3433  in_fence = sync_file_get_fence(lower_32_bits(args->rsvd2)); in i915_gem_do_execbuffer()
    3434  if (!in_fence) { in i915_gem_do_execbuffer()
    3489  out_fence = eb_requests_create(&eb, in_fence, out_fence_fd); in i915_gem_do_execbuffer()
    [all …]
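
In i915_gem_do_execbuffer() the in-fence arrives as a sync_file fd in the low 32 bits of rsvd2; eb_fences_add() then makes the new request wait for it, either as an execution ("submit") fence or as an ordinary dma-fence, per lines 3270/3272 above. A condensed sketch of that step; the i915_request_await_*() calls are i915-internal interfaces (assumed here to come from the driver's own i915_request.h), and example_await_in_fence() with its submit_fence flag is illustrative only, not the driver's real control flow:

#include <linux/dma-fence.h>
#include <linux/errno.h>
#include <linux/sync_file.h>

#include "i915_request.h"	/* i915-internal; declares i915_request_await_*() */

static int example_await_in_fence(struct i915_request *rq, int fence_fd,
                                  bool submit_fence)
{
        struct dma_fence *in_fence;
        int err;

        in_fence = sync_file_get_fence(fence_fd);
        if (!in_fence)
                return -EINVAL;

        if (submit_fence)
                err = i915_request_await_execution(rq, in_fence);
        else
                err = i915_request_await_dma_fence(rq, in_fence);

        /* Drop the reference taken by sync_file_get_fence(); the request
         * tracking above holds whatever references it still needs. */
        dma_fence_put(in_fence);
        return err;
}
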
/drivers/gpu/drm/etnaviv/
etnaviv_gem_submit.c
    554  struct dma_fence *in_fence = sync_file_get_fence(args->fence_fd); in etnaviv_ioctl_gem_submit() local
    555  if (!in_fence) { in etnaviv_ioctl_gem_submit()
    561  in_fence); in etnaviv_ioctl_gem_submit()
/drivers/gpu/drm/msm/
msm_gem_submit.c
    813  struct dma_fence *in_fence; in msm_ioctl_gem_submit() local
    815  in_fence = sync_file_get_fence(args->fence_fd); in msm_ioctl_gem_submit()
    817  if (!in_fence) { in msm_ioctl_gem_submit()
    822  ret = drm_sched_job_add_dependency(&submit->base, in_fence); in msm_ioctl_gem_submit()
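
msm (and etnaviv, which obtains its in_fence the same way at etnaviv_gem_submit.c:554) converts the fence_fd passed by userspace into a dma_fence and registers it as a scheduler dependency, so the GPU job cannot run before that fence signals. A minimal sketch of the sequence; example_add_in_fence_fd() and the -EINVAL choice for an unusable fd are illustrative:

#include <linux/errno.h>
#include <linux/sync_file.h>
#include <drm/gpu_scheduler.h>

static int example_add_in_fence_fd(struct drm_sched_job *job, int fence_fd)
{
        struct dma_fence *in_fence;

        in_fence = sync_file_get_fence(fence_fd);
        if (!in_fence)
                return -EINVAL;

        /* The scheduler consumes the fence reference, even on error. */
        return drm_sched_job_add_dependency(job, in_fence);
}
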
/drivers/gpu/drm/vmwgfx/
vmwgfx_execbuf.c
    4458  struct dma_fence *in_fence = NULL; in vmw_execbuf_ioctl() local
    4489  in_fence = sync_file_get_fence(arg->imported_fence_fd); in vmw_execbuf_ioctl()
    4491  if (!in_fence) { in vmw_execbuf_ioctl()
    4497  ret = dma_fence_wait(in_fence, true); in vmw_execbuf_ioctl()
    4515  if (in_fence) in vmw_execbuf_ioctl()
    4516  dma_fence_put(in_fence); in vmw_execbuf_ioctl()
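
vmwgfx takes the simplest approach: it waits synchronously on the imported fence before processing the command buffer, then drops the reference in the common exit path (lines 4497 and 4516 above). A compact sketch of that flow; example_wait_imported_fence() is an illustrative name, not a vmwgfx function:

#include <linux/dma-fence.h>
#include <linux/errno.h>
#include <linux/sync_file.h>

static int example_wait_imported_fence(int imported_fence_fd)
{
        struct dma_fence *in_fence;
        int ret;

        in_fence = sync_file_get_fence(imported_fence_fd);
        if (!in_fence)
                return -EINVAL;

        /* Interruptible wait: returns 0 once signalled, or -ERESTARTSYS. */
        ret = dma_fence_wait(in_fence, true);
        dma_fence_put(in_fence);

        return ret;
}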