/drivers/dma-buf/ |
D | dma-fence.c |
      113 static const char *dma_fence_stub_get_name(struct dma_fence *fence) in dma_fence_stub_get_name() argument
      332 int dma_fence_signal_timestamp_locked(struct dma_fence *fence, in dma_fence_signal_timestamp_locked() argument
      338 lockdep_assert_held(fence->lock); in dma_fence_signal_timestamp_locked()
      341 &fence->flags))) in dma_fence_signal_timestamp_locked()
      345 list_replace(&fence->cb_list, &cb_list); in dma_fence_signal_timestamp_locked()
      347 fence->timestamp = timestamp; in dma_fence_signal_timestamp_locked()
      348 set_bit(DMA_FENCE_FLAG_TIMESTAMP_BIT, &fence->flags); in dma_fence_signal_timestamp_locked()
      349 trace_dma_fence_signaled(fence); in dma_fence_signal_timestamp_locked()
      353 cur->func(fence, cur); in dma_fence_signal_timestamp_locked()
      375 int dma_fence_signal_timestamp(struct dma_fence *fence, ktime_t timestamp) in dma_fence_signal_timestamp() argument
      [all …]
|
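The dma-fence.c hits above are the core signalling path: dma_fence_signal_timestamp_locked() stamps the fence, sets DMA_FENCE_FLAG_TIMESTAMP_BIT and then runs every callback queued on cb_list. Below is a minimal consumer-side sketch of that contract; the my_* names are illustrative, and in practice dma_fence_wait() already does this for you.

#include <linux/completion.h>
#include <linux/dma-fence.h>

/* Hypothetical per-waiter bookkeeping hung off somebody else's fence. */
struct my_waiter {
        struct dma_fence_cb cb;         /* node placed on fence->cb_list */
        struct completion done;
};

/* Called from the signalling path (e.g. dma_fence_signal_timestamp_locked())
 * with fence->lock held, so it must not sleep. */
static void my_waiter_func(struct dma_fence *fence, struct dma_fence_cb *cb)
{
        struct my_waiter *w = container_of(cb, struct my_waiter, cb);

        complete(&w->done);
}

static int my_wait_on_fence(struct dma_fence *fence)
{
        struct my_waiter w;
        int ret;

        init_completion(&w.done);

        /* -ENOENT means the fence has already signalled: nothing to wait for. */
        ret = dma_fence_add_callback(fence, &w.cb, my_waiter_func);
        if (ret == -ENOENT)
                return 0;
        if (ret)
                return ret;

        wait_for_completion(&w.done);
        return 0;
}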
D | dma-resv.c |
      147 RCU_INIT_POINTER(obj->fence, NULL); in dma_resv_init()
      169 fobj = rcu_dereference_protected(obj->fence, 1); in dma_resv_fini()
      217 struct dma_fence *fence; in dma_resv_reserve_shared() local
      219 fence = rcu_dereference_protected(old->shared[i], in dma_resv_reserve_shared()
      221 if (dma_fence_is_signaled(fence)) in dma_resv_reserve_shared()
      222 RCU_INIT_POINTER(new->shared[--k], fence); in dma_resv_reserve_shared()
      224 RCU_INIT_POINTER(new->shared[j++], fence); in dma_resv_reserve_shared()
      236 rcu_assign_pointer(obj->fence, new); in dma_resv_reserve_shared()
      243 struct dma_fence *fence; in dma_resv_reserve_shared() local
      245 fence = rcu_dereference_protected(new->shared[i], in dma_resv_reserve_shared()
      [all …]
|
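The dma-resv.c hits show the shared-fence list being grown and republished under RCU. From a driver's point of view the pattern in this kernel generation is reserve-then-add while holding the reservation lock; a hedged sketch follows. my_publish_read_fence() is an illustrative name, and later kernels renamed these helpers (dma_resv_reserve_fences()/dma_resv_add_fence()).

#include <linux/dma-fence.h>
#include <linux/dma-resv.h>

/*
 * Sketch: publish a freshly emitted fence as a shared (read) fence on a
 * buffer's reservation object.
 */
static int my_publish_read_fence(struct dma_resv *resv, struct dma_fence *fence)
{
        int ret;

        ret = dma_resv_lock(resv, NULL);
        if (ret)
                return ret;

        /* Make room for one more slot; this may reallocate the shared list. */
        ret = dma_resv_reserve_shared(resv, 1);
        if (ret == 0)
                dma_resv_add_shared_fence(resv, fence);

        dma_resv_unlock(resv);
        return ret;
}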
D | dma-fence-chain.c |
      12 static bool dma_fence_chain_enable_signaling(struct dma_fence *fence);
      39 struct dma_fence *dma_fence_chain_walk(struct dma_fence *fence) in dma_fence_chain_walk() argument
      44 chain = to_dma_fence_chain(fence); in dma_fence_chain_walk()
      46 dma_fence_put(fence); in dma_fence_chain_walk()
      54 if (!dma_fence_is_signaled(prev_chain->fence)) in dma_fence_chain_walk()
      74 dma_fence_put(fence); in dma_fence_chain_walk()
      112 static const char *dma_fence_chain_get_driver_name(struct dma_fence *fence) in dma_fence_chain_get_driver_name() argument
      117 static const char *dma_fence_chain_get_timeline_name(struct dma_fence *fence) in dma_fence_chain_get_timeline_name() argument
      144 static bool dma_fence_chain_enable_signaling(struct dma_fence *fence) in dma_fence_chain_enable_signaling() argument
      146 struct dma_fence_chain *head = to_dma_fence_chain(fence); in dma_fence_chain_enable_signaling()
      [all …]
|
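dma-fence-chain.c implements timeline points as a linked chain of fences. The sketch below shows how a driver might append a point and later resolve one, assuming the ownership rules the file above follows (dma_fence_chain_init() consumes the references it is given); the my_timeline_* helpers are illustrative.

#include <linux/dma-fence-chain.h>
#include <linux/slab.h>

static struct dma_fence *my_timeline_add(struct dma_fence *prev_head,
                                         struct dma_fence *fence, u64 point)
{
        struct dma_fence_chain *chain;

        chain = kzalloc(sizeof(*chain), GFP_KERNEL);
        if (!chain)
                return NULL;

        /* Consumes the references handed in for both prev_head and fence. */
        dma_fence_chain_init(chain, prev_head, fence, point);
        return &chain->base;
}

static int my_timeline_wait_point(struct dma_fence *head, u64 point)
{
        struct dma_fence *fence = dma_fence_get(head);
        int err;

        /* On success, fence is replaced by the chain node covering 'point'. */
        err = dma_fence_chain_find_seqno(&fence, point);
        if (!err && fence)
                err = dma_fence_wait(fence, true);

        dma_fence_put(fence);
        return err;
}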
D | seqno-fence.c |
      16 static const char *seqno_fence_get_driver_name(struct dma_fence *fence) in seqno_fence_get_driver_name() argument
      18 struct seqno_fence *seqno_fence = to_seqno_fence(fence); in seqno_fence_get_driver_name()
      20 return seqno_fence->ops->get_driver_name(fence); in seqno_fence_get_driver_name()
      23 static const char *seqno_fence_get_timeline_name(struct dma_fence *fence) in seqno_fence_get_timeline_name() argument
      25 struct seqno_fence *seqno_fence = to_seqno_fence(fence); in seqno_fence_get_timeline_name()
      27 return seqno_fence->ops->get_timeline_name(fence); in seqno_fence_get_timeline_name()
      30 static bool seqno_enable_signaling(struct dma_fence *fence) in seqno_enable_signaling() argument
      32 struct seqno_fence *seqno_fence = to_seqno_fence(fence); in seqno_enable_signaling()
      34 return seqno_fence->ops->enable_signaling(fence); in seqno_enable_signaling()
      37 static bool seqno_signaled(struct dma_fence *fence) in seqno_signaled() argument
      [all …]
|
D | st-dma-fence-chain.c |
      68 struct dma_fence *fence, in mock_chain() argument
      79 dma_fence_get(fence), in mock_chain()
      188 struct dma_fence *fence; in find_seqno() local
      196 fence = dma_fence_get(fc.tail); in find_seqno()
      197 err = dma_fence_chain_find_seqno(&fence, 0); in find_seqno()
      198 dma_fence_put(fence); in find_seqno()
      205 fence = dma_fence_get(fc.tail); in find_seqno()
      206 err = dma_fence_chain_find_seqno(&fence, i + 1); in find_seqno()
      207 dma_fence_put(fence); in find_seqno()
      213 if (fence != fc.chains[i]) { in find_seqno()
      [all …]
|
D | sync_file.c |
      64 struct sync_file *sync_file_create(struct dma_fence *fence) in sync_file_create() argument
      72 sync_file->fence = dma_fence_get(fence); in sync_file_create()
      105 struct dma_fence *fence; in sync_file_get_fence() local
      111 fence = dma_fence_get(sync_file->fence); in sync_file_get_fence()
      114 return fence; in sync_file_get_fence()
      136 struct dma_fence *fence = sync_file->fence; in sync_file_get_name() local
      139 fence->ops->get_driver_name(fence), in sync_file_get_name()
      140 fence->ops->get_timeline_name(fence), in sync_file_get_name()
      141 fence->context, in sync_file_get_name()
      142 fence->seqno); in sync_file_get_name()
      [all …]
|
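sync_file.c is the userspace boundary: a sync_file wraps one dma_fence behind a file descriptor. A hedged sketch of the usual export/import pair follows; my_fence_to_fd()/my_fd_to_fence() are illustrative names.

#include <linux/dma-fence.h>
#include <linux/fcntl.h>
#include <linux/file.h>
#include <linux/sync_file.h>

/* Sketch: hand a fence to userspace as a sync_file fd. */
static int my_fence_to_fd(struct dma_fence *fence)
{
        struct sync_file *sync_file;
        int fd;

        fd = get_unused_fd_flags(O_CLOEXEC);
        if (fd < 0)
                return fd;

        /* sync_file_create() takes its own reference on the fence. */
        sync_file = sync_file_create(fence);
        if (!sync_file) {
                put_unused_fd(fd);
                return -ENOMEM;
        }

        fd_install(fd, sync_file->file);
        return fd;
}

/* And the reverse: resolve an fd passed in from userspace back to its fence. */
static struct dma_fence *my_fd_to_fence(int fd)
{
        return sync_file_get_fence(fd);  /* NULL if fd is not a sync_file */
}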
/drivers/gpu/drm/i915/ |
D | i915_sw_fence.c |
      42 static inline void debug_fence_init(struct i915_sw_fence *fence) in debug_fence_init() argument
      44 debug_object_init(fence, &i915_sw_fence_debug_descr); in debug_fence_init()
      47 static inline void debug_fence_init_onstack(struct i915_sw_fence *fence) in debug_fence_init_onstack() argument
      49 debug_object_init_on_stack(fence, &i915_sw_fence_debug_descr); in debug_fence_init_onstack()
      52 static inline void debug_fence_activate(struct i915_sw_fence *fence) in debug_fence_activate() argument
      54 debug_object_activate(fence, &i915_sw_fence_debug_descr); in debug_fence_activate()
      57 static inline void debug_fence_set_state(struct i915_sw_fence *fence, in debug_fence_set_state() argument
      60 debug_object_active_state(fence, &i915_sw_fence_debug_descr, old, new); in debug_fence_set_state()
      63 static inline void debug_fence_deactivate(struct i915_sw_fence *fence) in debug_fence_deactivate() argument
      65 debug_object_deactivate(fence, &i915_sw_fence_debug_descr); in debug_fence_deactivate()
      [all …]
|
D | i915_sw_fence.h |
      41 void __i915_sw_fence_init(struct i915_sw_fence *fence,
      46 #define i915_sw_fence_init(fence, fn) \ argument
      50 __i915_sw_fence_init((fence), (fn), #fence, &__key); \
      53 #define i915_sw_fence_init(fence, fn) \ argument
      54 __i915_sw_fence_init((fence), (fn), NULL, NULL)
      57 void i915_sw_fence_reinit(struct i915_sw_fence *fence);
      60 void i915_sw_fence_fini(struct i915_sw_fence *fence);
      62 static inline void i915_sw_fence_fini(struct i915_sw_fence *fence) {} in i915_sw_fence_fini() argument
      65 void i915_sw_fence_commit(struct i915_sw_fence *fence);
      67 int i915_sw_fence_await_sw_fence(struct i915_sw_fence *fence,
      [all …]
|
D | i915_request.c |
      47 struct i915_sw_fence *fence; member
      58 static const char *i915_fence_get_driver_name(struct dma_fence *fence) in i915_fence_get_driver_name() argument
      60 return dev_name(to_request(fence)->engine->i915->drm.dev); in i915_fence_get_driver_name()
      63 static const char *i915_fence_get_timeline_name(struct dma_fence *fence) in i915_fence_get_timeline_name() argument
      76 if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags)) in i915_fence_get_timeline_name()
      79 ctx = i915_request_gem_context(to_request(fence)); in i915_fence_get_timeline_name()
      86 static bool i915_fence_signaled(struct dma_fence *fence) in i915_fence_signaled() argument
      88 return i915_request_completed(to_request(fence)); in i915_fence_signaled()
      91 static bool i915_fence_enable_signaling(struct dma_fence *fence) in i915_fence_enable_signaling() argument
      93 return i915_request_enable_breadcrumb(to_request(fence)); in i915_fence_enable_signaling()
      [all …]
|
D | i915_active.h |
      48 void i915_active_noop(struct dma_fence *fence, struct dma_fence_cb *cb);
      64 void *fence, in __i915_active_fence_init() argument
      67 RCU_INIT_POINTER(active->fence, fence); in __i915_active_fence_init()
      76 struct dma_fence *fence);
      103 struct dma_fence *fence; in i915_active_fence_get() local
      106 fence = dma_fence_get_rcu_safe(&active->fence); in i915_active_fence_get()
      109 return fence; in i915_active_fence_get()
      123 return rcu_access_pointer(active->fence); in i915_active_fence_isset()
      167 __i915_active_ref(struct i915_active *ref, u64 idx, struct dma_fence *fence);
      168 int i915_active_ref(struct i915_active *ref, u64 idx, struct dma_fence *fence);
      [all …]
|
D | i915_request.h |
      56 rq__->fence.context, rq__->fence.seqno, \
      162 struct dma_fence fence; member
      299 static inline bool dma_fence_is_i915(const struct dma_fence *fence) in dma_fence_is_i915() argument
      301 return fence->ops == &i915_fence_ops; in dma_fence_is_i915()
      322 to_request(struct dma_fence *fence) in to_request() argument
      325 BUILD_BUG_ON(offsetof(struct i915_request, fence) != 0); in to_request()
      326 GEM_BUG_ON(fence && !dma_fence_is_i915(fence)); in to_request()
      327 return container_of(fence, struct i915_request, fence); in to_request()
      333 return to_request(dma_fence_get(&rq->fence)); in i915_request_get()
      339 return to_request(dma_fence_get_rcu(&rq->fence)); in i915_request_get_rcu()
      [all …]
|
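The i915_request.h hits from line 322 onward show the downcast from a dma_fence back to the containing request: check fence->ops before trusting the cast, and keep the fence as the first member so request and fence pointers stay interchangeable. A generic sketch of that pattern, with my_request/my_fence_ops as stand-in names:

#include <linux/build_bug.h>
#include <linux/dma-fence.h>
#include <linux/kernel.h>

extern const struct dma_fence_ops my_fence_ops;

struct my_request {
        struct dma_fence fence;         /* kept as the first member */
        /* driver-specific state follows ... */
};

static inline bool dma_fence_is_mine(const struct dma_fence *fence)
{
        return fence->ops == &my_fence_ops;
}

static inline struct my_request *to_my_request(struct dma_fence *fence)
{
        /* fence at offset 0 keeps request and fence pointers interchangeable. */
        BUILD_BUG_ON(offsetof(struct my_request, fence) != 0);

        if (WARN_ON(fence && !dma_fence_is_mine(fence)))
                return NULL;

        return container_of(fence, struct my_request, fence);
}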
/drivers/gpu/drm/i915/gt/ |
D | intel_ggtt_fencing.c |
      61 static struct drm_i915_private *fence_to_i915(struct i915_fence_reg *fence) in fence_to_i915() argument
      63 return fence->ggtt->vm.i915; in fence_to_i915()
      66 static struct intel_uncore *fence_to_uncore(struct i915_fence_reg *fence) in fence_to_uncore() argument
      68 return fence->ggtt->vm.gt->uncore; in fence_to_uncore()
      71 static void i965_write_fence_reg(struct i915_fence_reg *fence) in i965_write_fence_reg() argument
      77 if (INTEL_GEN(fence_to_i915(fence)) >= 6) { in i965_write_fence_reg()
      78 fence_reg_lo = FENCE_REG_GEN6_LO(fence->id); in i965_write_fence_reg()
      79 fence_reg_hi = FENCE_REG_GEN6_HI(fence->id); in i965_write_fence_reg()
      83 fence_reg_lo = FENCE_REG_965_LO(fence->id); in i965_write_fence_reg()
      84 fence_reg_hi = FENCE_REG_965_HI(fence->id); in i965_write_fence_reg()
      [all …]
|
/drivers/gpu/drm/nouveau/ |
D | nouveau_fence.c |
      44 from_fence(struct dma_fence *fence) in from_fence() argument
      46 return container_of(fence, struct nouveau_fence, base); in from_fence()
      50 nouveau_fctx(struct nouveau_fence *fence) in nouveau_fctx() argument
      52 return container_of(fence->base.lock, struct nouveau_fence_chan, lock); in nouveau_fctx()
      56 nouveau_fence_signal(struct nouveau_fence *fence) in nouveau_fence_signal() argument
      60 dma_fence_signal_locked(&fence->base); in nouveau_fence_signal()
      61 list_del(&fence->head); in nouveau_fence_signal()
      62 rcu_assign_pointer(fence->channel, NULL); in nouveau_fence_signal()
      64 if (test_bit(DMA_FENCE_FLAG_USER_BITS, &fence->base.flags)) { in nouveau_fence_signal()
      65 struct nouveau_fence_chan *fctx = nouveau_fctx(fence); in nouveau_fence_signal()
      [all …]
|
/drivers/gpu/drm/vgem/ |
D | vgem_fence.c |
      38 static const char *vgem_fence_get_driver_name(struct dma_fence *fence) in vgem_fence_get_driver_name() argument
      43 static const char *vgem_fence_get_timeline_name(struct dma_fence *fence) in vgem_fence_get_timeline_name() argument
      50 struct vgem_fence *fence = container_of(base, typeof(*fence), base); in vgem_fence_release() local
      52 del_timer_sync(&fence->timer); in vgem_fence_release()
      53 dma_fence_free(&fence->base); in vgem_fence_release()
      56 static void vgem_fence_value_str(struct dma_fence *fence, char *str, int size) in vgem_fence_value_str() argument
      58 snprintf(str, size, "%llu", fence->seqno); in vgem_fence_value_str()
      61 static void vgem_fence_timeline_value_str(struct dma_fence *fence, char *str, in vgem_fence_timeline_value_str() argument
      65 dma_fence_is_signaled(fence) ? fence->seqno : 0); in vgem_fence_timeline_value_str()
      79 struct vgem_fence *fence = from_timer(fence, t, timer); in vgem_fence_timeout() local
      [all …]
|
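vgem_fence.c is close to the smallest useful dma_fence provider: a software-only fence plus a timer that signals it if nothing else does. The sketch below mirrors that shape rather than vgem's exact code; every my_* name is illustrative.

#include <linux/dma-fence.h>
#include <linux/jiffies.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/timer.h>

struct my_sw_fence {
        struct dma_fence base;
        struct timer_list timer;
        spinlock_t lock;
};

static const char *my_sw_fence_get_driver_name(struct dma_fence *fence)
{
        return "my_sw";
}

static const char *my_sw_fence_get_timeline_name(struct dma_fence *fence)
{
        return "unbound";
}

static void my_sw_fence_release(struct dma_fence *base)
{
        struct my_sw_fence *fence = container_of(base, typeof(*fence), base);

        del_timer_sync(&fence->timer);
        dma_fence_free(&fence->base);
}

static const struct dma_fence_ops my_sw_fence_ops = {
        .get_driver_name = my_sw_fence_get_driver_name,
        .get_timeline_name = my_sw_fence_get_timeline_name,
        .release = my_sw_fence_release,
};

static void my_sw_fence_timeout(struct timer_list *t)
{
        struct my_sw_fence *fence = from_timer(fence, t, timer);

        dma_fence_signal(&fence->base);
}

static struct dma_fence *my_sw_fence_create(unsigned long timeout_ms)
{
        struct my_sw_fence *fence;

        fence = kzalloc(sizeof(*fence), GFP_KERNEL);
        if (!fence)
                return NULL;

        spin_lock_init(&fence->lock);
        /* A real driver would allocate the fence context once and reuse it. */
        dma_fence_init(&fence->base, &my_sw_fence_ops, &fence->lock,
                       dma_fence_context_alloc(1), 1);

        timer_setup(&fence->timer, my_sw_fence_timeout, 0);
        mod_timer(&fence->timer, jiffies + msecs_to_jiffies(timeout_ms));

        return &fence->base;
}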
/drivers/gpu/drm/vmwgfx/ |
D | vmwgfx_fence.c |
      55 struct vmw_fence_obj fence; member
      77 struct vmw_fence_obj *fence; member
      85 fman_from_fence(struct vmw_fence_obj *fence) in fman_from_fence() argument
      87 return container_of(fence->base.lock, struct vmw_fence_manager, lock); in fman_from_fence()
      114 struct vmw_fence_obj *fence = in vmw_fence_obj_destroy() local
      117 struct vmw_fence_manager *fman = fman_from_fence(fence); in vmw_fence_obj_destroy()
      120 list_del_init(&fence->head); in vmw_fence_obj_destroy()
      123 fence->destroy(fence); in vmw_fence_obj_destroy()
      138 struct vmw_fence_obj *fence = in vmw_fence_enable_signaling() local
      141 struct vmw_fence_manager *fman = fman_from_fence(fence); in vmw_fence_enable_signaling()
      [all …]
|
/drivers/gpu/drm/scheduler/ |
D | sched_fence.c |
      51 void drm_sched_fence_scheduled(struct drm_sched_fence *fence) in drm_sched_fence_scheduled() argument
      53 int ret = dma_fence_signal(&fence->scheduled); in drm_sched_fence_scheduled()
      56 DMA_FENCE_TRACE(&fence->scheduled, in drm_sched_fence_scheduled()
      59 DMA_FENCE_TRACE(&fence->scheduled, in drm_sched_fence_scheduled()
      63 void drm_sched_fence_finished(struct drm_sched_fence *fence) in drm_sched_fence_finished() argument
      65 int ret = dma_fence_signal(&fence->finished); in drm_sched_fence_finished()
      68 DMA_FENCE_TRACE(&fence->finished, in drm_sched_fence_finished()
      71 DMA_FENCE_TRACE(&fence->finished, in drm_sched_fence_finished()
      75 static const char *drm_sched_fence_get_driver_name(struct dma_fence *fence) in drm_sched_fence_get_driver_name() argument
      82 struct drm_sched_fence *fence = to_drm_sched_fence(f); in drm_sched_fence_get_timeline_name() local
      [all …]
|
D | gpu_scheduler_trace.h |
      40 __field(struct dma_fence *, fence)
      50 __entry->fence = &sched_job->s_fence->finished;
      58 __entry->fence, __entry->name,
      67 __field(struct dma_fence *, fence)
      77 __entry->fence = &sched_job->s_fence->finished;
      85 __entry->fence, __entry->name,
      90 TP_PROTO(struct drm_sched_fence *fence),
      91 TP_ARGS(fence),
      93 __field(struct dma_fence *, fence)
      97 __entry->fence = &fence->finished;
      [all …]
|
/drivers/gpu/drm/i915/selftests/ |
D | lib_sw_fence.c |
      30 nop_fence_notify(struct i915_sw_fence *fence, enum i915_sw_fence_notify state) in nop_fence_notify() argument
      35 void __onstack_fence_init(struct i915_sw_fence *fence, in __onstack_fence_init() argument
      39 debug_fence_init_onstack(fence); in __onstack_fence_init()
      41 __init_waitqueue_head(&fence->wait, name, key); in __onstack_fence_init()
      42 atomic_set(&fence->pending, 1); in __onstack_fence_init()
      43 fence->error = 0; in __onstack_fence_init()
      44 fence->flags = (unsigned long)nop_fence_notify; in __onstack_fence_init()
      47 void onstack_fence_fini(struct i915_sw_fence *fence) in onstack_fence_fini() argument
      49 if (!fence->flags) in onstack_fence_fini()
      52 i915_sw_fence_commit(fence); in onstack_fence_fini()
      [all …]
|
/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_amdkfd_fence.c |
      65 struct amdgpu_amdkfd_fence *fence; in amdgpu_amdkfd_fence_create() local
      67 fence = kzalloc(sizeof(*fence), GFP_KERNEL); in amdgpu_amdkfd_fence_create()
      68 if (fence == NULL) in amdgpu_amdkfd_fence_create()
      73 fence->mm = mm; in amdgpu_amdkfd_fence_create()
      74 get_task_comm(fence->timeline_name, current); in amdgpu_amdkfd_fence_create()
      75 spin_lock_init(&fence->lock); in amdgpu_amdkfd_fence_create()
      77 dma_fence_init(&fence->base, &amdkfd_fence_ops, &fence->lock, in amdgpu_amdkfd_fence_create()
      80 return fence; in amdgpu_amdkfd_fence_create()
      85 struct amdgpu_amdkfd_fence *fence; in to_amdgpu_amdkfd_fence() local
      90 fence = container_of(f, struct amdgpu_amdkfd_fence, base); in to_amdgpu_amdkfd_fence()
      [all …]
|
/drivers/gpu/drm/radeon/ |
D | radeon_fence.c |
      136 struct radeon_fence **fence, in radeon_fence_emit() argument
      142 *fence = kmalloc(sizeof(struct radeon_fence), GFP_KERNEL); in radeon_fence_emit()
      143 if ((*fence) == NULL) { in radeon_fence_emit()
      146 (*fence)->rdev = rdev; in radeon_fence_emit()
      147 (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring]; in radeon_fence_emit()
      148 (*fence)->ring = ring; in radeon_fence_emit()
      149 (*fence)->is_vm_update = false; in radeon_fence_emit()
      150 dma_fence_init(&(*fence)->base, &radeon_fence_ops, in radeon_fence_emit()
      154 radeon_fence_ring_emit(rdev, ring, *fence); in radeon_fence_emit()
      155 trace_radeon_fence_emit(rdev->ddev, ring, (*fence)->seq); in radeon_fence_emit()
      [all …]
|
D | radeon_sync.c |
      63 struct radeon_fence *fence) in radeon_sync_fence() argument
      67 if (!fence) in radeon_sync_fence()
      70 other = sync->sync_to[fence->ring]; in radeon_sync_fence()
      71 sync->sync_to[fence->ring] = radeon_fence_later(fence, other); in radeon_sync_fence()
      73 if (fence->is_vm_update) { in radeon_sync_fence()
      75 sync->last_vm_update = radeon_fence_later(fence, other); in radeon_sync_fence()
      95 struct radeon_fence *fence; in radeon_sync_resv() local
      101 fence = f ? to_radeon_fence(f) : NULL; in radeon_sync_resv()
      102 if (fence && fence->rdev == rdev) in radeon_sync_resv()
      103 radeon_sync_fence(sync, fence); in radeon_sync_resv()
      [all …]
|
/drivers/gpu/drm/virtio/ |
D | virtgpu_fence.c |
      45 struct virtio_gpu_fence *fence = to_virtio_fence(f); in virtio_fence_signaled() local
      47 if (WARN_ON_ONCE(fence->f.seqno == 0)) in virtio_fence_signaled()
      51 if (atomic64_read(&fence->drv->last_seq) >= fence->f.seqno) in virtio_fence_signaled()
      63 struct virtio_gpu_fence *fence = to_virtio_fence(f); in virtio_timeline_value_str() local
      65 snprintf(str, size, "%llu", (u64)atomic64_read(&fence->drv->last_seq)); in virtio_timeline_value_str()
      79 struct virtio_gpu_fence *fence = kzalloc(sizeof(struct virtio_gpu_fence), in virtio_gpu_fence_alloc() local
      81 if (!fence) in virtio_gpu_fence_alloc()
      82 return fence; in virtio_gpu_fence_alloc()
      84 fence->drv = drv; in virtio_gpu_fence_alloc()
      90 dma_fence_init(&fence->f, &virtio_fence_ops, &drv->lock, drv->context, 0); in virtio_gpu_fence_alloc()
      [all …]
|
/drivers/gpu/drm/ |
D | drm_syncobj.c |
      206 struct dma_fence *fence; member
      243 struct dma_fence *fence; in drm_syncobj_fence_add_wait() local
      245 if (wait->fence) in drm_syncobj_fence_add_wait()
      253 fence = dma_fence_get(rcu_dereference_protected(syncobj->fence, 1)); in drm_syncobj_fence_add_wait()
      254 if (!fence || dma_fence_chain_find_seqno(&fence, wait->point)) { in drm_syncobj_fence_add_wait()
      255 dma_fence_put(fence); in drm_syncobj_fence_add_wait()
      257 } else if (!fence) { in drm_syncobj_fence_add_wait()
      258 wait->fence = dma_fence_get_stub(); in drm_syncobj_fence_add_wait()
      260 wait->fence = fence; in drm_syncobj_fence_add_wait()
      287 struct dma_fence *fence, in drm_syncobj_add_point() argument
      [all …]
|
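drm_syncobj.c keeps the current fence of a syncobj behind an RCU pointer and, for timeline syncobjs, resolves a point through dma_fence_chain_find_seqno() as in the hit at line 254. A hedged sketch of resolving a handle plus point to a waitable fence; the helper is illustrative, and real ioctl paths go through drm_syncobj_find_fence(), which also implements WAIT_FOR_SUBMIT semantics.

#include <drm/drm_file.h>
#include <drm/drm_syncobj.h>
#include <linux/dma-fence-chain.h>
#include <linux/err.h>

/* Returns NULL when the syncobj has no fence installed yet. */
static struct dma_fence *my_syncobj_point_to_fence(struct drm_file *file_priv,
                                                   u32 handle, u64 point)
{
        struct drm_syncobj *syncobj;
        struct dma_fence *fence;

        syncobj = drm_syncobj_find(file_priv, handle);
        if (!syncobj)
                return ERR_PTR(-ENOENT);

        fence = drm_syncobj_fence_get(syncobj);
        drm_syncobj_put(syncobj);

        if (fence && point && dma_fence_chain_find_seqno(&fence, point)) {
                dma_fence_put(fence);
                fence = ERR_PTR(-EINVAL);
        }

        return fence;
}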
/drivers/gpu/drm/msm/ |
D | msm_fence.c |
      36 static inline bool fence_completed(struct msm_fence_context *fctx, uint32_t fence) in fence_completed() argument
      38 return (int32_t)(fctx->completed_fence - fence) >= 0; in fence_completed()
      42 int msm_wait_fence(struct msm_fence_context *fctx, uint32_t fence, in msm_wait_fence() argument
      47 if (fence > fctx->last_fence) { in msm_wait_fence()
      49 fctx->name, fence, fctx->last_fence); in msm_wait_fence()
      55 ret = fence_completed(fctx, fence) ? 0 : -EBUSY; in msm_wait_fence()
      61 fence_completed(fctx, fence), in msm_wait_fence()
      65 fence_completed(fctx, fence), in msm_wait_fence()
      70 fence, fctx->completed_fence); in msm_wait_fence()
      81 void msm_update_fence(struct msm_fence_context *fctx, uint32_t fence) in msm_update_fence() argument
      [all …]
|
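The msm check at line 38 compares 32-bit seqnos through a signed difference so the test survives wraparound. A tiny illustration of why that works; seqno_passed() is a made-up helper, not msm code.

#include <linux/types.h>

/*
 * Comparing through a signed difference keeps the test correct across u32
 * wraparound as long as the two values are less than 2^31 apart.
 */
static inline bool seqno_passed(u32 completed, u32 wanted)
{
        return (s32)(completed - wanted) >= 0;
}

/*
 * Example: after wrapping, completed == 0x00000002 and wanted == 0xfffffffe;
 * completed - wanted == 4, so the fence is (correctly) reported as done.
 */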
/drivers/gpu/drm/v3d/ |
D | v3d_fence.c |
      8 struct v3d_fence *fence; in v3d_fence_create() local
      10 fence = kzalloc(sizeof(*fence), GFP_KERNEL); in v3d_fence_create()
      11 if (!fence) in v3d_fence_create()
      14 fence->dev = &v3d->drm; in v3d_fence_create()
      15 fence->queue = queue; in v3d_fence_create()
      16 fence->seqno = ++v3d->queue[queue].emit_seqno; in v3d_fence_create()
      17 dma_fence_init(&fence->base, &v3d_fence_ops, &v3d->job_lock, in v3d_fence_create()
      18 v3d->queue[queue].fence_context, fence->seqno); in v3d_fence_create()
      20 return &fence->base; in v3d_fence_create()
      23 static const char *v3d_fence_get_driver_name(struct dma_fence *fence) in v3d_fence_get_driver_name() argument
      [all …]
|
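v3d_fence.c shows the common per-queue scheme: one fence context per ring/queue and a seqno bumped once per submission, all sharing one spinlock. A sketch of that arrangement with illustrative my_queue names:

#include <linux/dma-fence.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

extern const struct dma_fence_ops my_queue_fence_ops;

struct my_queue {
        u64 fence_context;              /* from dma_fence_context_alloc() */
        u64 emit_seqno;                 /* one step per submitted job */
        spinlock_t *job_lock;           /* shared with the signalling path */
};

/* Called once per queue at driver init time. */
static void my_queue_init(struct my_queue *queue, spinlock_t *job_lock)
{
        queue->fence_context = dma_fence_context_alloc(1);
        queue->emit_seqno = 0;
        queue->job_lock = job_lock;
}

/* Callers are expected to serialise submissions to the same queue. */
static struct dma_fence *my_queue_fence_create(struct my_queue *queue)
{
        struct dma_fence *fence;

        fence = kzalloc(sizeof(*fence), GFP_KERNEL);
        if (!fence)
                return NULL;

        dma_fence_init(fence, &my_queue_fence_ops, queue->job_lock,
                       queue->fence_context, ++queue->emit_seqno);
        return fence;
}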