Identifier search results for "fence" across the kernel tree. Each entry lists the file, the matching lines with their source line numbers, and how the identifier is used there (argument, local, or member); "[all …]" marks entries truncated by the search tool.

/drivers/dma-buf/
dma-fence.c
  in dma_fence_signal_locked():
     68: int dma_fence_signal_locked(struct dma_fence *fence)   (argument)
     73:         lockdep_assert_held(fence->lock);
     75:         if (WARN_ON(!fence))
     78:         if (test_and_set_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags)) {
     86:         fence->timestamp = ktime_get();
     87:         set_bit(DMA_FENCE_FLAG_TIMESTAMP_BIT, &fence->flags);
     88:         trace_dma_fence_signaled(fence);
     91:         list_for_each_entry_safe(cur, tmp, &fence->cb_list, node) {
     93:                 cur->func(fence, cur);
  in dma_fence_signal():
    109: int dma_fence_signal(struct dma_fence *fence)   (argument)
  [all …]
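Aside: the lines above are the core signalling path — dma_fence_signal_locked() is the variant for callers that already hold fence->lock; it stamps the completion time and then runs the registered callbacks. A minimal sketch of a driver completion path built on it; struct my_engine and its fields are invented for illustration, not taken from any file in this listing:

    #include <linux/dma-fence.h>
    #include <linux/spinlock.h>

    /* Hypothetical per-engine state; e->lock must be the same spinlock
     * that was passed to dma_fence_init() for e->active. */
    struct my_engine {
            spinlock_t lock;
            struct dma_fence *active;       /* fence of the job in flight */
    };

    static void my_engine_irq_done(struct my_engine *e)
    {
            unsigned long flags;

            spin_lock_irqsave(&e->lock, flags);
            if (e->active) {
                    /* lock already held, so use the _locked variant */
                    dma_fence_signal_locked(e->active);
                    dma_fence_put(e->active);
                    e->active = NULL;
            }
            spin_unlock_irqrestore(&e->lock, flags);
    }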
reservation.c
  in reservation_object_add_shared_inplace():
    105:                 struct dma_fence *fence)   (argument)
    109:         dma_fence_get(fence);
    120:                 if (old_fence->context == fence->context) {
    122:                         RCU_INIT_POINTER(fobj->shared[i], fence);
    135:         RCU_INIT_POINTER(fobj->shared[fobj->shared_count], fence);
  in reservation_object_add_shared_replace():
    146:                 struct dma_fence *fence)   (argument)
    151:         dma_fence_get(fence);
    154:         RCU_INIT_POINTER(fobj->shared[0], fence);
    173:                 if (!old_fence && check->context == fence->context) {
    175:                         RCU_INIT_POINTER(fobj->shared[i], fence);
  [all …]
seqno-fence.c
  in seqno_fence_get_driver_name():
     24: static const char *seqno_fence_get_driver_name(struct dma_fence *fence)   (argument)
     26:         struct seqno_fence *seqno_fence = to_seqno_fence(fence);
     28:         return seqno_fence->ops->get_driver_name(fence);
  in seqno_fence_get_timeline_name():
     31: static const char *seqno_fence_get_timeline_name(struct dma_fence *fence)   (argument)
     33:         struct seqno_fence *seqno_fence = to_seqno_fence(fence);
     35:         return seqno_fence->ops->get_timeline_name(fence);
  in seqno_enable_signaling():
     38: static bool seqno_enable_signaling(struct dma_fence *fence)   (argument)
     40:         struct seqno_fence *seqno_fence = to_seqno_fence(fence);
     42:         return seqno_fence->ops->enable_signaling(fence);
  in seqno_signaled():
     45: static bool seqno_signaled(struct dma_fence *fence)   (argument)
  [all …]
sync_file.c
  in sync_file_create():
     73: struct sync_file *sync_file_create(struct dma_fence *fence)   (argument)
     81:         sync_file->fence = dma_fence_get(fence);
  in sync_file_get_fence():
    114:         struct dma_fence *fence;   (local)
    120:         fence = dma_fence_get(sync_file->fence);
    123:         return fence;
  in sync_file_get_name():
    145:         struct dma_fence *fence = sync_file->fence;   (local)
    148:                  fence->ops->get_driver_name(fence),
    149:                  fence->ops->get_timeline_name(fence),
    150:                  fence->context,
    151:                  fence->seqno);
  [all …]
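Aside: sync_file_create() wraps a dma_fence in a file that can be handed to userspace as a file descriptor, and (as line 81 shows) it takes its own fence reference. A hedged sketch of exporting a fence as an fd; my_export_fence_fd is an invented helper, but the calls it makes are the standard kernel APIs:

    #include <linux/dma-fence.h>
    #include <linux/fcntl.h>
    #include <linux/file.h>
    #include <linux/sync_file.h>

    /* Illustrative: turn a fence the caller owns into an fd. */
    static int my_export_fence_fd(struct dma_fence *fence)
    {
            struct sync_file *sync_file;
            int fd;

            fd = get_unused_fd_flags(O_CLOEXEC);
            if (fd < 0)
                    return fd;

            sync_file = sync_file_create(fence);    /* grabs its own ref */
            if (!sync_file) {
                    put_unused_fd(fd);
                    return -ENOMEM;
            }

            fd_install(fd, sync_file->file);
            return fd;
    }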
sw_sync.c
     61:         __s32   fence; /* fd of new fence */   (member)
  in dma_fence_to_sync_pt():
     73: static inline struct sync_pt *dma_fence_to_sync_pt(struct dma_fence *fence)   (argument)
     75:         if (fence->ops != &timeline_fence_ops)
     77:         return container_of(fence, struct sync_pt, base);
  in timeline_fence_get_driver_name():
    128: static const char *timeline_fence_get_driver_name(struct dma_fence *fence)   (argument)
  in timeline_fence_get_timeline_name():
    133: static const char *timeline_fence_get_timeline_name(struct dma_fence *fence)   (argument)
    135:         struct sync_timeline *parent = dma_fence_parent(fence);
  in timeline_fence_release():
    140: static void timeline_fence_release(struct dma_fence *fence)   (argument)
    142:         struct sync_pt *pt = dma_fence_to_sync_pt(fence);
    143:         struct sync_timeline *parent = dma_fence_parent(fence);
  [all …]
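Aside: dma_fence_to_sync_pt() above is the safe-downcast idiom — it checks fence->ops before container_of(), so a fence created by some other driver is never misinterpreted as a sync_pt. The same pattern in generic form; struct my_fence and my_fence_ops are invented names:

    #include <linux/dma-fence.h>

    struct my_fence {
            struct dma_fence base;
            u32 value;
    };

    static const struct dma_fence_ops my_fence_ops;

    /* Only trust container_of() if the ops table proves the fence was
     * really allocated as a struct my_fence. */
    static struct my_fence *to_my_fence(struct dma_fence *fence)
    {
            if (fence->ops != &my_fence_ops)
                    return NULL;
            return container_of(fence, struct my_fence, base);
    }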
dma-fence-array.c
  in dma_fence_array_get_driver_name():
     24: static const char *dma_fence_array_get_driver_name(struct dma_fence *fence)   (argument)
  in dma_fence_array_get_timeline_name():
     29: static const char *dma_fence_array_get_timeline_name(struct dma_fence *fence)   (argument)
  in dma_fence_array_enable_signaling():
     55: static bool dma_fence_array_enable_signaling(struct dma_fence *fence)   (argument)
     57:         struct dma_fence_array *array = to_dma_fence_array(fence);
  in dma_fence_array_signaled():
     83: static bool dma_fence_array_signaled(struct dma_fence *fence)   (argument)
     85:         struct dma_fence_array *array = to_dma_fence_array(fence);
  in dma_fence_array_release():
     90: static void dma_fence_array_release(struct dma_fence *fence)   (argument)
     92:         struct dma_fence_array *array = to_dma_fence_array(fence);
     99:         dma_fence_free(fence);
  in dma_fence_match_context():
    167: bool dma_fence_match_context(struct dma_fence *fence, u64 context)   (argument)
  [all …]
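Aside: a dma_fence_array aggregates several fences behind a single dma_fence that signals once all (or, optionally, any) of them have signalled. A sketch of building one; my_merge_fences is an invented helper, and error handling is minimal:

    #include <linux/dma-fence.h>
    #include <linux/dma-fence-array.h>
    #include <linux/slab.h>

    /* Collapse a set of fences into one "signals when all are done" fence.
     * On success the array owns the vector and the references in it. */
    static struct dma_fence *my_merge_fences(struct dma_fence **src, int count)
    {
            struct dma_fence_array *array;
            struct dma_fence **fences;
            int i;

            fences = kmalloc_array(count, sizeof(*fences), GFP_KERNEL);
            if (!fences)
                    return NULL;

            for (i = 0; i < count; i++)
                    fences[i] = dma_fence_get(src[i]);

            array = dma_fence_array_create(count, fences,
                                           dma_fence_context_alloc(1), 1,
                                           false /* all, not any */);
            if (!array) {
                    for (i = 0; i < count; i++)
                            dma_fence_put(fences[i]);
                    kfree(fences);
                    return NULL;
            }

            return &array->base;
    }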
sync_debug.c
  in sync_print_fence():
     75:                              struct dma_fence *fence, bool show)   (argument)
     77:         struct sync_timeline *parent = dma_fence_parent(fence);
     80:         status = dma_fence_get_status_locked(fence);
     87:         if (test_bit(DMA_FENCE_FLAG_TIMESTAMP_BIT, &fence->flags)) {
     89:                         ktime_to_timespec64(fence->timestamp);
     94:         if (fence->ops->timeline_value_str &&
     95:             fence->ops->fence_value_str) {
     99:                 fence->ops->fence_value_str(fence, value, sizeof(value));
    105:                         fence->ops->timeline_value_str(fence, value,
  in sync_print_sync_file():
    138:                    sync_status_str(dma_fence_get_status(sync_file->fence)));
  [all …]
/drivers/gpu/drm/i915/
i915_sw_fence.c
  in debug_fence_init():
     38: static inline void debug_fence_init(struct i915_sw_fence *fence)   (argument)
     40:         debug_object_init(fence, &i915_sw_fence_debug_descr);
  in debug_fence_activate():
     43: static inline void debug_fence_activate(struct i915_sw_fence *fence)   (argument)
     45:         debug_object_activate(fence, &i915_sw_fence_debug_descr);
  in debug_fence_set_state():
     48: static inline void debug_fence_set_state(struct i915_sw_fence *fence,   (argument)
     51:         debug_object_active_state(fence, &i915_sw_fence_debug_descr, old, new);
  in debug_fence_deactivate():
     54: static inline void debug_fence_deactivate(struct i915_sw_fence *fence)   (argument)
     56:         debug_object_deactivate(fence, &i915_sw_fence_debug_descr);
  in debug_fence_destroy():
     59: static inline void debug_fence_destroy(struct i915_sw_fence *fence)   (argument)
     61:         debug_object_destroy(fence, &i915_sw_fence_debug_descr);
  [all …]
i915_gem_fence_reg.c
  in i965_write_fence_reg():
     60: static void i965_write_fence_reg(struct drm_i915_fence_reg *fence,   (argument)
     67:         if (INTEL_INFO(fence->i915)->gen >= 6) {
     68:                 fence_reg_lo = FENCE_REG_GEN6_LO(fence->id);
     69:                 fence_reg_hi = FENCE_REG_GEN6_HI(fence->id);
     73:                 fence_reg_lo = FENCE_REG_965_LO(fence->id);
     74:                 fence_reg_hi = FENCE_REG_965_HI(fence->id);
     96:                 struct drm_i915_private *dev_priv = fence->i915;
  in i915_write_fence_reg():
    116: static void i915_write_fence_reg(struct drm_i915_fence_reg *fence,   (argument)
    132:                 if (is_y_tiled && HAS_128_BYTE_Y_TILING(fence->i915))
    148:                 struct drm_i915_private *dev_priv = fence->i915;
  [all …]
i915_sw_fence.h
     42: void __i915_sw_fence_init(struct i915_sw_fence *fence,
     47: #define i915_sw_fence_init(fence, fn) \   (argument)
     51:         __i915_sw_fence_init((fence), (fn), #fence, &__key); \
     54: #define i915_sw_fence_init(fence, fn) \   (argument)
     55:         __i915_sw_fence_init((fence), (fn), NULL, NULL)
     59: void i915_sw_fence_fini(struct i915_sw_fence *fence);
     61: static inline void i915_sw_fence_fini(struct i915_sw_fence *fence) {}   (argument)
     64: void i915_sw_fence_commit(struct i915_sw_fence *fence);
     66: int i915_sw_fence_await_sw_fence(struct i915_sw_fence *fence,
     69: int i915_sw_fence_await_sw_fence_gfp(struct i915_sw_fence *fence,
  [all …]
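Aside: the two i915_sw_fence_init() definitions above select between a debug build (per-call-site lockdep class key plus the stringified variable name) and a release build (NULL key and name). The generic form of that idiom, sketched with invented names; the exact config guard i915 uses may differ from the CONFIG_LOCKDEP shown here:

    #include <linux/lockdep.h>

    /* Each macro expansion gets its own static lock_class_key, so lockdep
     * can tell call sites apart, and #f records the variable name for
     * debug output. "__my_fence_init" is an illustrative stand-in. */
    #ifdef CONFIG_LOCKDEP
    #define my_fence_init(f, fn)                                    \
    do {                                                            \
            static struct lock_class_key __key;                     \
                                                                    \
            __my_fence_init((f), (fn), #f, &__key);                 \
    } while (0)
    #else
    #define my_fence_init(f, fn)                                    \
            __my_fence_init((f), (fn), NULL, NULL)
    #endif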
/drivers/gpu/drm/nouveau/
nouveau_fence.c
  in from_fence():
     45: from_fence(struct dma_fence *fence)   (argument)
     47:         return container_of(fence, struct nouveau_fence, base);
  in nouveau_fctx():
     51: nouveau_fctx(struct nouveau_fence *fence)   (argument)
     53:         return container_of(fence->base.lock, struct nouveau_fence_chan, lock);
  in nouveau_fence_signal():
     57: nouveau_fence_signal(struct nouveau_fence *fence)   (argument)
     61:         dma_fence_signal_locked(&fence->base);
     62:         list_del(&fence->head);
     63:         rcu_assign_pointer(fence->channel, NULL);
     65:         if (test_bit(DMA_FENCE_FLAG_USER_BITS, &fence->base.flags)) {
     66:                 struct nouveau_fence_chan *fctx = nouveau_fctx(fence);
  [all …]
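Aside: nouveau_fctx() above recovers the per-channel context through fence->base.lock. Since dma_fence_init() stores a pointer to the creator's spinlock in every fence, container_of() on that lock pointer finds the owning structure without a back-pointer in each fence; vmwgfx's fman_from_fence() and goldfish's goldfish_dma_fence_parent() further down use the same trick. A generic sketch with invented names:

    #include <linux/dma-fence.h>
    #include <linux/spinlock.h>

    struct my_fence_ctx {
            spinlock_t lock;        /* passed to dma_fence_init() for
                                     * every fence this context creates */
            /* ... per-context state ... */
    };

    struct my_fence {
            struct dma_fence base;
    };

    /* All fences of a context share its lock, so the lock pointer leads
     * back to the context with no extra storage per fence. */
    static struct my_fence_ctx *my_fence_ctx(struct my_fence *fence)
    {
            return container_of(fence->base.lock, struct my_fence_ctx, lock);
    }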
nv84_fence.c
  in nv84_fence_emit():
     67: nv84_fence_emit(struct nouveau_fence *fence)   (argument)
     69:         struct nouveau_channel *chan = fence->channel;
     70:         struct nv84_fence_chan *fctx = chan->fence;
     73:         if (fence->sysmem)
     78:         return fctx->base.emit32(chan, addr, fence->base.seqno);
  in nv84_fence_sync():
     82: nv84_fence_sync(struct nouveau_fence *fence,   (argument)
     85:         struct nv84_fence_chan *fctx = chan->fence;
     88:         if (fence->sysmem)
     93:         return fctx->base.sync32(chan, addr, fence->base.seqno);
  in nv84_fence_read():
     99:         struct nv84_fence_priv *priv = chan->drm->fence;
  [all …]
nv04_fence.c
  in nv04_fence_emit():
     40: nv04_fence_emit(struct nouveau_fence *fence)   (argument)
     42:         struct nouveau_channel *chan = fence->channel;
     46:                 OUT_RING (chan, fence->base.seqno);
  in nv04_fence_sync():
     53: nv04_fence_sync(struct nouveau_fence *fence,   (argument)
  in nv04_fence_context_del():
     71:         struct nv04_fence_chan *fctx = chan->fence;
     73:         chan->fence = NULL;
  in nv04_fence_context_new():
     86:         chan->fence = fctx;
  in nv04_fence_destroy():
     95:         struct nv04_fence_priv *priv = drm->fence;
     96:         drm->fence = NULL;
  in nv04_fence_create():
    105:         priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL);
/drivers/gpu/drm/vgem/
vgem_fence.c
  in vgem_fence_get_driver_name():
     36: static const char *vgem_fence_get_driver_name(struct dma_fence *fence)   (argument)
  in vgem_fence_get_timeline_name():
     41: static const char *vgem_fence_get_timeline_name(struct dma_fence *fence)   (argument)
  in vgem_fence_signaled():
     46: static bool vgem_fence_signaled(struct dma_fence *fence)   (argument)
  in vgem_fence_enable_signaling():
     51: static bool vgem_fence_enable_signaling(struct dma_fence *fence)   (argument)
  in vgem_fence_release():
     58:         struct vgem_fence *fence = container_of(base, typeof(*fence), base);   (local)
     60:         del_timer_sync(&fence->timer);
     61:         dma_fence_free(&fence->base);
  in vgem_fence_value_str():
     64: static void vgem_fence_value_str(struct dma_fence *fence, char *str, int size)   (argument)
     66:         snprintf(str, size, "%u", fence->seqno);
  in vgem_fence_timeline_value_str():
     69: static void vgem_fence_timeline_value_str(struct dma_fence *fence, char *str,   (argument)
  [all …]
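Aside: vgem is close to the smallest useful dma_fence_ops implementation — constant driver and timeline names plus value_str hooks for debugfs. A hedged sketch of such an ops table and a fence constructor, matching the dma_fence_ops of this kernel generation (enable_signaling and wait were still mandatory); all "my_" names are invented:

    #include <linux/dma-fence.h>
    #include <linux/slab.h>
    #include <linux/spinlock.h>

    static DEFINE_SPINLOCK(my_fence_lock);

    static const char *my_get_driver_name(struct dma_fence *fence)
    {
            return "my-driver";
    }

    static const char *my_get_timeline_name(struct dma_fence *fence)
    {
            return "unbound";
    }

    static bool my_enable_signaling(struct dma_fence *fence)
    {
            return true;    /* signalled asynchronously elsewhere */
    }

    static const struct dma_fence_ops my_fence_ops = {
            .get_driver_name = my_get_driver_name,
            .get_timeline_name = my_get_timeline_name,
            .enable_signaling = my_enable_signaling,
            .wait = dma_fence_default_wait,
    };

    static struct dma_fence *my_fence_create(u64 context, unsigned int seqno)
    {
            struct dma_fence *fence = kzalloc(sizeof(*fence), GFP_KERNEL);

            if (fence)
                    dma_fence_init(fence, &my_fence_ops, &my_fence_lock,
                                   context, seqno);
            return fence;
    }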
/drivers/gpu/drm/vmwgfx/
vmwgfx_fence.c
     54:         struct vmw_fence_obj fence;   (member)
     76:         struct vmw_fence_obj *fence;   (member)
  in fman_from_fence():
     84: fman_from_fence(struct vmw_fence_obj *fence)   (argument)
     86:         return container_of(fence->base.lock, struct vmw_fence_manager, lock);
  in vmw_fence_obj_destroy():
    113:         struct vmw_fence_obj *fence =   (local)
    116:         struct vmw_fence_manager *fman = fman_from_fence(fence);
    119:                 list_del_init(&fence->head);
    122:         fence->destroy(fence);
  in vmw_fence_enable_signaling():
    137:         struct vmw_fence_obj *fence =   (local)
    140:         struct vmw_fence_manager *fman = fman_from_fence(fence);
  [all …]
vmwgfx_fence.h
     60:         void (*destroy)(struct vmw_fence_obj *fence);
  in vmw_fence_obj_unreference():
     71:         struct vmw_fence_obj *fence = *fence_p;   (local)
     74:         if (fence)
     75:                 dma_fence_put(&fence->base);
  in vmw_fence_obj_reference():
     79: vmw_fence_obj_reference(struct vmw_fence_obj *fence)   (argument)
     81:         if (fence)
     82:                 dma_fence_get(&fence->base);
     83:         return fence;
     88: extern bool vmw_fence_obj_signaled(struct vmw_fence_obj *fence);
     90: extern int vmw_fence_obj_wait(struct vmw_fence_obj *fence,
  [all …]
/drivers/gpu/drm/amd/scheduler/
sched_fence.c
  in amd_sched_fence_create():
     52:         struct amd_sched_fence *fence = NULL;   (local)
     55:         fence = kmem_cache_zalloc(sched_fence_slab, GFP_KERNEL);
     56:         if (fence == NULL)
     59:         fence->owner = owner;
     60:         fence->sched = entity->sched;
     61:         spin_lock_init(&fence->lock);
     64:         dma_fence_init(&fence->scheduled, &amd_sched_fence_ops_scheduled,
     65:                        &fence->lock, entity->fence_context, seq);
     66:         dma_fence_init(&fence->finished, &amd_sched_fence_ops_finished,
     67:                        &fence->lock, entity->fence_context + 1, seq);
  [all …]
/drivers/gpu/drm/radeon/
radeon_fence.c
  in radeon_fence_emit():
    130:                       struct radeon_fence **fence,   (argument)
    136:         *fence = kmalloc(sizeof(struct radeon_fence), GFP_KERNEL);
    137:         if ((*fence) == NULL) {
    140:         (*fence)->rdev = rdev;
    141:         (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring];
    142:         (*fence)->ring = ring;
    143:         (*fence)->is_vm_update = false;
    144:         dma_fence_init(&(*fence)->base, &radeon_fence_ops,
    148:         radeon_fence_ring_emit(rdev, ring, *fence);
    149:         trace_radeon_fence_emit(rdev->ddev, ring, (*fence)->seq);
  [all …]
radeon_sync.c
  in radeon_sync_fence():
     64:                        struct radeon_fence *fence)   (argument)
     68:         if (!fence)
     71:         other = sync->sync_to[fence->ring];
     72:         sync->sync_to[fence->ring] = radeon_fence_later(fence, other);
     74:         if (fence->is_vm_update) {
     76:                 sync->last_vm_update = radeon_fence_later(fence, other);
  in radeon_sync_resv():
     96:         struct radeon_fence *fence;   (local)
    102:         fence = f ? to_radeon_fence(f) : NULL;
    103:         if (fence && fence->rdev == rdev)
    104:                 radeon_sync_fence(sync, fence);
  [all …]
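Aside: radeon_sync_fence() keeps only the most recent fence per ring, because fences on one ring signal in order, so waiting for the latest implies all earlier ones. A generic sketch of that reduction using the core dma_fence_is_later() helper; struct my_sync and MY_NUM_RINGS are invented, and the comparison assumes both fences come from the same context, as they do for a single ring:

    #include <linux/dma-fence.h>

    #define MY_NUM_RINGS 8  /* illustrative */

    struct my_sync {
            struct dma_fence *sync_to[MY_NUM_RINGS];
    };

    /* Remember at most one fence per ring: the newest. */
    static void my_sync_fence(struct my_sync *sync, struct dma_fence *fence,
                              unsigned int ring)
    {
            struct dma_fence *other = sync->sync_to[ring];

            if (!other || dma_fence_is_later(fence, other)) {
                    dma_fence_put(other);   /* NULL-safe */
                    sync->sync_to[ring] = dma_fence_get(fence);
            }
    }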
/drivers/gpu/drm/virtio/
virtgpu_fence.c
  in virtio_signaled():
     46:         struct virtio_gpu_fence *fence = to_virtio_fence(f);   (local)
     48:         if (atomic64_read(&fence->drv->last_seq) >= fence->seq)
  in virtio_fence_value_str():
     55:         struct virtio_gpu_fence *fence = to_virtio_fence(f);   (local)
     57:         snprintf(str, size, "%llu", fence->seq);
  in virtio_timeline_value_str():
     62:         struct virtio_gpu_fence *fence = to_virtio_fence(f);   (local)
     64:         snprintf(str, size, "%llu", (u64)atomic64_read(&fence->drv->last_seq));
  in virtio_gpu_fence_emit():
     79:                           struct virtio_gpu_fence **fence)   (argument)
     84:         *fence = kmalloc(sizeof(struct virtio_gpu_fence), GFP_ATOMIC);
     85:         if ((*fence) == NULL)
     89:         (*fence)->drv = drv;
  [all …]
/drivers/gpu/drm/
drm_syncobj.c
  in drm_syncobj_fence_get_or_add_callback():
     93:                                            struct dma_fence **fence,   (argument)
     99:         WARN_ON(*fence);
    101:         *fence = drm_syncobj_fence_get(syncobj);
    102:         if (*fence)
    110:         if (syncobj->fence) {
    111:                 *fence = dma_fence_get(syncobj->fence);
    114:                 *fence = NULL;
  in drm_syncobj_replace_fence():
    163:                                struct dma_fence *fence)   (argument)
    168:         if (fence)
    169:                 dma_fence_get(fence);
  [all …]
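Aside: in drm_syncobj_replace_fence() the reference on the incoming fence is taken before it is published and before the old fence's reference is dropped; that ordering is what keeps the swap safe against concurrent readers. A stripped-down sketch of the get-before-put pattern; struct my_obj is invented, and the callback-list handling the real function also does is omitted:

    #include <linux/dma-fence.h>
    #include <linux/spinlock.h>

    struct my_obj {
            spinlock_t lock;
            struct dma_fence *fence;
    };

    static void my_replace_fence(struct my_obj *obj, struct dma_fence *fence)
    {
            struct dma_fence *old_fence;

            if (fence)
                    dma_fence_get(fence);   /* take the new ref first */

            spin_lock(&obj->lock);
            old_fence = obj->fence;
            obj->fence = fence;
            spin_unlock(&obj->lock);

            dma_fence_put(old_fence);       /* drop the old ref last */
    }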
/drivers/gpu/drm/msm/
msm_fence.c
  in fence_completed():
     47: static inline bool fence_completed(struct msm_fence_context *fctx, uint32_t fence)   (argument)
     49:         return (int32_t)(fctx->completed_fence - fence) >= 0;
  in msm_wait_fence():
     53: int msm_wait_fence(struct msm_fence_context *fctx, uint32_t fence,   (argument)
     58:         if (fence > fctx->last_fence) {
     60:                         fctx->name, fence, fctx->last_fence);
     66:                 ret = fence_completed(fctx, fence) ? 0 : -EBUSY;
     72:                                 fence_completed(fctx, fence),
     76:                                 fence_completed(fctx, fence),
     81:                                 fence, fctx->completed_fence);
  in msm_update_fence():
     92: void msm_update_fence(struct msm_fence_context *fctx, uint32_t fence)   (argument)
  [all …]
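Aside: fence_completed() compares 32-bit sequence numbers through a signed subtraction, so the test stays correct across counter wrap-around: (int32_t)(completed - fence) >= 0 holds whenever "fence" is at or before "completed" modulo 2^32, as long as the two values stay within 2^31 of each other. A self-contained demonstration of the same trick (plain userspace C, relying on the usual two's-complement conversion):

    #include <stdint.h>
    #include <stdio.h>

    /* Wrap-safe "has completed reached fence?" test. */
    static int seqno_passed(uint32_t completed, uint32_t fence)
    {
            return (int32_t)(completed - fence) >= 0;
    }

    int main(void)
    {
            printf("%d\n", seqno_passed(5, 3));             /* 1 */
            printf("%d\n", seqno_passed(3, 5));             /* 0 */
            /* across the wrap: seqno 1 is "after" 0xfffffffe */
            printf("%d\n", seqno_passed(1, 0xfffffffeu));   /* 1 */
            return 0;
    }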
/drivers/gpu/drm/amd/amdgpu/
amdgpu_job.c
  in amdgpu_job_free_resources():
     96:         f = job->base.s_fence ? &job->base.s_fence->finished : job->fence;
  in amdgpu_job_free_cb():
    106:         dma_fence_put(job->fence);
  in amdgpu_job_free():
    117:         dma_fence_put(job->fence);
  in amdgpu_job_dependency():
    152:         struct dma_fence *fence = amdgpu_sync_get_fence(&job->dep_sync);   (local)
    155:         if (amd_sched_dependency_optimized(fence, sched_job->s_entity)) {
    156:                 r = amdgpu_sync_fence(job->adev, &job->sched_sync, fence);
    160:         if (!fence)
    161:                 fence = amdgpu_sync_get_fence(&job->sync);
    162:         while (fence == NULL && vm && !job->vm_id) {
    171:                 fence = amdgpu_sync_get_fence(&job->sync);
  [all …]
amdgpu_fence.c
  in amdgpu_fence_emit():
    137:         struct amdgpu_fence *fence;   (local)
    142:         fence = kmem_cache_alloc(amdgpu_fence_slab, GFP_KERNEL);
    143:         if (fence == NULL)
    147:         fence->ring = ring;
    148:         dma_fence_init(&fence->base, &amdgpu_fence_ops,
    174:         rcu_assign_pointer(*ptr, dma_fence_get(&fence->base));
    176:         *f = &fence->base;
  in amdgpu_fence_process():
    225:         struct dma_fence *fence, **ptr;   (local)
    232:                 fence = rcu_dereference_protected(*ptr, 1);
    235:                 if (!fence)
  [all …]
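Aside: amdgpu keeps in-flight fences in a ring buffer of RCU-protected slots: the emit path (line 174) takes a reference and publishes it with rcu_assign_pointer(), while the processing path reads slots back with rcu_dereference_protected(). A much-simplified sketch of the publish side only; all "my_" names and the slot count are invented, and the processing side that signals fences and drops the slot's reference is omitted:

    #include <linux/dma-fence.h>
    #include <linux/rcupdate.h>

    #define MY_NUM_FENCES_MASK 0xff         /* 256 slots; size illustrative */

    struct my_fence_ring {
            struct dma_fence __rcu *fences[MY_NUM_FENCES_MASK + 1];
    };

    /* Single writer assumed, as on a hardware ring. */
    static void my_fence_publish(struct my_fence_ring *ring,
                                 struct dma_fence *fence, u32 seq)
    {
            struct dma_fence __rcu **ptr =
                    &ring->fences[seq & MY_NUM_FENCES_MASK];
            struct dma_fence *old;

            /* one writer, so a protected dereference is enough */
            old = rcu_dereference_protected(*ptr, 1);
            if (old && !dma_fence_is_signaled(old))
                    dma_fence_wait(old, false);     /* slot must drain */

            /* reference for the slot, then publish for RCU readers */
            rcu_assign_pointer(*ptr, dma_fence_get(fence));
    }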
/drivers/platform/goldfish/
goldfish_sync.c
  in goldfish_dma_fence_parent():
    176: *goldfish_dma_fence_parent(struct dma_fence *fence)   (argument)
    178:         return container_of(fence->lock, struct goldfish_sync_timeline, lock);
  in goldfish_sync_fence_to_sync_pt():
    181: static struct sync_pt *goldfish_sync_fence_to_sync_pt(struct dma_fence *fence)   (argument)
    183:         return container_of(fence, struct sync_pt, base);
  in goldfish_sync_timeline_fence_get_driver_name():
    282: *goldfish_sync_timeline_fence_get_driver_name(struct dma_fence *fence)   (argument)
  in goldfish_sync_timeline_fence_get_timeline_name():
    288: *goldfish_sync_timeline_fence_get_timeline_name(struct dma_fence *fence)   (argument)
    290:         struct goldfish_sync_timeline *tl = goldfish_dma_fence_parent(fence);
  in goldfish_sync_timeline_fence_release():
    295: static void goldfish_sync_timeline_fence_release(struct dma_fence *fence)   (argument)
    297:         goldfish_sync_pt_destroy(goldfish_sync_fence_to_sync_pt(fence));
  in goldfish_sync_timeline_fence_signaled():
    300: static bool goldfish_sync_timeline_fence_signaled(struct dma_fence *fence)   (argument)
  [all …]