Searched refs:fence (Results 1 – 25 of 147) sorted by relevance

/drivers/dma-buf/
fence.c
66 int fence_signal_locked(struct fence *fence) in fence_signal_locked() argument
71 if (WARN_ON(!fence)) in fence_signal_locked()
74 if (!ktime_to_ns(fence->timestamp)) { in fence_signal_locked()
75 fence->timestamp = ktime_get(); in fence_signal_locked()
79 if (test_and_set_bit(FENCE_FLAG_SIGNALED_BIT, &fence->flags)) { in fence_signal_locked()
87 trace_fence_signaled(fence); in fence_signal_locked()
89 list_for_each_entry_safe(cur, tmp, &fence->cb_list, node) { in fence_signal_locked()
91 cur->func(fence, cur); in fence_signal_locked()
107 int fence_signal(struct fence *fence) in fence_signal() argument
111 if (!fence) in fence_signal()
[all …]
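
These two entry points are the heart of the kernel's fence API (later renamed dma_fence): a driver embeds a struct fence in its own object, initializes it with fence_init(), and calls fence_signal() (or fence_signal_locked() with fence->lock already held) to timestamp the fence, set FENCE_FLAG_SIGNALED_BIT, and run every callback queued on cb_list. A minimal provider sketch, assuming only the <linux/fence.h> interface of this tree; all toy_* names are hypothetical:

    #include <linux/fence.h>
    #include <linux/slab.h>
    #include <linux/spinlock.h>

    static DEFINE_SPINLOCK(toy_lock);       /* protects the fence, per fence_init() */

    static const char *toy_get_driver_name(struct fence *f)
    {
            return "toy";                   /* hypothetical driver name */
    }

    static const char *toy_get_timeline_name(struct fence *f)
    {
            return "toy-timeline";          /* hypothetical timeline name */
    }

    static bool toy_enable_signaling(struct fence *f)
    {
            return true;                    /* the fence can still signal */
    }

    static const struct fence_ops toy_fence_ops = {
            .get_driver_name   = toy_get_driver_name,
            .get_timeline_name = toy_get_timeline_name,
            .enable_signaling  = toy_enable_signaling,
            .wait              = fence_default_wait,   /* sleep until signaled */
    };

    static struct fence *toy_fence_create(unsigned context, unsigned seqno)
    {
            struct fence *f = kzalloc(sizeof(*f), GFP_KERNEL);

            if (!f)
                    return NULL;
            fence_init(f, &toy_fence_ops, &toy_lock, context, seqno);
            return f;       /* fence_signal(f) later runs its cb_list */
    }
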
reservation.c
86 struct fence *fence) in reservation_object_add_shared_inplace() argument
90 fence_get(fence); in reservation_object_add_shared_inplace()
96 struct fence *old_fence; in reservation_object_add_shared_inplace()
101 if (old_fence->context == fence->context) { in reservation_object_add_shared_inplace()
103 RCU_INIT_POINTER(fobj->shared[i], fence); in reservation_object_add_shared_inplace()
116 RCU_INIT_POINTER(fobj->shared[fobj->shared_count], fence); in reservation_object_add_shared_inplace()
127 struct fence *fence) in reservation_object_add_shared_replace() argument
130 struct fence *old_fence = NULL; in reservation_object_add_shared_replace()
132 fence_get(fence); in reservation_object_add_shared_replace()
135 RCU_INIT_POINTER(fobj->shared[0], fence); in reservation_object_add_shared_replace()
[all …]
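
A reservation object (the ancestor of today's dma_resv) holds one exclusive fence plus an array of shared fences per buffer. add_shared_inplace() above overwrites a slot whose old fence has the same context as the new one, while add_shared_replace() installs a freshly grown array. The caller-side contract, sketched under the assumption of this tree's reservation_object API; the toy_ wrapper is hypothetical:

    #include <linux/reservation.h>

    /* Caller must hold resv->lock (a ww_mutex). */
    static int toy_add_shared(struct reservation_object *resv,
                              struct fence *fence)
    {
            int ret;

            /*
             * Make sure a slot exists up front; this is what decides
             * between the "inplace" and "replace" paths seen above.
             */
            ret = reservation_object_reserve_shared(resv);
            if (ret)
                    return ret;

            reservation_object_add_shared_fence(resv, fence); /* takes a ref */
            return 0;
    }
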
seqno-fence.c
24 static const char *seqno_fence_get_driver_name(struct fence *fence) in seqno_fence_get_driver_name() argument
26 struct seqno_fence *seqno_fence = to_seqno_fence(fence); in seqno_fence_get_driver_name()
28 return seqno_fence->ops->get_driver_name(fence); in seqno_fence_get_driver_name()
31 static const char *seqno_fence_get_timeline_name(struct fence *fence) in seqno_fence_get_timeline_name() argument
33 struct seqno_fence *seqno_fence = to_seqno_fence(fence); in seqno_fence_get_timeline_name()
35 return seqno_fence->ops->get_timeline_name(fence); in seqno_fence_get_timeline_name()
38 static bool seqno_enable_signaling(struct fence *fence) in seqno_enable_signaling() argument
40 struct seqno_fence *seqno_fence = to_seqno_fence(fence); in seqno_enable_signaling()
42 return seqno_fence->ops->enable_signaling(fence); in seqno_enable_signaling()
45 static bool seqno_signaled(struct fence *fence) in seqno_signaled() argument
[all …]
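
Every seqno-fence op has the same shape: recover the containing seqno_fence with container_of() (via to_seqno_fence()), then forward to the driver-supplied ops table. The wrapping pattern in isolation, with hypothetical toy_* names:

    struct toy_wrapper {
            struct fence base;                      /* embedded, like seqno_fence */
            const struct fence_ops *inner_ops;      /* driver's real callbacks */
    };

    static inline struct toy_wrapper *to_toy_wrapper(struct fence *f)
    {
            return container_of(f, struct toy_wrapper, base);
    }

    static const char *toy_wrapper_driver_name(struct fence *f)
    {
            /* recover the container, then delegate, exactly as above */
            return to_toy_wrapper(f)->inner_ops->get_driver_name(f);
    }
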
/drivers/staging/android/
sync.c
155 struct sync_fence *fence; in sync_fence_alloc() local
157 fence = kzalloc(size, GFP_KERNEL); in sync_fence_alloc()
158 if (fence == NULL) in sync_fence_alloc()
161 fence->file = anon_inode_getfile("sync_fence", &sync_fence_fops, in sync_fence_alloc()
162 fence, 0); in sync_fence_alloc()
163 if (IS_ERR(fence->file)) in sync_fence_alloc()
166 kref_init(&fence->kref); in sync_fence_alloc()
167 strlcpy(fence->name, name, sizeof(fence->name)); in sync_fence_alloc()
169 init_waitqueue_head(&fence->wq); in sync_fence_alloc()
171 return fence; in sync_fence_alloc()
[all …]
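
sync_fence_alloc() above backs each fence with an anonymous inode so userspace can hold it as a file descriptor. A hedged sketch of how a driver typically exports one (create a sync_pt on its timeline, wrap it in a sync_fence, install it on an unused fd), using the sync.h calls listed below; the timeline argument and error values are assumptions about the surrounding driver:

    static int toy_export_fence(struct sync_timeline *my_tl)
    {
            struct sync_pt *pt;
            struct sync_fence *fence;
            int fd;

            fd = get_unused_fd_flags(O_CLOEXEC);
            if (fd < 0)
                    return fd;

            pt = sync_pt_create(my_tl, sizeof(*pt));
            if (!pt) {
                    put_unused_fd(fd);
                    return -ENOMEM;
            }

            fence = sync_fence_create("toy_fence", pt); /* owns pt on success */
            if (!fence) {
                    fence_put(&pt->base);   /* sync_pt embeds a struct fence */
                    put_unused_fd(fd);
                    return -ENOMEM;
            }

            sync_fence_install(fence, fd);  /* the fd now owns the fence */
            return fd;
    }
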
sync_debug.c
56 void sync_fence_debug_add(struct sync_fence *fence) in sync_fence_debug_add() argument
61 list_add_tail(&fence->sync_fence_list, &sync_fence_list_head); in sync_fence_debug_add()
65 void sync_fence_debug_remove(struct sync_fence *fence) in sync_fence_debug_remove() argument
70 list_del(&fence->sync_fence_list); in sync_fence_debug_remove()
85 static void sync_print_pt(struct seq_file *s, struct sync_pt *pt, bool fence) in sync_print_pt() argument
94 fence ? parent->name : "", in sync_print_pt()
95 fence ? "_" : "", in sync_print_pt()
111 if (fence) { in sync_print_pt()
146 static void sync_print_fence(struct seq_file *s, struct sync_fence *fence) in sync_print_fence() argument
152 seq_printf(s, "[%pK] %s: %s\n", fence, fence->name, in sync_print_fence()
[all …]
sync.h
124 struct fence base;
138 struct fence *sync_pt;
139 struct sync_fence *fence; member
170 typedef void (*sync_callback_t)(struct sync_fence *fence,
289 void sync_fence_put(struct sync_fence *fence);
299 void sync_fence_install(struct sync_fence *fence, int fd);
311 int sync_fence_wait_async(struct sync_fence *fence,
325 int sync_fence_cancel_async(struct sync_fence *fence,
336 int sync_fence_wait(struct sync_fence *fence, long timeout);
342 void sync_fence_debug_add(struct sync_fence *fence);
[all …]
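
The header declares both wait styles: sync_fence_wait() blocks (timeout in milliseconds, negative meaning wait forever), while sync_fence_wait_async() queues a sync_fence_waiter whose callback matches the sync_callback_t typedef at line 170. A sketch of both under those assumed semantics; toy_* is hypothetical and the waiter must outlive the wait:

    static void toy_on_signal(struct sync_fence *fence,
                              struct sync_fence_waiter *waiter)
    {
            pr_info("%s signaled\n", fence->name);
    }

    static int toy_wait_both_ways(struct sync_fence *fence,
                                  struct sync_fence_waiter *waiter)
    {
            int err;

            sync_fence_waiter_init(waiter, toy_on_signal);
            err = sync_fence_wait_async(fence, waiter);
            if (err < 0)
                    return err;     /* err == 1 means already signaled,
                                       so the callback will not run */

            return sync_fence_wait(fence, 1000);    /* block up to 1s */
    }
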
/drivers/gpu/drm/nouveau/
nouveau_fence.c
44 from_fence(struct fence *fence) in from_fence() argument
46 return container_of(fence, struct nouveau_fence, base); in from_fence()
50 nouveau_fctx(struct nouveau_fence *fence) in nouveau_fctx() argument
52 return container_of(fence->base.lock, struct nouveau_fence_chan, lock); in nouveau_fctx()
56 nouveau_fence_signal(struct nouveau_fence *fence) in nouveau_fence_signal() argument
60 fence_signal_locked(&fence->base); in nouveau_fence_signal()
61 list_del(&fence->head); in nouveau_fence_signal()
62 rcu_assign_pointer(fence->channel, NULL); in nouveau_fence_signal()
64 if (test_bit(FENCE_FLAG_USER_BITS, &fence->base.flags)) { in nouveau_fence_signal()
65 struct nouveau_fence_chan *fctx = nouveau_fctx(fence); in nouveau_fence_signal()
[all …]
nv84_fence.c
34 struct nv84_fence_chan *fctx = chan->fence; in nv84_fence_crtc()
74 nv84_fence_emit(struct nouveau_fence *fence) in nv84_fence_emit() argument
76 struct nouveau_channel *chan = fence->channel; in nv84_fence_emit()
77 struct nv84_fence_chan *fctx = chan->fence; in nv84_fence_emit()
80 if (fence->sysmem) in nv84_fence_emit()
85 return fctx->base.emit32(chan, addr, fence->base.seqno); in nv84_fence_emit()
89 nv84_fence_sync(struct nouveau_fence *fence, in nv84_fence_sync() argument
92 struct nv84_fence_chan *fctx = chan->fence; in nv84_fence_sync()
95 if (fence->sysmem) in nv84_fence_sync()
100 return fctx->base.sync32(chan, addr, fence->base.seqno); in nv84_fence_sync()
[all …]
nv04_fence.c
38 nv04_fence_emit(struct nouveau_fence *fence) in nv04_fence_emit() argument
40 struct nouveau_channel *chan = fence->channel; in nv04_fence_emit()
44 OUT_RING (chan, fence->base.seqno); in nv04_fence_emit()
51 nv04_fence_sync(struct nouveau_fence *fence, in nv04_fence_sync() argument
69 struct nv04_fence_chan *fctx = chan->fence; in nv04_fence_context_del()
71 chan->fence = NULL; in nv04_fence_context_del()
84 chan->fence = fctx; in nv04_fence_context_new()
93 struct nv04_fence_priv *priv = drm->fence; in nv04_fence_destroy()
94 drm->fence = NULL; in nv04_fence_destroy()
103 priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL); in nv04_fence_create()
nv10_fence.c
30 nv10_fence_emit(struct nouveau_fence *fence) in nv10_fence_emit() argument
32 struct nouveau_channel *chan = fence->channel; in nv10_fence_emit()
36 OUT_RING (chan, fence->base.seqno); in nv10_fence_emit()
44 nv10_fence_sync(struct nouveau_fence *fence, in nv10_fence_sync() argument
59 struct nv10_fence_chan *fctx = chan->fence; in nv10_fence_context_del()
65 chan->fence = NULL; in nv10_fence_context_del()
74 fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL); in nv10_fence_context_new()
88 struct nv10_fence_priv *priv = drm->fence; in nv10_fence_destroy()
93 drm->fence = NULL; in nv10_fence_destroy()
102 priv = drm->fence = kzalloc(sizeof(*priv), GFP_KERNEL); in nv10_fence_create()
/drivers/gpu/drm/radeon/
radeon_fence.c
130 struct radeon_fence **fence, in radeon_fence_emit() argument
136 *fence = kmalloc(sizeof(struct radeon_fence), GFP_KERNEL); in radeon_fence_emit()
137 if ((*fence) == NULL) { in radeon_fence_emit()
140 (*fence)->rdev = rdev; in radeon_fence_emit()
141 (*fence)->seq = seq; in radeon_fence_emit()
142 (*fence)->ring = ring; in radeon_fence_emit()
143 (*fence)->is_vm_update = false; in radeon_fence_emit()
144 fence_init(&(*fence)->base, &radeon_fence_ops, in radeon_fence_emit()
146 radeon_fence_ring_emit(rdev, ring, *fence); in radeon_fence_emit()
147 trace_radeon_fence_emit(rdev->ddev, ring, (*fence)->seq); in radeon_fence_emit()
[all …]
radeon_sync.c
64 struct radeon_fence *fence) in radeon_sync_fence() argument
68 if (!fence) in radeon_sync_fence()
71 other = sync->sync_to[fence->ring]; in radeon_sync_fence()
72 sync->sync_to[fence->ring] = radeon_fence_later(fence, other); in radeon_sync_fence()
74 if (fence->is_vm_update) { in radeon_sync_fence()
76 sync->last_vm_update = radeon_fence_later(fence, other); in radeon_sync_fence()
95 struct fence *f; in radeon_sync_resv()
96 struct radeon_fence *fence; in radeon_sync_resv() local
102 fence = f ? to_radeon_fence(f) : NULL; in radeon_sync_resv()
103 if (fence && fence->rdev == rdev) in radeon_sync_resv()
[all …]
radeon_sa.c
150 radeon_fence_unref(&sa_bo->fence); in radeon_sa_bo_remove_locked()
163 if (sa_bo->fence == NULL || !radeon_fence_signaled(sa_bo->fence)) { in radeon_sa_bo_try_free()
277 if (!radeon_fence_signaled(sa_bo->fence)) { in radeon_sa_bo_next_hole()
278 fences[i] = sa_bo->fence; in radeon_sa_bo_next_hole()
301 ++tries[best_bo->fence->ring]; in radeon_sa_bo_next_hole()
329 (*sa_bo)->fence = NULL; in radeon_sa_bo_new()
377 struct radeon_fence *fence) in radeon_sa_bo_free() argument
387 if (fence && !radeon_fence_signaled(fence)) { in radeon_sa_bo_free()
388 (*sa_bo)->fence = radeon_fence_ref(fence); in radeon_sa_bo_free()
390 &sa_manager->flist[fence->ring]); in radeon_sa_bo_free()
[all …]
radeon_test.c
87 struct radeon_fence *fence = NULL; in radeon_do_test_moves() local
120 fence = radeon_copy_dma(rdev, gtt_addr, vram_addr, in radeon_do_test_moves()
124 fence = radeon_copy_blit(rdev, gtt_addr, vram_addr, in radeon_do_test_moves()
127 if (IS_ERR(fence)) { in radeon_do_test_moves()
129 r = PTR_ERR(fence); in radeon_do_test_moves()
133 r = radeon_fence_wait(fence, false); in radeon_do_test_moves()
139 radeon_fence_unref(&fence); in radeon_do_test_moves()
171 fence = radeon_copy_dma(rdev, vram_addr, gtt_addr, in radeon_do_test_moves()
175 fence = radeon_copy_blit(rdev, vram_addr, gtt_addr, in radeon_do_test_moves()
178 if (IS_ERR(fence)) { in radeon_do_test_moves()
[all …]
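
The test driver shows the canonical radeon flow: a copy returns the fence that will signal when the DMA (or blit) finishes, the caller waits on it, then drops its reference. Condensed into one hedged helper; only the calls visible in the snippet are assumed, and the parameter names are illustrative:

    static int toy_copy_and_wait(struct radeon_device *rdev,
                                 uint64_t src, uint64_t dst,
                                 unsigned num_pages,
                                 struct reservation_object *resv)
    {
            struct radeon_fence *fence;
            int r;

            fence = radeon_copy_dma(rdev, src, dst, num_pages, resv);
            if (IS_ERR(fence))
                    return PTR_ERR(fence);  /* emission failed */

            r = radeon_fence_wait(fence, false);    /* false: not interruptible */
            radeon_fence_unref(&fence);             /* drop our reference */
            return r;
    }
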
/drivers/gpu/drm/virtio/
virtgpu_fence.c
29 static const char *virtio_get_driver_name(struct fence *f) in virtio_get_driver_name()
34 static const char *virtio_get_timeline_name(struct fence *f) in virtio_get_timeline_name()
39 static bool virtio_enable_signaling(struct fence *f) in virtio_enable_signaling()
44 static bool virtio_signaled(struct fence *f) in virtio_signaled()
46 struct virtio_gpu_fence *fence = to_virtio_fence(f); in virtio_signaled() local
48 if (atomic64_read(&fence->drv->last_seq) >= fence->seq) in virtio_signaled()
53 static void virtio_fence_value_str(struct fence *f, char *str, int size) in virtio_fence_value_str()
55 struct virtio_gpu_fence *fence = to_virtio_fence(f); in virtio_fence_value_str() local
57 snprintf(str, size, "%llu", fence->seq); in virtio_fence_value_str()
60 static void virtio_timeline_value_str(struct fence *f, char *str, int size) in virtio_timeline_value_str()
[all …]
/drivers/gpu/drm/vmwgfx/
vmwgfx_fence.c
54 struct vmw_fence_obj fence; member
77 struct vmw_fence_obj *fence; member
85 fman_from_fence(struct vmw_fence_obj *fence) in fman_from_fence() argument
87 return container_of(fence->base.lock, struct vmw_fence_manager, lock); in fman_from_fence()
112 static void vmw_fence_obj_destroy(struct fence *f) in vmw_fence_obj_destroy()
114 struct vmw_fence_obj *fence = in vmw_fence_obj_destroy() local
117 struct vmw_fence_manager *fman = fman_from_fence(fence); in vmw_fence_obj_destroy()
121 list_del_init(&fence->head); in vmw_fence_obj_destroy()
124 fence->destroy(fence); in vmw_fence_obj_destroy()
127 static const char *vmw_fence_get_driver_name(struct fence *f) in vmw_fence_get_driver_name()
[all …]
vmwgfx_fence.h
55 struct fence base;
59 void (*destroy)(struct vmw_fence_obj *fence);
70 struct vmw_fence_obj *fence = *fence_p; in vmw_fence_obj_unreference() local
73 if (fence) in vmw_fence_obj_unreference()
74 fence_put(&fence->base); in vmw_fence_obj_unreference()
78 vmw_fence_obj_reference(struct vmw_fence_obj *fence) in vmw_fence_obj_reference() argument
80 if (fence) in vmw_fence_obj_reference()
81 fence_get(&fence->base); in vmw_fence_obj_reference()
82 return fence; in vmw_fence_obj_reference()
87 extern bool vmw_fence_obj_signaled(struct vmw_fence_obj *fence);
[all …]
/drivers/gpu/drm/amd/scheduler/
sched_fence.c
32 struct amd_sched_fence *fence = NULL; in amd_sched_fence_create() local
35 fence = kmem_cache_zalloc(sched_fence_slab, GFP_KERNEL); in amd_sched_fence_create()
36 if (fence == NULL) in amd_sched_fence_create()
39 INIT_LIST_HEAD(&fence->scheduled_cb); in amd_sched_fence_create()
40 fence->owner = owner; in amd_sched_fence_create()
41 fence->sched = s_entity->sched; in amd_sched_fence_create()
42 spin_lock_init(&fence->lock); in amd_sched_fence_create()
45 fence_init(&fence->base, &amd_sched_fence_ops, &fence->lock, in amd_sched_fence_create()
48 return fence; in amd_sched_fence_create()
51 void amd_sched_fence_signal(struct amd_sched_fence *fence) in amd_sched_fence_signal() argument
[all …]
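
Scheduler fences come out of a dedicated slab (sched_fence_slab) rather than plain kmalloc, which keeps this hot allocation path cheap and cache-aligned. The pattern, sketched with a hypothetical toy_ cache:

    #include <linux/slab.h>

    static struct kmem_cache *toy_fence_slab;       /* mirrors sched_fence_slab */

    static int __init toy_fence_slab_init(void)
    {
            toy_fence_slab = kmem_cache_create("toy_sched_fence",
                                               sizeof(struct amd_sched_fence),
                                               0, SLAB_HWCACHE_ALIGN, NULL);
            return toy_fence_slab ? 0 : -ENOMEM;
    }

    static struct amd_sched_fence *toy_fence_alloc(void)
    {
            /* zeroed allocation, as in amd_sched_fence_create() above */
            return kmem_cache_zalloc(toy_fence_slab, GFP_KERNEL);
    }
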
gpu_sched_trace.h
20 __field(struct fence *, fence)
29 __entry->fence = &sched_job->s_fence->base;
37 __entry->entity, __entry->sched_job, __entry->fence, __entry->name,
42 TP_PROTO(struct amd_sched_fence *fence),
43 TP_ARGS(fence),
45 __field(struct fence *, fence)
49 __entry->fence = &fence->base;
51 TP_printk("fence=%p signaled", __entry->fence)
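The fragments above follow the standard TRACE_EVENT() shape: TP_PROTO/TP_ARGS declare the probe, TP_STRUCT__entry and TP_fast_assign capture the fence pointer into the trace ring buffer, and TP_printk formats it. Reassembled as one event; the usual trace-header boilerplate (#define TRACE_SYSTEM and friends) is omitted, and the event name is inferred from context:

    TRACE_EVENT(amd_sched_process_job,
            TP_PROTO(struct amd_sched_fence *fence),
            TP_ARGS(fence),
            TP_STRUCT__entry(
                    __field(struct fence *, fence)
            ),
            TP_fast_assign(
                    __entry->fence = &fence->base;
            ),
            TP_printk("fence=%p signaled", __entry->fence)
    );
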
gpu_scheduler.h
55 struct fence *dependency;
71 struct fence base;
88 static inline struct amd_sched_fence *to_amd_sched_fence(struct fence *f) in to_amd_sched_fence()
103 struct fence *(*dependency)(struct amd_sched_job *sched_job);
104 struct fence *(*run_job)(struct amd_sched_job *sched_job);
140 void amd_sched_fence_scheduled(struct amd_sched_fence *fence);
141 void amd_sched_fence_signal(struct amd_sched_fence *fence);
/drivers/gpu/drm/amd/amdgpu/
amdgpu_sync.c
37 struct fence *fence; member
61 static bool amdgpu_sync_same_dev(struct amdgpu_device *adev, struct fence *f) in amdgpu_sync_same_dev()
79 static bool amdgpu_sync_test_owner(struct fence *f, void *owner) in amdgpu_sync_test_owner()
90 static void amdgpu_sync_keep_later(struct fence **keep, struct fence *fence) in amdgpu_sync_keep_later() argument
92 if (*keep && fence_is_later(*keep, fence)) in amdgpu_sync_keep_later()
96 *keep = fence_get(fence); in amdgpu_sync_keep_later()
107 struct fence *f) in amdgpu_sync_fence()
110 struct amdgpu_fence *fence; in amdgpu_sync_fence() local
119 fence = to_amdgpu_fence(f); in amdgpu_sync_fence()
120 if (!fence || fence->ring->adev != adev) { in amdgpu_sync_fence()
[all …]
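
amdgpu_sync_keep_later() (lines 90-96 above) is a small but recurring idiom: keep at most one fence per slot and let a new fence displace it only if it is later on the same context (fence_is_later() is only meaningful for fences sharing a context). Written out with the refcounting made explicit:

    static void toy_keep_later(struct fence **keep, struct fence *fence)
    {
            if (*keep && fence_is_later(*keep, fence))
                    return;                 /* the held fence already wins */

            fence_put(*keep);               /* NULL-safe drop of the older fence */
            *keep = fence_get(fence);       /* hold a reference on the newer one */
    }
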
amdgpu_fence.c
101 struct amdgpu_fence **fence) in amdgpu_fence_emit() argument
106 *fence = kmem_cache_alloc(amdgpu_fence_slab, GFP_KERNEL); in amdgpu_fence_emit()
107 if ((*fence) == NULL) { in amdgpu_fence_emit()
110 (*fence)->seq = ++ring->fence_drv.sync_seq[ring->idx]; in amdgpu_fence_emit()
111 (*fence)->ring = ring; in amdgpu_fence_emit()
112 (*fence)->owner = owner; in amdgpu_fence_emit()
113 fence_init(&(*fence)->base, &amdgpu_fence_ops, in amdgpu_fence_emit()
116 (*fence)->seq); in amdgpu_fence_emit()
118 (*fence)->seq, in amdgpu_fence_emit()
370 bool amdgpu_fence_need_sync(struct amdgpu_fence *fence, in amdgpu_fence_need_sync() argument
[all …]
amdgpu_sa.c
150 fence_put(sa_bo->fence); in amdgpu_sa_bo_remove_locked()
163 if (sa_bo->fence == NULL || in amdgpu_sa_bo_try_free()
164 !fence_is_signaled(sa_bo->fence)) { in amdgpu_sa_bo_try_free()
249 struct fence **fences, in amdgpu_sa_bo_next_hole()
278 if (!fence_is_signaled(sa_bo->fence)) { in amdgpu_sa_bo_next_hole()
279 fences[i] = sa_bo->fence; in amdgpu_sa_bo_next_hole()
302 uint32_t idx = amdgpu_ring_from_fence(best_bo->fence)->idx; in amdgpu_sa_bo_next_hole()
318 struct fence *fences[AMDGPU_MAX_RINGS]; in amdgpu_sa_bo_new()
332 (*sa_bo)->fence = NULL; in amdgpu_sa_bo_new()
385 struct fence *fence) in amdgpu_sa_bo_free() argument
[all …]
amdgpu_sched.c
31 static struct fence *amdgpu_sched_dependency(struct amd_sched_job *sched_job) in amdgpu_sched_dependency()
37 static struct fence *amdgpu_sched_run_job(struct amd_sched_job *sched_job) in amdgpu_sched_run_job()
39 struct amdgpu_fence *fence = NULL; in amdgpu_sched_run_job() local
55 fence = job->ibs[job->num_ibs - 1].fence; in amdgpu_sched_run_job()
56 fence_get(&fence->base); in amdgpu_sched_run_job()
63 return fence ? &fence->base : NULL; in amdgpu_sched_run_job()
77 struct fence **f) in amdgpu_sched_ib_submit_kernel_helper()
104 *f = fence_get(&ibs[num_ibs - 1].fence->base); in amdgpu_sched_ib_submit_kernel_helper()
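
amdgpu plugs into the shared GPU scheduler through the two hooks declared in gpu_scheduler.h above: dependency() hands back a fence the job must wait on (NULL when it can run immediately), and run_job() submits the job and returns the fence that will signal its completion; here that is the fence of the job's last IB. A skeletal backend, assuming the ops table those prototypes belong to is named amd_sched_backend_ops:

    static struct fence *toy_dependency(struct amd_sched_job *sched_job)
    {
            return NULL;    /* nothing to wait for: run at once */
    }

    static struct fence *toy_run_job(struct amd_sched_job *sched_job)
    {
            /*
             * Submit to hardware here and return a referenced fence
             * that signals on completion, as amdgpu_sched_run_job()
             * does with its last IB's fence above.
             */
            return NULL;
    }

    static const struct amd_sched_backend_ops toy_sched_ops = {
            .dependency = toy_dependency,
            .run_job    = toy_run_job,
    };
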
/drivers/staging/goldfish/
goldfish_sync.c
173 *goldfish_fence_parent(struct fence *fence) in goldfish_fence_parent() argument
175 return container_of(fence->lock, struct goldfish_sync_timeline, lock); in goldfish_fence_parent()
178 static struct sync_pt *goldfish_sync_fence_to_sync_pt(struct fence *fence) in goldfish_sync_fence_to_sync_pt() argument
180 return container_of(fence, struct sync_pt, base); in goldfish_sync_fence_to_sync_pt()
279 *goldfish_sync_timeline_fence_get_driver_name(struct fence *fence) in goldfish_sync_timeline_fence_get_driver_name() argument
285 *goldfish_sync_timeline_fence_get_timeline_name(struct fence *fence) in goldfish_sync_timeline_fence_get_timeline_name() argument
287 struct goldfish_sync_timeline *tl = goldfish_fence_parent(fence); in goldfish_sync_timeline_fence_get_timeline_name()
292 static void goldfish_sync_timeline_fence_release(struct fence *fence) in goldfish_sync_timeline_fence_release() argument
294 goldfish_sync_pt_destroy(goldfish_sync_fence_to_sync_pt(fence)); in goldfish_sync_timeline_fence_release()
297 static bool goldfish_sync_timeline_fence_signaled(struct fence *fence) in goldfish_sync_timeline_fence_signaled() argument
[all …]
