
Searched refs:fence (Results 1 – 21 of 21) sorted by relevance

/include/trace/events/
dma_fence.h  14 TP_PROTO(struct dma_fence *fence),
16 TP_ARGS(fence),
19 __string(driver, fence->ops->get_driver_name(fence))
20 __string(timeline, fence->ops->get_timeline_name(fence))
26 __assign_str(driver, fence->ops->get_driver_name(fence))
27 __assign_str(timeline, fence->ops->get_timeline_name(fence))
28 __entry->context = fence->context;
29 __entry->seqno = fence->seqno;
39 TP_PROTO(struct dma_fence *fence),
41 TP_ARGS(fence)
[all …]
/include/linux/
dma-fence.h  105 typedef void (*dma_fence_func_t)(struct dma_fence *fence,
143 const char * (*get_driver_name)(struct dma_fence *fence);
155 const char * (*get_timeline_name)(struct dma_fence *fence);
192 bool (*enable_signaling)(struct dma_fence *fence);
209 bool (*signaled)(struct dma_fence *fence);
231 signed long (*wait)(struct dma_fence *fence,
242 void (*release)(struct dma_fence *fence);
252 void (*fence_value_str)(struct dma_fence *fence, char *str, int size);
262 void (*timeline_value_str)(struct dma_fence *fence,
266 void dma_fence_init(struct dma_fence *fence, const struct dma_fence_ops *ops,
[all …]
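
The dma_fence_ops callbacks and dma_fence_init() listed above are the core of the fence API. A minimal sketch of a driver-side timeline, assuming only the two name callbacks need to be supplied; my_timeline, my_fence_ops and my_fence_emit are hypothetical names, not part of this tree:

#include <linux/dma-fence.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

/* hypothetical per-timeline state; names are illustrative only */
struct my_timeline {
        spinlock_t lock;        /* protects all fences on this timeline */
        u64 context;            /* e.g. from dma_fence_context_alloc(1) */
        u64 seqno;              /* last emitted sequence number */
};

static const char *my_get_driver_name(struct dma_fence *fence)
{
        return "my-driver";
}

static const char *my_get_timeline_name(struct dma_fence *fence)
{
        return "my-timeline";
}

static const struct dma_fence_ops my_fence_ops = {
        .get_driver_name   = my_get_driver_name,
        .get_timeline_name = my_get_timeline_name,
};

/* emit a new fence; the caller releases it with dma_fence_put() */
static struct dma_fence *my_fence_emit(struct my_timeline *tl)
{
        struct dma_fence *f = kzalloc(sizeof(*f), GFP_KERNEL);

        if (!f)
                return NULL;
        dma_fence_init(f, &my_fence_ops, &tl->lock, tl->context, ++tl->seqno);
        return f;
}
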
seqno-fence.h  42 to_seqno_fence(struct dma_fence *fence) in to_seqno_fence() argument
44 if (fence->ops != &seqno_fence_ops) in to_seqno_fence()
46 return container_of(fence, struct seqno_fence, base); in to_seqno_fence()
87 seqno_fence_init(struct seqno_fence *fence, spinlock_t *lock, in seqno_fence_init() argument
93 BUG_ON(!fence || !sync_buf || !ops); in seqno_fence_init()
101 fence->ops = ops; in seqno_fence_init()
102 dma_fence_init(&fence->base, &seqno_fence_ops, lock, context, seqno); in seqno_fence_init()
104 fence->sync_buf = sync_buf; in seqno_fence_init()
105 fence->seqno_ofs = seqno_ofs; in seqno_fence_init()
106 fence->condition = cond; in seqno_fence_init()
dma-fence-chain.h  31 struct dma_fence *fence; member
46 to_dma_fence_chain(struct dma_fence *fence) in to_dma_fence_chain() argument
48 if (!fence || fence->ops != &dma_fence_chain_ops) in to_dma_fence_chain()
51 return container_of(fence, struct dma_fence_chain, base); in to_dma_fence_chain()
66 struct dma_fence *dma_fence_chain_walk(struct dma_fence *fence);
70 struct dma_fence *fence,
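
The chain helpers above are typically used to iterate over a timeline. A sketch of walking a chain, assuming 'head' is a fence reference held by the caller and that dma_fence_chain_walk() consumes the reference it is passed and returns a reference to the next node; my_inspect_chain is a hypothetical name:

#include <linux/dma-fence-chain.h>

static void my_inspect_chain(struct dma_fence *head)
{
        struct dma_fence *iter;

        for (iter = dma_fence_get(head); iter;
             iter = dma_fence_chain_walk(iter)) {
                struct dma_fence_chain *chain = to_dma_fence_chain(iter);
                /* a plain, non-chain fence makes one pass with chain == NULL */
                struct dma_fence *contained = chain ? chain->fence : iter;

                /* ... inspect 'contained' (context, seqno, signaled state) ... */
        }
}
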
dma-fence-array.h  56 static inline bool dma_fence_is_array(struct dma_fence *fence) in dma_fence_is_array() argument
58 return fence->ops == &dma_fence_array_ops; in dma_fence_is_array()
69 to_dma_fence_array(struct dma_fence *fence) in to_dma_fence_array() argument
71 if (fence->ops != &dma_fence_array_ops) in to_dma_fence_array()
74 return container_of(fence, struct dma_fence_array, base); in to_dma_fence_array()
82 bool dma_fence_match_context(struct dma_fence *fence, u64 context);
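
dma_fence_is_array() and to_dma_fence_array() make it easy to special-case aggregate fences. A sketch, assuming the num_fences field of struct dma_fence_array (not shown in the results above) and a hypothetical helper name:

#include <linux/dma-fence-array.h>

/* number of individual fences behind a possibly-aggregate fence */
static unsigned int my_count_fences(struct dma_fence *fence)
{
        if (dma_fence_is_array(fence)) {
                struct dma_fence_array *array = to_dma_fence_array(fence);

                return array->num_fences;
        }
        return 1;
}
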
dma-resv.h  77 struct dma_resv_list __rcu *fence; member
93 return rcu_dereference_protected(obj->fence, in dma_resv_get_list()
222 if (rcu_access_pointer(obj->fence)) { in dma_resv_unlock()
223 struct dma_resv_list *fence = dma_resv_get_list(obj); in dma_resv_unlock() local
225 fence->shared_max = fence->shared_count; in dma_resv_unlock()
264 struct dma_fence *fence; in dma_resv_get_excl_rcu() local
270 fence = dma_fence_get_rcu_safe(&obj->fence_excl); in dma_resv_get_excl_rcu()
273 return fence; in dma_resv_get_excl_rcu()
279 void dma_resv_add_shared_fence(struct dma_resv *obj, struct dma_fence *fence);
281 void dma_resv_add_excl_fence(struct dma_resv *obj, struct dma_fence *fence);
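
The reservation-object entry points above are how fences get attached to and read back from a buffer. A sketch of the exclusive-fence path, assuming the usual locking rules (add under dma_resv_lock(), read back locklessly via dma_resv_get_excl_rcu()) and hypothetical helper names:

#include <linux/dma-resv.h>

static int my_attach_excl(struct dma_resv *obj, struct dma_fence *fence)
{
        int ret;

        ret = dma_resv_lock(obj, NULL);         /* ww-mutex based lock */
        if (ret)
                return ret;
        dma_resv_add_excl_fence(obj, fence);
        dma_resv_unlock(obj);
        return 0;
}

static void my_wait_excl(struct dma_resv *obj)
{
        /* dma_resv_get_excl_rcu() returns its own reference, or NULL */
        struct dma_fence *excl = dma_resv_get_excl_rcu(obj);

        if (excl) {
                dma_fence_wait(excl, false);    /* uninterruptible wait */
                dma_fence_put(excl);
        }
}
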
sync_file.h  52 struct dma_fence *fence; member
58 struct sync_file *sync_file_create(struct dma_fence *fence);
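
sync_file_create() wraps a fence in a sync_file so it can be handed to userspace as a file descriptor, which is how explicit-fencing "out fences" are returned from submit ioctls. A sketch, assuming the usual fd plumbing (get_unused_fd_flags()/fd_install()) and a hypothetical helper name:

#include <linux/sync_file.h>
#include <linux/file.h>
#include <linux/fcntl.h>

static int my_export_fence_fd(struct dma_fence *fence)
{
        struct sync_file *sync_file;
        int fd;

        fd = get_unused_fd_flags(O_CLOEXEC);
        if (fd < 0)
                return fd;

        sync_file = sync_file_create(fence);    /* takes its own fence reference */
        if (!sync_file) {
                put_unused_fd(fd);
                return -ENOMEM;
        }

        fd_install(fd, sync_file->file);
        return fd;
}
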
/include/drm/
drm_syncobj.h  51 struct dma_fence __rcu *fence; member
105 struct dma_fence *fence; in drm_syncobj_fence_get() local
108 fence = dma_fence_get_rcu_safe(&syncobj->fence); in drm_syncobj_fence_get()
111 return fence; in drm_syncobj_fence_get()
118 struct dma_fence *fence,
121 struct dma_fence *fence);
124 struct dma_fence **fence);
127 struct dma_fence *fence);
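
drm_syncobj_fence_get() is the RCU-safe way to snapshot whatever fence is currently installed in a syncobj. A sketch, assuming dma_fence_is_signaled() from dma-fence.h and a hypothetical helper name:

#include <drm/drm_syncobj.h>

static bool my_syncobj_is_signaled(struct drm_syncobj *syncobj)
{
        /* returns a new reference, or NULL if no fence is installed yet */
        struct dma_fence *fence = drm_syncobj_fence_get(syncobj);
        bool signaled;

        if (!fence)
                return false;
        signaled = dma_fence_is_signaled(fence);
        dma_fence_put(fence);
        return signaled;
}
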
drm_atomic_uapi.h  53 struct dma_fence *fence);
gpu_scheduler.h  301 bool drm_sched_dependency_optimized(struct dma_fence* fence,
328 void drm_sched_fence_scheduled(struct drm_sched_fence *fence);
329 void drm_sched_fence_finished(struct drm_sched_fence *fence);
drm_file.h  124 struct dma_fence *fence; member
drm_gem.h  400 struct dma_fence *fence);
drm_plane.h  80 struct dma_fence *fence; member
/include/drm/ttm/
ttm_execbuf_util.h  119 struct dma_fence *fence);
ttm_bo_driver.h  866 struct dma_fence *fence, bool evict,
881 struct dma_fence *fence, bool evict,
/include/uapi/drm/
vgem_drm.h  54 __u32 fence; member
msm_drm.h  233 __u32 fence; /* out */ member
250 __u32 fence; /* in */ member
etnaviv_drm.h  195 __u32 fence; /* out */ member
221 __u32 fence; /* in */ member
tegra_drm.h  514 __u32 fence; member
amdgpu_drm.h  634 struct drm_amdgpu_fence fence; member
/include/uapi/linux/
sync_file.h  29 __s32 fence; member