
Lines matching refs:batch -- uses of the symbol `batch`, apparently in Mesa's
freedreno_batch.c. Only matching source lines are shown, hence the gaps in
the line numbers below.

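/* alloc_ring(): helper that carves a ringbuffer of size `sz` out of the
 * batch's fd_submit (line 57). */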
 39  alloc_ring(struct fd_batch *batch, unsigned sz, enum fd_ringbuffer_flags flags)
 41     struct fd_context *ctx = batch->ctx;
 57     return fd_submit_new_ringbuffer(batch->submit, sz, flags);
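
/* batch_init(): creates the fd_submit and the per-batch rings -- a small
 * 4 KiB gmem ring for nondraw batches, full-size rings otherwise, plus a
 * binning ring in some configuration -- then resets the fence state,
 * clear/restore/resolve bookkeeping, draw counters, and the patch/sample
 * dynarrays. */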
 61  batch_init(struct fd_batch *batch)
 63     struct fd_context *ctx = batch->ctx;
 65     batch->submit = fd_submit_new(ctx->pipe);
 66     if (batch->nondraw) {
 67        batch->gmem = alloc_ring(batch, 0x1000, FD_RINGBUFFER_PRIMARY);
 68        batch->draw = alloc_ring(batch, 0x100000, 0);
 70        batch->gmem = alloc_ring(batch, 0x100000, FD_RINGBUFFER_PRIMARY);
 71        batch->draw = alloc_ring(batch, 0x100000, 0);
 75        batch->binning = alloc_ring(batch, 0x100000, 0);
 79     batch->in_fence_fd = -1;
 80     batch->fence = NULL;
 87     batch->fence = fd_fence_create(batch);
 89     batch->cleared = 0;
 90     batch->fast_cleared = 0;
 91     batch->invalidated = 0;
 92     batch->restore = batch->resolve = 0;
 93     batch->needs_flush = false;
 94     batch->flushed = false;
 95     batch->gmem_reason = 0;
 96     batch->num_draws = 0;
 97     batch->num_vertices = 0;
 98     batch->num_bins_per_pipe = 0;
 99     batch->prim_strm_bits = 0;
100     batch->draw_strm_bits = 0;
102     fd_reset_wfi(batch);
104     util_dynarray_init(&batch->draw_patches, NULL);
105     util_dynarray_init(&batch->fb_read_patches, NULL);
108     util_dynarray_init(&batch->shader_patches, NULL);
109     util_dynarray_init(&batch->gmem_patches, NULL);
113     util_dynarray_init(&batch->rbrc_patches, NULL);
115     assert(batch->resources->entries == 0);
117     util_dynarray_init(&batch->samples, NULL);
119     u_trace_init(&batch->trace, &ctx->trace_context);
120     batch->last_timestamp_cmd = NULL;
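
/* fd_batch_create(): allocates a zeroed batch with an initial reference
 * count of 1, records ctx/nondraw, sets up the submit lock and the
 * resource set, and defers the rest to batch_init(). */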
126     struct fd_batch *batch = CALLOC_STRUCT(fd_batch);
128     if (!batch)
131     DBG("%p", batch);
133     pipe_reference_init(&batch->reference, 1);
134     batch->ctx = ctx;
135     batch->nondraw = nondraw;
137     simple_mtx_init(&batch->submit_lock, mtx_plain);
139     batch->resources =
142     batch_init(batch);
144     return batch;
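
/* cleanup_submit(): drops the draw/gmem rings plus any optional rings
 * (binning, prologue, epilogue, tile_setup, tile_fini), then deletes the
 * fd_submit itself; a no-op when the submit is already gone (line 150). */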
148  cleanup_submit(struct fd_batch *batch)
150     if (!batch->submit)
153     fd_ringbuffer_del(batch->draw);
154     fd_ringbuffer_del(batch->gmem);
156     if (batch->binning) {
157        fd_ringbuffer_del(batch->binning);
158        batch->binning = NULL;
161     if (batch->prologue) {
162        fd_ringbuffer_del(batch->prologue);
163        batch->prologue = NULL;
166     if (batch->epilogue) {
167        fd_ringbuffer_del(batch->epilogue);
168        batch->epilogue = NULL;
171     if (batch->tile_setup) {
172        fd_ringbuffer_del(batch->tile_setup);
173        batch->tile_setup = NULL;
176     if (batch->tile_fini) {
177        fd_ringbuffer_del(batch->tile_fini);
178        batch->tile_fini = NULL;
181     fd_submit_del(batch->submit);
182     batch->submit = NULL;
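
/* batch_fini(): undoes batch_init() -- releases the query buffer, the
 * in-fence fd and the fence, the submit, the dynarrays (the a2xx/a3xx-only
 * ones conditionally), any outstanding hw samples, and the u_trace state. */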
186  batch_fini(struct fd_batch *batch)
188     DBG("%p", batch);
190     pipe_resource_reference(&batch->query_buf, NULL);
192     if (batch->in_fence_fd != -1)
193        close(batch->in_fence_fd);
196     if (batch->fence)
197        fd_fence_set_batch(batch->fence, NULL);
199     fd_fence_ref(&batch->fence, NULL);
201     cleanup_submit(batch);
203     util_dynarray_fini(&batch->draw_patches);
204     util_dynarray_fini(&batch->fb_read_patches);
206     if (is_a2xx(batch->ctx->screen)) {
207        util_dynarray_fini(&batch->shader_patches);
208        util_dynarray_fini(&batch->gmem_patches);
211     if (is_a3xx(batch->ctx->screen))
212        util_dynarray_fini(&batch->rbrc_patches);
214     while (batch->samples.size > 0) {
216        util_dynarray_pop(&batch->samples, struct fd_hw_sample *);
217        fd_hw_sample_reference(batch->ctx, &samp, NULL);
219     util_dynarray_fini(&batch->samples);
221     u_trace_fini(&batch->trace);
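
/* batch_flush_dependencies(): flushes every batch this one depends on
 * (walked via the dependents_mask bitmask over the screen's batch cache),
 * then clears the mask. */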
225  batch_flush_dependencies(struct fd_batch *batch) assert_dt
227     struct fd_batch_cache *cache = &batch->ctx->screen->batch_cache;
230     foreach_batch (dep, cache, batch->dependents_mask) {
235     batch->dependents_mask = 0;
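
/* batch_reset_dependencies(): same walk as above, but the tracked
 * dependencies are presumably just dropped (the loop body is elided),
 * not flushed. */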
239  batch_reset_dependencies(struct fd_batch *batch)
241     struct fd_batch_cache *cache = &batch->ctx->screen->batch_cache;
244     foreach_batch (dep, cache, batch->dependents_mask) {
248     batch->dependents_mask = 0;
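
/* batch_reset_resources(): with the screen locked, detaches every tracked
 * resource -- removes it from the set and clears this batch's bit in the
 * resource's batch_mask; line 261 suggests write_batch is also cleared
 * when this batch was the writer. */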
252  batch_reset_resources(struct fd_batch *batch)
254     fd_screen_assert_locked(batch->ctx->screen);
256     set_foreach (batch->resources, entry) {
258        _mesa_set_remove(batch->resources, entry);
259        assert(rsc->track->batch_mask & (1 << batch->idx));
260        rsc->track->batch_mask &= ~(1 << batch->idx);
261        if (rsc->track->write_batch == batch)
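
/* batch_reset(): discards recorded state without destroying the batch:
 * dependencies and resource tracking are dropped, then the batch is torn
 * down (batch_fini) and re-initialized (batch_init) in place. */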
267  batch_reset(struct fd_batch *batch) assert_dt
269     DBG("%p", batch);
271     batch_reset_dependencies(batch);
273     fd_screen_lock(batch->ctx->screen);
274     batch_reset_resources(batch);
275     fd_screen_unlock(batch->ctx->screen);
277     batch_fini(batch);
278     batch_init(batch);
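
/* fd_batch_reset(): public wrapper that only resets a batch that actually
 * has pending work. */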
282  fd_batch_reset(struct fd_batch *batch)
284     if (batch->needs_flush)
285        batch_reset(batch);
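
/* __fd_batch_destroy(): runs when the last reference is dropped (see the
 * pipe_reference_init() in fd_batch_create()). Invalidates the batch in
 * the batch cache, detaches resources and dependencies, releases the
 * framebuffer state, and frees the key and the batch itself. */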
289  __fd_batch_destroy(struct fd_batch *batch)
291     struct fd_context *ctx = batch->ctx;
293     DBG("%p", batch);
295     fd_screen_assert_locked(batch->ctx->screen);
297     fd_bc_invalidate_batch(batch, true);
299     batch_reset_resources(batch);
300     assert(batch->resources->entries == 0);
301     _mesa_set_destroy(batch->resources, NULL);
304     batch_reset_dependencies(batch);
305     assert(batch->dependents_mask == 0);
307     util_copy_framebuffer_state(&batch->framebuffer, NULL);
308     batch_fini(batch);
310     simple_mtx_destroy(&batch->submit_lock);
312     free(batch->key);
313     free(batch);
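
/* __fd_batch_describe(): debug helper that names a batch by its seqno for
 * reference-count logging. */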
318  __fd_batch_describe(char *buf, const struct fd_batch *batch)
320     sprintf(buf, "fd_batch<%u>", batch->seqno);
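
/* fd_batch_get_prologue(): lazily allocates the small prologue ring on
 * first use. */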
325  fd_batch_get_prologue(struct fd_batch *batch)
327     if (!batch->prologue)
328        batch->prologue = alloc_ring(batch, 0x1000, 0);
329     return batch->prologue;
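
/* batch_flush(): the real flush path. Finishes pending queries, flushes
 * dependencies, detaches resources, drops the batch from the cache and
 * from ctx->batch, publishes the fence as ctx->last_fence, and hands the
 * recorded rings to fd_gmem_render_tiles() before cleaning up the
 * submit. */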
334  batch_flush(struct fd_batch *batch) assert_dt
336     DBG("%p: needs_flush=%d", batch, batch->needs_flush);
338     if (!fd_batch_lock_submit(batch))
341     batch->needs_flush = false;
346     fd_batch_finish_queries(batch);
348     batch_flush_dependencies(batch);
350     fd_screen_lock(batch->ctx->screen);
351     batch_reset_resources(batch);
357     fd_bc_invalidate_batch(batch, false);
358     batch->flushed = true;
360     if (batch == batch->ctx->batch)
361        fd_batch_reference_locked(&batch->ctx->batch, NULL);
363     fd_screen_unlock(batch->ctx->screen);
365     if (batch->fence)
366        fd_fence_ref(&batch->ctx->last_fence, batch->fence);
368     fd_gmem_render_tiles(batch);
370     assert(batch->reference.count > 0);
372     cleanup_submit(batch);
373     fd_batch_unlock_submit(batch);
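
/* fd_batch_flush(): public wrapper; the temporary reference (line 387)
 * keeps the batch alive across batch_flush(), which can drop the context's
 * own reference (line 361 above). */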
379  fd_batch_flush(struct fd_batch *batch)
387     fd_batch_reference(&tmp, batch);
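
/* recursive_dependents_mask(): ORs this batch's dependents_mask with the
 * masks of every batch it depends on, transitively; used below to reject
 * dependency cycles. */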
394  recursive_dependents_mask(struct fd_batch *batch)
396     struct fd_batch_cache *cache = &batch->ctx->screen->batch_cache;
398     uint32_t dependents_mask = batch->dependents_mask;
400     foreach_batch (dep, cache, batch->dependents_mask)
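
/* fd_batch_add_dep(): records that `dep` must flush before `batch` by
 * setting dep's bit in batch->dependents_mask; the assert on line 415
 * rejects edges that would create a cycle. */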
407  fd_batch_add_dep(struct fd_batch *batch, struct fd_batch *dep)
409     fd_screen_assert_locked(batch->ctx->screen);
411     if (batch->dependents_mask & (1 << dep->idx))
415     assert(!((1 << batch->idx) & recursive_dependents_mask(dep)));
419     batch->dependents_mask |= (1 << dep->idx);
420     DBG("%p: added dependency on %p", batch, dep);
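
The two helpers above are the whole dependency-tracking idiom: each live
batch owns a slot `idx` in the screen's batch cache, and `dependents_mask`
is a 32-bit mask of the slots this batch depends on. A minimal standalone
sketch of that idiom (illustrative only, not driver code: `struct cache`,
`add_dep`, and `recursive_mask` are invented names here, and
`__builtin_ctz` assumes GCC/Clang):

#include <assert.h>
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

struct batch {
   unsigned idx;              /* slot in the cache, must be < 32 */
   uint32_t dependents_mask;  /* bit per slot this batch depends on */
};

struct cache {
   struct batch *batches[32]; /* slot -> batch */
};

/* counterpart of recursive_dependents_mask(): transitive closure */
static uint32_t
recursive_mask(const struct cache *c, const struct batch *b)
{
   uint32_t mask = b->dependents_mask;
   /* iterate set bits, clearing the lowest each time */
   for (uint32_t iter = b->dependents_mask; iter; iter &= iter - 1)
      mask |= recursive_mask(c, c->batches[__builtin_ctz(iter)]);
   return mask;
}

/* counterpart of fd_batch_add_dep(): add an edge, rejecting cycles */
static void
add_dep(struct cache *c, struct batch *b, struct batch *dep)
{
   if (b->dependents_mask & (1u << dep->idx))
      return;                                          /* already tracked */
   assert(!((1u << b->idx) & recursive_mask(c, dep))); /* would be a cycle */
   b->dependents_mask |= (1u << dep->idx);
}

int
main(void)
{
   struct batch a = {0, 0}, b = {1, 0}, d = {2, 0};
   struct cache c = {{&a, &b, &d}};

   add_dep(&c, &a, &b); /* a depends on b */
   add_dep(&c, &b, &d); /* b depends on d */

   /* prints 0x6: a transitively depends on slots 1 and 2 */
   printf("a deps: 0x%" PRIx32 "\n", recursive_mask(&c, &a));
   return 0;
}

/* fd_batch_add_resource(): tracks rsc in the batch's resource set
 * (pre-hashed) and sets this batch's bit in the resource's batch_mask;
 * the likely() fast path just asserts the resource is already there. */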
437  fd_batch_add_resource(struct fd_batch *batch, struct fd_resource *rsc)
440     if (likely(fd_batch_references_resource(batch, rsc))) {
441        assert(_mesa_set_search_pre_hashed(batch->resources, rsc->hash, rsc));
445     assert(!_mesa_set_search(batch->resources, rsc));
447     _mesa_set_add_pre_hashed(batch->resources, rsc->hash, rsc);
448     rsc->track->batch_mask |= (1 << batch->idx);
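
/* fd_batch_resource_write(): makes `batch` the pending writer of rsc.
 * Early-out when it already is (line 463); otherwise preps the write,
 * recurses into the stencil plane, and -- when other batches still
 * reference the resource (line 475) -- adds each of them as a dependency
 * before pointing write_batch at this batch and tracking the resource. */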
452  fd_batch_resource_write(struct fd_batch *batch, struct fd_resource *rsc)
454     fd_screen_assert_locked(batch->ctx->screen);
456     DBG("%p: write %p", batch, rsc);
463     if (rsc->track->write_batch == batch)
466     fd_batch_write_prep(batch, rsc);
469     fd_batch_resource_write(batch, rsc->stencil);
475     if (unlikely(rsc->track->batch_mask & ~(1 << batch->idx))) {
476        struct fd_batch_cache *cache = &batch->ctx->screen->batch_cache;
484        if (dep == batch)
491        fd_batch_add_dep(batch, b);
496     fd_batch_reference_locked(&rsc->track->write_batch, batch);
498     fd_batch_add_resource(batch, rsc);
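
/* fd_batch_resource_read_slowpath(): read counterpart; recurses into the
 * stencil plane and, when a different batch is the pending writer (line
 * 515), deals with that writer first (the action line does not mention
 * `batch`, so it is elided here) before tracking the resource. */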
502  fd_batch_resource_read_slowpath(struct fd_batch *batch, struct fd_resource *rsc)
504     fd_screen_assert_locked(batch->ctx->screen);
507     fd_batch_resource_read(batch, rsc->stencil);
509     DBG("%p: read %p", batch, rsc);
515     if (unlikely(rsc->track->write_batch && rsc->track->write_batch != batch))
518     fd_batch_add_resource(batch, rsc);
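
/* fd_batch_check_size(): flush-early heuristics -- flushes when the
 * primitive/draw stream bit counts pass a limit or when the draw ring is
 * out of space; the condition guarding the first flush (line 525) is
 * elided. */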
522  fd_batch_check_size(struct fd_batch *batch)
525     fd_batch_flush(batch);
531     if ((batch->prim_strm_bits > limit_bits) ||
532         (batch->draw_strm_bits > limit_bits)) {
533        fd_batch_flush(batch);
537     if (!fd_ringbuffer_check_size(batch->draw))
538        fd_batch_flush(batch);
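
/* fd_wfi(): emits a wait-for-idle into `ring` only when one is pending,
 * with separate a5xx+ and pre-a5xx emit paths (elided), then clears
 * needs_wfi. */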
545  fd_wfi(struct fd_batch *batch, struct fd_ringbuffer *ring)
547     if (batch->needs_wfi) {
548        if (batch->ctx->screen->gen >= 5)
552        batch->needs_wfi = false;
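
/*
 * Reading the matches as a whole (a summary, not verbatim driver
 * comments): fd_batch_create()/batch_init() build a batch; draws record
 * resources through fd_batch_resource_read()/write(), which also build
 * the inter-batch dependency graph; fd_batch_flush() -> batch_flush()
 * resolves dependencies and hands the rings to fd_gmem_render_tiles();
 * the last reference drop lands in __fd_batch_destroy().
 */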