/*
 * Copyright 2018 Collabora Ltd.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include "zink_batch.h"
#include "zink_clear.h"
#include "zink_context.h"
#include "zink_format.h"
#include "zink_inlines.h"
#include "zink_query.h"

#include "util/u_blitter.h"
#include "util/format/u_format.h"
#include "util/format_srgb.h"
#include "util/helpers.h"
#include "util/u_framebuffer.h"
#include "util/u_inlines.h"
#include "util/u_rect.h"
#include "util/u_surface.h"
#include "util/u_helpers.h"
#include "util/perf/cpu_trace.h"

static inline bool
scissor_states_equal(const struct pipe_scissor_state *a, const struct pipe_scissor_state *b)
{
   return a->minx == b->minx && a->miny == b->miny && a->maxx == b->maxx && a->maxy == b->maxy;
}

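/* perform an immediate clear inside the active renderpass by recording
 * vkCmdClearAttachments for the requested color/depth/stencil buffers
 */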
static void
clear_in_rp(struct pipe_context *pctx,
            unsigned buffers,
            const struct pipe_scissor_state *scissor_state,
            const union pipe_color_union *pcolor,
            double depth, unsigned stencil)
{
   struct zink_context *ctx = zink_context(pctx);
   struct pipe_framebuffer_state *fb = &ctx->fb_state;

   VkClearAttachment attachments[1 + PIPE_MAX_COLOR_BUFS];
   int num_attachments = 0;

   if (buffers & PIPE_CLEAR_COLOR) {
      VkClearColorValue color;
      color.uint32[0] = pcolor->ui[0];
      color.uint32[1] = pcolor->ui[1];
      color.uint32[2] = pcolor->ui[2];
      color.uint32[3] = pcolor->ui[3];

      for (unsigned i = 0; i < fb->nr_cbufs; i++) {
         if (!(buffers & (PIPE_CLEAR_COLOR0 << i)) || !fb->cbufs[i])
            continue;

         attachments[num_attachments].aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
         attachments[num_attachments].colorAttachment = i;
         attachments[num_attachments].clearValue.color = color;
         ++num_attachments;
      }
   }

   if (buffers & PIPE_CLEAR_DEPTHSTENCIL && fb->zsbuf) {
      VkImageAspectFlags aspect = 0;
      if (buffers & PIPE_CLEAR_DEPTH)
         aspect |= VK_IMAGE_ASPECT_DEPTH_BIT;
      if (buffers & PIPE_CLEAR_STENCIL)
         aspect |= VK_IMAGE_ASPECT_STENCIL_BIT;

      assert(zink_is_zsbuf_used(ctx));

      attachments[num_attachments].aspectMask = aspect;
      attachments[num_attachments].clearValue.depthStencil.depth = depth;
      attachments[num_attachments].clearValue.depthStencil.stencil = stencil;
      ++num_attachments;
   }

   VkClearRect cr = {0};
   if (scissor_state) {
      /* invalid clear */
      if (scissor_state->minx > ctx->fb_state.width || scissor_state->miny > ctx->fb_state.height)
         return;
      cr.rect.offset.x = scissor_state->minx;
      cr.rect.offset.y = scissor_state->miny;
      cr.rect.extent.width = MIN2(fb->width - cr.rect.offset.x, scissor_state->maxx - scissor_state->minx);
      cr.rect.extent.height = MIN2(fb->height - cr.rect.offset.y, scissor_state->maxy - scissor_state->miny);
   } else {
      cr.rect.extent.width = fb->width;
      cr.rect.extent.height = fb->height;
   }
   cr.baseArrayLayer = 0;
   cr.layerCount = util_framebuffer_get_num_layers(fb);
   assert(ctx->in_rp);
   VKCTX(CmdClearAttachments)(ctx->bs->cmdbuf, num_attachments, attachments, 1, &cr);
   ctx->bs->has_work = true;
   /*
      Rendering within a subpass containing a feedback loop creates a data race, except in the
      following cases:
      • If a memory dependency is inserted between when the attachment is written and when it is
        subsequently read by later fragments. Pipeline barriers expressing a subpass self-dependency
        are the only way to achieve this, and one must be inserted every time a fragment will read
        values at a particular sample (x, y, layer, sample) coordinate, if those values have been
        written since the most recent pipeline barrier.

      VK 1.3.211, Chapter 8: Render Pass
   */
   if (ctx->fbfetch_outputs)
      ctx->base.texture_barrier(&ctx->base, PIPE_TEXTURE_BARRIER_FRAMEBUFFER);
}

static struct zink_framebuffer_clear_data *
add_new_clear(struct zink_framebuffer_clear *fb_clear)
{
   struct zink_framebuffer_clear_data cd = {0};
   util_dynarray_append(&fb_clear->clears, struct zink_framebuffer_clear_data, cd);
   return zink_fb_clear_element(fb_clear, zink_fb_clear_count(fb_clear) - 1);
}

static struct zink_framebuffer_clear_data *
get_clear_data(struct zink_context *ctx, struct zink_framebuffer_clear *fb_clear, const struct pipe_scissor_state *scissor_state)
{
   unsigned num_clears = zink_fb_clear_count(fb_clear);
   if (num_clears) {
      struct zink_framebuffer_clear_data *last_clear = zink_fb_clear_element(fb_clear, num_clears - 1);
      /* if we're completely overwriting the previous clear, merge this into the previous clear */
      if (!scissor_state || (last_clear->has_scissor && scissor_states_equal(&last_clear->scissor, scissor_state)))
         return last_clear;
   }
   return add_new_clear(fb_clear);
}

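/* pipe_context::clear hook: clears immediately with vkCmdClearAttachments when a
 * renderpass is active; otherwise records the clear so it can later be applied as
 * a renderpass loadOp clear or an explicit scissored clear
 */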
void
zink_clear(struct pipe_context *pctx,
           unsigned buffers,
           const struct pipe_scissor_state *scissor_state,
           const union pipe_color_union *pcolor,
           double depth, unsigned stencil)
{
   MESA_TRACE_FUNC();
   struct zink_context *ctx = zink_context(pctx);
   struct zink_screen *screen = zink_screen(pctx->screen);
   struct pipe_framebuffer_state *fb = &ctx->fb_state;
   bool needs_rp = false;

   if (scissor_state) {
      struct u_rect scissor = {scissor_state->minx, scissor_state->maxx, scissor_state->miny, scissor_state->maxy};
      needs_rp = !zink_blit_region_fills(scissor, fb->width, fb->height);
   }

   if (unlikely(ctx->fb_layer_mismatch)) {
      /* this is a terrible scenario:
       * at least one attachment has a layerCount greater than the others,
       * so iterate over all the mismatched attachments and pre-clear them separately,
       * then continue to flag them as needing (additional) clearing
       * to avoid loadOp=LOAD
       */
      unsigned x = 0;
      unsigned y = 0;
      unsigned w = ctx->fb_state.width;
      unsigned h = ctx->fb_state.height;
      if (scissor_state) {
         x = scissor_state->minx;
         y = scissor_state->miny;
         w = scissor_state->maxx - scissor_state->minx;
         h = scissor_state->maxy - scissor_state->miny;
      }
      unsigned clear_buffers = buffers >> 2;
      for (unsigned i = 0; i < ctx->fb_state.nr_cbufs; i++) {
         if (ctx->fb_state.cbufs[i] &&
             (ctx->fb_layer_mismatch & clear_buffers & BITFIELD_BIT(i))) {
            if (ctx->void_clears & (PIPE_CLEAR_COLOR0 << i)) {
               union pipe_color_union color;
               color.f[0] = color.f[1] = color.f[2] = 0;
               color.f[3] = 1.0;
               pctx->clear_render_target(pctx, ctx->fb_state.cbufs[i], &color,
                                         0, 0,
                                         ctx->fb_state.cbufs[i]->width, ctx->fb_state.cbufs[i]->height,
                                         ctx->render_condition_active);
            }
            pctx->clear_render_target(pctx, ctx->fb_state.cbufs[i], pcolor,
                                      x, y, w, h, ctx->render_condition_active);
         }
      }
      if (ctx->fb_state.zsbuf && (buffers & PIPE_CLEAR_DEPTHSTENCIL))
         pctx->clear_depth_stencil(pctx, ctx->fb_state.zsbuf, buffers & PIPE_CLEAR_DEPTHSTENCIL, depth, stencil,
                                   x, y, w, h, ctx->render_condition_active);
   }

   if (ctx->in_rp) {
      if (buffers & PIPE_CLEAR_DEPTHSTENCIL && (ctx->zsbuf_unused || ctx->zsbuf_readonly)) {
         /* this will need a layout change */
         assert(!ctx->track_renderpasses);
         zink_batch_no_rp(ctx);
      } else {
         clear_in_rp(pctx, buffers, scissor_state, pcolor, depth, stencil);
         return;
      }
   }

   unsigned rp_clears_enabled = ctx->rp_clears_enabled;

   if (ctx->void_clears & buffers) {
      unsigned void_clears = ctx->void_clears & buffers;
      ctx->void_clears &= ~buffers;
      union pipe_color_union color;
      color.f[0] = color.f[1] = color.f[2] = 0;
      color.f[3] = 1.0;
      for (unsigned i = 0; i < fb->nr_cbufs; i++) {
         if ((void_clears & (PIPE_CLEAR_COLOR0 << i)) && fb->cbufs[i]) {
            struct zink_framebuffer_clear *fb_clear = &ctx->fb_clears[i];
            unsigned num_clears = zink_fb_clear_count(fb_clear);
            if (num_clears) {
               if (zink_fb_clear_first_needs_explicit(fb_clear)) {
                  /* a scissored clear exists:
                   * - extend the clear array
                   * - shift the existing clears back by one position
                   * - inject the void clear at the base of the array
                   */
                  add_new_clear(fb_clear);
                  struct zink_framebuffer_clear_data *clear = fb_clear->clears.data;
                  memmove(clear + 1, clear, num_clears * sizeof(struct zink_framebuffer_clear_data));
                  memcpy(&clear->color, &color, sizeof(color));
               } else {
                  /* no void clear needed */
               }
               void_clears &= ~(PIPE_CLEAR_COLOR0 << i);
            }
         }
      }
      if (void_clears)
         pctx->clear(pctx, void_clears, NULL, &color, 0, 0);
   }

   if (buffers & PIPE_CLEAR_COLOR) {
      for (unsigned i = 0; i < fb->nr_cbufs; i++) {
         if ((buffers & (PIPE_CLEAR_COLOR0 << i)) && fb->cbufs[i]) {
            struct pipe_surface *psurf = fb->cbufs[i];
            struct zink_framebuffer_clear *fb_clear = &ctx->fb_clears[i];
            struct zink_framebuffer_clear_data *clear = get_clear_data(ctx, fb_clear, needs_rp ? scissor_state : NULL);

            ctx->clears_enabled |= PIPE_CLEAR_COLOR0 << i;
            clear->conditional = ctx->render_condition_active;
            clear->has_scissor = needs_rp;
            memcpy(&clear->color, pcolor, sizeof(union pipe_color_union));
            zink_convert_color(screen, psurf->format, &clear->color, pcolor);
            if (scissor_state && needs_rp)
               clear->scissor = *scissor_state;
            if (zink_fb_clear_first_needs_explicit(fb_clear))
               ctx->rp_clears_enabled &= ~(PIPE_CLEAR_COLOR0 << i);
            else
               ctx->rp_clears_enabled |= PIPE_CLEAR_COLOR0 << i;
         }
      }
   }

   if (buffers & PIPE_CLEAR_DEPTHSTENCIL && fb->zsbuf) {
      struct zink_framebuffer_clear *fb_clear = &ctx->fb_clears[PIPE_MAX_COLOR_BUFS];
      struct zink_framebuffer_clear_data *clear = get_clear_data(ctx, fb_clear, needs_rp ? scissor_state : NULL);
      ctx->clears_enabled |= PIPE_CLEAR_DEPTHSTENCIL;
      clear->conditional = ctx->render_condition_active;
      clear->has_scissor = needs_rp;
      if (scissor_state && needs_rp)
         clear->scissor = *scissor_state;
      if (buffers & PIPE_CLEAR_DEPTH)
         clear->zs.depth = depth;
      if (buffers & PIPE_CLEAR_STENCIL)
         clear->zs.stencil = stencil;
      clear->zs.bits |= (buffers & PIPE_CLEAR_DEPTHSTENCIL);
      if (zink_fb_clear_first_needs_explicit(fb_clear)) {
         ctx->rp_clears_enabled &= ~PIPE_CLEAR_DEPTHSTENCIL;
         if (!ctx->track_renderpasses)
            ctx->dynamic_fb.tc_info.zsbuf_clear_partial = true;
      } else {
         ctx->rp_clears_enabled |= (buffers & PIPE_CLEAR_DEPTHSTENCIL);
         if (!ctx->track_renderpasses)
            ctx->dynamic_fb.tc_info.zsbuf_clear = true;
      }
   }
   assert(!ctx->in_rp);
   ctx->rp_changed |= ctx->rp_clears_enabled != rp_clears_enabled;
}

static inline bool
colors_equal(union pipe_color_union *a, union pipe_color_union *b)
{
   return a->ui[0] == b->ui[0] && a->ui[1] == b->ui[1] && a->ui[2] == b->ui[2] && a->ui[3] == b->ui[3];
}

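/* apply the deferred clears for the given buffers, batching color attachments (and
 * the zs attachment) whose pending clear lists match into shared zink_clear() calls
 */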
void
zink_clear_framebuffer(struct zink_context *ctx, unsigned clear_buffers)
{
   unsigned to_clear = 0;
   struct pipe_framebuffer_state *fb_state = &ctx->fb_state;
#ifndef NDEBUG
   assert(!(clear_buffers & PIPE_CLEAR_DEPTHSTENCIL) || zink_fb_clear_enabled(ctx, PIPE_MAX_COLOR_BUFS));
   for (int i = 0; i < fb_state->nr_cbufs && clear_buffers >= PIPE_CLEAR_COLOR0; i++) {
      assert(!(clear_buffers & (PIPE_CLEAR_COLOR0 << i)) || zink_fb_clear_enabled(ctx, i));
   }
#endif
   while (clear_buffers) {
      struct zink_framebuffer_clear *color_clear = NULL;
      struct zink_framebuffer_clear *zs_clear = NULL;
      unsigned num_clears = 0;
      for (int i = 0; i < fb_state->nr_cbufs && clear_buffers >= PIPE_CLEAR_COLOR0; i++) {
         struct zink_framebuffer_clear *fb_clear = &ctx->fb_clears[i];
         /* these need actual clear calls inside the rp */
         if (!(clear_buffers & (PIPE_CLEAR_COLOR0 << i)))
            continue;
         if (color_clear) {
            /* different number of clears -> do another clear */
            //XXX: could potentially merge "some" of the clears into this one for a very, very small optimization
            if (num_clears != zink_fb_clear_count(fb_clear))
               goto out;
            /* compare all the clears to determine if we can batch these buffers together */
            for (int j = !zink_fb_clear_first_needs_explicit(fb_clear); j < num_clears; j++) {
               struct zink_framebuffer_clear_data *a = zink_fb_clear_element(color_clear, j);
               struct zink_framebuffer_clear_data *b = zink_fb_clear_element(fb_clear, j);
               /* scissors don't match, fire this one off */
               if (a->has_scissor != b->has_scissor || (a->has_scissor && !scissor_states_equal(&a->scissor, &b->scissor)))
                  goto out;

               /* colors don't match, fire this one off */
               if (!colors_equal(&a->color, &b->color))
                  goto out;
            }
         } else {
            color_clear = fb_clear;
            num_clears = zink_fb_clear_count(fb_clear);
         }

         clear_buffers &= ~(PIPE_CLEAR_COLOR0 << i);
         to_clear |= (PIPE_CLEAR_COLOR0 << i);
      }
      clear_buffers &= ~PIPE_CLEAR_COLOR;
      if (clear_buffers & PIPE_CLEAR_DEPTHSTENCIL) {
         struct zink_framebuffer_clear *fb_clear = &ctx->fb_clears[PIPE_MAX_COLOR_BUFS];
         if (color_clear) {
            if (num_clears != zink_fb_clear_count(fb_clear))
               goto out;
            /* compare all the clears to determine if we can batch these buffers together */
            for (int j = !zink_fb_clear_first_needs_explicit(fb_clear); j < zink_fb_clear_count(color_clear); j++) {
               struct zink_framebuffer_clear_data *a = zink_fb_clear_element(color_clear, j);
               struct zink_framebuffer_clear_data *b = zink_fb_clear_element(fb_clear, j);
               /* scissors don't match, fire this one off */
               if (a->has_scissor != b->has_scissor || (a->has_scissor && !scissor_states_equal(&a->scissor, &b->scissor)))
                  goto out;
            }
         }
         zs_clear = fb_clear;
         to_clear |= (clear_buffers & PIPE_CLEAR_DEPTHSTENCIL);
         clear_buffers &= ~PIPE_CLEAR_DEPTHSTENCIL;
      }
   out:
      if (to_clear) {
         if (num_clears) {
            for (int j = !zink_fb_clear_first_needs_explicit(color_clear); j < num_clears; j++) {
               struct zink_framebuffer_clear_data *clear = zink_fb_clear_element(color_clear, j);
               struct zink_framebuffer_clear_data *zsclear = NULL;
               /* zs bits are both set here if those aspects should be cleared at some point */
               unsigned clear_bits = to_clear & ~PIPE_CLEAR_DEPTHSTENCIL;
               if (zs_clear) {
                  zsclear = zink_fb_clear_element(zs_clear, j);
                  clear_bits |= zsclear->zs.bits;
               }
               zink_clear(&ctx->base, clear_bits,
                          clear->has_scissor ? &clear->scissor : NULL,
                          &clear->color,
                          zsclear ? zsclear->zs.depth : 0,
                          zsclear ? zsclear->zs.stencil : 0);
            }
         } else {
            for (int j = !zink_fb_clear_first_needs_explicit(zs_clear); j < zink_fb_clear_count(zs_clear); j++) {
               struct zink_framebuffer_clear_data *clear = zink_fb_clear_element(zs_clear, j);
               zink_clear(&ctx->base, clear->zs.bits,
                          clear->has_scissor ? &clear->scissor : NULL,
                          NULL,
                          clear->zs.depth,
                          clear->zs.stencil);
            }
         }
      }
      to_clear = 0;
   }
   if (ctx->clears_enabled & PIPE_CLEAR_DEPTHSTENCIL)
      zink_fb_clear_reset(ctx, PIPE_MAX_COLOR_BUFS);
   u_foreach_bit(i, ctx->clears_enabled >> 2)
      zink_fb_clear_reset(ctx, i);
}

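/* create a temporary surface for the given mip level covering the layer range of 'box' */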
static struct pipe_surface *
create_clear_surface(struct pipe_context *pctx, struct pipe_resource *pres, unsigned level, const struct pipe_box *box)
{
   struct pipe_surface tmpl = {{0}};

   tmpl.format = pres->format;
   tmpl.u.tex.first_layer = box->z;
   tmpl.u.tex.last_layer = box->z + box->depth - 1;
   tmpl.u.tex.level = level;
   return pctx->create_surface(pctx, pres, &tmpl);
}

static void
set_clear_fb(struct pipe_context *pctx, struct pipe_surface *psurf, struct pipe_surface *zsurf)
{
   struct pipe_framebuffer_state fb_state = {0};
   fb_state.width = psurf ? psurf->width : zsurf->width;
   fb_state.height = psurf ? psurf->height : zsurf->height;
   fb_state.nr_cbufs = !!psurf;
   fb_state.cbufs[0] = psurf;
   fb_state.zsbuf = zsurf;
   pctx->set_framebuffer_state(pctx, &fb_state);
}

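/* clear_texture variant using dynamic rendering: begin rendering on a temporary surface
 * and clear either via LOAD_OP_CLEAR (full-box clears) or vkCmdClearAttachments (partial clears)
 */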
void
zink_clear_texture_dynamic(struct pipe_context *pctx,
                           struct pipe_resource *pres,
                           unsigned level,
                           const struct pipe_box *box,
                           const void *data)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_screen *screen = zink_screen(pctx->screen);
   struct zink_resource *res = zink_resource(pres);

   bool full_clear = 0 <= box->x && u_minify(pres->width0, level) >= box->x + box->width &&
                     0 <= box->y && u_minify(pres->height0, level) >= box->y + box->height &&
                     0 <= box->z && u_minify(pres->target == PIPE_TEXTURE_3D ? pres->depth0 : pres->array_size, level) >= box->z + box->depth;

   struct pipe_surface *surf = create_clear_surface(pctx, pres, level, box);

   VkRenderingAttachmentInfo att = {0};
   att.sType = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO;
   att.imageView = zink_csurface(surf)->image_view;
   att.imageLayout = res->aspect & VK_IMAGE_ASPECT_COLOR_BIT ? VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL : VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
   att.loadOp = full_clear ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
   att.storeOp = VK_ATTACHMENT_STORE_OP_STORE;

   VkRenderingInfo info = {0};
   info.sType = VK_STRUCTURE_TYPE_RENDERING_INFO;
   info.renderArea.offset.x = box->x;
   info.renderArea.offset.y = box->y;
   info.renderArea.extent.width = box->width;
   info.renderArea.extent.height = box->height;
   info.layerCount = MAX2(box->depth, 1);

   union pipe_color_union color, tmp;
   float depth = 0.0;
   uint8_t stencil = 0;
   if (res->aspect & VK_IMAGE_ASPECT_COLOR_BIT) {
      util_format_unpack_rgba(pres->format, tmp.ui, data, 1);
      zink_convert_color(screen, surf->format, &color, &tmp);
   } else {
      if (res->aspect & VK_IMAGE_ASPECT_DEPTH_BIT)
         util_format_unpack_z_float(pres->format, &depth, data, 1);

      if (res->aspect & VK_IMAGE_ASPECT_STENCIL_BIT)
         util_format_unpack_s_8uint(pres->format, &stencil, data, 1);
   }

   zink_blit_barriers(ctx, NULL, res, full_clear);
   VkCommandBuffer cmdbuf = zink_get_cmdbuf(ctx, NULL, res);
   if (cmdbuf == ctx->bs->cmdbuf && ctx->in_rp)
      zink_batch_no_rp(ctx);

   if (res->aspect & VK_IMAGE_ASPECT_COLOR_BIT) {
      memcpy(&att.clearValue, &color, sizeof(float) * 4);
      info.colorAttachmentCount = 1;
      info.pColorAttachments = &att;
   } else {
      att.clearValue.depthStencil.depth = depth;
      att.clearValue.depthStencil.stencil = stencil;
      if (res->aspect & VK_IMAGE_ASPECT_DEPTH_BIT)
         info.pDepthAttachment = &att;
      if (res->aspect & VK_IMAGE_ASPECT_STENCIL_BIT)
         info.pStencilAttachment = &att;
   }
   VKCTX(CmdBeginRendering)(cmdbuf, &info);
   if (!full_clear) {
      VkClearRect rect;
      rect.rect = info.renderArea;
      rect.baseArrayLayer = box->z;
      rect.layerCount = box->depth;

      VkClearAttachment clear_att;
      clear_att.aspectMask = res->aspect;
      clear_att.colorAttachment = 0;
      clear_att.clearValue = att.clearValue;

      VKCTX(CmdClearAttachments)(cmdbuf, 1, &clear_att, 1, &rect);
   }
   VKCTX(CmdEndRendering)(cmdbuf);
   zink_batch_reference_resource_rw(ctx, res, true);
   /* this will never destroy the surface */
   pipe_surface_reference(&surf, NULL);
}

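/* clear_texture fallback using the gallium clear path: bind a temporary framebuffer
 * containing only the target surface and issue a scissored pctx->clear()
 */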
void
zink_clear_texture(struct pipe_context *pctx,
                   struct pipe_resource *pres,
                   unsigned level,
                   const struct pipe_box *box,
                   const void *data)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_resource *res = zink_resource(pres);
   struct pipe_surface *surf = NULL;
   struct pipe_scissor_state scissor = {box->x, box->y, box->x + box->width, box->y + box->height};

   if (res->aspect & VK_IMAGE_ASPECT_COLOR_BIT) {
      union pipe_color_union color;

      util_format_unpack_rgba(pres->format, color.ui, data, 1);

      surf = create_clear_surface(pctx, pres, level, box);
      util_blitter_save_framebuffer(ctx->blitter, &ctx->fb_state);
      set_clear_fb(pctx, surf, NULL);
      zink_blit_barriers(ctx, NULL, res, false);
      ctx->blitting = true;
      ctx->queries_disabled = true;
      pctx->clear(pctx, PIPE_CLEAR_COLOR0, &scissor, &color, 0, 0);
      util_blitter_restore_fb_state(ctx->blitter);
      ctx->queries_disabled = false;
      ctx->blitting = false;
   } else {
      float depth = 0.0;
      uint8_t stencil = 0;

      if (res->aspect & VK_IMAGE_ASPECT_DEPTH_BIT)
         util_format_unpack_z_float(pres->format, &depth, data, 1);

      if (res->aspect & VK_IMAGE_ASPECT_STENCIL_BIT)
         util_format_unpack_s_8uint(pres->format, &stencil, data, 1);

      unsigned flags = 0;
      if (res->aspect & VK_IMAGE_ASPECT_DEPTH_BIT)
         flags |= PIPE_CLEAR_DEPTH;
      if (res->aspect & VK_IMAGE_ASPECT_STENCIL_BIT)
         flags |= PIPE_CLEAR_STENCIL;
      surf = create_clear_surface(pctx, pres, level, box);
      util_blitter_save_framebuffer(ctx->blitter, &ctx->fb_state);
      zink_blit_barriers(ctx, NULL, res, false);
      ctx->blitting = true;
      set_clear_fb(pctx, NULL, surf);
      ctx->queries_disabled = true;
      pctx->clear(pctx, flags, &scissor, NULL, depth, stencil);
      util_blitter_restore_fb_state(ctx->blitter);
      ctx->queries_disabled = false;
      ctx->blitting = false;
   }
   /* this will never destroy the surface */
   pipe_surface_reference(&surf, NULL);
}

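/* clear_buffer hook: use vkCmdFillBuffer when offset/size/value meet its dword
 * requirements, otherwise fall back to a CPU fill through a mapped range
 */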
void
zink_clear_buffer(struct pipe_context *pctx,
                  struct pipe_resource *pres,
                  unsigned offset,
                  unsigned size,
                  const void *clear_value,
                  int clear_value_size)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_resource *res = zink_resource(pres);

   uint32_t clamped;
   if (util_lower_clearsize_to_dword(clear_value, &clear_value_size, &clamped))
      clear_value = &clamped;
   if (offset % 4 == 0 && size % 4 == 0 && clear_value_size == sizeof(uint32_t)) {
      /*
         - dstOffset is the byte offset into the buffer at which to start filling,
           and must be a multiple of 4.

         - size is the number of bytes to fill, and must be either a multiple of 4,
           or VK_WHOLE_SIZE to fill the range from offset to the end of the buffer.
       */
      zink_resource_buffer_transfer_dst_barrier(ctx, res, offset, size);
      VkCommandBuffer cmdbuf = zink_get_cmdbuf(ctx, NULL, res);
      zink_batch_reference_resource_rw(ctx, res, true);
      VKCTX(CmdFillBuffer)(cmdbuf, res->obj->buffer, offset, size, *(uint32_t*)clear_value);
      return;
   }
   struct pipe_transfer *xfer;
   uint8_t *map = pipe_buffer_map_range(pctx, pres, offset, size,
                                        PIPE_MAP_WRITE | PIPE_MAP_ONCE | PIPE_MAP_DISCARD_RANGE, &xfer);
   if (!map)
      return;
   unsigned rem = size % clear_value_size;
   uint8_t *ptr = map;
   for (unsigned i = 0; i < (size - rem) / clear_value_size; i++) {
      memcpy(ptr, clear_value, clear_value_size);
      ptr += clear_value_size;
   }
   if (rem)
      memcpy(map + size - rem, clear_value, rem);
   pipe_buffer_unmap(pctx, xfer);
}

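/* clear_render_target hook: temporarily bind the surface as the only color attachment and
 * clear it with a scissored pctx->clear(), suspending conditional rendering if the clear
 * must be unconditional
 */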
void
zink_clear_render_target(struct pipe_context *pctx, struct pipe_surface *dst,
                         const union pipe_color_union *color, unsigned dstx,
                         unsigned dsty, unsigned width, unsigned height,
                         bool render_condition_enabled)
{
   struct zink_context *ctx = zink_context(pctx);
   bool render_condition_active = ctx->render_condition_active;
   if (!render_condition_enabled && render_condition_active) {
      zink_stop_conditional_render(ctx);
      ctx->render_condition_active = false;
   }
   util_blitter_save_framebuffer(ctx->blitter, &ctx->fb_state);
   set_clear_fb(pctx, dst, NULL);
   struct pipe_scissor_state scissor = {dstx, dsty, dstx + width, dsty + height};
   zink_blit_barriers(ctx, NULL, zink_resource(dst->texture), false);
   ctx->blitting = true;
   pctx->clear(pctx, PIPE_CLEAR_COLOR0, &scissor, color, 0, 0);
   util_blitter_restore_fb_state(ctx->blitter);
   ctx->blitting = false;
   if (!render_condition_enabled && render_condition_active)
      zink_start_conditional_render(ctx);
   ctx->render_condition_active = render_condition_active;
}

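/* clear_depth_stencil hook: clear through the currently-bound zsbuf when the region fits,
 * otherwise bind the surface as a temporary framebuffer first (unless a blit is already in progress)
 */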
void
zink_clear_depth_stencil(struct pipe_context *pctx, struct pipe_surface *dst,
                         unsigned clear_flags, double depth, unsigned stencil,
                         unsigned dstx, unsigned dsty, unsigned width, unsigned height,
                         bool render_condition_enabled)
{
   struct zink_context *ctx = zink_context(pctx);
   /* check for stencil fallback */
   bool blitting = ctx->blitting;
   bool render_condition_active = ctx->render_condition_active;
   if (!render_condition_enabled && render_condition_active) {
      zink_stop_conditional_render(ctx);
      ctx->render_condition_active = false;
   }
   bool cur_attachment = zink_csurface(ctx->fb_state.zsbuf) == zink_csurface(dst);
   if (dstx > ctx->fb_state.width || dsty > ctx->fb_state.height ||
       dstx + width > ctx->fb_state.width ||
       dsty + height > ctx->fb_state.height)
      cur_attachment = false;
   if (!cur_attachment) {
      if (!blitting) {
         util_blitter_save_framebuffer(ctx->blitter, &ctx->fb_state);
         set_clear_fb(pctx, NULL, dst);
         zink_blit_barriers(ctx, NULL, zink_resource(dst->texture), false);
         ctx->blitting = true;
      }
   }
   struct pipe_scissor_state scissor = {dstx, dsty, dstx + width, dsty + height};
   pctx->clear(pctx, clear_flags, &scissor, NULL, depth, stencil);
   if (!cur_attachment && !blitting) {
      util_blitter_restore_fb_state(ctx->blitter);
      ctx->blitting = false;
   }
   if (!render_condition_enabled && render_condition_active)
      zink_start_conditional_render(ctx);
   ctx->render_condition_active = render_condition_active;
}

bool
zink_fb_clear_needs_explicit(struct zink_framebuffer_clear *fb_clear)
{
   if (zink_fb_clear_count(fb_clear) != 1)
      return true;
   return zink_fb_clear_element_needs_explicit(zink_fb_clear_element(fb_clear, 0));
}

bool
zink_fb_clear_first_needs_explicit(struct zink_framebuffer_clear *fb_clear)
{
   if (!zink_fb_clear_count(fb_clear))
      return false;
   return zink_fb_clear_element_needs_explicit(zink_fb_clear_element(fb_clear, 0));
}

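/* flush the deferred clears for framebuffer attachment 'i', either inside the current
 * renderpass or by starting one; when safe, the work is reordered onto the unordered cmdbuf
 */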
static void
fb_clears_apply_internal(struct zink_context *ctx, struct pipe_resource *pres, int i)
{
   if (!zink_fb_clear_enabled(ctx, i))
      return;
   if (ctx->in_rp)
      zink_clear_framebuffer(ctx, BITFIELD_BIT(i));
   else {
      struct zink_resource *res = zink_resource(pres);
      bool queries_disabled = ctx->queries_disabled;
      VkCommandBuffer cmdbuf = ctx->bs->cmdbuf;
      /* slightly different than the u_blitter handling:
       * this can be called recursively while unordered_blitting=true
       */
      bool can_reorder = zink_screen(ctx->base.screen)->info.have_KHR_dynamic_rendering &&
                         !ctx->render_condition_active &&
                         !ctx->unordered_blitting &&
                         zink_get_cmdbuf(ctx, NULL, res) == ctx->bs->reordered_cmdbuf;
      if (can_reorder) {
         /* set unordered_blitting but NOT blitting:
          * let begin_rendering handle layouts
          */
         ctx->unordered_blitting = true;
         /* for unordered clears, swap the unordered cmdbuf for the main one for the whole op to avoid conditional hell */
         ctx->bs->cmdbuf = ctx->bs->reordered_cmdbuf;
         ctx->rp_changed = true;
         ctx->queries_disabled = true;
      }
      /* this will automatically trigger all the clears */
      zink_batch_rp(ctx);
      if (can_reorder) {
         zink_batch_no_rp(ctx);
         ctx->unordered_blitting = false;
         ctx->rp_changed = true;
         ctx->queries_disabled = queries_disabled;
         ctx->bs->cmdbuf = cmdbuf;
      }
   }
   zink_fb_clear_reset(ctx, i);
}

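/* drop all deferred clears for attachment 'i', update the clear-tracking bitfields,
 * and flag a loadOp change if the renderpass clears were affected
 */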
void
zink_fb_clear_reset(struct zink_context *ctx, unsigned i)
{
   unsigned rp_clears_enabled = ctx->clears_enabled;
   util_dynarray_clear(&ctx->fb_clears[i].clears);
   if (i == PIPE_MAX_COLOR_BUFS) {
      ctx->clears_enabled &= ~PIPE_CLEAR_DEPTHSTENCIL;
      ctx->rp_clears_enabled &= ~PIPE_CLEAR_DEPTHSTENCIL;
   } else {
      ctx->clears_enabled &= ~(PIPE_CLEAR_COLOR0 << i);
      ctx->rp_clears_enabled &= ~(PIPE_CLEAR_COLOR0 << i);
   }
   if (ctx->rp_clears_enabled != rp_clears_enabled)
      ctx->rp_loadop_changed = true;
}

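/* flush any deferred clears whose attachment is backed by 'pres' (zink_fb_clears_discard
 * below drops them instead)
 */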
void
zink_fb_clears_apply(struct zink_context *ctx, struct pipe_resource *pres)
{
   if (zink_resource(pres)->aspect == VK_IMAGE_ASPECT_COLOR_BIT) {
      for (int i = 0; i < ctx->fb_state.nr_cbufs; i++) {
         if (ctx->fb_state.cbufs[i] && ctx->fb_state.cbufs[i]->texture == pres) {
            fb_clears_apply_internal(ctx, pres, i);
         }
      }
   } else {
      if (ctx->fb_state.zsbuf && ctx->fb_state.zsbuf->texture == pres) {
         fb_clears_apply_internal(ctx, pres, PIPE_MAX_COLOR_BUFS);
      }
   }
}

void
zink_fb_clears_discard(struct zink_context *ctx, struct pipe_resource *pres)
{
   if (zink_resource(pres)->aspect == VK_IMAGE_ASPECT_COLOR_BIT) {
      for (int i = 0; i < ctx->fb_state.nr_cbufs; i++) {
         if (ctx->fb_state.cbufs[i] && ctx->fb_state.cbufs[i]->texture == pres) {
            if (zink_fb_clear_enabled(ctx, i)) {
               zink_fb_clear_reset(ctx, i);
            }
         }
      }
   } else {
      if (zink_fb_clear_enabled(ctx, PIPE_MAX_COLOR_BUFS) && ctx->fb_state.zsbuf && ctx->fb_state.zsbuf->texture == pres) {
         int i = PIPE_MAX_COLOR_BUFS;
         zink_fb_clear_reset(ctx, i);
      }
   }
}

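/* flush (or drop, if the attachment is no longer bound) any deferred clears that were
 * recorded while a render condition was active
 */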
void
zink_clear_apply_conditionals(struct zink_context *ctx)
{
   for (int i = 0; i < ARRAY_SIZE(ctx->fb_clears); i++) {
      struct zink_framebuffer_clear *fb_clear = &ctx->fb_clears[i];
      if (!zink_fb_clear_enabled(ctx, i))
         continue;
      for (int j = 0; j < zink_fb_clear_count(fb_clear); j++) {
         struct zink_framebuffer_clear_data *clear = zink_fb_clear_element(fb_clear, j);
         if (clear->conditional) {
            struct pipe_surface *surf;
            if (i < PIPE_MAX_COLOR_BUFS)
               surf = ctx->fb_state.cbufs[i];
            else
               surf = ctx->fb_state.zsbuf;
            if (surf)
               fb_clears_apply_internal(ctx, surf->texture, i);
            else
               zink_fb_clear_reset(ctx, i);
            break;
         }
      }
   }
}

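/* reconcile deferred clears on attachment 'i' with a pending access to 'region':
 * clears the region fully covers can be discarded, the rest are flushed unless
 * discard_only is set; 'invert' forces flushing for the apply_region path
 */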
static void
fb_clears_apply_or_discard_internal(struct zink_context *ctx, struct pipe_resource *pres, struct u_rect region, bool discard_only, bool invert, int i)
{
   struct zink_framebuffer_clear *fb_clear = &ctx->fb_clears[i];
   if (zink_fb_clear_enabled(ctx, i)) {
      if (zink_blit_region_fills(region, pres->width0, pres->height0)) {
         if (invert)
            fb_clears_apply_internal(ctx, pres, i);
         else
            /* we know we can skip these */
            zink_fb_clears_discard(ctx, pres);
         return;
      }
      for (int j = 0; j < zink_fb_clear_count(fb_clear); j++) {
         struct zink_framebuffer_clear_data *clear = zink_fb_clear_element(fb_clear, j);
         struct u_rect scissor = {clear->scissor.minx, clear->scissor.maxx,
                                  clear->scissor.miny, clear->scissor.maxy};
         if (!clear->has_scissor || zink_blit_region_covers(region, scissor)) {
            /* this is a clear that isn't fully covered by our pending write */
            if (!discard_only)
               fb_clears_apply_internal(ctx, pres, i);
            return;
         }
      }
      if (!invert)
         /* if we haven't already returned, then we know we can discard */
         zink_fb_clears_discard(ctx, pres);
   }
}

void
zink_fb_clears_apply_or_discard(struct zink_context *ctx, struct pipe_resource *pres, struct u_rect region, bool discard_only)
{
   if (zink_resource(pres)->aspect == VK_IMAGE_ASPECT_COLOR_BIT) {
      for (int i = 0; i < ctx->fb_state.nr_cbufs; i++) {
         if (ctx->fb_state.cbufs[i] && ctx->fb_state.cbufs[i]->texture == pres) {
            fb_clears_apply_or_discard_internal(ctx, pres, region, discard_only, false, i);
         }
      }
   } else {
      if (zink_fb_clear_enabled(ctx, PIPE_MAX_COLOR_BUFS) && ctx->fb_state.zsbuf && ctx->fb_state.zsbuf->texture == pres) {
         fb_clears_apply_or_discard_internal(ctx, pres, region, discard_only, false, PIPE_MAX_COLOR_BUFS);
      }
   }
}

void
zink_fb_clears_apply_region(struct zink_context *ctx, struct pipe_resource *pres, struct u_rect region)
{
   if (zink_resource(pres)->aspect == VK_IMAGE_ASPECT_COLOR_BIT) {
      for (int i = 0; i < ctx->fb_state.nr_cbufs; i++) {
         if (ctx->fb_state.cbufs[i] && ctx->fb_state.cbufs[i]->texture == pres) {
            fb_clears_apply_or_discard_internal(ctx, pres, region, false, true, i);
         }
      }
   } else {
      if (ctx->fb_state.zsbuf && ctx->fb_state.zsbuf->texture == pres) {
         fb_clears_apply_or_discard_internal(ctx, pres, region, false, true, PIPE_MAX_COLOR_BUFS);
      }
   }
}

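/* when a surface's format changes (srgb-ness or signedness), repack pending clear colors
 * so the stored raw values still yield the intended result with the new format
 */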
void
zink_fb_clear_rewrite(struct zink_context *ctx, unsigned idx, enum pipe_format before, enum pipe_format after)
{
   /* if the values for the clear color are incompatible, they must be rewritten;
    * this occurs if:
    * - the formats' srgb-ness does not match
    * - the formats' signedness does not match
    */
   const struct util_format_description *bdesc = util_format_description(before);
   const struct util_format_description *adesc = util_format_description(after);
   int bfirst_non_void_chan = util_format_get_first_non_void_channel(before);
   int afirst_non_void_chan = util_format_get_first_non_void_channel(after);
   bool bsigned = false, asigned = false;
   if (bfirst_non_void_chan > 0)
      bsigned = bdesc->channel[bfirst_non_void_chan].type == UTIL_FORMAT_TYPE_SIGNED;
   if (afirst_non_void_chan > 0)
      asigned = adesc->channel[afirst_non_void_chan].type == UTIL_FORMAT_TYPE_SIGNED;
   if (util_format_is_srgb(before) == util_format_is_srgb(after) &&
       bsigned == asigned)
      return;
   struct zink_framebuffer_clear *fb_clear = &ctx->fb_clears[idx];
   for (int j = 0; j < zink_fb_clear_count(fb_clear); j++) {
      struct zink_framebuffer_clear_data *clear = zink_fb_clear_element(fb_clear, j);
      uint32_t data[4];
      util_format_pack_rgba(before, data, clear->color.ui, 1);
      util_format_unpack_rgba(after, clear->color.ui, data, 1);
   }
}