/*
 * Copyright © 2021 Collabora Ltd.
 *
 * Derived from tu_cmd_buffer.c which is:
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include "panvk_private.h"

#include "pan_encoder.h"

#include "util/rounding.h"
#include "vk_format.h"

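/* Vertex-buffer bindings are only recorded in the command-buffer state; the
 * cached vertex-shader attribute buffer descriptors are invalidated so they
 * get rebuilt with the new bindings on the next draw.
 */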
void
panvk_CmdBindVertexBuffers(VkCommandBuffer commandBuffer,
                           uint32_t firstBinding,
                           uint32_t bindingCount,
                           const VkBuffer *pBuffers,
                           const VkDeviceSize *pOffsets)
{
   VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
   struct panvk_descriptor_state *desc_state =
      panvk_cmd_get_desc_state(cmdbuf, GRAPHICS);

   assert(firstBinding + bindingCount <= MAX_VBS);

   for (uint32_t i = 0; i < bindingCount; i++) {
      VK_FROM_HANDLE(panvk_buffer, buffer, pBuffers[i]);

      cmdbuf->state.vb.bufs[firstBinding + i].address =
         panvk_buffer_gpu_ptr(buffer, pOffsets[i]);
      cmdbuf->state.vb.bufs[firstBinding + i].size =
         panvk_buffer_range(buffer, pOffsets[i], VK_WHOLE_SIZE);
   }

   cmdbuf->state.vb.count = MAX2(cmdbuf->state.vb.count,
                                 firstBinding + bindingCount);
   desc_state->vs_attrib_bufs = desc_state->vs_attribs = 0;
}

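/* Record the bound index buffer and translate the Vulkan index type into an
 * index size in bits (0 meaning no index buffer).
 */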
void
panvk_CmdBindIndexBuffer(VkCommandBuffer commandBuffer,
                         VkBuffer buffer,
                         VkDeviceSize offset,
                         VkIndexType indexType)
{
   VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
   VK_FROM_HANDLE(panvk_buffer, buf, buffer);

   cmdbuf->state.ib.buffer = buf;
   cmdbuf->state.ib.offset = offset;
   switch (indexType) {
   case VK_INDEX_TYPE_UINT16:
      cmdbuf->state.ib.index_size = 16;
      break;
   case VK_INDEX_TYPE_UINT32:
      cmdbuf->state.ib.index_size = 32;
      break;
   case VK_INDEX_TYPE_NONE_KHR:
      cmdbuf->state.ib.index_size = 0;
      break;
   case VK_INDEX_TYPE_UINT8_EXT:
      cmdbuf->state.ib.index_size = 8;
      break;
   default:
      unreachable("Invalid index type\n");
   }
}

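/* Dynamic SSBO addresses are exposed to shaders through sysvals. Resolve the
 * buffer descriptors into GPU pointer/size pairs and invalidate the cached
 * sysvals pointer so the sysval UBO gets re-uploaded.
 */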
static void
panvk_set_dyn_ssbo_pointers(struct panvk_descriptor_state *desc_state,
                            unsigned dyn_ssbo_offset,
                            struct panvk_descriptor_set *set)
{
   struct panvk_sysvals *sysvals = &desc_state->sysvals;

   for (unsigned i = 0; i < set->layout->num_dyn_ssbos; i++) {
      const struct panvk_buffer_desc *ssbo =
         &desc_state->dyn.ssbos[dyn_ssbo_offset + i];

      sysvals->dyn_ssbos[dyn_ssbo_offset + i] = (struct panvk_ssbo_addr) {
         .base_addr = panvk_buffer_gpu_ptr(ssbo->buffer, ssbo->offset),
         .size = panvk_buffer_range(ssbo->buffer, ssbo->offset, ssbo->size),
      };
   }

   desc_state->sysvals_ptr = 0;
}

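/* Bind descriptor sets and resolve dynamic UBO/SSBO descriptors by applying
 * the dynamic offsets. Any state derived from the bound sets (UBOs, textures,
 * samplers, image attribute buffers) is invalidated so it gets re-emitted at
 * draw/dispatch time.
 */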
void
panvk_CmdBindDescriptorSets(VkCommandBuffer commandBuffer,
                            VkPipelineBindPoint pipelineBindPoint,
                            VkPipelineLayout layout,
                            uint32_t firstSet,
                            uint32_t descriptorSetCount,
                            const VkDescriptorSet *pDescriptorSets,
                            uint32_t dynamicOffsetCount,
                            const uint32_t *pDynamicOffsets)
{
   VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
   VK_FROM_HANDLE(panvk_pipeline_layout, playout, layout);

   struct panvk_descriptor_state *descriptors_state =
      &cmdbuf->bind_points[pipelineBindPoint].desc_state;

   unsigned dynoffset_idx = 0;
   for (unsigned i = 0; i < descriptorSetCount; ++i) {
      unsigned idx = i + firstSet;
      VK_FROM_HANDLE(panvk_descriptor_set, set, pDescriptorSets[i]);

      descriptors_state->sets[idx] = set;

      if (set->layout->num_dyn_ssbos || set->layout->num_dyn_ubos) {
         unsigned dyn_ubo_offset = playout->sets[idx].dyn_ubo_offset;
         unsigned dyn_ssbo_offset = playout->sets[idx].dyn_ssbo_offset;

         for (unsigned b = 0; b < set->layout->binding_count; b++) {
            for (unsigned e = 0; e < set->layout->bindings[b].array_size; e++) {
               struct panvk_buffer_desc *bdesc = NULL;

               if (set->layout->bindings[b].type ==
                   VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) {
                  bdesc = &descriptors_state->dyn.ubos[dyn_ubo_offset++];
                  *bdesc = set->dyn_ubos[set->layout->bindings[b].dyn_ubo_idx + e];
               } else if (set->layout->bindings[b].type ==
                          VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
                  bdesc = &descriptors_state->dyn.ssbos[dyn_ssbo_offset++];
                  *bdesc = set->dyn_ssbos[set->layout->bindings[b].dyn_ssbo_idx + e];
               }

               if (bdesc) {
                  bdesc->offset += pDynamicOffsets[dynoffset_idx++];
               }
            }
         }
      }

      if (set->layout->num_dyn_ssbos) {
         panvk_set_dyn_ssbo_pointers(descriptors_state,
                                     playout->sets[idx].dyn_ssbo_offset,
                                     set);
         descriptors_state->dirty |= PANVK_DYNAMIC_SSBO;
      }

      if (set->layout->num_ubos || set->layout->num_dyn_ubos ||
          set->layout->num_dyn_ssbos || set->layout->desc_ubo_size)
         descriptors_state->ubos = 0;

      if (set->layout->num_textures)
         descriptors_state->textures = 0;

      if (set->layout->num_samplers)
         descriptors_state->samplers = 0;

      if (set->layout->num_imgs) {
         descriptors_state->vs_attrib_bufs =
            descriptors_state->non_vs_attrib_bufs = 0;
         descriptors_state->vs_attribs = descriptors_state->non_vs_attribs = 0;
      }
   }

   assert(dynoffset_idx == dynamicOffsetCount);
}

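/* Copy the push-constant data into the command buffer and invalidate the
 * cached push-constant buffer and UBO array for the graphics and/or compute
 * bind points touched by stageFlags.
 */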
void
panvk_CmdPushConstants(VkCommandBuffer commandBuffer,
                       VkPipelineLayout layout,
                       VkShaderStageFlags stageFlags,
                       uint32_t offset,
                       uint32_t size,
                       const void *pValues)
{
   VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);

   memcpy(cmdbuf->push_constants + offset, pValues, size);

   if (stageFlags & VK_SHADER_STAGE_ALL_GRAPHICS) {
      struct panvk_descriptor_state *desc_state =
         panvk_cmd_get_desc_state(cmdbuf, GRAPHICS);

      desc_state->ubos = 0;
      desc_state->push_constants = 0;
   }

   if (stageFlags & VK_SHADER_STAGE_COMPUTE_BIT) {
      struct panvk_descriptor_state *desc_state =
         panvk_cmd_get_desc_state(cmdbuf, COMPUTE);

      desc_state->ubos = 0;
      desc_state->push_constants = 0;
   }
}

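/* Binding a pipeline invalidates the cached fragment RSD and, for graphics
 * pipelines, latches the pipeline's static viewport/scissor state when those
 * states are not dynamic.
 */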
void
panvk_CmdBindPipeline(VkCommandBuffer commandBuffer,
                      VkPipelineBindPoint pipelineBindPoint,
                      VkPipeline _pipeline)
{
   VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
   VK_FROM_HANDLE(panvk_pipeline, pipeline, _pipeline);

   cmdbuf->bind_points[pipelineBindPoint].pipeline = pipeline;
   cmdbuf->state.fs_rsd = 0;

   if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS) {
      cmdbuf->state.varyings = pipeline->varyings;

      if (!(pipeline->dynamic_state_mask &
            BITFIELD_BIT(VK_DYNAMIC_STATE_VIEWPORT))) {
         cmdbuf->state.viewport = pipeline->viewport;
         cmdbuf->state.dirty |= PANVK_DYNAMIC_VIEWPORT;
      }
      if (!(pipeline->dynamic_state_mask &
            BITFIELD_BIT(VK_DYNAMIC_STATE_SCISSOR))) {
         cmdbuf->state.scissor = pipeline->scissor;
         cmdbuf->state.dirty |= PANVK_DYNAMIC_SCISSOR;
      }
   }

   /* Sysvals are passed through UBOs, so we need to dirty the UBO array if
    * the pipeline contains shaders using sysvals.
    */
   cmdbuf->bind_points[pipelineBindPoint].desc_state.ubos = 0;
}

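/* Only a single viewport/scissor is supported, hence the asserts in the two
 * entry points below.
 */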
void
panvk_CmdSetViewport(VkCommandBuffer commandBuffer,
                     uint32_t firstViewport,
                     uint32_t viewportCount,
                     const VkViewport *pViewports)
{
   VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
   assert(viewportCount == 1);
   assert(!firstViewport);

   cmdbuf->state.viewport = pViewports[0];
   cmdbuf->state.vpd = 0;
   cmdbuf->state.dirty |= PANVK_DYNAMIC_VIEWPORT;
}

void
panvk_CmdSetScissor(VkCommandBuffer commandBuffer,
                    uint32_t firstScissor,
                    uint32_t scissorCount,
                    const VkRect2D *pScissors)
{
   VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
   assert(scissorCount == 1);
   assert(!firstScissor);

   cmdbuf->state.scissor = pScissors[0];
   cmdbuf->state.vpd = 0;
   cmdbuf->state.dirty |= PANVK_DYNAMIC_SCISSOR;
}

void
panvk_CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
{
   VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);

   cmdbuf->state.rast.line_width = lineWidth;
   cmdbuf->state.dirty |= PANVK_DYNAMIC_LINE_WIDTH;
}

void
panvk_CmdSetDepthBias(VkCommandBuffer commandBuffer,
                      float depthBiasConstantFactor,
                      float depthBiasClamp,
                      float depthBiasSlopeFactor)
{
   VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);

   cmdbuf->state.rast.depth_bias.constant_factor = depthBiasConstantFactor;
   cmdbuf->state.rast.depth_bias.clamp = depthBiasClamp;
   cmdbuf->state.rast.depth_bias.slope_factor = depthBiasSlopeFactor;
   cmdbuf->state.dirty |= PANVK_DYNAMIC_DEPTH_BIAS;
   cmdbuf->state.fs_rsd = 0;
}

void
panvk_CmdSetBlendConstants(VkCommandBuffer commandBuffer,
                           const float blendConstants[4])
{
   VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);

   for (unsigned i = 0; i < 4; i++)
      cmdbuf->state.blend.constants[i] = CLAMP(blendConstants[i], 0.0f, 1.0f);

   cmdbuf->state.dirty |= PANVK_DYNAMIC_BLEND_CONSTANTS;
   cmdbuf->state.fs_rsd = 0;
}

void
panvk_CmdSetDepthBounds(VkCommandBuffer commandBuffer,
                        float minDepthBounds,
                        float maxDepthBounds)
{
   panvk_stub();
}

void
panvk_CmdSetStencilCompareMask(VkCommandBuffer commandBuffer,
                               VkStencilFaceFlags faceMask,
                               uint32_t compareMask)
{
   VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);

   if (faceMask & VK_STENCIL_FACE_FRONT_BIT)
      cmdbuf->state.zs.s_front.compare_mask = compareMask;

   if (faceMask & VK_STENCIL_FACE_BACK_BIT)
      cmdbuf->state.zs.s_back.compare_mask = compareMask;

   cmdbuf->state.dirty |= PANVK_DYNAMIC_STENCIL_COMPARE_MASK;
   cmdbuf->state.fs_rsd = 0;
}

void
panvk_CmdSetStencilWriteMask(VkCommandBuffer commandBuffer,
                             VkStencilFaceFlags faceMask,
                             uint32_t writeMask)
{
   VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);

   if (faceMask & VK_STENCIL_FACE_FRONT_BIT)
      cmdbuf->state.zs.s_front.write_mask = writeMask;

   if (faceMask & VK_STENCIL_FACE_BACK_BIT)
      cmdbuf->state.zs.s_back.write_mask = writeMask;

   cmdbuf->state.dirty |= PANVK_DYNAMIC_STENCIL_WRITE_MASK;
   cmdbuf->state.fs_rsd = 0;
}

void
panvk_CmdSetStencilReference(VkCommandBuffer commandBuffer,
                             VkStencilFaceFlags faceMask,
                             uint32_t reference)
{
   VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);

   if (faceMask & VK_STENCIL_FACE_FRONT_BIT)
      cmdbuf->state.zs.s_front.ref = reference;

   if (faceMask & VK_STENCIL_FACE_BACK_BIT)
      cmdbuf->state.zs.s_back.ref = reference;

   cmdbuf->state.dirty |= PANVK_DYNAMIC_STENCIL_REFERENCE;
   cmdbuf->state.fs_rsd = 0;
}

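/* Command pools embed dedicated BO pools for the descriptor, varying and TLS
 * memory used by the command buffers they allocate.
 */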
VkResult
panvk_CreateCommandPool(VkDevice _device,
                        const VkCommandPoolCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkCommandPool *pCmdPool)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   struct panvk_cmd_pool *pool;

   pool = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*pool), 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (pool == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   VkResult result = vk_command_pool_init(&pool->vk, &device->vk,
                                          pCreateInfo, pAllocator);
   if (result != VK_SUCCESS) {
      vk_free2(&device->vk.alloc, pAllocator, pool);
      return result;
   }

   list_inithead(&pool->active_cmd_buffers);
   list_inithead(&pool->free_cmd_buffers);

   panvk_bo_pool_init(&pool->desc_bo_pool);
   panvk_bo_pool_init(&pool->varying_bo_pool);
   panvk_bo_pool_init(&pool->tls_bo_pool);
   *pCmdPool = panvk_cmd_pool_to_handle(pool);
   return VK_SUCCESS;
}

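/* Convert the Vulkan clear values into the per-attachment clear values used
 * by the framebuffer setup code. Attachments that are not cleared get a
 * zero-initialized clear value.
 */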
static void
panvk_cmd_prepare_clear_values(struct panvk_cmd_buffer *cmdbuf,
                               const VkClearValue *in)
{
   for (unsigned i = 0; i < cmdbuf->state.pass->attachment_count; i++) {
      const struct panvk_render_pass_attachment *attachment =
         &cmdbuf->state.pass->attachments[i];
      enum pipe_format fmt = attachment->format;

      if (util_format_is_depth_or_stencil(fmt)) {
         if (attachment->load_op == VK_ATTACHMENT_LOAD_OP_CLEAR ||
             attachment->stencil_load_op == VK_ATTACHMENT_LOAD_OP_CLEAR) {
            cmdbuf->state.clear[i].depth = in[i].depthStencil.depth;
            cmdbuf->state.clear[i].stencil = in[i].depthStencil.stencil;
         } else {
            cmdbuf->state.clear[i].depth = 0;
            cmdbuf->state.clear[i].stencil = 0;
         }
      } else {
         if (attachment->load_op == VK_ATTACHMENT_LOAD_OP_CLEAR) {
            union pipe_color_union *col =
               (union pipe_color_union *) &in[i].color;
            pan_pack_color(cmdbuf->state.clear[i].color, col, fmt, false);
         } else {
            memset(cmdbuf->state.clear[i].color, 0,
                   sizeof(cmdbuf->state.clear[0].color));
         }
      }
   }
}

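/* Fill the pan_fb_info render-target and ZS entries from the current subpass:
 * attachment views, clear/preload flags, clear values and the effective
 * sample count.
 */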
void
panvk_cmd_fb_info_set_subpass(struct panvk_cmd_buffer *cmdbuf)
{
   const struct panvk_subpass *subpass = cmdbuf->state.subpass;
   struct pan_fb_info *fbinfo = &cmdbuf->state.fb.info;
   const struct panvk_framebuffer *fb = cmdbuf->state.framebuffer;
   const struct panvk_clear_value *clears = cmdbuf->state.clear;
   struct panvk_image_view *view;

   fbinfo->nr_samples = 1;
   fbinfo->rt_count = subpass->color_count;
   memset(&fbinfo->bifrost.pre_post.dcds, 0,
          sizeof(fbinfo->bifrost.pre_post.dcds));

   for (unsigned cb = 0; cb < subpass->color_count; cb++) {
      int idx = subpass->color_attachments[cb].idx;
      view = idx != VK_ATTACHMENT_UNUSED ?
             fb->attachments[idx].iview : NULL;
      if (!view)
         continue;
      fbinfo->rts[cb].view = &view->pview;
      fbinfo->rts[cb].clear = subpass->color_attachments[cb].clear;
      fbinfo->rts[cb].preload = subpass->color_attachments[cb].preload;
      fbinfo->rts[cb].crc_valid = &cmdbuf->state.fb.crc_valid[cb];

      memcpy(fbinfo->rts[cb].clear_value, clears[idx].color,
             sizeof(fbinfo->rts[cb].clear_value));
      fbinfo->nr_samples =
         MAX2(fbinfo->nr_samples, view->pview.image->layout.nr_samples);
   }

   if (subpass->zs_attachment.idx != VK_ATTACHMENT_UNUSED) {
      view = fb->attachments[subpass->zs_attachment.idx].iview;
      const struct util_format_description *fdesc =
         util_format_description(view->pview.format);

      fbinfo->nr_samples =
         MAX2(fbinfo->nr_samples, view->pview.image->layout.nr_samples);

      if (util_format_has_depth(fdesc)) {
         fbinfo->zs.clear.z = subpass->zs_attachment.clear;
         fbinfo->zs.clear_value.depth = clears[subpass->zs_attachment.idx].depth;
         fbinfo->zs.view.zs = &view->pview;
      }

      if (util_format_has_stencil(fdesc)) {
         fbinfo->zs.clear.s = subpass->zs_attachment.clear;
         fbinfo->zs.clear_value.stencil =
            clears[subpass->zs_attachment.idx].stencil;
         if (!fbinfo->zs.view.zs)
            fbinfo->zs.view.s = &view->pview;
      }
   }
}

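/* Reset the framebuffer info to the framebuffer dimensions and invalidate the
 * per-RT CRC state; the per-subpass fields are filled by
 * panvk_cmd_fb_info_set_subpass().
 */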
void
panvk_cmd_fb_info_init(struct panvk_cmd_buffer *cmdbuf)
{
   struct pan_fb_info *fbinfo = &cmdbuf->state.fb.info;
   const struct panvk_framebuffer *fb = cmdbuf->state.framebuffer;

   memset(cmdbuf->state.fb.crc_valid, 0, sizeof(cmdbuf->state.fb.crc_valid));

   *fbinfo = (struct pan_fb_info) {
      .width = fb->width,
      .height = fb->height,
      .extent.maxx = fb->width - 1,
      .extent.maxy = fb->height - 1,
   };
}

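/* Beginning a render pass allocates a new batch and the per-attachment clear
 * values, then sets up the framebuffer info for the first subpass.
 */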
void
panvk_CmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                          const VkRenderPassBeginInfo *pRenderPassBegin,
                          const VkSubpassBeginInfo *pSubpassBeginInfo)
{
   VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
   VK_FROM_HANDLE(panvk_render_pass, pass, pRenderPassBegin->renderPass);
   VK_FROM_HANDLE(panvk_framebuffer, fb, pRenderPassBegin->framebuffer);

   cmdbuf->state.pass = pass;
   cmdbuf->state.subpass = pass->subpasses;
   cmdbuf->state.framebuffer = fb;
   cmdbuf->state.render_area = pRenderPassBegin->renderArea;
   cmdbuf->state.batch = vk_zalloc(&cmdbuf->pool->vk.alloc,
                                   sizeof(*cmdbuf->state.batch), 8,
                                   VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
   util_dynarray_init(&cmdbuf->state.batch->jobs, NULL);
   util_dynarray_init(&cmdbuf->state.batch->event_ops, NULL);
   assert(pRenderPassBegin->clearValueCount <= pass->attachment_count);
   cmdbuf->state.clear =
      vk_zalloc(&cmdbuf->pool->vk.alloc,
                sizeof(*cmdbuf->state.clear) * pass->attachment_count,
                8, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
   panvk_cmd_prepare_clear_values(cmdbuf, pRenderPassBegin->pClearValues);
   panvk_cmd_fb_info_init(cmdbuf);
   panvk_cmd_fb_info_set_subpass(cmdbuf);
}

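/* When a render pass is split into several batches, the attachments that were
 * already rendered to must be preloaded (not cleared) by the follow-up
 * batches.
 */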
void
panvk_cmd_preload_fb_after_batch_split(struct panvk_cmd_buffer *cmdbuf)
{
   for (unsigned i = 0; i < cmdbuf->state.fb.info.rt_count; i++) {
      if (cmdbuf->state.fb.info.rts[i].view) {
         cmdbuf->state.fb.info.rts[i].clear = false;
         cmdbuf->state.fb.info.rts[i].preload = true;
      }
   }

   if (cmdbuf->state.fb.info.zs.view.zs) {
      cmdbuf->state.fb.info.zs.clear.z = false;
      cmdbuf->state.fb.info.zs.preload.z = true;
   }

   if (cmdbuf->state.fb.info.zs.view.s ||
       (cmdbuf->state.fb.info.zs.view.zs &&
        util_format_is_depth_and_stencil(cmdbuf->state.fb.info.zs.view.zs->format))) {
      cmdbuf->state.fb.info.zs.clear.s = false;
      cmdbuf->state.fb.info.zs.preload.s = true;
   }
}

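/* Allocate and install a fresh batch on the command buffer. The caller is
 * expected to have closed the previous batch (see the assert).
 */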
struct panvk_batch *
panvk_cmd_open_batch(struct panvk_cmd_buffer *cmdbuf)
{
   assert(!cmdbuf->state.batch);
   cmdbuf->state.batch = vk_zalloc(&cmdbuf->pool->vk.alloc,
                                   sizeof(*cmdbuf->state.batch), 8,
                                   VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
   assert(cmdbuf->state.batch);
   return cmdbuf->state.batch;
}

void
panvk_CmdDrawIndirect(VkCommandBuffer commandBuffer,
                      VkBuffer _buffer,
                      VkDeviceSize offset,
                      uint32_t drawCount,
                      uint32_t stride)
{
   panvk_stub();
}

void
panvk_CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer,
                             VkBuffer _buffer,
                             VkDeviceSize offset,
                             uint32_t drawCount,
                             uint32_t stride)
{
   panvk_stub();
}

void
panvk_CmdDispatchBase(VkCommandBuffer commandBuffer,
                      uint32_t base_x,
                      uint32_t base_y,
                      uint32_t base_z,
                      uint32_t x,
                      uint32_t y,
                      uint32_t z)
{
   panvk_stub();
}

void
panvk_CmdDispatchIndirect(VkCommandBuffer commandBuffer,
                          VkBuffer _buffer,
                          VkDeviceSize offset)
{
   panvk_stub();
}

void
panvk_CmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask)
{
   panvk_stub();
}