1 /*
2 * Copyright © 2021 Collabora Ltd.
3 *
4 * Derived from tu_cmd_buffer.c which is:
5 * Copyright © 2016 Red Hat.
6 * Copyright © 2016 Bas Nieuwenhuizen
7 * Copyright © 2015 Intel Corporation
8 *
9 * Permission is hereby granted, free of charge, to any person obtaining a
10 * copy of this software and associated documentation files (the "Software"),
11 * to deal in the Software without restriction, including without limitation
12 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
13 * and/or sell copies of the Software, and to permit persons to whom the
14 * Software is furnished to do so, subject to the following conditions:
15 *
16 * The above copyright notice and this permission notice (including the next
17 * paragraph) shall be included in all copies or substantial portions of the
18 * Software.
19 *
20 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
21 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
22 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
23 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
24 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
25 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
26 * DEALINGS IN THE SOFTWARE.
27 */
28
29 #include "panvk_private.h"
30
31 #include "pan_encoder.h"
32 #include "pan_props.h"
33
34 #include "util/rounding.h"
35 #include "vk_format.h"
36
37 void
panvk_CmdBindVertexBuffers(VkCommandBuffer commandBuffer,uint32_t firstBinding,uint32_t bindingCount,const VkBuffer * pBuffers,const VkDeviceSize * pOffsets)38 panvk_CmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
39 uint32_t bindingCount, const VkBuffer *pBuffers,
40 const VkDeviceSize *pOffsets)
41 {
42 VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
43 struct panvk_descriptor_state *desc_state =
44 panvk_cmd_get_desc_state(cmdbuf, GRAPHICS);
45
46 assert(firstBinding + bindingCount <= MAX_VBS);
47
48 for (uint32_t i = 0; i < bindingCount; i++) {
49 VK_FROM_HANDLE(panvk_buffer, buffer, pBuffers[i]);
50
51 cmdbuf->state.vb.bufs[firstBinding + i].address =
52 panvk_buffer_gpu_ptr(buffer, pOffsets[i]);
53 cmdbuf->state.vb.bufs[firstBinding + i].size =
54 panvk_buffer_range(buffer, pOffsets[i], VK_WHOLE_SIZE);
55 }
56
57 cmdbuf->state.vb.count =
58 MAX2(cmdbuf->state.vb.count, firstBinding + bindingCount);
59 desc_state->vs_attrib_bufs = desc_state->vs_attribs = 0;
60 }
61
62 void
panvk_CmdBindIndexBuffer(VkCommandBuffer commandBuffer,VkBuffer buffer,VkDeviceSize offset,VkIndexType indexType)63 panvk_CmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer,
64 VkDeviceSize offset, VkIndexType indexType)
65 {
66 VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
67 VK_FROM_HANDLE(panvk_buffer, buf, buffer);
68
69 cmdbuf->state.ib.buffer = buf;
70 cmdbuf->state.ib.offset = offset;
71 switch (indexType) {
72 case VK_INDEX_TYPE_UINT16:
73 cmdbuf->state.ib.index_size = 16;
74 break;
75 case VK_INDEX_TYPE_UINT32:
76 cmdbuf->state.ib.index_size = 32;
77 break;
78 case VK_INDEX_TYPE_NONE_KHR:
79 cmdbuf->state.ib.index_size = 0;
80 break;
81 case VK_INDEX_TYPE_UINT8_EXT:
82 cmdbuf->state.ib.index_size = 8;
83 break;
84 default:
85 unreachable("Invalid index type\n");
86 }
87 }
88
89 static void
panvk_set_dyn_ssbo_pointers(struct panvk_descriptor_state * desc_state,unsigned dyn_ssbo_offset,struct panvk_descriptor_set * set)90 panvk_set_dyn_ssbo_pointers(struct panvk_descriptor_state *desc_state,
91 unsigned dyn_ssbo_offset,
92 struct panvk_descriptor_set *set)
93 {
94 struct panvk_sysvals *sysvals = &desc_state->sysvals;
95
96 for (unsigned i = 0; i < set->layout->num_dyn_ssbos; i++) {
97 const struct panvk_buffer_desc *ssbo =
98 &desc_state->dyn.ssbos[dyn_ssbo_offset + i];
99
100 sysvals->dyn_ssbos[dyn_ssbo_offset + i] = (struct panvk_ssbo_addr){
101 .base_addr = panvk_buffer_gpu_ptr(ssbo->buffer, ssbo->offset),
102 .size = panvk_buffer_range(ssbo->buffer, ssbo->offset, ssbo->size),
103 };
104 }
105
106 desc_state->sysvals_ptr = 0;
107 }
108
109 void
panvk_CmdBindDescriptorSets(VkCommandBuffer commandBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipelineLayout layout,uint32_t firstSet,uint32_t descriptorSetCount,const VkDescriptorSet * pDescriptorSets,uint32_t dynamicOffsetCount,const uint32_t * pDynamicOffsets)110 panvk_CmdBindDescriptorSets(VkCommandBuffer commandBuffer,
111 VkPipelineBindPoint pipelineBindPoint,
112 VkPipelineLayout layout, uint32_t firstSet,
113 uint32_t descriptorSetCount,
114 const VkDescriptorSet *pDescriptorSets,
115 uint32_t dynamicOffsetCount,
116 const uint32_t *pDynamicOffsets)
117 {
118 VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
119 VK_FROM_HANDLE(panvk_pipeline_layout, playout, layout);
120
121 struct panvk_descriptor_state *descriptors_state =
122 &cmdbuf->bind_points[pipelineBindPoint].desc_state;
123
124 unsigned dynoffset_idx = 0;
125 for (unsigned i = 0; i < descriptorSetCount; ++i) {
126 unsigned idx = i + firstSet;
127 VK_FROM_HANDLE(panvk_descriptor_set, set, pDescriptorSets[i]);
128
129 descriptors_state->sets[idx] = set;
130
131 if (set->layout->num_dyn_ssbos || set->layout->num_dyn_ubos) {
132 unsigned dyn_ubo_offset = playout->sets[idx].dyn_ubo_offset;
133 unsigned dyn_ssbo_offset = playout->sets[idx].dyn_ssbo_offset;
134
135 for (unsigned b = 0; b < set->layout->binding_count; b++) {
136 for (unsigned e = 0; e < set->layout->bindings[b].array_size; e++) {
137 struct panvk_buffer_desc *bdesc = NULL;
138
139 if (set->layout->bindings[b].type ==
140 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) {
141 bdesc = &descriptors_state->dyn.ubos[dyn_ubo_offset++];
142 *bdesc =
143 set->dyn_ubos[set->layout->bindings[b].dyn_ubo_idx + e];
144 } else if (set->layout->bindings[b].type ==
145 VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
146 bdesc = &descriptors_state->dyn.ssbos[dyn_ssbo_offset++];
147 *bdesc =
148 set->dyn_ssbos[set->layout->bindings[b].dyn_ssbo_idx + e];
149 }
150
151 if (bdesc) {
152 bdesc->offset += pDynamicOffsets[dynoffset_idx++];
153 }
154 }
155 }
156 }
157
158 if (set->layout->num_dyn_ssbos) {
159 panvk_set_dyn_ssbo_pointers(descriptors_state,
160 playout->sets[idx].dyn_ssbo_offset, set);
161 }
162
163 if (set->layout->num_dyn_ssbos)
164 descriptors_state->dirty |= PANVK_DYNAMIC_SSBO;
165
166 if (set->layout->num_ubos || set->layout->num_dyn_ubos ||
167 set->layout->num_dyn_ssbos || set->layout->desc_ubo_size)
168 descriptors_state->ubos = 0;
169
170 if (set->layout->num_textures)
171 descriptors_state->textures = 0;
172
173 if (set->layout->num_samplers)
174 descriptors_state->samplers = 0;
175
176 if (set->layout->num_imgs) {
177 descriptors_state->vs_attrib_bufs =
178 descriptors_state->non_vs_attrib_bufs = 0;
179 descriptors_state->vs_attribs = descriptors_state->non_vs_attribs = 0;
180 }
181 }
182
183 assert(dynoffset_idx == dynamicOffsetCount);
184 }
185
186 void
panvk_CmdPushConstants(VkCommandBuffer commandBuffer,VkPipelineLayout layout,VkShaderStageFlags stageFlags,uint32_t offset,uint32_t size,const void * pValues)187 panvk_CmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
188 VkShaderStageFlags stageFlags, uint32_t offset,
189 uint32_t size, const void *pValues)
190 {
191 VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
192
193 memcpy(cmdbuf->push_constants + offset, pValues, size);
194
195 if (stageFlags & VK_SHADER_STAGE_ALL_GRAPHICS) {
196 struct panvk_descriptor_state *desc_state =
197 panvk_cmd_get_desc_state(cmdbuf, GRAPHICS);
198
199 desc_state->ubos = 0;
200 desc_state->push_constants = 0;
201 }
202
203 if (stageFlags & VK_SHADER_STAGE_COMPUTE_BIT) {
204 struct panvk_descriptor_state *desc_state =
205 panvk_cmd_get_desc_state(cmdbuf, COMPUTE);
206
207 desc_state->ubos = 0;
208 desc_state->push_constants = 0;
209 }
210 }
211
212 void
panvk_CmdBindPipeline(VkCommandBuffer commandBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipeline _pipeline)213 panvk_CmdBindPipeline(VkCommandBuffer commandBuffer,
214 VkPipelineBindPoint pipelineBindPoint,
215 VkPipeline _pipeline)
216 {
217 VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
218 VK_FROM_HANDLE(panvk_pipeline, pipeline, _pipeline);
219
220 cmdbuf->bind_points[pipelineBindPoint].pipeline = pipeline;
221 cmdbuf->state.fs_rsd = 0;
222
223 if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS) {
224 cmdbuf->state.varyings = pipeline->varyings;
225
226 if (!(pipeline->dynamic_state_mask &
227 BITFIELD_BIT(VK_DYNAMIC_STATE_VIEWPORT))) {
228 cmdbuf->state.viewport = pipeline->viewport;
229 cmdbuf->state.dirty |= PANVK_DYNAMIC_VIEWPORT;
230 }
231 if (!(pipeline->dynamic_state_mask &
232 BITFIELD_BIT(VK_DYNAMIC_STATE_SCISSOR))) {
233 cmdbuf->state.scissor = pipeline->scissor;
234 cmdbuf->state.dirty |= PANVK_DYNAMIC_SCISSOR;
235 }
236 }
237
238 /* Sysvals are passed through UBOs, we need dirty the UBO array if the
239 * pipeline contain shaders using sysvals.
240 */
241 cmdbuf->bind_points[pipelineBindPoint].desc_state.ubos = 0;
242 }
243
244 void
panvk_CmdSetViewport(VkCommandBuffer commandBuffer,uint32_t firstViewport,uint32_t viewportCount,const VkViewport * pViewports)245 panvk_CmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
246 uint32_t viewportCount, const VkViewport *pViewports)
247 {
248 VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
249 assert(viewportCount == 1);
250 assert(!firstViewport);
251
252 cmdbuf->state.viewport = pViewports[0];
253 cmdbuf->state.vpd = 0;
254 cmdbuf->state.dirty |= PANVK_DYNAMIC_VIEWPORT;
255 }
256
257 void
panvk_CmdSetScissor(VkCommandBuffer commandBuffer,uint32_t firstScissor,uint32_t scissorCount,const VkRect2D * pScissors)258 panvk_CmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor,
259 uint32_t scissorCount, const VkRect2D *pScissors)
260 {
261 VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
262 assert(scissorCount == 1);
263 assert(!firstScissor);
264
265 cmdbuf->state.scissor = pScissors[0];
266 cmdbuf->state.vpd = 0;
267 cmdbuf->state.dirty |= PANVK_DYNAMIC_SCISSOR;
268 }
269
270 void
panvk_CmdSetLineWidth(VkCommandBuffer commandBuffer,float lineWidth)271 panvk_CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
272 {
273 VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
274
275 cmdbuf->state.rast.line_width = lineWidth;
276 cmdbuf->state.dirty |= PANVK_DYNAMIC_LINE_WIDTH;
277 }
278
279 void
panvk_CmdSetDepthBias(VkCommandBuffer commandBuffer,float depthBiasConstantFactor,float depthBiasClamp,float depthBiasSlopeFactor)280 panvk_CmdSetDepthBias(VkCommandBuffer commandBuffer,
281 float depthBiasConstantFactor, float depthBiasClamp,
282 float depthBiasSlopeFactor)
283 {
284 VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
285
286 cmdbuf->state.rast.depth_bias.constant_factor = depthBiasConstantFactor;
287 cmdbuf->state.rast.depth_bias.clamp = depthBiasClamp;
288 cmdbuf->state.rast.depth_bias.slope_factor = depthBiasSlopeFactor;
289 cmdbuf->state.dirty |= PANVK_DYNAMIC_DEPTH_BIAS;
290 cmdbuf->state.fs_rsd = 0;
291 }
292
293 void
panvk_CmdSetBlendConstants(VkCommandBuffer commandBuffer,const float blendConstants[4])294 panvk_CmdSetBlendConstants(VkCommandBuffer commandBuffer,
295 const float blendConstants[4])
296 {
297 VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
298
299 for (unsigned i = 0; i < 4; i++)
300 cmdbuf->state.blend.constants[i] = CLAMP(blendConstants[i], 0.0f, 1.0f);
301
302 cmdbuf->state.dirty |= PANVK_DYNAMIC_BLEND_CONSTANTS;
303 cmdbuf->state.fs_rsd = 0;
304 }
305
void
panvk_CmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
                        float maxDepthBounds)
{
   /* TODO: depth-bounds test is not implemented yet. */
   panvk_stub();
}
312
313 void
panvk_CmdSetStencilCompareMask(VkCommandBuffer commandBuffer,VkStencilFaceFlags faceMask,uint32_t compareMask)314 panvk_CmdSetStencilCompareMask(VkCommandBuffer commandBuffer,
315 VkStencilFaceFlags faceMask,
316 uint32_t compareMask)
317 {
318 VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
319
320 if (faceMask & VK_STENCIL_FACE_FRONT_BIT)
321 cmdbuf->state.zs.s_front.compare_mask = compareMask;
322
323 if (faceMask & VK_STENCIL_FACE_BACK_BIT)
324 cmdbuf->state.zs.s_back.compare_mask = compareMask;
325
326 cmdbuf->state.dirty |= PANVK_DYNAMIC_STENCIL_COMPARE_MASK;
327 cmdbuf->state.fs_rsd = 0;
328 }
329
330 void
panvk_CmdSetStencilWriteMask(VkCommandBuffer commandBuffer,VkStencilFaceFlags faceMask,uint32_t writeMask)331 panvk_CmdSetStencilWriteMask(VkCommandBuffer commandBuffer,
332 VkStencilFaceFlags faceMask, uint32_t writeMask)
333 {
334 VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
335
336 if (faceMask & VK_STENCIL_FACE_FRONT_BIT)
337 cmdbuf->state.zs.s_front.write_mask = writeMask;
338
339 if (faceMask & VK_STENCIL_FACE_BACK_BIT)
340 cmdbuf->state.zs.s_back.write_mask = writeMask;
341
342 cmdbuf->state.dirty |= PANVK_DYNAMIC_STENCIL_WRITE_MASK;
343 cmdbuf->state.fs_rsd = 0;
344 }
345
346 void
panvk_CmdSetStencilReference(VkCommandBuffer commandBuffer,VkStencilFaceFlags faceMask,uint32_t reference)347 panvk_CmdSetStencilReference(VkCommandBuffer commandBuffer,
348 VkStencilFaceFlags faceMask, uint32_t reference)
349 {
350 VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
351
352 if (faceMask & VK_STENCIL_FACE_FRONT_BIT)
353 cmdbuf->state.zs.s_front.ref = reference;
354
355 if (faceMask & VK_STENCIL_FACE_BACK_BIT)
356 cmdbuf->state.zs.s_back.ref = reference;
357
358 cmdbuf->state.dirty |= PANVK_DYNAMIC_STENCIL_REFERENCE;
359 cmdbuf->state.fs_rsd = 0;
360 }
361
362 VkResult
panvk_CreateCommandPool(VkDevice _device,const VkCommandPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkCommandPool * pCmdPool)363 panvk_CreateCommandPool(VkDevice _device,
364 const VkCommandPoolCreateInfo *pCreateInfo,
365 const VkAllocationCallbacks *pAllocator,
366 VkCommandPool *pCmdPool)
367 {
368 VK_FROM_HANDLE(panvk_device, device, _device);
369 struct panvk_cmd_pool *pool;
370
371 pool = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*pool), 8,
372 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
373 if (pool == NULL)
374 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
375
376 VkResult result =
377 vk_command_pool_init(&device->vk, &pool->vk, pCreateInfo, pAllocator);
378 if (result != VK_SUCCESS) {
379 vk_free2(&device->vk.alloc, pAllocator, pool);
380 return result;
381 }
382
383 panvk_bo_pool_init(&pool->desc_bo_pool);
384 panvk_bo_pool_init(&pool->varying_bo_pool);
385 panvk_bo_pool_init(&pool->tls_bo_pool);
386 *pCmdPool = panvk_cmd_pool_to_handle(pool);
387 return VK_SUCCESS;
388 }
389
390 static void
panvk_cmd_prepare_clear_values(struct panvk_cmd_buffer * cmdbuf,const VkClearValue * in)391 panvk_cmd_prepare_clear_values(struct panvk_cmd_buffer *cmdbuf,
392 const VkClearValue *in)
393 {
394 for (unsigned i = 0; i < cmdbuf->state.pass->attachment_count; i++) {
395 const struct panvk_render_pass_attachment *attachment =
396 &cmdbuf->state.pass->attachments[i];
397 enum pipe_format fmt = attachment->format;
398
399 if (util_format_is_depth_or_stencil(fmt)) {
400 if (attachment->load_op == VK_ATTACHMENT_LOAD_OP_CLEAR ||
401 attachment->stencil_load_op == VK_ATTACHMENT_LOAD_OP_CLEAR) {
402 cmdbuf->state.clear[i].depth = in[i].depthStencil.depth;
403 cmdbuf->state.clear[i].stencil = in[i].depthStencil.stencil;
404 } else {
405 cmdbuf->state.clear[i].depth = 0;
406 cmdbuf->state.clear[i].stencil = 0;
407 }
408 } else {
409 if (attachment->load_op == VK_ATTACHMENT_LOAD_OP_CLEAR) {
410 union pipe_color_union *col =
411 (union pipe_color_union *)&in[i].color;
412 pan_pack_color(panfrost_blendable_formats_v7,
413 cmdbuf->state.clear[i].color, col, fmt, false);
414 } else {
415 memset(cmdbuf->state.clear[i].color, 0,
416 sizeof(cmdbuf->state.clear[0].color));
417 }
418 }
419 }
420 }
421
/* Populate the pan_fb_info render-target and ZS descriptors for the current
 * subpass: hook up attachment views, clear/preload flags, clear values, and
 * derive the framebuffer sample count from the bound views.
 */
void
panvk_cmd_fb_info_set_subpass(struct panvk_cmd_buffer *cmdbuf)
{
   const struct panvk_subpass *subpass = cmdbuf->state.subpass;
   struct pan_fb_info *fbinfo = &cmdbuf->state.fb.info;
   const struct panvk_framebuffer *fb = cmdbuf->state.framebuffer;
   const struct panvk_clear_value *clears = cmdbuf->state.clear;
   struct panvk_image_view *view;

   fbinfo->nr_samples = 1;
   fbinfo->rt_count = subpass->color_count;
   /* Stale pre/post-frame DCDs from a previous subpass must not leak in. */
   memset(&fbinfo->bifrost.pre_post.dcds, 0,
          sizeof(fbinfo->bifrost.pre_post.dcds));

   for (unsigned cb = 0; cb < subpass->color_count; cb++) {
      int idx = subpass->color_attachments[cb].idx;
      view = idx != VK_ATTACHMENT_UNUSED ? fb->attachments[idx].iview : NULL;
      if (!view)
         continue;
      fbinfo->rts[cb].view = &view->pview;
      fbinfo->rts[cb].clear = subpass->color_attachments[cb].clear;
      fbinfo->rts[cb].preload = subpass->color_attachments[cb].preload;
      fbinfo->rts[cb].crc_valid = &cmdbuf->state.fb.crc_valid[cb];

      memcpy(fbinfo->rts[cb].clear_value, clears[idx].color,
             sizeof(fbinfo->rts[cb].clear_value));
      /* The framebuffer sample count is the max over all bound views. */
      fbinfo->nr_samples =
         MAX2(fbinfo->nr_samples, pan_image_view_get_nr_samples(&view->pview));
   }

   if (subpass->zs_attachment.idx != VK_ATTACHMENT_UNUSED) {
      view = fb->attachments[subpass->zs_attachment.idx].iview;
      const struct util_format_description *fdesc =
         util_format_description(view->pview.format);

      fbinfo->nr_samples =
         MAX2(fbinfo->nr_samples, pan_image_view_get_nr_samples(&view->pview));

      if (util_format_has_depth(fdesc)) {
         fbinfo->zs.clear.z = subpass->zs_attachment.clear;
         fbinfo->zs.clear_value.depth =
            clears[subpass->zs_attachment.idx].depth;
         fbinfo->zs.view.zs = &view->pview;
      }

      if (util_format_has_stencil(fdesc)) {
         fbinfo->zs.clear.s = subpass->zs_attachment.clear;
         fbinfo->zs.clear_value.stencil =
            clears[subpass->zs_attachment.idx].stencil;
         /* A combined depth/stencil view is already bound through view.zs
          * above; only bind the separate stencil plane when the format has
          * no depth component.
          */
         if (!fbinfo->zs.view.zs)
            fbinfo->zs.view.s = &view->pview;
      }
   }
}
476
477 void
panvk_cmd_fb_info_init(struct panvk_cmd_buffer * cmdbuf)478 panvk_cmd_fb_info_init(struct panvk_cmd_buffer *cmdbuf)
479 {
480 struct pan_fb_info *fbinfo = &cmdbuf->state.fb.info;
481 const struct panvk_framebuffer *fb = cmdbuf->state.framebuffer;
482
483 memset(cmdbuf->state.fb.crc_valid, 0, sizeof(cmdbuf->state.fb.crc_valid));
484
485 *fbinfo = (struct pan_fb_info){
486 .tile_buf_budget = panfrost_query_optimal_tib_size(
487 cmdbuf->device->physical_device->model),
488 .width = fb->width,
489 .height = fb->height,
490 .extent.maxx = fb->width - 1,
491 .extent.maxy = fb->height - 1,
492 };
493 }
494
/* vkCmdBeginRenderPass2: record the render-pass state, open the first batch,
 * and prepare the per-attachment clear values and framebuffer info for the
 * first subpass.
 */
void
panvk_CmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                          const VkRenderPassBeginInfo *pRenderPassBegin,
                          const VkSubpassBeginInfo *pSubpassBeginInfo)
{
   VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, commandBuffer);
   VK_FROM_HANDLE(panvk_render_pass, pass, pRenderPassBegin->renderPass);
   VK_FROM_HANDLE(panvk_framebuffer, fb, pRenderPassBegin->framebuffer);

   cmdbuf->state.pass = pass;
   cmdbuf->state.subpass = pass->subpasses;
   cmdbuf->state.framebuffer = fb;
   cmdbuf->state.render_area = pRenderPassBegin->renderArea;
   /* NOTE(review): both vk_zalloc() results below are used without a NULL
    * check; on host-memory exhaustion this dereferences NULL instead of
    * reporting VK_ERROR_OUT_OF_HOST_MEMORY on the command buffer — confirm
    * and route through the driver's command-buffer error path.
    */
   cmdbuf->state.batch =
      vk_zalloc(&cmdbuf->vk.pool->alloc, sizeof(*cmdbuf->state.batch), 8,
                VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
   util_dynarray_init(&cmdbuf->state.batch->jobs, NULL);
   util_dynarray_init(&cmdbuf->state.batch->event_ops, NULL);
   assert(pRenderPassBegin->clearValueCount <= pass->attachment_count);
   cmdbuf->state.clear =
      vk_zalloc(&cmdbuf->vk.pool->alloc,
                sizeof(*cmdbuf->state.clear) * pass->attachment_count, 8,
                VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
   panvk_cmd_prepare_clear_values(cmdbuf, pRenderPassBegin->pClearValues);
   panvk_cmd_fb_info_init(cmdbuf);
   panvk_cmd_fb_info_set_subpass(cmdbuf);
}
522
523 void
panvk_cmd_preload_fb_after_batch_split(struct panvk_cmd_buffer * cmdbuf)524 panvk_cmd_preload_fb_after_batch_split(struct panvk_cmd_buffer *cmdbuf)
525 {
526 for (unsigned i = 0; i < cmdbuf->state.fb.info.rt_count; i++) {
527 if (cmdbuf->state.fb.info.rts[i].view) {
528 cmdbuf->state.fb.info.rts[i].clear = false;
529 cmdbuf->state.fb.info.rts[i].preload = true;
530 }
531 }
532
533 if (cmdbuf->state.fb.info.zs.view.zs) {
534 cmdbuf->state.fb.info.zs.clear.z = false;
535 cmdbuf->state.fb.info.zs.preload.z = true;
536 }
537
538 if (cmdbuf->state.fb.info.zs.view.s ||
539 (cmdbuf->state.fb.info.zs.view.zs &&
540 util_format_is_depth_and_stencil(
541 cmdbuf->state.fb.info.zs.view.zs->format))) {
542 cmdbuf->state.fb.info.zs.clear.s = false;
543 cmdbuf->state.fb.info.zs.preload.s = true;
544 }
545 }
546
547 struct panvk_batch *
panvk_cmd_open_batch(struct panvk_cmd_buffer * cmdbuf)548 panvk_cmd_open_batch(struct panvk_cmd_buffer *cmdbuf)
549 {
550 assert(!cmdbuf->state.batch);
551 cmdbuf->state.batch =
552 vk_zalloc(&cmdbuf->vk.pool->alloc, sizeof(*cmdbuf->state.batch), 8,
553 VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
554 assert(cmdbuf->state.batch);
555 return cmdbuf->state.batch;
556 }
557
void
panvk_CmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer _buffer,
                      VkDeviceSize offset, uint32_t drawCount, uint32_t stride)
{
   /* TODO: indirect draws are not implemented yet. */
   panvk_stub();
}
564
void
panvk_CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer _buffer,
                             VkDeviceSize offset, uint32_t drawCount,
                             uint32_t stride)
{
   /* TODO: indexed indirect draws are not implemented yet. */
   panvk_stub();
}
572
void
panvk_CmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t base_x,
                      uint32_t base_y, uint32_t base_z, uint32_t x, uint32_t y,
                      uint32_t z)
{
   /* TODO: dispatch with a non-zero base workgroup is not implemented yet. */
   panvk_stub();
}
580
void
panvk_CmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer _buffer,
                          VkDeviceSize offset)
{
   /* TODO: indirect dispatch is not implemented yet. */
   panvk_stub();
}
587