1 /*
2 * Copyright 2019 Google LLC
3 * SPDX-License-Identifier: MIT
4 *
5 * based in part on anv and radv which are:
6 * Copyright © 2015 Intel Corporation
7 * Copyright © 2016 Red Hat.
8 * Copyright © 2016 Bas Nieuwenhuizen
9 */
10
11 #include "vn_command_buffer.h"
12
13 #include "venus-protocol/vn_protocol_driver_command_buffer.h"
14 #include "venus-protocol/vn_protocol_driver_command_pool.h"
15
16 #include "vn_descriptor_set.h"
17 #include "vn_device.h"
18 #include "vn_image.h"
19 #include "vn_query_pool.h"
20 #include "vn_render_pass.h"
21
/* defined later in this file, with the submission path */
static void
vn_cmd_submit(struct vn_command_buffer *cmd);

/* Encode vkCmd##cmd_name with its arguments into the command buffer's
 * encoder.  If the encoder cannot reserve enough space, the command buffer
 * transitions to the INVALID state.  With the NO_CMD_BATCHING perf option,
 * the accumulated commands are submitted to the host immediately instead of
 * being batched.
 */
#define VN_CMD_ENQUEUE(cmd_name, commandBuffer, ...)                         \
   do {                                                                      \
      struct vn_command_buffer *_cmd =                                       \
         vn_command_buffer_from_handle(commandBuffer);                       \
      size_t _cmd_size = vn_sizeof_##cmd_name(commandBuffer, ##__VA_ARGS__); \
                                                                             \
      if (vn_cs_encoder_reserve(&_cmd->cs, _cmd_size))                       \
         vn_encode_##cmd_name(&_cmd->cs, 0, commandBuffer, ##__VA_ARGS__);   \
      else                                                                   \
         _cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;                      \
                                                                             \
      if (VN_PERF(NO_CMD_BATCHING))                                          \
         vn_cmd_submit(_cmd);                                                \
   } while (0)
39
40 static bool
vn_image_memory_barrier_has_present_src(const VkImageMemoryBarrier * img_barriers,uint32_t count)41 vn_image_memory_barrier_has_present_src(
42 const VkImageMemoryBarrier *img_barriers, uint32_t count)
43 {
44 for (uint32_t i = 0; i < count; i++) {
45 if (img_barriers[i].oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR ||
46 img_barriers[i].newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
47 return true;
48 }
49 return false;
50 }
51
52 static bool
vn_dependency_info_has_present_src(uint32_t dep_count,const VkDependencyInfo * dep_infos)53 vn_dependency_info_has_present_src(uint32_t dep_count,
54 const VkDependencyInfo *dep_infos)
55 {
56 for (uint32_t i = 0; i < dep_count; i++) {
57 for (uint32_t j = 0; j < dep_infos[i].imageMemoryBarrierCount; j++) {
58 const VkImageMemoryBarrier2 *b =
59 &dep_infos[i].pImageMemoryBarriers[j];
60 if (b->oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR ||
61 b->newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
62 return true;
63 }
64 }
65 }
66
67 return false;
68 }
69
/* Return scratch storage for count VkImageMemoryBarrier, or NULL on
 * allocation failure.  NOTE(review): the storage appears to be a single
 * cached buffer shared through the command pool — callers must not hold two
 * results from the same pool at once; confirm in vn_cached_storage_get().
 */
static inline VkImageMemoryBarrier *
vn_cmd_get_image_memory_barriers(struct vn_command_buffer *cmd,
                                 uint32_t count)
{
   return vn_cached_storage_get(&cmd->pool->storage,
                                count * sizeof(VkImageMemoryBarrier));
}
77
78 /* About VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, the spec says
79 *
80 * VK_IMAGE_LAYOUT_PRESENT_SRC_KHR must only be used for presenting a
81 * presentable image for display. A swapchain's image must be transitioned
82 * to this layout before calling vkQueuePresentKHR, and must be
83 * transitioned away from this layout after calling vkAcquireNextImageKHR.
84 *
85 * That allows us to treat the layout internally as
86 *
87 * - VK_IMAGE_LAYOUT_GENERAL
88 * - VK_QUEUE_FAMILY_FOREIGN_EXT has the ownership, if the image is not a
89 * prime blit source
90 *
91 * while staying performant.
92 *
93 * About queue family ownerships, the spec says
94 *
95 * A queue family can take ownership of an image subresource or buffer
96 * range of a resource created with VK_SHARING_MODE_EXCLUSIVE, without an
97 * ownership transfer, in the same way as for a resource that was just
98 * created; however, taking ownership in this way has the effect that the
99 * contents of the image subresource or buffer range are undefined.
100 *
101 * It is unclear if that is applicable to external resources, which supposedly
102 * have the same semantics
103 *
104 * Binding a resource to a memory object shared between multiple Vulkan
105 * instances or other APIs does not change the ownership of the underlying
106 * memory. The first entity to access the resource implicitly acquires
107 * ownership. Accessing a resource backed by memory that is owned by a
108 * particular instance or API has the same semantics as accessing a
109 * VK_SHARING_MODE_EXCLUSIVE resource[...]
110 *
111 * We should get the spec clarified, or get rid of this completely broken code
112 * (TODO).
113 *
114 * Assuming a queue family can acquire the ownership implicitly when the
115 * contents are not needed, we do not need to worry about
116 * VK_IMAGE_LAYOUT_UNDEFINED. We can use VK_IMAGE_LAYOUT_PRESENT_SRC_KHR as
117 * the sole signal to trigger queue family ownership transfers.
118 *
119 * When the image has VK_SHARING_MODE_CONCURRENT, we can, and are required to,
120 * use VK_QUEUE_FAMILY_IGNORED as the other queue family whether we are
121 * transitioning to or from VK_IMAGE_LAYOUT_PRESENT_SRC_KHR.
122 *
123 * When the image has VK_SHARING_MODE_EXCLUSIVE, we have to work out who the
124 * other queue family is. It is easier when the barrier does not also define
125 * a queue family ownership transfer (i.e., srcQueueFamilyIndex equals to
126 * dstQueueFamilyIndex). The other queue family must be the queue family the
127 * command buffer was allocated for.
128 *
129 * When the barrier also defines a queue family ownership transfer, it is
130 * submitted both to the source queue family to release the ownership and to
131 * the destination queue family to acquire the ownership. Depending on
132 * whether the barrier transitions to or from VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
133 * we are only interested in the ownership release or acquire respectively and
134 * should be careful to avoid double releases/acquires.
135 *
136 * I haven't followed all transition paths mentally to verify the correctness.
137 * I likely also violate some VUs or miss some cases below. They are
138 * hopefully fixable and are left as TODOs.
139 */
/* Rewrite *src_barrier into *out_barrier, mapping
 * VK_IMAGE_LAYOUT_PRESENT_SRC_KHR to VN_PRESENT_SRC_INTERNAL_LAYOUT and
 * patching the queue family indices per the rules described in the comment
 * above.  src_barrier and out_barrier may point to the same barrier (see
 * vn_cmd_transfer_present_src_images()).
 */
static void
vn_cmd_fix_image_memory_barrier(const struct vn_command_buffer *cmd,
                                const VkImageMemoryBarrier *src_barrier,
                                VkImageMemoryBarrier *out_barrier)
{
   const struct vn_image *img = vn_image_from_handle(src_barrier->image);

   *out_barrier = *src_barrier;

   /* no fix needed */
   if (out_barrier->oldLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR &&
       out_barrier->newLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
      return;

   /* only WSI images may legally use the present layout */
   assert(img->wsi.is_wsi);

   /* nothing to translate when the internal layout is the present layout */
   if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
      return;

   /* prime blit src or no layout transition */
   if (img->wsi.is_prime_blit_src ||
       out_barrier->oldLayout == out_barrier->newLayout) {
      if (out_barrier->oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
         out_barrier->oldLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;
      if (out_barrier->newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
         out_barrier->newLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;
      return;
   }

   if (out_barrier->oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
      /* transitioning away from the present layout: this side acquires the
       * ownership from the foreign queue family
       */
      out_barrier->oldLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

      /* no availability operation needed */
      out_barrier->srcAccessMask = 0;

      const uint32_t dst_qfi = out_barrier->dstQueueFamilyIndex;
      if (img->sharing_mode == VK_SHARING_MODE_CONCURRENT) {
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
      } else if (dst_qfi == out_barrier->srcQueueFamilyIndex ||
                 dst_qfi == cmd->pool->queue_family_index) {
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
         out_barrier->dstQueueFamilyIndex = cmd->pool->queue_family_index;
      } else {
         /* The barrier also defines a queue family ownership transfer, and
          * this is the one that gets submitted to the source queue family to
          * release the ownership. Skip both the transfer and the transition.
          */
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->newLayout = out_barrier->oldLayout;
      }
   } else {
      /* transitioning to the present layout: this side releases the
       * ownership to the foreign queue family
       */
      out_barrier->newLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

      /* no visibility operation needed */
      out_barrier->dstAccessMask = 0;

      const uint32_t src_qfi = out_barrier->srcQueueFamilyIndex;
      if (img->sharing_mode == VK_SHARING_MODE_CONCURRENT) {
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
      } else if (src_qfi == out_barrier->dstQueueFamilyIndex ||
                 src_qfi == cmd->pool->queue_family_index) {
         out_barrier->srcQueueFamilyIndex = cmd->pool->queue_family_index;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
      } else {
         /* The barrier also defines a queue family ownership transfer, and
          * this is the one that gets submitted to the destination queue
          * family to acquire the ownership. Skip both the transfer and the
          * transition.
          */
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->oldLayout = out_barrier->newLayout;
      }
   }
}
218
/** Synchronization2 variant of vn_cmd_fix_image_memory_barrier(); fixes the
 * barrier in place rather than copying to a separate output.
 */
static void
vn_cmd_fix_image_memory_barrier2(const struct vn_command_buffer *cmd,
                                 VkImageMemoryBarrier2 *b)
{
   /* nothing to translate when the internal layout is the present layout */
   if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
      return;

   /* no fix needed */
   if (b->oldLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR &&
       b->newLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
      return;

   /* only WSI images may legally use the present layout */
   const struct vn_image *img = vn_image_from_handle(b->image);
   assert(img->wsi.is_wsi);

   /* prime blit src or no layout transition */
   if (img->wsi.is_prime_blit_src || b->oldLayout == b->newLayout) {
      if (b->oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
         b->oldLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;
      if (b->newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
         b->newLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;
      return;
   }

   if (b->oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
      /* transitioning away from the present layout: this side acquires the
       * ownership from the foreign queue family
       */
      b->oldLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

      /* no availability operation needed */
      b->srcStageMask = 0;
      b->srcAccessMask = 0;

      if (img->sharing_mode == VK_SHARING_MODE_CONCURRENT) {
         b->srcQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
         b->dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
      } else if (b->dstQueueFamilyIndex == b->srcQueueFamilyIndex ||
                 b->dstQueueFamilyIndex == cmd->pool->queue_family_index) {
         b->srcQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
         b->dstQueueFamilyIndex = cmd->pool->queue_family_index;
      } else {
         /* The barrier also defines a queue family ownership transfer, and
          * this is the one that gets submitted to the source queue family to
          * release the ownership. Skip both the transfer and the transition.
          */
         b->srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         b->dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         b->newLayout = b->oldLayout;
      }
   } else {
      /* transitioning to the present layout: this side releases the
       * ownership to the foreign queue family
       */
      b->newLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

      /* no visibility operation needed */
      b->dstStageMask = 0;
      b->dstAccessMask = 0;

      if (img->sharing_mode == VK_SHARING_MODE_CONCURRENT) {
         b->srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         b->dstQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
      } else if (b->srcQueueFamilyIndex == b->dstQueueFamilyIndex ||
                 b->srcQueueFamilyIndex == cmd->pool->queue_family_index) {
         b->srcQueueFamilyIndex = cmd->pool->queue_family_index;
         b->dstQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
      } else {
         /* The barrier also defines a queue family ownership transfer, and
          * this is the one that gets submitted to the destination queue
          * family to acquire the ownership. Skip both the transfer and the
          * transition.
          */
         b->srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         b->dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         b->oldLayout = b->newLayout;
      }
   }
}
291
/* Fix image memory barriers for vkCmdWaitEvents.
 *
 * Returns src_barriers unchanged when no fixup is needed.  Otherwise returns
 * a fixed copy in pool-cached storage in which barriers that became queue
 * family ownership transfers have been moved to the tail; *out_transfer_count
 * is the number of such trailing transfer barriers, which the caller must
 * submit via a pipeline barrier instead.
 */
static const VkImageMemoryBarrier *
vn_cmd_wait_events_fix_image_memory_barriers(
   struct vn_command_buffer *cmd,
   const VkImageMemoryBarrier *src_barriers,
   uint32_t count,
   uint32_t *out_transfer_count)
{
   *out_transfer_count = 0;

   if (cmd->builder.in_render_pass ||
       !vn_image_memory_barrier_has_present_src(src_barriers, count))
      return src_barriers;

   /* double capacity: first half for non-transfer barriers, second half is a
    * staging area for the transfer barriers collected below
    */
   VkImageMemoryBarrier *img_barriers =
      vn_cmd_get_image_memory_barriers(cmd, count * 2);
   if (!img_barriers) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return src_barriers;
   }

   /* vkCmdWaitEvents cannot be used for queue family ownership transfers.
    * Nothing appears to be said about the submission order of image memory
    * barriers in the same array.  We take the liberty to move queue family
    * ownership transfers to the tail.
    */
   VkImageMemoryBarrier *transfer_barriers = img_barriers + count;
   uint32_t transfer_count = 0;
   uint32_t valid_count = 0;
   for (uint32_t i = 0; i < count; i++) {
      VkImageMemoryBarrier *img_barrier = &img_barriers[valid_count];
      vn_cmd_fix_image_memory_barrier(cmd, &src_barriers[i], img_barrier);

      /* fixups are no-ops in this case; keep every barrier in place */
      if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
         valid_count++;
         continue;
      }

      if (img_barrier->srcQueueFamilyIndex ==
          img_barrier->dstQueueFamilyIndex) {
         valid_count++;
      } else {
         transfer_barriers[transfer_count++] = *img_barrier;
      }
   }

   assert(valid_count + transfer_count == count);
   if (transfer_count) {
      /* copy back to the tail */
      memcpy(&img_barriers[valid_count], transfer_barriers,
             sizeof(*transfer_barriers) * transfer_count);
      *out_transfer_count = transfer_count;
   }

   return img_barriers;
}
347
348 static const VkImageMemoryBarrier *
vn_cmd_pipeline_barrier_fix_image_memory_barriers(struct vn_command_buffer * cmd,const VkImageMemoryBarrier * src_barriers,uint32_t count)349 vn_cmd_pipeline_barrier_fix_image_memory_barriers(
350 struct vn_command_buffer *cmd,
351 const VkImageMemoryBarrier *src_barriers,
352 uint32_t count)
353 {
354 if (cmd->builder.in_render_pass ||
355 !vn_image_memory_barrier_has_present_src(src_barriers, count))
356 return src_barriers;
357
358 VkImageMemoryBarrier *img_barriers =
359 vn_cmd_get_image_memory_barriers(cmd, count);
360 if (!img_barriers) {
361 cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
362 return src_barriers;
363 }
364
365 for (uint32_t i = 0; i < count; i++) {
366 vn_cmd_fix_image_memory_barrier(cmd, &src_barriers[i],
367 &img_barriers[i]);
368 }
369
370 return img_barriers;
371 }
372
373 static const VkDependencyInfo *
vn_cmd_fix_dependency_infos(struct vn_command_buffer * cmd,uint32_t dep_count,const VkDependencyInfo * dep_infos)374 vn_cmd_fix_dependency_infos(struct vn_command_buffer *cmd,
375 uint32_t dep_count,
376 const VkDependencyInfo *dep_infos)
377 {
378 if (cmd->builder.in_render_pass ||
379 !vn_dependency_info_has_present_src(dep_count, dep_infos))
380 return dep_infos;
381
382 uint32_t total_barrier_count = 0;
383 for (uint32_t i = 0; i < dep_count; i++)
384 total_barrier_count += dep_infos[i].imageMemoryBarrierCount;
385
386 size_t tmp_size = dep_count * sizeof(VkDependencyInfo) +
387 total_barrier_count * sizeof(VkImageMemoryBarrier2);
388 void *tmp = vn_cached_storage_get(&cmd->pool->storage, tmp_size);
389 if (!tmp) {
390 cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
391 return dep_infos;
392 }
393
394 VkDependencyInfo *new_dep_infos = tmp;
395 tmp += dep_count * sizeof(VkDependencyInfo);
396 memcpy(new_dep_infos, dep_infos, dep_count * sizeof(VkDependencyInfo));
397
398 for (uint32_t i = 0; i < dep_count; i++) {
399 uint32_t barrier_count = dep_infos[i].imageMemoryBarrierCount;
400
401 VkImageMemoryBarrier2 *new_barriers = tmp;
402 tmp += barrier_count * sizeof(VkImageMemoryBarrier2);
403
404 memcpy(new_barriers, dep_infos[i].pImageMemoryBarriers,
405 barrier_count * sizeof(VkImageMemoryBarrier2));
406 new_dep_infos[i].pImageMemoryBarriers = new_barriers;
407
408 for (uint32_t j = 0; j < barrier_count; j++) {
409 vn_cmd_fix_image_memory_barrier2(cmd, &new_barriers[j]);
410 }
411 }
412
413 return new_dep_infos;
414 }
415
/* Enqueue a vkCmdPipelineBarrier carrying only buffer and image memory
 * barriers (no global memory barriers, no dependency flags).
 */
static void
vn_cmd_encode_memory_barriers(struct vn_command_buffer *cmd,
                              VkPipelineStageFlags src_stage_mask,
                              VkPipelineStageFlags dst_stage_mask,
                              uint32_t buf_barrier_count,
                              const VkBufferMemoryBarrier *buf_barriers,
                              uint32_t img_barrier_count,
                              const VkImageMemoryBarrier *img_barriers)
{
   const VkCommandBuffer cmd_handle = vn_command_buffer_to_handle(cmd);

   VN_CMD_ENQUEUE(vkCmdPipelineBarrier, cmd_handle, src_stage_mask,
                  dst_stage_mask, 0, 0, NULL, buf_barrier_count, buf_barriers,
                  img_barrier_count, img_barriers);
}
431
432 static void
vn_present_src_attachment_to_image_memory_barrier(const struct vn_image * img,const struct vn_present_src_attachment * att,VkImageMemoryBarrier * img_barrier,bool acquire)433 vn_present_src_attachment_to_image_memory_barrier(
434 const struct vn_image *img,
435 const struct vn_present_src_attachment *att,
436 VkImageMemoryBarrier *img_barrier,
437 bool acquire)
438 {
439 *img_barrier = (VkImageMemoryBarrier)
440 {
441 .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
442 .srcAccessMask = att->src_access_mask,
443 .dstAccessMask = att->dst_access_mask,
444 .oldLayout = acquire ? VK_IMAGE_LAYOUT_PRESENT_SRC_KHR
445 : VN_PRESENT_SRC_INTERNAL_LAYOUT,
446 .newLayout = acquire ? VN_PRESENT_SRC_INTERNAL_LAYOUT
447 : VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
448 .image = vn_image_to_handle((struct vn_image *)img),
449 .subresourceRange = {
450 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
451 .levelCount = 1,
452 .layerCount = 1,
453 },
454 };
455 }
456
/* Record barriers transitioning the given WSI images between the present
 * layout and the internal layout.  acquire selects the direction: present ->
 * internal on acquire, internal -> present on release.
 */
static void
vn_cmd_transfer_present_src_images(
   struct vn_command_buffer *cmd,
   bool acquire,
   const struct vn_image *const *images,
   const struct vn_present_src_attachment *atts,
   uint32_t count)
{
   VkImageMemoryBarrier *img_barriers =
      vn_cmd_get_image_memory_barriers(cmd, count);
   if (!img_barriers) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return;
   }

   VkPipelineStageFlags src_stage_mask = 0;
   VkPipelineStageFlags dst_stage_mask = 0;
   for (uint32_t i = 0; i < count; i++) {
      src_stage_mask |= atts[i].src_stage_mask;
      dst_stage_mask |= atts[i].dst_stage_mask;

      /* fixup in place: src and dst barrier pointers may alias */
      vn_present_src_attachment_to_image_memory_barrier(
         images[i], &atts[i], &img_barriers[i], acquire);
      vn_cmd_fix_image_memory_barrier(cmd, &img_barriers[i],
                                      &img_barriers[i]);
   }

   /* the barriers would be no-op present -> present transitions */
   if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
      return;

   vn_cmd_encode_memory_barriers(cmd, src_stage_mask, dst_stage_mask, 0, NULL,
                                 count, img_barriers);
}
490
491 struct vn_feedback_query_batch *
vn_cmd_query_batch_alloc(struct vn_command_pool * pool,struct vn_query_pool * query_pool,uint32_t query,uint32_t query_count,bool copy)492 vn_cmd_query_batch_alloc(struct vn_command_pool *pool,
493 struct vn_query_pool *query_pool,
494 uint32_t query,
495 uint32_t query_count,
496 bool copy)
497 {
498 struct vn_feedback_query_batch *batch;
499 if (list_is_empty(&pool->free_query_batches)) {
500 batch = vk_alloc(&pool->allocator, sizeof(*batch), VN_DEFAULT_ALIGN,
501 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
502 if (!batch)
503 return NULL;
504 } else {
505 batch = list_first_entry(&pool->free_query_batches,
506 struct vn_feedback_query_batch, head);
507 list_del(&batch->head);
508 }
509
510 batch->query_pool = query_pool;
511 batch->query = query;
512 batch->query_count = query_count;
513 batch->copy = copy;
514
515 return batch;
516 }
517
/* Append copies of the secondary command buffer's batched query feedback
 * entries to the primary's list, so the primary carries the pending feedback
 * of the secondaries it executes.  On allocation failure the primary is
 * marked INVALID and the merge stops early.
 */
static inline void
vn_cmd_merge_batched_query_feedback(struct vn_command_buffer *primary_cmd,
                                    struct vn_command_buffer *secondary_cmd)
{
   list_for_each_entry_safe(struct vn_feedback_query_batch, secondary_batch,
                            &secondary_cmd->builder.query_batches, head) {

      struct vn_feedback_query_batch *batch = vn_cmd_query_batch_alloc(
         primary_cmd->pool, secondary_batch->query_pool,
         secondary_batch->query, secondary_batch->query_count,
         secondary_batch->copy);

      if (!batch) {
         primary_cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
         return;
      }

      list_addtail(&batch->head, &primary_cmd->builder.query_batches);
   }
}
538
/* Track render pass state on the builder and, for passes that contain
 * present-src attachments, collect the corresponding vn_images and record
 * the acquire transitions.  The image array is stored on the builder and
 * freed by vn_cmd_end_render_pass() (or on reset/free).
 */
static void
vn_cmd_begin_render_pass(struct vn_command_buffer *cmd,
                         const struct vn_render_pass *pass,
                         const struct vn_framebuffer *fb,
                         const VkRenderPassBeginInfo *begin_info)
{
   assert(begin_info);
   assert(cmd->level == VK_COMMAND_BUFFER_LEVEL_PRIMARY);

   cmd->builder.render_pass = pass;
   cmd->builder.in_render_pass = true;
   cmd->builder.subpass_index = 0;
   cmd->builder.view_mask = vn_render_pass_get_subpass_view_mask(pass, 0);

   if (!pass->present_count)
      return;

   /* find fb attachments: imageless framebuffers carry their views in the
    * VkRenderPassAttachmentBeginInfo chained to begin_info
    */
   const VkImageView *views;
   ASSERTED uint32_t view_count;
   if (fb->image_view_count) {
      views = fb->image_views;
      view_count = fb->image_view_count;
   } else {
      const VkRenderPassAttachmentBeginInfo *imageless_info =
         vk_find_struct_const(begin_info->pNext,
                              RENDER_PASS_ATTACHMENT_BEGIN_INFO);
      assert(imageless_info);
      views = imageless_info->pAttachments;
      view_count = imageless_info->attachmentCount;
   }

   const struct vn_image **images =
      vk_alloc(&cmd->pool->allocator, sizeof(*images) * pass->present_count,
               VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!images) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return;
   }

   for (uint32_t i = 0; i < pass->present_count; i++) {
      const uint32_t index = pass->present_attachments[i].index;
      assert(index < view_count);
      images[i] = vn_image_view_from_handle(views[index])->image;
   }

   if (pass->present_acquire_count) {
      vn_cmd_transfer_present_src_images(cmd, true, images,
                                         pass->present_acquire_attachments,
                                         pass->present_acquire_count);
   }

   cmd->builder.present_src_images = images;
}
593
/* Clear the builder's render pass state and record the release transitions
 * for present-src attachments collected by vn_cmd_begin_render_pass(), then
 * free the cached image array.
 */
static void
vn_cmd_end_render_pass(struct vn_command_buffer *cmd)
{
   const struct vn_render_pass *pass = cmd->builder.render_pass;
   const struct vn_image **images = cmd->builder.present_src_images;

   cmd->builder.render_pass = NULL;
   cmd->builder.present_src_images = NULL;
   cmd->builder.in_render_pass = false;
   cmd->builder.subpass_index = 0;
   cmd->builder.view_mask = 0;

   /* images may be NULL if the begin-time allocation failed */
   if (!pass->present_count || !images)
      return;

   if (pass->present_release_count) {
      /* NOTE(review): indexing past present_acquire_count assumes the
       * acquire attachments precede the release attachments in
       * pass->present_attachments — confirm in vn_render_pass.c
       */
      vn_cmd_transfer_present_src_images(
         cmd, false, images + pass->present_acquire_count,
         pass->present_release_attachments, pass->present_release_count);
   }

   vk_free(&cmd->pool->allocator, images);
}
617
618 static inline void
vn_cmd_next_subpass(struct vn_command_buffer * cmd)619 vn_cmd_next_subpass(struct vn_command_buffer *cmd)
620 {
621 cmd->builder.view_mask = vn_render_pass_get_subpass_view_mask(
622 cmd->builder.render_pass, ++cmd->builder.subpass_index);
623 }
624
625 static inline void
vn_cmd_begin_rendering(struct vn_command_buffer * cmd,const VkRenderingInfo * rendering_info)626 vn_cmd_begin_rendering(struct vn_command_buffer *cmd,
627 const VkRenderingInfo *rendering_info)
628 {
629 cmd->builder.in_render_pass = true;
630 cmd->builder.view_mask = rendering_info->viewMask;
631 }
632
633 static inline void
vn_cmd_end_rendering(struct vn_command_buffer * cmd)634 vn_cmd_end_rendering(struct vn_command_buffer *cmd)
635 {
636 cmd->builder.in_render_pass = false;
637 cmd->builder.view_mask = 0;
638 }
639
640 /* command pool commands */
641
/* Create the driver-side command pool and asynchronously mirror it on the
 * host ring; the host side shares the driver-created handle, so no reply is
 * awaited.
 */
VkResult
vn_CreateCommandPool(VkDevice device,
                     const VkCommandPoolCreateInfo *pCreateInfo,
                     const VkAllocationCallbacks *pAllocator,
                     VkCommandPool *pCommandPool)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_command_pool *pool =
      vk_zalloc(alloc, sizeof(*pool), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&pool->base, VK_OBJECT_TYPE_COMMAND_POOL, &dev->base);

   /* snapshot the callbacks so frees work after pAllocator goes away */
   pool->allocator = *alloc;
   pool->device = dev;
   pool->queue_family_index = pCreateInfo->queueFamilyIndex;
   list_inithead(&pool->command_buffers);
   list_inithead(&pool->free_query_batches);

   vn_cached_storage_init(&pool->storage, alloc);

   VkCommandPool pool_handle = vn_command_pool_to_handle(pool);
   vn_async_vkCreateCommandPool(dev->primary_ring, device, pCreateInfo, NULL,
                                &pool_handle);

   vn_tls_set_async_pipeline_create();

   *pCommandPool = pool_handle;

   return VK_SUCCESS;
}
679
/* Destroy the pool on the host, then free every command buffer still owned
 * by the pool along with its cached state, the pool's free query batches,
 * its cached storage, and finally the pool itself.
 */
void
vn_DestroyCommandPool(VkDevice device,
                      VkCommandPool commandPool,
                      const VkAllocationCallbacks *pAllocator)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_command_pool *pool = vn_command_pool_from_handle(commandPool);
   const VkAllocationCallbacks *alloc;

   if (!pool)
      return;

   alloc = pAllocator ? pAllocator : &pool->allocator;

   /* destroying the pool implicitly frees its command buffers host-side */
   vn_async_vkDestroyCommandPool(dev->primary_ring, device, commandPool,
                                 NULL);

   list_for_each_entry_safe(struct vn_command_buffer, cmd,
                            &pool->command_buffers, head) {
      vn_cs_encoder_fini(&cmd->cs);
      vn_object_base_fini(&cmd->base);

      if (cmd->builder.present_src_images)
         vk_free(alloc, cmd->builder.present_src_images);

      list_for_each_entry_safe(struct vn_feedback_query_batch, batch,
                               &cmd->builder.query_batches, head)
         vk_free(alloc, batch);

      if (cmd->linked_qfb_cmd) {
         vn_feedback_query_cmd_free(cmd->linked_qfb_cmd);
         cmd->linked_qfb_cmd = NULL;
      }

      vk_free(alloc, cmd);
   }

   list_for_each_entry_safe(struct vn_feedback_query_batch, batch,
                            &pool->free_query_batches, head)
      vk_free(alloc, batch);

   vn_cached_storage_fini(&pool->storage);

   vn_object_base_fini(&pool->base);
   vk_free(alloc, pool);
}
727
/* Return the command buffer to the INITIAL state: reset the encoder and
 * release the builder's cached WSI image array, batched query feedback, and
 * any linked query feedback command buffer.
 */
static void
vn_cmd_reset(struct vn_command_buffer *cmd)
{
   vn_cs_encoder_reset(&cmd->cs);

   cmd->state = VN_COMMAND_BUFFER_STATE_INITIAL;
   cmd->draw_cmd_batched = 0;

   if (cmd->builder.present_src_images)
      vk_free(&cmd->pool->allocator, cmd->builder.present_src_images);

   /* recycle query batches into the pool's free list rather than freeing */
   list_for_each_entry_safe(struct vn_feedback_query_batch, batch,
                            &cmd->builder.query_batches, head)
      list_move_to(&batch->head, &cmd->pool->free_query_batches);

   if (cmd->linked_qfb_cmd) {
      vn_feedback_query_cmd_free(cmd->linked_qfb_cmd);
      cmd->linked_qfb_cmd = NULL;
   }

   /* the builder is zeroed wholesale; re-init its embedded list afterwards */
   memset(&cmd->builder, 0, sizeof(cmd->builder));

   list_inithead(&cmd->builder.query_batches);
}
752
/* Reset every command buffer in the pool locally, optionally release cached
 * resources, then forward the reset to the host asynchronously.
 */
VkResult
vn_ResetCommandPool(VkDevice device,
                    VkCommandPool commandPool,
                    VkCommandPoolResetFlags flags)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_command_pool *pool = vn_command_pool_from_handle(commandPool);

   list_for_each_entry_safe(struct vn_command_buffer, cmd,
                            &pool->command_buffers, head)
      vn_cmd_reset(cmd);

   if (flags & VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT) {
      /* drop the recycled query batches and the cached scratch storage */
      list_for_each_entry_safe(struct vn_feedback_query_batch, batch,
                               &pool->free_query_batches, head)
         vk_free(&pool->allocator, batch);

      vn_cached_storage_fini(&pool->storage);
      vn_cached_storage_init(&pool->storage, &pool->allocator);
   }

   vn_async_vkResetCommandPool(dev->primary_ring, device, commandPool, flags);

   return VK_SUCCESS;
}
779
780 void
vn_TrimCommandPool(VkDevice device,VkCommandPool commandPool,VkCommandPoolTrimFlags flags)781 vn_TrimCommandPool(VkDevice device,
782 VkCommandPool commandPool,
783 VkCommandPoolTrimFlags flags)
784 {
785 VN_TRACE_FUNC();
786 struct vn_device *dev = vn_device_from_handle(device);
787
788 vn_async_vkTrimCommandPool(dev->primary_ring, device, commandPool, flags);
789 }
790
791 /* command buffer commands */
792
/* Allocate driver-side command buffers, link them into the pool, and mirror
 * the allocation on the host ring.  On OOM, every command buffer created by
 * this call is unwound and pCommandBuffers is zeroed, per the Vulkan spec.
 */
VkResult
vn_AllocateCommandBuffers(VkDevice device,
                          const VkCommandBufferAllocateInfo *pAllocateInfo,
                          VkCommandBuffer *pCommandBuffers)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_command_pool *pool =
      vn_command_pool_from_handle(pAllocateInfo->commandPool);
   const VkAllocationCallbacks *alloc = &pool->allocator;

   for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
      struct vn_command_buffer *cmd =
         vk_zalloc(alloc, sizeof(*cmd), VN_DEFAULT_ALIGN,
                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
      if (!cmd) {
         /* unwind the command buffers allocated by this call only */
         for (uint32_t j = 0; j < i; j++) {
            cmd = vn_command_buffer_from_handle(pCommandBuffers[j]);
            vn_cs_encoder_fini(&cmd->cs);
            list_del(&cmd->head);
            vn_object_base_fini(&cmd->base);
            vk_free(alloc, cmd);
         }
         memset(pCommandBuffers, 0,
                sizeof(*pCommandBuffers) * pAllocateInfo->commandBufferCount);
         return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
      }

      vn_object_base_init(&cmd->base, VK_OBJECT_TYPE_COMMAND_BUFFER,
                          &dev->base);
      cmd->pool = pool;
      cmd->level = pAllocateInfo->level;
      cmd->state = VN_COMMAND_BUFFER_STATE_INITIAL;
      vn_cs_encoder_init(&cmd->cs, dev->instance,
                         VN_CS_ENCODER_STORAGE_SHMEM_POOL, 16 * 1024);

      list_inithead(&cmd->builder.query_batches);

      list_addtail(&cmd->head, &pool->command_buffers);

      VkCommandBuffer cmd_handle = vn_command_buffer_to_handle(cmd);
      pCommandBuffers[i] = cmd_handle;
   }

   vn_async_vkAllocateCommandBuffers(dev->primary_ring, device, pAllocateInfo,
                                     pCommandBuffers);

   return VK_SUCCESS;
}
842
/* Free command buffers on the host first, then tear down the driver-side
 * objects and their cached state.  NULL handles are skipped, per the Vulkan
 * spec.
 */
void
vn_FreeCommandBuffers(VkDevice device,
                      VkCommandPool commandPool,
                      uint32_t commandBufferCount,
                      const VkCommandBuffer *pCommandBuffers)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_command_pool *pool = vn_command_pool_from_handle(commandPool);
   const VkAllocationCallbacks *alloc = &pool->allocator;

   vn_async_vkFreeCommandBuffers(dev->primary_ring, device, commandPool,
                                 commandBufferCount, pCommandBuffers);

   for (uint32_t i = 0; i < commandBufferCount; i++) {
      struct vn_command_buffer *cmd =
         vn_command_buffer_from_handle(pCommandBuffers[i]);

      if (!cmd)
         continue;

      vn_cs_encoder_fini(&cmd->cs);
      list_del(&cmd->head);

      if (cmd->builder.present_src_images)
         vk_free(alloc, cmd->builder.present_src_images);

      /* recycle query batches into the pool's free list */
      list_for_each_entry_safe(struct vn_feedback_query_batch, batch,
                               &cmd->builder.query_batches, head)
         list_move_to(&batch->head, &cmd->pool->free_query_batches);

      if (cmd->linked_qfb_cmd) {
         vn_feedback_query_cmd_free(cmd->linked_qfb_cmd);
         cmd->linked_qfb_cmd = NULL;
      }

      vn_object_base_fini(&cmd->base);
      vk_free(alloc, cmd);
   }
}
883
884 VkResult
vn_ResetCommandBuffer(VkCommandBuffer commandBuffer,VkCommandBufferResetFlags flags)885 vn_ResetCommandBuffer(VkCommandBuffer commandBuffer,
886 VkCommandBufferResetFlags flags)
887 {
888 VN_TRACE_FUNC();
889 struct vn_command_buffer *cmd =
890 vn_command_buffer_from_handle(commandBuffer);
891 struct vn_ring *ring = cmd->pool->device->primary_ring;
892
893 vn_cmd_reset(cmd);
894
895 vn_async_vkResetCommandBuffer(ring, commandBuffer, flags);
896
897 return VK_SUCCESS;
898 }
899
/* Scratch storage used by vn_fix_command_buffer_begin_info to build a
 * fixed-up copy of VkCommandBufferBeginInfo without heap allocation.
 */
struct vn_command_buffer_begin_info {
   VkCommandBufferBeginInfo begin;
   VkCommandBufferInheritanceInfo inheritance;
   VkCommandBufferInheritanceConditionalRenderingInfoEXT conditional_rendering;

   /* true if a secondary cmd buffer continues an inherited render pass */
   bool has_inherited_pass;
   /* true if the cmd buffer is considered entirely inside a render pass */
   bool in_render_pass;
};
908
909 static const VkCommandBufferBeginInfo *
vn_fix_command_buffer_begin_info(struct vn_command_buffer * cmd,const VkCommandBufferBeginInfo * begin_info,struct vn_command_buffer_begin_info * local)910 vn_fix_command_buffer_begin_info(struct vn_command_buffer *cmd,
911 const VkCommandBufferBeginInfo *begin_info,
912 struct vn_command_buffer_begin_info *local)
913 {
914 local->has_inherited_pass = false;
915
916 if (!begin_info->pInheritanceInfo)
917 return begin_info;
918
919 const bool is_cmd_secondary =
920 cmd->level == VK_COMMAND_BUFFER_LEVEL_SECONDARY;
921 const bool has_continue =
922 begin_info->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
923 const bool has_renderpass =
924 is_cmd_secondary &&
925 begin_info->pInheritanceInfo->renderPass != VK_NULL_HANDLE;
926
927 /* Per spec 1.3.255: "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT
928 * specifies that a secondary command buffer is considered to be
929 * entirely inside a render pass. If this is a primary command buffer,
930 * then this bit is ignored."
931 */
932 local->in_render_pass = has_continue && is_cmd_secondary;
933
934 /* Can early-return if dynamic rendering is used and no structures need to
935 * be dropped from the pNext chain of VkCommandBufferInheritanceInfo.
936 */
937 if (is_cmd_secondary && has_continue && !has_renderpass)
938 return begin_info;
939
940 local->begin = *begin_info;
941
942 if (!is_cmd_secondary) {
943 local->begin.pInheritanceInfo = NULL;
944 return &local->begin;
945 }
946
947 local->inheritance = *begin_info->pInheritanceInfo;
948 local->begin.pInheritanceInfo = &local->inheritance;
949
950 if (!has_continue) {
951 local->inheritance.framebuffer = VK_NULL_HANDLE;
952 local->inheritance.renderPass = VK_NULL_HANDLE;
953 local->inheritance.subpass = 0;
954 } else {
955 /* With early-returns above, it must be an inherited pass. */
956 local->has_inherited_pass = true;
957 }
958
959 /* Per spec, about VkCommandBufferInheritanceRenderingInfo:
960 *
961 * If VkCommandBufferInheritanceInfo::renderPass is not VK_NULL_HANDLE, or
962 * VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT is not specified in
963 * VkCommandBufferBeginInfo::flags, parameters of this structure are
964 * ignored.
965 */
966 VkBaseOutStructure *head = NULL;
967 VkBaseOutStructure *tail = NULL;
968 vk_foreach_struct_const(src, local->inheritance.pNext) {
969 void *pnext = NULL;
970 switch (src->sType) {
971 case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT:
972 memcpy(
973 &local->conditional_rendering, src,
974 sizeof(VkCommandBufferInheritanceConditionalRenderingInfoEXT));
975 pnext = &local->conditional_rendering;
976 break;
977 case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO:
978 default:
979 break;
980 }
981
982 if (pnext) {
983 if (!head)
984 head = pnext;
985 else
986 tail->pNext = pnext;
987
988 tail = pnext;
989 }
990 }
991 local->inheritance.pNext = head;
992
993 return &local->begin;
994 }
995
/**
 * Implicitly resets the command buffer, fixes up the begin info for the
 * renderer, and encodes vkBeginCommandBuffer into the command stream.
 * For secondary command buffers it also captures inheritance state
 * (render-pass membership, view mask) used by query feedback.
 */
VkResult
vn_BeginCommandBuffer(VkCommandBuffer commandBuffer,
                      const VkCommandBufferBeginInfo *pBeginInfo)
{
   VN_TRACE_FUNC();
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   struct vn_instance *instance = cmd->pool->device->instance;
   size_t cmd_size;

   /* reset regardless of VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT */
   vn_cmd_reset(cmd);

   /* may redirect pBeginInfo to a sanitized copy in local_begin_info */
   struct vn_command_buffer_begin_info local_begin_info;
   pBeginInfo =
      vn_fix_command_buffer_begin_info(cmd, pBeginInfo, &local_begin_info);

   cmd_size = vn_sizeof_vkBeginCommandBuffer(commandBuffer, pBeginInfo);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size)) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }
   cmd->builder.is_simultaneous =
      pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;

   vn_encode_vkBeginCommandBuffer(&cmd->cs, 0, commandBuffer, pBeginInfo);

   cmd->state = VN_COMMAND_BUFFER_STATE_RECORDING;

   const VkCommandBufferInheritanceInfo *inheritance_info =
      pBeginInfo->pInheritanceInfo;

   if (inheritance_info) {
      cmd->builder.in_render_pass = local_begin_info.in_render_pass;

      if (local_begin_info.has_inherited_pass) {
         /* Store the viewMask from the inherited render pass subpass for
          * query feedback.
          */
         cmd->builder.view_mask = vn_render_pass_get_subpass_view_mask(
            vn_render_pass_from_handle(inheritance_info->renderPass),
            inheritance_info->subpass);
      } else {
         /* Store the viewMask from the
          * VkCommandBufferInheritanceRenderingInfo.
          */
         const VkCommandBufferInheritanceRenderingInfo
            *inheritance_rendering_info = vk_find_struct_const(
               inheritance_info->pNext,
               COMMAND_BUFFER_INHERITANCE_RENDERING_INFO);
         if (inheritance_rendering_info)
            cmd->builder.view_mask = inheritance_rendering_info->viewMask;
      }
   }

   return VK_SUCCESS;
}
1053
/* Flushes the batched command stream to the renderer.  On any failure the
 * command buffer is marked invalid; the error surfaces at
 * vkEndCommandBuffer time.
 */
static void
vn_cmd_submit(struct vn_command_buffer *cmd)
{
   struct vn_ring *ring = cmd->pool->device->primary_ring;

   /* nothing to flush unless we are actively recording */
   if (cmd->state != VN_COMMAND_BUFFER_STATE_RECORDING)
      return;

   vn_cs_encoder_commit(&cmd->cs);
   if (vn_cs_encoder_get_fatal(&cmd->cs)) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      vn_cs_encoder_reset(&cmd->cs);
      return;
   }

   if (vn_ring_submit_command_simple(ring, &cmd->cs) != VK_SUCCESS) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return;
   }

   /* stream consumed; restart batching from an empty encoder */
   vn_cs_encoder_reset(&cmd->cs);
   cmd->draw_cmd_batched = 0;
}
1077
/* Counts one draw call and flushes the command stream once the configurable
 * batch limit (vn_env.draw_cmd_batch_limit) is reached.
 */
static inline void
vn_cmd_count_draw_and_submit_on_batch_limit(struct vn_command_buffer *cmd)
{
   if (++cmd->draw_cmd_batched >= vn_env.draw_cmd_batch_limit)
      vn_cmd_submit(cmd);
}
1084
/**
 * Encodes vkEndCommandBuffer and submits the remaining batched commands.
 * Any recording-time failure is reported as VK_ERROR_OUT_OF_HOST_MEMORY,
 * matching the errors vkEndCommandBuffer is allowed to return.
 */
VkResult
vn_EndCommandBuffer(VkCommandBuffer commandBuffer)
{
   VN_TRACE_FUNC();
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   struct vn_instance *instance = cmd->pool->device->instance;
   size_t cmd_size;

   /* an earlier encode failure leaves the state INVALID, not RECORDING */
   if (cmd->state != VN_COMMAND_BUFFER_STATE_RECORDING)
      return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   cmd_size = vn_sizeof_vkEndCommandBuffer(commandBuffer);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size)) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   vn_encode_vkEndCommandBuffer(&cmd->cs, 0, commandBuffer);

   /* flush everything recorded so far; may invalidate the state */
   vn_cmd_submit(cmd);
   if (cmd->state == VN_COMMAND_BUFFER_STATE_INVALID)
      return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   cmd->state = VN_COMMAND_BUFFER_STATE_EXECUTABLE;

   return VK_SUCCESS;
}
1113
/* The state-setting entrypoints below need no driver-side bookkeeping: each
 * simply encodes the corresponding command into the command stream via
 * VN_CMD_ENQUEUE.
 */
void
vn_CmdBindPipeline(VkCommandBuffer commandBuffer,
                   VkPipelineBindPoint pipelineBindPoint,
                   VkPipeline pipeline)
{
   VN_CMD_ENQUEUE(vkCmdBindPipeline, commandBuffer, pipelineBindPoint,
                  pipeline);
}

void
vn_CmdSetViewport(VkCommandBuffer commandBuffer,
                  uint32_t firstViewport,
                  uint32_t viewportCount,
                  const VkViewport *pViewports)
{
   VN_CMD_ENQUEUE(vkCmdSetViewport, commandBuffer, firstViewport,
                  viewportCount, pViewports);
}

void
vn_CmdSetScissor(VkCommandBuffer commandBuffer,
                 uint32_t firstScissor,
                 uint32_t scissorCount,
                 const VkRect2D *pScissors)
{
   VN_CMD_ENQUEUE(vkCmdSetScissor, commandBuffer, firstScissor, scissorCount,
                  pScissors);
}

void
vn_CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
{
   VN_CMD_ENQUEUE(vkCmdSetLineWidth, commandBuffer, lineWidth);
}

void
vn_CmdSetDepthBias(VkCommandBuffer commandBuffer,
                   float depthBiasConstantFactor,
                   float depthBiasClamp,
                   float depthBiasSlopeFactor)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthBias, commandBuffer, depthBiasConstantFactor,
                  depthBiasClamp, depthBiasSlopeFactor);
}

void
vn_CmdSetBlendConstants(VkCommandBuffer commandBuffer,
                        const float blendConstants[4])
{
   VN_CMD_ENQUEUE(vkCmdSetBlendConstants, commandBuffer, blendConstants);
}

void
vn_CmdSetDepthBounds(VkCommandBuffer commandBuffer,
                     float minDepthBounds,
                     float maxDepthBounds)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthBounds, commandBuffer, minDepthBounds,
                  maxDepthBounds);
}

void
vn_CmdSetStencilCompareMask(VkCommandBuffer commandBuffer,
                            VkStencilFaceFlags faceMask,
                            uint32_t compareMask)
{
   VN_CMD_ENQUEUE(vkCmdSetStencilCompareMask, commandBuffer, faceMask,
                  compareMask);
}

void
vn_CmdSetStencilWriteMask(VkCommandBuffer commandBuffer,
                          VkStencilFaceFlags faceMask,
                          uint32_t writeMask)
{
   VN_CMD_ENQUEUE(vkCmdSetStencilWriteMask, commandBuffer, faceMask,
                  writeMask);
}

void
vn_CmdSetStencilReference(VkCommandBuffer commandBuffer,
                          VkStencilFaceFlags faceMask,
                          uint32_t reference)
{
   VN_CMD_ENQUEUE(vkCmdSetStencilReference, commandBuffer, faceMask,
                  reference);
}
1201
/* Binding commands: pure pass-through encodes, no driver-side tracking. */
void
vn_CmdBindDescriptorSets(VkCommandBuffer commandBuffer,
                         VkPipelineBindPoint pipelineBindPoint,
                         VkPipelineLayout layout,
                         uint32_t firstSet,
                         uint32_t descriptorSetCount,
                         const VkDescriptorSet *pDescriptorSets,
                         uint32_t dynamicOffsetCount,
                         const uint32_t *pDynamicOffsets)
{
   VN_CMD_ENQUEUE(vkCmdBindDescriptorSets, commandBuffer, pipelineBindPoint,
                  layout, firstSet, descriptorSetCount, pDescriptorSets,
                  dynamicOffsetCount, pDynamicOffsets);
}

void
vn_CmdBindIndexBuffer(VkCommandBuffer commandBuffer,
                      VkBuffer buffer,
                      VkDeviceSize offset,
                      VkIndexType indexType)
{
   VN_CMD_ENQUEUE(vkCmdBindIndexBuffer, commandBuffer, buffer, offset,
                  indexType);
}

void
vn_CmdBindVertexBuffers(VkCommandBuffer commandBuffer,
                        uint32_t firstBinding,
                        uint32_t bindingCount,
                        const VkBuffer *pBuffers,
                        const VkDeviceSize *pOffsets)
{
   VN_CMD_ENQUEUE(vkCmdBindVertexBuffers, commandBuffer, firstBinding,
                  bindingCount, pBuffers, pOffsets);
}
1237
void
vn_CmdDraw(VkCommandBuffer commandBuffer,
           uint32_t vertexCount,
           uint32_t instanceCount,
           uint32_t firstVertex,
           uint32_t firstInstance)
{
   VN_CMD_ENQUEUE(vkCmdDraw, commandBuffer, vertexCount, instanceCount,
                  firstVertex, firstInstance);

   /* draws are counted so the batch can be flushed early at the limit */
   vn_cmd_count_draw_and_submit_on_batch_limit(
      vn_command_buffer_from_handle(commandBuffer));
}
1251
void
vn_CmdBeginRendering(VkCommandBuffer commandBuffer,
                     const VkRenderingInfo *pRenderingInfo)
{
   /* update driver-side dynamic-rendering tracking before encoding */
   vn_cmd_begin_rendering(vn_command_buffer_from_handle(commandBuffer),
                          pRenderingInfo);

   VN_CMD_ENQUEUE(vkCmdBeginRendering, commandBuffer, pRenderingInfo);
}

void
vn_CmdEndRendering(VkCommandBuffer commandBuffer)
{
   VN_CMD_ENQUEUE(vkCmdEndRendering, commandBuffer);

   /* tracking is torn down after encoding, mirroring BeginRendering */
   vn_cmd_end_rendering(vn_command_buffer_from_handle(commandBuffer));
}
1269
/* Remaining draw variants: encode the command, then count the draw toward
 * the batch limit (see vn_cmd_count_draw_and_submit_on_batch_limit).
 */
void
vn_CmdDrawIndexed(VkCommandBuffer commandBuffer,
                  uint32_t indexCount,
                  uint32_t instanceCount,
                  uint32_t firstIndex,
                  int32_t vertexOffset,
                  uint32_t firstInstance)
{
   VN_CMD_ENQUEUE(vkCmdDrawIndexed, commandBuffer, indexCount, instanceCount,
                  firstIndex, vertexOffset, firstInstance);

   vn_cmd_count_draw_and_submit_on_batch_limit(
      vn_command_buffer_from_handle(commandBuffer));
}

void
vn_CmdDrawIndirect(VkCommandBuffer commandBuffer,
                   VkBuffer buffer,
                   VkDeviceSize offset,
                   uint32_t drawCount,
                   uint32_t stride)
{
   VN_CMD_ENQUEUE(vkCmdDrawIndirect, commandBuffer, buffer, offset, drawCount,
                  stride);

   vn_cmd_count_draw_and_submit_on_batch_limit(
      vn_command_buffer_from_handle(commandBuffer));
}

void
vn_CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer,
                          VkBuffer buffer,
                          VkDeviceSize offset,
                          uint32_t drawCount,
                          uint32_t stride)
{
   VN_CMD_ENQUEUE(vkCmdDrawIndexedIndirect, commandBuffer, buffer, offset,
                  drawCount, stride);

   vn_cmd_count_draw_and_submit_on_batch_limit(
      vn_command_buffer_from_handle(commandBuffer));
}

void
vn_CmdDrawIndirectCount(VkCommandBuffer commandBuffer,
                        VkBuffer buffer,
                        VkDeviceSize offset,
                        VkBuffer countBuffer,
                        VkDeviceSize countBufferOffset,
                        uint32_t maxDrawCount,
                        uint32_t stride)
{
   VN_CMD_ENQUEUE(vkCmdDrawIndirectCount, commandBuffer, buffer, offset,
                  countBuffer, countBufferOffset, maxDrawCount, stride);

   vn_cmd_count_draw_and_submit_on_batch_limit(
      vn_command_buffer_from_handle(commandBuffer));
}

void
vn_CmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer,
                               VkBuffer buffer,
                               VkDeviceSize offset,
                               VkBuffer countBuffer,
                               VkDeviceSize countBufferOffset,
                               uint32_t maxDrawCount,
                               uint32_t stride)
{
   VN_CMD_ENQUEUE(vkCmdDrawIndexedIndirectCount, commandBuffer, buffer,
                  offset, countBuffer, countBufferOffset, maxDrawCount,
                  stride);

   vn_cmd_count_draw_and_submit_on_batch_limit(
      vn_command_buffer_from_handle(commandBuffer));
}
1345
/* Dispatch commands: pass-through encodes; dispatches do not count toward
 * the draw batch limit.
 */
void
vn_CmdDispatch(VkCommandBuffer commandBuffer,
               uint32_t groupCountX,
               uint32_t groupCountY,
               uint32_t groupCountZ)
{
   VN_CMD_ENQUEUE(vkCmdDispatch, commandBuffer, groupCountX, groupCountY,
                  groupCountZ);
}

void
vn_CmdDispatchIndirect(VkCommandBuffer commandBuffer,
                       VkBuffer buffer,
                       VkDeviceSize offset)
{
   VN_CMD_ENQUEUE(vkCmdDispatchIndirect, commandBuffer, buffer, offset);
}
1363
/* Transfer commands below are pass-through encodes via VN_CMD_ENQUEUE. */
void
vn_CmdCopyBuffer(VkCommandBuffer commandBuffer,
                 VkBuffer srcBuffer,
                 VkBuffer dstBuffer,
                 uint32_t regionCount,
                 const VkBufferCopy *pRegions)
{
   VN_CMD_ENQUEUE(vkCmdCopyBuffer, commandBuffer, srcBuffer, dstBuffer,
                  regionCount, pRegions);
}

void
vn_CmdCopyBuffer2(VkCommandBuffer commandBuffer,
                  const VkCopyBufferInfo2 *pCopyBufferInfo)
{
   VN_CMD_ENQUEUE(vkCmdCopyBuffer2, commandBuffer, pCopyBufferInfo);
}

void
vn_CmdCopyImage(VkCommandBuffer commandBuffer,
                VkImage srcImage,
                VkImageLayout srcImageLayout,
                VkImage dstImage,
                VkImageLayout dstImageLayout,
                uint32_t regionCount,
                const VkImageCopy *pRegions)
{
   VN_CMD_ENQUEUE(vkCmdCopyImage, commandBuffer, srcImage, srcImageLayout,
                  dstImage, dstImageLayout, regionCount, pRegions);
}

void
vn_CmdCopyImage2(VkCommandBuffer commandBuffer,
                 const VkCopyImageInfo2 *pCopyImageInfo)
{
   VN_CMD_ENQUEUE(vkCmdCopyImage2, commandBuffer, pCopyImageInfo);
}

void
vn_CmdBlitImage(VkCommandBuffer commandBuffer,
                VkImage srcImage,
                VkImageLayout srcImageLayout,
                VkImage dstImage,
                VkImageLayout dstImageLayout,
                uint32_t regionCount,
                const VkImageBlit *pRegions,
                VkFilter filter)
{
   VN_CMD_ENQUEUE(vkCmdBlitImage, commandBuffer, srcImage, srcImageLayout,
                  dstImage, dstImageLayout, regionCount, pRegions, filter);
}

void
vn_CmdBlitImage2(VkCommandBuffer commandBuffer,
                 const VkBlitImageInfo2 *pBlitImageInfo)
{
   VN_CMD_ENQUEUE(vkCmdBlitImage2, commandBuffer, pBlitImageInfo);
}

void
vn_CmdCopyBufferToImage(VkCommandBuffer commandBuffer,
                        VkBuffer srcBuffer,
                        VkImage dstImage,
                        VkImageLayout dstImageLayout,
                        uint32_t regionCount,
                        const VkBufferImageCopy *pRegions)
{
   VN_CMD_ENQUEUE(vkCmdCopyBufferToImage, commandBuffer, srcBuffer, dstImage,
                  dstImageLayout, regionCount, pRegions);
}

void
vn_CmdCopyBufferToImage2(
   VkCommandBuffer commandBuffer,
   const VkCopyBufferToImageInfo2 *pCopyBufferToImageInfo)
{
   VN_CMD_ENQUEUE(vkCmdCopyBufferToImage2, commandBuffer,
                  pCopyBufferToImageInfo);
}
1443
1444 static bool
vn_needs_prime_blit(VkImage src_image,VkImageLayout src_image_layout)1445 vn_needs_prime_blit(VkImage src_image, VkImageLayout src_image_layout)
1446 {
1447 if (src_image_layout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR &&
1448 VN_PRESENT_SRC_INTERNAL_LAYOUT != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
1449
1450 /* sanity check */
1451 ASSERTED const struct vn_image *img = vn_image_from_handle(src_image);
1452 assert(img->wsi.is_wsi && img->wsi.is_prime_blit_src);
1453 return true;
1454 }
1455
1456 return false;
1457 }
1458
/* Encodes a queue-family release of the prime blit destination buffer to
 * VK_QUEUE_FAMILY_FOREIGN_EXT so an external consumer can take ownership.
 */
static void
vn_transition_prime_layout(struct vn_command_buffer *cmd, VkBuffer dst_buffer)
{
   const VkBufferMemoryBarrier buf_barrier = {
      .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
      .srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
      .srcQueueFamilyIndex = cmd->pool->queue_family_index,
      .dstQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT,
      .buffer = dst_buffer,
      .size = VK_WHOLE_SIZE,
   };
   vn_cmd_encode_memory_barriers(cmd, VK_PIPELINE_STAGE_TRANSFER_BIT,
                                 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 1,
                                 &buf_barrier, 0, NULL);
}
1474
void
vn_CmdCopyImageToBuffer(VkCommandBuffer commandBuffer,
                        VkImage srcImage,
                        VkImageLayout srcImageLayout,
                        VkBuffer dstBuffer,
                        uint32_t regionCount,
                        const VkBufferImageCopy *pRegions)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);

   /* for prime blits, substitute the internal present-src layout */
   bool prime_blit = vn_needs_prime_blit(srcImage, srcImageLayout);
   if (prime_blit)
      srcImageLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

   VN_CMD_ENQUEUE(vkCmdCopyImageToBuffer, commandBuffer, srcImage,
                  srcImageLayout, dstBuffer, regionCount, pRegions);

   /* release the destination buffer to the foreign queue family */
   if (prime_blit)
      vn_transition_prime_layout(cmd, dstBuffer);
}
1496
void
vn_CmdCopyImageToBuffer2(
   VkCommandBuffer commandBuffer,
   const VkCopyImageToBufferInfo2 *pCopyImageToBufferInfo)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   /* local copy so the layout can be rewritten for prime blits */
   struct VkCopyImageToBufferInfo2 copy_info = *pCopyImageToBufferInfo;

   bool prime_blit =
      vn_needs_prime_blit(copy_info.srcImage, copy_info.srcImageLayout);
   if (prime_blit)
      copy_info.srcImageLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

   VN_CMD_ENQUEUE(vkCmdCopyImageToBuffer2, commandBuffer, &copy_info);

   /* release the destination buffer to the foreign queue family */
   if (prime_blit)
      vn_transition_prime_layout(cmd, copy_info.dstBuffer);
}
1516
/* Fill/clear/resolve commands: pass-through encodes via VN_CMD_ENQUEUE. */
void
vn_CmdUpdateBuffer(VkCommandBuffer commandBuffer,
                   VkBuffer dstBuffer,
                   VkDeviceSize dstOffset,
                   VkDeviceSize dataSize,
                   const void *pData)
{
   VN_CMD_ENQUEUE(vkCmdUpdateBuffer, commandBuffer, dstBuffer, dstOffset,
                  dataSize, pData);
}

void
vn_CmdFillBuffer(VkCommandBuffer commandBuffer,
                 VkBuffer dstBuffer,
                 VkDeviceSize dstOffset,
                 VkDeviceSize size,
                 uint32_t data)
{
   VN_CMD_ENQUEUE(vkCmdFillBuffer, commandBuffer, dstBuffer, dstOffset, size,
                  data);
}

void
vn_CmdClearColorImage(VkCommandBuffer commandBuffer,
                      VkImage image,
                      VkImageLayout imageLayout,
                      const VkClearColorValue *pColor,
                      uint32_t rangeCount,
                      const VkImageSubresourceRange *pRanges)
{
   VN_CMD_ENQUEUE(vkCmdClearColorImage, commandBuffer, image, imageLayout,
                  pColor, rangeCount, pRanges);
}

void
vn_CmdClearDepthStencilImage(VkCommandBuffer commandBuffer,
                             VkImage image,
                             VkImageLayout imageLayout,
                             const VkClearDepthStencilValue *pDepthStencil,
                             uint32_t rangeCount,
                             const VkImageSubresourceRange *pRanges)
{
   VN_CMD_ENQUEUE(vkCmdClearDepthStencilImage, commandBuffer, image,
                  imageLayout, pDepthStencil, rangeCount, pRanges);
}

void
vn_CmdClearAttachments(VkCommandBuffer commandBuffer,
                       uint32_t attachmentCount,
                       const VkClearAttachment *pAttachments,
                       uint32_t rectCount,
                       const VkClearRect *pRects)
{
   VN_CMD_ENQUEUE(vkCmdClearAttachments, commandBuffer, attachmentCount,
                  pAttachments, rectCount, pRects);
}

void
vn_CmdResolveImage(VkCommandBuffer commandBuffer,
                   VkImage srcImage,
                   VkImageLayout srcImageLayout,
                   VkImage dstImage,
                   VkImageLayout dstImageLayout,
                   uint32_t regionCount,
                   const VkImageResolve *pRegions)
{
   VN_CMD_ENQUEUE(vkCmdResolveImage, commandBuffer, srcImage, srcImageLayout,
                  dstImage, dstImageLayout, regionCount, pRegions);
}

void
vn_CmdResolveImage2(VkCommandBuffer commandBuffer,
                    const VkResolveImageInfo2 *pResolveImageInfo)
{
   VN_CMD_ENQUEUE(vkCmdResolveImage2, commandBuffer, pResolveImageInfo);
}
1593
void
vn_CmdSetEvent(VkCommandBuffer commandBuffer,
               VkEvent event,
               VkPipelineStageFlags stageMask)
{
   VN_CMD_ENQUEUE(vkCmdSetEvent, commandBuffer, event, stageMask);

   /* record a feedback cmd so the event state becomes driver-visible */
   vn_event_feedback_cmd_record(commandBuffer, event, stageMask, VK_EVENT_SET,
                                false);
}
1604
1605 static VkPipelineStageFlags2
vn_dependency_info_collect_src_stage_mask(const VkDependencyInfo * dep_info)1606 vn_dependency_info_collect_src_stage_mask(const VkDependencyInfo *dep_info)
1607 {
1608 VkPipelineStageFlags2 mask = 0;
1609
1610 for (uint32_t i = 0; i < dep_info->memoryBarrierCount; i++)
1611 mask |= dep_info->pMemoryBarriers[i].srcStageMask;
1612
1613 for (uint32_t i = 0; i < dep_info->bufferMemoryBarrierCount; i++)
1614 mask |= dep_info->pBufferMemoryBarriers[i].srcStageMask;
1615
1616 for (uint32_t i = 0; i < dep_info->imageMemoryBarrierCount; i++)
1617 mask |= dep_info->pImageMemoryBarriers[i].srcStageMask;
1618
1619 return mask;
1620 }
1621
void
vn_CmdSetEvent2(VkCommandBuffer commandBuffer,
                VkEvent event,
                const VkDependencyInfo *pDependencyInfo)

{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);

   /* may redirect to a fixed-up copy of the dependency info */
   pDependencyInfo = vn_cmd_fix_dependency_infos(cmd, 1, pDependencyInfo);

   VN_CMD_ENQUEUE(vkCmdSetEvent2, commandBuffer, event, pDependencyInfo);

   /* feedback waits on the union of all barriers' src stages */
   const VkPipelineStageFlags2 src_stage_mask =
      vn_dependency_info_collect_src_stage_mask(pDependencyInfo);
   vn_event_feedback_cmd_record(commandBuffer, event, src_stage_mask,
                                VK_EVENT_SET, true);
}
1640
void
vn_CmdResetEvent(VkCommandBuffer commandBuffer,
                 VkEvent event,
                 VkPipelineStageFlags stageMask)
{
   VN_CMD_ENQUEUE(vkCmdResetEvent, commandBuffer, event, stageMask);

   /* record a feedback cmd so the event state becomes driver-visible */
   vn_event_feedback_cmd_record(commandBuffer, event, stageMask,
                                VK_EVENT_RESET, false);
}

void
vn_CmdResetEvent2(VkCommandBuffer commandBuffer,
                  VkEvent event,
                  VkPipelineStageFlags2 stageMask)
{
   VN_CMD_ENQUEUE(vkCmdResetEvent2, commandBuffer, event, stageMask);
   /* sync2 variant of the feedback record above */
   vn_event_feedback_cmd_record(commandBuffer, event, stageMask,
                                VK_EVENT_RESET, true);
}
1661
void
vn_CmdWaitEvents(VkCommandBuffer commandBuffer,
                 uint32_t eventCount,
                 const VkEvent *pEvents,
                 VkPipelineStageFlags srcStageMask,
                 VkPipelineStageFlags dstStageMask,
                 uint32_t memoryBarrierCount,
                 const VkMemoryBarrier *pMemoryBarriers,
                 uint32_t bufferMemoryBarrierCount,
                 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                 uint32_t imageMemoryBarrierCount,
                 const VkImageMemoryBarrier *pImageMemoryBarriers)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   uint32_t transfer_count;

   /* The fixup partitions the image barriers: the first
    * (imageMemoryBarrierCount - transfer_count) entries go through
    * vkCmdWaitEvents, the trailing transfer_count entries are encoded
    * separately below.
    */
   pImageMemoryBarriers = vn_cmd_wait_events_fix_image_memory_barriers(
      cmd, pImageMemoryBarriers, imageMemoryBarrierCount, &transfer_count);
   imageMemoryBarrierCount -= transfer_count;

   VN_CMD_ENQUEUE(vkCmdWaitEvents, commandBuffer, eventCount, pEvents,
                  srcStageMask, dstStageMask, memoryBarrierCount,
                  pMemoryBarriers, bufferMemoryBarrierCount,
                  pBufferMemoryBarriers, imageMemoryBarrierCount,
                  pImageMemoryBarriers);

   if (transfer_count) {
      /* advance past the barriers already encoded above */
      pImageMemoryBarriers += imageMemoryBarrierCount;
      vn_cmd_encode_memory_barriers(cmd, srcStageMask, dstStageMask, 0, NULL,
                                    transfer_count, pImageMemoryBarriers);
   }
}
1695
void
vn_CmdWaitEvents2(VkCommandBuffer commandBuffer,
                  uint32_t eventCount,
                  const VkEvent *pEvents,
                  const VkDependencyInfo *pDependencyInfos)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);

   /* Rewrite the per-event dependency infos (one per event) before
    * encoding; may return a driver-owned fixed-up copy.
    */
   pDependencyInfos =
      vn_cmd_fix_dependency_infos(cmd, eventCount, pDependencyInfos);

   VN_CMD_ENQUEUE(vkCmdWaitEvents2, commandBuffer, eventCount, pEvents,
                  pDependencyInfos);
}
1711
void
vn_CmdPipelineBarrier(VkCommandBuffer commandBuffer,
                      VkPipelineStageFlags srcStageMask,
                      VkPipelineStageFlags dstStageMask,
                      VkDependencyFlags dependencyFlags,
                      uint32_t memoryBarrierCount,
                      const VkMemoryBarrier *pMemoryBarriers,
                      uint32_t bufferMemoryBarrierCount,
                      const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                      uint32_t imageMemoryBarrierCount,
                      const VkImageMemoryBarrier *pImageMemoryBarriers)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);

   /* Fix up image barriers (e.g. presentation-related layouts) before
    * encoding; may return a driver-owned rewritten copy.
    */
   pImageMemoryBarriers = vn_cmd_pipeline_barrier_fix_image_memory_barriers(
      cmd, pImageMemoryBarriers, imageMemoryBarrierCount);

   VN_CMD_ENQUEUE(vkCmdPipelineBarrier, commandBuffer, srcStageMask,
                  dstStageMask, dependencyFlags, memoryBarrierCount,
                  pMemoryBarriers, bufferMemoryBarrierCount,
                  pBufferMemoryBarriers, imageMemoryBarrierCount,
                  pImageMemoryBarriers);
}
1736
void
vn_CmdPipelineBarrier2(VkCommandBuffer commandBuffer,
                       const VkDependencyInfo *pDependencyInfo)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);

   /* Single dependency info; same fixup path as vn_CmdWaitEvents2. */
   pDependencyInfo = vn_cmd_fix_dependency_infos(cmd, 1, pDependencyInfo);

   VN_CMD_ENQUEUE(vkCmdPipelineBarrier2, commandBuffer, pDependencyInfo);
}
1748
/* Pass-through: encode vkCmdBeginQuery into the command stream. */
void
vn_CmdBeginQuery(VkCommandBuffer commandBuffer,
                 VkQueryPool queryPool,
                 uint32_t query,
                 VkQueryControlFlags flags)
{
   VN_CMD_ENQUEUE(vkCmdBeginQuery, commandBuffer, queryPool, query, flags);
}
1757
/* Queue a feedback batch so the query result(s) written by the command just
 * recorded can be copied back through the pool's feedback buffer at submit
 * time.  No-op for pools without a feedback buffer.  On allocation failure
 * the command buffer is marked invalid.
 */
static inline void
vn_cmd_add_query_feedback(VkCommandBuffer cmd_handle,
                          VkQueryPool pool_handle,
                          uint32_t query)
{
   struct vn_command_buffer *cmd = vn_command_buffer_from_handle(cmd_handle);
   struct vn_query_pool *query_pool = vn_query_pool_from_handle(pool_handle);

   if (!query_pool->fb_buf)
      return;

   /* Per 1.3.255 spec "If queries are used while executing a render pass
    * instance that has multiview enabled, the query uses N consecutive
    * query indices in the query pool (starting at query) where N is the
    * number of bits set in the view mask in the subpass the query is used
    * in."
    */
   uint32_t query_count =
      (cmd->builder.in_render_pass && cmd->builder.view_mask)
         ? util_bitcount(cmd->builder.view_mask)
         : 1;

   struct vn_feedback_query_batch *batch = vn_cmd_query_batch_alloc(
      cmd->pool, query_pool, query, query_count, true);
   if (!batch) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return;
   }

   list_addtail(&batch->head, &cmd->builder.query_batches);
}
1789
1790 static inline void
vn_cmd_add_query_reset_feedback(VkCommandBuffer cmd_handle,VkQueryPool pool_handle,uint32_t query,uint32_t query_count)1791 vn_cmd_add_query_reset_feedback(VkCommandBuffer cmd_handle,
1792 VkQueryPool pool_handle,
1793 uint32_t query,
1794 uint32_t query_count)
1795 {
1796 struct vn_command_buffer *cmd = vn_command_buffer_from_handle(cmd_handle);
1797 struct vn_query_pool *query_pool = vn_query_pool_from_handle(pool_handle);
1798
1799 if (!query_pool->fb_buf)
1800 return;
1801
1802 struct vn_feedback_query_batch *batch = vn_cmd_query_batch_alloc(
1803 cmd->pool, query_pool, query, query_count, false);
1804 if (!batch)
1805 cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
1806
1807 list_addtail(&batch->head, &cmd->builder.query_batches);
1808 }
1809
void
vn_CmdEndQuery(VkCommandBuffer commandBuffer,
               VkQueryPool queryPool,
               uint32_t query)
{
   VN_CMD_ENQUEUE(vkCmdEndQuery, commandBuffer, queryPool, query);

   /* The query result becomes available after this command; arrange the
    * feedback copy-back. */
   vn_cmd_add_query_feedback(commandBuffer, queryPool, query);
}
1819
void
vn_CmdResetQueryPool(VkCommandBuffer commandBuffer,
                     VkQueryPool queryPool,
                     uint32_t firstQuery,
                     uint32_t queryCount)
{
   VN_CMD_ENQUEUE(vkCmdResetQueryPool, commandBuffer, queryPool, firstQuery,
                  queryCount);

   /* Mirror the reset in the feedback buffer so stale results are not
    * copied back. */
   vn_cmd_add_query_reset_feedback(commandBuffer, queryPool, firstQuery,
                                   queryCount);
}
1832
void
vn_CmdWriteTimestamp(VkCommandBuffer commandBuffer,
                     VkPipelineStageFlagBits pipelineStage,
                     VkQueryPool queryPool,
                     uint32_t query)
{
   VN_CMD_ENQUEUE(vkCmdWriteTimestamp, commandBuffer, pipelineStage,
                  queryPool, query);

   /* Timestamp writes produce a query result too; queue the copy-back. */
   vn_cmd_add_query_feedback(commandBuffer, queryPool, query);
}
1844
void
vn_CmdWriteTimestamp2(VkCommandBuffer commandBuffer,
                      VkPipelineStageFlagBits2 stage,
                      VkQueryPool queryPool,
                      uint32_t query)
{
   VN_CMD_ENQUEUE(vkCmdWriteTimestamp2, commandBuffer, stage, queryPool,
                  query);

   /* Same feedback path as the sync1 variant. */
   vn_cmd_add_query_feedback(commandBuffer, queryPool, query);
}
1856
/* Pass-through: the copy happens on the host side of the protocol; no
 * driver-side feedback is needed here. */
void
vn_CmdCopyQueryPoolResults(VkCommandBuffer commandBuffer,
                           VkQueryPool queryPool,
                           uint32_t firstQuery,
                           uint32_t queryCount,
                           VkBuffer dstBuffer,
                           VkDeviceSize dstOffset,
                           VkDeviceSize stride,
                           VkQueryResultFlags flags)
{
   VN_CMD_ENQUEUE(vkCmdCopyQueryPoolResults, commandBuffer, queryPool,
                  firstQuery, queryCount, dstBuffer, dstOffset, stride,
                  flags);
}
1871
/* Pass-through: encode vkCmdPushConstants (pValues bytes are serialized by
 * the protocol encoder). */
void
vn_CmdPushConstants(VkCommandBuffer commandBuffer,
                    VkPipelineLayout layout,
                    VkShaderStageFlags stageFlags,
                    uint32_t offset,
                    uint32_t size,
                    const void *pValues)
{
   VN_CMD_ENQUEUE(vkCmdPushConstants, commandBuffer, layout, stageFlags,
                  offset, size, pValues);
}
1883
void
vn_CmdBeginRenderPass(VkCommandBuffer commandBuffer,
                      const VkRenderPassBeginInfo *pRenderPassBegin,
                      VkSubpassContents contents)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);

   /* Update builder render-pass tracking (used e.g. for multiview query
    * feedback) before encoding the begin. */
   vn_cmd_begin_render_pass(
      cmd, vn_render_pass_from_handle(pRenderPassBegin->renderPass),
      vn_framebuffer_from_handle(pRenderPassBegin->framebuffer),
      pRenderPassBegin);

   VN_CMD_ENQUEUE(vkCmdBeginRenderPass, commandBuffer, pRenderPassBegin,
                  contents);
}
1900
void
vn_CmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents)
{
   /* Advance builder subpass tracking, then encode. */
   vn_cmd_next_subpass(vn_command_buffer_from_handle(commandBuffer));

   VN_CMD_ENQUEUE(vkCmdNextSubpass, commandBuffer, contents);
}
1908
void
vn_CmdEndRenderPass(VkCommandBuffer commandBuffer)
{
   VN_CMD_ENQUEUE(vkCmdEndRenderPass, commandBuffer);

   /* Clear builder render-pass tracking after the end is encoded. */
   vn_cmd_end_render_pass(vn_command_buffer_from_handle(commandBuffer));
}
1916
void
vn_CmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                       const VkRenderPassBeginInfo *pRenderPassBegin,
                       const VkSubpassBeginInfo *pSubpassBeginInfo)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);

   /* Same builder tracking as vn_CmdBeginRenderPass. */
   vn_cmd_begin_render_pass(
      cmd, vn_render_pass_from_handle(pRenderPassBegin->renderPass),
      vn_framebuffer_from_handle(pRenderPassBegin->framebuffer),
      pRenderPassBegin);

   VN_CMD_ENQUEUE(vkCmdBeginRenderPass2, commandBuffer, pRenderPassBegin,
                  pSubpassBeginInfo);
}
1933
void
vn_CmdNextSubpass2(VkCommandBuffer commandBuffer,
                   const VkSubpassBeginInfo *pSubpassBeginInfo,
                   const VkSubpassEndInfo *pSubpassEndInfo)
{
   /* Advance builder subpass tracking, then encode. */
   vn_cmd_next_subpass(vn_command_buffer_from_handle(commandBuffer));

   VN_CMD_ENQUEUE(vkCmdNextSubpass2, commandBuffer, pSubpassBeginInfo,
                  pSubpassEndInfo);
}
1944
void
vn_CmdEndRenderPass2(VkCommandBuffer commandBuffer,
                     const VkSubpassEndInfo *pSubpassEndInfo)
{
   VN_CMD_ENQUEUE(vkCmdEndRenderPass2, commandBuffer, pSubpassEndInfo);

   /* Clear builder render-pass tracking after the end is encoded. */
   vn_cmd_end_render_pass(vn_command_buffer_from_handle(commandBuffer));
}
1953
1954 void
vn_CmdExecuteCommands(VkCommandBuffer commandBuffer,uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers)1955 vn_CmdExecuteCommands(VkCommandBuffer commandBuffer,
1956 uint32_t commandBufferCount,
1957 const VkCommandBuffer *pCommandBuffers)
1958 {
1959 VN_CMD_ENQUEUE(vkCmdExecuteCommands, commandBuffer, commandBufferCount,
1960 pCommandBuffers);
1961
1962 struct vn_command_buffer *primary_cmd =
1963 vn_command_buffer_from_handle(commandBuffer);
1964 for (uint32_t i = 0; i < commandBufferCount; i++) {
1965 struct vn_command_buffer *secondary_cmd =
1966 vn_command_buffer_from_handle(pCommandBuffers[i]);
1967 vn_cmd_merge_batched_query_feedback(primary_cmd, secondary_cmd);
1968 }
1969 }
1970
/* Pass-through: encode vkCmdSetDeviceMask. */
void
vn_CmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask)
{
   VN_CMD_ENQUEUE(vkCmdSetDeviceMask, commandBuffer, deviceMask);
}
1976
/* Pass-through: encode vkCmdDispatchBase. */
void
vn_CmdDispatchBase(VkCommandBuffer commandBuffer,
                   uint32_t baseGroupX,
                   uint32_t baseGroupY,
                   uint32_t baseGroupZ,
                   uint32_t groupCountX,
                   uint32_t groupCountY,
                   uint32_t groupCountZ)
{
   VN_CMD_ENQUEUE(vkCmdDispatchBase, commandBuffer, baseGroupX, baseGroupY,
                  baseGroupZ, groupCountX, groupCountY, groupCountZ);
}
1989
/* Pass-through: encode vkCmdSetLineStippleEXT. */
void
vn_CmdSetLineStippleEXT(VkCommandBuffer commandBuffer,
                        uint32_t lineStippleFactor,
                        uint16_t lineStipplePattern)
{
   VN_CMD_ENQUEUE(vkCmdSetLineStippleEXT, commandBuffer, lineStippleFactor,
                  lineStipplePattern);
}
1998
/* Pass-through: encode vkCmdBeginQueryIndexedEXT. */
void
vn_CmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer,
                           VkQueryPool queryPool,
                           uint32_t query,
                           VkQueryControlFlags flags,
                           uint32_t index)
{
   VN_CMD_ENQUEUE(vkCmdBeginQueryIndexedEXT, commandBuffer, queryPool, query,
                  flags, index);
}
2009
void
vn_CmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer,
                         VkQueryPool queryPool,
                         uint32_t query,
                         uint32_t index)
{
   VN_CMD_ENQUEUE(vkCmdEndQueryIndexedEXT, commandBuffer, queryPool, query,
                  index);

   /* The query result becomes available after this command; arrange the
    * feedback copy-back. */
   vn_cmd_add_query_feedback(commandBuffer, queryPool, query);
}
2021
/* Pass-through: encode vkCmdBindTransformFeedbackBuffersEXT. */
void
vn_CmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer,
                                      uint32_t firstBinding,
                                      uint32_t bindingCount,
                                      const VkBuffer *pBuffers,
                                      const VkDeviceSize *pOffsets,
                                      const VkDeviceSize *pSizes)
{
   VN_CMD_ENQUEUE(vkCmdBindTransformFeedbackBuffersEXT, commandBuffer,
                  firstBinding, bindingCount, pBuffers, pOffsets, pSizes);
}
2033
/* Pass-through: encode vkCmdBeginTransformFeedbackEXT. */
void
vn_CmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer,
                                uint32_t firstCounterBuffer,
                                uint32_t counterBufferCount,
                                const VkBuffer *pCounterBuffers,
                                const VkDeviceSize *pCounterBufferOffsets)
{
   VN_CMD_ENQUEUE(vkCmdBeginTransformFeedbackEXT, commandBuffer,
                  firstCounterBuffer, counterBufferCount, pCounterBuffers,
                  pCounterBufferOffsets);
}
2045
/* Pass-through: encode vkCmdEndTransformFeedbackEXT. */
void
vn_CmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer,
                              uint32_t firstCounterBuffer,
                              uint32_t counterBufferCount,
                              const VkBuffer *pCounterBuffers,
                              const VkDeviceSize *pCounterBufferOffsets)
{
   VN_CMD_ENQUEUE(vkCmdEndTransformFeedbackEXT, commandBuffer,
                  firstCounterBuffer, counterBufferCount, pCounterBuffers,
                  pCounterBufferOffsets);
}
2057
void
vn_CmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer,
                               uint32_t instanceCount,
                               uint32_t firstInstance,
                               VkBuffer counterBuffer,
                               VkDeviceSize counterBufferOffset,
                               uint32_t counterOffset,
                               uint32_t vertexStride)
{
   VN_CMD_ENQUEUE(vkCmdDrawIndirectByteCountEXT, commandBuffer, instanceCount,
                  firstInstance, counterBuffer, counterBufferOffset,
                  counterOffset, vertexStride);

   /* Draws count toward the batching limit; may flush the command stream. */
   vn_cmd_count_draw_and_submit_on_batch_limit(
      vn_command_buffer_from_handle(commandBuffer));
}
2074
/* Pass-through: encode vkCmdBindVertexBuffers2. */
void
vn_CmdBindVertexBuffers2(VkCommandBuffer commandBuffer,
                         uint32_t firstBinding,
                         uint32_t bindingCount,
                         const VkBuffer *pBuffers,
                         const VkDeviceSize *pOffsets,
                         const VkDeviceSize *pSizes,
                         const VkDeviceSize *pStrides)
{
   VN_CMD_ENQUEUE(vkCmdBindVertexBuffers2, commandBuffer, firstBinding,
                  bindingCount, pBuffers, pOffsets, pSizes, pStrides);
}
2087
/* Pass-through: encode vkCmdSetCullMode. */
void
vn_CmdSetCullMode(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode)
{
   VN_CMD_ENQUEUE(vkCmdSetCullMode, commandBuffer, cullMode);
}
2093
/* Pass-through: encode vkCmdSetDepthBoundsTestEnable. */
void
vn_CmdSetDepthBoundsTestEnable(VkCommandBuffer commandBuffer,
                               VkBool32 depthBoundsTestEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthBoundsTestEnable, commandBuffer,
                  depthBoundsTestEnable);
}
2101
/* Pass-through: encode vkCmdSetDepthCompareOp. */
void
vn_CmdSetDepthCompareOp(VkCommandBuffer commandBuffer,
                        VkCompareOp depthCompareOp)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthCompareOp, commandBuffer, depthCompareOp);
}
2108
/* Pass-through: encode vkCmdSetDepthTestEnable. */
void
vn_CmdSetDepthTestEnable(VkCommandBuffer commandBuffer,
                         VkBool32 depthTestEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthTestEnable, commandBuffer, depthTestEnable);
}
2115
/* Pass-through: encode vkCmdSetDepthWriteEnable. */
void
vn_CmdSetDepthWriteEnable(VkCommandBuffer commandBuffer,
                          VkBool32 depthWriteEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthWriteEnable, commandBuffer, depthWriteEnable);
}
2122
/* Pass-through: encode vkCmdSetFrontFace. */
void
vn_CmdSetFrontFace(VkCommandBuffer commandBuffer, VkFrontFace frontFace)
{
   VN_CMD_ENQUEUE(vkCmdSetFrontFace, commandBuffer, frontFace);
}
2128
/* Pass-through: encode vkCmdSetPrimitiveTopology. */
void
vn_CmdSetPrimitiveTopology(VkCommandBuffer commandBuffer,
                           VkPrimitiveTopology primitiveTopology)
{
   VN_CMD_ENQUEUE(vkCmdSetPrimitiveTopology, commandBuffer,
                  primitiveTopology);
}
2136
/* Pass-through: encode vkCmdSetScissorWithCount. */
void
vn_CmdSetScissorWithCount(VkCommandBuffer commandBuffer,
                          uint32_t scissorCount,
                          const VkRect2D *pScissors)
{
   VN_CMD_ENQUEUE(vkCmdSetScissorWithCount, commandBuffer, scissorCount,
                  pScissors);
}
2145
/* Pass-through: encode vkCmdSetStencilOp. */
void
vn_CmdSetStencilOp(VkCommandBuffer commandBuffer,
                   VkStencilFaceFlags faceMask,
                   VkStencilOp failOp,
                   VkStencilOp passOp,
                   VkStencilOp depthFailOp,
                   VkCompareOp compareOp)
{
   VN_CMD_ENQUEUE(vkCmdSetStencilOp, commandBuffer, faceMask, failOp, passOp,
                  depthFailOp, compareOp);
}
2157
/* Pass-through: encode vkCmdSetStencilTestEnable. */
void
vn_CmdSetStencilTestEnable(VkCommandBuffer commandBuffer,
                           VkBool32 stencilTestEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetStencilTestEnable, commandBuffer,
                  stencilTestEnable);
}
2165
/* Pass-through: encode vkCmdSetViewportWithCount. */
void
vn_CmdSetViewportWithCount(VkCommandBuffer commandBuffer,
                           uint32_t viewportCount,
                           const VkViewport *pViewports)
{
   VN_CMD_ENQUEUE(vkCmdSetViewportWithCount, commandBuffer, viewportCount,
                  pViewports);
}
2174
/* Pass-through: encode vkCmdSetDepthBiasEnable. */
void
vn_CmdSetDepthBiasEnable(VkCommandBuffer commandBuffer,
                         VkBool32 depthBiasEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthBiasEnable, commandBuffer, depthBiasEnable);
}
2181
/* Pass-through: encode vkCmdSetLogicOpEXT. */
void
vn_CmdSetLogicOpEXT(VkCommandBuffer commandBuffer, VkLogicOp logicOp)
{
   VN_CMD_ENQUEUE(vkCmdSetLogicOpEXT, commandBuffer, logicOp);
}
2187
/* Pass-through: encode vkCmdSetColorWriteEnableEXT. */
void
vn_CmdSetColorWriteEnableEXT(VkCommandBuffer commandBuffer,
                             uint32_t attachmentCount,
                             const VkBool32 *pColorWriteEnables)
{
   VN_CMD_ENQUEUE(vkCmdSetColorWriteEnableEXT, commandBuffer, attachmentCount,
                  pColorWriteEnables);
}
2196
/* Pass-through: encode vkCmdSetPatchControlPointsEXT. */
void
vn_CmdSetPatchControlPointsEXT(VkCommandBuffer commandBuffer,
                               uint32_t patchControlPoints)
{
   VN_CMD_ENQUEUE(vkCmdSetPatchControlPointsEXT, commandBuffer,
                  patchControlPoints);
}
2204
/* Pass-through: encode vkCmdSetPrimitiveRestartEnable. */
void
vn_CmdSetPrimitiveRestartEnable(VkCommandBuffer commandBuffer,
                                VkBool32 primitiveRestartEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetPrimitiveRestartEnable, commandBuffer,
                  primitiveRestartEnable);
}
2212
/* Pass-through: encode vkCmdSetRasterizerDiscardEnable. */
void
vn_CmdSetRasterizerDiscardEnable(VkCommandBuffer commandBuffer,
                                 VkBool32 rasterizerDiscardEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetRasterizerDiscardEnable, commandBuffer,
                  rasterizerDiscardEnable);
}
2220
/* Pass-through: encode vkCmdBeginConditionalRenderingEXT. */
void
vn_CmdBeginConditionalRenderingEXT(
   VkCommandBuffer commandBuffer,
   const VkConditionalRenderingBeginInfoEXT *pConditionalRenderingBegin)
{
   VN_CMD_ENQUEUE(vkCmdBeginConditionalRenderingEXT, commandBuffer,
                  pConditionalRenderingBegin);
}
2229
/* Pass-through: encode vkCmdEndConditionalRenderingEXT. */
void
vn_CmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer)
{
   VN_CMD_ENQUEUE(vkCmdEndConditionalRenderingEXT, commandBuffer);
}
2235
void
vn_CmdDrawMultiEXT(VkCommandBuffer commandBuffer,
                   uint32_t drawCount,
                   const VkMultiDrawInfoEXT *pVertexInfo,
                   uint32_t instanceCount,
                   uint32_t firstInstance,
                   uint32_t stride)
{
   VN_CMD_ENQUEUE(vkCmdDrawMultiEXT, commandBuffer, drawCount, pVertexInfo,
                  instanceCount, firstInstance, stride);

   /* Draws count toward the batching limit; may flush the command stream. */
   vn_cmd_count_draw_and_submit_on_batch_limit(
      vn_command_buffer_from_handle(commandBuffer));
}
2250
void
vn_CmdDrawMultiIndexedEXT(VkCommandBuffer commandBuffer,
                          uint32_t drawCount,
                          const VkMultiDrawIndexedInfoEXT *pIndexInfo,
                          uint32_t instanceCount,
                          uint32_t firstInstance,
                          uint32_t stride,
                          const int32_t *pVertexOffset)
{
   VN_CMD_ENQUEUE(vkCmdDrawMultiIndexedEXT, commandBuffer, drawCount,
                  pIndexInfo, instanceCount, firstInstance, stride,
                  pVertexOffset);

   /* Draws count toward the batching limit; may flush the command stream. */
   vn_cmd_count_draw_and_submit_on_batch_limit(
      vn_command_buffer_from_handle(commandBuffer));
}
2267
2268 void
vn_CmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipelineLayout layout,uint32_t set,uint32_t descriptorWriteCount,const VkWriteDescriptorSet * pDescriptorWrites)2269 vn_CmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
2270 VkPipelineBindPoint pipelineBindPoint,
2271 VkPipelineLayout layout,
2272 uint32_t set,
2273 uint32_t descriptorWriteCount,
2274 const VkWriteDescriptorSet *pDescriptorWrites)
2275 {
2276 if (vn_should_sanitize_descriptor_set_writes(descriptorWriteCount,
2277 pDescriptorWrites, layout)) {
2278 struct vn_command_buffer *cmd =
2279 vn_command_buffer_from_handle(commandBuffer);
2280 struct vn_update_descriptor_sets *update =
2281 vn_update_descriptor_sets_parse_writes(
2282 descriptorWriteCount, pDescriptorWrites, &cmd->pool->allocator,
2283 layout);
2284 if (!update) {
2285 cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
2286 return;
2287 }
2288
2289 VN_CMD_ENQUEUE(vkCmdPushDescriptorSetKHR, commandBuffer,
2290 pipelineBindPoint, layout, set, update->write_count,
2291 update->writes);
2292
2293 vk_free(&cmd->pool->allocator, update);
2294 } else {
2295 VN_CMD_ENQUEUE(vkCmdPushDescriptorSetKHR, commandBuffer,
2296 pipelineBindPoint, layout, set, descriptorWriteCount,
2297 pDescriptorWrites);
2298 }
2299 }
2300
void
vn_CmdPushDescriptorSetWithTemplateKHR(
   VkCommandBuffer commandBuffer,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   VkPipelineLayout layout,
   uint32_t set,
   const void *pData)
{
   struct vn_descriptor_update_template *templ =
      vn_descriptor_update_template_from_handle(descriptorUpdateTemplate);

   /* The template's scratch state is shared; hold its mutex while building
    * and encoding the writes. */
   mtx_lock(&templ->mutex);

   /* NOTE(review): update is not NULL-checked here, unlike the explicit
    * write path in vn_CmdPushDescriptorSetKHR — presumably the locked
    * template path cannot fail allocation; verify.
    */
   struct vn_update_descriptor_sets *update =
      vn_update_descriptor_set_with_template_locked(templ, VK_NULL_HANDLE,
                                                    pData);
   VN_CMD_ENQUEUE(vkCmdPushDescriptorSetKHR, commandBuffer,
                  templ->pipeline_bind_point, layout, set,
                  update->write_count, update->writes);

   mtx_unlock(&templ->mutex);
}
2323
/* Pass-through: encode vkCmdSetVertexInputEXT. */
void
vn_CmdSetVertexInputEXT(
   VkCommandBuffer commandBuffer,
   uint32_t vertexBindingDescriptionCount,
   const VkVertexInputBindingDescription2EXT *pVertexBindingDescriptions,
   uint32_t vertexAttributeDescriptionCount,
   const VkVertexInputAttributeDescription2EXT *pVertexAttributeDescriptions)
{
   VN_CMD_ENQUEUE(vkCmdSetVertexInputEXT, commandBuffer,
                  vertexBindingDescriptionCount, pVertexBindingDescriptions,
                  vertexAttributeDescriptionCount,
                  pVertexAttributeDescriptions);
}
2337
/* Pass-through: encode vkCmdSetAlphaToCoverageEnableEXT. */
void
vn_CmdSetAlphaToCoverageEnableEXT(VkCommandBuffer commandBuffer,
                                  VkBool32 alphaToCoverageEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetAlphaToCoverageEnableEXT, commandBuffer,
                  alphaToCoverageEnable);
}
2345
/* Pass-through: encode vkCmdSetAlphaToOneEnableEXT. */
void
vn_CmdSetAlphaToOneEnableEXT(VkCommandBuffer commandBuffer,
                             VkBool32 alphaToOneEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetAlphaToOneEnableEXT, commandBuffer,
                  alphaToOneEnable);
}
2353
/* Pass-through: encode vkCmdSetColorBlendAdvancedEXT. */
void
vn_CmdSetColorBlendAdvancedEXT(
   VkCommandBuffer commandBuffer,
   uint32_t firstAttachment,
   uint32_t attachmentCount,
   const VkColorBlendAdvancedEXT *pColorBlendAdvanced)
{
   VN_CMD_ENQUEUE(vkCmdSetColorBlendAdvancedEXT, commandBuffer,
                  firstAttachment, attachmentCount, pColorBlendAdvanced);
}
2364
/* Pass-through: encode vkCmdSetColorBlendEnableEXT. */
void
vn_CmdSetColorBlendEnableEXT(VkCommandBuffer commandBuffer,
                             uint32_t firstAttachment,
                             uint32_t attachmentCount,
                             const VkBool32 *pColorBlendEnables)
{
   VN_CMD_ENQUEUE(vkCmdSetColorBlendEnableEXT, commandBuffer, firstAttachment,
                  attachmentCount, pColorBlendEnables);
}
2374
/* Pass-through: encode vkCmdSetColorBlendEquationEXT. */
void
vn_CmdSetColorBlendEquationEXT(
   VkCommandBuffer commandBuffer,
   uint32_t firstAttachment,
   uint32_t attachmentCount,
   const VkColorBlendEquationEXT *pColorBlendEquations)
{
   VN_CMD_ENQUEUE(vkCmdSetColorBlendEquationEXT, commandBuffer,
                  firstAttachment, attachmentCount, pColorBlendEquations);
}
2385
/* Pass-through: encode vkCmdSetColorWriteMaskEXT. */
void
vn_CmdSetColorWriteMaskEXT(VkCommandBuffer commandBuffer,
                           uint32_t firstAttachment,
                           uint32_t attachmentCount,
                           const VkColorComponentFlags *pColorWriteMasks)
{
   VN_CMD_ENQUEUE(vkCmdSetColorWriteMaskEXT, commandBuffer, firstAttachment,
                  attachmentCount, pColorWriteMasks);
}
2395
/* Pass-through: encode vkCmdSetConservativeRasterizationModeEXT. */
void
vn_CmdSetConservativeRasterizationModeEXT(
   VkCommandBuffer commandBuffer,
   VkConservativeRasterizationModeEXT conservativeRasterizationMode)
{
   VN_CMD_ENQUEUE(vkCmdSetConservativeRasterizationModeEXT, commandBuffer,
                  conservativeRasterizationMode);
}
2404
/* Pass-through: encode vkCmdSetDepthClampEnableEXT. */
void
vn_CmdSetDepthClampEnableEXT(VkCommandBuffer commandBuffer,
                             VkBool32 depthClampEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthClampEnableEXT, commandBuffer,
                  depthClampEnable);
}
2412
/* Pass-through: encode vkCmdSetDepthClipEnableEXT. */
void
vn_CmdSetDepthClipEnableEXT(VkCommandBuffer commandBuffer,
                            VkBool32 depthClipEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthClipEnableEXT, commandBuffer, depthClipEnable);
}
2419
/* Pass-through: encode vkCmdSetDepthClipNegativeOneToOneEXT. */
void
vn_CmdSetDepthClipNegativeOneToOneEXT(VkCommandBuffer commandBuffer,
                                      VkBool32 negativeOneToOne)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthClipNegativeOneToOneEXT, commandBuffer,
                  negativeOneToOne);
}
2427
/* Pass-through: encode vkCmdSetExtraPrimitiveOverestimationSizeEXT. */
void
vn_CmdSetExtraPrimitiveOverestimationSizeEXT(
   VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize)
{
   VN_CMD_ENQUEUE(vkCmdSetExtraPrimitiveOverestimationSizeEXT, commandBuffer,
                  extraPrimitiveOverestimationSize);
}
2435
/* Pass-through: encode vkCmdSetLineRasterizationModeEXT. */
void
vn_CmdSetLineRasterizationModeEXT(
   VkCommandBuffer commandBuffer,
   VkLineRasterizationModeEXT lineRasterizationMode)
{
   VN_CMD_ENQUEUE(vkCmdSetLineRasterizationModeEXT, commandBuffer,
                  lineRasterizationMode);
}
2444
/* Pass-through: encode vkCmdSetLineStippleEnableEXT. */
void
vn_CmdSetLineStippleEnableEXT(VkCommandBuffer commandBuffer,
                              VkBool32 stippledLineEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetLineStippleEnableEXT, commandBuffer,
                  stippledLineEnable);
}
2452
/* Encodes vkCmdSetLogicOpEnableEXT into the command stream
 * (see VN_CMD_ENQUEUE for failure/batching behavior).
 */
void
vn_CmdSetLogicOpEnableEXT(VkCommandBuffer commandBuffer,
                          VkBool32 logicOpEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetLogicOpEnableEXT, commandBuffer, logicOpEnable);
}
2459
/* Encodes vkCmdSetPolygonModeEXT into the command stream
 * (see VN_CMD_ENQUEUE for failure/batching behavior).
 */
void
vn_CmdSetPolygonModeEXT(VkCommandBuffer commandBuffer,
                        VkPolygonMode polygonMode)
{
   VN_CMD_ENQUEUE(vkCmdSetPolygonModeEXT, commandBuffer, polygonMode);
}
2466
/* Encodes vkCmdSetProvokingVertexModeEXT into the command stream
 * (see VN_CMD_ENQUEUE for failure/batching behavior).
 */
void
vn_CmdSetProvokingVertexModeEXT(VkCommandBuffer commandBuffer,
                                VkProvokingVertexModeEXT provokingVertexMode)
{
   VN_CMD_ENQUEUE(vkCmdSetProvokingVertexModeEXT, commandBuffer,
                  provokingVertexMode);
}
2474
/* Encodes vkCmdSetRasterizationSamplesEXT into the command stream
 * (see VN_CMD_ENQUEUE for failure/batching behavior).
 */
void
vn_CmdSetRasterizationSamplesEXT(VkCommandBuffer commandBuffer,
                                 VkSampleCountFlagBits rasterizationSamples)
{
   VN_CMD_ENQUEUE(vkCmdSetRasterizationSamplesEXT, commandBuffer,
                  rasterizationSamples);
}
2482
/* Encodes vkCmdSetRasterizationStreamEXT into the command stream
 * (see VN_CMD_ENQUEUE for failure/batching behavior).
 */
void
vn_CmdSetRasterizationStreamEXT(VkCommandBuffer commandBuffer,
                                uint32_t rasterizationStream)
{
   VN_CMD_ENQUEUE(vkCmdSetRasterizationStreamEXT, commandBuffer,
                  rasterizationStream);
}
2490
/* Encodes vkCmdSetSampleLocationsEnableEXT into the command stream
 * (see VN_CMD_ENQUEUE for failure/batching behavior).
 */
void
vn_CmdSetSampleLocationsEnableEXT(VkCommandBuffer commandBuffer,
                                  VkBool32 sampleLocationsEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetSampleLocationsEnableEXT, commandBuffer,
                  sampleLocationsEnable);
}
2498
/* Encodes vkCmdSetSampleMaskEXT into the command stream; pSampleMask is
 * serialized by the generated vn_sizeof_/vn_encode_ helpers inside
 * VN_CMD_ENQUEUE, so the caller's array is not retained.
 */
void
vn_CmdSetSampleMaskEXT(VkCommandBuffer commandBuffer,
                       VkSampleCountFlagBits samples,
                       const VkSampleMask *pSampleMask)
{
   VN_CMD_ENQUEUE(vkCmdSetSampleMaskEXT, commandBuffer, samples, pSampleMask);
}
2506
/* Encodes vkCmdSetTessellationDomainOriginEXT into the command stream
 * (see VN_CMD_ENQUEUE for failure/batching behavior).
 */
void
vn_CmdSetTessellationDomainOriginEXT(VkCommandBuffer commandBuffer,
                                     VkTessellationDomainOrigin domainOrigin)
{
   VN_CMD_ENQUEUE(vkCmdSetTessellationDomainOriginEXT, commandBuffer,
                  domainOrigin);
}
2514