• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright 2019 Google LLC
3  * SPDX-License-Identifier: MIT
4  *
5  * based in part on anv and radv which are:
6  * Copyright © 2015 Intel Corporation
7  * Copyright © 2016 Red Hat.
8  * Copyright © 2016 Bas Nieuwenhuizen
9  */
10 
11 #include "vn_command_buffer.h"
12 
13 #include "venus-protocol/vn_protocol_driver_command_buffer.h"
14 #include "venus-protocol/vn_protocol_driver_command_pool.h"
15 
16 #include "vn_device.h"
17 #include "vn_image.h"
18 #include "vn_render_pass.h"
19 
/* Encode a command buffer command into cmd->cs.
 *
 * On encoder reservation failure the command buffer is marked invalid
 * instead of returning an error: recording commands return void, so the
 * failure surfaces later (e.g. vn_EndCommandBuffer errors out when the state
 * is not RECORDING).
 */
#define VN_CMD_ENQUEUE(cmd_name, commandBuffer, ...)                         \
   do {                                                                      \
      struct vn_command_buffer *_cmd =                                       \
         vn_command_buffer_from_handle(commandBuffer);                       \
      size_t _cmd_size = vn_sizeof_##cmd_name(commandBuffer, ##__VA_ARGS__); \
                                                                             \
      if (vn_cs_encoder_reserve(&_cmd->cs, _cmd_size))                       \
         vn_encode_##cmd_name(&_cmd->cs, 0, commandBuffer, ##__VA_ARGS__);   \
      else                                                                   \
         _cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;                      \
   } while (0)
31 
32 static bool
vn_image_memory_barrier_has_present_src(const VkImageMemoryBarrier * img_barriers,uint32_t count)33 vn_image_memory_barrier_has_present_src(
34    const VkImageMemoryBarrier *img_barriers, uint32_t count)
35 {
36    for (uint32_t i = 0; i < count; i++) {
37       if (img_barriers[i].oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR ||
38           img_barriers[i].newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
39          return true;
40    }
41    return false;
42 }
43 
44 static VkImageMemoryBarrier *
vn_cmd_get_image_memory_barriers(struct vn_command_buffer * cmd,uint32_t count)45 vn_cmd_get_image_memory_barriers(struct vn_command_buffer *cmd,
46                                  uint32_t count)
47 {
48    /* avoid shrinking in case of non efficient reallocation implementation */
49    if (count > cmd->builder.image_barrier_count) {
50       size_t size = sizeof(VkImageMemoryBarrier) * count;
51       VkImageMemoryBarrier *img_barriers =
52          vk_realloc(&cmd->allocator, cmd->builder.image_barriers, size,
53                     VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
54       if (!img_barriers)
55          return NULL;
56 
57       /* update upon successful reallocation */
58       cmd->builder.image_barrier_count = count;
59       cmd->builder.image_barriers = img_barriers;
60    }
61 
62    return cmd->builder.image_barriers;
63 }
64 
65 /* About VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, the spec says
66  *
67  *    VK_IMAGE_LAYOUT_PRESENT_SRC_KHR must only be used for presenting a
68  *    presentable image for display. A swapchain's image must be transitioned
69  *    to this layout before calling vkQueuePresentKHR, and must be
70  *    transitioned away from this layout after calling vkAcquireNextImageKHR.
71  *
72  * That allows us to treat the layout internally as
73  *
74  *  - VK_IMAGE_LAYOUT_GENERAL
75  *  - VK_QUEUE_FAMILY_FOREIGN_EXT has the ownership, if the image is not a
76  *    prime blit source
77  *
78  * while staying performant.
79  *
80  * About queue family ownerships, the spec says
81  *
82  *    A queue family can take ownership of an image subresource or buffer
83  *    range of a resource created with VK_SHARING_MODE_EXCLUSIVE, without an
84  *    ownership transfer, in the same way as for a resource that was just
85  *    created; however, taking ownership in this way has the effect that the
86  *    contents of the image subresource or buffer range are undefined.
87  *
88  * It is unclear if that is applicable to external resources, which supposedly
89  * have the same semantics
90  *
91  *    Binding a resource to a memory object shared between multiple Vulkan
92  *    instances or other APIs does not change the ownership of the underlying
93  *    memory. The first entity to access the resource implicitly acquires
94  *    ownership. Accessing a resource backed by memory that is owned by a
95  *    particular instance or API has the same semantics as accessing a
96  *    VK_SHARING_MODE_EXCLUSIVE resource[...]
97  *
98  * We should get the spec clarified, or get rid of this completely broken code
99  * (TODO).
100  *
101  * Assuming a queue family can acquire the ownership implicitly when the
102  * contents are not needed, we do not need to worry about
103  * VK_IMAGE_LAYOUT_UNDEFINED.  We can use VK_IMAGE_LAYOUT_PRESENT_SRC_KHR as
104  * the sole signal to trigger queue family ownership transfers.
105  *
106  * When the image has VK_SHARING_MODE_CONCURRENT, we can, and are required to,
107  * use VK_QUEUE_FAMILY_IGNORED as the other queue family whether we are
108  * transitioning to or from VK_IMAGE_LAYOUT_PRESENT_SRC_KHR.
109  *
110  * When the image has VK_SHARING_MODE_EXCLUSIVE, we have to work out who the
111  * other queue family is.  It is easier when the barrier does not also define
112  * a queue family ownership transfer (i.e., srcQueueFamilyIndex equals to
113  * dstQueueFamilyIndex).  The other queue family must be the queue family the
114  * command buffer was allocated for.
115  *
116  * When the barrier also defines a queue family ownership transfer, it is
117  * submitted both to the source queue family to release the ownership and to
118  * the destination queue family to acquire the ownership.  Depending on
119  * whether the barrier transitions to or from VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
120  * we are only interested in the ownership release or acquire respectively and
121  * should be careful to avoid double releases/acquires.
122  *
123  * I haven't followed all transition paths mentally to verify the correctness.
124  * I likely also violate some VUs or miss some cases below.  They are
125  * hopefully fixable and are left as TODOs.
126  */
/* Rewrite a barrier involving VK_IMAGE_LAYOUT_PRESENT_SRC_KHR so that the
 * renderer sees VN_PRESENT_SRC_INTERNAL_LAYOUT and, for exclusive-mode WSI
 * images, the matching VK_QUEUE_FAMILY_FOREIGN_EXT ownership transfer.  See
 * the long comment above for the reasoning.  src_barrier and out_barrier may
 * alias (vn_cmd_transfer_present_src_images relies on that).
 */
static void
vn_cmd_fix_image_memory_barrier(const struct vn_command_buffer *cmd,
                                const VkImageMemoryBarrier *src_barrier,
                                VkImageMemoryBarrier *out_barrier)
{
   const struct vn_image *img = vn_image_from_handle(src_barrier->image);

   *out_barrier = *src_barrier;

   /* no fix needed */
   if (out_barrier->oldLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR &&
       out_barrier->newLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
      return;

   /* PRESENT_SRC_KHR is only valid on WSI images */
   assert(img->wsi.is_wsi);

   /* internal layout equals the external one; nothing to rewrite */
   if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
      return;

   /* prime blit src or no layout transition */
   if (img->wsi.is_prime_blit_src ||
       out_barrier->oldLayout == out_barrier->newLayout) {
      if (out_barrier->oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
         out_barrier->oldLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;
      if (out_barrier->newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
         out_barrier->newLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;
      return;
   }

   if (out_barrier->oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
      /* transitioning away from the present layout (acquire side) */
      out_barrier->oldLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

      /* no availability operation needed */
      out_barrier->srcAccessMask = 0;

      const uint32_t dst_qfi = out_barrier->dstQueueFamilyIndex;
      if (img->sharing_mode == VK_SHARING_MODE_CONCURRENT) {
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
      } else if (dst_qfi == out_barrier->srcQueueFamilyIndex ||
                 dst_qfi == cmd->queue_family_index) {
         /* no app-requested transfer; acquire from the foreign queue family */
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
         out_barrier->dstQueueFamilyIndex = cmd->queue_family_index;
      } else {
         /* The barrier also defines a queue family ownership transfer, and
          * this is the one that gets submitted to the source queue family to
          * release the ownership.  Skip both the transfer and the transition.
          */
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->newLayout = out_barrier->oldLayout;
      }
   } else {
      /* transitioning to the present layout (release side) */
      out_barrier->newLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

      /* no visibility operation needed */
      out_barrier->dstAccessMask = 0;

      const uint32_t src_qfi = out_barrier->srcQueueFamilyIndex;
      if (img->sharing_mode == VK_SHARING_MODE_CONCURRENT) {
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
      } else if (src_qfi == out_barrier->dstQueueFamilyIndex ||
                 src_qfi == cmd->queue_family_index) {
         /* no app-requested transfer; release to the foreign queue family */
         out_barrier->srcQueueFamilyIndex = cmd->queue_family_index;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
      } else {
         /* The barrier also defines a queue family ownership transfer, and
          * this is the one that gets submitted to the destination queue
          * family to acquire the ownership.  Skip both the transfer and the
          * transition.
          */
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->oldLayout = out_barrier->newLayout;
      }
   }
}
205 
/* Fix the image memory barriers of a vkCmdWaitEvents for
 * VK_IMAGE_LAYOUT_PRESENT_SRC_KHR.
 *
 * Returns src_barriers unchanged when no fixing is needed, or on allocation
 * failure (after marking cmd invalid).  Otherwise returns a scratch array of
 * count barriers whose last *out_transfer_count entries are queue family
 * ownership transfers that the caller must submit separately (they are not
 * legal in vkCmdWaitEvents).
 */
static const VkImageMemoryBarrier *
vn_cmd_wait_events_fix_image_memory_barriers(
   struct vn_command_buffer *cmd,
   const VkImageMemoryBarrier *src_barriers,
   uint32_t count,
   uint32_t *out_transfer_count)
{
   *out_transfer_count = 0;

   /* nothing to fix inside a render pass or without PRESENT_SRC_KHR */
   if (cmd->builder.render_pass ||
       !vn_image_memory_barrier_has_present_src(src_barriers, count))
      return src_barriers;

   /* double-sized scratch: first half collects fixed barriers, second half
    * temporarily collects the ownership transfers
    */
   VkImageMemoryBarrier *img_barriers =
      vn_cmd_get_image_memory_barriers(cmd, count * 2);
   if (!img_barriers) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return src_barriers;
   }

   /* vkCmdWaitEvents cannot be used for queue family ownership transfers.
    * Nothing appears to be said about the submission order of image memory
    * barriers in the same array.  We take the liberty to move queue family
    * ownership transfers to the tail.
    */
   VkImageMemoryBarrier *transfer_barriers = img_barriers + count;
   uint32_t transfer_count = 0;
   uint32_t valid_count = 0;
   for (uint32_t i = 0; i < count; i++) {
      VkImageMemoryBarrier *img_barrier = &img_barriers[valid_count];
      vn_cmd_fix_image_memory_barrier(cmd, &src_barriers[i], img_barrier);

      if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
         /* no ownership transfers are generated in this configuration */
         valid_count++;
         continue;
      }

      if (img_barrier->srcQueueFamilyIndex ==
          img_barrier->dstQueueFamilyIndex) {
         valid_count++;
      } else {
         transfer_barriers[transfer_count++] = *img_barrier;
      }
   }

   assert(valid_count + transfer_count == count);
   if (transfer_count) {
      /* copy back to the tail */
      memcpy(&img_barriers[valid_count], transfer_barriers,
             sizeof(*transfer_barriers) * transfer_count);
      *out_transfer_count = transfer_count;
   }

   return img_barriers;
}
261 
262 static const VkImageMemoryBarrier *
vn_cmd_pipeline_barrier_fix_image_memory_barriers(struct vn_command_buffer * cmd,const VkImageMemoryBarrier * src_barriers,uint32_t count)263 vn_cmd_pipeline_barrier_fix_image_memory_barriers(
264    struct vn_command_buffer *cmd,
265    const VkImageMemoryBarrier *src_barriers,
266    uint32_t count)
267 {
268    if (cmd->builder.render_pass ||
269        !vn_image_memory_barrier_has_present_src(src_barriers, count))
270       return src_barriers;
271 
272    VkImageMemoryBarrier *img_barriers =
273       vn_cmd_get_image_memory_barriers(cmd, count);
274    if (!img_barriers) {
275       cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
276       return src_barriers;
277    }
278 
279    for (uint32_t i = 0; i < count; i++) {
280       vn_cmd_fix_image_memory_barrier(cmd, &src_barriers[i],
281                                       &img_barriers[i]);
282    }
283 
284    return img_barriers;
285 }
286 
287 static void
vn_cmd_encode_memory_barriers(struct vn_command_buffer * cmd,VkPipelineStageFlags src_stage_mask,VkPipelineStageFlags dst_stage_mask,uint32_t buf_barrier_count,const VkBufferMemoryBarrier * buf_barriers,uint32_t img_barrier_count,const VkImageMemoryBarrier * img_barriers)288 vn_cmd_encode_memory_barriers(struct vn_command_buffer *cmd,
289                               VkPipelineStageFlags src_stage_mask,
290                               VkPipelineStageFlags dst_stage_mask,
291                               uint32_t buf_barrier_count,
292                               const VkBufferMemoryBarrier *buf_barriers,
293                               uint32_t img_barrier_count,
294                               const VkImageMemoryBarrier *img_barriers)
295 {
296    const VkCommandBuffer cmd_handle = vn_command_buffer_to_handle(cmd);
297 
298    VN_CMD_ENQUEUE(vkCmdPipelineBarrier, cmd_handle, src_stage_mask,
299                   dst_stage_mask, 0, 0, NULL, buf_barrier_count, buf_barriers,
300                   img_barrier_count, img_barriers);
301 }
302 
303 static void
vn_present_src_attachment_to_image_memory_barrier(const struct vn_image * img,const struct vn_present_src_attachment * att,VkImageMemoryBarrier * img_barrier)304 vn_present_src_attachment_to_image_memory_barrier(
305    const struct vn_image *img,
306    const struct vn_present_src_attachment *att,
307    VkImageMemoryBarrier *img_barrier)
308 {
309    *img_barrier = (VkImageMemoryBarrier)
310    {
311       .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
312       .srcAccessMask = att->src_access_mask,
313       .dstAccessMask = att->dst_access_mask,
314       .oldLayout = att->acquire ? VK_IMAGE_LAYOUT_PRESENT_SRC_KHR
315                                 : VN_PRESENT_SRC_INTERNAL_LAYOUT,
316       .newLayout = att->acquire ? VN_PRESENT_SRC_INTERNAL_LAYOUT
317                                 : VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
318       .image = vn_image_to_handle((struct vn_image *)img),
319       .subresourceRange = {
320          .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
321          .levelCount = 1,
322          .layerCount = 1,
323       },
324    };
325 }
326 
327 static void
vn_cmd_transfer_present_src_images(struct vn_command_buffer * cmd,const struct vn_image * const * images,const struct vn_present_src_attachment * atts,uint32_t count)328 vn_cmd_transfer_present_src_images(
329    struct vn_command_buffer *cmd,
330    const struct vn_image *const *images,
331    const struct vn_present_src_attachment *atts,
332    uint32_t count)
333 {
334    VkImageMemoryBarrier *img_barriers =
335       vn_cmd_get_image_memory_barriers(cmd, count);
336    if (!img_barriers) {
337       cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
338       return;
339    }
340 
341    VkPipelineStageFlags src_stage_mask = 0;
342    VkPipelineStageFlags dst_stage_mask = 0;
343    for (uint32_t i = 0; i < count; i++) {
344       src_stage_mask |= atts[i].src_stage_mask;
345       dst_stage_mask |= atts[i].dst_stage_mask;
346 
347       vn_present_src_attachment_to_image_memory_barrier(images[i], &atts[i],
348                                                         &img_barriers[i]);
349       vn_cmd_fix_image_memory_barrier(cmd, &img_barriers[i],
350                                       &img_barriers[i]);
351    }
352 
353    if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
354       return;
355 
356    vn_cmd_encode_memory_barriers(cmd, src_stage_mask, dst_stage_mask, 0, NULL,
357                                  count, img_barriers);
358 }
359 
/* Track render pass state on the builder and, for primary command buffers of
 * passes with present-src attachments, record the WSI images and emit the
 * acquire ownership transfers.
 *
 * begin_info may be NULL (vn_BeginCommandBuffer passes NULL for an inherited
 * pass); it is only dereferenced on the imageless-framebuffer path.
 */
static void
vn_cmd_begin_render_pass(struct vn_command_buffer *cmd,
                         const struct vn_render_pass *pass,
                         const struct vn_framebuffer *fb,
                         const VkRenderPassBeginInfo *begin_info)
{
   cmd->builder.render_pass = pass;
   cmd->builder.framebuffer = fb;

   /* secondary command buffers only inherit the pass; no transfers here */
   if (!pass->present_src_count ||
       cmd->level == VK_COMMAND_BUFFER_LEVEL_SECONDARY)
      return;

   /* find fb attachments */
   const VkImageView *views;
   ASSERTED uint32_t view_count;
   if (fb->image_view_count) {
      views = fb->image_views;
      view_count = fb->image_view_count;
   } else {
      /* imageless framebuffer: attachments come from the begin info chain */
      const VkRenderPassAttachmentBeginInfo *imageless_info =
         vk_find_struct_const(begin_info->pNext,
                              RENDER_PASS_ATTACHMENT_BEGIN_INFO);
      assert(imageless_info);
      views = imageless_info->pAttachments;
      view_count = imageless_info->attachmentCount;
   }

   /* collect the images of the present-src attachments; the array is freed
    * in vn_cmd_end_render_pass
    */
   const struct vn_image **images =
      vk_alloc(&cmd->allocator, sizeof(*images) * pass->present_src_count,
               VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!images) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return;
   }

   for (uint32_t i = 0; i < pass->present_src_count; i++) {
      const uint32_t index = pass->present_src_attachments[i].index;
      assert(index < view_count);
      images[i] = vn_image_view_from_handle(views[index])->image;
   }

   /* the first acquire_count entries are the acquires (see
    * vn_cmd_end_render_pass, which releases the remaining entries)
    */
   if (pass->acquire_count) {
      vn_cmd_transfer_present_src_images(
         cmd, images, pass->present_src_attachments, pass->acquire_count);
   }

   cmd->builder.present_src_images = images;
}
409 
410 static void
vn_cmd_end_render_pass(struct vn_command_buffer * cmd)411 vn_cmd_end_render_pass(struct vn_command_buffer *cmd)
412 {
413    const struct vn_render_pass *pass = cmd->builder.render_pass;
414 
415    cmd->builder.render_pass = NULL;
416    cmd->builder.framebuffer = NULL;
417 
418    if (!pass->present_src_count || !cmd->builder.present_src_images)
419       return;
420 
421    const struct vn_image **images = cmd->builder.present_src_images;
422    cmd->builder.present_src_images = NULL;
423 
424    if (pass->release_count) {
425       vn_cmd_transfer_present_src_images(
426          cmd, images + pass->acquire_count,
427          pass->present_src_attachments + pass->acquire_count,
428          pass->release_count);
429    }
430 
431    vk_free(&cmd->allocator, images);
432 }
433 
434 /* command pool commands */
435 
436 VkResult
vn_CreateCommandPool(VkDevice device,const VkCommandPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkCommandPool * pCommandPool)437 vn_CreateCommandPool(VkDevice device,
438                      const VkCommandPoolCreateInfo *pCreateInfo,
439                      const VkAllocationCallbacks *pAllocator,
440                      VkCommandPool *pCommandPool)
441 {
442    VN_TRACE_FUNC();
443    struct vn_device *dev = vn_device_from_handle(device);
444    const VkAllocationCallbacks *alloc =
445       pAllocator ? pAllocator : &dev->base.base.alloc;
446 
447    struct vn_command_pool *pool =
448       vk_zalloc(alloc, sizeof(*pool), VN_DEFAULT_ALIGN,
449                 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
450    if (!pool)
451       return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
452 
453    vn_object_base_init(&pool->base, VK_OBJECT_TYPE_COMMAND_POOL, &dev->base);
454 
455    pool->allocator = *alloc;
456    pool->queue_family_index = pCreateInfo->queueFamilyIndex;
457    list_inithead(&pool->command_buffers);
458 
459    VkCommandPool pool_handle = vn_command_pool_to_handle(pool);
460    vn_async_vkCreateCommandPool(dev->instance, device, pCreateInfo, NULL,
461                                 &pool_handle);
462 
463    *pCommandPool = pool_handle;
464 
465    return VK_SUCCESS;
466 }
467 
468 void
vn_DestroyCommandPool(VkDevice device,VkCommandPool commandPool,const VkAllocationCallbacks * pAllocator)469 vn_DestroyCommandPool(VkDevice device,
470                       VkCommandPool commandPool,
471                       const VkAllocationCallbacks *pAllocator)
472 {
473    VN_TRACE_FUNC();
474    struct vn_device *dev = vn_device_from_handle(device);
475    struct vn_command_pool *pool = vn_command_pool_from_handle(commandPool);
476    const VkAllocationCallbacks *alloc;
477 
478    if (!pool)
479       return;
480 
481    alloc = pAllocator ? pAllocator : &pool->allocator;
482 
483    /* We must emit vkDestroyCommandPool before freeing the command buffers in
484     * pool->command_buffers.  Otherwise, another thread might reuse their
485     * object ids while they still refer to the command buffers in the
486     * renderer.
487     */
488    vn_async_vkDestroyCommandPool(dev->instance, device, commandPool, NULL);
489 
490    list_for_each_entry_safe(struct vn_command_buffer, cmd,
491                             &pool->command_buffers, head) {
492       vn_cs_encoder_fini(&cmd->cs);
493       vn_object_base_fini(&cmd->base);
494       vk_free(alloc, cmd);
495    }
496 
497    vn_object_base_fini(&pool->base);
498    vk_free(alloc, pool);
499 }
500 
501 VkResult
vn_ResetCommandPool(VkDevice device,VkCommandPool commandPool,VkCommandPoolResetFlags flags)502 vn_ResetCommandPool(VkDevice device,
503                     VkCommandPool commandPool,
504                     VkCommandPoolResetFlags flags)
505 {
506    VN_TRACE_FUNC();
507    struct vn_device *dev = vn_device_from_handle(device);
508    struct vn_command_pool *pool = vn_command_pool_from_handle(commandPool);
509 
510    list_for_each_entry_safe(struct vn_command_buffer, cmd,
511                             &pool->command_buffers, head) {
512       vn_cs_encoder_reset(&cmd->cs);
513       cmd->state = VN_COMMAND_BUFFER_STATE_INITIAL;
514    }
515 
516    vn_async_vkResetCommandPool(dev->instance, device, commandPool, flags);
517 
518    return VK_SUCCESS;
519 }
520 
521 void
vn_TrimCommandPool(VkDevice device,VkCommandPool commandPool,VkCommandPoolTrimFlags flags)522 vn_TrimCommandPool(VkDevice device,
523                    VkCommandPool commandPool,
524                    VkCommandPoolTrimFlags flags)
525 {
526    VN_TRACE_FUNC();
527    struct vn_device *dev = vn_device_from_handle(device);
528 
529    vn_async_vkTrimCommandPool(dev->instance, device, commandPool, flags);
530 }
531 
532 /* command buffer commands */
533 
534 VkResult
vn_AllocateCommandBuffers(VkDevice device,const VkCommandBufferAllocateInfo * pAllocateInfo,VkCommandBuffer * pCommandBuffers)535 vn_AllocateCommandBuffers(VkDevice device,
536                           const VkCommandBufferAllocateInfo *pAllocateInfo,
537                           VkCommandBuffer *pCommandBuffers)
538 {
539    VN_TRACE_FUNC();
540    struct vn_device *dev = vn_device_from_handle(device);
541    struct vn_command_pool *pool =
542       vn_command_pool_from_handle(pAllocateInfo->commandPool);
543    const VkAllocationCallbacks *alloc = &pool->allocator;
544 
545    for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
546       struct vn_command_buffer *cmd =
547          vk_zalloc(alloc, sizeof(*cmd), VN_DEFAULT_ALIGN,
548                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
549       if (!cmd) {
550          for (uint32_t j = 0; j < i; j++) {
551             cmd = vn_command_buffer_from_handle(pCommandBuffers[j]);
552             vn_cs_encoder_fini(&cmd->cs);
553             list_del(&cmd->head);
554             vn_object_base_fini(&cmd->base);
555             vk_free(alloc, cmd);
556          }
557          memset(pCommandBuffers, 0,
558                 sizeof(*pCommandBuffers) * pAllocateInfo->commandBufferCount);
559          return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
560       }
561 
562       vn_object_base_init(&cmd->base, VK_OBJECT_TYPE_COMMAND_BUFFER,
563                           &dev->base);
564       cmd->device = dev;
565       cmd->allocator = pool->allocator;
566       cmd->level = pAllocateInfo->level;
567       cmd->queue_family_index = pool->queue_family_index;
568 
569       list_addtail(&cmd->head, &pool->command_buffers);
570 
571       cmd->state = VN_COMMAND_BUFFER_STATE_INITIAL;
572       vn_cs_encoder_init(&cmd->cs, dev->instance,
573                          VN_CS_ENCODER_STORAGE_SHMEM_POOL, 16 * 1024);
574 
575       VkCommandBuffer cmd_handle = vn_command_buffer_to_handle(cmd);
576       pCommandBuffers[i] = cmd_handle;
577    }
578 
579    vn_async_vkAllocateCommandBuffers(dev->instance, device, pAllocateInfo,
580                                      pCommandBuffers);
581 
582    return VK_SUCCESS;
583 }
584 
585 void
vn_FreeCommandBuffers(VkDevice device,VkCommandPool commandPool,uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers)586 vn_FreeCommandBuffers(VkDevice device,
587                       VkCommandPool commandPool,
588                       uint32_t commandBufferCount,
589                       const VkCommandBuffer *pCommandBuffers)
590 {
591    VN_TRACE_FUNC();
592    struct vn_device *dev = vn_device_from_handle(device);
593    struct vn_command_pool *pool = vn_command_pool_from_handle(commandPool);
594    const VkAllocationCallbacks *alloc = &pool->allocator;
595 
596    vn_async_vkFreeCommandBuffers(dev->instance, device, commandPool,
597                                  commandBufferCount, pCommandBuffers);
598 
599    for (uint32_t i = 0; i < commandBufferCount; i++) {
600       struct vn_command_buffer *cmd =
601          vn_command_buffer_from_handle(pCommandBuffers[i]);
602 
603       if (!cmd)
604          continue;
605 
606       if (cmd->builder.image_barriers)
607          vk_free(alloc, cmd->builder.image_barriers);
608 
609       vn_cs_encoder_fini(&cmd->cs);
610       list_del(&cmd->head);
611 
612       vn_object_base_fini(&cmd->base);
613       vk_free(alloc, cmd);
614    }
615 }
616 
617 VkResult
vn_ResetCommandBuffer(VkCommandBuffer commandBuffer,VkCommandBufferResetFlags flags)618 vn_ResetCommandBuffer(VkCommandBuffer commandBuffer,
619                       VkCommandBufferResetFlags flags)
620 {
621    VN_TRACE_FUNC();
622    struct vn_command_buffer *cmd =
623       vn_command_buffer_from_handle(commandBuffer);
624 
625    vn_cs_encoder_reset(&cmd->cs);
626    cmd->state = VN_COMMAND_BUFFER_STATE_INITIAL;
627    cmd->draw_cmd_batched = 0;
628 
629    vn_async_vkResetCommandBuffer(cmd->device->instance, commandBuffer, flags);
630 
631    return VK_SUCCESS;
632 }
633 
/* Scratch storage for vn_fix_command_buffer_begin_info: deep copies of the
 * begin info, the inheritance info, and the pNext structures kept after
 * filtering.  Must outlive the encode in vn_BeginCommandBuffer.
 */
struct vn_command_buffer_begin_info {
   VkCommandBufferBeginInfo begin;
   VkCommandBufferInheritanceInfo inheritance;
   VkCommandBufferInheritanceConditionalRenderingInfoEXT conditional_rendering;

   /* true when a secondary command buffer continues an inherited render pass
    * (RENDER_PASS_CONTINUE_BIT set and renderPass != VK_NULL_HANDLE)
    */
   bool has_inherited_pass;
};
641 
/* Sanitize begin_info before encoding, using *local as scratch storage.
 *
 * - For primary command buffers, pInheritanceInfo must be ignored: drop it.
 * - For secondary command buffers without RENDER_PASS_CONTINUE_BIT, the
 *   render pass, framebuffer, and subpass of the inheritance info are
 *   ignored: clear them.
 * - Filter the inheritance pNext chain, dropping structures the spec says
 *   are ignored in the current configuration.
 *
 * Returns begin_info unchanged when nothing needs fixing, otherwise
 * &local->begin.  Always sets local->has_inherited_pass so the caller knows
 * whether to enter render pass state.
 */
static const VkCommandBufferBeginInfo *
vn_fix_command_buffer_begin_info(struct vn_command_buffer *cmd,
                                 const VkCommandBufferBeginInfo *begin_info,
                                 struct vn_command_buffer_begin_info *local)
{
   local->has_inherited_pass = false;

   if (!begin_info->pInheritanceInfo)
      return begin_info;

   const bool is_cmd_secondary =
      cmd->level == VK_COMMAND_BUFFER_LEVEL_SECONDARY;
   const bool has_continue =
      begin_info->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
   const bool has_renderpass =
      is_cmd_secondary &&
      begin_info->pInheritanceInfo->renderPass != VK_NULL_HANDLE;

   /* Can early-return if dynamic rendering is used and no structures need to
    * be dropped from the pNext chain of VkCommandBufferInheritanceInfo.
    */
   if (is_cmd_secondary && has_continue && !has_renderpass)
      return begin_info;

   local->begin = *begin_info;

   /* primary command buffers ignore pInheritanceInfo entirely */
   if (!is_cmd_secondary) {
      local->begin.pInheritanceInfo = NULL;
      return &local->begin;
   }

   local->inheritance = *begin_info->pInheritanceInfo;
   local->begin.pInheritanceInfo = &local->inheritance;

   if (!has_continue) {
      local->inheritance.framebuffer = VK_NULL_HANDLE;
      local->inheritance.renderPass = VK_NULL_HANDLE;
      local->inheritance.subpass = 0;
   } else {
      /* With early-returns above, it must be an inherited pass. */
      local->has_inherited_pass = true;
   }

   /* Per spec, about VkCommandBufferInheritanceRenderingInfo:
    *
    * If VkCommandBufferInheritanceInfo::renderPass is not VK_NULL_HANDLE, or
    * VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT is not specified in
    * VkCommandBufferBeginInfo::flags, parameters of this structure are
    * ignored.
    */
   VkBaseOutStructure *head = NULL;
   VkBaseOutStructure *tail = NULL;
   vk_foreach_struct_const(src, local->inheritance.pNext) {
      void *pnext = NULL;
      switch (src->sType) {
      case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT:
         /* kept: copy into local storage so the rebuilt chain is contiguous
          * with *local and outlives the caller's chain
          */
         memcpy(
            &local->conditional_rendering, src,
            sizeof(VkCommandBufferInheritanceConditionalRenderingInfoEXT));
         pnext = &local->conditional_rendering;
         break;
      case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO:
      default:
         /* dropped from the filtered chain */
         break;
      }

      /* append any kept structure to the rebuilt chain */
      if (pnext) {
         if (!head)
            head = pnext;
         else
            tail->pNext = pnext;

         tail = pnext;
      }
   }
   local->inheritance.pNext = head;

   return &local->begin;
}
721 
VkResult
vn_BeginCommandBuffer(VkCommandBuffer commandBuffer,
                      const VkCommandBufferBeginInfo *pBeginInfo)
{
   VN_TRACE_FUNC();
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   struct vn_instance *instance = cmd->device->instance;
   size_t cmd_size;

   /* implicit reset: vkBeginCommandBuffer may be called on a command buffer
    * with previously recorded contents
    */
   vn_cs_encoder_reset(&cmd->cs);
   cmd->draw_cmd_batched = 0;

   /* sanitize pBeginInfo; local_begin_info provides the backing storage and
    * must stay alive until after the encode below
    */
   struct vn_command_buffer_begin_info local_begin_info;
   pBeginInfo =
      vn_fix_command_buffer_begin_info(cmd, pBeginInfo, &local_begin_info);

   cmd_size = vn_sizeof_vkBeginCommandBuffer(commandBuffer, pBeginInfo);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size)) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   vn_encode_vkBeginCommandBuffer(&cmd->cs, 0, commandBuffer, pBeginInfo);

   cmd->state = VN_COMMAND_BUFFER_STATE_RECORDING;

   /* a secondary command buffer continuing a render pass needs the same
    * builder state that vkCmdBeginRenderPass would set up
    */
   if (local_begin_info.has_inherited_pass) {
      const VkCommandBufferInheritanceInfo *inheritance_info =
         pBeginInfo->pInheritanceInfo;
      vn_cmd_begin_render_pass(
         cmd, vn_render_pass_from_handle(inheritance_info->renderPass),
         vn_framebuffer_from_handle(inheritance_info->framebuffer), NULL);
   }

   return VK_SUCCESS;
}
759 
/* Flush the recorded command stream to the renderer ring.
 *
 * No-op unless the command buffer is in the RECORDING state.  Any failure
 * marks the command buffer INVALID so vn_EndCommandBuffer can report an
 * error; the encoder is only reset on fatal-encode or successful submit.
 */
static void
vn_cmd_submit(struct vn_command_buffer *cmd)
{
   struct vn_instance *instance = cmd->device->instance;

   if (cmd->state != VN_COMMAND_BUFFER_STATE_RECORDING)
      return;

   vn_cs_encoder_commit(&cmd->cs);
   if (vn_cs_encoder_get_fatal(&cmd->cs)) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      vn_cs_encoder_reset(&cmd->cs);
      return;
   }

   /* NOTE(review): presumably waits so the renderer has processed the
    * buffer's blob creation before the ring references it — confirm
    * against the renderer's blob-id-0 support semantics.
    */
   if (unlikely(!instance->renderer->info.supports_blob_id_0))
      vn_instance_wait_roundtrip(instance, cmd->cs.current_buffer_roundtrip);

   if (vn_instance_ring_submit(instance, &cmd->cs) != VK_SUCCESS) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return;
   }

   /* Submitted: start a fresh batch. */
   vn_cs_encoder_reset(&cmd->cs);
   cmd->draw_cmd_batched = 0;
}
786 
787 static inline void
vn_cmd_count_draw_and_submit_on_batch_limit(struct vn_command_buffer * cmd)788 vn_cmd_count_draw_and_submit_on_batch_limit(struct vn_command_buffer *cmd)
789 {
790    if (++cmd->draw_cmd_batched >= vn_env.draw_cmd_batch_limit)
791       vn_cmd_submit(cmd);
792 }
793 
794 VkResult
vn_EndCommandBuffer(VkCommandBuffer commandBuffer)795 vn_EndCommandBuffer(VkCommandBuffer commandBuffer)
796 {
797    VN_TRACE_FUNC();
798    struct vn_command_buffer *cmd =
799       vn_command_buffer_from_handle(commandBuffer);
800    struct vn_instance *instance = cmd->device->instance;
801    size_t cmd_size;
802 
803    if (cmd->state != VN_COMMAND_BUFFER_STATE_RECORDING)
804       return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
805 
806    cmd_size = vn_sizeof_vkEndCommandBuffer(commandBuffer);
807    if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size)) {
808       cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
809       return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
810    }
811 
812    vn_encode_vkEndCommandBuffer(&cmd->cs, 0, commandBuffer);
813 
814    vn_cmd_submit(cmd);
815    if (cmd->state == VN_COMMAND_BUFFER_STATE_INVALID)
816       return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
817 
818    cmd->state = VN_COMMAND_BUFFER_STATE_EXECUTABLE;
819 
820    return VK_SUCCESS;
821 }
822 
/* Encode vkCmdBindPipeline into the command buffer's stream. */
void
vn_CmdBindPipeline(VkCommandBuffer commandBuffer,
                   VkPipelineBindPoint pipelineBindPoint,
                   VkPipeline pipeline)
{
   VN_CMD_ENQUEUE(vkCmdBindPipeline, commandBuffer, pipelineBindPoint,
                  pipeline);
}
831 
/* Encode vkCmdSetViewport into the command buffer's stream. */
void
vn_CmdSetViewport(VkCommandBuffer commandBuffer,
                  uint32_t firstViewport,
                  uint32_t viewportCount,
                  const VkViewport *pViewports)
{
   VN_CMD_ENQUEUE(vkCmdSetViewport, commandBuffer, firstViewport,
                  viewportCount, pViewports);
}
841 
/* Encode vkCmdSetScissor into the command buffer's stream. */
void
vn_CmdSetScissor(VkCommandBuffer commandBuffer,
                 uint32_t firstScissor,
                 uint32_t scissorCount,
                 const VkRect2D *pScissors)
{
   VN_CMD_ENQUEUE(vkCmdSetScissor, commandBuffer, firstScissor, scissorCount,
                  pScissors);
}
851 
/* Encode vkCmdSetLineWidth into the command buffer's stream. */
void
vn_CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
{
   VN_CMD_ENQUEUE(vkCmdSetLineWidth, commandBuffer, lineWidth);
}
857 
/* Encode vkCmdSetDepthBias into the command buffer's stream. */
void
vn_CmdSetDepthBias(VkCommandBuffer commandBuffer,
                   float depthBiasConstantFactor,
                   float depthBiasClamp,
                   float depthBiasSlopeFactor)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthBias, commandBuffer, depthBiasConstantFactor,
                  depthBiasClamp, depthBiasSlopeFactor);
}
867 
/* Encode vkCmdSetBlendConstants into the command buffer's stream. */
void
vn_CmdSetBlendConstants(VkCommandBuffer commandBuffer,
                        const float blendConstants[4])
{
   VN_CMD_ENQUEUE(vkCmdSetBlendConstants, commandBuffer, blendConstants);
}
874 
/* Encode vkCmdSetDepthBounds into the command buffer's stream. */
void
vn_CmdSetDepthBounds(VkCommandBuffer commandBuffer,
                     float minDepthBounds,
                     float maxDepthBounds)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthBounds, commandBuffer, minDepthBounds,
                  maxDepthBounds);
}
883 
/* Encode vkCmdSetStencilCompareMask into the command buffer's stream. */
void
vn_CmdSetStencilCompareMask(VkCommandBuffer commandBuffer,
                            VkStencilFaceFlags faceMask,
                            uint32_t compareMask)
{
   VN_CMD_ENQUEUE(vkCmdSetStencilCompareMask, commandBuffer, faceMask,
                  compareMask);
}
892 
/* Encode vkCmdSetStencilWriteMask into the command buffer's stream. */
void
vn_CmdSetStencilWriteMask(VkCommandBuffer commandBuffer,
                          VkStencilFaceFlags faceMask,
                          uint32_t writeMask)
{
   VN_CMD_ENQUEUE(vkCmdSetStencilWriteMask, commandBuffer, faceMask,
                  writeMask);
}
901 
/* Encode vkCmdSetStencilReference into the command buffer's stream. */
void
vn_CmdSetStencilReference(VkCommandBuffer commandBuffer,
                          VkStencilFaceFlags faceMask,
                          uint32_t reference)
{
   VN_CMD_ENQUEUE(vkCmdSetStencilReference, commandBuffer, faceMask,
                  reference);
}
910 
/* Encode vkCmdBindDescriptorSets into the command buffer's stream. */
void
vn_CmdBindDescriptorSets(VkCommandBuffer commandBuffer,
                         VkPipelineBindPoint pipelineBindPoint,
                         VkPipelineLayout layout,
                         uint32_t firstSet,
                         uint32_t descriptorSetCount,
                         const VkDescriptorSet *pDescriptorSets,
                         uint32_t dynamicOffsetCount,
                         const uint32_t *pDynamicOffsets)
{
   VN_CMD_ENQUEUE(vkCmdBindDescriptorSets, commandBuffer, pipelineBindPoint,
                  layout, firstSet, descriptorSetCount, pDescriptorSets,
                  dynamicOffsetCount, pDynamicOffsets);
}
925 
/* Encode vkCmdBindIndexBuffer into the command buffer's stream. */
void
vn_CmdBindIndexBuffer(VkCommandBuffer commandBuffer,
                      VkBuffer buffer,
                      VkDeviceSize offset,
                      VkIndexType indexType)
{
   VN_CMD_ENQUEUE(vkCmdBindIndexBuffer, commandBuffer, buffer, offset,
                  indexType);
}
935 
/* Encode vkCmdBindVertexBuffers into the command buffer's stream. */
void
vn_CmdBindVertexBuffers(VkCommandBuffer commandBuffer,
                        uint32_t firstBinding,
                        uint32_t bindingCount,
                        const VkBuffer *pBuffers,
                        const VkDeviceSize *pOffsets)
{
   VN_CMD_ENQUEUE(vkCmdBindVertexBuffers, commandBuffer, firstBinding,
                  bindingCount, pBuffers, pOffsets);
}
946 
/* Encode vkCmdDraw and count it toward the draw batch flush limit. */
void
vn_CmdDraw(VkCommandBuffer commandBuffer,
           uint32_t vertexCount,
           uint32_t instanceCount,
           uint32_t firstVertex,
           uint32_t firstInstance)
{
   VN_CMD_ENQUEUE(vkCmdDraw, commandBuffer, vertexCount, instanceCount,
                  firstVertex, firstInstance);

   vn_cmd_count_draw_and_submit_on_batch_limit(
      vn_command_buffer_from_handle(commandBuffer));
}
960 
/* Encode vkCmdBeginRendering into the command buffer's stream. */
void
vn_CmdBeginRendering(VkCommandBuffer commandBuffer,
                     const VkRenderingInfo *pRenderingInfo)
{
   VN_CMD_ENQUEUE(vkCmdBeginRendering, commandBuffer, pRenderingInfo);
}
967 
/* Encode vkCmdEndRendering into the command buffer's stream. */
void
vn_CmdEndRendering(VkCommandBuffer commandBuffer)
{
   VN_CMD_ENQUEUE(vkCmdEndRendering, commandBuffer);
}
973 
/* Encode vkCmdDrawIndexed and count it toward the draw batch flush limit. */
void
vn_CmdDrawIndexed(VkCommandBuffer commandBuffer,
                  uint32_t indexCount,
                  uint32_t instanceCount,
                  uint32_t firstIndex,
                  int32_t vertexOffset,
                  uint32_t firstInstance)
{
   VN_CMD_ENQUEUE(vkCmdDrawIndexed, commandBuffer, indexCount, instanceCount,
                  firstIndex, vertexOffset, firstInstance);

   vn_cmd_count_draw_and_submit_on_batch_limit(
      vn_command_buffer_from_handle(commandBuffer));
}
988 
/* Encode vkCmdDrawIndirect and count it toward the draw batch flush limit. */
void
vn_CmdDrawIndirect(VkCommandBuffer commandBuffer,
                   VkBuffer buffer,
                   VkDeviceSize offset,
                   uint32_t drawCount,
                   uint32_t stride)
{
   VN_CMD_ENQUEUE(vkCmdDrawIndirect, commandBuffer, buffer, offset, drawCount,
                  stride);

   vn_cmd_count_draw_and_submit_on_batch_limit(
      vn_command_buffer_from_handle(commandBuffer));
}
1002 
/* Encode vkCmdDrawIndexedIndirect and count it toward the draw batch flush
 * limit.
 */
void
vn_CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer,
                          VkBuffer buffer,
                          VkDeviceSize offset,
                          uint32_t drawCount,
                          uint32_t stride)
{
   VN_CMD_ENQUEUE(vkCmdDrawIndexedIndirect, commandBuffer, buffer, offset,
                  drawCount, stride);

   vn_cmd_count_draw_and_submit_on_batch_limit(
      vn_command_buffer_from_handle(commandBuffer));
}
1016 
/* Encode vkCmdDrawIndirectCount and count it toward the draw batch flush
 * limit.
 */
void
vn_CmdDrawIndirectCount(VkCommandBuffer commandBuffer,
                        VkBuffer buffer,
                        VkDeviceSize offset,
                        VkBuffer countBuffer,
                        VkDeviceSize countBufferOffset,
                        uint32_t maxDrawCount,
                        uint32_t stride)
{
   VN_CMD_ENQUEUE(vkCmdDrawIndirectCount, commandBuffer, buffer, offset,
                  countBuffer, countBufferOffset, maxDrawCount, stride);

   vn_cmd_count_draw_and_submit_on_batch_limit(
      vn_command_buffer_from_handle(commandBuffer));
}
1032 
/* Encode vkCmdDrawIndexedIndirectCount and count it toward the draw batch
 * flush limit.
 */
void
vn_CmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer,
                               VkBuffer buffer,
                               VkDeviceSize offset,
                               VkBuffer countBuffer,
                               VkDeviceSize countBufferOffset,
                               uint32_t maxDrawCount,
                               uint32_t stride)
{
   VN_CMD_ENQUEUE(vkCmdDrawIndexedIndirectCount, commandBuffer, buffer,
                  offset, countBuffer, countBufferOffset, maxDrawCount,
                  stride);

   vn_cmd_count_draw_and_submit_on_batch_limit(
      vn_command_buffer_from_handle(commandBuffer));
}
1049 
/* Encode vkCmdDispatch into the command buffer's stream. */
void
vn_CmdDispatch(VkCommandBuffer commandBuffer,
               uint32_t groupCountX,
               uint32_t groupCountY,
               uint32_t groupCountZ)
{
   VN_CMD_ENQUEUE(vkCmdDispatch, commandBuffer, groupCountX, groupCountY,
                  groupCountZ);
}
1059 
/* Encode vkCmdDispatchIndirect into the command buffer's stream. */
void
vn_CmdDispatchIndirect(VkCommandBuffer commandBuffer,
                       VkBuffer buffer,
                       VkDeviceSize offset)
{
   VN_CMD_ENQUEUE(vkCmdDispatchIndirect, commandBuffer, buffer, offset);
}
1067 
/* Encode vkCmdCopyBuffer into the command buffer's stream. */
void
vn_CmdCopyBuffer(VkCommandBuffer commandBuffer,
                 VkBuffer srcBuffer,
                 VkBuffer dstBuffer,
                 uint32_t regionCount,
                 const VkBufferCopy *pRegions)
{
   VN_CMD_ENQUEUE(vkCmdCopyBuffer, commandBuffer, srcBuffer, dstBuffer,
                  regionCount, pRegions);
}
1078 
/* Encode vkCmdCopyBuffer2 into the command buffer's stream. */
void
vn_CmdCopyBuffer2(VkCommandBuffer commandBuffer,
                  const VkCopyBufferInfo2 *pCopyBufferInfo)
{
   VN_CMD_ENQUEUE(vkCmdCopyBuffer2, commandBuffer, pCopyBufferInfo);
}
1085 
/* Encode vkCmdCopyImage into the command buffer's stream. */
void
vn_CmdCopyImage(VkCommandBuffer commandBuffer,
                VkImage srcImage,
                VkImageLayout srcImageLayout,
                VkImage dstImage,
                VkImageLayout dstImageLayout,
                uint32_t regionCount,
                const VkImageCopy *pRegions)
{
   VN_CMD_ENQUEUE(vkCmdCopyImage, commandBuffer, srcImage, srcImageLayout,
                  dstImage, dstImageLayout, regionCount, pRegions);
}
1098 
/* Encode vkCmdCopyImage2 into the command buffer's stream. */
void
vn_CmdCopyImage2(VkCommandBuffer commandBuffer,
                 const VkCopyImageInfo2 *pCopyImageInfo)
{
   VN_CMD_ENQUEUE(vkCmdCopyImage2, commandBuffer, pCopyImageInfo);
}
1105 
/* Encode vkCmdBlitImage into the command buffer's stream. */
void
vn_CmdBlitImage(VkCommandBuffer commandBuffer,
                VkImage srcImage,
                VkImageLayout srcImageLayout,
                VkImage dstImage,
                VkImageLayout dstImageLayout,
                uint32_t regionCount,
                const VkImageBlit *pRegions,
                VkFilter filter)
{
   VN_CMD_ENQUEUE(vkCmdBlitImage, commandBuffer, srcImage, srcImageLayout,
                  dstImage, dstImageLayout, regionCount, pRegions, filter);
}
1119 
/* Encode vkCmdBlitImage2 into the command buffer's stream. */
void
vn_CmdBlitImage2(VkCommandBuffer commandBuffer,
                 const VkBlitImageInfo2 *pBlitImageInfo)
{
   VN_CMD_ENQUEUE(vkCmdBlitImage2, commandBuffer, pBlitImageInfo);
}
1126 
/* Encode vkCmdCopyBufferToImage into the command buffer's stream. */
void
vn_CmdCopyBufferToImage(VkCommandBuffer commandBuffer,
                        VkBuffer srcBuffer,
                        VkImage dstImage,
                        VkImageLayout dstImageLayout,
                        uint32_t regionCount,
                        const VkBufferImageCopy *pRegions)
{
   VN_CMD_ENQUEUE(vkCmdCopyBufferToImage, commandBuffer, srcBuffer, dstImage,
                  dstImageLayout, regionCount, pRegions);
}
1138 
/* Encode vkCmdCopyBufferToImage2 into the command buffer's stream. */
void
vn_CmdCopyBufferToImage2(
   VkCommandBuffer commandBuffer,
   const VkCopyBufferToImageInfo2 *pCopyBufferToImageInfo)
{
   VN_CMD_ENQUEUE(vkCmdCopyBufferToImage2, commandBuffer,
                  pCopyBufferToImageInfo);
}
1147 
1148 static bool
vn_needs_prime_blit(VkImage src_image,VkImageLayout src_image_layout)1149 vn_needs_prime_blit(VkImage src_image, VkImageLayout src_image_layout)
1150 {
1151    if (src_image_layout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR &&
1152        VN_PRESENT_SRC_INTERNAL_LAYOUT != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
1153 
1154       /* sanity check */
1155       ASSERTED const struct vn_image *img = vn_image_from_handle(src_image);
1156       assert(img->wsi.is_wsi && img->wsi.is_prime_blit_src);
1157       return true;
1158    }
1159 
1160    return false;
1161 }
1162 
/* Release dst_buffer to the foreign (external) queue family after a prime
 * blit copy wrote it: a transfer-write to bottom-of-pipe buffer barrier
 * covering the whole buffer.
 */
static void
vn_transition_prime_layout(struct vn_command_buffer *cmd, VkBuffer dst_buffer)
{
   const VkBufferMemoryBarrier buf_barrier = {
      .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
      .srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
      .srcQueueFamilyIndex = cmd->queue_family_index,
      .dstQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT,
      .buffer = dst_buffer,
      .size = VK_WHOLE_SIZE,
   };
   vn_cmd_encode_memory_barriers(cmd, VK_PIPELINE_STAGE_TRANSFER_BIT,
                                 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 1,
                                 &buf_barrier, 0, NULL);
}
1178 
1179 void
vn_CmdCopyImageToBuffer(VkCommandBuffer commandBuffer,VkImage srcImage,VkImageLayout srcImageLayout,VkBuffer dstBuffer,uint32_t regionCount,const VkBufferImageCopy * pRegions)1180 vn_CmdCopyImageToBuffer(VkCommandBuffer commandBuffer,
1181                         VkImage srcImage,
1182                         VkImageLayout srcImageLayout,
1183                         VkBuffer dstBuffer,
1184                         uint32_t regionCount,
1185                         const VkBufferImageCopy *pRegions)
1186 {
1187    struct vn_command_buffer *cmd =
1188       vn_command_buffer_from_handle(commandBuffer);
1189 
1190    bool prime_blit = vn_needs_prime_blit(srcImage, srcImageLayout);
1191    if (prime_blit)
1192       srcImageLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;
1193 
1194    VN_CMD_ENQUEUE(vkCmdCopyImageToBuffer, commandBuffer, srcImage,
1195                   srcImageLayout, dstBuffer, regionCount, pRegions);
1196 
1197    if (prime_blit)
1198       vn_transition_prime_layout(cmd, dstBuffer);
1199 }
1200 
1201 void
vn_CmdCopyImageToBuffer2(VkCommandBuffer commandBuffer,const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo)1202 vn_CmdCopyImageToBuffer2(
1203    VkCommandBuffer commandBuffer,
1204    const VkCopyImageToBufferInfo2 *pCopyImageToBufferInfo)
1205 {
1206    struct vn_command_buffer *cmd =
1207       vn_command_buffer_from_handle(commandBuffer);
1208    struct VkCopyImageToBufferInfo2 copy_info = *pCopyImageToBufferInfo;
1209 
1210    bool prime_blit =
1211       vn_needs_prime_blit(copy_info.srcImage, copy_info.srcImageLayout);
1212    if (prime_blit)
1213       copy_info.srcImageLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;
1214 
1215    VN_CMD_ENQUEUE(vkCmdCopyImageToBuffer2, commandBuffer, &copy_info);
1216 
1217    if (prime_blit)
1218       vn_transition_prime_layout(cmd, copy_info.dstBuffer);
1219 }
1220 
/* Encode vkCmdUpdateBuffer into the command buffer's stream. */
void
vn_CmdUpdateBuffer(VkCommandBuffer commandBuffer,
                   VkBuffer dstBuffer,
                   VkDeviceSize dstOffset,
                   VkDeviceSize dataSize,
                   const void *pData)
{
   VN_CMD_ENQUEUE(vkCmdUpdateBuffer, commandBuffer, dstBuffer, dstOffset,
                  dataSize, pData);
}
1231 
/* Encode vkCmdFillBuffer into the command buffer's stream. */
void
vn_CmdFillBuffer(VkCommandBuffer commandBuffer,
                 VkBuffer dstBuffer,
                 VkDeviceSize dstOffset,
                 VkDeviceSize size,
                 uint32_t data)
{
   VN_CMD_ENQUEUE(vkCmdFillBuffer, commandBuffer, dstBuffer, dstOffset, size,
                  data);
}
1242 
/* Encode vkCmdClearColorImage into the command buffer's stream. */
void
vn_CmdClearColorImage(VkCommandBuffer commandBuffer,
                      VkImage image,
                      VkImageLayout imageLayout,
                      const VkClearColorValue *pColor,
                      uint32_t rangeCount,
                      const VkImageSubresourceRange *pRanges)
{
   VN_CMD_ENQUEUE(vkCmdClearColorImage, commandBuffer, image, imageLayout,
                  pColor, rangeCount, pRanges);
}
1254 
/* Encode vkCmdClearDepthStencilImage into the command buffer's stream. */
void
vn_CmdClearDepthStencilImage(VkCommandBuffer commandBuffer,
                             VkImage image,
                             VkImageLayout imageLayout,
                             const VkClearDepthStencilValue *pDepthStencil,
                             uint32_t rangeCount,
                             const VkImageSubresourceRange *pRanges)
{
   VN_CMD_ENQUEUE(vkCmdClearDepthStencilImage, commandBuffer, image,
                  imageLayout, pDepthStencil, rangeCount, pRanges);
}
1266 
/* Encode vkCmdClearAttachments into the command buffer's stream. */
void
vn_CmdClearAttachments(VkCommandBuffer commandBuffer,
                       uint32_t attachmentCount,
                       const VkClearAttachment *pAttachments,
                       uint32_t rectCount,
                       const VkClearRect *pRects)
{
   VN_CMD_ENQUEUE(vkCmdClearAttachments, commandBuffer, attachmentCount,
                  pAttachments, rectCount, pRects);
}
1277 
/* Encode vkCmdResolveImage into the command buffer's stream. */
void
vn_CmdResolveImage(VkCommandBuffer commandBuffer,
                   VkImage srcImage,
                   VkImageLayout srcImageLayout,
                   VkImage dstImage,
                   VkImageLayout dstImageLayout,
                   uint32_t regionCount,
                   const VkImageResolve *pRegions)
{
   VN_CMD_ENQUEUE(vkCmdResolveImage, commandBuffer, srcImage, srcImageLayout,
                  dstImage, dstImageLayout, regionCount, pRegions);
}
1290 
/* Encode vkCmdResolveImage2 into the command buffer's stream. */
void
vn_CmdResolveImage2(VkCommandBuffer commandBuffer,
                    const VkResolveImageInfo2 *pResolveImageInfo)
{
   VN_CMD_ENQUEUE(vkCmdResolveImage2, commandBuffer, pResolveImageInfo);
}
1297 
/* Encode vkCmdSetEvent, then record the event-set in the feedback
 * machinery so the driver can observe the event state.
 */
void
vn_CmdSetEvent(VkCommandBuffer commandBuffer,
               VkEvent event,
               VkPipelineStageFlags stageMask)
{
   VN_CMD_ENQUEUE(vkCmdSetEvent, commandBuffer, event, stageMask);

   vn_feedback_event_cmd_record(commandBuffer, event, stageMask,
                                VK_EVENT_SET);
}
1308 
/* Encode vkCmdResetEvent, then record the event-reset in the feedback
 * machinery so the driver can observe the event state.
 */
void
vn_CmdResetEvent(VkCommandBuffer commandBuffer,
                 VkEvent event,
                 VkPipelineStageFlags stageMask)
{
   VN_CMD_ENQUEUE(vkCmdResetEvent, commandBuffer, event, stageMask);

   vn_feedback_event_cmd_record(commandBuffer, event, stageMask,
                                VK_EVENT_RESET);
}
1319 
/* Encode vkCmdWaitEvents after fixing up image memory barriers.
 *
 * vn_cmd_wait_events_fix_image_memory_barriers reorders the barrier array
 * so that the trailing transfer_count barriers must instead be encoded as a
 * separate pipeline barrier, which is emitted after the wait-events itself.
 */
void
vn_CmdWaitEvents(VkCommandBuffer commandBuffer,
                 uint32_t eventCount,
                 const VkEvent *pEvents,
                 VkPipelineStageFlags srcStageMask,
                 VkPipelineStageFlags dstStageMask,
                 uint32_t memoryBarrierCount,
                 const VkMemoryBarrier *pMemoryBarriers,
                 uint32_t bufferMemoryBarrierCount,
                 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                 uint32_t imageMemoryBarrierCount,
                 const VkImageMemoryBarrier *pImageMemoryBarriers)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   uint32_t transfer_count;

   pImageMemoryBarriers = vn_cmd_wait_events_fix_image_memory_barriers(
      cmd, pImageMemoryBarriers, imageMemoryBarrierCount, &transfer_count);
   /* the trailing transfer_count barriers are emitted separately below */
   imageMemoryBarrierCount -= transfer_count;

   VN_CMD_ENQUEUE(vkCmdWaitEvents, commandBuffer, eventCount, pEvents,
                  srcStageMask, dstStageMask, memoryBarrierCount,
                  pMemoryBarriers, bufferMemoryBarrierCount,
                  pBufferMemoryBarriers, imageMemoryBarrierCount,
                  pImageMemoryBarriers);

   if (transfer_count) {
      /* advance past the barriers already encoded above */
      pImageMemoryBarriers += imageMemoryBarrierCount;
      vn_cmd_encode_memory_barriers(cmd, srcStageMask, dstStageMask, 0, NULL,
                                    transfer_count, pImageMemoryBarriers);
   }
}
1353 
/* Encode vkCmdPipelineBarrier after fixing up image memory barriers
 * (present-src layout substitution) via
 * vn_cmd_pipeline_barrier_fix_image_memory_barriers.
 */
void
vn_CmdPipelineBarrier(VkCommandBuffer commandBuffer,
                      VkPipelineStageFlags srcStageMask,
                      VkPipelineStageFlags dstStageMask,
                      VkDependencyFlags dependencyFlags,
                      uint32_t memoryBarrierCount,
                      const VkMemoryBarrier *pMemoryBarriers,
                      uint32_t bufferMemoryBarrierCount,
                      const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                      uint32_t imageMemoryBarrierCount,
                      const VkImageMemoryBarrier *pImageMemoryBarriers)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);

   pImageMemoryBarriers = vn_cmd_pipeline_barrier_fix_image_memory_barriers(
      cmd, pImageMemoryBarriers, imageMemoryBarrierCount);

   VN_CMD_ENQUEUE(vkCmdPipelineBarrier, commandBuffer, srcStageMask,
                  dstStageMask, dependencyFlags, memoryBarrierCount,
                  pMemoryBarriers, bufferMemoryBarrierCount,
                  pBufferMemoryBarriers, imageMemoryBarrierCount,
                  pImageMemoryBarriers);
}
1378 
/* Encode vkCmdBeginQuery into the command buffer's stream. */
void
vn_CmdBeginQuery(VkCommandBuffer commandBuffer,
                 VkQueryPool queryPool,
                 uint32_t query,
                 VkQueryControlFlags flags)
{
   VN_CMD_ENQUEUE(vkCmdBeginQuery, commandBuffer, queryPool, query, flags);
}
1387 
/* Encode vkCmdEndQuery into the command buffer's stream. */
void
vn_CmdEndQuery(VkCommandBuffer commandBuffer,
               VkQueryPool queryPool,
               uint32_t query)
{
   VN_CMD_ENQUEUE(vkCmdEndQuery, commandBuffer, queryPool, query);
}
1395 
/* Encode vkCmdResetQueryPool into the command buffer's stream. */
void
vn_CmdResetQueryPool(VkCommandBuffer commandBuffer,
                     VkQueryPool queryPool,
                     uint32_t firstQuery,
                     uint32_t queryCount)
{
   VN_CMD_ENQUEUE(vkCmdResetQueryPool, commandBuffer, queryPool, firstQuery,
                  queryCount);
}
1405 
/* Encode vkCmdWriteTimestamp into the command buffer's stream. */
void
vn_CmdWriteTimestamp(VkCommandBuffer commandBuffer,
                     VkPipelineStageFlagBits pipelineStage,
                     VkQueryPool queryPool,
                     uint32_t query)
{
   VN_CMD_ENQUEUE(vkCmdWriteTimestamp, commandBuffer, pipelineStage,
                  queryPool, query);
}
1415 
/* Encode vkCmdCopyQueryPoolResults into the command buffer's stream. */
void
vn_CmdCopyQueryPoolResults(VkCommandBuffer commandBuffer,
                           VkQueryPool queryPool,
                           uint32_t firstQuery,
                           uint32_t queryCount,
                           VkBuffer dstBuffer,
                           VkDeviceSize dstOffset,
                           VkDeviceSize stride,
                           VkQueryResultFlags flags)
{
   VN_CMD_ENQUEUE(vkCmdCopyQueryPoolResults, commandBuffer, queryPool,
                  firstQuery, queryCount, dstBuffer, dstOffset, stride,
                  flags);
}
1430 
/* Encode vkCmdPushConstants (including the pValues payload) into the
 * command buffer's command stream.
 */
void
vn_CmdPushConstants(VkCommandBuffer commandBuffer,
                    VkPipelineLayout layout,
                    VkShaderStageFlags stageFlags,
                    uint32_t offset,
                    uint32_t size,
                    const void *pValues)
{
   VN_CMD_ENQUEUE(vkCmdPushConstants, commandBuffer, layout, stageFlags,
                  offset, size, pValues);
}
1442 
1443 void
vn_CmdBeginRenderPass(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,VkSubpassContents contents)1444 vn_CmdBeginRenderPass(VkCommandBuffer commandBuffer,
1445                       const VkRenderPassBeginInfo *pRenderPassBegin,
1446                       VkSubpassContents contents)
1447 {
1448    struct vn_command_buffer *cmd =
1449       vn_command_buffer_from_handle(commandBuffer);
1450 
1451    vn_cmd_begin_render_pass(
1452       cmd, vn_render_pass_from_handle(pRenderPassBegin->renderPass),
1453       vn_framebuffer_from_handle(pRenderPassBegin->framebuffer),
1454       pRenderPassBegin);
1455 
1456    VN_CMD_ENQUEUE(vkCmdBeginRenderPass, commandBuffer, pRenderPassBegin,
1457                   contents);
1458 }
1459 
/* Encode vkCmdNextSubpass into the command buffer's command stream. */
void
vn_CmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents)
{
   VN_CMD_ENQUEUE(vkCmdNextSubpass, commandBuffer, contents);
}
1465 
1466 void
vn_CmdEndRenderPass(VkCommandBuffer commandBuffer)1467 vn_CmdEndRenderPass(VkCommandBuffer commandBuffer)
1468 {
1469    struct vn_command_buffer *cmd =
1470       vn_command_buffer_from_handle(commandBuffer);
1471 
1472    VN_CMD_ENQUEUE(vkCmdEndRenderPass, commandBuffer);
1473 
1474    vn_cmd_end_render_pass(cmd);
1475 }
1476 
1477 void
vn_CmdBeginRenderPass2(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,const VkSubpassBeginInfo * pSubpassBeginInfo)1478 vn_CmdBeginRenderPass2(VkCommandBuffer commandBuffer,
1479                        const VkRenderPassBeginInfo *pRenderPassBegin,
1480                        const VkSubpassBeginInfo *pSubpassBeginInfo)
1481 {
1482    struct vn_command_buffer *cmd =
1483       vn_command_buffer_from_handle(commandBuffer);
1484 
1485    vn_cmd_begin_render_pass(
1486       cmd, vn_render_pass_from_handle(pRenderPassBegin->renderPass),
1487       vn_framebuffer_from_handle(pRenderPassBegin->framebuffer),
1488       pRenderPassBegin);
1489 
1490    VN_CMD_ENQUEUE(vkCmdBeginRenderPass2, commandBuffer, pRenderPassBegin,
1491                   pSubpassBeginInfo);
1492 }
1493 
/* Encode vkCmdNextSubpass2 into the command buffer's command stream. */
void
vn_CmdNextSubpass2(VkCommandBuffer commandBuffer,
                   const VkSubpassBeginInfo *pSubpassBeginInfo,
                   const VkSubpassEndInfo *pSubpassEndInfo)
{
   VN_CMD_ENQUEUE(vkCmdNextSubpass2, commandBuffer, pSubpassBeginInfo,
                  pSubpassEndInfo);
}
1502 
1503 void
vn_CmdEndRenderPass2(VkCommandBuffer commandBuffer,const VkSubpassEndInfo * pSubpassEndInfo)1504 vn_CmdEndRenderPass2(VkCommandBuffer commandBuffer,
1505                      const VkSubpassEndInfo *pSubpassEndInfo)
1506 {
1507    struct vn_command_buffer *cmd =
1508       vn_command_buffer_from_handle(commandBuffer);
1509 
1510    VN_CMD_ENQUEUE(vkCmdEndRenderPass2, commandBuffer, pSubpassEndInfo);
1511 
1512    vn_cmd_end_render_pass(cmd);
1513 }
1514 
/* Encode vkCmdExecuteCommands (secondary command buffer execution) into
 * the command buffer's command stream.
 */
void
vn_CmdExecuteCommands(VkCommandBuffer commandBuffer,
                      uint32_t commandBufferCount,
                      const VkCommandBuffer *pCommandBuffers)
{
   VN_CMD_ENQUEUE(vkCmdExecuteCommands, commandBuffer, commandBufferCount,
                  pCommandBuffers);
}
1523 
/* Encode vkCmdSetDeviceMask into the command buffer's command stream. */
void
vn_CmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask)
{
   VN_CMD_ENQUEUE(vkCmdSetDeviceMask, commandBuffer, deviceMask);
}
1529 
/* Encode vkCmdDispatchBase into the command buffer's command stream. */
void
vn_CmdDispatchBase(VkCommandBuffer commandBuffer,
                   uint32_t baseGroupX,
                   uint32_t baseGroupY,
                   uint32_t baseGroupZ,
                   uint32_t groupCountX,
                   uint32_t groupCountY,
                   uint32_t groupCountZ)
{
   VN_CMD_ENQUEUE(vkCmdDispatchBase, commandBuffer, baseGroupX, baseGroupY,
                  baseGroupZ, groupCountX, groupCountY, groupCountZ);
}
1542 
/* Encode vkCmdSetLineStippleEXT into the command buffer's command stream. */
void
vn_CmdSetLineStippleEXT(VkCommandBuffer commandBuffer,
                        uint32_t lineStippleFactor,
                        uint16_t lineStipplePattern)
{
   VN_CMD_ENQUEUE(vkCmdSetLineStippleEXT, commandBuffer, lineStippleFactor,
                  lineStipplePattern);
}
1551 
/* Encode vkCmdBeginQueryIndexedEXT into the command buffer's command
 * stream.
 */
void
vn_CmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer,
                           VkQueryPool queryPool,
                           uint32_t query,
                           VkQueryControlFlags flags,
                           uint32_t index)
{
   VN_CMD_ENQUEUE(vkCmdBeginQueryIndexedEXT, commandBuffer, queryPool, query,
                  flags, index);
}
1562 
/* Encode vkCmdEndQueryIndexedEXT into the command buffer's command stream. */
void
vn_CmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer,
                         VkQueryPool queryPool,
                         uint32_t query,
                         uint32_t index)
{
   VN_CMD_ENQUEUE(vkCmdEndQueryIndexedEXT, commandBuffer, queryPool, query,
                  index);
}
1572 
/* Encode vkCmdBindTransformFeedbackBuffersEXT into the command buffer's
 * command stream.
 */
void
vn_CmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer,
                                      uint32_t firstBinding,
                                      uint32_t bindingCount,
                                      const VkBuffer *pBuffers,
                                      const VkDeviceSize *pOffsets,
                                      const VkDeviceSize *pSizes)
{
   VN_CMD_ENQUEUE(vkCmdBindTransformFeedbackBuffersEXT, commandBuffer,
                  firstBinding, bindingCount, pBuffers, pOffsets, pSizes);
}
1584 
/* Encode vkCmdBeginTransformFeedbackEXT into the command buffer's command
 * stream.
 */
void
vn_CmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer,
                                uint32_t firstCounterBuffer,
                                uint32_t counterBufferCount,
                                const VkBuffer *pCounterBuffers,
                                const VkDeviceSize *pCounterBufferOffsets)
{
   VN_CMD_ENQUEUE(vkCmdBeginTransformFeedbackEXT, commandBuffer,
                  firstCounterBuffer, counterBufferCount, pCounterBuffers,
                  pCounterBufferOffsets);
}
1596 
/* Encode vkCmdEndTransformFeedbackEXT into the command buffer's command
 * stream.
 */
void
vn_CmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer,
                              uint32_t firstCounterBuffer,
                              uint32_t counterBufferCount,
                              const VkBuffer *pCounterBuffers,
                              const VkDeviceSize *pCounterBufferOffsets)
{
   VN_CMD_ENQUEUE(vkCmdEndTransformFeedbackEXT, commandBuffer,
                  firstCounterBuffer, counterBufferCount, pCounterBuffers,
                  pCounterBufferOffsets);
}
1608 
1609 void
vn_CmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer,uint32_t instanceCount,uint32_t firstInstance,VkBuffer counterBuffer,VkDeviceSize counterBufferOffset,uint32_t counterOffset,uint32_t vertexStride)1610 vn_CmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer,
1611                                uint32_t instanceCount,
1612                                uint32_t firstInstance,
1613                                VkBuffer counterBuffer,
1614                                VkDeviceSize counterBufferOffset,
1615                                uint32_t counterOffset,
1616                                uint32_t vertexStride)
1617 {
1618    VN_CMD_ENQUEUE(vkCmdDrawIndirectByteCountEXT, commandBuffer, instanceCount,
1619                   firstInstance, counterBuffer, counterBufferOffset,
1620                   counterOffset, vertexStride);
1621 
1622    vn_cmd_count_draw_and_submit_on_batch_limit(
1623       vn_command_buffer_from_handle(commandBuffer));
1624 }
1625 
/* Encode vkCmdBindVertexBuffers2 into the command buffer's command stream. */
void
vn_CmdBindVertexBuffers2(VkCommandBuffer commandBuffer,
                         uint32_t firstBinding,
                         uint32_t bindingCount,
                         const VkBuffer *pBuffers,
                         const VkDeviceSize *pOffsets,
                         const VkDeviceSize *pSizes,
                         const VkDeviceSize *pStrides)
{
   VN_CMD_ENQUEUE(vkCmdBindVertexBuffers2, commandBuffer, firstBinding,
                  bindingCount, pBuffers, pOffsets, pSizes, pStrides);
}
1638 
/* Encode vkCmdSetCullMode (dynamic state) into the command stream. */
void
vn_CmdSetCullMode(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode)
{
   VN_CMD_ENQUEUE(vkCmdSetCullMode, commandBuffer, cullMode);
}
1644 
/* Encode vkCmdSetDepthBoundsTestEnable (dynamic state) into the command
 * stream.
 */
void
vn_CmdSetDepthBoundsTestEnable(VkCommandBuffer commandBuffer,
                               VkBool32 depthBoundsTestEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthBoundsTestEnable, commandBuffer,
                  depthBoundsTestEnable);
}
1652 
/* Encode vkCmdSetDepthCompareOp (dynamic state) into the command stream. */
void
vn_CmdSetDepthCompareOp(VkCommandBuffer commandBuffer,
                        VkCompareOp depthCompareOp)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthCompareOp, commandBuffer, depthCompareOp);
}
1659 
/* Encode vkCmdSetDepthTestEnable (dynamic state) into the command stream. */
void
vn_CmdSetDepthTestEnable(VkCommandBuffer commandBuffer,
                         VkBool32 depthTestEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthTestEnable, commandBuffer, depthTestEnable);
}
1666 
/* Encode vkCmdSetDepthWriteEnable (dynamic state) into the command stream. */
void
vn_CmdSetDepthWriteEnable(VkCommandBuffer commandBuffer,
                          VkBool32 depthWriteEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthWriteEnable, commandBuffer, depthWriteEnable);
}
1673 
/* Encode vkCmdSetFrontFace (dynamic state) into the command stream. */
void
vn_CmdSetFrontFace(VkCommandBuffer commandBuffer, VkFrontFace frontFace)
{
   VN_CMD_ENQUEUE(vkCmdSetFrontFace, commandBuffer, frontFace);
}
1679 
/* Encode vkCmdSetPrimitiveTopology (dynamic state) into the command
 * stream.
 */
void
vn_CmdSetPrimitiveTopology(VkCommandBuffer commandBuffer,
                           VkPrimitiveTopology primitiveTopology)
{
   VN_CMD_ENQUEUE(vkCmdSetPrimitiveTopology, commandBuffer,
                  primitiveTopology);
}
1687 
/* Encode vkCmdSetScissorWithCount (dynamic state) into the command
 * stream.
 */
void
vn_CmdSetScissorWithCount(VkCommandBuffer commandBuffer,
                          uint32_t scissorCount,
                          const VkRect2D *pScissors)
{
   VN_CMD_ENQUEUE(vkCmdSetScissorWithCount, commandBuffer, scissorCount,
                  pScissors);
}
1696 
/* Encode vkCmdSetStencilOp (dynamic state) into the command stream. */
void
vn_CmdSetStencilOp(VkCommandBuffer commandBuffer,
                   VkStencilFaceFlags faceMask,
                   VkStencilOp failOp,
                   VkStencilOp passOp,
                   VkStencilOp depthFailOp,
                   VkCompareOp compareOp)
{
   VN_CMD_ENQUEUE(vkCmdSetStencilOp, commandBuffer, faceMask, failOp, passOp,
                  depthFailOp, compareOp);
}
1708 
/* Encode vkCmdSetStencilTestEnable (dynamic state) into the command
 * stream.
 */
void
vn_CmdSetStencilTestEnable(VkCommandBuffer commandBuffer,
                           VkBool32 stencilTestEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetStencilTestEnable, commandBuffer,
                  stencilTestEnable);
}
1716 
/* Encode vkCmdSetViewportWithCount (dynamic state) into the command
 * stream.
 */
void
vn_CmdSetViewportWithCount(VkCommandBuffer commandBuffer,
                           uint32_t viewportCount,
                           const VkViewport *pViewports)
{
   VN_CMD_ENQUEUE(vkCmdSetViewportWithCount, commandBuffer, viewportCount,
                  pViewports);
}
1725 
/* Encode vkCmdSetDepthBiasEnable (dynamic state) into the command stream. */
void
vn_CmdSetDepthBiasEnable(VkCommandBuffer commandBuffer,
                         VkBool32 depthBiasEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthBiasEnable, commandBuffer, depthBiasEnable);
}
1732 
/* Encode vkCmdSetLogicOpEXT (dynamic state) into the command stream. */
void
vn_CmdSetLogicOpEXT(VkCommandBuffer commandBuffer, VkLogicOp logicOp)
{
   VN_CMD_ENQUEUE(vkCmdSetLogicOpEXT, commandBuffer, logicOp);
}
1738 
/* Encode vkCmdSetPatchControlPointsEXT (dynamic state) into the command
 * stream.
 */
void
vn_CmdSetPatchControlPointsEXT(VkCommandBuffer commandBuffer,
                               uint32_t patchControlPoints)
{
   VN_CMD_ENQUEUE(vkCmdSetPatchControlPointsEXT, commandBuffer,
                  patchControlPoints);
}
1746 
/* Encode vkCmdSetPrimitiveRestartEnable (dynamic state) into the command
 * stream.
 */
void
vn_CmdSetPrimitiveRestartEnable(VkCommandBuffer commandBuffer,
                                VkBool32 primitiveRestartEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetPrimitiveRestartEnable, commandBuffer,
                  primitiveRestartEnable);
}
1754 
/* Encode vkCmdSetRasterizerDiscardEnable (dynamic state) into the command
 * stream.
 */
void
vn_CmdSetRasterizerDiscardEnable(VkCommandBuffer commandBuffer,
                                 VkBool32 rasterizerDiscardEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetRasterizerDiscardEnable, commandBuffer,
                  rasterizerDiscardEnable);
}
1762 
/* Encode vkCmdBeginConditionalRenderingEXT into the command buffer's
 * command stream.
 */
void
vn_CmdBeginConditionalRenderingEXT(
   VkCommandBuffer commandBuffer,
   const VkConditionalRenderingBeginInfoEXT *pConditionalRenderingBegin)
{
   VN_CMD_ENQUEUE(vkCmdBeginConditionalRenderingEXT, commandBuffer,
                  pConditionalRenderingBegin);
}
1771 
/* Encode vkCmdEndConditionalRenderingEXT into the command buffer's
 * command stream.
 */
void
vn_CmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer)
{
   VN_CMD_ENQUEUE(vkCmdEndConditionalRenderingEXT, commandBuffer);
}
1777