• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright 2019 Google LLC
3  * SPDX-License-Identifier: MIT
4  *
5  * based in part on anv and radv which are:
6  * Copyright © 2015 Intel Corporation
7  * Copyright © 2016 Red Hat.
8  * Copyright © 2016 Bas Nieuwenhuizen
9  */
10 
11 #include "vn_command_buffer.h"
12 
13 #include "venus-protocol/vn_protocol_driver_command_buffer.h"
14 #include "venus-protocol/vn_protocol_driver_command_pool.h"
15 
16 #include "vn_descriptor_set.h"
17 #include "vn_device.h"
18 #include "vn_feedback.h"
19 #include "vn_image.h"
20 #include "vn_physical_device.h"
21 #include "vn_query_pool.h"
22 #include "vn_render_pass.h"
23 
24 static void
25 vn_cmd_submit(struct vn_command_buffer *cmd);
26 
/* Encode a driver command into the command buffer's command stream.
 *
 * Reserves encoder space for the serialized command and encodes it; on
 * reservation failure the command buffer is marked invalid (reported later
 * at vkEndCommandBuffer time).  With the NO_CMD_BATCHING perf option, the
 * accumulated stream is submitted to the renderer immediately instead of
 * being batched.
 */
#define VN_CMD_ENQUEUE(cmd_name, commandBuffer, ...)                         \
   do {                                                                      \
      struct vn_command_buffer *_cmd =                                       \
         vn_command_buffer_from_handle(commandBuffer);                       \
      const size_t _cmd_size =                                               \
         vn_sizeof_##cmd_name(commandBuffer, ##__VA_ARGS__);                 \
                                                                             \
      if (likely(vn_cs_encoder_reserve(&_cmd->cs, _cmd_size)))               \
         vn_encode_##cmd_name(&_cmd->cs, 0, commandBuffer, ##__VA_ARGS__);   \
      else                                                                   \
         _cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;                      \
                                                                             \
      if (unlikely(VN_PERF(NO_CMD_BATCHING)))                                \
         vn_cmd_submit(_cmd);                                                \
   } while (0)
42 
43 static bool
vn_image_memory_barriers_needs_present_fix(const VkImageMemoryBarrier * img_barriers,uint32_t count)44 vn_image_memory_barriers_needs_present_fix(
45    const VkImageMemoryBarrier *img_barriers, uint32_t count)
46 {
47    if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
48       return false;
49 
50    for (uint32_t i = 0; i < count; i++) {
51       if (img_barriers[i].oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR ||
52           img_barriers[i].newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
53          return true;
54    }
55    return false;
56 }
57 
58 static bool
vn_dependency_infos_needs_present_fix(uint32_t dep_count,const VkDependencyInfo * dep_infos)59 vn_dependency_infos_needs_present_fix(uint32_t dep_count,
60                                       const VkDependencyInfo *dep_infos)
61 {
62    if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
63       return false;
64 
65    for (uint32_t i = 0; i < dep_count; i++) {
66       for (uint32_t j = 0; j < dep_infos[i].imageMemoryBarrierCount; j++) {
67          const VkImageMemoryBarrier2 *b =
68             &dep_infos[i].pImageMemoryBarriers[j];
69          if (b->oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR ||
70              b->newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
71             return true;
72          }
73       }
74    }
75    return false;
76 }
77 
/* Result of rewriting a present-src image memory barrier: which halves of
 * the barrier remain meaningful and whether the acquire-unmodified struct
 * should be chained.
 */
struct vn_cmd_fix_image_memory_barrier_result {
   bool availability_op_needed; // keep src access/stage masks (flush)
   bool visibility_op_needed;   // keep dst access/stage masks (invalidate)
   /* chain VkExternalMemoryAcquireUnmodifiedEXT on the foreign->local
    * acquire, telling the renderer the external memory was not modified
    */
   bool external_acquire_unmodified;
};
83 
/* Suballocations carved out of the command pool's cached storage for one
 * barrier-fixing operation.  All pointers alias a single allocation owned by
 * the pool (see vn_cmd_get_cached_storage).
 */
struct vn_cmd_cached_storage {
   /* shallow copies of the caller's VkDependencyInfo array (sync2 only) */
   VkDependencyInfo *dep_infos;
   union {
      VkImageMemoryBarrier *barriers;
      VkImageMemoryBarrier2 *barriers2;
   };
   /* capacity and high-water mark of acquire_unmodified_infos */
   uint32_t acquire_unmodified_count;
   uint32_t used_acquire_unmodified;
   VkExternalMemoryAcquireUnmodifiedEXT *acquire_unmodified_infos;
};
94 
/* Carve one cached allocation from the command pool into the suballocations
 * described by vn_cmd_cached_storage: an optional VkDependencyInfo array, a
 * barrier array of the requested type, and one
 * VkExternalMemoryAcquireUnmodifiedEXT per barrier.
 *
 * Returns false when the pool's cached storage cannot provide the space;
 * out_storage is only valid on success.  The storage is owned by the pool
 * and is reused across calls, so the result is only valid until the next
 * vn_cached_storage_get on the same pool.
 */
static inline bool
vn_cmd_get_cached_storage(struct vn_command_buffer *cmd,
                          VkStructureType barrier_type,
                          uint32_t barrier_count,
                          uint32_t dep_info_count,
                          struct vn_cmd_cached_storage *out_storage)
{
   size_t dep_infos_size = dep_info_count * sizeof(VkDependencyInfo);
   size_t barriers_size;

   switch (barrier_type) {
   case VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER:
      /* dep infos are only meaningful with the sync2 barrier type */
      assert(!dep_info_count);
      barriers_size = barrier_count * sizeof(VkImageMemoryBarrier);
      break;
   case VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2:
      barriers_size = barrier_count * sizeof(VkImageMemoryBarrier2);
      break;
   default:
      unreachable("invalid barrier_type");
   }

   size_t total_size =
      dep_infos_size + barriers_size +
      barrier_count * sizeof(VkExternalMemoryAcquireUnmodifiedEXT);
   void *data = vn_cached_storage_get(&cmd->pool->storage, total_size);
   if (!data)
      return false;

   memset(out_storage, 0, sizeof(*out_storage));
   if (dep_info_count) {
      out_storage->dep_infos = data;
      data += dep_infos_size; /* void* arithmetic: GNU C extension */
   }
   out_storage->barriers = data;
   data += barriers_size;

   out_storage->acquire_unmodified_count = barrier_count;
   out_storage->acquire_unmodified_infos = data;
   return true;
}
136 
137 static inline VkExternalMemoryAcquireUnmodifiedEXT *
vn_cached_get_acquire_unmodified(struct vn_cmd_cached_storage * storage)138 vn_cached_get_acquire_unmodified(struct vn_cmd_cached_storage *storage)
139 {
140    VkExternalMemoryAcquireUnmodifiedEXT *acquire_unmodified =
141       &storage->acquire_unmodified_infos[storage->used_acquire_unmodified++];
142    assert(storage->used_acquire_unmodified <=
143           storage->acquire_unmodified_count);
144    return acquire_unmodified;
145 }
146 
147 /* About VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, the spec says
148  *
149  *    VK_IMAGE_LAYOUT_PRESENT_SRC_KHR must only be used for presenting a
150  *    presentable image for display. A swapchain's image must be transitioned
151  *    to this layout before calling vkQueuePresentKHR, and must be
152  *    transitioned away from this layout after calling vkAcquireNextImageKHR.
153  *
154  * That allows us to treat the layout internally as
155  *
156  *  - VK_IMAGE_LAYOUT_GENERAL
157  *  - VK_QUEUE_FAMILY_FOREIGN_EXT has the ownership, if the image is not a
158  *    prime blit source
159  *
160  * while staying performant.
161  *
162  * About queue family ownerships, the spec says
163  *
164  *    A queue family can take ownership of an image subresource or buffer
165  *    range of a resource created with VK_SHARING_MODE_EXCLUSIVE, without an
166  *    ownership transfer, in the same way as for a resource that was just
167  *    created; however, taking ownership in this way has the effect that the
168  *    contents of the image subresource or buffer range are undefined.
169  *
170  * It is unclear if that is applicable to external resources, which supposedly
171  * have the same semantics
172  *
173  *    Binding a resource to a memory object shared between multiple Vulkan
174  *    instances or other APIs does not change the ownership of the underlying
175  *    memory. The first entity to access the resource implicitly acquires
176  *    ownership. Accessing a resource backed by memory that is owned by a
177  *    particular instance or API has the same semantics as accessing a
178  *    VK_SHARING_MODE_EXCLUSIVE resource[...]
179  *
180  * We should get the spec clarified, or get rid of this completely broken code
181  * (TODO).
182  *
183  * Assuming a queue family can acquire the ownership implicitly when the
184  * contents are not needed, we do not need to worry about
185  * VK_IMAGE_LAYOUT_UNDEFINED.  We can use VK_IMAGE_LAYOUT_PRESENT_SRC_KHR as
186  * the sole signal to trigger queue family ownership transfers.
187  *
188  * When the image has VK_SHARING_MODE_CONCURRENT, we can, and are required to,
189  * use VK_QUEUE_FAMILY_IGNORED as the other queue family whether we are
190  * transitioning to or from VK_IMAGE_LAYOUT_PRESENT_SRC_KHR.
191  *
192  * When the image has VK_SHARING_MODE_EXCLUSIVE, we have to work out who the
193  * other queue family is.  It is easier when the barrier does not also define
194  * a queue family ownership transfer (i.e., srcQueueFamilyIndex equals to
195  * dstQueueFamilyIndex).  The other queue family must be the queue family the
196  * command buffer was allocated for.
197  *
198  * When the barrier also defines a queue family ownership transfer, it is
199  * submitted both to the source queue family to release the ownership and to
200  * the destination queue family to acquire the ownership.  Depending on
201  * whether the barrier transitions to or from VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
202  * we are only interested in the ownership release or acquire respectively and
203  * should be careful to avoid double releases/acquires.
204  *
205  * I haven't followed all transition paths mentally to verify the correctness.
206  * I likely also violate some VUs or miss some cases below.  They are
207  * hopefully fixable and are left as TODOs.
208  */
/* Rewrite a barrier's layouts and queue family indices in place so that
 * VK_IMAGE_LAYOUT_PRESENT_SRC_KHR never reaches the renderer (see the big
 * comment above for the overall scheme and its caveats).
 *
 * The returned result tells the caller which access/stage masks may be
 * cleared and whether VkExternalMemoryAcquireUnmodifiedEXT should be
 * chained on the acquire.
 */
static struct vn_cmd_fix_image_memory_barrier_result
vn_cmd_fix_image_memory_barrier_common(const struct vn_image *img,
                                       uint32_t cmd_pool_qfi,
                                       VkImageLayout *old_layout,
                                       VkImageLayout *new_layout,
                                       uint32_t *src_qfi,
                                       uint32_t *dst_qfi)
{
   assert(VN_PRESENT_SRC_INTERNAL_LAYOUT != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);

   struct vn_cmd_fix_image_memory_barrier_result result = {
      .availability_op_needed = true,
      .visibility_op_needed = true,
   };

   /* no fix needed */
   if (*old_layout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR &&
       *new_layout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
      return result;

   assert(img->wsi.is_wsi);

   /* prime blit src or no layout transition: only substitute the internal
    * layout; no queue family ownership transfer is involved
    */
   if (img->wsi.is_prime_blit_src || *old_layout == *new_layout) {
      if (*old_layout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
         *old_layout = VN_PRESENT_SRC_INTERNAL_LAYOUT;
      if (*new_layout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
         *new_layout = VN_PRESENT_SRC_INTERNAL_LAYOUT;
      return result;
   }

   if (*old_layout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
      /* acquire: transition away from present; the foreign queue family
       * releases the ownership, so no flush is needed on our side and the
       * external memory contents are unmodified
       */
      *old_layout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

      result.availability_op_needed = false;
      result.external_acquire_unmodified = true;

      if (img->sharing_mode == VK_SHARING_MODE_CONCURRENT) {
         *src_qfi = VK_QUEUE_FAMILY_FOREIGN_EXT;
         *dst_qfi = VK_QUEUE_FAMILY_IGNORED;
      } else if (*dst_qfi == *src_qfi || *dst_qfi == cmd_pool_qfi) {
         *src_qfi = VK_QUEUE_FAMILY_FOREIGN_EXT;
         *dst_qfi = cmd_pool_qfi;
      } else {
         /* The barrier also defines a queue family ownership transfer, and
          * this is the one that gets submitted to the source queue family to
          * release the ownership.  Skip both the transfer and the transition.
          */
         *src_qfi = VK_QUEUE_FAMILY_IGNORED;
         *dst_qfi = VK_QUEUE_FAMILY_IGNORED;
         *new_layout = *old_layout;
      }
   } else {
      /* release: transition to present; the foreign queue family acquires
       * the ownership, so no invalidate is needed on our side
       */
      *new_layout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

      result.visibility_op_needed = false;

      if (img->sharing_mode == VK_SHARING_MODE_CONCURRENT) {
         *src_qfi = VK_QUEUE_FAMILY_IGNORED;
         *dst_qfi = VK_QUEUE_FAMILY_FOREIGN_EXT;
      } else if (*src_qfi == *dst_qfi || *src_qfi == cmd_pool_qfi) {
         *src_qfi = cmd_pool_qfi;
         *dst_qfi = VK_QUEUE_FAMILY_FOREIGN_EXT;
      } else {
         /* The barrier also defines a queue family ownership transfer, and
          * this is the one that gets submitted to the destination queue
          * family to acquire the ownership.  Skip both the transfer and the
          * transition.
          */
         *src_qfi = VK_QUEUE_FAMILY_IGNORED;
         *dst_qfi = VK_QUEUE_FAMILY_IGNORED;
         *old_layout = *new_layout;
      }
   }

   return result;
}
286 
287 static void
vn_cmd_set_external_acquire_unmodified(VkBaseOutStructure * chain,struct vn_cmd_cached_storage * storage)288 vn_cmd_set_external_acquire_unmodified(VkBaseOutStructure *chain,
289                                        struct vn_cmd_cached_storage *storage)
290 {
291    VkExternalMemoryAcquireUnmodifiedEXT *acquire_unmodified =
292       vk_find_struct(chain->pNext, EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT);
293    if (acquire_unmodified) {
294       acquire_unmodified->acquireUnmodifiedMemory = VK_TRUE;
295    } else {
296       acquire_unmodified = vn_cached_get_acquire_unmodified(storage);
297       *acquire_unmodified = (VkExternalMemoryAcquireUnmodifiedEXT){
298          .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT,
299          .pNext = chain->pNext,
300          .acquireUnmodifiedMemory = VK_TRUE,
301       };
302       chain->pNext = (void *)acquire_unmodified;
303    }
304 }
305 
306 static void
vn_cmd_fix_image_memory_barrier(const struct vn_command_buffer * cmd,VkImageMemoryBarrier * barrier,struct vn_cmd_cached_storage * storage)307 vn_cmd_fix_image_memory_barrier(const struct vn_command_buffer *cmd,
308                                 VkImageMemoryBarrier *barrier,
309                                 struct vn_cmd_cached_storage *storage)
310 {
311    const struct vn_physical_device *physical_dev =
312       cmd->pool->device->physical_device;
313    const struct vn_image *img = vn_image_from_handle(barrier->image);
314 
315    struct vn_cmd_fix_image_memory_barrier_result result =
316       vn_cmd_fix_image_memory_barrier_common(
317          img, cmd->pool->queue_family_index, &barrier->oldLayout,
318          &barrier->newLayout, &barrier->srcQueueFamilyIndex,
319          &barrier->dstQueueFamilyIndex);
320    if (!result.availability_op_needed)
321       barrier->srcAccessMask = 0;
322    if (!result.visibility_op_needed)
323       barrier->dstAccessMask = 0;
324 
325    if (result.external_acquire_unmodified &&
326        physical_dev->renderer_extensions
327           .EXT_external_memory_acquire_unmodified)
328       vn_cmd_set_external_acquire_unmodified((VkBaseOutStructure *)barrier,
329                                              storage);
330 }
331 
332 static void
vn_cmd_fix_image_memory_barrier2(const struct vn_command_buffer * cmd,VkImageMemoryBarrier2 * barrier,struct vn_cmd_cached_storage * storage)333 vn_cmd_fix_image_memory_barrier2(const struct vn_command_buffer *cmd,
334                                  VkImageMemoryBarrier2 *barrier,
335                                  struct vn_cmd_cached_storage *storage)
336 {
337    const struct vn_physical_device *physical_dev =
338       cmd->pool->device->physical_device;
339    const struct vn_image *img = vn_image_from_handle(barrier->image);
340 
341    struct vn_cmd_fix_image_memory_barrier_result result =
342       vn_cmd_fix_image_memory_barrier_common(
343          img, cmd->pool->queue_family_index, &barrier->oldLayout,
344          &barrier->newLayout, &barrier->srcQueueFamilyIndex,
345          &barrier->dstQueueFamilyIndex);
346    if (!result.availability_op_needed) {
347       barrier->srcStageMask = 0;
348       barrier->srcAccessMask = 0;
349    }
350    if (!result.visibility_op_needed) {
351       barrier->dstStageMask = 0;
352       barrier->dstAccessMask = 0;
353    }
354 
355    if (result.external_acquire_unmodified &&
356        physical_dev->renderer_extensions
357           .EXT_external_memory_acquire_unmodified) {
358       vn_cmd_set_external_acquire_unmodified((VkBaseOutStructure *)barrier,
359                                              storage);
360    }
361 }
362 
/* Fix present-src barriers for vkCmdWaitEvents.
 *
 * Returns either src_barriers unchanged (no fix needed, or storage
 * allocation failed) or a fixed copy from the pool's cached storage.  Fixed
 * barriers that still carry a queue family ownership transfer are moved to
 * the tail of the returned array and their count is reported through
 * out_transfer_count, because vkCmdWaitEvents cannot express such transfers
 * and the caller must emit them via a separate pipeline barrier.
 */
static const VkImageMemoryBarrier *
vn_cmd_wait_events_fix_image_memory_barriers(
   struct vn_command_buffer *cmd,
   const VkImageMemoryBarrier *src_barriers,
   uint32_t count,
   uint32_t *out_transfer_count)
{
   *out_transfer_count = 0;

   if (cmd->builder.in_render_pass ||
       !vn_image_memory_barriers_needs_present_fix(src_barriers, count))
      return src_barriers;

   /* 2x count: first half collects non-transfer barriers, second half
    * stages the transfer barriers before they are copied back to the tail
    */
   struct vn_cmd_cached_storage storage;
   if (!vn_cmd_get_cached_storage(cmd, VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
                                  count * 2, /*dep_info_count=*/0,
                                  &storage)) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return src_barriers;
   }
   VkImageMemoryBarrier *img_barriers = storage.barriers;
   VkImageMemoryBarrier *transfer_barriers = storage.barriers + count;

   /* vkCmdWaitEvents cannot be used for queue family ownership transfers.
    * Nothing appears to be said about the submission order of image memory
    * barriers in the same array.  We take the liberty to move queue family
    * ownership transfers to the tail.
    */
   uint32_t transfer_count = 0;
   uint32_t valid_count = 0;
   for (uint32_t i = 0; i < count; i++) {
      VkImageMemoryBarrier *img_barrier = &img_barriers[valid_count];
      *img_barrier = src_barriers[i];
      vn_cmd_fix_image_memory_barrier(cmd, img_barrier, &storage);

      if (img_barrier->srcQueueFamilyIndex ==
          img_barrier->dstQueueFamilyIndex) {
         valid_count++;
      } else {
         transfer_barriers[transfer_count++] = *img_barrier;
      }
   }

   assert(valid_count + transfer_count == count);
   if (transfer_count) {
      /* copy back to the tail */
      memcpy(&img_barriers[valid_count], transfer_barriers,
             sizeof(*transfer_barriers) * transfer_count);
      *out_transfer_count = transfer_count;
   }

   return img_barriers;
}
416 
417 static const VkImageMemoryBarrier *
vn_cmd_pipeline_barrier_fix_image_memory_barriers(struct vn_command_buffer * cmd,const VkImageMemoryBarrier * src_barriers,uint32_t count)418 vn_cmd_pipeline_barrier_fix_image_memory_barriers(
419    struct vn_command_buffer *cmd,
420    const VkImageMemoryBarrier *src_barriers,
421    uint32_t count)
422 {
423    if (cmd->builder.in_render_pass ||
424        !vn_image_memory_barriers_needs_present_fix(src_barriers, count))
425       return src_barriers;
426 
427    struct vn_cmd_cached_storage storage;
428    if (!vn_cmd_get_cached_storage(cmd, VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
429                                   count, /*dep_info_count=*/0, &storage)) {
430       cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
431       return src_barriers;
432    }
433 
434    memcpy(storage.barriers, src_barriers,
435           count * sizeof(VkImageMemoryBarrier));
436    for (uint32_t i = 0; i < count; i++)
437       vn_cmd_fix_image_memory_barrier(cmd, &storage.barriers[i], &storage);
438 
439    return storage.barriers;
440 }
441 
/* Fix present-src barriers for sync2 commands taking VkDependencyInfo.
 *
 * Returns dep_infos unchanged when no fix is needed or the cached storage
 * could not be obtained (the command buffer is then marked invalid);
 * otherwise returns shallow copies of the dependency infos whose image
 * memory barrier arrays point into fixed copies in the pool's cached
 * storage.  All other barrier arrays still alias the caller's memory.
 */
static const VkDependencyInfo *
vn_cmd_fix_dependency_infos(struct vn_command_buffer *cmd,
                            uint32_t dep_count,
                            const VkDependencyInfo *dep_infos)
{
   if (cmd->builder.in_render_pass ||
       !vn_dependency_infos_needs_present_fix(dep_count, dep_infos))
      return dep_infos;

   uint32_t total_barrier_count = 0;
   for (uint32_t i = 0; i < dep_count; i++)
      total_barrier_count += dep_infos[i].imageMemoryBarrierCount;

   struct vn_cmd_cached_storage storage;
   if (!vn_cmd_get_cached_storage(cmd,
                                  VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2,
                                  total_barrier_count, dep_count, &storage)) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return dep_infos;
   }
   memcpy(storage.dep_infos, dep_infos, dep_count * sizeof(VkDependencyInfo));

   /* carve per-dep-info sub-arrays out of the shared barrier array */
   uint32_t barrier_index = 0;
   for (uint32_t i = 0; i < dep_count; i++) {
      uint32_t barrier_count = dep_infos[i].imageMemoryBarrierCount;

      VkImageMemoryBarrier2 *new_barriers = &storage.barriers2[barrier_index];
      barrier_index += barrier_count;

      memcpy(new_barriers, dep_infos[i].pImageMemoryBarriers,
             barrier_count * sizeof(VkImageMemoryBarrier2));
      storage.dep_infos[i].pImageMemoryBarriers = new_barriers;

      for (uint32_t j = 0; j < barrier_count; j++) {
         vn_cmd_fix_image_memory_barrier2(cmd, &new_barriers[j], &storage);
      }
   }

   return storage.dep_infos;
}
482 
/* Enqueue a vkCmdPipelineBarrier carrying only buffer and image memory
 * barriers (no global memory barriers, no dependency flags).
 */
static void
vn_cmd_encode_memory_barriers(struct vn_command_buffer *cmd,
                              VkPipelineStageFlags src_stage_mask,
                              VkPipelineStageFlags dst_stage_mask,
                              uint32_t buf_barrier_count,
                              const VkBufferMemoryBarrier *buf_barriers,
                              uint32_t img_barrier_count,
                              const VkImageMemoryBarrier *img_barriers)
{
   const VkCommandBuffer cmd_handle = vn_command_buffer_to_handle(cmd);

   VN_CMD_ENQUEUE(vkCmdPipelineBarrier, cmd_handle, src_stage_mask,
                  dst_stage_mask, 0, 0, NULL, buf_barrier_count, buf_barriers,
                  img_barrier_count, img_barriers);
}
498 
499 static void
vn_present_src_attachment_to_image_memory_barrier(const struct vn_image * img,const struct vn_present_src_attachment * att,VkImageMemoryBarrier * img_barrier,bool acquire)500 vn_present_src_attachment_to_image_memory_barrier(
501    const struct vn_image *img,
502    const struct vn_present_src_attachment *att,
503    VkImageMemoryBarrier *img_barrier,
504    bool acquire)
505 {
506    *img_barrier = (VkImageMemoryBarrier)
507    {
508       .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
509       .srcAccessMask = att->src_access_mask,
510       .dstAccessMask = att->dst_access_mask,
511       .oldLayout = acquire ? VK_IMAGE_LAYOUT_PRESENT_SRC_KHR
512                            : VN_PRESENT_SRC_INTERNAL_LAYOUT,
513       .newLayout = acquire ? VN_PRESENT_SRC_INTERNAL_LAYOUT
514                            : VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
515       .image = vn_image_to_handle((struct vn_image *)img),
516       .subresourceRange = {
517          .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
518          .levelCount = 1,
519          .layerCount = 1,
520       },
521    };
522 }
523 
/* Emit one pipeline barrier acquiring (or releasing) ownership of the given
 * present-src images at render pass begin/end.  Marks the command buffer
 * invalid on cached-storage allocation failure.
 */
static void
vn_cmd_transfer_present_src_images(
   struct vn_command_buffer *cmd,
   bool acquire,
   const struct vn_image *const *images,
   const struct vn_present_src_attachment *atts,
   uint32_t count)
{
   struct vn_cmd_cached_storage storage;
   if (!vn_cmd_get_cached_storage(cmd, VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
                                  count, /*dep_info_count=*/0, &storage)) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return;
   }

   /* union of per-attachment stage masks for the single barrier command */
   VkPipelineStageFlags src_stage_mask = 0;
   VkPipelineStageFlags dst_stage_mask = 0;
   for (uint32_t i = 0; i < count; i++) {
      src_stage_mask |= atts[i].src_stage_mask;
      dst_stage_mask |= atts[i].dst_stage_mask;

      vn_present_src_attachment_to_image_memory_barrier(
         images[i], &atts[i], &storage.barriers[i], acquire);
      vn_cmd_fix_image_memory_barrier(cmd, &storage.barriers[i], &storage);
   }

   vn_cmd_encode_memory_barriers(cmd, src_stage_mask, dst_stage_mask, 0, NULL,
                                 count, storage.barriers);
}
553 
554 struct vn_cmd_query_record *
vn_cmd_pool_alloc_query_record(struct vn_command_pool * cmd_pool,struct vn_query_pool * query_pool,uint32_t query,uint32_t query_count,bool copy)555 vn_cmd_pool_alloc_query_record(struct vn_command_pool *cmd_pool,
556                                struct vn_query_pool *query_pool,
557                                uint32_t query,
558                                uint32_t query_count,
559                                bool copy)
560 {
561    struct vn_cmd_query_record *record;
562    if (list_is_empty(&cmd_pool->free_query_records)) {
563       record = vk_alloc(&cmd_pool->allocator, sizeof(*record),
564                         VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
565       if (!record)
566          return NULL;
567    } else {
568       record = list_first_entry(&cmd_pool->free_query_records,
569                                 struct vn_cmd_query_record, head);
570       list_del(&record->head);
571    }
572 
573    record->query_pool = query_pool;
574    record->query = query;
575    record->query_count = query_count;
576    record->copy = copy;
577 
578    return record;
579 }
580 
/* Copy a secondary command buffer's query records into the primary's list
 * (used at vkCmdExecuteCommands time).  On allocation failure the primary
 * is marked invalid and the merge stops early; records already merged are
 * left in place.
 */
static inline void
vn_cmd_merge_query_records(struct vn_command_buffer *primary_cmd,
                           struct vn_command_buffer *secondary_cmd)
{
   list_for_each_entry_safe(struct vn_cmd_query_record, secondary_record,
                            &secondary_cmd->builder.query_records, head) {
      struct vn_cmd_query_record *record = vn_cmd_pool_alloc_query_record(
         primary_cmd->pool, secondary_record->query_pool,
         secondary_record->query, secondary_record->query_count,
         secondary_record->copy);
      if (!record) {
         primary_cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
         return;
      }

      list_addtail(&record->head, &primary_cmd->builder.query_records);
   }
}
599 
/* Track render pass state in the builder and, for passes referencing
 * present-src attachments, collect the attachment images and emit the
 * ownership-acquire barriers.  The collected image array is owned by the
 * builder and freed at vn_cmd_end_render_pass or vn_cmd_reset.
 */
static void
vn_cmd_begin_render_pass(struct vn_command_buffer *cmd,
                         const struct vn_render_pass *pass,
                         const struct vn_framebuffer *fb,
                         const VkRenderPassBeginInfo *begin_info)
{
   assert(begin_info);
   assert(cmd->level == VK_COMMAND_BUFFER_LEVEL_PRIMARY);

   cmd->builder.render_pass = pass;
   cmd->builder.in_render_pass = true;
   cmd->builder.subpass_index = 0;
   cmd->builder.view_mask = vn_render_pass_get_subpass_view_mask(pass, 0);

   if (!pass->present_count)
      return;

   /* find fb attachments: either from the framebuffer or, for imageless
    * framebuffers, from the chained VkRenderPassAttachmentBeginInfo
    */
   const VkImageView *views;
   ASSERTED uint32_t view_count;
   if (fb->image_view_count) {
      views = fb->image_views;
      view_count = fb->image_view_count;
   } else {
      const VkRenderPassAttachmentBeginInfo *imageless_info =
         vk_find_struct_const(begin_info->pNext,
                              RENDER_PASS_ATTACHMENT_BEGIN_INFO);
      assert(imageless_info);
      views = imageless_info->pAttachments;
      view_count = imageless_info->attachmentCount;
   }

   const struct vn_image **images =
      vk_alloc(&cmd->pool->allocator, sizeof(*images) * pass->present_count,
               VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!images) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return;
   }

   for (uint32_t i = 0; i < pass->present_count; i++) {
      const uint32_t index = pass->present_attachments[i].index;
      assert(index < view_count);
      images[i] = vn_image_view_from_handle(views[index])->image;
   }

   if (pass->present_acquire_count) {
      vn_cmd_transfer_present_src_images(cmd, true, images,
                                         pass->present_acquire_attachments,
                                         pass->present_acquire_count);
   }

   cmd->builder.present_src_images = images;
}
654 
/* Clear render pass state from the builder, emit the ownership-release
 * barriers for present-src attachments, and free the image array collected
 * at vn_cmd_begin_render_pass.
 */
static void
vn_cmd_end_render_pass(struct vn_command_buffer *cmd)
{
   const struct vn_render_pass *pass = cmd->builder.render_pass;
   const struct vn_image **images = cmd->builder.present_src_images;

   cmd->builder.render_pass = NULL;
   cmd->builder.present_src_images = NULL;
   cmd->builder.in_render_pass = false;
   cmd->builder.subpass_index = 0;
   cmd->builder.view_mask = 0;

   /* images may be NULL if the allocation failed at begin time */
   if (!pass->present_count || !images)
      return;

   if (pass->present_release_count) {
      /* release attachments follow the acquire attachments in the array */
      vn_cmd_transfer_present_src_images(
         cmd, false, images + pass->present_acquire_count,
         pass->present_release_attachments, pass->present_release_count);
   }

   vk_free(&cmd->pool->allocator, images);
}
678 
679 static inline void
vn_cmd_next_subpass(struct vn_command_buffer * cmd)680 vn_cmd_next_subpass(struct vn_command_buffer *cmd)
681 {
682    cmd->builder.view_mask = vn_render_pass_get_subpass_view_mask(
683       cmd->builder.render_pass, ++cmd->builder.subpass_index);
684 }
685 
686 static inline void
vn_cmd_begin_rendering(struct vn_command_buffer * cmd,const VkRenderingInfo * rendering_info)687 vn_cmd_begin_rendering(struct vn_command_buffer *cmd,
688                        const VkRenderingInfo *rendering_info)
689 {
690    cmd->builder.in_render_pass = true;
691    cmd->builder.view_mask = rendering_info->viewMask;
692 }
693 
694 static inline void
vn_cmd_end_rendering(struct vn_command_buffer * cmd)695 vn_cmd_end_rendering(struct vn_command_buffer *cmd)
696 {
697    cmd->builder.in_render_pass = false;
698    cmd->builder.view_mask = 0;
699 }
700 
701 /* command pool commands */
702 
/* Create the driver-side command pool and asynchronously create the
 * renderer-side pool.  Returns VK_ERROR_OUT_OF_HOST_MEMORY on local
 * allocation failure; the renderer call is fire-and-forget via the primary
 * ring.
 */
VkResult
vn_CreateCommandPool(VkDevice device,
                     const VkCommandPoolCreateInfo *pCreateInfo,
                     const VkAllocationCallbacks *pAllocator,
                     VkCommandPool *pCommandPool)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_command_pool *pool =
      vk_zalloc(alloc, sizeof(*pool), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&pool->base, VK_OBJECT_TYPE_COMMAND_POOL, &dev->base);

   pool->allocator = *alloc;
   pool->device = dev;
   pool->queue_family_index = pCreateInfo->queueFamilyIndex;
   list_inithead(&pool->command_buffers);
   list_inithead(&pool->free_query_records);

   vn_cached_storage_init(&pool->storage, alloc);

   VkCommandPool pool_handle = vn_command_pool_to_handle(pool);
   /* pAllocator is host-side only; pass NULL to the renderer */
   vn_async_vkCreateCommandPool(dev->primary_ring, device, pCreateInfo, NULL,
                                &pool_handle);

   vn_tls_set_async_pipeline_create();

   *pCommandPool = pool_handle;

   return VK_SUCCESS;
}
740 
/* Return a command buffer to the initial state and release all recording
 * state owned by its builder.  Shared by vkBeginCommandBuffer,
 * vkResetCommandBuffer, pool reset and free/destroy paths.
 */
static void
vn_cmd_reset(struct vn_command_buffer *cmd)
{
   vn_cs_encoder_reset(&cmd->cs);

   cmd->state = VN_COMMAND_BUFFER_STATE_INITIAL;

   /* reset cmd builder */
   vk_free(&cmd->pool->allocator, cmd->builder.present_src_images);
   /* query records are recycled onto the pool's free list, not freed */
   vn_cmd_pool_free_query_records(cmd->pool, &cmd->builder.query_records);
   memset(&cmd->builder, 0, sizeof(cmd->builder));
   /* memset wiped the list head; re-initialize it */
   list_inithead(&cmd->builder.query_records);

   if (cmd->linked_qfb_cmd) {
      vn_query_feedback_cmd_free(cmd->linked_qfb_cmd);
      cmd->linked_qfb_cmd = NULL;
   }
}
759 
/* Destroy a command pool: the renderer-side destroy is issued first (it
 * implicitly frees renderer-side command buffers), then all locally owned
 * command buffers, recycled query records and cached storage are freed.
 */
void
vn_DestroyCommandPool(VkDevice device,
                      VkCommandPool commandPool,
                      const VkAllocationCallbacks *pAllocator)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_command_pool *pool = vn_command_pool_from_handle(commandPool);
   const VkAllocationCallbacks *alloc;

   if (!pool)
      return;

   alloc = pAllocator ? pAllocator : &pool->allocator;

   vn_async_vkDestroyCommandPool(dev->primary_ring, device, commandPool,
                                 NULL);

   /* free command buffers implicitly freed with their pool */
   list_for_each_entry_safe(struct vn_command_buffer, cmd,
                            &pool->command_buffers, head) {
      vn_cmd_reset(cmd);
      vn_cs_encoder_fini(&cmd->cs);
      vn_object_base_fini(&cmd->base);
      vk_free(alloc, cmd);
   }

   /* vn_cmd_reset above recycled records onto this list; free them now */
   list_for_each_entry_safe(struct vn_cmd_query_record, record,
                            &pool->free_query_records, head)
      vk_free(alloc, record);

   vn_cached_storage_fini(&pool->storage);

   vn_object_base_fini(&pool->base);
   vk_free(alloc, pool);
}
795 
/* Reset all command buffers of a pool locally, optionally release cached
 * resources, then mirror the reset on the renderer asynchronously.
 */
VkResult
vn_ResetCommandPool(VkDevice device,
                    VkCommandPool commandPool,
                    VkCommandPoolResetFlags flags)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_command_pool *pool = vn_command_pool_from_handle(commandPool);

   list_for_each_entry_safe(struct vn_command_buffer, cmd,
                            &pool->command_buffers, head)
      vn_cmd_reset(cmd);

   if (flags & VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT) {
      /* drop recycled query records and cached storage back to the heap */
      list_for_each_entry_safe(struct vn_cmd_query_record, record,
                               &pool->free_query_records, head)
         vk_free(&pool->allocator, record);

      vn_cached_storage_fini(&pool->storage);
      vn_cached_storage_init(&pool->storage, &pool->allocator);
   }

   vn_async_vkResetCommandPool(dev->primary_ring, device, commandPool, flags);

   return VK_SUCCESS;
}
822 
/* Forward vkTrimCommandPool to the renderer; there is no host-side
 * per-pool memory to trim here.
 */
void
vn_TrimCommandPool(VkDevice device,
                   VkCommandPool commandPool,
                   VkCommandPoolTrimFlags flags)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);

   vn_async_vkTrimCommandPool(dev->primary_ring, device, commandPool, flags);
}
833 
834 /* command buffer commands */
835 
/* Allocate commandBufferCount command buffers from a pool.
 *
 * Local objects are fully constructed first; a single asynchronous
 * vkAllocateCommandBuffers mirrors them on the renderer.  On OOM, all
 * previously constructed buffers are unwound and pCommandBuffers is
 * zeroed, per the Vulkan spec for failed allocations.
 */
VkResult
vn_AllocateCommandBuffers(VkDevice device,
                          const VkCommandBufferAllocateInfo *pAllocateInfo,
                          VkCommandBuffer *pCommandBuffers)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_command_pool *pool =
      vn_command_pool_from_handle(pAllocateInfo->commandPool);
   const VkAllocationCallbacks *alloc = &pool->allocator;

   for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
      struct vn_command_buffer *cmd =
         vk_zalloc(alloc, sizeof(*cmd), VN_DEFAULT_ALIGN,
                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
      if (!cmd) {
         /* unwind the i buffers already constructed */
         for (uint32_t j = 0; j < i; j++) {
            cmd = vn_command_buffer_from_handle(pCommandBuffers[j]);
            vn_cs_encoder_fini(&cmd->cs);
            list_del(&cmd->head);
            vn_object_base_fini(&cmd->base);
            vk_free(alloc, cmd);
         }
         memset(pCommandBuffers, 0,
                sizeof(*pCommandBuffers) * pAllocateInfo->commandBufferCount);
         return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
      }

      vn_object_base_init(&cmd->base, VK_OBJECT_TYPE_COMMAND_BUFFER,
                          &dev->base);
      cmd->pool = pool;
      cmd->level = pAllocateInfo->level;
      cmd->state = VN_COMMAND_BUFFER_STATE_INITIAL;
      /* 16KB initial shmem-pool storage for the per-cmd encoder */
      vn_cs_encoder_init(&cmd->cs, dev->instance,
                         VN_CS_ENCODER_STORAGE_SHMEM_POOL, 16 * 1024);

      list_inithead(&cmd->builder.query_records);

      list_addtail(&cmd->head, &pool->command_buffers);

      VkCommandBuffer cmd_handle = vn_command_buffer_to_handle(cmd);
      pCommandBuffers[i] = cmd_handle;
   }

   vn_async_vkAllocateCommandBuffers(dev->primary_ring, device, pAllocateInfo,
                                     pCommandBuffers);

   return VK_SUCCESS;
}
885 
/* Free command buffers: the renderer-side free is issued first, then each
 * local object is reset (releasing builder state) and destroyed.  NULL
 * handles in pCommandBuffers are skipped, as the spec allows.
 */
void
vn_FreeCommandBuffers(VkDevice device,
                      VkCommandPool commandPool,
                      uint32_t commandBufferCount,
                      const VkCommandBuffer *pCommandBuffers)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_command_pool *pool = vn_command_pool_from_handle(commandPool);
   const VkAllocationCallbacks *alloc = &pool->allocator;

   vn_async_vkFreeCommandBuffers(dev->primary_ring, device, commandPool,
                                 commandBufferCount, pCommandBuffers);

   for (uint32_t i = 0; i < commandBufferCount; i++) {
      struct vn_command_buffer *cmd =
         vn_command_buffer_from_handle(pCommandBuffers[i]);

      if (!cmd)
         continue;

      list_del(&cmd->head);

      vn_cmd_reset(cmd);
      vn_cs_encoder_fini(&cmd->cs);
      vn_object_base_fini(&cmd->base);
      vk_free(alloc, cmd);
   }
}
915 
/* Reset one command buffer locally, then mirror the reset on the renderer
 * asynchronously.
 */
VkResult
vn_ResetCommandBuffer(VkCommandBuffer commandBuffer,
                      VkCommandBufferResetFlags flags)
{
   VN_TRACE_FUNC();
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   struct vn_ring *ring = cmd->pool->device->primary_ring;

   vn_cmd_reset(cmd);

   vn_async_vkResetCommandBuffer(ring, commandBuffer, flags);

   return VK_SUCCESS;
}
931 
/* Scratch storage used by vn_fix_command_buffer_begin_info to build a
 * sanitized copy of the app-provided VkCommandBufferBeginInfo chain.
 */
struct vn_command_buffer_begin_info {
   VkCommandBufferBeginInfo begin;
   VkCommandBufferInheritanceInfo inheritance;
   VkCommandBufferInheritanceConditionalRenderingInfoEXT conditional_rendering;

   /* true when a secondary cmd continues inside an inherited render pass
    * object (renderPass != VK_NULL_HANDLE)
    */
   bool has_inherited_pass;
   /* true when the cmd is secondary and RENDER_PASS_CONTINUE_BIT is set */
   bool in_render_pass;
};
940 
/* Sanitize pBeginInfo before encoding it for the renderer.
 *
 * Per the spec, pInheritanceInfo is ignored for primary command buffers,
 * and parts of the inheritance chain are ignored depending on
 * RENDER_PASS_CONTINUE_BIT and renderPass.  Ignored data may be garbage,
 * so it is dropped here rather than serialized.  Returns either the
 * original pBeginInfo (when nothing needs fixing) or a sanitized copy
 * built in *local, which must outlive the returned pointer's use.
 */
static const VkCommandBufferBeginInfo *
vn_fix_command_buffer_begin_info(struct vn_command_buffer *cmd,
                                 const VkCommandBufferBeginInfo *begin_info,
                                 struct vn_command_buffer_begin_info *local)
{
   local->has_inherited_pass = false;

   if (!begin_info->pInheritanceInfo)
      return begin_info;

   const bool is_cmd_secondary =
      cmd->level == VK_COMMAND_BUFFER_LEVEL_SECONDARY;
   const bool has_continue =
      begin_info->flags & VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT;
   const bool has_renderpass =
      is_cmd_secondary &&
      begin_info->pInheritanceInfo->renderPass != VK_NULL_HANDLE;

   /* Per spec 1.3.255: "VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT
    * specifies that a secondary command buffer is considered to be
    * entirely inside a render pass. If this is a primary command buffer,
    * then this bit is ignored."
    */
   local->in_render_pass = has_continue && is_cmd_secondary;

   /* Can early-return if dynamic rendering is used and no structures need to
    * be dropped from the pNext chain of VkCommandBufferInheritanceInfo.
    */
   if (is_cmd_secondary && has_continue && !has_renderpass)
      return begin_info;

   local->begin = *begin_info;

   if (!is_cmd_secondary) {
      /* primary: the whole inheritance struct is ignored; drop it */
      local->begin.pInheritanceInfo = NULL;
      return &local->begin;
   }

   local->inheritance = *begin_info->pInheritanceInfo;
   local->begin.pInheritanceInfo = &local->inheritance;

   if (!has_continue) {
      /* render-pass fields are ignored without CONTINUE_BIT; scrub them */
      local->inheritance.framebuffer = VK_NULL_HANDLE;
      local->inheritance.renderPass = VK_NULL_HANDLE;
      local->inheritance.subpass = 0;
   } else {
      /* With early-returns above, it must be an inherited pass. */
      local->has_inherited_pass = true;
   }

   /* Per spec, about VkCommandBufferInheritanceRenderingInfo:
    *
    * If VkCommandBufferInheritanceInfo::renderPass is not VK_NULL_HANDLE, or
    * VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT is not specified in
    * VkCommandBufferBeginInfo::flags, parameters of this structure are
    * ignored.
    *
    * Rebuild the pNext chain keeping only structs that remain meaningful.
    */
   VkBaseOutStructure *head = NULL;
   VkBaseOutStructure *tail = NULL;
   vk_foreach_struct_const(src, local->inheritance.pNext) {
      void *pnext = NULL;
      switch (src->sType) {
      case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT:
         memcpy(
            &local->conditional_rendering, src,
            sizeof(VkCommandBufferInheritanceConditionalRenderingInfoEXT));
         pnext = &local->conditional_rendering;
         break;
      case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO:
      default:
         break;
      }

      if (pnext) {
         if (!head)
            head = pnext;
         else
            tail->pNext = pnext;

         tail = pnext;
      }
   }
   local->inheritance.pNext = head;

   return &local->begin;
}
1027 
/* Begin recording: reset local state, sanitize pBeginInfo, encode
 * vkBeginCommandBuffer into the command stream, and capture builder state
 * (simultaneous-use flag, in-render-pass, view mask) needed later for
 * query feedback.
 */
VkResult
vn_BeginCommandBuffer(VkCommandBuffer commandBuffer,
                      const VkCommandBufferBeginInfo *pBeginInfo)
{
   VN_TRACE_FUNC();
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   struct vn_instance *instance = cmd->pool->device->instance;
   size_t cmd_size;

   /* reset regardless of VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT */
   vn_cmd_reset(cmd);

   struct vn_command_buffer_begin_info local_begin_info;
   pBeginInfo =
      vn_fix_command_buffer_begin_info(cmd, pBeginInfo, &local_begin_info);

   cmd_size = vn_sizeof_vkBeginCommandBuffer(commandBuffer, pBeginInfo);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size)) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }
   cmd->builder.is_simultaneous =
      pBeginInfo->flags & VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;

   vn_encode_vkBeginCommandBuffer(&cmd->cs, 0, commandBuffer, pBeginInfo);

   cmd->state = VN_COMMAND_BUFFER_STATE_RECORDING;

   /* NULL here for primaries: the sanitizer drops pInheritanceInfo */
   const VkCommandBufferInheritanceInfo *inheritance_info =
      pBeginInfo->pInheritanceInfo;

   if (inheritance_info) {
      cmd->builder.in_render_pass = local_begin_info.in_render_pass;

      if (local_begin_info.has_inherited_pass) {
         /* Store the viewMask from the inherited render pass subpass for
          * query feedback.
          */
         cmd->builder.view_mask = vn_render_pass_get_subpass_view_mask(
            vn_render_pass_from_handle(inheritance_info->renderPass),
            inheritance_info->subpass);
      } else {
         /* Store the viewMask from the
          * VkCommandBufferInheritanceRenderingInfo.
          */
         const VkCommandBufferInheritanceRenderingInfo
            *inheritance_rendering_info = vk_find_struct_const(
               inheritance_info->pNext,
               COMMAND_BUFFER_INHERITANCE_RENDERING_INFO);
         if (inheritance_rendering_info)
            cmd->builder.view_mask = inheritance_rendering_info->viewMask;
      }
   }

   return VK_SUCCESS;
}
1085 
/* Flush the command buffer's encoded stream to the renderer.
 *
 * No-op unless the cmd is in the RECORDING state.  Any encoder or ring
 * failure marks the cmd INVALID; the encoder is reset afterwards so
 * recording can continue into fresh storage.
 */
static void
vn_cmd_submit(struct vn_command_buffer *cmd)
{
   struct vn_ring *ring = cmd->pool->device->primary_ring;

   if (cmd->state != VN_COMMAND_BUFFER_STATE_RECORDING)
      return;

   vn_cs_encoder_commit(&cmd->cs);
   if (vn_cs_encoder_get_fatal(&cmd->cs)) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      vn_cs_encoder_reset(&cmd->cs);
      return;
   }

   /* ensure ordering with prior ring traffic when the encoder requires it */
   if (vn_cs_encoder_needs_roundtrip(&cmd->cs))
      vn_ring_roundtrip(ring);

   if (vn_ring_submit_command_simple(ring, &cmd->cs) != VK_SUCCESS) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return;
   }

   vn_cs_encoder_reset(&cmd->cs);
}
1111 
/* End recording: encode vkEndCommandBuffer, flush the batched stream to
 * the renderer, and move to the EXECUTABLE state.  Any failure along the
 * way leaves the cmd INVALID and is reported as OUT_OF_HOST_MEMORY.
 */
VkResult
vn_EndCommandBuffer(VkCommandBuffer commandBuffer)
{
   VN_TRACE_FUNC();
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   struct vn_instance *instance = cmd->pool->device->instance;
   size_t cmd_size;

   if (cmd->state != VN_COMMAND_BUFFER_STATE_RECORDING)
      return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   cmd_size = vn_sizeof_vkEndCommandBuffer(commandBuffer);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size)) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   vn_encode_vkEndCommandBuffer(&cmd->cs, 0, commandBuffer);

   vn_cmd_submit(cmd);
   if (cmd->state == VN_COMMAND_BUFFER_STATE_INVALID)
      return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   cmd->state = VN_COMMAND_BUFFER_STATE_EXECUTABLE;

   return VK_SUCCESS;
}
1140 
/* Encode vkCmdBindPipeline into the cmd's batched command stream. */
void
vn_CmdBindPipeline(VkCommandBuffer commandBuffer,
                   VkPipelineBindPoint pipelineBindPoint,
                   VkPipeline pipeline)
{
   VN_CMD_ENQUEUE(vkCmdBindPipeline, commandBuffer, pipelineBindPoint,
                  pipeline);
}
1149 
/* Encode vkCmdSetViewport into the cmd's batched command stream. */
void
vn_CmdSetViewport(VkCommandBuffer commandBuffer,
                  uint32_t firstViewport,
                  uint32_t viewportCount,
                  const VkViewport *pViewports)
{
   VN_CMD_ENQUEUE(vkCmdSetViewport, commandBuffer, firstViewport,
                  viewportCount, pViewports);
}
1159 
/* Encode vkCmdSetScissor into the cmd's batched command stream. */
void
vn_CmdSetScissor(VkCommandBuffer commandBuffer,
                 uint32_t firstScissor,
                 uint32_t scissorCount,
                 const VkRect2D *pScissors)
{
   VN_CMD_ENQUEUE(vkCmdSetScissor, commandBuffer, firstScissor, scissorCount,
                  pScissors);
}
1169 
/* Encode vkCmdSetLineWidth into the cmd's batched command stream. */
void
vn_CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
{
   VN_CMD_ENQUEUE(vkCmdSetLineWidth, commandBuffer, lineWidth);
}
1175 
/* Encode vkCmdSetDepthBias into the cmd's batched command stream. */
void
vn_CmdSetDepthBias(VkCommandBuffer commandBuffer,
                   float depthBiasConstantFactor,
                   float depthBiasClamp,
                   float depthBiasSlopeFactor)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthBias, commandBuffer, depthBiasConstantFactor,
                  depthBiasClamp, depthBiasSlopeFactor);
}
1185 
/* Encode vkCmdSetBlendConstants into the cmd's batched command stream. */
void
vn_CmdSetBlendConstants(VkCommandBuffer commandBuffer,
                        const float blendConstants[4])
{
   VN_CMD_ENQUEUE(vkCmdSetBlendConstants, commandBuffer, blendConstants);
}
1192 
/* Encode vkCmdSetDepthBounds into the cmd's batched command stream. */
void
vn_CmdSetDepthBounds(VkCommandBuffer commandBuffer,
                     float minDepthBounds,
                     float maxDepthBounds)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthBounds, commandBuffer, minDepthBounds,
                  maxDepthBounds);
}
1201 
/* Encode vkCmdSetStencilCompareMask into the cmd's batched command stream. */
void
vn_CmdSetStencilCompareMask(VkCommandBuffer commandBuffer,
                            VkStencilFaceFlags faceMask,
                            uint32_t compareMask)
{
   VN_CMD_ENQUEUE(vkCmdSetStencilCompareMask, commandBuffer, faceMask,
                  compareMask);
}
1210 
/* Encode vkCmdSetStencilWriteMask into the cmd's batched command stream. */
void
vn_CmdSetStencilWriteMask(VkCommandBuffer commandBuffer,
                          VkStencilFaceFlags faceMask,
                          uint32_t writeMask)
{
   VN_CMD_ENQUEUE(vkCmdSetStencilWriteMask, commandBuffer, faceMask,
                  writeMask);
}
1219 
/* Encode vkCmdSetStencilReference into the cmd's batched command stream. */
void
vn_CmdSetStencilReference(VkCommandBuffer commandBuffer,
                          VkStencilFaceFlags faceMask,
                          uint32_t reference)
{
   VN_CMD_ENQUEUE(vkCmdSetStencilReference, commandBuffer, faceMask,
                  reference);
}
1228 
/* Encode vkCmdBindDescriptorSets into the cmd's batched command stream. */
void
vn_CmdBindDescriptorSets(VkCommandBuffer commandBuffer,
                         VkPipelineBindPoint pipelineBindPoint,
                         VkPipelineLayout layout,
                         uint32_t firstSet,
                         uint32_t descriptorSetCount,
                         const VkDescriptorSet *pDescriptorSets,
                         uint32_t dynamicOffsetCount,
                         const uint32_t *pDynamicOffsets)
{
   VN_CMD_ENQUEUE(vkCmdBindDescriptorSets, commandBuffer, pipelineBindPoint,
                  layout, firstSet, descriptorSetCount, pDescriptorSets,
                  dynamicOffsetCount, pDynamicOffsets);
}
1243 
/* Encode vkCmdBindIndexBuffer into the cmd's batched command stream. */
void
vn_CmdBindIndexBuffer(VkCommandBuffer commandBuffer,
                      VkBuffer buffer,
                      VkDeviceSize offset,
                      VkIndexType indexType)
{
   VN_CMD_ENQUEUE(vkCmdBindIndexBuffer, commandBuffer, buffer, offset,
                  indexType);
}
1253 
/* Encode vkCmdBindIndexBuffer2KHR into the cmd's batched command stream. */
void
vn_CmdBindIndexBuffer2KHR(VkCommandBuffer commandBuffer,
                          VkBuffer buffer,
                          VkDeviceSize offset,
                          VkDeviceSize size,
                          VkIndexType indexType)
{
   VN_CMD_ENQUEUE(vkCmdBindIndexBuffer2KHR, commandBuffer, buffer, offset,
                  size, indexType);
}
1264 
/* Encode vkCmdBindVertexBuffers into the cmd's batched command stream. */
void
vn_CmdBindVertexBuffers(VkCommandBuffer commandBuffer,
                        uint32_t firstBinding,
                        uint32_t bindingCount,
                        const VkBuffer *pBuffers,
                        const VkDeviceSize *pOffsets)
{
   VN_CMD_ENQUEUE(vkCmdBindVertexBuffers, commandBuffer, firstBinding,
                  bindingCount, pBuffers, pOffsets);
}
1275 
/* Encode vkCmdDraw into the cmd's batched command stream. */
void
vn_CmdDraw(VkCommandBuffer commandBuffer,
           uint32_t vertexCount,
           uint32_t instanceCount,
           uint32_t firstVertex,
           uint32_t firstInstance)
{
   VN_CMD_ENQUEUE(vkCmdDraw, commandBuffer, vertexCount, instanceCount,
                  firstVertex, firstInstance);
}
1286 
/* Update builder state for dynamic rendering, then encode
 * vkCmdBeginRendering into the cmd's batched command stream.
 */
void
vn_CmdBeginRendering(VkCommandBuffer commandBuffer,
                     const VkRenderingInfo *pRenderingInfo)
{
   vn_cmd_begin_rendering(vn_command_buffer_from_handle(commandBuffer),
                          pRenderingInfo);

   VN_CMD_ENQUEUE(vkCmdBeginRendering, commandBuffer, pRenderingInfo);
}
1296 
/* Encode vkCmdEndRendering, then clear the builder's dynamic rendering
 * state.
 */
void
vn_CmdEndRendering(VkCommandBuffer commandBuffer)
{
   VN_CMD_ENQUEUE(vkCmdEndRendering, commandBuffer);

   vn_cmd_end_rendering(vn_command_buffer_from_handle(commandBuffer));
}
1304 
/* Encode vkCmdDrawIndexed into the cmd's batched command stream. */
void
vn_CmdDrawIndexed(VkCommandBuffer commandBuffer,
                  uint32_t indexCount,
                  uint32_t instanceCount,
                  uint32_t firstIndex,
                  int32_t vertexOffset,
                  uint32_t firstInstance)
{
   VN_CMD_ENQUEUE(vkCmdDrawIndexed, commandBuffer, indexCount, instanceCount,
                  firstIndex, vertexOffset, firstInstance);
}
1316 
/* Encode vkCmdDrawIndirect into the cmd's batched command stream. */
void
vn_CmdDrawIndirect(VkCommandBuffer commandBuffer,
                   VkBuffer buffer,
                   VkDeviceSize offset,
                   uint32_t drawCount,
                   uint32_t stride)
{
   VN_CMD_ENQUEUE(vkCmdDrawIndirect, commandBuffer, buffer, offset, drawCount,
                  stride);
}
1327 
/* Encode vkCmdDrawIndexedIndirect into the cmd's batched command stream. */
void
vn_CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer,
                          VkBuffer buffer,
                          VkDeviceSize offset,
                          uint32_t drawCount,
                          uint32_t stride)
{
   VN_CMD_ENQUEUE(vkCmdDrawIndexedIndirect, commandBuffer, buffer, offset,
                  drawCount, stride);
}
1338 
/* Encode vkCmdDrawIndirectCount into the cmd's batched command stream. */
void
vn_CmdDrawIndirectCount(VkCommandBuffer commandBuffer,
                        VkBuffer buffer,
                        VkDeviceSize offset,
                        VkBuffer countBuffer,
                        VkDeviceSize countBufferOffset,
                        uint32_t maxDrawCount,
                        uint32_t stride)
{
   VN_CMD_ENQUEUE(vkCmdDrawIndirectCount, commandBuffer, buffer, offset,
                  countBuffer, countBufferOffset, maxDrawCount, stride);
}
1351 
/* Encode vkCmdDrawIndexedIndirectCount into the cmd's batched command
 * stream.
 */
void
vn_CmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer,
                               VkBuffer buffer,
                               VkDeviceSize offset,
                               VkBuffer countBuffer,
                               VkDeviceSize countBufferOffset,
                               uint32_t maxDrawCount,
                               uint32_t stride)
{
   VN_CMD_ENQUEUE(vkCmdDrawIndexedIndirectCount, commandBuffer, buffer,
                  offset, countBuffer, countBufferOffset, maxDrawCount,
                  stride);
}
1365 
/* Encode vkCmdDispatch into the cmd's batched command stream. */
void
vn_CmdDispatch(VkCommandBuffer commandBuffer,
               uint32_t groupCountX,
               uint32_t groupCountY,
               uint32_t groupCountZ)
{
   VN_CMD_ENQUEUE(vkCmdDispatch, commandBuffer, groupCountX, groupCountY,
                  groupCountZ);
}
1375 
/* Encode vkCmdDispatchIndirect into the cmd's batched command stream. */
void
vn_CmdDispatchIndirect(VkCommandBuffer commandBuffer,
                       VkBuffer buffer,
                       VkDeviceSize offset)
{
   VN_CMD_ENQUEUE(vkCmdDispatchIndirect, commandBuffer, buffer, offset);
}
1383 
/* Encode vkCmdCopyBuffer into the cmd's batched command stream. */
void
vn_CmdCopyBuffer(VkCommandBuffer commandBuffer,
                 VkBuffer srcBuffer,
                 VkBuffer dstBuffer,
                 uint32_t regionCount,
                 const VkBufferCopy *pRegions)
{
   VN_CMD_ENQUEUE(vkCmdCopyBuffer, commandBuffer, srcBuffer, dstBuffer,
                  regionCount, pRegions);
}
1394 
/* Encode vkCmdCopyBuffer2 into the cmd's batched command stream. */
void
vn_CmdCopyBuffer2(VkCommandBuffer commandBuffer,
                  const VkCopyBufferInfo2 *pCopyBufferInfo)
{
   VN_CMD_ENQUEUE(vkCmdCopyBuffer2, commandBuffer, pCopyBufferInfo);
}
1401 
/* Encode vkCmdCopyImage into the cmd's batched command stream. */
void
vn_CmdCopyImage(VkCommandBuffer commandBuffer,
                VkImage srcImage,
                VkImageLayout srcImageLayout,
                VkImage dstImage,
                VkImageLayout dstImageLayout,
                uint32_t regionCount,
                const VkImageCopy *pRegions)
{
   VN_CMD_ENQUEUE(vkCmdCopyImage, commandBuffer, srcImage, srcImageLayout,
                  dstImage, dstImageLayout, regionCount, pRegions);
}
1414 
/* Encode vkCmdCopyImage2 into the cmd's batched command stream. */
void
vn_CmdCopyImage2(VkCommandBuffer commandBuffer,
                 const VkCopyImageInfo2 *pCopyImageInfo)
{
   VN_CMD_ENQUEUE(vkCmdCopyImage2, commandBuffer, pCopyImageInfo);
}
1421 
/* Encode vkCmdBlitImage into the cmd's batched command stream. */
void
vn_CmdBlitImage(VkCommandBuffer commandBuffer,
                VkImage srcImage,
                VkImageLayout srcImageLayout,
                VkImage dstImage,
                VkImageLayout dstImageLayout,
                uint32_t regionCount,
                const VkImageBlit *pRegions,
                VkFilter filter)
{
   VN_CMD_ENQUEUE(vkCmdBlitImage, commandBuffer, srcImage, srcImageLayout,
                  dstImage, dstImageLayout, regionCount, pRegions, filter);
}
1435 
/* Encode vkCmdBlitImage2 into the cmd's batched command stream. */
void
vn_CmdBlitImage2(VkCommandBuffer commandBuffer,
                 const VkBlitImageInfo2 *pBlitImageInfo)
{
   VN_CMD_ENQUEUE(vkCmdBlitImage2, commandBuffer, pBlitImageInfo);
}
1442 
/* Encode vkCmdCopyBufferToImage into the cmd's batched command stream. */
void
vn_CmdCopyBufferToImage(VkCommandBuffer commandBuffer,
                        VkBuffer srcBuffer,
                        VkImage dstImage,
                        VkImageLayout dstImageLayout,
                        uint32_t regionCount,
                        const VkBufferImageCopy *pRegions)
{
   VN_CMD_ENQUEUE(vkCmdCopyBufferToImage, commandBuffer, srcBuffer, dstImage,
                  dstImageLayout, regionCount, pRegions);
}
1454 
/* Encode vkCmdCopyBufferToImage2 into the cmd's batched command stream. */
void
vn_CmdCopyBufferToImage2(
   VkCommandBuffer commandBuffer,
   const VkCopyBufferToImageInfo2 *pCopyBufferToImageInfo)
{
   VN_CMD_ENQUEUE(vkCmdCopyBufferToImage2, commandBuffer,
                  pCopyBufferToImageInfo);
}
1463 
1464 static bool
vn_needs_prime_blit(VkImage src_image,VkImageLayout src_image_layout)1465 vn_needs_prime_blit(VkImage src_image, VkImageLayout src_image_layout)
1466 {
1467    if (src_image_layout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR &&
1468        VN_PRESENT_SRC_INTERNAL_LAYOUT != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
1469 
1470       /* sanity check */
1471       ASSERTED const struct vn_image *img = vn_image_from_handle(src_image);
1472       assert(img->wsi.is_wsi && img->wsi.is_prime_blit_src);
1473       return true;
1474    }
1475 
1476    return false;
1477 }
1478 
/* Encode a buffer memory barrier that releases ownership of dst_buffer from
 * this command pool's queue family to VK_QUEUE_FAMILY_FOREIGN_EXT after the
 * transfer write, making the prime blit result available to an external
 * (foreign-queue) consumer.
 */
static void
vn_transition_prime_layout(struct vn_command_buffer *cmd, VkBuffer dst_buffer)
{
   const VkBufferMemoryBarrier buf_barrier = {
      .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
      .srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
      /* queue family ownership release: pool's family -> foreign */
      .srcQueueFamilyIndex = cmd->pool->queue_family_index,
      .dstQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT,
      .buffer = dst_buffer,
      .size = VK_WHOLE_SIZE,
   };
   vn_cmd_encode_memory_barriers(cmd, VK_PIPELINE_STAGE_TRANSFER_BIT,
                                 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 1,
                                 &buf_barrier, 0, NULL);
}
1494 
/* vkCmdCopyImageToBuffer with prime blit handling.
 *
 * When the source is a present-src swapchain image (see
 * vn_needs_prime_blit), the copy is encoded with the internal layout
 * instead, and afterwards the destination buffer is released to the
 * foreign queue family via vn_transition_prime_layout.
 */
void
vn_CmdCopyImageToBuffer(VkCommandBuffer commandBuffer,
                        VkImage srcImage,
                        VkImageLayout srcImageLayout,
                        VkBuffer dstBuffer,
                        uint32_t regionCount,
                        const VkBufferImageCopy *pRegions)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);

   bool prime_blit = vn_needs_prime_blit(srcImage, srcImageLayout);
   if (prime_blit)
      srcImageLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

   VN_CMD_ENQUEUE(vkCmdCopyImageToBuffer, commandBuffer, srcImage,
                  srcImageLayout, dstBuffer, regionCount, pRegions);

   if (prime_blit)
      vn_transition_prime_layout(cmd, dstBuffer);
}
1516 
/* vkCmdCopyImageToBuffer2 with prime blit handling.
 *
 * The info struct is copied to a local so srcImageLayout can be patched to
 * the internal layout without touching the caller's memory; see
 * vn_CmdCopyImageToBuffer for the prime blit rationale.
 */
void
vn_CmdCopyImageToBuffer2(
   VkCommandBuffer commandBuffer,
   const VkCopyImageToBufferInfo2 *pCopyImageToBufferInfo)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   struct VkCopyImageToBufferInfo2 copy_info = *pCopyImageToBufferInfo;

   bool prime_blit =
      vn_needs_prime_blit(copy_info.srcImage, copy_info.srcImageLayout);
   if (prime_blit)
      copy_info.srcImageLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

   VN_CMD_ENQUEUE(vkCmdCopyImageToBuffer2, commandBuffer, &copy_info);

   if (prime_blit)
      vn_transition_prime_layout(cmd, copy_info.dstBuffer);
}
1536 
/* Encode vkCmdUpdateBuffer (inline data update) into the CS. */
void
vn_CmdUpdateBuffer(VkCommandBuffer commandBuffer,
                   VkBuffer dstBuffer,
                   VkDeviceSize dstOffset,
                   VkDeviceSize dataSize,
                   const void *pData)
{
   VN_CMD_ENQUEUE(vkCmdUpdateBuffer, commandBuffer, dstBuffer, dstOffset,
                  dataSize, pData);
}
1547 
/* Encode vkCmdFillBuffer into the CS. */
void
vn_CmdFillBuffer(VkCommandBuffer commandBuffer,
                 VkBuffer dstBuffer,
                 VkDeviceSize dstOffset,
                 VkDeviceSize size,
                 uint32_t data)
{
   VN_CMD_ENQUEUE(vkCmdFillBuffer, commandBuffer, dstBuffer, dstOffset, size,
                  data);
}
1558 
/* Encode vkCmdClearColorImage into the CS. */
void
vn_CmdClearColorImage(VkCommandBuffer commandBuffer,
                      VkImage image,
                      VkImageLayout imageLayout,
                      const VkClearColorValue *pColor,
                      uint32_t rangeCount,
                      const VkImageSubresourceRange *pRanges)
{
   VN_CMD_ENQUEUE(vkCmdClearColorImage, commandBuffer, image, imageLayout,
                  pColor, rangeCount, pRanges);
}
1570 
/* Encode vkCmdClearDepthStencilImage into the CS. */
void
vn_CmdClearDepthStencilImage(VkCommandBuffer commandBuffer,
                             VkImage image,
                             VkImageLayout imageLayout,
                             const VkClearDepthStencilValue *pDepthStencil,
                             uint32_t rangeCount,
                             const VkImageSubresourceRange *pRanges)
{
   VN_CMD_ENQUEUE(vkCmdClearDepthStencilImage, commandBuffer, image,
                  imageLayout, pDepthStencil, rangeCount, pRanges);
}
1582 
/* Encode vkCmdClearAttachments into the CS. */
void
vn_CmdClearAttachments(VkCommandBuffer commandBuffer,
                       uint32_t attachmentCount,
                       const VkClearAttachment *pAttachments,
                       uint32_t rectCount,
                       const VkClearRect *pRects)
{
   VN_CMD_ENQUEUE(vkCmdClearAttachments, commandBuffer, attachmentCount,
                  pAttachments, rectCount, pRects);
}
1593 
/* Encode vkCmdResolveImage into the CS. */
void
vn_CmdResolveImage(VkCommandBuffer commandBuffer,
                   VkImage srcImage,
                   VkImageLayout srcImageLayout,
                   VkImage dstImage,
                   VkImageLayout dstImageLayout,
                   uint32_t regionCount,
                   const VkImageResolve *pRegions)
{
   VN_CMD_ENQUEUE(vkCmdResolveImage, commandBuffer, srcImage, srcImageLayout,
                  dstImage, dstImageLayout, regionCount, pRegions);
}
1606 
/* Encode vkCmdResolveImage2 into the CS. */
void
vn_CmdResolveImage2(VkCommandBuffer commandBuffer,
                    const VkResolveImageInfo2 *pResolveImageInfo)
{
   VN_CMD_ENQUEUE(vkCmdResolveImage2, commandBuffer, pResolveImageInfo);
}
1613 
/* Encode vkCmdSetEvent, then record event feedback (VK_EVENT_SET) for the
 * signal; the trailing false marks this as the non-synchronization2 path.
 */
void
vn_CmdSetEvent(VkCommandBuffer commandBuffer,
               VkEvent event,
               VkPipelineStageFlags stageMask)
{
   VN_CMD_ENQUEUE(vkCmdSetEvent, commandBuffer, event, stageMask);

   vn_event_feedback_cmd_record(commandBuffer, event, stageMask, VK_EVENT_SET,
                                false);
}
1624 
1625 static VkPipelineStageFlags2
vn_dependency_info_collect_src_stage_mask(const VkDependencyInfo * dep_info)1626 vn_dependency_info_collect_src_stage_mask(const VkDependencyInfo *dep_info)
1627 {
1628    VkPipelineStageFlags2 mask = 0;
1629 
1630    for (uint32_t i = 0; i < dep_info->memoryBarrierCount; i++)
1631       mask |= dep_info->pMemoryBarriers[i].srcStageMask;
1632 
1633    for (uint32_t i = 0; i < dep_info->bufferMemoryBarrierCount; i++)
1634       mask |= dep_info->pBufferMemoryBarriers[i].srcStageMask;
1635 
1636    for (uint32_t i = 0; i < dep_info->imageMemoryBarrierCount; i++)
1637       mask |= dep_info->pImageMemoryBarriers[i].srcStageMask;
1638 
1639    return mask;
1640 }
1641 
/* vkCmdSetEvent2: fix up the dependency info (see
 * vn_cmd_fix_dependency_infos), encode the command, then record event
 * feedback with the union of the barriers' srcStageMask values; the
 * trailing true marks the synchronization2 path.
 */
void
vn_CmdSetEvent2(VkCommandBuffer commandBuffer,
                VkEvent event,
                const VkDependencyInfo *pDependencyInfo)

{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);

   pDependencyInfo = vn_cmd_fix_dependency_infos(cmd, 1, pDependencyInfo);

   VN_CMD_ENQUEUE(vkCmdSetEvent2, commandBuffer, event, pDependencyInfo);

   const VkPipelineStageFlags2 src_stage_mask =
      vn_dependency_info_collect_src_stage_mask(pDependencyInfo);
   vn_event_feedback_cmd_record(commandBuffer, event, src_stage_mask,
                                VK_EVENT_SET, true);
}
1660 
/* Encode vkCmdResetEvent, then record event feedback (VK_EVENT_RESET);
 * non-synchronization2 path.
 */
void
vn_CmdResetEvent(VkCommandBuffer commandBuffer,
                 VkEvent event,
                 VkPipelineStageFlags stageMask)
{
   VN_CMD_ENQUEUE(vkCmdResetEvent, commandBuffer, event, stageMask);

   vn_event_feedback_cmd_record(commandBuffer, event, stageMask,
                                VK_EVENT_RESET, false);
}
1671 
/* Encode vkCmdResetEvent2, then record event feedback (VK_EVENT_RESET);
 * synchronization2 path.
 */
void
vn_CmdResetEvent2(VkCommandBuffer commandBuffer,
                  VkEvent event,
                  VkPipelineStageFlags2 stageMask)
{
   VN_CMD_ENQUEUE(vkCmdResetEvent2, commandBuffer, event, stageMask);
   vn_event_feedback_cmd_record(commandBuffer, event, stageMask,
                                VK_EVENT_RESET, true);
}
1681 
/* vkCmdWaitEvents with image memory barrier fix-up.
 *
 * vn_cmd_wait_events_fix_image_memory_barriers returns a (possibly
 * rewritten) barrier array with `transfer_count` special-case barriers
 * moved to its tail.  Those tail barriers are excluded from the encoded
 * vkCmdWaitEvents and instead encoded separately via
 * vn_cmd_encode_memory_barriers.
 */
void
vn_CmdWaitEvents(VkCommandBuffer commandBuffer,
                 uint32_t eventCount,
                 const VkEvent *pEvents,
                 VkPipelineStageFlags srcStageMask,
                 VkPipelineStageFlags dstStageMask,
                 uint32_t memoryBarrierCount,
                 const VkMemoryBarrier *pMemoryBarriers,
                 uint32_t bufferMemoryBarrierCount,
                 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                 uint32_t imageMemoryBarrierCount,
                 const VkImageMemoryBarrier *pImageMemoryBarriers)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   uint32_t transfer_count;

   pImageMemoryBarriers = vn_cmd_wait_events_fix_image_memory_barriers(
      cmd, pImageMemoryBarriers, imageMemoryBarrierCount, &transfer_count);
   /* the tail transfer_count barriers are handled below */
   imageMemoryBarrierCount -= transfer_count;

   VN_CMD_ENQUEUE(vkCmdWaitEvents, commandBuffer, eventCount, pEvents,
                  srcStageMask, dstStageMask, memoryBarrierCount,
                  pMemoryBarriers, bufferMemoryBarrierCount,
                  pBufferMemoryBarriers, imageMemoryBarrierCount,
                  pImageMemoryBarriers);

   if (transfer_count) {
      /* advance to the tail barriers split off above */
      pImageMemoryBarriers += imageMemoryBarrierCount;
      vn_cmd_encode_memory_barriers(cmd, srcStageMask, dstStageMask, 0, NULL,
                                    transfer_count, pImageMemoryBarriers);
   }
}
1715 
/* vkCmdWaitEvents2: fix up the per-event dependency infos (see
 * vn_cmd_fix_dependency_infos) before encoding.
 */
void
vn_CmdWaitEvents2(VkCommandBuffer commandBuffer,
                  uint32_t eventCount,
                  const VkEvent *pEvents,
                  const VkDependencyInfo *pDependencyInfos)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);

   pDependencyInfos =
      vn_cmd_fix_dependency_infos(cmd, eventCount, pDependencyInfos);

   VN_CMD_ENQUEUE(vkCmdWaitEvents2, commandBuffer, eventCount, pEvents,
                  pDependencyInfos);
}
1731 
/* vkCmdPipelineBarrier: image memory barriers are passed through
 * vn_cmd_pipeline_barrier_fix_image_memory_barriers before encoding.
 */
void
vn_CmdPipelineBarrier(VkCommandBuffer commandBuffer,
                      VkPipelineStageFlags srcStageMask,
                      VkPipelineStageFlags dstStageMask,
                      VkDependencyFlags dependencyFlags,
                      uint32_t memoryBarrierCount,
                      const VkMemoryBarrier *pMemoryBarriers,
                      uint32_t bufferMemoryBarrierCount,
                      const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                      uint32_t imageMemoryBarrierCount,
                      const VkImageMemoryBarrier *pImageMemoryBarriers)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);

   pImageMemoryBarriers = vn_cmd_pipeline_barrier_fix_image_memory_barriers(
      cmd, pImageMemoryBarriers, imageMemoryBarrierCount);

   VN_CMD_ENQUEUE(vkCmdPipelineBarrier, commandBuffer, srcStageMask,
                  dstStageMask, dependencyFlags, memoryBarrierCount,
                  pMemoryBarriers, bufferMemoryBarrierCount,
                  pBufferMemoryBarriers, imageMemoryBarrierCount,
                  pImageMemoryBarriers);
}
1756 
/* vkCmdPipelineBarrier2: fix up the dependency info (see
 * vn_cmd_fix_dependency_infos) before encoding.
 */
void
vn_CmdPipelineBarrier2(VkCommandBuffer commandBuffer,
                       const VkDependencyInfo *pDependencyInfo)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);

   pDependencyInfo = vn_cmd_fix_dependency_infos(cmd, 1, pDependencyInfo);

   VN_CMD_ENQUEUE(vkCmdPipelineBarrier2, commandBuffer, pDependencyInfo);
}
1768 
/* Encode vkCmdBeginQuery into the CS (feedback is recorded at end-query). */
void
vn_CmdBeginQuery(VkCommandBuffer commandBuffer,
                 VkQueryPool queryPool,
                 uint32_t query,
                 VkQueryControlFlags flags)
{
   VN_CMD_ENQUEUE(vkCmdBeginQuery, commandBuffer, queryPool, query, flags);
}
1777 
1778 static inline uint32_t
vn_cmd_get_query_count(VkCommandBuffer cmd_handle)1779 vn_cmd_get_query_count(VkCommandBuffer cmd_handle)
1780 {
1781    /* Per 1.3.255 spec "If queries are used while executing a render pass
1782     * instance that has multiview enabled, the query uses N consecutive
1783     * query indices in the query pool (starting at query) where N is the
1784     * number of bits set in the view mask in the subpass the query is used
1785     * in."
1786     */
1787    struct vn_command_buffer *cmd = vn_command_buffer_from_handle(cmd_handle);
1788    return cmd->builder.in_render_pass && cmd->builder.view_mask
1789              ? util_bitcount(cmd->builder.view_mask)
1790              : 1;
1791 }
1792 
/* Queue query feedback for queries [query, query + query_count) of
 * pool_handle.
 *
 * Lazily initializes the pool's feedback buffer on first use, allocates a
 * vn_cmd_query_record (copy selects result-copy vs. reset semantics) and
 * appends it to cmd->builder.query_records.  On init or allocation failure
 * the command buffer is marked invalid.  No-op under VN_PERF
 * NO_QUERY_FEEDBACK.
 */
static void
vn_cmd_record_query(VkCommandBuffer cmd_handle,
                    VkQueryPool pool_handle,
                    uint32_t query,
                    uint32_t query_count,
                    bool copy)
{
   struct vn_command_buffer *cmd = vn_command_buffer_from_handle(cmd_handle);
   struct vn_query_pool *query_pool = vn_query_pool_from_handle(pool_handle);

   if (unlikely(VN_PERF(NO_QUERY_FEEDBACK)))
      return;

   /* feedback buffer is created on first recorded query of this pool */
   if (unlikely(!query_pool->fb_buf)) {
      if (vn_query_feedback_buffer_init_once(cmd->pool->device, query_pool) !=
          VK_SUCCESS) {
         cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
         return;
      }
   }

   struct vn_cmd_query_record *record = vn_cmd_pool_alloc_query_record(
      cmd->pool, query_pool, query, query_count, copy);
   if (!record) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return;
   }

   list_addtail(&record->head, &cmd->builder.query_records);
}
1823 
/* Encode vkCmdEndQuery, then record feedback copies for the N consecutive
 * queries the command covers (N > 1 under multiview; see
 * vn_cmd_get_query_count).
 */
void
vn_CmdEndQuery(VkCommandBuffer commandBuffer,
               VkQueryPool queryPool,
               uint32_t query)
{
   VN_CMD_ENQUEUE(vkCmdEndQuery, commandBuffer, queryPool, query);

   const uint32_t query_count = vn_cmd_get_query_count(commandBuffer);
   vn_cmd_record_query(commandBuffer, queryPool, query, query_count, true);
}
1834 
/* Encode vkCmdResetQueryPool, then record reset feedback (copy=false) for
 * the affected query range.
 */
void
vn_CmdResetQueryPool(VkCommandBuffer commandBuffer,
                     VkQueryPool queryPool,
                     uint32_t firstQuery,
                     uint32_t queryCount)
{
   VN_CMD_ENQUEUE(vkCmdResetQueryPool, commandBuffer, queryPool, firstQuery,
                  queryCount);

   vn_cmd_record_query(commandBuffer, queryPool, firstQuery, queryCount,
                       false);
}
1847 
/* Encode vkCmdWriteTimestamp, then record feedback copies for the N
 * consecutive queries written (see vn_cmd_get_query_count).
 */
void
vn_CmdWriteTimestamp(VkCommandBuffer commandBuffer,
                     VkPipelineStageFlagBits pipelineStage,
                     VkQueryPool queryPool,
                     uint32_t query)
{
   VN_CMD_ENQUEUE(vkCmdWriteTimestamp, commandBuffer, pipelineStage,
                  queryPool, query);

   const uint32_t query_count = vn_cmd_get_query_count(commandBuffer);
   vn_cmd_record_query(commandBuffer, queryPool, query, query_count, true);
}
1860 
/* Synchronization2 variant of vn_CmdWriteTimestamp. */
void
vn_CmdWriteTimestamp2(VkCommandBuffer commandBuffer,
                      VkPipelineStageFlagBits2 stage,
                      VkQueryPool queryPool,
                      uint32_t query)
{
   VN_CMD_ENQUEUE(vkCmdWriteTimestamp2, commandBuffer, stage, queryPool,
                  query);

   const uint32_t query_count = vn_cmd_get_query_count(commandBuffer);
   vn_cmd_record_query(commandBuffer, queryPool, query, query_count, true);
}
1873 
/* Encode vkCmdCopyQueryPoolResults into the CS. */
void
vn_CmdCopyQueryPoolResults(VkCommandBuffer commandBuffer,
                           VkQueryPool queryPool,
                           uint32_t firstQuery,
                           uint32_t queryCount,
                           VkBuffer dstBuffer,
                           VkDeviceSize dstOffset,
                           VkDeviceSize stride,
                           VkQueryResultFlags flags)
{
   VN_CMD_ENQUEUE(vkCmdCopyQueryPoolResults, commandBuffer, queryPool,
                  firstQuery, queryCount, dstBuffer, dstOffset, stride,
                  flags);
}
1888 
/* Encode vkCmdPushConstants into the CS. */
void
vn_CmdPushConstants(VkCommandBuffer commandBuffer,
                    VkPipelineLayout layout,
                    VkShaderStageFlags stageFlags,
                    uint32_t offset,
                    uint32_t size,
                    const void *pValues)
{
   VN_CMD_ENQUEUE(vkCmdPushConstants, commandBuffer, layout, stageFlags,
                  offset, size, pValues);
}
1900 
/* vkCmdBeginRenderPass: update the builder's render pass state via
 * vn_cmd_begin_render_pass before encoding the command.
 */
void
vn_CmdBeginRenderPass(VkCommandBuffer commandBuffer,
                      const VkRenderPassBeginInfo *pRenderPassBegin,
                      VkSubpassContents contents)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);

   vn_cmd_begin_render_pass(
      cmd, vn_render_pass_from_handle(pRenderPassBegin->renderPass),
      vn_framebuffer_from_handle(pRenderPassBegin->framebuffer),
      pRenderPassBegin);

   VN_CMD_ENQUEUE(vkCmdBeginRenderPass, commandBuffer, pRenderPassBegin,
                  contents);
}
1917 
/* vkCmdNextSubpass: advance the builder's subpass state, then encode. */
void
vn_CmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents)
{
   vn_cmd_next_subpass(vn_command_buffer_from_handle(commandBuffer));

   VN_CMD_ENQUEUE(vkCmdNextSubpass, commandBuffer, contents);
}
1925 
/* vkCmdEndRenderPass: encode, then clear the builder's render pass state. */
void
vn_CmdEndRenderPass(VkCommandBuffer commandBuffer)
{
   VN_CMD_ENQUEUE(vkCmdEndRenderPass, commandBuffer);

   vn_cmd_end_render_pass(vn_command_buffer_from_handle(commandBuffer));
}
1933 
/* vkCmdBeginRenderPass2: update the builder's render pass state via
 * vn_cmd_begin_render_pass before encoding the command.
 */
void
vn_CmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                       const VkRenderPassBeginInfo *pRenderPassBegin,
                       const VkSubpassBeginInfo *pSubpassBeginInfo)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);

   vn_cmd_begin_render_pass(
      cmd, vn_render_pass_from_handle(pRenderPassBegin->renderPass),
      vn_framebuffer_from_handle(pRenderPassBegin->framebuffer),
      pRenderPassBegin);

   VN_CMD_ENQUEUE(vkCmdBeginRenderPass2, commandBuffer, pRenderPassBegin,
                  pSubpassBeginInfo);
}
1950 
/* vkCmdNextSubpass2: advance the builder's subpass state, then encode. */
void
vn_CmdNextSubpass2(VkCommandBuffer commandBuffer,
                   const VkSubpassBeginInfo *pSubpassBeginInfo,
                   const VkSubpassEndInfo *pSubpassEndInfo)
{
   vn_cmd_next_subpass(vn_command_buffer_from_handle(commandBuffer));

   VN_CMD_ENQUEUE(vkCmdNextSubpass2, commandBuffer, pSubpassBeginInfo,
                  pSubpassEndInfo);
}
1961 
/* vkCmdEndRenderPass2: encode, then clear the builder's render pass state. */
void
vn_CmdEndRenderPass2(VkCommandBuffer commandBuffer,
                     const VkSubpassEndInfo *pSubpassEndInfo)
{
   VN_CMD_ENQUEUE(vkCmdEndRenderPass2, commandBuffer, pSubpassEndInfo);

   vn_cmd_end_render_pass(vn_command_buffer_from_handle(commandBuffer));
}
1970 
1971 void
vn_CmdExecuteCommands(VkCommandBuffer commandBuffer,uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers)1972 vn_CmdExecuteCommands(VkCommandBuffer commandBuffer,
1973                       uint32_t commandBufferCount,
1974                       const VkCommandBuffer *pCommandBuffers)
1975 {
1976    VN_CMD_ENQUEUE(vkCmdExecuteCommands, commandBuffer, commandBufferCount,
1977                   pCommandBuffers);
1978 
1979    struct vn_command_buffer *primary_cmd =
1980       vn_command_buffer_from_handle(commandBuffer);
1981    for (uint32_t i = 0; i < commandBufferCount; i++) {
1982       struct vn_command_buffer *secondary_cmd =
1983          vn_command_buffer_from_handle(pCommandBuffers[i]);
1984       vn_cmd_merge_query_records(primary_cmd, secondary_cmd);
1985    }
1986 }
1987 
/* Encode vkCmdSetDeviceMask into the CS. */
void
vn_CmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask)
{
   VN_CMD_ENQUEUE(vkCmdSetDeviceMask, commandBuffer, deviceMask);
}
1993 
/* Encode vkCmdDispatchBase into the CS. */
void
vn_CmdDispatchBase(VkCommandBuffer commandBuffer,
                   uint32_t baseGroupX,
                   uint32_t baseGroupY,
                   uint32_t baseGroupZ,
                   uint32_t groupCountX,
                   uint32_t groupCountY,
                   uint32_t groupCountZ)
{
   VN_CMD_ENQUEUE(vkCmdDispatchBase, commandBuffer, baseGroupX, baseGroupY,
                  baseGroupZ, groupCountX, groupCountY, groupCountZ);
}
2006 
/* Encode vkCmdSetLineStippleEXT into the CS. */
void
vn_CmdSetLineStippleEXT(VkCommandBuffer commandBuffer,
                        uint32_t lineStippleFactor,
                        uint16_t lineStipplePattern)
{
   VN_CMD_ENQUEUE(vkCmdSetLineStippleEXT, commandBuffer, lineStippleFactor,
                  lineStipplePattern);
}
2015 
/* Encode vkCmdBeginQueryIndexedEXT (feedback is recorded at end-query). */
void
vn_CmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer,
                           VkQueryPool queryPool,
                           uint32_t query,
                           VkQueryControlFlags flags,
                           uint32_t index)
{
   VN_CMD_ENQUEUE(vkCmdBeginQueryIndexedEXT, commandBuffer, queryPool, query,
                  flags, index);
}
2026 
/* Encode vkCmdEndQueryIndexedEXT, then record feedback copies for the N
 * consecutive queries covered (see vn_cmd_get_query_count).
 */
void
vn_CmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer,
                         VkQueryPool queryPool,
                         uint32_t query,
                         uint32_t index)
{
   VN_CMD_ENQUEUE(vkCmdEndQueryIndexedEXT, commandBuffer, queryPool, query,
                  index);

   const uint32_t query_count = vn_cmd_get_query_count(commandBuffer);
   vn_cmd_record_query(commandBuffer, queryPool, query, query_count, true);
}
2039 
/* Encode vkCmdBindTransformFeedbackBuffersEXT into the CS. */
void
vn_CmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer,
                                      uint32_t firstBinding,
                                      uint32_t bindingCount,
                                      const VkBuffer *pBuffers,
                                      const VkDeviceSize *pOffsets,
                                      const VkDeviceSize *pSizes)
{
   VN_CMD_ENQUEUE(vkCmdBindTransformFeedbackBuffersEXT, commandBuffer,
                  firstBinding, bindingCount, pBuffers, pOffsets, pSizes);
}
2051 
/* Encode vkCmdBeginTransformFeedbackEXT into the CS. */
void
vn_CmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer,
                                uint32_t firstCounterBuffer,
                                uint32_t counterBufferCount,
                                const VkBuffer *pCounterBuffers,
                                const VkDeviceSize *pCounterBufferOffsets)
{
   VN_CMD_ENQUEUE(vkCmdBeginTransformFeedbackEXT, commandBuffer,
                  firstCounterBuffer, counterBufferCount, pCounterBuffers,
                  pCounterBufferOffsets);
}
2063 
/* Encode vkCmdEndTransformFeedbackEXT into the CS. */
void
vn_CmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer,
                              uint32_t firstCounterBuffer,
                              uint32_t counterBufferCount,
                              const VkBuffer *pCounterBuffers,
                              const VkDeviceSize *pCounterBufferOffsets)
{
   VN_CMD_ENQUEUE(vkCmdEndTransformFeedbackEXT, commandBuffer,
                  firstCounterBuffer, counterBufferCount, pCounterBuffers,
                  pCounterBufferOffsets);
}
2075 
/* Encode vkCmdDrawIndirectByteCountEXT into the CS. */
void
vn_CmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer,
                               uint32_t instanceCount,
                               uint32_t firstInstance,
                               VkBuffer counterBuffer,
                               VkDeviceSize counterBufferOffset,
                               uint32_t counterOffset,
                               uint32_t vertexStride)
{
   VN_CMD_ENQUEUE(vkCmdDrawIndirectByteCountEXT, commandBuffer, instanceCount,
                  firstInstance, counterBuffer, counterBufferOffset,
                  counterOffset, vertexStride);
}
2089 
/* Encode vkCmdBindVertexBuffers2 into the CS. */
void
vn_CmdBindVertexBuffers2(VkCommandBuffer commandBuffer,
                         uint32_t firstBinding,
                         uint32_t bindingCount,
                         const VkBuffer *pBuffers,
                         const VkDeviceSize *pOffsets,
                         const VkDeviceSize *pSizes,
                         const VkDeviceSize *pStrides)
{
   VN_CMD_ENQUEUE(vkCmdBindVertexBuffers2, commandBuffer, firstBinding,
                  bindingCount, pBuffers, pOffsets, pSizes, pStrides);
}
2102 
/*
 * Dynamic state entry points (core 1.3 and VK_EXT_extended_dynamic_state/2,
 * VK_EXT_color_write_enable).  Each is a pure passthrough: VN_CMD_ENQUEUE
 * serializes the command into the command buffer's command stream; if the
 * encoder cannot reserve space, the command buffer is marked invalid (see
 * the macro definition at the top of this file).
 */
void
vn_CmdSetCullMode(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode)
{
   VN_CMD_ENQUEUE(vkCmdSetCullMode, commandBuffer, cullMode);
}

void
vn_CmdSetDepthBoundsTestEnable(VkCommandBuffer commandBuffer,
                               VkBool32 depthBoundsTestEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthBoundsTestEnable, commandBuffer,
                  depthBoundsTestEnable);
}

void
vn_CmdSetDepthCompareOp(VkCommandBuffer commandBuffer,
                        VkCompareOp depthCompareOp)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthCompareOp, commandBuffer, depthCompareOp);
}

void
vn_CmdSetDepthTestEnable(VkCommandBuffer commandBuffer,
                         VkBool32 depthTestEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthTestEnable, commandBuffer, depthTestEnable);
}

void
vn_CmdSetDepthWriteEnable(VkCommandBuffer commandBuffer,
                          VkBool32 depthWriteEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthWriteEnable, commandBuffer, depthWriteEnable);
}

void
vn_CmdSetFrontFace(VkCommandBuffer commandBuffer, VkFrontFace frontFace)
{
   VN_CMD_ENQUEUE(vkCmdSetFrontFace, commandBuffer, frontFace);
}

void
vn_CmdSetPrimitiveTopology(VkCommandBuffer commandBuffer,
                           VkPrimitiveTopology primitiveTopology)
{
   VN_CMD_ENQUEUE(vkCmdSetPrimitiveTopology, commandBuffer,
                  primitiveTopology);
}

void
vn_CmdSetScissorWithCount(VkCommandBuffer commandBuffer,
                          uint32_t scissorCount,
                          const VkRect2D *pScissors)
{
   VN_CMD_ENQUEUE(vkCmdSetScissorWithCount, commandBuffer, scissorCount,
                  pScissors);
}

void
vn_CmdSetStencilOp(VkCommandBuffer commandBuffer,
                   VkStencilFaceFlags faceMask,
                   VkStencilOp failOp,
                   VkStencilOp passOp,
                   VkStencilOp depthFailOp,
                   VkCompareOp compareOp)
{
   VN_CMD_ENQUEUE(vkCmdSetStencilOp, commandBuffer, faceMask, failOp, passOp,
                  depthFailOp, compareOp);
}

void
vn_CmdSetStencilTestEnable(VkCommandBuffer commandBuffer,
                           VkBool32 stencilTestEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetStencilTestEnable, commandBuffer,
                  stencilTestEnable);
}

void
vn_CmdSetViewportWithCount(VkCommandBuffer commandBuffer,
                           uint32_t viewportCount,
                           const VkViewport *pViewports)
{
   VN_CMD_ENQUEUE(vkCmdSetViewportWithCount, commandBuffer, viewportCount,
                  pViewports);
}

void
vn_CmdSetDepthBiasEnable(VkCommandBuffer commandBuffer,
                         VkBool32 depthBiasEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthBiasEnable, commandBuffer, depthBiasEnable);
}

void
vn_CmdSetLogicOpEXT(VkCommandBuffer commandBuffer, VkLogicOp logicOp)
{
   VN_CMD_ENQUEUE(vkCmdSetLogicOpEXT, commandBuffer, logicOp);
}

void
vn_CmdSetColorWriteEnableEXT(VkCommandBuffer commandBuffer,
                             uint32_t attachmentCount,
                             const VkBool32 *pColorWriteEnables)
{
   VN_CMD_ENQUEUE(vkCmdSetColorWriteEnableEXT, commandBuffer, attachmentCount,
                  pColorWriteEnables);
}

void
vn_CmdSetPatchControlPointsEXT(VkCommandBuffer commandBuffer,
                               uint32_t patchControlPoints)
{
   VN_CMD_ENQUEUE(vkCmdSetPatchControlPointsEXT, commandBuffer,
                  patchControlPoints);
}

void
vn_CmdSetPrimitiveRestartEnable(VkCommandBuffer commandBuffer,
                                VkBool32 primitiveRestartEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetPrimitiveRestartEnable, commandBuffer,
                  primitiveRestartEnable);
}

void
vn_CmdSetRasterizerDiscardEnable(VkCommandBuffer commandBuffer,
                                 VkBool32 rasterizerDiscardEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetRasterizerDiscardEnable, commandBuffer,
                  rasterizerDiscardEnable);
}
2235 
/* VK_EXT_conditional_rendering: passthrough begin/end encodes. */
void
vn_CmdBeginConditionalRenderingEXT(
   VkCommandBuffer commandBuffer,
   const VkConditionalRenderingBeginInfoEXT *pConditionalRenderingBegin)
{
   VN_CMD_ENQUEUE(vkCmdBeginConditionalRenderingEXT, commandBuffer,
                  pConditionalRenderingBegin);
}

void
vn_CmdEndConditionalRenderingEXT(VkCommandBuffer commandBuffer)
{
   VN_CMD_ENQUEUE(vkCmdEndConditionalRenderingEXT, commandBuffer);
}
2250 
/* VK_EXT_multi_draw: passthrough encodes of the batched draw commands.
 * The pVertexInfo/pIndexInfo arrays (drawCount entries, advanced by
 * `stride` bytes each) are forwarded as-is for the renderer to consume.
 */
void
vn_CmdDrawMultiEXT(VkCommandBuffer commandBuffer,
                   uint32_t drawCount,
                   const VkMultiDrawInfoEXT *pVertexInfo,
                   uint32_t instanceCount,
                   uint32_t firstInstance,
                   uint32_t stride)
{
   VN_CMD_ENQUEUE(vkCmdDrawMultiEXT, commandBuffer, drawCount, pVertexInfo,
                  instanceCount, firstInstance, stride);
}

void
vn_CmdDrawMultiIndexedEXT(VkCommandBuffer commandBuffer,
                          uint32_t drawCount,
                          const VkMultiDrawIndexedInfoEXT *pIndexInfo,
                          uint32_t instanceCount,
                          uint32_t firstInstance,
                          uint32_t stride,
                          const int32_t *pVertexOffset)
{
   VN_CMD_ENQUEUE(vkCmdDrawMultiIndexedEXT, commandBuffer, drawCount,
                  pIndexInfo, instanceCount, firstInstance, stride,
                  pVertexOffset);
}
2276 
2277 void
vn_CmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipelineLayout layout,uint32_t set,uint32_t descriptorWriteCount,const VkWriteDescriptorSet * pDescriptorWrites)2278 vn_CmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
2279                            VkPipelineBindPoint pipelineBindPoint,
2280                            VkPipelineLayout layout,
2281                            uint32_t set,
2282                            uint32_t descriptorWriteCount,
2283                            const VkWriteDescriptorSet *pDescriptorWrites)
2284 {
2285    const uint32_t img_info_count = vn_descriptor_set_count_write_images(
2286       descriptorWriteCount, pDescriptorWrites);
2287 
2288    STACK_ARRAY(VkWriteDescriptorSet, writes, descriptorWriteCount);
2289    STACK_ARRAY(VkDescriptorImageInfo, img_infos, img_info_count);
2290    struct vn_descriptor_set_writes local = {
2291       .writes = writes,
2292       .img_infos = img_infos,
2293    };
2294    pDescriptorWrites = vn_descriptor_set_get_writes(
2295       descriptorWriteCount, pDescriptorWrites, layout, &local);
2296 
2297    VN_CMD_ENQUEUE(vkCmdPushDescriptorSetKHR, commandBuffer, pipelineBindPoint,
2298                   layout, set, descriptorWriteCount, pDescriptorWrites);
2299 
2300    STACK_ARRAY_FINISH(writes);
2301    STACK_ARRAY_FINISH(img_infos);
2302 }
2303 
2304 void
vn_CmdPushDescriptorSetWithTemplateKHR(VkCommandBuffer commandBuffer,VkDescriptorUpdateTemplate descriptorUpdateTemplate,VkPipelineLayout layout,uint32_t set,const void * pData)2305 vn_CmdPushDescriptorSetWithTemplateKHR(
2306    VkCommandBuffer commandBuffer,
2307    VkDescriptorUpdateTemplate descriptorUpdateTemplate,
2308    VkPipelineLayout layout,
2309    uint32_t set,
2310    const void *pData)
2311 {
2312    struct vn_descriptor_update_template *templ =
2313       vn_descriptor_update_template_from_handle(descriptorUpdateTemplate);
2314 
2315    STACK_ARRAY(VkWriteDescriptorSet, writes, templ->entry_count);
2316    STACK_ARRAY(VkDescriptorImageInfo, img_infos, templ->img_info_count);
2317    STACK_ARRAY(VkDescriptorBufferInfo, buf_infos, templ->buf_info_count);
2318    STACK_ARRAY(VkBufferView, bview_handles, templ->bview_count);
2319    STACK_ARRAY(VkWriteDescriptorSetInlineUniformBlock, iubs,
2320                templ->iub_count);
2321    struct vn_descriptor_set_update update = {
2322       .writes = writes,
2323       .img_infos = img_infos,
2324       .buf_infos = buf_infos,
2325       .bview_handles = bview_handles,
2326       .iubs = iubs,
2327    };
2328    vn_descriptor_set_fill_update_with_template(templ, VK_NULL_HANDLE, pData,
2329                                                &update);
2330 
2331    VN_CMD_ENQUEUE(vkCmdPushDescriptorSetKHR, commandBuffer,
2332                   templ->push.pipeline_bind_point, layout, set,
2333                   update.write_count, update.writes);
2334 
2335    STACK_ARRAY_FINISH(writes);
2336    STACK_ARRAY_FINISH(img_infos);
2337    STACK_ARRAY_FINISH(buf_infos);
2338    STACK_ARRAY_FINISH(bview_handles);
2339    STACK_ARRAY_FINISH(iubs);
2340 }
2341 
/* VK_EXT_vertex_input_dynamic_state: passthrough encode of the dynamic
 * vertex input binding/attribute descriptions.
 */
void
vn_CmdSetVertexInputEXT(
   VkCommandBuffer commandBuffer,
   uint32_t vertexBindingDescriptionCount,
   const VkVertexInputBindingDescription2EXT *pVertexBindingDescriptions,
   uint32_t vertexAttributeDescriptionCount,
   const VkVertexInputAttributeDescription2EXT *pVertexAttributeDescriptions)
{
   VN_CMD_ENQUEUE(vkCmdSetVertexInputEXT, commandBuffer,
                  vertexBindingDescriptionCount, pVertexBindingDescriptions,
                  vertexAttributeDescriptionCount,
                  pVertexAttributeDescriptions);
}
2355 
/*
 * VK_EXT_extended_dynamic_state3 entry points.  All are pure passthroughs:
 * VN_CMD_ENQUEUE serializes the command into the command buffer's command
 * stream; encoder OOM marks the command buffer invalid (see the macro at
 * the top of this file).
 */
void
vn_CmdSetAlphaToCoverageEnableEXT(VkCommandBuffer commandBuffer,
                                  VkBool32 alphaToCoverageEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetAlphaToCoverageEnableEXT, commandBuffer,
                  alphaToCoverageEnable);
}

void
vn_CmdSetAlphaToOneEnableEXT(VkCommandBuffer commandBuffer,
                             VkBool32 alphaToOneEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetAlphaToOneEnableEXT, commandBuffer,
                  alphaToOneEnable);
}

void
vn_CmdSetColorBlendAdvancedEXT(
   VkCommandBuffer commandBuffer,
   uint32_t firstAttachment,
   uint32_t attachmentCount,
   const VkColorBlendAdvancedEXT *pColorBlendAdvanced)
{
   VN_CMD_ENQUEUE(vkCmdSetColorBlendAdvancedEXT, commandBuffer,
                  firstAttachment, attachmentCount, pColorBlendAdvanced);
}

void
vn_CmdSetColorBlendEnableEXT(VkCommandBuffer commandBuffer,
                             uint32_t firstAttachment,
                             uint32_t attachmentCount,
                             const VkBool32 *pColorBlendEnables)
{
   VN_CMD_ENQUEUE(vkCmdSetColorBlendEnableEXT, commandBuffer, firstAttachment,
                  attachmentCount, pColorBlendEnables);
}

void
vn_CmdSetColorBlendEquationEXT(
   VkCommandBuffer commandBuffer,
   uint32_t firstAttachment,
   uint32_t attachmentCount,
   const VkColorBlendEquationEXT *pColorBlendEquations)
{
   VN_CMD_ENQUEUE(vkCmdSetColorBlendEquationEXT, commandBuffer,
                  firstAttachment, attachmentCount, pColorBlendEquations);
}

void
vn_CmdSetColorWriteMaskEXT(VkCommandBuffer commandBuffer,
                           uint32_t firstAttachment,
                           uint32_t attachmentCount,
                           const VkColorComponentFlags *pColorWriteMasks)
{
   VN_CMD_ENQUEUE(vkCmdSetColorWriteMaskEXT, commandBuffer, firstAttachment,
                  attachmentCount, pColorWriteMasks);
}

void
vn_CmdSetConservativeRasterizationModeEXT(
   VkCommandBuffer commandBuffer,
   VkConservativeRasterizationModeEXT conservativeRasterizationMode)
{
   VN_CMD_ENQUEUE(vkCmdSetConservativeRasterizationModeEXT, commandBuffer,
                  conservativeRasterizationMode);
}

void
vn_CmdSetDepthClampEnableEXT(VkCommandBuffer commandBuffer,
                             VkBool32 depthClampEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthClampEnableEXT, commandBuffer,
                  depthClampEnable);
}

void
vn_CmdSetDepthClipEnableEXT(VkCommandBuffer commandBuffer,
                            VkBool32 depthClipEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthClipEnableEXT, commandBuffer, depthClipEnable);
}

void
vn_CmdSetDepthClipNegativeOneToOneEXT(VkCommandBuffer commandBuffer,
                                      VkBool32 negativeOneToOne)
{
   VN_CMD_ENQUEUE(vkCmdSetDepthClipNegativeOneToOneEXT, commandBuffer,
                  negativeOneToOne);
}

void
vn_CmdSetExtraPrimitiveOverestimationSizeEXT(
   VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize)
{
   VN_CMD_ENQUEUE(vkCmdSetExtraPrimitiveOverestimationSizeEXT, commandBuffer,
                  extraPrimitiveOverestimationSize);
}

void
vn_CmdSetLineRasterizationModeEXT(
   VkCommandBuffer commandBuffer,
   VkLineRasterizationModeEXT lineRasterizationMode)
{
   VN_CMD_ENQUEUE(vkCmdSetLineRasterizationModeEXT, commandBuffer,
                  lineRasterizationMode);
}

void
vn_CmdSetLineStippleEnableEXT(VkCommandBuffer commandBuffer,
                              VkBool32 stippledLineEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetLineStippleEnableEXT, commandBuffer,
                  stippledLineEnable);
}

void
vn_CmdSetLogicOpEnableEXT(VkCommandBuffer commandBuffer,
                          VkBool32 logicOpEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetLogicOpEnableEXT, commandBuffer, logicOpEnable);
}

void
vn_CmdSetPolygonModeEXT(VkCommandBuffer commandBuffer,
                        VkPolygonMode polygonMode)
{
   VN_CMD_ENQUEUE(vkCmdSetPolygonModeEXT, commandBuffer, polygonMode);
}

void
vn_CmdSetProvokingVertexModeEXT(VkCommandBuffer commandBuffer,
                                VkProvokingVertexModeEXT provokingVertexMode)
{
   VN_CMD_ENQUEUE(vkCmdSetProvokingVertexModeEXT, commandBuffer,
                  provokingVertexMode);
}

void
vn_CmdSetRasterizationSamplesEXT(VkCommandBuffer commandBuffer,
                                 VkSampleCountFlagBits rasterizationSamples)
{
   VN_CMD_ENQUEUE(vkCmdSetRasterizationSamplesEXT, commandBuffer,
                  rasterizationSamples);
}

void
vn_CmdSetRasterizationStreamEXT(VkCommandBuffer commandBuffer,
                                uint32_t rasterizationStream)
{
   VN_CMD_ENQUEUE(vkCmdSetRasterizationStreamEXT, commandBuffer,
                  rasterizationStream);
}

void
vn_CmdSetSampleLocationsEnableEXT(VkCommandBuffer commandBuffer,
                                  VkBool32 sampleLocationsEnable)
{
   VN_CMD_ENQUEUE(vkCmdSetSampleLocationsEnableEXT, commandBuffer,
                  sampleLocationsEnable);
}

void
vn_CmdSetSampleMaskEXT(VkCommandBuffer commandBuffer,
                       VkSampleCountFlagBits samples,
                       const VkSampleMask *pSampleMask)
{
   VN_CMD_ENQUEUE(vkCmdSetSampleMaskEXT, commandBuffer, samples, pSampleMask);
}

void
vn_CmdSetTessellationDomainOriginEXT(VkCommandBuffer commandBuffer,
                                     VkTessellationDomainOrigin domainOrigin)
{
   VN_CMD_ENQUEUE(vkCmdSetTessellationDomainOriginEXT, commandBuffer,
                  domainOrigin);
}
2532 
/* VK_KHR_fragment_shading_rate: passthrough encode of the dynamic shading
 * rate and its two combiner ops.
 */
void
vn_CmdSetFragmentShadingRateKHR(
   VkCommandBuffer commandBuffer,
   const VkExtent2D *pFragmentSize,
   const VkFragmentShadingRateCombinerOpKHR combinerOps[2])
{
   VN_CMD_ENQUEUE(vkCmdSetFragmentShadingRateKHR, commandBuffer,
                  pFragmentSize, combinerOps);
}
2542