/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 */

#include "vn_descriptor_set.h"

#include "venus-protocol/vn_protocol_driver_descriptor_pool.h"
#include "venus-protocol/vn_protocol_driver_descriptor_set.h"
#include "venus-protocol/vn_protocol_driver_descriptor_set_layout.h"
#include "venus-protocol/vn_protocol_driver_descriptor_update_template.h"

#include "vn_device.h"

static void
vn_descriptor_set_layout_destroy(struct vn_device *dev,
                                 struct vn_descriptor_set_layout *layout)
{
   VkDevice dev_handle = vn_device_to_handle(dev);
   VkDescriptorSetLayout layout_handle =
      vn_descriptor_set_layout_to_handle(layout);
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;

   vn_async_vkDestroyDescriptorSetLayout(dev->instance, dev_handle,
                                         layout_handle, NULL);

   vn_object_base_fini(&layout->base);
   vk_free(alloc, layout);
}

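/* A layout is reference-counted: it starts with one reference from
 * vkCreateDescriptorSetLayout, and every descriptor set allocated against it
 * takes another (see vn_AllocateDescriptorSets).  The renderer-side layout
 * thus stays alive until the last set using it goes away.
 */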
static inline struct vn_descriptor_set_layout *
vn_descriptor_set_layout_ref(struct vn_device *dev,
                             struct vn_descriptor_set_layout *layout)
{
   vn_refcount_inc(&layout->refcount);
   return layout;
}

static inline void
vn_descriptor_set_layout_unref(struct vn_device *dev,
                               struct vn_descriptor_set_layout *layout)
{
   if (vn_refcount_dec(&layout->refcount))
      vn_descriptor_set_layout_destroy(dev, layout);
}

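/* Frees only the driver-side set object and drops its layout reference; the
 * renderer-side set is released by the caller via vkDestroyDescriptorPool,
 * vkResetDescriptorPool, or vkFreeDescriptorSets.
 */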
static void
vn_descriptor_set_destroy(struct vn_device *dev,
                          struct vn_descriptor_set *set,
                          const VkAllocationCallbacks *alloc)
{
   list_del(&set->head);

   vn_descriptor_set_layout_unref(dev, set->layout);

   vn_object_base_fini(&set->base);
   vk_free(alloc, set);
}

/* Mapping VkDescriptorType to array index */
static enum vn_descriptor_type
vn_descriptor_type_index(VkDescriptorType type)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      return VN_DESCRIPTOR_TYPE_SAMPLER;
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      return VN_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      return VN_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      return VN_DESCRIPTOR_TYPE_STORAGE_IMAGE;
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      return VN_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      return VN_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      return VN_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      return VN_DESCRIPTOR_TYPE_STORAGE_BUFFER;
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      return VN_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      return VN_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      return VN_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
      return VN_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK;
   default:
      break;
   }

   unreachable("bad VkDescriptorType");
}

/* descriptor set layout commands */

void
vn_GetDescriptorSetLayoutSupport(
   VkDevice device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   VkDescriptorSetLayoutSupport *pSupport)
{
   struct vn_device *dev = vn_device_from_handle(device);

   /* TODO per-device cache */
   vn_call_vkGetDescriptorSetLayoutSupport(dev->instance, device, pCreateInfo,
                                           pSupport);
}

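/* Records, per binding, the descriptor type, count, and whether immutable
 * samplers are used, so that later set allocation and update paths can
 * consult the layout without a renderer round-trip, then creates the
 * renderer-side layout asynchronously.
 */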
static void
vn_descriptor_set_layout_init(
   struct vn_device *dev,
   const VkDescriptorSetLayoutCreateInfo *create_info,
   uint32_t last_binding,
   struct vn_descriptor_set_layout *layout)
{
   VkDevice dev_handle = vn_device_to_handle(dev);
   VkDescriptorSetLayout layout_handle =
      vn_descriptor_set_layout_to_handle(layout);
   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags =
      vk_find_struct_const(create_info->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);

   /* 14.2.1. Descriptor Set Layout
    *
    * If bindingCount is zero or if this structure is not included in
    * the pNext chain, the VkDescriptorBindingFlags for each descriptor
    * set layout binding is considered to be zero.
    */
   if (binding_flags && !binding_flags->bindingCount)
      binding_flags = NULL;

   layout->refcount = VN_REFCOUNT_INIT(1);
   layout->last_binding = last_binding;

   for (uint32_t i = 0; i < create_info->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding_info =
         &create_info->pBindings[i];
      struct vn_descriptor_set_layout_binding *binding =
         &layout->bindings[binding_info->binding];

      if (binding_info->binding == last_binding) {
         /* 14.2.1. Descriptor Set Layout
          *
          * VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT must only be
          * used for the last binding in the descriptor set layout (i.e. the
          * binding with the largest value of binding).
          *
          * 41. Features
          *
          * descriptorBindingVariableDescriptorCount indicates whether the
          * implementation supports descriptor sets with a variable-sized last
          * binding. If this feature is not enabled,
          * VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT must not be
          * used.
          */
         layout->has_variable_descriptor_count =
            binding_flags &&
            (binding_flags->pBindingFlags[i] &
             VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT);
      }

      binding->type = binding_info->descriptorType;
      binding->count = binding_info->descriptorCount;

      switch (binding_info->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         binding->has_immutable_samplers = binding_info->pImmutableSamplers;
         break;
      default:
         break;
      }
   }

   vn_async_vkCreateDescriptorSetLayout(dev->instance, dev_handle,
                                        create_info, NULL, &layout_handle);
}

VkResult
vn_CreateDescriptorSetLayout(
   VkDevice device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorSetLayout *pSetLayout)
{
   struct vn_device *dev = vn_device_from_handle(device);
   /* ignore pAllocator as the layout is reference-counted */
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;

   uint32_t last_binding = 0;
   VkDescriptorSetLayoutBinding *local_bindings = NULL;
   VkDescriptorSetLayoutCreateInfo local_create_info;
   if (pCreateInfo->bindingCount) {
      /* the encoder does not ignore
       * VkDescriptorSetLayoutBinding::pImmutableSamplers when it should
       */
      const size_t binding_size =
         sizeof(*pCreateInfo->pBindings) * pCreateInfo->bindingCount;
      local_bindings = vk_alloc(alloc, binding_size, VN_DEFAULT_ALIGN,
                                VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
      if (!local_bindings)
         return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

      memcpy(local_bindings, pCreateInfo->pBindings, binding_size);
      for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
         VkDescriptorSetLayoutBinding *binding = &local_bindings[i];

         if (last_binding < binding->binding)
            last_binding = binding->binding;

         switch (binding->descriptorType) {
         case VK_DESCRIPTOR_TYPE_SAMPLER:
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            break;
         default:
            binding->pImmutableSamplers = NULL;
            break;
         }
      }

      local_create_info = *pCreateInfo;
      local_create_info.pBindings = local_bindings;
      pCreateInfo = &local_create_info;
   }

   const size_t layout_size =
      offsetof(struct vn_descriptor_set_layout, bindings[last_binding + 1]);
   /* allocated with the device scope */
   struct vn_descriptor_set_layout *layout =
      vk_zalloc(alloc, layout_size, VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
   if (!layout) {
      vk_free(alloc, local_bindings);
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   vn_object_base_init(&layout->base, VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,
                       &dev->base);

   vn_descriptor_set_layout_init(dev, pCreateInfo, last_binding, layout);

   vk_free(alloc, local_bindings);

   *pSetLayout = vn_descriptor_set_layout_to_handle(layout);

   return VK_SUCCESS;
}

void
vn_DestroyDescriptorSetLayout(VkDevice device,
                              VkDescriptorSetLayout descriptorSetLayout,
                              const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_set_layout *layout =
      vn_descriptor_set_layout_from_handle(descriptorSetLayout);

   if (!layout)
      return;

   vn_descriptor_set_layout_unref(dev, layout);
}

/* descriptor pool commands */

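/* The pool mirrors its descriptor budget locally (pool->max vs. pool->used)
 * so that, when async set allocation is enabled, out-of-pool-memory failures
 * can be detected in the driver without waiting on the renderer.
 */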
VkResult
vn_CreateDescriptorPool(VkDevice device,
                        const VkDescriptorPoolCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkDescriptorPool *pDescriptorPool)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   const VkDescriptorPoolInlineUniformBlockCreateInfo *iub_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO);

   struct vn_descriptor_pool *pool =
      vk_zalloc(alloc, sizeof(*pool), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&pool->base, VK_OBJECT_TYPE_DESCRIPTOR_POOL,
                       &dev->base);

   pool->allocator = *alloc;

   /* Without VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, the set
    * allocation must not fail due to a fragmented pool per spec. In this
    * case, set allocation can be asynchronous with pool resource tracking.
    */
   pool->async_set_allocation =
      !VN_PERF(NO_ASYNC_SET_ALLOC) &&
      !(pCreateInfo->flags &
        VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT);

   pool->max.set_count = pCreateInfo->maxSets;

   if (iub_info)
      pool->max.iub_binding_count = iub_info->maxInlineUniformBlockBindings;

   for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
      const VkDescriptorPoolSize *pool_size = &pCreateInfo->pPoolSizes[i];
      const uint32_t type_index = vn_descriptor_type_index(pool_size->type);

      assert(type_index < VN_NUM_DESCRIPTOR_TYPES);

      pool->max.descriptor_counts[type_index] += pool_size->descriptorCount;

      assert((pCreateInfo->pPoolSizes[i].type !=
              VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) ||
             iub_info);
   }

   list_inithead(&pool->descriptor_sets);

   VkDescriptorPool pool_handle = vn_descriptor_pool_to_handle(pool);
   vn_async_vkCreateDescriptorPool(dev->instance, device, pCreateInfo, NULL,
                                   &pool_handle);

   *pDescriptorPool = pool_handle;

   return VK_SUCCESS;
}

void
vn_DestroyDescriptorPool(VkDevice device,
                         VkDescriptorPool descriptorPool,
                         const VkAllocationCallbacks *pAllocator)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_pool *pool =
      vn_descriptor_pool_from_handle(descriptorPool);
   const VkAllocationCallbacks *alloc;

   if (!pool)
      return;

   alloc = pAllocator ? pAllocator : &pool->allocator;

   /* We must emit vkDestroyDescriptorPool before freeing the sets in
    * pool->descriptor_sets.  Otherwise, another thread might reuse their
    * object ids while they still refer to the sets in the renderer.
    */
   vn_async_vkDestroyDescriptorPool(dev->instance, device, descriptorPool,
                                    NULL);

   list_for_each_entry_safe(struct vn_descriptor_set, set,
                            &pool->descriptor_sets, head)
      vn_descriptor_set_destroy(dev, set, alloc);

   vn_object_base_fini(&pool->base);
   vk_free(alloc, pool);
}

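/* Reserves pool capacity for a set before the renderer is asked to allocate
 * it.  On failure the pool state is rolled back and the caller returns
 * VK_ERROR_OUT_OF_POOL_MEMORY without a renderer round-trip.
 */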
static bool
vn_descriptor_pool_alloc_descriptors(
   struct vn_descriptor_pool *pool,
   const struct vn_descriptor_set_layout *layout,
   uint32_t last_binding_descriptor_count)
{
   struct vn_descriptor_pool_state recovery;

   if (!pool->async_set_allocation)
      return true;

   if (pool->used.set_count == pool->max.set_count)
      return false;

   /* backup current pool state to recovery */
   recovery = pool->used;

   ++pool->used.set_count;

   for (uint32_t i = 0; i <= layout->last_binding; i++) {
      const VkDescriptorType type = layout->bindings[i].type;
      const uint32_t count = i == layout->last_binding
                                ? last_binding_descriptor_count
                                : layout->bindings[i].count;

      /* Allocation may fail if a call to vkAllocateDescriptorSets would cause
       * the total number of inline uniform block bindings allocated from the
       * pool to exceed the value of
       * VkDescriptorPoolInlineUniformBlockCreateInfo::maxInlineUniformBlockBindings
       * used to create the descriptor pool.
       */
      if (type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         if (++pool->used.iub_binding_count > pool->max.iub_binding_count) {
            /* restore pool state before this allocation */
            pool->used = recovery;
            return false;
         }
      }

      const uint32_t type_index = vn_descriptor_type_index(type);
      pool->used.descriptor_counts[type_index] += count;

      if (pool->used.descriptor_counts[type_index] >
          pool->max.descriptor_counts[type_index]) {
         /* restore pool state before this allocation */
         pool->used = recovery;
         return false;
      }
   }

   return true;
}

static void
vn_descriptor_pool_free_descriptors(
   struct vn_descriptor_pool *pool,
   const struct vn_descriptor_set_layout *layout,
   uint32_t last_binding_descriptor_count)
{
   if (!pool->async_set_allocation)
      return;

   for (uint32_t i = 0; i <= layout->last_binding; i++) {
      const uint32_t count = i == layout->last_binding
                                ? last_binding_descriptor_count
                                : layout->bindings[i].count;

      pool->used.descriptor_counts[vn_descriptor_type_index(
         layout->bindings[i].type)] -= count;

      if (layout->bindings[i].type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
         --pool->used.iub_binding_count;
   }

   --pool->used.set_count;
}

static void
vn_descriptor_pool_reset_descriptors(struct vn_descriptor_pool *pool)
{
   if (!pool->async_set_allocation)
      return;

   memset(&pool->used, 0, sizeof(pool->used));
}

VkResult
vn_ResetDescriptorPool(VkDevice device,
                       VkDescriptorPool descriptorPool,
                       VkDescriptorPoolResetFlags flags)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_pool *pool =
      vn_descriptor_pool_from_handle(descriptorPool);
   const VkAllocationCallbacks *alloc = &pool->allocator;

   vn_async_vkResetDescriptorPool(dev->instance, device, descriptorPool,
                                  flags);

   list_for_each_entry_safe(struct vn_descriptor_set, set,
                            &pool->descriptor_sets, head)
      vn_descriptor_set_destroy(dev, set, alloc);

   vn_descriptor_pool_reset_descriptors(pool);

   return VK_SUCCESS;
}

/* descriptor set commands */

VkResult
vn_AllocateDescriptorSets(VkDevice device,
                          const VkDescriptorSetAllocateInfo *pAllocateInfo,
                          VkDescriptorSet *pDescriptorSets)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_pool *pool =
      vn_descriptor_pool_from_handle(pAllocateInfo->descriptorPool);
   const VkAllocationCallbacks *alloc = &pool->allocator;
   const VkDescriptorSetVariableDescriptorCountAllocateInfo *variable_info =
      NULL;
   VkResult result;

   /* 14.2.3. Allocation of Descriptor Sets
    *
    * If descriptorSetCount is zero or this structure is not included in
    * the pNext chain, then the variable lengths are considered to be zero.
    */
   variable_info = vk_find_struct_const(
      pAllocateInfo->pNext,
      DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO);

   if (variable_info && !variable_info->descriptorSetCount)
      variable_info = NULL;

   for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      struct vn_descriptor_set_layout *layout =
         vn_descriptor_set_layout_from_handle(pAllocateInfo->pSetLayouts[i]);
      uint32_t last_binding_descriptor_count = 0;
      struct vn_descriptor_set *set = NULL;

      /* 14.2.3. Allocation of Descriptor Sets
       *
       * If VkDescriptorSetAllocateInfo::pSetLayouts[i] does not include a
       * variable count descriptor binding, then pDescriptorCounts[i] is
       * ignored.
       */
      if (!layout->has_variable_descriptor_count) {
         last_binding_descriptor_count =
            layout->bindings[layout->last_binding].count;
      } else if (variable_info) {
         last_binding_descriptor_count = variable_info->pDescriptorCounts[i];
      }

      if (!vn_descriptor_pool_alloc_descriptors(
             pool, layout, last_binding_descriptor_count)) {
         pDescriptorSets[i] = VK_NULL_HANDLE;
         result = VK_ERROR_OUT_OF_POOL_MEMORY;
         goto fail;
      }

      set = vk_zalloc(alloc, sizeof(*set), VN_DEFAULT_ALIGN,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
      if (!set) {
         vn_descriptor_pool_free_descriptors(pool, layout,
                                             last_binding_descriptor_count);
         pDescriptorSets[i] = VK_NULL_HANDLE;
         result = VK_ERROR_OUT_OF_HOST_MEMORY;
         goto fail;
      }

      vn_object_base_init(&set->base, VK_OBJECT_TYPE_DESCRIPTOR_SET,
                          &dev->base);

      /* We might reorder vkCmdBindDescriptorSets after
       * vkDestroyDescriptorSetLayout due to batching.  The spec says
       *
       *   VkDescriptorSetLayout objects may be accessed by commands that
       *   operate on descriptor sets allocated using that layout, and those
       *   descriptor sets must not be updated with vkUpdateDescriptorSets
       *   after the descriptor set layout has been destroyed. Otherwise, a
       *   VkDescriptorSetLayout object passed as a parameter to create
       *   another object is not further accessed by that object after the
       *   duration of the command it is passed into.
       *
       * It is ambiguous but the reordering is likely invalid.  Let's keep the
       * layout alive with the set to defer vkDestroyDescriptorSetLayout.
       */
      set->layout = vn_descriptor_set_layout_ref(dev, layout);
      set->last_binding_descriptor_count = last_binding_descriptor_count;
      list_addtail(&set->head, &pool->descriptor_sets);

      VkDescriptorSet set_handle = vn_descriptor_set_to_handle(set);
      pDescriptorSets[i] = set_handle;
   }

   if (pool->async_set_allocation) {
      vn_async_vkAllocateDescriptorSets(dev->instance, device, pAllocateInfo,
                                        pDescriptorSets);
   } else {
      result = vn_call_vkAllocateDescriptorSets(
         dev->instance, device, pAllocateInfo, pDescriptorSets);
      if (result != VK_SUCCESS)
         goto fail;
   }

   return VK_SUCCESS;

fail:
   for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      struct vn_descriptor_set *set =
         vn_descriptor_set_from_handle(pDescriptorSets[i]);
      if (!set)
         break;

      vn_descriptor_pool_free_descriptors(pool, set->layout,
                                          set->last_binding_descriptor_count);

      vn_descriptor_set_destroy(dev, set, alloc);
   }

   memset(pDescriptorSets, 0,
          sizeof(*pDescriptorSets) * pAllocateInfo->descriptorSetCount);

   return vn_error(dev->instance, result);
}

VkResult
vn_FreeDescriptorSets(VkDevice device,
                      VkDescriptorPool descriptorPool,
                      uint32_t descriptorSetCount,
                      const VkDescriptorSet *pDescriptorSets)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_pool *pool =
      vn_descriptor_pool_from_handle(descriptorPool);
   const VkAllocationCallbacks *alloc = &pool->allocator;

   vn_async_vkFreeDescriptorSets(dev->instance, device, descriptorPool,
                                 descriptorSetCount, pDescriptorSets);

   for (uint32_t i = 0; i < descriptorSetCount; i++) {
      struct vn_descriptor_set *set =
         vn_descriptor_set_from_handle(pDescriptorSets[i]);

      if (!set)
         continue;

      vn_descriptor_set_destroy(dev, set, alloc);
   }

   return VK_SUCCESS;
}

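/* A vn_update_descriptor_sets is a single allocation holding the
 * VkWriteDescriptorSet array followed by the pImageInfo, pBufferInfo,
 * pTexelBufferView, and inline uniform block payloads the writes point
 * into.
 */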
static struct vn_update_descriptor_sets *
vn_update_descriptor_sets_alloc(uint32_t write_count,
                                uint32_t image_count,
                                uint32_t buffer_count,
                                uint32_t view_count,
                                uint32_t iub_count,
                                const VkAllocationCallbacks *alloc,
                                VkSystemAllocationScope scope)
{
   const size_t writes_offset = sizeof(struct vn_update_descriptor_sets);
   const size_t images_offset =
      writes_offset + sizeof(VkWriteDescriptorSet) * write_count;
   const size_t buffers_offset =
      images_offset + sizeof(VkDescriptorImageInfo) * image_count;
   const size_t views_offset =
      buffers_offset + sizeof(VkDescriptorBufferInfo) * buffer_count;
   const size_t iubs_offset =
      views_offset + sizeof(VkBufferView) * view_count;
   const size_t alloc_size =
      iubs_offset +
      sizeof(VkWriteDescriptorSetInlineUniformBlock) * iub_count;

   void *storage = vk_alloc(alloc, alloc_size, VN_DEFAULT_ALIGN, scope);
   if (!storage)
      return NULL;

   struct vn_update_descriptor_sets *update = storage;
   update->write_count = write_count;
   update->writes = storage + writes_offset;
   update->images = storage + images_offset;
   update->buffers = storage + buffers_offset;
   update->views = storage + views_offset;
   update->iubs = storage + iubs_offset;

   return update;
}

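/* Deep-copies the application's writes so that, per descriptor type, the
 * union members the encoder would otherwise serialize blindly (unused
 * pImageInfo/pBufferInfo/pTexelBufferView pointers, ignored sampler or
 * image view handles) can be nulled out first.
 */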
static struct vn_update_descriptor_sets *
vn_update_descriptor_sets_parse_writes(uint32_t write_count,
                                       const VkWriteDescriptorSet *writes,
                                       const VkAllocationCallbacks *alloc)
{
   uint32_t img_count = 0;
   for (uint32_t i = 0; i < write_count; i++) {
      const VkWriteDescriptorSet *write = &writes[i];
      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         img_count += write->descriptorCount;
         break;
      default:
         break;
      }
   }

   struct vn_update_descriptor_sets *update =
      vn_update_descriptor_sets_alloc(write_count, img_count, 0, 0, 0, alloc,
                                      VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
   if (!update)
      return NULL;

   /* the encoder does not ignore
    * VkWriteDescriptorSet::{pImageInfo,pBufferInfo,pTexelBufferView} when it
    * should
    *
    * TODO make the encoder smarter
    */
   memcpy(update->writes, writes, sizeof(*writes) * write_count);
   img_count = 0;
   for (uint32_t i = 0; i < write_count; i++) {
      const struct vn_descriptor_set *set =
         vn_descriptor_set_from_handle(writes[i].dstSet);
      const struct vn_descriptor_set_layout_binding *binding =
         &set->layout->bindings[writes[i].dstBinding];
      VkWriteDescriptorSet *write = &update->writes[i];
      VkDescriptorImageInfo *imgs = &update->images[img_count];

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         memcpy(imgs, write->pImageInfo,
                sizeof(*imgs) * write->descriptorCount);
         img_count += write->descriptorCount;

         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            switch (write->descriptorType) {
            case VK_DESCRIPTOR_TYPE_SAMPLER:
               imgs[j].imageView = VK_NULL_HANDLE;
               break;
            case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
               if (binding->has_immutable_samplers)
                  imgs[j].sampler = VK_NULL_HANDLE;
               break;
            case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
            case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
            case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
               imgs[j].sampler = VK_NULL_HANDLE;
               break;
            default:
               break;
            }
         }

         write->pImageInfo = imgs;
         write->pBufferInfo = NULL;
         write->pTexelBufferView = NULL;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         write->pImageInfo = NULL;
         write->pBufferInfo = NULL;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         write->pImageInfo = NULL;
         write->pTexelBufferView = NULL;
         break;
      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
      default:
         write->pImageInfo = NULL;
         write->pBufferInfo = NULL;
         write->pTexelBufferView = NULL;
         break;
      }
   }

   return update;
}

void
vn_UpdateDescriptorSets(VkDevice device,
                        uint32_t descriptorWriteCount,
                        const VkWriteDescriptorSet *pDescriptorWrites,
                        uint32_t descriptorCopyCount,
                        const VkCopyDescriptorSet *pDescriptorCopies)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;

   struct vn_update_descriptor_sets *update =
      vn_update_descriptor_sets_parse_writes(descriptorWriteCount,
                                             pDescriptorWrites, alloc);
   if (!update) {
      /* TODO update one-by-one? */
      vn_log(dev->instance, "TODO descriptor set update ignored due to OOM");
      return;
   }

   vn_async_vkUpdateDescriptorSets(dev->instance, device, update->write_count,
                                   update->writes, descriptorCopyCount,
                                   pDescriptorCopies);

   vk_free(alloc, update);
}

/* descriptor update template commands */

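/* Pre-builds a vn_update_descriptor_sets from the template entries at
 * template creation time: sType, binding, count, and the pointers into the
 * shared payload arrays are fixed here, so that
 * vn_UpdateDescriptorSetWithTemplate only has to patch in dstSet and the
 * data read from pData.
 */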
static struct vn_update_descriptor_sets *
vn_update_descriptor_sets_parse_template(
   const VkDescriptorUpdateTemplateCreateInfo *create_info,
   const VkAllocationCallbacks *alloc,
   struct vn_descriptor_update_template_entry *entries)
{
   uint32_t img_count = 0;
   uint32_t buf_count = 0;
   uint32_t view_count = 0;
   uint32_t iub_count = 0;
   for (uint32_t i = 0; i < create_info->descriptorUpdateEntryCount; i++) {
      const VkDescriptorUpdateTemplateEntry *entry =
         &create_info->pDescriptorUpdateEntries[i];

      switch (entry->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         img_count += entry->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         view_count += entry->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         buf_count += entry->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         iub_count += 1;
         break;
      default:
         unreachable("unhandled descriptor type");
         break;
      }
   }

   struct vn_update_descriptor_sets *update = vn_update_descriptor_sets_alloc(
      create_info->descriptorUpdateEntryCount, img_count, buf_count,
      view_count, iub_count, alloc, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!update)
      return NULL;

   img_count = 0;
   buf_count = 0;
   view_count = 0;
   iub_count = 0;
   for (uint32_t i = 0; i < create_info->descriptorUpdateEntryCount; i++) {
      const VkDescriptorUpdateTemplateEntry *entry =
         &create_info->pDescriptorUpdateEntries[i];
      VkWriteDescriptorSet *write = &update->writes[i];

      write->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
      write->pNext = NULL;
      write->dstBinding = entry->dstBinding;
      write->dstArrayElement = entry->dstArrayElement;
      write->descriptorCount = entry->descriptorCount;
      write->descriptorType = entry->descriptorType;

      entries[i].offset = entry->offset;
      entries[i].stride = entry->stride;

      switch (entry->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         write->pImageInfo = &update->images[img_count];
         write->pBufferInfo = NULL;
         write->pTexelBufferView = NULL;
         img_count += entry->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         write->pImageInfo = NULL;
         write->pBufferInfo = NULL;
         write->pTexelBufferView = &update->views[view_count];
         view_count += entry->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         write->pImageInfo = NULL;
         write->pBufferInfo = &update->buffers[buf_count];
         write->pTexelBufferView = NULL;
         buf_count += entry->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         write->pImageInfo = NULL;
         write->pBufferInfo = NULL;
         write->pTexelBufferView = NULL;
         VkWriteDescriptorSetInlineUniformBlock *iub_data =
            &update->iubs[iub_count];
         iub_data->sType =
            VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK;
         iub_data->pNext = write->pNext;
         iub_data->dataSize = entry->descriptorCount;
         write->pNext = iub_data;
         iub_count += 1;
         break;
      default:
         break;
      }
   }

   return update;
}

VkResult
vn_CreateDescriptorUpdateTemplate(
   VkDevice device,
   const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   const size_t templ_size =
      offsetof(struct vn_descriptor_update_template,
               entries[pCreateInfo->descriptorUpdateEntryCount + 1]);
   struct vn_descriptor_update_template *templ = vk_zalloc(
      alloc, templ_size, VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!templ)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&templ->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, &dev->base);

   templ->update = vn_update_descriptor_sets_parse_template(
      pCreateInfo, alloc, templ->entries);
   if (!templ->update) {
      vk_free(alloc, templ);
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   mtx_init(&templ->mutex, mtx_plain);

   /* no host object */
   VkDescriptorUpdateTemplate templ_handle =
      vn_descriptor_update_template_to_handle(templ);
   *pDescriptorUpdateTemplate = templ_handle;

   return VK_SUCCESS;
}

void
vn_DestroyDescriptorUpdateTemplate(
   VkDevice device,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const VkAllocationCallbacks *pAllocator)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_update_template *templ =
      vn_descriptor_update_template_from_handle(descriptorUpdateTemplate);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!templ)
      return;

   /* no host object */
   vk_free(alloc, templ->update);
   mtx_destroy(&templ->mutex);

   vn_object_base_fini(&templ->base);
   vk_free(alloc, templ);
}

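/* Updates are serialized with a per-template mutex because the pre-built
 * write array is patched in place; duplicating the update to avoid locking
 * is a possible alternative (see the comment below).
 */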
void
vn_UpdateDescriptorSetWithTemplate(
   VkDevice device,
   VkDescriptorSet descriptorSet,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_set *set =
      vn_descriptor_set_from_handle(descriptorSet);
   struct vn_descriptor_update_template *templ =
      vn_descriptor_update_template_from_handle(descriptorUpdateTemplate);
   struct vn_update_descriptor_sets *update = templ->update;

   /* duplicate update instead to avoid locking? */
   mtx_lock(&templ->mutex);

   for (uint32_t i = 0; i < update->write_count; i++) {
      const struct vn_descriptor_update_template_entry *entry =
         &templ->entries[i];
      const struct vn_descriptor_set_layout_binding *binding =
         &set->layout->bindings[update->writes[i].dstBinding];
      VkWriteDescriptorSet *write = &update->writes[i];

      write->dstSet = vn_descriptor_set_to_handle(set);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            const bool need_sampler =
               (write->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER ||
                write->descriptorType ==
                   VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
               !binding->has_immutable_samplers;
            const bool need_view =
               write->descriptorType != VK_DESCRIPTOR_TYPE_SAMPLER;
            const VkDescriptorImageInfo *src =
               pData + entry->offset + entry->stride * j;
            VkDescriptorImageInfo *dst =
               (VkDescriptorImageInfo *)&write->pImageInfo[j];

            dst->sampler = need_sampler ? src->sampler : VK_NULL_HANDLE;
            dst->imageView = need_view ? src->imageView : VK_NULL_HANDLE;
            dst->imageLayout = src->imageLayout;
         }
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            const VkBufferView *src =
               pData + entry->offset + entry->stride * j;
            VkBufferView *dst = (VkBufferView *)&write->pTexelBufferView[j];
            *dst = *src;
         }
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            const VkDescriptorBufferInfo *src =
               pData + entry->offset + entry->stride * j;
            VkDescriptorBufferInfo *dst =
               (VkDescriptorBufferInfo *)&write->pBufferInfo[j];
            *dst = *src;
         }
         break;
      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:;
         VkWriteDescriptorSetInlineUniformBlock *iub_data =
            (VkWriteDescriptorSetInlineUniformBlock *)vk_find_struct_const(
               write->pNext, WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK);
         iub_data->pData = pData + entry->offset;
         break;
      default:
         unreachable("unhandled descriptor type");
         break;
      }
   }

   vn_async_vkUpdateDescriptorSets(dev->instance, device, update->write_count,
                                   update->writes, 0, NULL);

   mtx_unlock(&templ->mutex);
}