/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 */

#include "vn_descriptor_set.h"

#include "venus-protocol/vn_protocol_driver_descriptor_pool.h"
#include "venus-protocol/vn_protocol_driver_descriptor_set.h"
#include "venus-protocol/vn_protocol_driver_descriptor_set_layout.h"
#include "venus-protocol/vn_protocol_driver_descriptor_update_template.h"

#include "vn_device.h"
#include "vn_pipeline.h"
void
vn_descriptor_set_layout_destroy(struct vn_device *dev,
                                 struct vn_descriptor_set_layout *layout)
{
   VkDevice dev_handle = vn_device_to_handle(dev);
   VkDescriptorSetLayout layout_handle =
      vn_descriptor_set_layout_to_handle(layout);
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;

   vn_async_vkDestroyDescriptorSetLayout(dev->primary_ring, dev_handle,
                                         layout_handle, NULL);

   vn_object_base_fini(&layout->base);
   vk_free(alloc, layout);
}

static void
vn_descriptor_set_destroy(struct vn_device *dev,
                          struct vn_descriptor_set *set,
                          const VkAllocationCallbacks *alloc)
{
   list_del(&set->head);

   vn_descriptor_set_layout_unref(dev, set->layout);

   vn_object_base_fini(&set->base);
   vk_free(alloc, set);
}

/* Mapping VkDescriptorType to array index */
static enum vn_descriptor_type
vn_descriptor_type_index(VkDescriptorType type)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      return VN_DESCRIPTOR_TYPE_SAMPLER;
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      return VN_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      return VN_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      return VN_DESCRIPTOR_TYPE_STORAGE_IMAGE;
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      return VN_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      return VN_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      return VN_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      return VN_DESCRIPTOR_TYPE_STORAGE_BUFFER;
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      return VN_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      return VN_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      return VN_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
      return VN_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK;
   case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
      return VN_DESCRIPTOR_TYPE_MUTABLE_EXT;
   default:
      break;
   }

   unreachable("bad VkDescriptorType");
}

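/* The index is used to address fixed-size per-type arrays such as
 * vn_descriptor_pool_state::descriptor_counts below.  A minimal sketch of
 * the pattern (illustrative only):
 *
 *    uint32_t counts[VN_NUM_DESCRIPTOR_TYPES] = { 0 };
 *    counts[vn_descriptor_type_index(VK_DESCRIPTOR_TYPE_SAMPLER)]++;
 */
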
/* descriptor set layout commands */

void
vn_GetDescriptorSetLayoutSupport(
   VkDevice device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   VkDescriptorSetLayoutSupport *pSupport)
{
   struct vn_device *dev = vn_device_from_handle(device);

   /* TODO per-device cache */
   vn_call_vkGetDescriptorSetLayoutSupport(dev->primary_ring, device,
                                           pCreateInfo, pSupport);
}

static void
vn_descriptor_set_layout_init(
   struct vn_device *dev,
   const VkDescriptorSetLayoutCreateInfo *create_info,
   uint32_t last_binding,
   struct vn_descriptor_set_layout *layout)
{
   VkDevice dev_handle = vn_device_to_handle(dev);
   VkDescriptorSetLayout layout_handle =
      vn_descriptor_set_layout_to_handle(layout);
   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags =
      vk_find_struct_const(create_info->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);

   const VkMutableDescriptorTypeCreateInfoEXT *mutable_descriptor_info =
      vk_find_struct_const(create_info->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);

   /* 14.2.1. Descriptor Set Layout
    *
    * If bindingCount is zero or if this structure is not included in
    * the pNext chain, the VkDescriptorBindingFlags for each descriptor
    * set layout binding is considered to be zero.
    */
   if (binding_flags && !binding_flags->bindingCount)
      binding_flags = NULL;

   layout->is_push_descriptor =
      create_info->flags &
      VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR;

   layout->refcount = VN_REFCOUNT_INIT(1);
   layout->last_binding = last_binding;

   for (uint32_t i = 0; i < create_info->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding_info =
         &create_info->pBindings[i];
      struct vn_descriptor_set_layout_binding *binding =
         &layout->bindings[binding_info->binding];

      if (binding_info->binding == last_binding) {
         /* 14.2.1. Descriptor Set Layout
          *
          * VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT must only be
          * used for the last binding in the descriptor set layout (i.e. the
          * binding with the largest value of binding).
          *
          * 41. Features
          *
          * descriptorBindingVariableDescriptorCount indicates whether the
          * implementation supports descriptor sets with a variable-sized last
          * binding. If this feature is not enabled,
          * VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT must not be
          * used.
          */
         layout->has_variable_descriptor_count =
            binding_flags &&
            (binding_flags->pBindingFlags[i] &
             VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT);
      }

      binding->type = binding_info->descriptorType;
      binding->count = binding_info->descriptorCount;

      switch (binding_info->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         binding->has_immutable_samplers = binding_info->pImmutableSamplers;
         break;
      case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
         assert(mutable_descriptor_info->mutableDescriptorTypeListCount &&
                mutable_descriptor_info->pMutableDescriptorTypeLists[i]
                   .descriptorTypeCount);

         const VkMutableDescriptorTypeListEXT *list =
            &mutable_descriptor_info->pMutableDescriptorTypeLists[i];
         for (uint32_t j = 0; j < list->descriptorTypeCount; j++) {
            BITSET_SET(binding->mutable_descriptor_types,
                       vn_descriptor_type_index(list->pDescriptorTypes[j]));
         }
         break;

      default:
         break;
      }
   }

   vn_async_vkCreateDescriptorSetLayout(dev->primary_ring, dev_handle,
                                        create_info, NULL, &layout_handle);
}

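/* Illustrative app-side chain (not driver code) that makes
 * has_variable_descriptor_count true for a one-binding layout:
 *
 *    const VkDescriptorBindingFlags flags =
 *       VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT;
 *    const VkDescriptorSetLayoutBindingFlagsCreateInfo flags_info = {
 *       .sType =
 *          VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
 *       .bindingCount = 1,
 *       .pBindingFlags = &flags,
 *    };
 *    // chained into VkDescriptorSetLayoutCreateInfo::pNext
 */
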
VkResult
vn_CreateDescriptorSetLayout(
   VkDevice device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorSetLayout *pSetLayout)
{
   struct vn_device *dev = vn_device_from_handle(device);
   /* ignore pAllocator as the layout is reference-counted */
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;

   uint32_t last_binding = 0;
   VkDescriptorSetLayoutBinding *local_bindings = NULL;
   VkDescriptorSetLayoutCreateInfo local_create_info;
   if (pCreateInfo->bindingCount) {
      /* the encoder does not ignore
       * VkDescriptorSetLayoutBinding::pImmutableSamplers when it should
       */
      const size_t binding_size =
         sizeof(*pCreateInfo->pBindings) * pCreateInfo->bindingCount;
      local_bindings = vk_alloc(alloc, binding_size, VN_DEFAULT_ALIGN,
                                VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
      if (!local_bindings)
         return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

      memcpy(local_bindings, pCreateInfo->pBindings, binding_size);
      for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
         VkDescriptorSetLayoutBinding *binding = &local_bindings[i];

         if (last_binding < binding->binding)
            last_binding = binding->binding;

         switch (binding->descriptorType) {
         case VK_DESCRIPTOR_TYPE_SAMPLER:
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            break;
         default:
            binding->pImmutableSamplers = NULL;
            break;
         }
      }

      local_create_info = *pCreateInfo;
      local_create_info.pBindings = local_bindings;
      pCreateInfo = &local_create_info;
   }

   const size_t layout_size =
      offsetof(struct vn_descriptor_set_layout, bindings[last_binding + 1]);
   /* allocated with the device scope */
   struct vn_descriptor_set_layout *layout =
      vk_zalloc(alloc, layout_size, VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
   if (!layout) {
      vk_free(alloc, local_bindings);
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   vn_object_base_init(&layout->base, VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,
                       &dev->base);

   vn_descriptor_set_layout_init(dev, pCreateInfo, last_binding, layout);

   vk_free(alloc, local_bindings);

   *pSetLayout = vn_descriptor_set_layout_to_handle(layout);

   return VK_SUCCESS;
}

void
vn_DestroyDescriptorSetLayout(VkDevice device,
                              VkDescriptorSetLayout descriptorSetLayout,
                              const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_set_layout *layout =
      vn_descriptor_set_layout_from_handle(descriptorSetLayout);

   if (!layout)
      return;

   vn_descriptor_set_layout_unref(dev, layout);
}

/* descriptor pool commands */

VkResult
vn_CreateDescriptorPool(VkDevice device,
                        const VkDescriptorPoolCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkDescriptorPool *pDescriptorPool)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   const VkDescriptorPoolInlineUniformBlockCreateInfo *iub_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO);

   uint32_t mutable_states_count = 0;
   for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
      const VkDescriptorPoolSize *pool_size = &pCreateInfo->pPoolSizes[i];
      if (pool_size->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT)
         mutable_states_count++;
   }
   struct vn_descriptor_pool *pool;
   struct vn_descriptor_pool_state_mutable *mutable_states;

   VK_MULTIALLOC(ma);
   vk_multialloc_add(&ma, &pool, __typeof__(*pool), 1);
   vk_multialloc_add(&ma, &mutable_states, __typeof__(*mutable_states),
                     mutable_states_count);

   if (!vk_multialloc_zalloc(&ma, alloc, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT))
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&pool->base, VK_OBJECT_TYPE_DESCRIPTOR_POOL,
                       &dev->base);

   pool->allocator = *alloc;
   pool->mutable_states = mutable_states;

   const VkMutableDescriptorTypeCreateInfoEXT *mutable_descriptor_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);

   /* Without VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, the set
    * allocation must not fail due to a fragmented pool per spec. In this
    * case, set allocation can be asynchronous with pool resource tracking.
    */
   pool->async_set_allocation =
      !VN_PERF(NO_ASYNC_SET_ALLOC) &&
      !(pCreateInfo->flags &
        VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT);

   pool->max.set_count = pCreateInfo->maxSets;

   if (iub_info)
      pool->max.iub_binding_count = iub_info->maxInlineUniformBlockBindings;

   uint32_t next_mutable_state = 0;
   for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
      const VkDescriptorPoolSize *pool_size = &pCreateInfo->pPoolSizes[i];
      const uint32_t type_index = vn_descriptor_type_index(pool_size->type);

      assert(type_index < VN_NUM_DESCRIPTOR_TYPES);

      if (pool_size->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT) {
         struct vn_descriptor_pool_state_mutable *mutable_state = NULL;
         BITSET_DECLARE(mutable_types, VN_NUM_DESCRIPTOR_TYPES);
         if (!mutable_descriptor_info ||
             i >= mutable_descriptor_info->mutableDescriptorTypeListCount) {
            BITSET_ONES(mutable_types);
         } else {
            const VkMutableDescriptorTypeListEXT *list =
               &mutable_descriptor_info->pMutableDescriptorTypeLists[i];

            for (uint32_t j = 0; j < list->descriptorTypeCount; j++) {
               BITSET_SET(mutable_types, vn_descriptor_type_index(
                                            list->pDescriptorTypes[j]));
            }
         }
         for (uint32_t j = 0; j < next_mutable_state; j++) {
            if (BITSET_EQUAL(mutable_types, pool->mutable_states[j].types)) {
               mutable_state = &pool->mutable_states[j];
               break;
            }
         }

         if (!mutable_state) {
            /* The application must ensure that partial overlap does not
             * exist in pPoolSizes, so this entry must have a disjoint set
             * of types.
             */
            mutable_state = &pool->mutable_states[next_mutable_state++];
            BITSET_COPY(mutable_state->types, mutable_types);
         }

         mutable_state->max += pool_size->descriptorCount;
      } else {
         pool->max.descriptor_counts[type_index] +=
            pool_size->descriptorCount;
      }

      assert((pCreateInfo->pPoolSizes[i].type !=
              VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) ||
             iub_info);
   }

   pool->mutable_states_count = next_mutable_state;
   list_inithead(&pool->descriptor_sets);

   VkDescriptorPool pool_handle = vn_descriptor_pool_to_handle(pool);
   vn_async_vkCreateDescriptorPool(dev->primary_ring, device, pCreateInfo,
                                   NULL, &pool_handle);

   vn_tls_set_async_pipeline_create();

   *pDescriptorPool = pool_handle;

   return VK_SUCCESS;
}

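/* Illustrative app-side creation (not driver code) that takes the async
 * path above by omitting VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT:
 *
 *    const VkDescriptorPoolSize sizes[] = {
 *       { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 16 },
 *       { VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 16 },
 *    };
 *    const VkDescriptorPoolCreateInfo info = {
 *       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
 *       .maxSets = 8,
 *       .poolSizeCount = 2,
 *       .pPoolSizes = sizes,
 *    };
 */
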
void
vn_DestroyDescriptorPool(VkDevice device,
                         VkDescriptorPool descriptorPool,
                         const VkAllocationCallbacks *pAllocator)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_pool *pool =
      vn_descriptor_pool_from_handle(descriptorPool);
   const VkAllocationCallbacks *alloc;

   if (!pool)
      return;

   alloc = pAllocator ? pAllocator : &pool->allocator;

   vn_async_vkDestroyDescriptorPool(dev->primary_ring, device, descriptorPool,
                                    NULL);

   list_for_each_entry_safe(struct vn_descriptor_set, set,
                            &pool->descriptor_sets, head)
      vn_descriptor_set_destroy(dev, set, alloc);

   vn_object_base_fini(&pool->base);
   vk_free(alloc, pool);
}

static struct vn_descriptor_pool_state_mutable *
vn_find_mutable_state_for_binding(
   const struct vn_descriptor_pool *pool,
   const struct vn_descriptor_set_layout_binding *binding)
{
   for (uint32_t i = 0; i < pool->mutable_states_count; i++) {
      struct vn_descriptor_pool_state_mutable *mutable_state =
         &pool->mutable_states[i];
      BITSET_DECLARE(shared_types, VN_NUM_DESCRIPTOR_TYPES);
      BITSET_AND(shared_types, mutable_state->types,
                 binding->mutable_descriptor_types);

      if (BITSET_EQUAL(shared_types, binding->mutable_descriptor_types)) {
         /* The application must ensure that partial overlap does not
          * exist in pPoolSizes, so there should be at most one matching
          * pool entry for a binding.
          */
         return mutable_state;
      }
   }
   return NULL;
}

static void
vn_pool_restore_mutable_states(struct vn_descriptor_pool *pool,
                               const struct vn_descriptor_set_layout *layout,
                               uint32_t max_binding_index,
                               uint32_t last_binding_descriptor_count)
{
   for (uint32_t i = 0; i <= max_binding_index; i++) {
      if (layout->bindings[i].type != VK_DESCRIPTOR_TYPE_MUTABLE_EXT)
         continue;

      const uint32_t count = i == layout->last_binding
                                ? last_binding_descriptor_count
                                : layout->bindings[i].count;

      struct vn_descriptor_pool_state_mutable *mutable_state =
         vn_find_mutable_state_for_binding(pool, &layout->bindings[i]);
      assert(mutable_state && mutable_state->used >= count);
      mutable_state->used -= count;
   }
}

static bool
vn_descriptor_pool_alloc_descriptors(
   struct vn_descriptor_pool *pool,
   const struct vn_descriptor_set_layout *layout,
   uint32_t last_binding_descriptor_count)
{
   struct vn_descriptor_pool_state recovery;

   if (!pool->async_set_allocation)
      return true;

   if (pool->used.set_count == pool->max.set_count)
      return false;

   /* back up the current pool state for recovery */
   recovery = pool->used;

   ++pool->used.set_count;

   uint32_t binding_index = 0;

   while (binding_index <= layout->last_binding) {
      const VkDescriptorType type = layout->bindings[binding_index].type;
      const uint32_t count = binding_index == layout->last_binding
                                ? last_binding_descriptor_count
                                : layout->bindings[binding_index].count;

      /* Allocation may fail if a call to vkAllocateDescriptorSets would cause
       * the total number of inline uniform block bindings allocated from the
       * pool to exceed the value of
       * VkDescriptorPoolInlineUniformBlockCreateInfo::maxInlineUniformBlockBindings
       * used to create the descriptor pool.
       *
       * If descriptorCount is zero this binding entry is reserved and the
       * resource must not be accessed from any stage via this binding within
       * any pipeline using the set layout.
       */
      if (type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK && count != 0) {
         if (++pool->used.iub_binding_count > pool->max.iub_binding_count)
            goto fail;
      }

      if (type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT) {
         /* A mutable descriptor can be allocated if both are satisfied:
          * - vn_descriptor_pool_state_mutable::types is a superset
          * - vn_descriptor_pool_state_mutable::{max - used} is enough
          */
         struct vn_descriptor_pool_state_mutable *mutable_state =
            vn_find_mutable_state_for_binding(
               pool, &layout->bindings[binding_index]);

         if (mutable_state &&
             mutable_state->max - mutable_state->used >= count) {
            mutable_state->used += count;
         } else {
            goto fail;
         }
      } else {
         const uint32_t type_index = vn_descriptor_type_index(type);
         pool->used.descriptor_counts[type_index] += count;

         if (pool->used.descriptor_counts[type_index] >
             pool->max.descriptor_counts[type_index])
            goto fail;
      }
      binding_index++;
   }

   return true;

fail:
   /* restore the pool state to before this allocation */
   pool->used = recovery;
   if (binding_index > 0) {
      vn_pool_restore_mutable_states(pool, layout, binding_index - 1,
                                     last_binding_descriptor_count);
   }
   return false;
}

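/* Worked example of the accounting above: with max.set_count = 2 and
 * max.descriptor_counts[UNIFORM_BUFFER] = 4, allocating a set whose layout
 * holds 3 uniform buffers leaves used at {1 set, 3 UBOs}; a second
 * identical allocation overflows (3 + 3 > 4), hits the fail label, and the
 * pool state rolls back to {1 set, 3 UBOs}.
 */
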
static void
vn_descriptor_pool_free_descriptors(
   struct vn_descriptor_pool *pool,
   const struct vn_descriptor_set_layout *layout,
   uint32_t last_binding_descriptor_count)
{
   if (!pool->async_set_allocation)
      return;

   vn_pool_restore_mutable_states(pool, layout, layout->last_binding,
                                  last_binding_descriptor_count);

   for (uint32_t i = 0; i <= layout->last_binding; i++) {
      const uint32_t count = i == layout->last_binding
                                ? last_binding_descriptor_count
                                : layout->bindings[i].count;

      if (layout->bindings[i].type != VK_DESCRIPTOR_TYPE_MUTABLE_EXT) {
         pool->used.descriptor_counts[vn_descriptor_type_index(
            layout->bindings[i].type)] -= count;

         if (layout->bindings[i].type ==
             VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
            --pool->used.iub_binding_count;
      }
   }

   --pool->used.set_count;
}

static void
vn_descriptor_pool_reset_descriptors(struct vn_descriptor_pool *pool)
{
   if (!pool->async_set_allocation)
      return;

   memset(&pool->used, 0, sizeof(pool->used));

   for (uint32_t i = 0; i < pool->mutable_states_count; i++)
      pool->mutable_states[i].used = 0;
}

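/* Resetting a pool implicitly frees every descriptor set allocated from
 * it, so vn_ResetDescriptorPool destroys the driver-side set objects
 * directly, without a matching vkFreeDescriptorSets.
 */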
VkResult
vn_ResetDescriptorPool(VkDevice device,
                       VkDescriptorPool descriptorPool,
                       VkDescriptorPoolResetFlags flags)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_pool *pool =
      vn_descriptor_pool_from_handle(descriptorPool);
   const VkAllocationCallbacks *alloc = &pool->allocator;

   vn_async_vkResetDescriptorPool(dev->primary_ring, device, descriptorPool,
                                  flags);

   list_for_each_entry_safe(struct vn_descriptor_set, set,
                            &pool->descriptor_sets, head)
      vn_descriptor_set_destroy(dev, set, alloc);

   vn_descriptor_pool_reset_descriptors(pool);

   return VK_SUCCESS;
}

/* descriptor set commands */

VkResult
vn_AllocateDescriptorSets(VkDevice device,
                          const VkDescriptorSetAllocateInfo *pAllocateInfo,
                          VkDescriptorSet *pDescriptorSets)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_pool *pool =
      vn_descriptor_pool_from_handle(pAllocateInfo->descriptorPool);
   const VkAllocationCallbacks *alloc = &pool->allocator;
   const VkDescriptorSetVariableDescriptorCountAllocateInfo *variable_info =
      NULL;
   VkResult result;

   /* 14.2.3. Allocation of Descriptor Sets
    *
    * If descriptorSetCount is zero or this structure is not included in
    * the pNext chain, then the variable lengths are considered to be zero.
    */
   variable_info = vk_find_struct_const(
      pAllocateInfo->pNext,
      DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO);

   if (variable_info && !variable_info->descriptorSetCount)
      variable_info = NULL;

   for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      struct vn_descriptor_set_layout *layout =
         vn_descriptor_set_layout_from_handle(pAllocateInfo->pSetLayouts[i]);
      uint32_t last_binding_descriptor_count = 0;
      struct vn_descriptor_set *set = NULL;

      /* 14.2.3. Allocation of Descriptor Sets
       *
       * If VkDescriptorSetAllocateInfo::pSetLayouts[i] does not include a
       * variable count descriptor binding, then pDescriptorCounts[i] is
       * ignored.
       */
      if (!layout->has_variable_descriptor_count) {
         last_binding_descriptor_count =
            layout->bindings[layout->last_binding].count;
      } else if (variable_info) {
         last_binding_descriptor_count = variable_info->pDescriptorCounts[i];
      }

      if (!vn_descriptor_pool_alloc_descriptors(
             pool, layout, last_binding_descriptor_count)) {
         pDescriptorSets[i] = VK_NULL_HANDLE;
         result = VK_ERROR_OUT_OF_POOL_MEMORY;
         goto fail;
      }

      set = vk_zalloc(alloc, sizeof(*set), VN_DEFAULT_ALIGN,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
      if (!set) {
         vn_descriptor_pool_free_descriptors(pool, layout,
                                             last_binding_descriptor_count);
         pDescriptorSets[i] = VK_NULL_HANDLE;
         result = VK_ERROR_OUT_OF_HOST_MEMORY;
         goto fail;
      }

      vn_object_base_init(&set->base, VK_OBJECT_TYPE_DESCRIPTOR_SET,
                          &dev->base);

      /* We might reorder vkCmdBindDescriptorSets after
       * vkDestroyDescriptorSetLayout due to batching.  The spec says
       *
       *   VkDescriptorSetLayout objects may be accessed by commands that
       *   operate on descriptor sets allocated using that layout, and those
       *   descriptor sets must not be updated with vkUpdateDescriptorSets
       *   after the descriptor set layout has been destroyed. Otherwise, a
       *   VkDescriptorSetLayout object passed as a parameter to create
       *   another object is not further accessed by that object after the
       *   duration of the command it is passed into.
       *
       * It is ambiguous but the reordering is likely invalid.  Let's keep the
       * layout alive with the set to defer vkDestroyDescriptorSetLayout.
       */
      set->layout = vn_descriptor_set_layout_ref(dev, layout);
      set->last_binding_descriptor_count = last_binding_descriptor_count;
      list_addtail(&set->head, &pool->descriptor_sets);

      VkDescriptorSet set_handle = vn_descriptor_set_to_handle(set);
      pDescriptorSets[i] = set_handle;
   }

   if (pool->async_set_allocation) {
      vn_async_vkAllocateDescriptorSets(dev->primary_ring, device,
                                        pAllocateInfo, pDescriptorSets);
   } else {
      result = vn_call_vkAllocateDescriptorSets(
         dev->primary_ring, device, pAllocateInfo, pDescriptorSets);
      if (result != VK_SUCCESS)
         goto fail;
   }

   return VK_SUCCESS;

fail:
   for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      struct vn_descriptor_set *set =
         vn_descriptor_set_from_handle(pDescriptorSets[i]);
      if (!set)
         break;

      vn_descriptor_pool_free_descriptors(pool, set->layout,
                                          set->last_binding_descriptor_count);

      vn_descriptor_set_destroy(dev, set, alloc);
   }

   memset(pDescriptorSets, 0,
          sizeof(*pDescriptorSets) * pAllocateInfo->descriptorSetCount);

   return vn_error(dev->instance, result);
}

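/* Illustrative app-side allocation (not driver code) with a variable-sized
 * last binding:
 *
 *    const uint32_t counts[] = { 64 };
 *    const VkDescriptorSetVariableDescriptorCountAllocateInfo var_info = {
 *       .sType =
 *          VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
 *       .descriptorSetCount = 1,
 *       .pDescriptorCounts = counts,
 *    };
 *    // chained into VkDescriptorSetAllocateInfo::pNext
 */
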
VkResult
vn_FreeDescriptorSets(VkDevice device,
                      VkDescriptorPool descriptorPool,
                      uint32_t descriptorSetCount,
                      const VkDescriptorSet *pDescriptorSets)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_pool *pool =
      vn_descriptor_pool_from_handle(descriptorPool);
   const VkAllocationCallbacks *alloc = &pool->allocator;

   vn_async_vkFreeDescriptorSets(dev->primary_ring, device, descriptorPool,
                                 descriptorSetCount, pDescriptorSets);

   for (uint32_t i = 0; i < descriptorSetCount; i++) {
      struct vn_descriptor_set *set =
         vn_descriptor_set_from_handle(pDescriptorSets[i]);

      if (!set)
         continue;

      vn_descriptor_set_destroy(dev, set, alloc);
   }

   return VK_SUCCESS;
}

static struct vn_update_descriptor_sets *
vn_update_descriptor_sets_alloc(uint32_t write_count,
                                uint32_t image_count,
                                uint32_t buffer_count,
                                uint32_t view_count,
                                uint32_t iub_count,
                                const VkAllocationCallbacks *alloc,
                                VkSystemAllocationScope scope)
{
   const size_t writes_offset = sizeof(struct vn_update_descriptor_sets);
   const size_t images_offset =
      writes_offset + sizeof(VkWriteDescriptorSet) * write_count;
   const size_t buffers_offset =
      images_offset + sizeof(VkDescriptorImageInfo) * image_count;
   const size_t views_offset =
      buffers_offset + sizeof(VkDescriptorBufferInfo) * buffer_count;
   const size_t iubs_offset =
      views_offset + sizeof(VkBufferView) * view_count;
   const size_t alloc_size =
      iubs_offset +
      sizeof(VkWriteDescriptorSetInlineUniformBlock) * iub_count;

   void *storage = vk_alloc(alloc, alloc_size, VN_DEFAULT_ALIGN, scope);
   if (!storage)
      return NULL;

   struct vn_update_descriptor_sets *update = storage;
   update->write_count = write_count;
   update->writes = storage + writes_offset;
   update->images = storage + images_offset;
   update->buffers = storage + buffers_offset;
   update->views = storage + views_offset;
   update->iubs = storage + iubs_offset;

   return update;
}

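/* The single allocation above is laid out back to back as
 *
 *    [vn_update_descriptor_sets][writes][images][buffers][views][iubs]
 *
 * so the pointer arrays remain valid for the update's lifetime and one
 * vk_free releases everything.
 */
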
bool
vn_should_sanitize_descriptor_set_writes(
   uint32_t write_count,
   const VkWriteDescriptorSet *writes,
   VkPipelineLayout pipeline_layout_handle)
{
   /* the encoder does not ignore
    * VkWriteDescriptorSet::{pImageInfo,pBufferInfo,pTexelBufferView} when it
    * should
    *
    * TODO make the encoder smarter
    */
   const struct vn_pipeline_layout *pipeline_layout =
      vn_pipeline_layout_from_handle(pipeline_layout_handle);
   for (uint32_t i = 0; i < write_count; i++) {
      const struct vn_descriptor_set_layout *set_layout =
         pipeline_layout
            ? pipeline_layout->push_descriptor_set_layout
            : vn_descriptor_set_from_handle(writes[i].dstSet)->layout;
      const struct vn_descriptor_set_layout_binding *binding =
         &set_layout->bindings[writes[i].dstBinding];
      const VkWriteDescriptorSet *write = &writes[i];
      const VkDescriptorImageInfo *imgs = write->pImageInfo;

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         if (write->pBufferInfo != NULL || write->pTexelBufferView != NULL)
            return true;

         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            switch (write->descriptorType) {
            case VK_DESCRIPTOR_TYPE_SAMPLER:
               if (imgs[j].imageView != VK_NULL_HANDLE)
                  return true;
               FALLTHROUGH;
            case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
               if (binding->has_immutable_samplers &&
                   imgs[j].sampler != VK_NULL_HANDLE)
                  return true;
               break;
            case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
            case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
            case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
               if (imgs[j].sampler != VK_NULL_HANDLE)
                  return true;
               break;
            default:
               break;
            }
         }
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         if (write->pImageInfo != NULL || write->pBufferInfo != NULL)
            return true;

         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         if (write->pImageInfo != NULL || write->pTexelBufferView != NULL)
            return true;

         break;
      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
      case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
      default:
         if (write->pImageInfo != NULL || write->pBufferInfo != NULL ||
             write->pTexelBufferView != NULL)
            return true;

         break;
      }
   }

   return false;
}

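/* Example of a write the check above catches: a
 * VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER write may legally leave pImageInfo as
 * a non-NULL garbage pointer, which the encoder would otherwise follow;
 * such writes are rewritten below with the unused pointers cleared.
 */
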
struct vn_update_descriptor_sets *
vn_update_descriptor_sets_parse_writes(uint32_t write_count,
                                       const VkWriteDescriptorSet *writes,
                                       const VkAllocationCallbacks *alloc,
                                       VkPipelineLayout pipeline_layout_handle)
{
   uint32_t img_count = 0;
   for (uint32_t i = 0; i < write_count; i++) {
      const VkWriteDescriptorSet *write = &writes[i];
      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         img_count += write->descriptorCount;
         break;
      default:
         break;
      }
   }

   struct vn_update_descriptor_sets *update =
      vn_update_descriptor_sets_alloc(write_count, img_count, 0, 0, 0, alloc,
                                      VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
   if (!update)
      return NULL;

   /* the encoder does not ignore
    * VkWriteDescriptorSet::{pImageInfo,pBufferInfo,pTexelBufferView} when it
    * should
    *
    * TODO make the encoder smarter
    */
   memcpy(update->writes, writes, sizeof(*writes) * write_count);
   img_count = 0;
   const struct vn_pipeline_layout *pipeline_layout =
      vn_pipeline_layout_from_handle(pipeline_layout_handle);
   for (uint32_t i = 0; i < write_count; i++) {
      const struct vn_descriptor_set_layout *set_layout =
         pipeline_layout
            ? pipeline_layout->push_descriptor_set_layout
            : vn_descriptor_set_from_handle(writes[i].dstSet)->layout;
      const struct vn_descriptor_set_layout_binding *binding =
         &set_layout->bindings[writes[i].dstBinding];
      VkWriteDescriptorSet *write = &update->writes[i];
      VkDescriptorImageInfo *imgs = &update->images[img_count];

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         memcpy(imgs, write->pImageInfo,
                sizeof(*imgs) * write->descriptorCount);
         img_count += write->descriptorCount;

         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            switch (write->descriptorType) {
            case VK_DESCRIPTOR_TYPE_SAMPLER:
               imgs[j].imageView = VK_NULL_HANDLE;
               FALLTHROUGH;
            case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
               if (binding->has_immutable_samplers)
                  imgs[j].sampler = VK_NULL_HANDLE;
               break;
            case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
            case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
            case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
               imgs[j].sampler = VK_NULL_HANDLE;
               break;
            default:
               break;
            }
         }

         write->pImageInfo = imgs;
         write->pBufferInfo = NULL;
         write->pTexelBufferView = NULL;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         write->pImageInfo = NULL;
         write->pBufferInfo = NULL;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         write->pImageInfo = NULL;
         write->pTexelBufferView = NULL;
         break;
      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
      case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
      default:
         write->pImageInfo = NULL;
         write->pBufferInfo = NULL;
         write->pTexelBufferView = NULL;
         break;
      }
   }

   return update;
}

void
vn_UpdateDescriptorSets(VkDevice device,
                        uint32_t descriptorWriteCount,
                        const VkWriteDescriptorSet *pDescriptorWrites,
                        uint32_t descriptorCopyCount,
                        const VkCopyDescriptorSet *pDescriptorCopies)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;

   struct vn_update_descriptor_sets *update =
      vn_update_descriptor_sets_parse_writes(
         descriptorWriteCount, pDescriptorWrites, alloc, VK_NULL_HANDLE);
   if (!update) {
      /* TODO update one-by-one? */
      vn_log(dev->instance, "TODO descriptor set update ignored due to OOM");
      return;
   }

   vn_async_vkUpdateDescriptorSets(dev->primary_ring, device,
                                   update->write_count, update->writes,
                                   descriptorCopyCount, pDescriptorCopies);

   vk_free(alloc, update);
}

/* descriptor update template commands */

static struct vn_update_descriptor_sets *
vn_update_descriptor_sets_parse_template(
   const VkDescriptorUpdateTemplateCreateInfo *create_info,
   const VkAllocationCallbacks *alloc,
   struct vn_descriptor_update_template_entry *entries)
{
   uint32_t img_count = 0;
   uint32_t buf_count = 0;
   uint32_t view_count = 0;
   uint32_t iub_count = 0;
   for (uint32_t i = 0; i < create_info->descriptorUpdateEntryCount; i++) {
      const VkDescriptorUpdateTemplateEntry *entry =
         &create_info->pDescriptorUpdateEntries[i];

      switch (entry->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         img_count += entry->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         view_count += entry->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         buf_count += entry->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         iub_count += 1;
         break;
      case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
         break;
      default:
         unreachable("unhandled descriptor type");
         break;
      }
   }

   struct vn_update_descriptor_sets *update = vn_update_descriptor_sets_alloc(
      create_info->descriptorUpdateEntryCount, img_count, buf_count,
      view_count, iub_count, alloc, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!update)
      return NULL;

   img_count = 0;
   buf_count = 0;
   view_count = 0;
   iub_count = 0;
   for (uint32_t i = 0; i < create_info->descriptorUpdateEntryCount; i++) {
      const VkDescriptorUpdateTemplateEntry *entry =
         &create_info->pDescriptorUpdateEntries[i];
      VkWriteDescriptorSet *write = &update->writes[i];

      write->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
      write->pNext = NULL;
      write->dstBinding = entry->dstBinding;
      write->dstArrayElement = entry->dstArrayElement;
      write->descriptorCount = entry->descriptorCount;
      write->descriptorType = entry->descriptorType;

      entries[i].offset = entry->offset;
      entries[i].stride = entry->stride;

      switch (entry->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         write->pImageInfo = &update->images[img_count];
         write->pBufferInfo = NULL;
         write->pTexelBufferView = NULL;
         img_count += entry->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         write->pImageInfo = NULL;
         write->pBufferInfo = NULL;
         write->pTexelBufferView = &update->views[view_count];
         view_count += entry->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         write->pImageInfo = NULL;
         write->pBufferInfo = &update->buffers[buf_count];
         write->pTexelBufferView = NULL;
         buf_count += entry->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         write->pImageInfo = NULL;
         write->pBufferInfo = NULL;
         write->pTexelBufferView = NULL;
         VkWriteDescriptorSetInlineUniformBlock *iub_data =
            &update->iubs[iub_count];
         iub_data->sType =
            VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK;
         iub_data->pNext = write->pNext;
         iub_data->dataSize = entry->descriptorCount;
         write->pNext = iub_data;
         iub_count += 1;
         break;
      default:
         break;
      }
   }

   return update;
}

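/* The parsed template is a prebuilt VkWriteDescriptorSet array whose info
 * pointers alias storage in the same vn_update_descriptor_sets allocation.
 * vn_update_descriptor_set_with_template_locked patches the payloads in
 * place, which is why template updates must hold templ->mutex.
 */
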
VkResult
vn_CreateDescriptorUpdateTemplate(
   VkDevice device,
   const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   const size_t templ_size =
      offsetof(struct vn_descriptor_update_template,
               entries[pCreateInfo->descriptorUpdateEntryCount + 1]);
   struct vn_descriptor_update_template *templ = vk_zalloc(
      alloc, templ_size, VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!templ)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&templ->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, &dev->base);

   templ->update = vn_update_descriptor_sets_parse_template(
      pCreateInfo, alloc, templ->entries);
   if (!templ->update) {
      vk_free(alloc, templ);
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   templ->is_push_descriptor =
      pCreateInfo->templateType ==
      VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR;
   if (templ->is_push_descriptor) {
      templ->pipeline_bind_point = pCreateInfo->pipelineBindPoint;
      templ->pipeline_layout =
         vn_pipeline_layout_from_handle(pCreateInfo->pipelineLayout);
   }

   mtx_init(&templ->mutex, mtx_plain);

   /* no host object */
   VkDescriptorUpdateTemplate templ_handle =
      vn_descriptor_update_template_to_handle(templ);
   *pDescriptorUpdateTemplate = templ_handle;

   return VK_SUCCESS;
}

void
vn_DestroyDescriptorUpdateTemplate(
   VkDevice device,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const VkAllocationCallbacks *pAllocator)
{
   VN_TRACE_FUNC();
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_update_template *templ =
      vn_descriptor_update_template_from_handle(descriptorUpdateTemplate);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!templ)
      return;

   /* no host object */
   vk_free(alloc, templ->update);
   mtx_destroy(&templ->mutex);

   vn_object_base_fini(&templ->base);
   vk_free(alloc, templ);
}

struct vn_update_descriptor_sets *
vn_update_descriptor_set_with_template_locked(
   struct vn_descriptor_update_template *templ,
   struct vn_descriptor_set *set,
   const void *data)
{
   struct vn_update_descriptor_sets *update = templ->update;

   for (uint32_t i = 0; i < update->write_count; i++) {
      const struct vn_descriptor_update_template_entry *entry =
         &templ->entries[i];

      const struct vn_descriptor_set_layout *set_layout =
         templ->is_push_descriptor
            ? templ->pipeline_layout->push_descriptor_set_layout
            : set->layout;
      const struct vn_descriptor_set_layout_binding *binding =
         &set_layout->bindings[update->writes[i].dstBinding];

      VkWriteDescriptorSet *write = &update->writes[i];

      write->dstSet = templ->is_push_descriptor
                         ? VK_NULL_HANDLE
                         : vn_descriptor_set_to_handle(set);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            const bool need_sampler =
               (write->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER ||
                write->descriptorType ==
                   VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
               !binding->has_immutable_samplers;
            const bool need_view =
               write->descriptorType != VK_DESCRIPTOR_TYPE_SAMPLER;
            const VkDescriptorImageInfo *src =
               data + entry->offset + entry->stride * j;
            VkDescriptorImageInfo *dst =
               (VkDescriptorImageInfo *)&write->pImageInfo[j];

            dst->sampler = need_sampler ? src->sampler : VK_NULL_HANDLE;
            dst->imageView = need_view ? src->imageView : VK_NULL_HANDLE;
            dst->imageLayout = src->imageLayout;
         }
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            const VkBufferView *src =
               data + entry->offset + entry->stride * j;
            VkBufferView *dst = (VkBufferView *)&write->pTexelBufferView[j];
            *dst = *src;
         }
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            const VkDescriptorBufferInfo *src =
               data + entry->offset + entry->stride * j;
            VkDescriptorBufferInfo *dst =
               (VkDescriptorBufferInfo *)&write->pBufferInfo[j];
            *dst = *src;
         }
         break;
      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:;
         VkWriteDescriptorSetInlineUniformBlock *iub_data =
            (VkWriteDescriptorSetInlineUniformBlock *)vk_find_struct_const(
               write->pNext, WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK);
         iub_data->pData = data + entry->offset;
         break;
      case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
         break;
      default:
         unreachable("unhandled descriptor type");
         break;
      }
   }
   return update;
}

void
vn_UpdateDescriptorSetWithTemplate(
   VkDevice device,
   VkDescriptorSet descriptorSet,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_update_template *templ =
      vn_descriptor_update_template_from_handle(descriptorUpdateTemplate);
   struct vn_descriptor_set *set =
      vn_descriptor_set_from_handle(descriptorSet);

   mtx_lock(&templ->mutex);

   struct vn_update_descriptor_sets *update =
      vn_update_descriptor_set_with_template_locked(templ, set, pData);

   vn_async_vkUpdateDescriptorSets(dev->primary_ring, device,
                                   update->write_count, update->writes, 0,
                                   NULL);

   mtx_unlock(&templ->mutex);
}