/*
 * Copyright © 2022 Collabora Ltd. and Red Hat Inc.
 * SPDX-License-Identifier: MIT
 */
#include "nvk_descriptor_set.h"

#include "nvk_buffer.h"
#include "nvk_buffer_view.h"
#include "nvk_descriptor_set_layout.h"
#include "nvk_device.h"
#include "nvk_entrypoints.h"
#include "nvk_image_view.h"
#include "nvk_physical_device.h"
#include "nvk_sampler.h"

#include "nouveau_bo.h"

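/* Align v up to alignment a, which must be a non-zero power of two. */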
static inline uint32_t
align_u32(uint32_t v, uint32_t a)
{
   assert(a != 0 && a == (a & -a));
   return (v + a - 1) & ~(a - 1);
}

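/* Get a host pointer to the descriptor data for the given binding and array
 * element within the set's descriptor buffer.  If size_out is non-NULL, it
 * receives the number of bytes remaining in the buffer from that offset.
 */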
static inline void *
desc_ubo_data(struct nvk_descriptor_set *set, uint32_t binding,
              uint32_t elem, uint32_t *size_out)
{
   const struct nvk_descriptor_set_binding_layout *binding_layout =
      &set->layout->binding[binding];

   uint32_t offset = binding_layout->offset + elem * binding_layout->stride;
   assert(offset < set->size);

   if (size_out != NULL)
      *size_out = set->size - offset;

   return (char *)set->mapped_ptr + offset;
}

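/* Copy desc_size bytes of raw descriptor data into the set at the given
 * binding and array element.
 */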
static void
write_desc(struct nvk_descriptor_set *set, uint32_t binding, uint32_t elem,
           const void *desc_data, size_t desc_size)
{
   ASSERTED uint32_t dst_size;
   void *dst = desc_ubo_data(set, binding, elem, &dst_size);
   assert(desc_size <= dst_size);
   memcpy(dst, desc_data, desc_size);
}

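/* Write an image and/or sampler descriptor.  Multi-plane formats get one
 * nvk_image_descriptor per plane.  For sampler and combined image/sampler
 * descriptors, the sampler comes from the binding's immutable samplers when
 * present, otherwise from info->sampler.
 */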
static void
write_image_view_desc(struct nvk_descriptor_set *set,
                      const VkDescriptorImageInfo *const info,
                      uint32_t binding, uint32_t elem,
                      VkDescriptorType descriptor_type)
{
   struct nvk_image_descriptor desc[3] = { };
   uint8_t plane_count = 1;

   if (descriptor_type != VK_DESCRIPTOR_TYPE_SAMPLER &&
       info && info->imageView != VK_NULL_HANDLE) {
      VK_FROM_HANDLE(nvk_image_view, view, info->imageView);
      plane_count = view->plane_count;
      if (descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
         /* Storage images are always single plane */
         assert(plane_count == 1);
         uint8_t plane = 0;

         assert(view->planes[plane].storage_desc_index > 0);
         assert(view->planes[plane].storage_desc_index < (1 << 20));

         desc[plane].image_index = view->planes[plane].storage_desc_index;
      } else {
         for (uint8_t plane = 0; plane < plane_count; plane++) {
            assert(view->planes[plane].sampled_desc_index > 0);
            assert(view->planes[plane].sampled_desc_index < (1 << 20));
            desc[plane].image_index = view->planes[plane].sampled_desc_index;
         }
      }
   }

   if (descriptor_type == VK_DESCRIPTOR_TYPE_SAMPLER ||
       descriptor_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
      const struct nvk_descriptor_set_binding_layout *binding_layout =
         &set->layout->binding[binding];

      struct nvk_sampler *sampler;
      if (binding_layout->immutable_samplers) {
         sampler = binding_layout->immutable_samplers[elem];
      } else {
         sampler = nvk_sampler_from_handle(info->sampler);
      }

      plane_count = MAX2(plane_count, sampler->plane_count);

      for (uint8_t plane = 0; plane < plane_count; plane++) {
         /* We need to replicate the last sampler plane out to all image
          * planes due to sampler table entry limitations. See
          * nvk_CreateSampler in nvk_sampler.c for more details.
          */
         uint8_t sampler_plane = MIN2(plane, sampler->plane_count - 1);
         assert(sampler->planes[sampler_plane].desc_index < (1 << 12));
         desc[plane].sampler_index = sampler->planes[sampler_plane].desc_index;
      }
   }
   write_desc(set, binding, elem, desc, sizeof(desc[0]) * plane_count);
}

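/* Write a uniform or storage buffer descriptor as a GPU base address and
 * size pair.
 */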
static void
write_buffer_desc(struct nvk_descriptor_set *set,
                  const VkDescriptorBufferInfo *const info,
                  uint32_t binding, uint32_t elem)
{
   VK_FROM_HANDLE(nvk_buffer, buffer, info->buffer);

   const struct nvk_addr_range addr_range =
      nvk_buffer_addr_range(buffer, info->offset, info->range);
   assert(addr_range.range <= UINT32_MAX);

   const struct nvk_buffer_address desc = {
      .base_addr = addr_range.addr,
      .size = addr_range.range,
   };
   write_desc(set, binding, elem, &desc, sizeof(desc));
}

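/* Dynamic buffer descriptors do not live in the descriptor buffer.  They
 * are stashed in the set's dynamic_buffers array, presumably so the dynamic
 * offsets can be applied when the set is bound.
 */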
static void
write_dynamic_buffer_desc(struct nvk_descriptor_set *set,
                          const VkDescriptorBufferInfo *const info,
                          uint32_t binding, uint32_t elem)
{
   VK_FROM_HANDLE(nvk_buffer, buffer, info->buffer);
   const struct nvk_descriptor_set_binding_layout *binding_layout =
      &set->layout->binding[binding];

   const struct nvk_addr_range addr_range =
      nvk_buffer_addr_range(buffer, info->offset, info->range);
   assert(addr_range.range <= UINT32_MAX);

   struct nvk_buffer_address *desc =
      &set->dynamic_buffers[binding_layout->dynamic_buffer_index + elem];
   *desc = (struct nvk_buffer_address){
      .base_addr = addr_range.addr,
      .size = addr_range.range,
   };
}

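/* Write a texel buffer descriptor.  A NULL buffer view writes a zeroed
 * descriptor.
 */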
static void
write_buffer_view_desc(struct nvk_descriptor_set *set,
                       const VkBufferView bufferView,
                       uint32_t binding, uint32_t elem)
{
   struct nvk_image_descriptor desc = { };
   if (bufferView != VK_NULL_HANDLE) {
      VK_FROM_HANDLE(nvk_buffer_view, view, bufferView);

      assert(view->desc_index < (1 << 20));
      desc.image_index = view->desc_index;
   }
   write_desc(set, binding, elem, &desc, sizeof(desc));
}

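/* Inline uniform block data is stored directly in the descriptor buffer.
 * The binding's stride is 1, so offset and dataSize are in bytes.
 */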
static void
write_inline_uniform_data(struct nvk_descriptor_set *set,
                          const VkWriteDescriptorSetInlineUniformBlock *info,
                          uint32_t binding, uint32_t offset)
{
   assert(set->layout->binding[binding].stride == 1);
   write_desc(set, binding, offset, info->pData, info->dataSize);
}

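/* vkUpdateDescriptorSets: writes go through the write_*_desc helpers above.
 * Copies go stride-by-stride through the sets' mapped descriptor buffers,
 * with dynamic buffer bindings additionally copied through the sets'
 * dynamic_buffers arrays.
 */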
VKAPI_ATTR void VKAPI_CALL
nvk_UpdateDescriptorSets(VkDevice device,
                         uint32_t descriptorWriteCount,
                         const VkWriteDescriptorSet *pDescriptorWrites,
                         uint32_t descriptorCopyCount,
                         const VkCopyDescriptorSet *pDescriptorCopies)
{
   for (uint32_t w = 0; w < descriptorWriteCount; w++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[w];
      VK_FROM_HANDLE(nvk_descriptor_set, set, write->dstSet);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            write_image_view_desc(set, write->pImageInfo + j,
                                  write->dstBinding,
                                  write->dstArrayElement + j,
                                  write->descriptorType);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            write_buffer_view_desc(set, write->pTexelBufferView[j],
                                   write->dstBinding, write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            write_buffer_desc(set, write->pBufferInfo + j, write->dstBinding,
                              write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            write_dynamic_buffer_desc(set, write->pBufferInfo + j,
                                      write->dstBinding,
                                      write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK: {
         const VkWriteDescriptorSetInlineUniformBlock *write_inline =
            vk_find_struct_const(write->pNext,
                                 WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK);
         assert(write_inline->dataSize == write->descriptorCount);
         write_inline_uniform_data(set, write_inline, write->dstBinding,
                                   write->dstArrayElement);
         break;
      }

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      VK_FROM_HANDLE(nvk_descriptor_set, src, copy->srcSet);
      VK_FROM_HANDLE(nvk_descriptor_set, dst, copy->dstSet);

      const struct nvk_descriptor_set_binding_layout *src_binding_layout =
         &src->layout->binding[copy->srcBinding];
      const struct nvk_descriptor_set_binding_layout *dst_binding_layout =
         &dst->layout->binding[copy->dstBinding];

      if (dst_binding_layout->stride > 0 && src_binding_layout->stride > 0) {
         for (uint32_t j = 0; j < copy->descriptorCount; j++) {
            ASSERTED uint32_t dst_max_size, src_max_size;
            void *dst_map = desc_ubo_data(dst, copy->dstBinding,
                                          copy->dstArrayElement + j,
                                          &dst_max_size);
            const void *src_map = desc_ubo_data(src, copy->srcBinding,
                                                copy->srcArrayElement + j,
                                                &src_max_size);
            const uint32_t copy_size = MIN2(dst_binding_layout->stride,
                                            src_binding_layout->stride);
            assert(copy_size <= dst_max_size && copy_size <= src_max_size);
            memcpy(dst_map, src_map, copy_size);
         }
      }

      switch (src_binding_layout->type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
         const uint32_t dst_dyn_start =
            dst_binding_layout->dynamic_buffer_index + copy->dstArrayElement;
         const uint32_t src_dyn_start =
            src_binding_layout->dynamic_buffer_index + copy->srcArrayElement;
         typed_memcpy(&dst->dynamic_buffers[dst_dyn_start],
                      &src->dynamic_buffers[src_dyn_start],
                      copy->descriptorCount);
         break;
      }
      default:
         break;
      }
   }
}

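/* Write push descriptors into the push set's inline storage.  A temporary
 * nvk_descriptor_set pointing at that storage lets us reuse the write
 * helpers above.  Dynamic buffer and inline uniform block descriptors are
 * not permitted in push descriptor set layouts, hence the smaller switch.
 */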
void
nvk_push_descriptor_set_update(struct nvk_push_descriptor_set *push_set,
                               struct nvk_descriptor_set_layout *layout,
                               uint32_t write_count,
                               const VkWriteDescriptorSet *writes)
{
   assert(layout->non_variable_descriptor_buffer_size < sizeof(push_set->data));
   struct nvk_descriptor_set set = {
      .layout = layout,
      .size = sizeof(push_set->data),
      .mapped_ptr = push_set->data,
   };

   for (uint32_t w = 0; w < write_count; w++) {
      const VkWriteDescriptorSet *write = &writes[w];
      assert(write->dstSet == VK_NULL_HANDLE);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            write_image_view_desc(&set, write->pImageInfo + j,
                                  write->dstBinding,
                                  write->dstArrayElement + j,
                                  write->descriptorType);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            write_buffer_view_desc(&set, write->pTexelBufferView[j],
                                   write->dstBinding, write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            write_buffer_desc(&set, write->pBufferInfo + j, write->dstBinding,
                              write->dstArrayElement + j);
         }
         break;

      default:
         break;
      }
   }
}

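/* Free a descriptor set.  When free_bo is set, the set's entry is removed
 * from the pool's entry list; the pool's bump allocator only rewinds to
 * offset zero once every set has been freed.
 */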
static void
nvk_descriptor_set_destroy(struct nvk_device *dev,
                           struct nvk_descriptor_pool *pool,
                           struct nvk_descriptor_set *set, bool free_bo)
{
   if (free_bo) {
      for (int i = 0; i < pool->entry_count; ++i) {
         if (pool->entries[i].set == set) {
            memmove(&pool->entries[i], &pool->entries[i + 1],
                    sizeof(pool->entries[i]) * (pool->entry_count - i - 1));
            --pool->entry_count;
            break;
         }
      }

      if (pool->entry_count == 0)
         pool->current_offset = 0;
   }

   vk_descriptor_set_layout_unref(&dev->vk, &set->layout->vk);

   vk_object_free(&dev->vk, NULL, set);
}

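/* Destroy any sets still live in the pool, then unmap and release the
 * pool's descriptor buffer BO.
 */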
static void
nvk_destroy_descriptor_pool(struct nvk_device *dev,
                            const VkAllocationCallbacks *pAllocator,
                            struct nvk_descriptor_pool *pool)
{
   for (int i = 0; i < pool->entry_count; ++i) {
      nvk_descriptor_set_destroy(dev, pool, pool->entries[i].set, false);
   }

   if (pool->bo) {
      nouveau_ws_bo_unmap(pool->bo, pool->mapped_ptr);
      nouveau_ws_bo_destroy(pool->bo);
   }

   vk_object_free(&dev->vk, pAllocator, pool);
}

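/* The pool's descriptor buffer is sized conservatively: every requested
 * descriptor is charged the larger of its own stride and the maximum
 * alignment across all requested types, plus per-set alignment padding
 * (see the comment below).
 */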
VKAPI_ATTR VkResult VKAPI_CALL
nvk_CreateDescriptorPool(VkDevice _device,
                         const VkDescriptorPoolCreateInfo *pCreateInfo,
                         const VkAllocationCallbacks *pAllocator,
                         VkDescriptorPool *pDescriptorPool)
{
   VK_FROM_HANDLE(nvk_device, dev, _device);
   struct nvk_physical_device *pdev = nvk_device_physical(dev);
   struct nvk_descriptor_pool *pool;
   uint64_t size = sizeof(struct nvk_descriptor_pool);
   uint64_t bo_size = 0;

   const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);

   uint32_t max_align = 0;
   for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
      const VkMutableDescriptorTypeListEXT *type_list = NULL;
      if (pCreateInfo->pPoolSizes[i].type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT &&
          mutable_info && i < mutable_info->mutableDescriptorTypeListCount)
         type_list = &mutable_info->pMutableDescriptorTypeLists[i];

      uint32_t stride, alignment;
      nvk_descriptor_stride_align_for_type(pdev,
                                           pCreateInfo->pPoolSizes[i].type,
                                           type_list, &stride, &alignment);
      max_align = MAX2(max_align, alignment);
   }

   for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
      const VkMutableDescriptorTypeListEXT *type_list = NULL;
      if (pCreateInfo->pPoolSizes[i].type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT &&
          mutable_info && i < mutable_info->mutableDescriptorTypeListCount)
         type_list = &mutable_info->pMutableDescriptorTypeLists[i];

      uint32_t stride, alignment;
      nvk_descriptor_stride_align_for_type(pdev,
                                           pCreateInfo->pPoolSizes[i].type,
                                           type_list, &stride, &alignment);
      bo_size += MAX2(stride, max_align) *
                 pCreateInfo->pPoolSizes[i].descriptorCount;
   }

   /* Individual descriptor sets are aligned to the min UBO alignment to
    * ensure that we don't end up with unaligned data access in any shaders.
    * This means that each descriptor buffer allocated may burn up to 16B of
    * extra space to get the right alignment.  (Technically, it's at most 28B
    * because we're always going to start at least 4B aligned but we're being
    * conservative here.)  Allocate enough extra space that we can chop it
    * into maxSets pieces and align each one of them to 32B.
    */
   bo_size += nvk_min_cbuf_alignment(&pdev->info) * pCreateInfo->maxSets;

   uint64_t entries_size = sizeof(struct nvk_descriptor_pool_entry) *
                           pCreateInfo->maxSets;
   size += entries_size;

   pool = vk_object_zalloc(&dev->vk, pAllocator, size,
                           VK_OBJECT_TYPE_DESCRIPTOR_POOL);
   if (!pool)
      return vk_error(dev, VK_ERROR_OUT_OF_HOST_MEMORY);

   if (bo_size) {
      uint32_t flags = NOUVEAU_WS_BO_GART | NOUVEAU_WS_BO_NO_SHARE;
      pool->bo = nouveau_ws_bo_new_mapped(dev->ws_dev, bo_size, 0, flags,
                                          NOUVEAU_WS_BO_WR,
                                          (void **)&pool->mapped_ptr);
      if (!pool->bo) {
         nvk_destroy_descriptor_pool(dev, pAllocator, pool);
         return vk_error(dev, VK_ERROR_OUT_OF_DEVICE_MEMORY);
      }
   }

   pool->size = bo_size;
   pool->max_entry_count = pCreateInfo->maxSets;

   *pDescriptorPool = nvk_descriptor_pool_to_handle(pool);
   return VK_SUCCESS;
}

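/* Allocate a set from the pool's descriptor buffer, bumping current_offset.
 * A variable-count binding, which must be the last binding in the layout,
 * contributes variable_count * stride bytes on top of the non-variable
 * size.  Immutable sampler descriptors are written up front.
 */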
static VkResult
nvk_descriptor_set_create(struct nvk_device *dev,
                          struct nvk_descriptor_pool *pool,
                          struct nvk_descriptor_set_layout *layout,
                          uint32_t variable_count,
                          struct nvk_descriptor_set **out_set)
{
   struct nvk_physical_device *pdev = nvk_device_physical(dev);
   struct nvk_descriptor_set *set;

   if (pool->entry_count == pool->max_entry_count)
      return VK_ERROR_OUT_OF_POOL_MEMORY;

   uint32_t mem_size = sizeof(struct nvk_descriptor_set) +
      layout->dynamic_buffer_count * sizeof(struct nvk_buffer_address);

   set = vk_object_zalloc(&dev->vk, NULL, mem_size,
                          VK_OBJECT_TYPE_DESCRIPTOR_SET);
   if (!set)
      return vk_error(dev, VK_ERROR_OUT_OF_HOST_MEMORY);

   set->size = layout->non_variable_descriptor_buffer_size;

   if (layout->binding_count > 0 &&
       (layout->binding[layout->binding_count - 1].flags &
        VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)) {
      uint32_t stride = layout->binding[layout->binding_count - 1].stride;
      set->size += stride * variable_count;
   }

   set->size = align64(set->size, nvk_min_cbuf_alignment(&pdev->info));

   if (set->size > 0) {
      if (pool->current_offset + set->size > pool->size) {
         vk_object_free(&dev->vk, NULL, set);
         return VK_ERROR_OUT_OF_POOL_MEMORY;
      }

      set->mapped_ptr = (uint32_t *)(pool->mapped_ptr + pool->current_offset);
      set->addr = pool->bo->offset + pool->current_offset;
   }

   assert(pool->current_offset % nvk_min_cbuf_alignment(&pdev->info) == 0);
   pool->entries[pool->entry_count].offset = pool->current_offset;
   pool->entries[pool->entry_count].size = set->size;
   pool->entries[pool->entry_count].set = set;
   pool->current_offset += set->size;
   pool->entry_count++;

   vk_descriptor_set_layout_ref(&layout->vk);
   set->layout = layout;

   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].type != VK_DESCRIPTOR_TYPE_SAMPLER &&
          layout->binding[b].type != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
         continue;

      if (layout->binding[b].immutable_samplers == NULL)
         continue;

      uint32_t array_size = layout->binding[b].array_size;
      if (layout->binding[b].flags &
          VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)
         array_size = variable_count;

      for (uint32_t j = 0; j < array_size; j++)
         write_image_view_desc(set, NULL, b, j, layout->binding[b].type);
   }

   *out_set = set;

   return VK_SUCCESS;
}

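/* vkAllocateDescriptorSets: on any failure, sets created so far are freed
 * back to the pool and every returned handle is set to VK_NULL_HANDLE, as
 * the spec requires.
 */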
VKAPI_ATTR VkResult VKAPI_CALL
nvk_AllocateDescriptorSets(VkDevice device,
                           const VkDescriptorSetAllocateInfo *pAllocateInfo,
                           VkDescriptorSet *pDescriptorSets)
{
   VK_FROM_HANDLE(nvk_device, dev, device);
   VK_FROM_HANDLE(nvk_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   uint32_t i;

   struct nvk_descriptor_set *set = NULL;

   const VkDescriptorSetVariableDescriptorCountAllocateInfo *var_desc_count =
      vk_find_struct_const(pAllocateInfo->pNext,
                           DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO);

   /* Allocate one descriptor set for each requested layout */
   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      VK_FROM_HANDLE(nvk_descriptor_set_layout, layout,
                     pAllocateInfo->pSetLayouts[i]);
      /* If descriptorSetCount is zero or this structure is not included in
       * the pNext chain, then the variable lengths are considered to be zero.
       */
      const uint32_t variable_count =
         var_desc_count && var_desc_count->descriptorSetCount > 0 ?
         var_desc_count->pDescriptorCounts[i] : 0;

      result = nvk_descriptor_set_create(dev, pool, layout,
                                         variable_count, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = nvk_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS) {
      nvk_FreeDescriptorSets(device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);
      for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
         pDescriptorSets[i] = VK_NULL_HANDLE;
      }
   }
   return result;
}

VKAPI_ATTR VkResult VKAPI_CALL
nvk_FreeDescriptorSets(VkDevice device,
                       VkDescriptorPool descriptorPool,
                       uint32_t descriptorSetCount,
                       const VkDescriptorSet *pDescriptorSets)
{
   VK_FROM_HANDLE(nvk_device, dev, device);
   VK_FROM_HANDLE(nvk_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < descriptorSetCount; i++) {
      VK_FROM_HANDLE(nvk_descriptor_set, set, pDescriptorSets[i]);

      if (set)
         nvk_descriptor_set_destroy(dev, pool, set, true);
   }
   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
nvk_DestroyDescriptorPool(VkDevice device,
                          VkDescriptorPool _pool,
                          const VkAllocationCallbacks *pAllocator)
{
   VK_FROM_HANDLE(nvk_device, dev, device);
   VK_FROM_HANDLE(nvk_descriptor_pool, pool, _pool);

   if (!_pool)
      return;

   nvk_destroy_descriptor_pool(dev, pAllocator, pool);
}

VKAPI_ATTR VkResult VKAPI_CALL
nvk_ResetDescriptorPool(VkDevice device,
                        VkDescriptorPool descriptorPool,
                        VkDescriptorPoolResetFlags flags)
{
   VK_FROM_HANDLE(nvk_device, dev, device);
   VK_FROM_HANDLE(nvk_descriptor_pool, pool, descriptorPool);

   for (int i = 0; i < pool->entry_count; ++i) {
      nvk_descriptor_set_destroy(dev, pool, pool->entries[i].set, false);
   }
   pool->entry_count = 0;
   pool->current_offset = 0;

   return VK_SUCCESS;
}

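/* Shared implementation for descriptor update templates: each template
 * entry reads its data from the user-provided blob at entry->offset,
 * stepping entry->stride bytes between array elements.
 */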
static void
nvk_descriptor_set_write_template(struct nvk_descriptor_set *set,
                                  const struct vk_descriptor_update_template *template,
                                  const void *data)
{
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct vk_descriptor_template_entry *entry =
         &template->entries[i];

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;

            write_image_view_desc(set, info,
                                  entry->binding,
                                  entry->array_element + j,
                                  entry->type);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *bview =
               data + entry->offset + j * entry->stride;

            write_buffer_view_desc(set, *bview,
                                   entry->binding,
                                   entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;

            write_buffer_desc(set, info,
                              entry->binding,
                              entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;

            write_dynamic_buffer_desc(set, info,
                                      entry->binding,
                                      entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         write_desc(set,
                    entry->binding,
                    entry->array_element,
                    data + entry->offset,
                    entry->array_count);
         break;

      default:
         break;
      }
   }
}

VKAPI_ATTR void VKAPI_CALL
nvk_UpdateDescriptorSetWithTemplate(VkDevice device,
                                    VkDescriptorSet descriptorSet,
                                    VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                    const void *pData)
{
   VK_FROM_HANDLE(nvk_descriptor_set, set, descriptorSet);
   VK_FROM_HANDLE(vk_descriptor_update_template, template,
                  descriptorUpdateTemplate);

   nvk_descriptor_set_write_template(set, template, pData);
}

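/* Like nvk_push_descriptor_set_update, but driven by an update template.
 * A temporary nvk_descriptor_set pointing at the push set's storage lets us
 * reuse nvk_descriptor_set_write_template.
 */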
void
nvk_push_descriptor_set_update_template(
   struct nvk_push_descriptor_set *push_set,
   struct nvk_descriptor_set_layout *layout,
   const struct vk_descriptor_update_template *template,
   const void *data)
{
   struct nvk_descriptor_set tmp_set = {
      .layout = layout,
      .size = sizeof(push_set->data),
      .mapped_ptr = push_set->data,
   };
   nvk_descriptor_set_write_template(&tmp_set, template, data);
}