1 /*
2 * Copyright © 2024 Collabora Ltd.
3 * SPDX-License-Identifier: MIT
4 */
5
6 #include <assert.h>
7 #include <fcntl.h>
8 #include <stdbool.h>
9 #include <string.h>
10 #include <unistd.h>
11
12 #include "util/mesa-sha1.h"
13 #include "vk_alloc.h"
14 #include "vk_descriptor_update_template.h"
15 #include "vk_descriptors.h"
16 #include "vk_format.h"
17 #include "vk_log.h"
18 #include "vk_util.h"
19
20 #include "util/bitset.h"
21
22 #include "genxml/gen_macros.h"
23
24 #include "panvk_buffer.h"
25 #include "panvk_buffer_view.h"
26 #include "panvk_descriptor_set.h"
27 #include "panvk_descriptor_set_layout.h"
28 #include "panvk_device.h"
29 #include "panvk_entrypoints.h"
30 #include "panvk_image.h"
31 #include "panvk_image_view.h"
32 #include "panvk_macros.h"
33 #include "panvk_priv_bo.h"
34 #include "panvk_sampler.h"
35
36 static void *
get_desc_slot_ptr(struct panvk_descriptor_set * set,uint32_t binding,uint32_t elem,VkDescriptorType type)37 get_desc_slot_ptr(struct panvk_descriptor_set *set, uint32_t binding,
38 uint32_t elem, VkDescriptorType type)
39 {
40 const struct panvk_descriptor_set_binding_layout *binding_layout =
41 &set->layout->bindings[binding];
42
43 uint32_t offset = panvk_get_desc_index(binding_layout, elem, type);
44
45 assert(offset < set->layout->desc_count);
46
47 return (char *)set->descs.host + offset * PANVK_DESCRIPTOR_SIZE;
48 }
49
/* Copy a single descriptor into its slot in the set's host mapping.
 * The static_assert pins the source object to exactly one descriptor
 * slot, so the fixed-size memcpy cannot over- or under-read. */
#define write_desc(set, binding, elem, desc, type)                             \
   do {                                                                        \
      static_assert(sizeof(*(desc)) == PANVK_DESCRIPTOR_SIZE,                  \
                    "wrong descriptor size");                                  \
      void *__dst = get_desc_slot_ptr(set, binding, elem, type);               \
      memcpy(__dst, (desc), PANVK_DESCRIPTOR_SIZE);                            \
   } while (0)
57
58 static void
write_sampler_desc(struct panvk_descriptor_set * set,const VkDescriptorImageInfo * const pImageInfo,uint32_t binding,uint32_t elem,bool write_immutable)59 write_sampler_desc(struct panvk_descriptor_set *set,
60 const VkDescriptorImageInfo *const pImageInfo,
61 uint32_t binding, uint32_t elem, bool write_immutable)
62 {
63 const struct panvk_descriptor_set_binding_layout *binding_layout =
64 &set->layout->bindings[binding];
65
66 if (binding_layout->immutable_samplers && !write_immutable)
67 return;
68
69 const struct mali_sampler_packed *sampler_desc;
70
71 if (binding_layout->immutable_samplers) {
72 sampler_desc = &binding_layout->immutable_samplers[elem];
73 } else {
74 struct panvk_sampler *sampler = panvk_sampler_from_handle(
75 pImageInfo ? pImageInfo->sampler : VK_NULL_HANDLE);
76
77 sampler_desc = sampler ? &sampler->desc : NULL;
78 }
79
80 if (sampler_desc)
81 write_desc(set, binding, elem, sampler_desc, VK_DESCRIPTOR_TYPE_SAMPLER);
82 }
83
84 static void
write_image_view_desc(struct panvk_descriptor_set * set,const VkDescriptorImageInfo * const pImageInfo,uint32_t binding,uint32_t elem,VkDescriptorType type)85 write_image_view_desc(struct panvk_descriptor_set *set,
86 const VkDescriptorImageInfo *const pImageInfo,
87 uint32_t binding, uint32_t elem, VkDescriptorType type)
88 {
89 if (pImageInfo && pImageInfo->imageView != VK_NULL_HANDLE) {
90 VK_FROM_HANDLE(panvk_image_view, view, pImageInfo->imageView);
91
92 #if PAN_ARCH <= 7
93 if (type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
94 write_desc(set, binding, elem, &view->descs.img_attrib_buf, type);
95 else
96 write_desc(set, binding, elem, &view->descs.tex, type);
97 #else
98 write_desc(set, binding, elem, &view->descs.tex, type);
99 #endif
100 }
101 }
102
/*
 * Write a uniform/storage buffer descriptor for (binding, elem).
 *
 * v7 and older use distinct descriptor forms: a pointer+size pair for
 * storage buffers and a packed UNIFORM_BUFFER descriptor for uniform
 * buffers. v9+ use a single BUFFER descriptor for both.
 */
static void
write_buffer_desc(struct panvk_descriptor_set *set,
                  const VkDescriptorBufferInfo *const info, uint32_t binding,
                  uint32_t elem, VkDescriptorType type)
{
   VK_FROM_HANDLE(panvk_buffer, buffer, info->buffer);
   /* Presumably resolves VK_WHOLE_SIZE and clamps to the buffer size —
    * confirm against panvk_buffer_range(). Hardware descriptors carry
    * 32-bit sizes, hence the assert. */
   const uint64_t range = panvk_buffer_range(buffer, info->offset, info->range);
   assert(range <= UINT32_MAX);

#if PAN_ARCH <= 7
   if (type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) {
      struct panvk_ssbo_addr desc = {
         .base_addr = panvk_buffer_gpu_ptr(buffer, info->offset),
         .size = range,
      };

      write_desc(set, binding, elem, &desc, type);
   } else {
      /* The packed UBO descriptor is smaller than a full descriptor
       * slot; pad it up so write_desc()'s size check passes and the
       * rest of the slot is zero-initialized. */
      struct {
         struct mali_uniform_buffer_packed ubo;
         uint32_t pad[6];
      } padded_desc = {0};

      pan_pack(&padded_desc.ubo, UNIFORM_BUFFER, cfg) {
         cfg.pointer = panvk_buffer_gpu_ptr(buffer, info->offset);
         /* UBO size is expressed in 16-byte entries, rounded up. */
         cfg.entries = DIV_ROUND_UP(range, 16);
      }

      write_desc(set, binding, elem, &padded_desc, type);
   }
#else
   struct mali_buffer_packed desc;

   pan_pack(&desc, BUFFER, cfg) {
      cfg.address = panvk_buffer_gpu_ptr(buffer, info->offset);
      cfg.size = range;
   }
   write_desc(set, binding, elem, &desc, type);
#endif
}
143
144 static void
write_dynamic_buffer_desc(struct panvk_descriptor_set * set,const VkDescriptorBufferInfo * const info,uint32_t binding,uint32_t elem)145 write_dynamic_buffer_desc(struct panvk_descriptor_set *set,
146 const VkDescriptorBufferInfo *const info,
147 uint32_t binding, uint32_t elem)
148 {
149 VK_FROM_HANDLE(panvk_buffer, buffer, info->buffer);
150 const struct panvk_descriptor_set_binding_layout *binding_layout =
151 &set->layout->bindings[binding];
152 uint32_t dyn_buf_idx = binding_layout->desc_idx + elem;
153 const uint64_t range = panvk_buffer_range(buffer, info->offset, info->range);
154
155 assert(range <= UINT32_MAX);
156 assert(dyn_buf_idx < ARRAY_SIZE(set->dyn_bufs));
157
158 set->dyn_bufs[dyn_buf_idx].dev_addr =
159 panvk_buffer_gpu_ptr(buffer, info->offset);
160 set->dyn_bufs[dyn_buf_idx].size = range;
161 }
162
163 static void
write_buffer_view_desc(struct panvk_descriptor_set * set,const VkBufferView bufferView,uint32_t binding,uint32_t elem,VkDescriptorType type)164 write_buffer_view_desc(struct panvk_descriptor_set *set,
165 const VkBufferView bufferView, uint32_t binding,
166 uint32_t elem, VkDescriptorType type)
167 {
168 if (bufferView != VK_NULL_HANDLE) {
169 VK_FROM_HANDLE(panvk_buffer_view, view, bufferView);
170
171 #if PAN_ARCH <= 7
172 if (type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)
173 write_desc(set, binding, elem, &view->descs.img_attrib_buf, type);
174 else
175 write_desc(set, binding, elem, &view->descs.tex, type);
176 #else
177 write_desc(set, binding, elem, &view->descs.tex, type);
178 #endif
179 }
180 }
181
182 static void
panvk_desc_pool_free_set(struct panvk_descriptor_pool * pool,struct panvk_descriptor_set * set)183 panvk_desc_pool_free_set(struct panvk_descriptor_pool *pool,
184 struct panvk_descriptor_set *set)
185 {
186 uintptr_t set_idx = set - pool->sets;
187 assert(set_idx < pool->max_sets);
188
189 if (!BITSET_TEST(pool->free_sets, set_idx)) {
190 if (set->desc_count)
191 util_vma_heap_free(&pool->desc_heap, set->descs.dev,
192 set->desc_count * PANVK_DESCRIPTOR_SIZE);
193
194 BITSET_SET(pool->free_sets, set_idx);
195
196 /* Discard constness to call vk_descriptor_set_layout_unref(). */
197 struct panvk_descriptor_set_layout *set_layout =
198 (struct panvk_descriptor_set_layout *)set->layout;
199
200 vk_descriptor_set_layout_unref(pool->base.device, &set_layout->vk);
201 vk_object_base_finish(&set->base);
202 memset(set, 0, sizeof(*set));
203 }
204 }
205
206 static void
panvk_destroy_descriptor_pool(struct panvk_device * device,const VkAllocationCallbacks * pAllocator,struct panvk_descriptor_pool * pool)207 panvk_destroy_descriptor_pool(struct panvk_device *device,
208 const VkAllocationCallbacks *pAllocator,
209 struct panvk_descriptor_pool *pool)
210 {
211 for (uint32_t i = 0; i < pool->max_sets; i++)
212 panvk_desc_pool_free_set(pool, &pool->sets[i]);
213
214 if (pool->desc_bo) {
215 util_vma_heap_finish(&pool->desc_heap);
216 panvk_priv_bo_unref(pool->desc_bo);
217 }
218
219 vk_object_free(&device->vk, pAllocator, pool);
220 }
221
222 VkResult
panvk_per_arch(CreateDescriptorPool)223 panvk_per_arch(CreateDescriptorPool)(
224 VkDevice _device, const VkDescriptorPoolCreateInfo *pCreateInfo,
225 const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool)
226 {
227 VK_FROM_HANDLE(panvk_device, device, _device);
228
229 VK_MULTIALLOC(ma);
230 VK_MULTIALLOC_DECL(&ma, struct panvk_descriptor_pool, pool, 1);
231 VK_MULTIALLOC_DECL(&ma, BITSET_WORD, free_sets,
232 BITSET_WORDS(pCreateInfo->maxSets));
233 VK_MULTIALLOC_DECL(&ma, struct panvk_descriptor_set, sets,
234 pCreateInfo->maxSets);
235
236 if (!vk_object_multizalloc(&device->vk, &ma, pAllocator,
237 VK_OBJECT_TYPE_DESCRIPTOR_POOL))
238 return panvk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
239
240 uint32_t desc_count = 0;
241 for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
242 if (!vk_descriptor_type_is_dynamic(pCreateInfo->pPoolSizes[i].type))
243 desc_count += panvk_get_desc_stride(pCreateInfo->pPoolSizes[i].type) *
244 pCreateInfo->pPoolSizes[i].descriptorCount;
245 }
246
247 /* initialize to all ones to indicate all sets are free */
248 BITSET_SET_RANGE(free_sets, 0, pCreateInfo->maxSets - 1);
249 pool->free_sets = free_sets;
250 pool->sets = sets;
251 pool->max_sets = pCreateInfo->maxSets;
252
253 if (desc_count) {
254 /* adjust desc_count to account for 1 dummy sampler per descriptor set */
255 desc_count += pool->max_sets;
256
257 uint64_t pool_size = desc_count * PANVK_DESCRIPTOR_SIZE;
258 VkResult result = panvk_priv_bo_create(device, pool_size, 0,
259 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT,
260 &pool->desc_bo);
261 if (result != VK_SUCCESS) {
262 panvk_destroy_descriptor_pool(device, pAllocator, pool);
263 return result;
264 }
265 uint64_t bo_size = pool->desc_bo->bo->size;
266 assert(pool_size <= bo_size);
267 util_vma_heap_init(&pool->desc_heap, pool->desc_bo->addr.dev, bo_size);
268 }
269
270 *pDescriptorPool = panvk_descriptor_pool_to_handle(pool);
271 return VK_SUCCESS;
272 }
273
274 void
panvk_per_arch(DestroyDescriptorPool)275 panvk_per_arch(DestroyDescriptorPool)(VkDevice _device, VkDescriptorPool _pool,
276 const VkAllocationCallbacks *pAllocator)
277 {
278 VK_FROM_HANDLE(panvk_device, device, _device);
279 VK_FROM_HANDLE(panvk_descriptor_pool, pool, _pool);
280
281 if (pool)
282 panvk_destroy_descriptor_pool(device, pAllocator, pool);
283 }
284
285 static void
desc_set_write_immutable_samplers(struct panvk_descriptor_set * set,uint32_t variable_count)286 desc_set_write_immutable_samplers(struct panvk_descriptor_set *set,
287 uint32_t variable_count)
288 {
289 const struct panvk_descriptor_set_layout *layout = set->layout;
290
291 for (uint32_t b = 0; b < layout->binding_count; b++) {
292 if (layout->bindings[b].type != VK_DESCRIPTOR_TYPE_SAMPLER &&
293 layout->bindings[b].type != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
294 continue;
295
296 if (layout->bindings[b].immutable_samplers == NULL)
297 continue;
298
299 uint32_t array_size = layout->bindings[b].desc_count;
300
301 if (layout->bindings[b].flags &
302 VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)
303 array_size = variable_count;
304
305 for (uint32_t j = 0; j < array_size; j++) {
306 write_desc(set, b, j, &layout->bindings[b].immutable_samplers[j],
307 VK_DESCRIPTOR_TYPE_SAMPLER);
308 }
309 }
310 }
311
312 static VkResult
panvk_desc_pool_allocate_set(struct panvk_descriptor_pool * pool,struct panvk_descriptor_set_layout * layout,uint32_t variable_count,struct panvk_descriptor_set ** out)313 panvk_desc_pool_allocate_set(struct panvk_descriptor_pool *pool,
314 struct panvk_descriptor_set_layout *layout,
315 uint32_t variable_count,
316 struct panvk_descriptor_set **out)
317 {
318 uint32_t num_descs = layout->desc_count;
319
320 if (layout->binding_count) {
321 uint32_t last_binding = layout->binding_count - 1;
322
323 if ((layout->bindings[last_binding].flags &
324 VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT) &&
325 !vk_descriptor_type_is_dynamic(layout->bindings[last_binding].type)) {
326 uint32_t desc_stride =
327 panvk_get_desc_stride(layout->bindings[last_binding].type);
328
329 num_descs -= layout->bindings[last_binding].desc_count * desc_stride;
330 num_descs += variable_count * desc_stride;
331 }
332 }
333
334 uint64_t descs_size = num_descs * PANVK_DESCRIPTOR_SIZE;
335 uint32_t first_free_set =
336 __bitset_ffs(pool->free_sets, BITSET_WORDS(pool->max_sets));
337 if (first_free_set == 0 || pool->desc_heap.free_size < descs_size)
338 return panvk_error(pool, VK_ERROR_OUT_OF_POOL_MEMORY);
339
340 uint64_t descs_dev_addr = 0;
341 if (num_descs) {
342 descs_dev_addr = util_vma_heap_alloc(&pool->desc_heap, descs_size,
343 PANVK_DESCRIPTOR_SIZE);
344 if (!descs_dev_addr)
345 return panvk_error(pool, VK_ERROR_FRAGMENTED_POOL);
346 }
347 struct panvk_descriptor_set *set = &pool->sets[first_free_set - 1];
348
349 vk_object_base_init(pool->base.device, &set->base,
350 VK_OBJECT_TYPE_DESCRIPTOR_SET);
351 vk_descriptor_set_layout_ref(&layout->vk);
352 set->layout = layout;
353 set->desc_count = num_descs;
354 if (pool->desc_bo) {
355 set->descs.dev = descs_dev_addr;
356 set->descs.host =
357 pool->desc_bo->addr.host + set->descs.dev - pool->desc_bo->addr.dev;
358 }
359 desc_set_write_immutable_samplers(set, variable_count);
360 BITSET_CLEAR(pool->free_sets, first_free_set - 1);
361
362 *out = set;
363 return VK_SUCCESS;
364 }
365
/*
 * Allocate descriptorSetCount sets from the pool. On any failure, every
 * set allocated so far is freed and all output handles are set to
 * VK_NULL_HANDLE, as the spec requires.
 */
VkResult
panvk_per_arch(AllocateDescriptorSets)(
   VkDevice _device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
   VkDescriptorSet *pDescriptorSets)
{
   VK_FROM_HANDLE(panvk_descriptor_pool, pool, pAllocateInfo->descriptorPool);
   VkResult result = VK_SUCCESS;
   unsigned i;

   struct panvk_descriptor_set *set = NULL;

   const VkDescriptorSetVariableDescriptorCountAllocateInfo *var_desc_count =
      vk_find_struct_const(
         pAllocateInfo->pNext,
         DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO);

   /* allocate a set of buffers for each shader to contain descriptors */
   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      VK_FROM_HANDLE(panvk_descriptor_set_layout, layout,
                     pAllocateInfo->pSetLayouts[i]);
      /* If descriptorSetCount is zero or this structure is not included in
       * the pNext chain, then the variable lengths are considered to be zero.
       */
      const uint32_t variable_count =
         var_desc_count && var_desc_count->descriptorSetCount > 0
            ? var_desc_count->pDescriptorCounts[i]
            : 0;

      result = panvk_desc_pool_allocate_set(pool, layout, variable_count, &set);
      if (result != VK_SUCCESS)
         goto err_free_sets;

      pDescriptorSets[i] = panvk_descriptor_set_to_handle(set);
   }

   return VK_SUCCESS;

err_free_sets:
   /* i is the number of sets successfully allocated before the failure;
    * free exactly those, then null out the whole output array. */
   panvk_per_arch(FreeDescriptorSets)(_device, pAllocateInfo->descriptorPool, i,
                                      pDescriptorSets);
   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++)
      pDescriptorSets[i] = VK_NULL_HANDLE;

   return result;
}
411
412 VkResult
panvk_per_arch(FreeDescriptorSets)413 panvk_per_arch(FreeDescriptorSets)(VkDevice _device,
414 VkDescriptorPool descriptorPool,
415 uint32_t descriptorSetCount,
416 const VkDescriptorSet *pDescriptorSets)
417 {
418 VK_FROM_HANDLE(panvk_descriptor_pool, pool, descriptorPool);
419
420 for (unsigned i = 0; i < descriptorSetCount; i++) {
421 VK_FROM_HANDLE(panvk_descriptor_set, set, pDescriptorSets[i]);
422
423 if (set)
424 panvk_desc_pool_free_set(pool, set);
425 }
426 return VK_SUCCESS;
427 }
428
429 VkResult
panvk_per_arch(ResetDescriptorPool)430 panvk_per_arch(ResetDescriptorPool)(VkDevice _device, VkDescriptorPool _pool,
431 VkDescriptorPoolResetFlags flags)
432 {
433 VK_FROM_HANDLE(panvk_descriptor_pool, pool, _pool);
434
435 for (uint32_t i = 0; i < pool->max_sets; i++)
436 panvk_desc_pool_free_set(pool, &pool->sets[i]);
437
438 BITSET_SET_RANGE(pool->free_sets, 0, pool->max_sets - 1);
439 return VK_SUCCESS;
440 }
441
/*
 * Apply one VkWriteDescriptorSet to a set, dispatching on descriptor
 * type. write_immutable_samplers is only set when (re)writing a set's
 * immutable samplers internally; app-driven updates pass false.
 */
VkResult
panvk_per_arch(descriptor_set_write)(struct panvk_descriptor_set *set,
                                     const VkWriteDescriptorSet *write,
                                     bool write_immutable_samplers)
{
   switch (write->descriptorType) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      for (uint32_t j = 0; j < write->descriptorCount; j++) {
         write_sampler_desc(set, write->pImageInfo + j, write->dstBinding,
                            write->dstArrayElement + j,
                            write_immutable_samplers);
      }
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      /* A combined image/sampler writes both halves; the image half is
       * stored with the sampled-image descriptor layout. */
      for (uint32_t j = 0; j < write->descriptorCount; j++) {
         write_sampler_desc(set, write->pImageInfo + j, write->dstBinding,
                            write->dstArrayElement + j,
                            write_immutable_samplers);
         write_image_view_desc(set, write->pImageInfo + j, write->dstBinding,
                               write->dstArrayElement + j,
                               VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
      }
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      for (uint32_t j = 0; j < write->descriptorCount; j++) {
         write_image_view_desc(set, write->pImageInfo + j, write->dstBinding,
                               write->dstArrayElement + j,
                               write->descriptorType);
      }
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      for (uint32_t j = 0; j < write->descriptorCount; j++) {
         write_buffer_view_desc(set, write->pTexelBufferView[j],
                                write->dstBinding, write->dstArrayElement + j,
                                write->descriptorType);
      }
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      for (uint32_t j = 0; j < write->descriptorCount; j++) {
         write_buffer_desc(set, write->pBufferInfo + j, write->dstBinding,
                           write->dstArrayElement + j, write->descriptorType);
      }
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      /* Dynamic buffers are recorded CPU-side in the set, not written
       * to the GPU descriptor heap. */
      for (uint32_t j = 0; j < write->descriptorCount; j++) {
         write_dynamic_buffer_desc(set, write->pBufferInfo + j,
                                   write->dstBinding,
                                   write->dstArrayElement + j);
      }
      break;

   default:
      unreachable("Unsupported descriptor type");
   }
   return VK_SUCCESS;
}
508
/*
 * Apply one VkCopyDescriptorSet: copy descriptorCount descriptors from
 * (srcSet, srcBinding, srcArrayElement) to the matching destination
 * location. Heap-backed descriptors are raw-copied slot by slot;
 * dynamic buffers are copied within the sets' CPU-side dyn_bufs arrays.
 */
static VkResult
panvk_descriptor_set_copy(const VkCopyDescriptorSet *copy)
{
   VK_FROM_HANDLE(panvk_descriptor_set, src_set, copy->srcSet);
   VK_FROM_HANDLE(panvk_descriptor_set, dst_set, copy->dstSet);

   const struct panvk_descriptor_set_binding_layout *dst_binding_layout =
      &dst_set->layout->bindings[copy->dstBinding];
   const struct panvk_descriptor_set_binding_layout *src_binding_layout =
      &src_set->layout->bindings[copy->srcBinding];

   assert(dst_binding_layout->type == src_binding_layout->type);

   switch (src_binding_layout->type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      for (uint32_t i = 0; i < copy->descriptorCount; i++) {
         void *dst = get_desc_slot_ptr(dst_set, copy->dstBinding,
                                       copy->dstArrayElement + i,
                                       dst_binding_layout->type);
         const void *src = get_desc_slot_ptr(src_set, copy->srcBinding,
                                             copy->srcArrayElement + i,
                                             src_binding_layout->type);

         /* Copy the full stride: some types occupy more than one
          * descriptor slot per array element. */
         memcpy(dst, src,
                PANVK_DESCRIPTOR_SIZE *
                   panvk_get_desc_stride(src_binding_layout->type));
      }
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
      uint32_t dst_dyn_buf_idx =
         dst_binding_layout->desc_idx + copy->dstArrayElement;
      uint32_t src_dyn_buf_idx =
         src_binding_layout->desc_idx + copy->srcArrayElement;

      memcpy(
         &dst_set->dyn_bufs[dst_dyn_buf_idx],
         &src_set->dyn_bufs[src_dyn_buf_idx],
         copy->descriptorCount * sizeof(dst_set->dyn_bufs[dst_dyn_buf_idx]));
      break;
   }

   default:
      unreachable("Unsupported descriptor type");
   }

   return VK_SUCCESS;
}
566
567 void
panvk_per_arch(UpdateDescriptorSets)568 panvk_per_arch(UpdateDescriptorSets)(
569 VkDevice _device, uint32_t descriptorWriteCount,
570 const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
571 const VkCopyDescriptorSet *pDescriptorCopies)
572 {
573 for (uint32_t i = 0; i < descriptorWriteCount; i++) {
574 VK_FROM_HANDLE(panvk_descriptor_set, set, pDescriptorWrites[i].dstSet);
575
576 panvk_per_arch(descriptor_set_write)(set, &pDescriptorWrites[i], false);
577 }
578
579 for (uint32_t i = 0; i < descriptorCopyCount; i++)
580 panvk_descriptor_set_copy(&pDescriptorCopies[i]);
581 }
582
/*
 * Apply a descriptor update template to a set. Each template entry
 * describes an array of descriptors of one type at a (binding, element)
 * location; the payload for element j lives at
 * data + entry->offset + j * entry->stride.
 */
void
panvk_per_arch(descriptor_set_write_template)(
   struct panvk_descriptor_set *set,
   const struct vk_descriptor_update_template *template, const void *data,
   bool write_immutable_samplers)
{
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct vk_descriptor_template_entry *entry = &template->entries[i];

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;

            write_sampler_desc(set, info, entry->binding,
                               entry->array_element + j,
                               write_immutable_samplers);
         }
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         /* Both halves are written; the image half uses the
          * sampled-image descriptor layout. */
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;
            write_sampler_desc(set, info, entry->binding,
                               entry->array_element + j,
                               write_immutable_samplers);
            write_image_view_desc(set, info, entry->binding,
                                  entry->array_element + j,
                                  VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
         }
         break;

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;

            write_image_view_desc(set, info, entry->binding,
                                  entry->array_element + j, entry->type);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *bview =
               data + entry->offset + j * entry->stride;

            write_buffer_view_desc(set, *bview, entry->binding,
                                   entry->array_element + j, entry->type);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;

            write_buffer_desc(set, info, entry->binding,
                              entry->array_element + j, entry->type);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         /* Recorded CPU-side in the set, not in the descriptor heap. */
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;

            write_dynamic_buffer_desc(set, info, entry->binding,
                                      entry->array_element + j);
         }
         break;
      default:
         unreachable("Unsupported descriptor type");
      }
   }
}
666
667 void
panvk_per_arch(UpdateDescriptorSetWithTemplate)668 panvk_per_arch(UpdateDescriptorSetWithTemplate)(
669 VkDevice _device, VkDescriptorSet descriptorSet,
670 VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void *pData)
671 {
672 VK_FROM_HANDLE(panvk_descriptor_set, set, descriptorSet);
673 VK_FROM_HANDLE(vk_descriptor_update_template, template,
674 descriptorUpdateTemplate);
675
676 panvk_per_arch(descriptor_set_write_template)(set, template, pData, false);
677 }
678