1 /*
2 * Copyright © 2019 Raspberry Pi
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include "vk_util.h"
25
26 #include "v3dv_private.h"
27
28 /*
29 * Returns how much space a given descriptor type needs on a bo (GPU
30 * memory).
31 */
32 static uint32_t
descriptor_bo_size(VkDescriptorType type)33 descriptor_bo_size(VkDescriptorType type)
34 {
35 switch(type) {
36 case VK_DESCRIPTOR_TYPE_SAMPLER:
37 return sizeof(struct v3dv_sampler_descriptor);
38 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
39 return sizeof(struct v3dv_combined_image_sampler_descriptor);
40 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
41 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
42 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
43 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
44 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
45 return sizeof(struct v3dv_sampled_image_descriptor);
46 default:
47 return 0;
48 }
49 }
50
51 /*
52 * For a given descriptor defined by the descriptor_set it belongs, its
53 * binding layout, and array_index, it returns the map region assigned to it
54 * from the descriptor pool bo.
55 */
56 static void*
descriptor_bo_map(struct v3dv_descriptor_set * set,const struct v3dv_descriptor_set_binding_layout * binding_layout,uint32_t array_index)57 descriptor_bo_map(struct v3dv_descriptor_set *set,
58 const struct v3dv_descriptor_set_binding_layout *binding_layout,
59 uint32_t array_index)
60 {
61 assert(descriptor_bo_size(binding_layout->type) > 0);
62 return set->pool->bo->map +
63 set->base_offset + binding_layout->descriptor_offset +
64 array_index * descriptor_bo_size(binding_layout->type);
65 }
66
67 static bool
descriptor_type_is_dynamic(VkDescriptorType type)68 descriptor_type_is_dynamic(VkDescriptorType type)
69 {
70 switch (type) {
71 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
72 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
73 return true;
74 break;
75 default:
76 return false;
77 }
78 }
79
80 /*
81 * Tries to get a real descriptor using a descriptor map index from the
82 * descriptor_state + pipeline_layout.
83 */
84 struct v3dv_descriptor *
v3dv_descriptor_map_get_descriptor(struct v3dv_descriptor_state * descriptor_state,struct v3dv_descriptor_map * map,struct v3dv_pipeline_layout * pipeline_layout,uint32_t index,uint32_t * dynamic_offset)85 v3dv_descriptor_map_get_descriptor(struct v3dv_descriptor_state *descriptor_state,
86 struct v3dv_descriptor_map *map,
87 struct v3dv_pipeline_layout *pipeline_layout,
88 uint32_t index,
89 uint32_t *dynamic_offset)
90 {
91 assert(index < map->num_desc);
92
93 uint32_t set_number = map->set[index];
94 assert((descriptor_state->valid & 1 << set_number));
95
96 struct v3dv_descriptor_set *set =
97 descriptor_state->descriptor_sets[set_number];
98 assert(set);
99
100 uint32_t binding_number = map->binding[index];
101 assert(binding_number < set->layout->binding_count);
102
103 const struct v3dv_descriptor_set_binding_layout *binding_layout =
104 &set->layout->binding[binding_number];
105
106 uint32_t array_index = map->array_index[index];
107 assert(array_index < binding_layout->array_size);
108
109 if (descriptor_type_is_dynamic(binding_layout->type)) {
110 uint32_t dynamic_offset_index =
111 pipeline_layout->set[set_number].dynamic_offset_start +
112 binding_layout->dynamic_offset_index + array_index;
113
114 *dynamic_offset = descriptor_state->dynamic_offsets[dynamic_offset_index];
115 }
116
117 return &set->descriptors[binding_layout->descriptor_index + array_index];
118 }
119
120 /* Equivalent to map_get_descriptor but it returns a reloc with the bo
121 * associated with that descriptor (suballocation of the descriptor pool bo)
122 *
123 * It also returns the descriptor type, so the caller could do extra
124 * validation or adding extra offsets if the bo contains more that one field.
125 */
126 static struct v3dv_cl_reloc
v3dv_descriptor_map_get_descriptor_bo(struct v3dv_descriptor_state * descriptor_state,struct v3dv_descriptor_map * map,struct v3dv_pipeline_layout * pipeline_layout,uint32_t index,VkDescriptorType * out_type)127 v3dv_descriptor_map_get_descriptor_bo(struct v3dv_descriptor_state *descriptor_state,
128 struct v3dv_descriptor_map *map,
129 struct v3dv_pipeline_layout *pipeline_layout,
130 uint32_t index,
131 VkDescriptorType *out_type)
132 {
133 assert(index >= 0 && index < map->num_desc);
134
135 uint32_t set_number = map->set[index];
136 assert(descriptor_state->valid & 1 << set_number);
137
138 struct v3dv_descriptor_set *set =
139 descriptor_state->descriptor_sets[set_number];
140 assert(set);
141
142 uint32_t binding_number = map->binding[index];
143 assert(binding_number < set->layout->binding_count);
144
145 const struct v3dv_descriptor_set_binding_layout *binding_layout =
146 &set->layout->binding[binding_number];
147
148 assert(descriptor_bo_size(binding_layout->type) > 0);
149 *out_type = binding_layout->type;
150
151 uint32_t array_index = map->array_index[index];
152 assert(array_index < binding_layout->array_size);
153
154 struct v3dv_cl_reloc reloc = {
155 .bo = set->pool->bo,
156 .offset = set->base_offset + binding_layout->descriptor_offset +
157 array_index * descriptor_bo_size(binding_layout->type),
158 };
159
160 return reloc;
161 }
162
163 /*
164 * The difference between this method and v3dv_descriptor_map_get_descriptor,
165 * is that if the sampler are added as immutable when creating the set layout,
166 * they are bound to the set layout, so not part of the descriptor per
167 * se. This method return early in that case.
168 */
169 const struct v3dv_sampler *
v3dv_descriptor_map_get_sampler(struct v3dv_descriptor_state * descriptor_state,struct v3dv_descriptor_map * map,struct v3dv_pipeline_layout * pipeline_layout,uint32_t index)170 v3dv_descriptor_map_get_sampler(struct v3dv_descriptor_state *descriptor_state,
171 struct v3dv_descriptor_map *map,
172 struct v3dv_pipeline_layout *pipeline_layout,
173 uint32_t index)
174 {
175 assert(index >= 0 && index < map->num_desc);
176
177 uint32_t set_number = map->set[index];
178 assert(descriptor_state->valid & 1 << set_number);
179
180 struct v3dv_descriptor_set *set =
181 descriptor_state->descriptor_sets[set_number];
182 assert(set);
183
184 uint32_t binding_number = map->binding[index];
185 assert(binding_number < set->layout->binding_count);
186
187 const struct v3dv_descriptor_set_binding_layout *binding_layout =
188 &set->layout->binding[binding_number];
189
190 uint32_t array_index = map->array_index[index];
191 assert(array_index < binding_layout->array_size);
192
193 if (binding_layout->immutable_samplers_offset != 0) {
194 assert(binding_layout->type == VK_DESCRIPTOR_TYPE_SAMPLER ||
195 binding_layout->type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
196
197 const struct v3dv_sampler *immutable_samplers =
198 v3dv_immutable_samplers(set->layout, binding_layout);
199
200 assert(immutable_samplers);
201 const struct v3dv_sampler *sampler = &immutable_samplers[array_index];
202 assert(sampler);
203
204 return sampler;
205 }
206
207 struct v3dv_descriptor *descriptor =
208 &set->descriptors[binding_layout->descriptor_index + array_index];
209
210 assert(descriptor->type == VK_DESCRIPTOR_TYPE_SAMPLER ||
211 descriptor->type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
212
213 assert(descriptor->sampler);
214
215 return descriptor->sampler;
216 }
217
218
219 struct v3dv_cl_reloc
v3dv_descriptor_map_get_sampler_state(struct v3dv_descriptor_state * descriptor_state,struct v3dv_descriptor_map * map,struct v3dv_pipeline_layout * pipeline_layout,uint32_t index)220 v3dv_descriptor_map_get_sampler_state(struct v3dv_descriptor_state *descriptor_state,
221 struct v3dv_descriptor_map *map,
222 struct v3dv_pipeline_layout *pipeline_layout,
223 uint32_t index)
224 {
225 VkDescriptorType type;
226 struct v3dv_cl_reloc reloc =
227 v3dv_descriptor_map_get_descriptor_bo(descriptor_state, map,
228 pipeline_layout,
229 index, &type);
230
231 assert(type == VK_DESCRIPTOR_TYPE_SAMPLER ||
232 type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
233
234 if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
235 reloc.offset += offsetof(struct v3dv_combined_image_sampler_descriptor,
236 sampler_state);
237 }
238
239 return reloc;
240 }
241
242 const struct v3dv_format*
v3dv_descriptor_map_get_texture_format(struct v3dv_descriptor_state * descriptor_state,struct v3dv_descriptor_map * map,struct v3dv_pipeline_layout * pipeline_layout,uint32_t index,VkFormat * out_vk_format)243 v3dv_descriptor_map_get_texture_format(struct v3dv_descriptor_state *descriptor_state,
244 struct v3dv_descriptor_map *map,
245 struct v3dv_pipeline_layout *pipeline_layout,
246 uint32_t index,
247 VkFormat *out_vk_format)
248 {
249 struct v3dv_descriptor *descriptor =
250 v3dv_descriptor_map_get_descriptor(descriptor_state, map,
251 pipeline_layout, index, NULL);
252
253 switch (descriptor->type) {
254 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
255 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
256 assert(descriptor->buffer_view);
257 *out_vk_format = descriptor->buffer_view->vk_format;
258 return descriptor->buffer_view->format;
259 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
260 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
261 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
262 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
263 assert(descriptor->image_view);
264 *out_vk_format = descriptor->image_view->vk_format;
265 return descriptor->image_view->format;
266 default:
267 unreachable("descriptor type doesn't has a texture format");
268 }
269 }
270
271 struct v3dv_bo*
v3dv_descriptor_map_get_texture_bo(struct v3dv_descriptor_state * descriptor_state,struct v3dv_descriptor_map * map,struct v3dv_pipeline_layout * pipeline_layout,uint32_t index)272 v3dv_descriptor_map_get_texture_bo(struct v3dv_descriptor_state *descriptor_state,
273 struct v3dv_descriptor_map *map,
274 struct v3dv_pipeline_layout *pipeline_layout,
275 uint32_t index)
276
277 {
278 struct v3dv_descriptor *descriptor =
279 v3dv_descriptor_map_get_descriptor(descriptor_state, map,
280 pipeline_layout, index, NULL);
281
282 switch (descriptor->type) {
283 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
284 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
285 assert(descriptor->buffer_view);
286 return descriptor->buffer_view->buffer->mem->bo;
287 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
288 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
289 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
290 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
291 assert(descriptor->image_view);
292 return descriptor->image_view->image->mem->bo;
293 default:
294 unreachable("descriptor type doesn't has a texture bo");
295 }
296 }
297
298 struct v3dv_cl_reloc
v3dv_descriptor_map_get_texture_shader_state(struct v3dv_descriptor_state * descriptor_state,struct v3dv_descriptor_map * map,struct v3dv_pipeline_layout * pipeline_layout,uint32_t index)299 v3dv_descriptor_map_get_texture_shader_state(struct v3dv_descriptor_state *descriptor_state,
300 struct v3dv_descriptor_map *map,
301 struct v3dv_pipeline_layout *pipeline_layout,
302 uint32_t index)
303 {
304 VkDescriptorType type;
305 struct v3dv_cl_reloc reloc =
306 v3dv_descriptor_map_get_descriptor_bo(descriptor_state, map,
307 pipeline_layout,
308 index, &type);
309
310 assert(type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
311 type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
312 type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT ||
313 type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ||
314 type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER ||
315 type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
316
317 if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
318 reloc.offset += offsetof(struct v3dv_combined_image_sampler_descriptor,
319 texture_state);
320 }
321
322 return reloc;
323 }
324
325 /*
326 * As anv and tu already points:
327 *
328 * "Pipeline layouts. These have nothing to do with the pipeline. They are
329 * just multiple descriptor set layouts pasted together."
330 */
331
332 VkResult
v3dv_CreatePipelineLayout(VkDevice _device,const VkPipelineLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkPipelineLayout * pPipelineLayout)333 v3dv_CreatePipelineLayout(VkDevice _device,
334 const VkPipelineLayoutCreateInfo *pCreateInfo,
335 const VkAllocationCallbacks *pAllocator,
336 VkPipelineLayout *pPipelineLayout)
337 {
338 V3DV_FROM_HANDLE(v3dv_device, device, _device);
339 struct v3dv_pipeline_layout *layout;
340
341 assert(pCreateInfo->sType ==
342 VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);
343
344 layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
345 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
346 if (layout == NULL)
347 return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
348
349 layout->num_sets = pCreateInfo->setLayoutCount;
350
351 uint32_t dynamic_offset_count = 0;
352 for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
353 V3DV_FROM_HANDLE(v3dv_descriptor_set_layout, set_layout,
354 pCreateInfo->pSetLayouts[set]);
355 layout->set[set].layout = set_layout;
356
357 layout->set[set].dynamic_offset_start = dynamic_offset_count;
358 for (uint32_t b = 0; b < set_layout->binding_count; b++) {
359 dynamic_offset_count += set_layout->binding[b].array_size *
360 set_layout->binding[b].dynamic_offset_count;
361 }
362 }
363
364 layout->push_constant_size = 0;
365 for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
366 const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
367 layout->push_constant_size =
368 MAX2(layout->push_constant_size, range->offset + range->size);
369 }
370
371 layout->push_constant_size = align(layout->push_constant_size, 4096);
372
373 layout->dynamic_offset_count = dynamic_offset_count;
374
375 *pPipelineLayout = v3dv_pipeline_layout_to_handle(layout);
376
377 return VK_SUCCESS;
378 }
379
380 void
v3dv_DestroyPipelineLayout(VkDevice _device,VkPipelineLayout _pipelineLayout,const VkAllocationCallbacks * pAllocator)381 v3dv_DestroyPipelineLayout(VkDevice _device,
382 VkPipelineLayout _pipelineLayout,
383 const VkAllocationCallbacks *pAllocator)
384 {
385 V3DV_FROM_HANDLE(v3dv_device, device, _device);
386 V3DV_FROM_HANDLE(v3dv_pipeline_layout, pipeline_layout, _pipelineLayout);
387
388 if (!pipeline_layout)
389 return;
390 vk_free2(&device->alloc, pAllocator, pipeline_layout);
391 }
392
393 VkResult
v3dv_CreateDescriptorPool(VkDevice _device,const VkDescriptorPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorPool * pDescriptorPool)394 v3dv_CreateDescriptorPool(VkDevice _device,
395 const VkDescriptorPoolCreateInfo *pCreateInfo,
396 const VkAllocationCallbacks *pAllocator,
397 VkDescriptorPool *pDescriptorPool)
398 {
399 V3DV_FROM_HANDLE(v3dv_device, device, _device);
400 struct v3dv_descriptor_pool *pool;
401 /* size is for the vulkan object descriptor pool. The final size would
402 * depend on some of FREE_DESCRIPTOR flags used
403 */
404 uint64_t size = sizeof(struct v3dv_descriptor_pool);
405 /* bo_size is for the descriptor related info that we need to have on a GPU
406 * address (so on v3dv_bo_alloc allocated memory), like for example the
407 * texture sampler state. Note that not all the descriptors use it
408 */
409 uint32_t bo_size = 0;
410 uint32_t descriptor_count = 0;
411
412 for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
413 /* Verify supported descriptor type */
414 switch(pCreateInfo->pPoolSizes[i].type) {
415 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
416 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
417 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
418 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
419 case VK_DESCRIPTOR_TYPE_SAMPLER:
420 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
421 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
422 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
423 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
424 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
425 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
426 break;
427 default:
428 unreachable("Unimplemented descriptor type");
429 break;
430 }
431
432 descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
433 bo_size += descriptor_bo_size(pCreateInfo->pPoolSizes[i].type) *
434 pCreateInfo->pPoolSizes[i].descriptorCount;
435 }
436
437 if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
438 uint64_t host_size =
439 pCreateInfo->maxSets * sizeof(struct v3dv_descriptor_set);
440 host_size += sizeof(struct v3dv_descriptor) * descriptor_count;
441 size += host_size;
442 } else {
443 size += sizeof(struct v3dv_descriptor_pool_entry) * pCreateInfo->maxSets;
444 }
445
446 pool = vk_alloc2(&device->alloc, pAllocator, size, 8,
447 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
448
449 if (!pool)
450 return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
451
452 memset(pool, 0, sizeof(*pool));
453
454 if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
455 pool->host_memory_base = (uint8_t*)pool + sizeof(struct v3dv_descriptor_pool);
456 pool->host_memory_ptr = pool->host_memory_base;
457 pool->host_memory_end = (uint8_t*)pool + size;
458 }
459
460 pool->max_entry_count = pCreateInfo->maxSets;
461
462 if (bo_size > 0) {
463 pool->bo = v3dv_bo_alloc(device, bo_size, "descriptor pool bo", true);
464 if (!pool->bo)
465 goto out_of_device_memory;
466
467 bool ok = v3dv_bo_map(device, pool->bo, pool->bo->size);
468 if (!ok)
469 goto out_of_device_memory;
470
471 pool->current_offset = 0;
472 } else {
473 pool->bo = NULL;
474 }
475
476 *pDescriptorPool = v3dv_descriptor_pool_to_handle(pool);
477
478 return VK_SUCCESS;
479
480 out_of_device_memory:
481 vk_free2(&device->alloc, pAllocator, pool);
482 return vk_error(device->instance, VK_ERROR_OUT_OF_DEVICE_MEMORY);
483 }
484
/* Frees a descriptor set that was individually allocated (pools created
 * without FREE_DESCRIPTOR_SET_BIT use a single host arena and must never
 * reach here, hence the assert). When free_bo is set, the set's entry is
 * also removed from the pool's entry list, releasing its bo subregion for
 * reuse by future allocations.
 */
static void
descriptor_set_destroy(struct v3dv_device *device,
                       struct v3dv_descriptor_pool *pool,
                       struct v3dv_descriptor_set *set,
                       bool free_bo)
{
   assert(!pool->host_memory_base);

   /* The host_memory_base re-check mirrors the assert so release builds
    * (where assert compiles out) still never touch the entry list for
    * arena-backed pools.
    */
   if (free_bo && !pool->host_memory_base) {
      for (uint32_t i = 0; i < pool->entry_count; i++) {
         if (pool->entries[i].set == set) {
            /* Compact the sorted entry list over the removed slot */
            memmove(&pool->entries[i], &pool->entries[i+1],
                    sizeof(pool->entries[i]) * (pool->entry_count - i - 1));
            --pool->entry_count;
            break;
         }
      }
   }
   vk_free2(&device->alloc, NULL, set);
}
505
506 void
v3dv_DestroyDescriptorPool(VkDevice _device,VkDescriptorPool _pool,const VkAllocationCallbacks * pAllocator)507 v3dv_DestroyDescriptorPool(VkDevice _device,
508 VkDescriptorPool _pool,
509 const VkAllocationCallbacks *pAllocator)
510 {
511 V3DV_FROM_HANDLE(v3dv_device, device, _device);
512 V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, _pool);
513
514 if (!pool)
515 return;
516
517 if (!pool->host_memory_base) {
518 for(int i = 0; i < pool->entry_count; ++i) {
519 descriptor_set_destroy(device, pool, pool->entries[i].set, false);
520 }
521 }
522
523 if (pool->bo) {
524 v3dv_bo_free(device, pool->bo);
525 pool->bo = NULL;
526 }
527
528 vk_free2(&device->alloc, pAllocator, pool);
529 }
530
531 VkResult
v3dv_ResetDescriptorPool(VkDevice _device,VkDescriptorPool descriptorPool,VkDescriptorPoolResetFlags flags)532 v3dv_ResetDescriptorPool(VkDevice _device,
533 VkDescriptorPool descriptorPool,
534 VkDescriptorPoolResetFlags flags)
535 {
536 V3DV_FROM_HANDLE(v3dv_device, device, _device);
537 V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, descriptorPool);
538
539 if (!pool->host_memory_base) {
540 for(int i = 0; i < pool->entry_count; ++i) {
541 descriptor_set_destroy(device, pool, pool->entries[i].set, false);
542 }
543 }
544
545 pool->entry_count = 0;
546 pool->host_memory_ptr = pool->host_memory_base;
547 pool->current_offset = 0;
548
549 return VK_SUCCESS;
550 }
551
552 static int
binding_compare(const void * av,const void * bv)553 binding_compare(const void *av, const void *bv)
554 {
555 const VkDescriptorSetLayoutBinding *a =
556 (const VkDescriptorSetLayoutBinding *) av;
557 const VkDescriptorSetLayoutBinding *b =
558 (const VkDescriptorSetLayoutBinding *) bv;
559
560 return (a->binding < b->binding) ? -1 : (a->binding > b->binding) ? 1 : 0;
561 }
562
563 static VkDescriptorSetLayoutBinding *
create_sorted_bindings(const VkDescriptorSetLayoutBinding * bindings,unsigned count,struct v3dv_device * device,const VkAllocationCallbacks * pAllocator)564 create_sorted_bindings(const VkDescriptorSetLayoutBinding *bindings,
565 unsigned count,
566 struct v3dv_device *device,
567 const VkAllocationCallbacks *pAllocator)
568 {
569 VkDescriptorSetLayoutBinding *sorted_bindings =
570 vk_alloc2(&device->alloc, pAllocator,
571 count * sizeof(VkDescriptorSetLayoutBinding),
572 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
573
574 if (!sorted_bindings)
575 return NULL;
576
577 memcpy(sorted_bindings, bindings,
578 count * sizeof(VkDescriptorSetLayoutBinding));
579
580 qsort(sorted_bindings, count, sizeof(VkDescriptorSetLayoutBinding),
581 binding_compare);
582
583 return sorted_bindings;
584 }
585
586 VkResult
v3dv_CreateDescriptorSetLayout(VkDevice _device,const VkDescriptorSetLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorSetLayout * pSetLayout)587 v3dv_CreateDescriptorSetLayout(VkDevice _device,
588 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
589 const VkAllocationCallbacks *pAllocator,
590 VkDescriptorSetLayout *pSetLayout)
591 {
592 V3DV_FROM_HANDLE(v3dv_device, device, _device);
593 struct v3dv_descriptor_set_layout *set_layout;
594
595 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
596
597 int32_t max_binding = pCreateInfo->bindingCount > 0 ? 0 : -1;
598 uint32_t immutable_sampler_count = 0;
599 for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
600 max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
601
602 /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
603 *
604 * "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
605 * VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
606 * pImmutableSamplers can be used to initialize a set of immutable
607 * samplers. [...] If descriptorType is not one of these descriptor
608 * types, then pImmutableSamplers is ignored.
609 *
610 * We need to be careful here and only parse pImmutableSamplers if we
611 * have one of the right descriptor types.
612 */
613 VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;
614 if ((desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
615 desc_type == VK_DESCRIPTOR_TYPE_SAMPLER) &&
616 pCreateInfo->pBindings[j].pImmutableSamplers) {
617 immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
618 }
619 }
620
621 uint32_t samplers_offset = sizeof(struct v3dv_descriptor_set_layout) +
622 (max_binding + 1) * sizeof(set_layout->binding[0]);
623 uint32_t size = samplers_offset +
624 immutable_sampler_count * sizeof(struct v3dv_sampler);
625
626 set_layout = vk_alloc2(&device->alloc, pAllocator, size, 8,
627 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
628
629 if (!set_layout)
630 return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
631
632 /* We just allocate all the immutable samplers at the end of the struct */
633 struct v3dv_sampler *samplers = (void*) &set_layout->binding[max_binding + 1];
634
635 VkDescriptorSetLayoutBinding *bindings = NULL;
636 if (pCreateInfo->bindingCount > 0) {
637 assert(max_binding >= 0);
638 bindings = create_sorted_bindings(pCreateInfo->pBindings,
639 pCreateInfo->bindingCount,
640 device, pAllocator);
641 if (!bindings) {
642 vk_free2(&device->alloc, pAllocator, set_layout);
643 return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
644 }
645 }
646
647 memset(set_layout->binding, 0,
648 size - sizeof(struct v3dv_descriptor_set_layout));
649
650 set_layout->binding_count = max_binding + 1;
651 set_layout->flags = pCreateInfo->flags;
652 set_layout->shader_stages = 0;
653 set_layout->bo_size = 0;
654
655 uint32_t descriptor_count = 0;
656 uint32_t dynamic_offset_count = 0;
657
658 for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
659 const VkDescriptorSetLayoutBinding *binding = bindings + i;
660 uint32_t binding_number = binding->binding;
661
662 switch (binding->descriptorType) {
663 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
664 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
665 break;
666 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
667 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
668 set_layout->binding[binding_number].dynamic_offset_count = 1;
669 break;
670 case VK_DESCRIPTOR_TYPE_SAMPLER:
671 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
672 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
673 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
674 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
675 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
676 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
677 /* Nothing here, just to keep the descriptor type filtering below */
678 break;
679 default:
680 unreachable("Unknown descriptor type\n");
681 break;
682 }
683
684 set_layout->binding[binding_number].type = binding->descriptorType;
685 set_layout->binding[binding_number].array_size = binding->descriptorCount;
686 set_layout->binding[binding_number].descriptor_index = descriptor_count;
687 set_layout->binding[binding_number].dynamic_offset_index = dynamic_offset_count;
688
689 if ((binding->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
690 binding->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) &&
691 binding->pImmutableSamplers) {
692
693 set_layout->binding[binding_number].immutable_samplers_offset = samplers_offset;
694
695 for (uint32_t i = 0; i < binding->descriptorCount; i++)
696 samplers[i] = *v3dv_sampler_from_handle(binding->pImmutableSamplers[i]);
697
698 samplers += binding->descriptorCount;
699 samplers_offset += sizeof(struct v3dv_sampler) * binding->descriptorCount;
700 }
701
702 descriptor_count += binding->descriptorCount;
703 dynamic_offset_count += binding->descriptorCount *
704 set_layout->binding[binding_number].dynamic_offset_count;
705
706 /* FIXME: right now we don't use shader_stages. We could explore if we
707 * could use it to add another filter to upload or allocate the
708 * descriptor data.
709 */
710 set_layout->shader_stages |= binding->stageFlags;
711
712 set_layout->binding[binding_number].descriptor_offset = set_layout->bo_size;
713 set_layout->bo_size +=
714 descriptor_bo_size(set_layout->binding[binding_number].type) *
715 binding->descriptorCount;
716 }
717
718 if (bindings)
719 vk_free2(&device->alloc, pAllocator, bindings);
720
721 set_layout->descriptor_count = descriptor_count;
722 set_layout->dynamic_offset_count = dynamic_offset_count;
723
724 *pSetLayout = v3dv_descriptor_set_layout_to_handle(set_layout);
725
726 return VK_SUCCESS;
727 }
728
729 void
v3dv_DestroyDescriptorSetLayout(VkDevice _device,VkDescriptorSetLayout _set_layout,const VkAllocationCallbacks * pAllocator)730 v3dv_DestroyDescriptorSetLayout(VkDevice _device,
731 VkDescriptorSetLayout _set_layout,
732 const VkAllocationCallbacks *pAllocator)
733 {
734 V3DV_FROM_HANDLE(v3dv_device, device, _device);
735 V3DV_FROM_HANDLE(v3dv_descriptor_set_layout, set_layout, _set_layout);
736
737 if (!set_layout)
738 return;
739
740 vk_free2(&device->alloc, pAllocator, set_layout);
741 }
742
743 static VkResult
descriptor_set_create(struct v3dv_device * device,struct v3dv_descriptor_pool * pool,const struct v3dv_descriptor_set_layout * layout,struct v3dv_descriptor_set ** out_set)744 descriptor_set_create(struct v3dv_device *device,
745 struct v3dv_descriptor_pool *pool,
746 const struct v3dv_descriptor_set_layout *layout,
747 struct v3dv_descriptor_set **out_set)
748 {
749 struct v3dv_descriptor_set *set;
750 uint32_t descriptor_count = layout->descriptor_count;
751 unsigned mem_size = sizeof(struct v3dv_descriptor_set) +
752 sizeof(struct v3dv_descriptor) * descriptor_count;
753
754 if (pool->host_memory_base) {
755 if (pool->host_memory_end - pool->host_memory_ptr < mem_size)
756 return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
757
758 set = (struct v3dv_descriptor_set*)pool->host_memory_ptr;
759 pool->host_memory_ptr += mem_size;
760 } else {
761 set = vk_alloc2(&device->alloc, NULL, mem_size, 8,
762 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
763
764 if (!set)
765 return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
766 }
767
768 memset(set, 0, mem_size);
769 set->pool = pool;
770
771 set->layout = layout;
772
773 /* FIXME: VK_EXT_descriptor_indexing introduces
774 * VARIABLE_DESCRIPTOR_LAYOUT_COUNT. That would affect the layout_size used
775 * below for bo allocation
776 */
777
778 uint32_t offset = 0;
779 uint32_t index = pool->entry_count;
780
781 if (layout->bo_size) {
782 if (!pool->host_memory_base && pool->entry_count == pool->max_entry_count) {
783 vk_free2(&device->alloc, NULL, set);
784 return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
785 }
786
787 /* We first try to allocate linearly fist, so that we don't spend time
788 * looking for gaps if the app only allocates & resets via the pool.
789 *
790 * If that fails, we try to find a gap from previously freed subregions
791 * iterating through the descriptor pool entries. Note that we are not
792 * doing that if we have a pool->host_memory_base. We only have that if
793 * VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT is not set, so in
794 * that case the user can't free subregions, so it doesn't make sense to
795 * even try (or track those subregions).
796 */
797 if (pool->current_offset + layout->bo_size <= pool->bo->size) {
798 offset = pool->current_offset;
799 pool->current_offset += layout->bo_size;
800 } else if (!pool->host_memory_base) {
801 for (index = 0; index < pool->entry_count; index++) {
802 if (pool->entries[index].offset - offset >= layout->bo_size)
803 break;
804 offset = pool->entries[index].offset + pool->entries[index].size;
805 }
806 if (pool->bo->size - offset < layout->bo_size) {
807 vk_free2(&device->alloc, NULL, set);
808 return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
809 }
810 memmove(&pool->entries[index + 1], &pool->entries[index],
811 sizeof(pool->entries[0]) * (pool->entry_count - index));
812 } else {
813 assert(pool->host_memory_base);
814 vk_free2(&device->alloc, NULL, set);
815 return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
816 }
817
818 set->base_offset = offset;
819 }
820
821 if (!pool->host_memory_base) {
822 pool->entries[index].set = set;
823 pool->entries[index].offset = offset;
824 pool->entries[index].size = layout->bo_size;
825 pool->entry_count++;
826 }
827
828 /* Go through and fill out immutable samplers if we have any */
829 for (uint32_t b = 0; b < layout->binding_count; b++) {
830 if (layout->binding[b].immutable_samplers_offset == 0)
831 continue;
832
833 const struct v3dv_sampler *samplers =
834 (const struct v3dv_sampler *)((const char *)layout +
835 layout->binding[b].immutable_samplers_offset);
836
837 for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
838 uint32_t combined_offset =
839 layout->binding[b].type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ?
840 offsetof(struct v3dv_combined_image_sampler_descriptor, sampler_state) :
841 0;
842
843 void *desc_map = descriptor_bo_map(set, &layout->binding[b], i);
844 desc_map += combined_offset;
845
846 memcpy(desc_map,
847 samplers[i].sampler_state,
848 cl_packet_length(SAMPLER_STATE));
849 }
850 }
851
852 *out_set = set;
853
854 return VK_SUCCESS;
855 }
856
857 VkResult
v3dv_AllocateDescriptorSets(VkDevice _device,const VkDescriptorSetAllocateInfo * pAllocateInfo,VkDescriptorSet * pDescriptorSets)858 v3dv_AllocateDescriptorSets(VkDevice _device,
859 const VkDescriptorSetAllocateInfo *pAllocateInfo,
860 VkDescriptorSet *pDescriptorSets)
861 {
862 V3DV_FROM_HANDLE(v3dv_device, device, _device);
863 V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, pAllocateInfo->descriptorPool);
864
865 VkResult result = VK_SUCCESS;
866 struct v3dv_descriptor_set *set = NULL;
867 uint32_t i = 0;
868
869 for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
870 V3DV_FROM_HANDLE(v3dv_descriptor_set_layout, layout,
871 pAllocateInfo->pSetLayouts[i]);
872
873 result = descriptor_set_create(device, pool, layout, &set);
874 if (result != VK_SUCCESS)
875 break;
876
877 pDescriptorSets[i] = v3dv_descriptor_set_to_handle(set);
878 }
879
880 if (result != VK_SUCCESS) {
881 v3dv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
882 i, pDescriptorSets);
883 for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
884 pDescriptorSets[i] = VK_NULL_HANDLE;
885 }
886 }
887
888 return result;
889 }
890
891 VkResult
v3dv_FreeDescriptorSets(VkDevice _device,VkDescriptorPool descriptorPool,uint32_t count,const VkDescriptorSet * pDescriptorSets)892 v3dv_FreeDescriptorSets(VkDevice _device,
893 VkDescriptorPool descriptorPool,
894 uint32_t count,
895 const VkDescriptorSet *pDescriptorSets)
896 {
897 V3DV_FROM_HANDLE(v3dv_device, device, _device);
898 V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, descriptorPool);
899
900 for (uint32_t i = 0; i < count; i++) {
901 V3DV_FROM_HANDLE(v3dv_descriptor_set, set, pDescriptorSets[i]);
902 if (set && !pool->host_memory_base)
903 descriptor_set_destroy(device, pool, set, true);
904 }
905
906 return VK_SUCCESS;
907 }
908
909 static void
descriptor_bo_copy(struct v3dv_descriptor_set * dst_set,const struct v3dv_descriptor_set_binding_layout * dst_binding_layout,uint32_t dst_array_index,struct v3dv_descriptor_set * src_set,const struct v3dv_descriptor_set_binding_layout * src_binding_layout,uint32_t src_array_index)910 descriptor_bo_copy(struct v3dv_descriptor_set *dst_set,
911 const struct v3dv_descriptor_set_binding_layout *dst_binding_layout,
912 uint32_t dst_array_index,
913 struct v3dv_descriptor_set *src_set,
914 const struct v3dv_descriptor_set_binding_layout *src_binding_layout,
915 uint32_t src_array_index)
916 {
917 assert(dst_binding_layout->type == src_binding_layout->type);
918
919 void *dst_map = descriptor_bo_map(dst_set, dst_binding_layout, dst_array_index);
920 void *src_map = descriptor_bo_map(src_set, src_binding_layout, src_array_index);
921
922 memcpy(dst_map, src_map, descriptor_bo_size(src_binding_layout->type));
923 }
924
/* Writes the hardware shader state for an image and/or sampler descriptor
 * into the set's bo at the given binding/array slot. Either iview or
 * sampler may be NULL, in which case the corresponding state is skipped.
 */
static void
write_image_descriptor(VkDescriptorType desc_type,
                       struct v3dv_descriptor_set *set,
                       const struct v3dv_descriptor_set_binding_layout *binding_layout,
                       struct v3dv_image_view *iview,
                       struct v3dv_sampler *sampler,
                       uint32_t array_index)
{
   void *desc_map = descriptor_bo_map(set, binding_layout, array_index);

   if (iview) {
      /* State 1 is used only for storage-image access to cube array views;
       * everything else uses state 0.
       */
      const uint32_t tex_state_index =
         iview->type != VK_IMAGE_VIEW_TYPE_CUBE_ARRAY ||
         desc_type != VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ? 0 : 1;
      memcpy(desc_map,
             iview->texture_shader_state[tex_state_index],
             sizeof(iview->texture_shader_state[0]));
      /* Advance past the texture state so a sampler write below lands in
       * the sampler_state slot of a combined image-sampler descriptor.
       * NOTE(review): when iview is NULL (pure sampler descriptors) the
       * sampler state is written at offset 0 instead — presumably
       * v3dv_sampler_descriptor starts with the sampler state; confirm
       * against v3dv_private.h.
       */
      desc_map += offsetof(struct v3dv_combined_image_sampler_descriptor,
                           sampler_state);
   }

   if (sampler && !binding_layout->immutable_samplers_offset) {
      /* For immutable samplers this was already done as part of the
       * descriptor set create, as that info can't change later
       */
      memcpy(desc_map,
             sampler->sampler_state,
             sizeof(sampler->sampler_state));
   }
}
955
956
957 static void
write_buffer_view_descriptor(VkDescriptorType desc_type,struct v3dv_descriptor_set * set,const struct v3dv_descriptor_set_binding_layout * binding_layout,struct v3dv_buffer_view * bview,uint32_t array_index)958 write_buffer_view_descriptor(VkDescriptorType desc_type,
959 struct v3dv_descriptor_set *set,
960 const struct v3dv_descriptor_set_binding_layout *binding_layout,
961 struct v3dv_buffer_view *bview,
962 uint32_t array_index)
963 {
964 void *desc_map = descriptor_bo_map(set, binding_layout, array_index);
965
966 assert(bview);
967
968 memcpy(desc_map,
969 bview->texture_shader_state,
970 sizeof(bview->texture_shader_state));
971 }
972
973 void
v3dv_UpdateDescriptorSets(VkDevice _device,uint32_t descriptorWriteCount,const VkWriteDescriptorSet * pDescriptorWrites,uint32_t descriptorCopyCount,const VkCopyDescriptorSet * pDescriptorCopies)974 v3dv_UpdateDescriptorSets(VkDevice _device,
975 uint32_t descriptorWriteCount,
976 const VkWriteDescriptorSet *pDescriptorWrites,
977 uint32_t descriptorCopyCount,
978 const VkCopyDescriptorSet *pDescriptorCopies)
979 {
980 for (uint32_t i = 0; i < descriptorWriteCount; i++) {
981 const VkWriteDescriptorSet *writeset = &pDescriptorWrites[i];
982 V3DV_FROM_HANDLE(v3dv_descriptor_set, set, writeset->dstSet);
983
984 const struct v3dv_descriptor_set_binding_layout *binding_layout =
985 set->layout->binding + writeset->dstBinding;
986
987 struct v3dv_descriptor *descriptor = set->descriptors;
988
989 descriptor += binding_layout->descriptor_index;
990 descriptor += writeset->dstArrayElement;
991
992 for (uint32_t j = 0; j < writeset->descriptorCount; ++j) {
993 descriptor->type = writeset->descriptorType;
994
995 switch(writeset->descriptorType) {
996
997 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
998 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
999 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1000 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: {
1001 const VkDescriptorBufferInfo *buffer_info = writeset->pBufferInfo + j;
1002 V3DV_FROM_HANDLE(v3dv_buffer, buffer, buffer_info->buffer);
1003
1004 descriptor->buffer = buffer;
1005 descriptor->offset = buffer_info->offset;
1006 if (buffer_info->range == VK_WHOLE_SIZE) {
1007 descriptor->range = buffer->size - buffer_info->offset;
1008 } else {
1009 assert(descriptor->range <= UINT32_MAX);
1010 descriptor->range = buffer_info->range;
1011 }
1012 break;
1013 }
1014 case VK_DESCRIPTOR_TYPE_SAMPLER: {
1015 /* If we are here we shouldn't be modifying a immutable sampler,
1016 * so we don't ensure that would work or not crash. But let the
1017 * validation layers check that
1018 */
1019 const VkDescriptorImageInfo *image_info = writeset->pImageInfo + j;
1020 V3DV_FROM_HANDLE(v3dv_sampler, sampler, image_info->sampler);
1021
1022 descriptor->sampler = sampler;
1023
1024 write_image_descriptor(writeset->descriptorType,
1025 set, binding_layout, NULL, sampler,
1026 writeset->dstArrayElement + j);
1027
1028 break;
1029 }
1030 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1031 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
1032 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: {
1033 const VkDescriptorImageInfo *image_info = writeset->pImageInfo + j;
1034 V3DV_FROM_HANDLE(v3dv_image_view, iview, image_info->imageView);
1035
1036 descriptor->image_view = iview;
1037
1038 write_image_descriptor(writeset->descriptorType,
1039 set, binding_layout, iview, NULL,
1040 writeset->dstArrayElement + j);
1041
1042 break;
1043 }
1044 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
1045 const VkDescriptorImageInfo *image_info = writeset->pImageInfo + j;
1046 V3DV_FROM_HANDLE(v3dv_image_view, iview, image_info->imageView);
1047 V3DV_FROM_HANDLE(v3dv_sampler, sampler, image_info->sampler);
1048
1049 descriptor->image_view = iview;
1050 descriptor->sampler = sampler;
1051
1052 write_image_descriptor(writeset->descriptorType,
1053 set, binding_layout, iview, sampler,
1054 writeset->dstArrayElement + j);
1055
1056 break;
1057 }
1058 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1059 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
1060 V3DV_FROM_HANDLE(v3dv_buffer_view, buffer_view,
1061 writeset->pTexelBufferView[j]);
1062
1063 assert(buffer_view);
1064
1065 descriptor->buffer_view = buffer_view;
1066
1067 write_buffer_view_descriptor(writeset->descriptorType,
1068 set, binding_layout, buffer_view,
1069 writeset->dstArrayElement + j);
1070 break;
1071 }
1072 default:
1073 unreachable("unimplemented descriptor type");
1074 break;
1075 }
1076 descriptor++;
1077 }
1078 }
1079
1080 for (uint32_t i = 0; i < descriptorCopyCount; i++) {
1081 const VkCopyDescriptorSet *copyset = &pDescriptorCopies[i];
1082 V3DV_FROM_HANDLE(v3dv_descriptor_set, src_set,
1083 copyset->srcSet);
1084 V3DV_FROM_HANDLE(v3dv_descriptor_set, dst_set,
1085 copyset->dstSet);
1086
1087 const struct v3dv_descriptor_set_binding_layout *src_binding_layout =
1088 src_set->layout->binding + copyset->srcBinding;
1089 const struct v3dv_descriptor_set_binding_layout *dst_binding_layout =
1090 dst_set->layout->binding + copyset->dstBinding;
1091
1092 assert(src_binding_layout->type == dst_binding_layout->type);
1093
1094 struct v3dv_descriptor *src_descriptor = src_set->descriptors;
1095 struct v3dv_descriptor *dst_descriptor = dst_set->descriptors;
1096
1097 src_descriptor += src_binding_layout->descriptor_index;
1098 src_descriptor += copyset->srcArrayElement;
1099
1100 dst_descriptor += dst_binding_layout->descriptor_index;
1101 dst_descriptor += copyset->dstArrayElement;
1102
1103 for (uint32_t j = 0; j < copyset->descriptorCount; j++) {
1104 *dst_descriptor = *src_descriptor;
1105 dst_descriptor++;
1106 src_descriptor++;
1107
1108 if (descriptor_bo_size(src_binding_layout->type) > 0) {
1109 descriptor_bo_copy(dst_set, dst_binding_layout,
1110 j + copyset->dstArrayElement,
1111 src_set, src_binding_layout,
1112 j + copyset->srcArrayElement);
1113 }
1114
1115 }
1116 }
1117 }
1118