/*
 * Copyright © 2019 Raspberry Pi
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "vk_descriptors.h"
#include "vk_util.h"

#include "v3dv_private.h"

/*
 * Returns the map region assigned to a given descriptor from the descriptor
 * pool BO, where the descriptor is identified by the descriptor set it
 * belongs to, its binding layout and its array index.
 */
static void *
descriptor_bo_map(struct v3dv_device *device,
                  struct v3dv_descriptor_set *set,
                  const struct v3dv_descriptor_set_binding_layout *binding_layout,
                  uint32_t array_index)
{
   assert(v3dv_X(device, descriptor_bo_size)(binding_layout->type) > 0);
   return set->pool->bo->map +
      set->base_offset + binding_layout->descriptor_offset +
      array_index * v3dv_X(device, descriptor_bo_size)(binding_layout->type);
}

static bool
descriptor_type_is_dynamic(VkDescriptorType type)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      return true;
   default:
      return false;
   }
}

/*
 * Tries to fetch the actual descriptor that a given descriptor map index
 * refers to, using the current descriptor_state and the pipeline_layout.
 */
struct v3dv_descriptor *
v3dv_descriptor_map_get_descriptor(struct v3dv_descriptor_state *descriptor_state,
                                   struct v3dv_descriptor_map *map,
                                   struct v3dv_pipeline_layout *pipeline_layout,
                                   uint32_t index,
                                   uint32_t *dynamic_offset)
{
   assert(index < map->num_desc);

   uint32_t set_number = map->set[index];
   assert(descriptor_state->valid & 1 << set_number);

   struct v3dv_descriptor_set *set =
      descriptor_state->descriptor_sets[set_number];
   assert(set);

   uint32_t binding_number = map->binding[index];
   assert(binding_number < set->layout->binding_count);

   const struct v3dv_descriptor_set_binding_layout *binding_layout =
      &set->layout->binding[binding_number];

   uint32_t array_index = map->array_index[index];
   assert(array_index < binding_layout->array_size);

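   /* For dynamic buffers, the offset provided at vkCmdBindDescriptorSets
    * time lives in a flat array in the descriptor state: index it with the
    * set's dynamic offset start plus the binding's base index plus the
    * array element.
    */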
   if (descriptor_type_is_dynamic(binding_layout->type)) {
      uint32_t dynamic_offset_index =
         pipeline_layout->set[set_number].dynamic_offset_start +
         binding_layout->dynamic_offset_index + array_index;

      *dynamic_offset = descriptor_state->dynamic_offsets[dynamic_offset_index];
   }

   return &set->descriptors[binding_layout->descriptor_index + array_index];
}

/* Equivalent to map_get_descriptor but it returns a reloc with the bo
 * associated with that descriptor (a suballocation of the descriptor pool
 * bo).
 *
 * It also returns the descriptor type, so the caller can do extra
 * validation or add extra offsets if the bo contains more than one field.
 */
static struct v3dv_cl_reloc
v3dv_descriptor_map_get_descriptor_bo(struct v3dv_device *device,
                                      struct v3dv_descriptor_state *descriptor_state,
                                      struct v3dv_descriptor_map *map,
                                      struct v3dv_pipeline_layout *pipeline_layout,
                                      uint32_t index,
                                      VkDescriptorType *out_type)
{
   assert(index < map->num_desc);

   uint32_t set_number = map->set[index];
   assert(descriptor_state->valid & 1 << set_number);

   struct v3dv_descriptor_set *set =
      descriptor_state->descriptor_sets[set_number];
   assert(set);

   uint32_t binding_number = map->binding[index];
   assert(binding_number < set->layout->binding_count);

   const struct v3dv_descriptor_set_binding_layout *binding_layout =
      &set->layout->binding[binding_number];

   assert(v3dv_X(device, descriptor_bo_size)(binding_layout->type) > 0);
   *out_type = binding_layout->type;

   uint32_t array_index = map->array_index[index];
   assert(array_index < binding_layout->array_size);

   struct v3dv_cl_reloc reloc = {
      .bo = set->pool->bo,
      .offset = set->base_offset + binding_layout->descriptor_offset +
         array_index * v3dv_X(device, descriptor_bo_size)(binding_layout->type),
   };

   return reloc;
}

/*
 * The difference between this method and v3dv_descriptor_map_get_descriptor
 * is that, if the samplers were added as immutable when creating the set
 * layout, they are bound to the set layout and are not part of the
 * descriptor per se, so this method returns early in that case.
 */
const struct v3dv_sampler *
v3dv_descriptor_map_get_sampler(struct v3dv_descriptor_state *descriptor_state,
                                struct v3dv_descriptor_map *map,
                                struct v3dv_pipeline_layout *pipeline_layout,
                                uint32_t index)
{
   assert(index < map->num_desc);

   uint32_t set_number = map->set[index];
   assert(descriptor_state->valid & 1 << set_number);

   struct v3dv_descriptor_set *set =
      descriptor_state->descriptor_sets[set_number];
   assert(set);

   uint32_t binding_number = map->binding[index];
   assert(binding_number < set->layout->binding_count);

   const struct v3dv_descriptor_set_binding_layout *binding_layout =
      &set->layout->binding[binding_number];

   uint32_t array_index = map->array_index[index];
   assert(array_index < binding_layout->array_size);

   if (binding_layout->immutable_samplers_offset != 0) {
      assert(binding_layout->type == VK_DESCRIPTOR_TYPE_SAMPLER ||
             binding_layout->type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);

      const struct v3dv_sampler *immutable_samplers =
         v3dv_immutable_samplers(set->layout, binding_layout);

      assert(immutable_samplers);
      const struct v3dv_sampler *sampler = &immutable_samplers[array_index];
      assert(sampler);

      return sampler;
   }

   struct v3dv_descriptor *descriptor =
      &set->descriptors[binding_layout->descriptor_index + array_index];

   assert(descriptor->type == VK_DESCRIPTOR_TYPE_SAMPLER ||
          descriptor->type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);

   assert(descriptor->sampler);

   return descriptor->sampler;
}

struct v3dv_cl_reloc
v3dv_descriptor_map_get_sampler_state(struct v3dv_device *device,
                                      struct v3dv_descriptor_state *descriptor_state,
                                      struct v3dv_descriptor_map *map,
                                      struct v3dv_pipeline_layout *pipeline_layout,
                                      uint32_t index)
{
   VkDescriptorType type;
   struct v3dv_cl_reloc reloc =
      v3dv_descriptor_map_get_descriptor_bo(device, descriptor_state, map,
                                            pipeline_layout,
                                            index, &type);

   assert(type == VK_DESCRIPTOR_TYPE_SAMPLER ||
          type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);

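   /* A combined image sampler stores both its texture state and its sampler
    * state in a single BO region, so point the reloc at the sampler state
    * half via the device-specific offset.
    */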
   if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
      reloc.offset += v3dv_X(device, combined_image_sampler_sampler_state_offset)();

   return reloc;
}

const struct v3dv_format *
v3dv_descriptor_map_get_texture_format(struct v3dv_descriptor_state *descriptor_state,
                                       struct v3dv_descriptor_map *map,
                                       struct v3dv_pipeline_layout *pipeline_layout,
                                       uint32_t index,
                                       VkFormat *out_vk_format)
{
   struct v3dv_descriptor *descriptor =
      v3dv_descriptor_map_get_descriptor(descriptor_state, map,
                                         pipeline_layout, index, NULL);

   switch (descriptor->type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      assert(descriptor->buffer_view);
      *out_vk_format = descriptor->buffer_view->vk_format;
      return descriptor->buffer_view->format;
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      assert(descriptor->image_view);
      *out_vk_format = descriptor->image_view->vk.format;
      return descriptor->image_view->format;
   default:
      unreachable("descriptor type doesn't have a texture format");
   }
}

struct v3dv_bo *
v3dv_descriptor_map_get_texture_bo(struct v3dv_descriptor_state *descriptor_state,
                                   struct v3dv_descriptor_map *map,
                                   struct v3dv_pipeline_layout *pipeline_layout,
                                   uint32_t index)
{
   struct v3dv_descriptor *descriptor =
      v3dv_descriptor_map_get_descriptor(descriptor_state, map,
                                         pipeline_layout, index, NULL);

   switch (descriptor->type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      assert(descriptor->buffer_view);
      return descriptor->buffer_view->buffer->mem->bo;
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
      assert(descriptor->image_view);
      struct v3dv_image *image =
         (struct v3dv_image *) descriptor->image_view->vk.image;
      return image->mem->bo;
   }
   default:
      unreachable("descriptor type doesn't have a texture bo");
   }
}

struct v3dv_cl_reloc
v3dv_descriptor_map_get_texture_shader_state(struct v3dv_device *device,
                                             struct v3dv_descriptor_state *descriptor_state,
                                             struct v3dv_descriptor_map *map,
                                             struct v3dv_pipeline_layout *pipeline_layout,
                                             uint32_t index)
{
   VkDescriptorType type;
   struct v3dv_cl_reloc reloc =
      v3dv_descriptor_map_get_descriptor_bo(device,
                                            descriptor_state, map,
                                            pipeline_layout,
                                            index, &type);

   assert(type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
          type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
          type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT ||
          type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ||
          type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER ||
          type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);

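   /* As in v3dv_descriptor_map_get_sampler_state, combined image samplers
    * pack texture and sampler state together in one BO region; here the
    * reloc points at the texture state half.
    */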
   if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
      reloc.offset += v3dv_X(device, combined_image_sampler_texture_state_offset)();

   return reloc;
}

/*
 * As anv and tu already point out:
 *
 * "Pipeline layouts. These have nothing to do with the pipeline. They are
 * just multiple descriptor set layouts pasted together."
 */

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_CreatePipelineLayout(VkDevice _device,
                          const VkPipelineLayoutCreateInfo *pCreateInfo,
                          const VkAllocationCallbacks *pAllocator,
                          VkPipelineLayout *pPipelineLayout)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   struct v3dv_pipeline_layout *layout;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_object_zalloc(&device->vk, pAllocator, sizeof(*layout),
                             VK_OBJECT_TYPE_PIPELINE_LAYOUT);
   if (layout == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   uint32_t dynamic_offset_count = 0;
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      V3DV_FROM_HANDLE(v3dv_descriptor_set_layout, set_layout,
                       pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;

      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         dynamic_offset_count += set_layout->binding[b].array_size *
            set_layout->binding[b].dynamic_offset_count;
      }

      layout->shader_stages |= set_layout->shader_stages;
   }

   layout->push_constant_size = 0;
   for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
      layout->push_constant_size =
         MAX2(layout->push_constant_size, range->offset + range->size);
   }

   layout->push_constant_size = align(layout->push_constant_size, 4096);

   layout->dynamic_offset_count = dynamic_offset_count;

   *pPipelineLayout = v3dv_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
v3dv_DestroyPipelineLayout(VkDevice _device,
                           VkPipelineLayout _pipelineLayout,
                           const VkAllocationCallbacks *pAllocator)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;
   vk_object_free(&device->vk, pAllocator, pipeline_layout);
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_CreateDescriptorPool(VkDevice _device,
                          const VkDescriptorPoolCreateInfo *pCreateInfo,
                          const VkAllocationCallbacks *pAllocator,
                          VkDescriptorPool *pDescriptorPool)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   struct v3dv_descriptor_pool *pool;
   /* size is for the Vulkan object backing the descriptor pool. The final
    * size depends on whether VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT
    * is used.
    */
   uint64_t size = sizeof(struct v3dv_descriptor_pool);
   /* bo_size is for the descriptor-related data that needs to live at a GPU
    * address (i.e. in memory allocated with v3dv_bo_alloc), like the texture
    * sampler state. Note that not all descriptor types need it.
    */
   uint32_t bo_size = 0;
   uint32_t descriptor_count = 0;

   assert(pCreateInfo->poolSizeCount > 0);
   for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
      /* Verify supported descriptor type */
      switch (pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         break;
      default:
         unreachable("Unimplemented descriptor type");
         break;
      }

      assert(pCreateInfo->pPoolSizes[i].descriptorCount > 0);
      descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
      bo_size += v3dv_X(device, descriptor_bo_size)(pCreateInfo->pPoolSizes[i].type) *
         pCreateInfo->pPoolSizes[i].descriptorCount;
   }

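   /* Without VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT the app can
    * only release sets by resetting the whole pool, so we can carve all the
    * sets linearly out of one host allocation. Otherwise we track per-set
    * entries so freed regions can be found and reused.
    */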
   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      uint64_t host_size =
         pCreateInfo->maxSets * sizeof(struct v3dv_descriptor_set);
      host_size += sizeof(struct v3dv_descriptor) * descriptor_count;
      size += host_size;
   } else {
      size += sizeof(struct v3dv_descriptor_pool_entry) * pCreateInfo->maxSets;
   }

   pool = vk_object_zalloc(&device->vk, pAllocator, size,
                           VK_OBJECT_TYPE_DESCRIPTOR_POOL);

   if (!pool)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      pool->host_memory_base = (uint8_t *)pool + sizeof(struct v3dv_descriptor_pool);
      pool->host_memory_ptr = pool->host_memory_base;
      pool->host_memory_end = (uint8_t *)pool + size;
   }

   pool->max_entry_count = pCreateInfo->maxSets;

   if (bo_size > 0) {
      pool->bo = v3dv_bo_alloc(device, bo_size, "descriptor pool bo", true);
      if (!pool->bo)
         goto out_of_device_memory;

      bool ok = v3dv_bo_map(device, pool->bo, pool->bo->size);
      if (!ok)
         goto out_of_device_memory;

      pool->current_offset = 0;
   } else {
      pool->bo = NULL;
   }

   *pDescriptorPool = v3dv_descriptor_pool_to_handle(pool);

   return VK_SUCCESS;

out_of_device_memory:
   vk_object_free(&device->vk, pAllocator, pool);
   return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
}

static void
descriptor_set_destroy(struct v3dv_device *device,
                       struct v3dv_descriptor_pool *pool,
                       struct v3dv_descriptor_set *set,
                       bool free_bo)
{
   assert(!pool->host_memory_base);

   if (free_bo && !pool->host_memory_base) {
      for (uint32_t i = 0; i < pool->entry_count; i++) {
         if (pool->entries[i].set == set) {
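            /* Keep the entry array packed by shifting the tail one slot
             * down over the entry being removed.
             */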
            memmove(&pool->entries[i], &pool->entries[i + 1],
                    sizeof(pool->entries[i]) * (pool->entry_count - i - 1));
            --pool->entry_count;
            break;
         }
      }
   }
   vk_object_free(&device->vk, NULL, set);
}

VKAPI_ATTR void VKAPI_CALL
v3dv_DestroyDescriptorPool(VkDevice _device,
                           VkDescriptorPool _pool,
                           const VkAllocationCallbacks *pAllocator)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   if (!pool->host_memory_base) {
      for (int i = 0; i < pool->entry_count; ++i) {
         descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
   }

   if (pool->bo) {
      v3dv_bo_free(device, pool->bo);
      pool->bo = NULL;
   }

   vk_object_free(&device->vk, pAllocator, pool);
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_ResetDescriptorPool(VkDevice _device,
                         VkDescriptorPool descriptorPool,
                         VkDescriptorPoolResetFlags flags)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, descriptorPool);

   if (!pool->host_memory_base) {
      for (int i = 0; i < pool->entry_count; ++i) {
         descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
   } else {
      /* We clean up the host memory here so that a new set allocated from
       * the pool is already zeroed.
       */
      uint32_t host_size = pool->host_memory_end - pool->host_memory_base;
      memset(pool->host_memory_base, 0, host_size);
   }

   pool->entry_count = 0;
   pool->host_memory_ptr = pool->host_memory_base;
   pool->current_offset = 0;

   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_CreateDescriptorSetLayout(VkDevice _device,
                               const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                               const VkAllocationCallbacks *pAllocator,
                               VkDescriptorSetLayout *pSetLayout)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   struct v3dv_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t num_bindings = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);

      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       * "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       * VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       * pImmutableSamplers can be used to initialize a set of immutable
       * samplers. [...] If descriptorType is not one of these descriptor
       * types, then pImmutableSamplers is ignored."
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;
      if ((desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           desc_type == VK_DESCRIPTOR_TYPE_SAMPLER) &&
          pCreateInfo->pBindings[j].pImmutableSamplers) {
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
      }
   }

   uint32_t samplers_offset = sizeof(struct v3dv_descriptor_set_layout) +
      num_bindings * sizeof(set_layout->binding[0]);
   uint32_t size = samplers_offset +
      immutable_sampler_count * sizeof(struct v3dv_sampler);

   set_layout = vk_object_zalloc(&device->vk, pAllocator, size,
                                 VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT);

   if (!set_layout)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   /* We just allocate all the immutable samplers at the end of the struct */
   struct v3dv_sampler *samplers = (void *) &set_layout->binding[num_bindings];

   assert(pCreateInfo->bindingCount == 0 || num_bindings > 0);

   VkDescriptorSetLayoutBinding *bindings = NULL;
   VkResult result = vk_create_sorted_bindings(pCreateInfo->pBindings,
                                               pCreateInfo->bindingCount, &bindings);
   if (result != VK_SUCCESS) {
      vk_object_free(&device->vk, pAllocator, set_layout);
      return vk_error(device, result);
   }

   memset(set_layout->binding, 0,
          size - sizeof(struct v3dv_descriptor_set_layout));

   set_layout->binding_count = num_bindings;
   set_layout->flags = pCreateInfo->flags;
   set_layout->shader_stages = 0;
   set_layout->bo_size = 0;

   uint32_t descriptor_count = 0;
   uint32_t dynamic_offset_count = 0;

   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + i;
      uint32_t binding_number = binding->binding;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[binding_number].dynamic_offset_count = 1;
         break;
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         /* Nothing to do here; these cases are listed so the default case
          * below catches unsupported descriptor types.
          */
         break;
      default:
         unreachable("Unknown descriptor type\n");
         break;
      }

      set_layout->binding[binding_number].type = binding->descriptorType;
      set_layout->binding[binding_number].array_size = binding->descriptorCount;
      set_layout->binding[binding_number].descriptor_index = descriptor_count;
      set_layout->binding[binding_number].dynamic_offset_index = dynamic_offset_count;

      if ((binding->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           binding->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) &&
          binding->pImmutableSamplers) {

         set_layout->binding[binding_number].immutable_samplers_offset = samplers_offset;

         for (uint32_t j = 0; j < binding->descriptorCount; j++)
            samplers[j] = *v3dv_sampler_from_handle(binding->pImmutableSamplers[j]);

         samplers += binding->descriptorCount;
         samplers_offset += sizeof(struct v3dv_sampler) * binding->descriptorCount;
      }

      descriptor_count += binding->descriptorCount;
      dynamic_offset_count += binding->descriptorCount *
         set_layout->binding[binding_number].dynamic_offset_count;

      set_layout->shader_stages |= binding->stageFlags;

      set_layout->binding[binding_number].descriptor_offset = set_layout->bo_size;
      set_layout->bo_size +=
         v3dv_X(device, descriptor_bo_size)(set_layout->binding[binding_number].type) *
         binding->descriptorCount;
   }

   free(bindings);

   set_layout->descriptor_count = descriptor_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = v3dv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
v3dv_DestroyDescriptorSetLayout(VkDevice _device,
                                VkDescriptorSetLayout _set_layout,
                                const VkAllocationCallbacks *pAllocator)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   vk_object_free(&device->vk, pAllocator, set_layout);
}

static inline VkResult
out_of_pool_memory(const struct v3dv_device *device,
                   const struct v3dv_descriptor_pool *pool)
{
   /* Don't log OOPM errors for internal driver pools; we handle these
    * properly by allocating a new pool, so they don't point to real issues.
    */
   if (!pool->is_driver_internal)
      return vk_error(device, VK_ERROR_OUT_OF_POOL_MEMORY);
   else
      return VK_ERROR_OUT_OF_POOL_MEMORY;
}

static VkResult
descriptor_set_create(struct v3dv_device *device,
                      struct v3dv_descriptor_pool *pool,
                      const struct v3dv_descriptor_set_layout *layout,
                      struct v3dv_descriptor_set **out_set)
{
   struct v3dv_descriptor_set *set;
   uint32_t descriptor_count = layout->descriptor_count;
   unsigned mem_size = sizeof(struct v3dv_descriptor_set) +
      sizeof(struct v3dv_descriptor) * descriptor_count;

   if (pool->host_memory_base) {
      if (pool->host_memory_end - pool->host_memory_ptr < mem_size)
         return out_of_pool_memory(device, pool);

      set = (struct v3dv_descriptor_set *) pool->host_memory_ptr;
      pool->host_memory_ptr += mem_size;

      vk_object_base_init(&device->vk, &set->base, VK_OBJECT_TYPE_DESCRIPTOR_SET);
   } else {
      set = vk_object_zalloc(&device->vk, NULL, mem_size,
                             VK_OBJECT_TYPE_DESCRIPTOR_SET);

      if (!set)
         return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   set->pool = pool;

   set->layout = layout;

   /* FIXME: VK_EXT_descriptor_indexing introduces
    * VARIABLE_DESCRIPTOR_LAYOUT_COUNT. That would affect the layout_size used
    * below for bo allocation
    */

   uint32_t offset = 0;
   uint32_t index = pool->entry_count;

   if (layout->bo_size) {
      if (!pool->host_memory_base && pool->entry_count == pool->max_entry_count) {
         vk_object_free(&device->vk, NULL, set);
         return out_of_pool_memory(device, pool);
      }

      /* We first try to allocate linearly, so that we don't spend time
       * looking for gaps if the app only allocates & resets via the pool.
       *
       * If that fails, we try to find a gap from previously freed subregions
       * iterating through the descriptor pool entries. Note that we are not
       * doing that if we have a pool->host_memory_base. We only have that if
       * VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT is not set, so in
       * that case the user can't free subregions, so it doesn't make sense to
       * even try (or track those subregions).
       */
      if (pool->current_offset + layout->bo_size <= pool->bo->size) {
         offset = pool->current_offset;
         pool->current_offset += layout->bo_size;
      } else if (!pool->host_memory_base) {
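         /* The entries are kept sorted by offset, so walk them looking for
          * the first gap between consecutive suballocations big enough for
          * this set's BO region.
          */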
         for (index = 0; index < pool->entry_count; index++) {
            if (pool->entries[index].offset - offset >= layout->bo_size)
               break;
            offset = pool->entries[index].offset + pool->entries[index].size;
         }
         if (pool->bo->size - offset < layout->bo_size) {
            vk_object_free(&device->vk, NULL, set);
            return out_of_pool_memory(device, pool);
         }
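         /* Shift the tail entries up one slot so the new entry can be
          * inserted at 'index', keeping the array sorted by offset.
          */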
         memmove(&pool->entries[index + 1], &pool->entries[index],
                 sizeof(pool->entries[0]) * (pool->entry_count - index));
      } else {
         assert(pool->host_memory_base);
         return out_of_pool_memory(device, pool);
      }

      set->base_offset = offset;
   }

   if (!pool->host_memory_base) {
      pool->entries[index].set = set;
      pool->entries[index].offset = offset;
      pool->entries[index].size = layout->bo_size;
      pool->entry_count++;
   }

   /* Go through and fill out immutable samplers if we have any */
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers_offset == 0)
         continue;

      const struct v3dv_sampler *samplers =
         (const struct v3dv_sampler *)((const char *)layout +
                                       layout->binding[b].immutable_samplers_offset);

      for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
         uint32_t combined_offset =
            layout->binding[b].type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ?
               v3dv_X(device, combined_image_sampler_sampler_state_offset)() : 0;

         void *desc_map = descriptor_bo_map(device, set, &layout->binding[b], i);
         desc_map += combined_offset;

         memcpy(desc_map,
                samplers[i].sampler_state,
                sizeof(samplers[i].sampler_state));
      }
   }

   *out_set = set;

   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_AllocateDescriptorSets(VkDevice _device,
                            const VkDescriptorSetAllocateInfo *pAllocateInfo,
                            VkDescriptorSet *pDescriptorSets)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   struct v3dv_descriptor_set *set = NULL;
   uint32_t i = 0;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      V3DV_FROM_HANDLE(v3dv_descriptor_set_layout, layout,
                       pAllocateInfo->pSetLayouts[i]);

      result = descriptor_set_create(device, pool, layout, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = v3dv_descriptor_set_to_handle(set);
   }

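   /* On failure, free any sets allocated so far and clear all the output
    * handles, as the spec requires every entry of pDescriptorSets to be
    * VK_NULL_HANDLE when the call fails.
    */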
   if (result != VK_SUCCESS) {
      v3dv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                              i, pDescriptorSets);
      for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
         pDescriptorSets[i] = VK_NULL_HANDLE;
      }
   }

   return result;
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_FreeDescriptorSets(VkDevice _device,
                        VkDescriptorPool descriptorPool,
                        uint32_t count,
                        const VkDescriptorSet *pDescriptorSets)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      V3DV_FROM_HANDLE(v3dv_descriptor_set, set, pDescriptorSets[i]);
      if (set && !pool->host_memory_base)
         descriptor_set_destroy(device, pool, set, true);
   }

   return VK_SUCCESS;
}

static void
descriptor_bo_copy(struct v3dv_device *device,
                   struct v3dv_descriptor_set *dst_set,
                   const struct v3dv_descriptor_set_binding_layout *dst_binding_layout,
                   uint32_t dst_array_index,
                   struct v3dv_descriptor_set *src_set,
                   const struct v3dv_descriptor_set_binding_layout *src_binding_layout,
                   uint32_t src_array_index)
{
   assert(dst_binding_layout->type == src_binding_layout->type);

   void *dst_map = descriptor_bo_map(device, dst_set, dst_binding_layout, dst_array_index);
   void *src_map = descriptor_bo_map(device, src_set, src_binding_layout, src_array_index);

   memcpy(dst_map, src_map, v3dv_X(device, descriptor_bo_size)(src_binding_layout->type));
}

static void
write_buffer_descriptor(struct v3dv_descriptor *descriptor,
                        VkDescriptorType desc_type,
                        const VkDescriptorBufferInfo *buffer_info)
{
   V3DV_FROM_HANDLE(v3dv_buffer, buffer, buffer_info->buffer);

   descriptor->type = desc_type;
   descriptor->buffer = buffer;
   descriptor->offset = buffer_info->offset;
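   /* Per the Vulkan spec, VK_WHOLE_SIZE means the range covers from offset
    * to the end of the buffer.
    */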
   if (buffer_info->range == VK_WHOLE_SIZE) {
      descriptor->range = buffer->size - buffer_info->offset;
   } else {
      assert(buffer_info->range <= UINT32_MAX);
      descriptor->range = buffer_info->range;
   }
}

static void
write_image_descriptor(struct v3dv_device *device,
                       struct v3dv_descriptor *descriptor,
                       VkDescriptorType desc_type,
                       struct v3dv_descriptor_set *set,
                       const struct v3dv_descriptor_set_binding_layout *binding_layout,
                       struct v3dv_image_view *iview,
                       struct v3dv_sampler *sampler,
                       uint32_t array_index)
{
   descriptor->type = desc_type;
   descriptor->sampler = sampler;
   descriptor->image_view = iview;

   void *desc_map = descriptor_bo_map(device, set,
                                      binding_layout, array_index);

   if (iview) {
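      /* Image views keep two texture shader states; judging from the
       * condition below, the second one is only used for storage access to
       * cube array views. Pick the right one for this descriptor type.
       */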
      const uint32_t tex_state_index =
         iview->vk.view_type != VK_IMAGE_VIEW_TYPE_CUBE_ARRAY ||
         desc_type != VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ? 0 : 1;
      memcpy(desc_map,
             iview->texture_shader_state[tex_state_index],
             sizeof(iview->texture_shader_state[0]));
      desc_map += v3dv_X(device, combined_image_sampler_sampler_state_offset)();
   }

   if (sampler && !binding_layout->immutable_samplers_offset) {
      /* For immutable samplers this was already done as part of descriptor
       * set creation, as that info can't change later.
       */
      memcpy(desc_map,
             sampler->sampler_state,
             sizeof(sampler->sampler_state));
   }
}

static void
write_buffer_view_descriptor(struct v3dv_device *device,
                             struct v3dv_descriptor *descriptor,
                             VkDescriptorType desc_type,
                             struct v3dv_descriptor_set *set,
                             const struct v3dv_descriptor_set_binding_layout *binding_layout,
                             struct v3dv_buffer_view *bview,
                             uint32_t array_index)
{
   assert(bview);
   descriptor->type = desc_type;
   descriptor->buffer_view = bview;

   void *desc_map = descriptor_bo_map(device, set, binding_layout, array_index);

   memcpy(desc_map,
          bview->texture_shader_state,
          sizeof(bview->texture_shader_state));
}

VKAPI_ATTR void VKAPI_CALL
v3dv_UpdateDescriptorSets(VkDevice _device,
                          uint32_t descriptorWriteCount,
                          const VkWriteDescriptorSet *pDescriptorWrites,
                          uint32_t descriptorCopyCount,
                          const VkCopyDescriptorSet *pDescriptorCopies)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *writeset = &pDescriptorWrites[i];
      V3DV_FROM_HANDLE(v3dv_descriptor_set, set, writeset->dstSet);

      const struct v3dv_descriptor_set_binding_layout *binding_layout =
         set->layout->binding + writeset->dstBinding;

      struct v3dv_descriptor *descriptor = set->descriptors;

      descriptor += binding_layout->descriptor_index;
      descriptor += writeset->dstArrayElement;

      for (uint32_t j = 0; j < writeset->descriptorCount; ++j) {
         switch (writeset->descriptorType) {

         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: {
            const VkDescriptorBufferInfo *buffer_info = writeset->pBufferInfo + j;
            write_buffer_descriptor(descriptor, writeset->descriptorType,
                                    buffer_info);
            break;
         }
         case VK_DESCRIPTOR_TYPE_SAMPLER: {
            /* If we get here we should not be modifying an immutable
             * sampler, so we don't bother making that work or avoiding a
             * crash; the validation layers are expected to catch it.
             */
            const VkDescriptorImageInfo *image_info = writeset->pImageInfo + j;
            V3DV_FROM_HANDLE(v3dv_sampler, sampler, image_info->sampler);
            write_image_descriptor(device, descriptor, writeset->descriptorType,
                                   set, binding_layout, NULL, sampler,
                                   writeset->dstArrayElement + j);

            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: {
            const VkDescriptorImageInfo *image_info = writeset->pImageInfo + j;
            V3DV_FROM_HANDLE(v3dv_image_view, iview, image_info->imageView);
            write_image_descriptor(device, descriptor, writeset->descriptorType,
                                   set, binding_layout, iview, NULL,
                                   writeset->dstArrayElement + j);

            break;
         }
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
            const VkDescriptorImageInfo *image_info = writeset->pImageInfo + j;
            V3DV_FROM_HANDLE(v3dv_image_view, iview, image_info->imageView);
            V3DV_FROM_HANDLE(v3dv_sampler, sampler, image_info->sampler);
            write_image_descriptor(device, descriptor, writeset->descriptorType,
                                   set, binding_layout, iview, sampler,
                                   writeset->dstArrayElement + j);

            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
            V3DV_FROM_HANDLE(v3dv_buffer_view, buffer_view,
                             writeset->pTexelBufferView[j]);
            write_buffer_view_descriptor(device, descriptor, writeset->descriptorType,
                                         set, binding_layout, buffer_view,
                                         writeset->dstArrayElement + j);
            break;
         }
         default:
            unreachable("unimplemented descriptor type");
            break;
         }
         descriptor++;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copyset = &pDescriptorCopies[i];
      V3DV_FROM_HANDLE(v3dv_descriptor_set, src_set,
                       copyset->srcSet);
      V3DV_FROM_HANDLE(v3dv_descriptor_set, dst_set,
                       copyset->dstSet);

      const struct v3dv_descriptor_set_binding_layout *src_binding_layout =
         src_set->layout->binding + copyset->srcBinding;
      const struct v3dv_descriptor_set_binding_layout *dst_binding_layout =
         dst_set->layout->binding + copyset->dstBinding;

      assert(src_binding_layout->type == dst_binding_layout->type);

      struct v3dv_descriptor *src_descriptor = src_set->descriptors;
      struct v3dv_descriptor *dst_descriptor = dst_set->descriptors;

      src_descriptor += src_binding_layout->descriptor_index;
      src_descriptor += copyset->srcArrayElement;

      dst_descriptor += dst_binding_layout->descriptor_index;
      dst_descriptor += copyset->dstArrayElement;

      for (uint32_t j = 0; j < copyset->descriptorCount; j++) {
         *dst_descriptor = *src_descriptor;
         dst_descriptor++;
         src_descriptor++;

         if (v3dv_X(device, descriptor_bo_size)(src_binding_layout->type) > 0) {
            descriptor_bo_copy(device,
                               dst_set, dst_binding_layout,
                               j + copyset->dstArrayElement,
                               src_set, src_binding_layout,
                               j + copyset->srcArrayElement);
         }
      }
   }
}

VKAPI_ATTR void VKAPI_CALL
v3dv_GetDescriptorSetLayoutSupport(
   VkDevice _device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   VkDescriptorSetLayoutSupport *pSupport)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   VkDescriptorSetLayoutBinding *bindings = NULL;
   VkResult result = vk_create_sorted_bindings(
      pCreateInfo->pBindings, pCreateInfo->bindingCount, &bindings);
   if (result != VK_SUCCESS) {
      pSupport->supported = false;
      return;
   }

   bool supported = true;

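   /* Walk the bindings and make sure that neither the host-side descriptor
    * array nor the descriptor BO for this layout would overflow a 32-bit
    * size.
    */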
   uint32_t desc_host_size = sizeof(struct v3dv_descriptor);
   uint32_t host_size = sizeof(struct v3dv_descriptor_set);
   uint32_t bo_size = 0;
   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + i;

      if ((UINT32_MAX - host_size) / desc_host_size < binding->descriptorCount) {
         supported = false;
         break;
      }

      uint32_t desc_bo_size = v3dv_X(device, descriptor_bo_size)(binding->descriptorType);
      if (desc_bo_size > 0 &&
          (UINT32_MAX - bo_size) / desc_bo_size < binding->descriptorCount) {
         supported = false;
         break;
      }

      host_size += binding->descriptorCount * desc_host_size;
      bo_size += binding->descriptorCount * desc_bo_size;
   }

   free(bindings);

   pSupport->supported = supported;
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_CreateDescriptorUpdateTemplate(
   VkDevice _device,
   const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   struct v3dv_descriptor_update_template *template;

   size_t size = sizeof(*template) +
      pCreateInfo->descriptorUpdateEntryCount * sizeof(template->entries[0]);
   template = vk_object_alloc(&device->vk, pAllocator, size,
                              VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);
   if (template == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   template->bind_point = pCreateInfo->pipelineBindPoint;

   assert(pCreateInfo->templateType ==
          VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET);
   template->set = pCreateInfo->set;

   template->entry_count = pCreateInfo->descriptorUpdateEntryCount;
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const VkDescriptorUpdateTemplateEntry *pEntry =
         &pCreateInfo->pDescriptorUpdateEntries[i];

      template->entries[i] = (struct v3dv_descriptor_template_entry) {
         .type = pEntry->descriptorType,
         .binding = pEntry->dstBinding,
         .array_element = pEntry->dstArrayElement,
         .array_count = pEntry->descriptorCount,
         .offset = pEntry->offset,
         .stride = pEntry->stride,
      };
   }

   *pDescriptorUpdateTemplate =
      v3dv_descriptor_update_template_to_handle(template);

   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
v3dv_DestroyDescriptorUpdateTemplate(
   VkDevice _device,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const VkAllocationCallbacks *pAllocator)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_update_template, template,
                    descriptorUpdateTemplate);

   if (!template)
      return;

   vk_object_free(&device->vk, pAllocator, template);
}

VKAPI_ATTR void VKAPI_CALL
v3dv_UpdateDescriptorSetWithTemplate(
   VkDevice _device,
   VkDescriptorSet descriptorSet,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_set, set, descriptorSet);
   V3DV_FROM_HANDLE(v3dv_descriptor_update_template, template,
                    descriptorUpdateTemplate);

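   /* Each template entry describes a run of descriptors of a single type:
    * the data for element j is read from pData at entry->offset plus
    * j * entry->stride.
    */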
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct v3dv_descriptor_template_entry *entry =
         &template->entries[i];

      const struct v3dv_descriptor_set_binding_layout *binding_layout =
         set->layout->binding + entry->binding;

      struct v3dv_descriptor *descriptor =
         set->descriptors +
         binding_layout->descriptor_index +
         entry->array_element;

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               pData + entry->offset + j * entry->stride;
            write_buffer_descriptor(descriptor + j, entry->type, info);
         }
         break;

      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               pData + entry->offset + j * entry->stride;
            V3DV_FROM_HANDLE(v3dv_image_view, iview, info->imageView);
            V3DV_FROM_HANDLE(v3dv_sampler, sampler, info->sampler);
            write_image_descriptor(device, descriptor + j, entry->type,
                                   set, binding_layout, iview, sampler,
                                   entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *_bview =
               pData + entry->offset + j * entry->stride;
            V3DV_FROM_HANDLE(v3dv_buffer_view, bview, *_bview);
            write_buffer_view_descriptor(device, descriptor + j, entry->type,
                                         set, binding_layout, bview,
                                         entry->array_element + j);
         }
         break;

      default:
         unreachable("Unsupported descriptor type");
      }
   }
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_CreateSamplerYcbcrConversion(
   VkDevice _device,
   const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkSamplerYcbcrConversion *pYcbcrConversion)
{
   unreachable("Ycbcr sampler conversion is not supported");
   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
v3dv_DestroySamplerYcbcrConversion(
   VkDevice _device,
   VkSamplerYcbcrConversion YcbcrConversion,
   const VkAllocationCallbacks *pAllocator)
{
   unreachable("Ycbcr sampler conversion is not supported");
}