/*
 * Copyright © 2019 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "lvp_private.h"
#include "vk_descriptors.h"
#include "vk_util.h"
#include "u_math.h"

VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorSetLayout*                      pSetLayout)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
   uint32_t num_bindings = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);
      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       *    pImmutableSamplers can be used to initialize a set of immutable
       *    samplers. [...] If descriptorType is not one of these descriptor
       *    types, then pImmutableSamplers is ignored."
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;
      if ((desc_type == VK_DESCRIPTOR_TYPE_SAMPLER ||
           desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
          pCreateInfo->pBindings[j].pImmutableSamplers)
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

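   /* A sketch of the single-allocation layout built below (field names are
    * the ones used in this file; the trailing storage is an implementation
    * detail of this driver):
    *
    *    struct lvp_descriptor_set_layout
    *    binding[0] .. binding[num_bindings - 1]
    *    struct lvp_sampler *[immutable_sampler_count]
    *
    * Bindings with immutable samplers point into the trailing sampler
    * array, so the whole layout lives and dies with one allocation.
    */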
   size_t size = sizeof(struct lvp_descriptor_set_layout) +
                 num_bindings * sizeof(set_layout->binding[0]) +
                 immutable_sampler_count * sizeof(struct lvp_sampler *);

   set_layout = vk_descriptor_set_layout_zalloc(&device->vk, size);
   if (!set_layout)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->immutable_sampler_count = immutable_sampler_count;
   /* We just allocate all the samplers at the end of the struct */
   struct lvp_sampler **samplers =
      (struct lvp_sampler **)&set_layout->binding[num_bindings];

   set_layout->binding_count = num_bindings;
   set_layout->shader_stages = 0;
   set_layout->size = 0;

   VkDescriptorSetLayoutBinding *bindings = NULL;
   VkResult result = vk_create_sorted_bindings(pCreateInfo->pBindings,
                                               pCreateInfo->bindingCount,
                                               &bindings);
   if (result != VK_SUCCESS) {
      vk_descriptor_set_layout_unref(&device->vk, &set_layout->vk);
      return vk_error(device, result);
   }

   uint32_t dynamic_offset_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + j;
      uint32_t b = binding->binding;

      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].descriptor_index = set_layout->size;
      set_layout->binding[b].type = binding->descriptorType;
      set_layout->binding[b].valid = true;
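      /* An inline uniform block occupies a single descriptor slot; its
       * descriptorCount is a size in bytes, not an array length, so it must
       * not be added to the descriptor count.
       */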
      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
         set_layout->size++;
      else
         set_layout->size += binding->descriptorCount;

      for (gl_shader_stage stage = MESA_SHADER_VERTEX; stage < MESA_SHADER_STAGES; stage++) {
         set_layout->binding[b].stage[stage].const_buffer_index = -1;
         set_layout->binding[b].stage[stage].shader_buffer_index = -1;
         set_layout->binding[b].stage[stage].sampler_index = -1;
         set_layout->binding[b].stage[stage].sampler_view_index = -1;
         set_layout->binding[b].stage[stage].image_index = -1;
         set_layout->binding[b].stage[stage].uniform_block_index = -1;
      }

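      /* Dynamic uniform/storage buffers claim consecutive slots in the
       * set's dynamic-offset array, in sorted binding order; dynamic_index
       * records where this binding's offsets begin.
       */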
      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
          binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
         set_layout->binding[b].dynamic_index = dynamic_offset_count;
         dynamic_offset_count += binding->descriptorCount;
      }
      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         lvp_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].sampler_index = set_layout->stage[s].sampler_count;
            set_layout->stage[s].sampler_count += binding->descriptorCount;
         }
         if (binding->pImmutableSamplers) {
            set_layout->binding[b].immutable_samplers = samplers;
            samplers += binding->descriptorCount;

            for (uint32_t i = 0; i < binding->descriptorCount; i++)
               set_layout->binding[b].immutable_samplers[i] =
                  lvp_sampler_from_handle(binding->pImmutableSamplers[i]);
         }
         break;
      default:
         break;
      }

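      /* Map the descriptor type onto the gallium slot space it will occupy
       * at bind time: constant buffers, shader (storage) buffers, sampler
       * views, shader images, or inline uniform blocks, counted per stage.
       */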
      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         lvp_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].const_buffer_index = set_layout->stage[s].const_buffer_count;
            set_layout->stage[s].const_buffer_count += binding->descriptorCount;
         }
         break;
      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         lvp_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].uniform_block_offset = set_layout->stage[s].uniform_block_size;
            set_layout->binding[b].stage[s].uniform_block_index = set_layout->stage[s].uniform_block_count;
            set_layout->stage[s].uniform_block_size += binding->descriptorCount;
            set_layout->stage[s].uniform_block_sizes[set_layout->stage[s].uniform_block_count++] = binding->descriptorCount;
         }
         break;
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         lvp_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].shader_buffer_index = set_layout->stage[s].shader_buffer_count;
            set_layout->stage[s].shader_buffer_count += binding->descriptorCount;
         }
         break;

      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         lvp_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].image_index = set_layout->stage[s].image_count;
            set_layout->stage[s].image_count += binding->descriptorCount;
         }
         break;
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         lvp_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].sampler_view_index = set_layout->stage[s].sampler_view_count;
            set_layout->stage[s].sampler_view_count += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

#ifndef NDEBUG
   /* this otherwise crashes later and is annoying to track down */
   unsigned array[] = {
      VK_SHADER_STAGE_VERTEX_BIT,
      VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
      VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
      VK_SHADER_STAGE_GEOMETRY_BIT,
      VK_SHADER_STAGE_FRAGMENT_BIT,
      VK_SHADER_STAGE_COMPUTE_BIT,
   };
   for (unsigned i = 0; i <= MESA_SHADER_COMPUTE; i++) {
      uint16_t const_buffer_count = 0;
      uint16_t shader_buffer_count = 0;
      uint16_t sampler_count = 0;
      uint16_t sampler_view_count = 0;
      uint16_t image_count = 0;
      if (set_layout->shader_stages & array[i]) {
         const_buffer_count += set_layout->stage[i].const_buffer_count;
         shader_buffer_count += set_layout->stage[i].shader_buffer_count;
         sampler_count += set_layout->stage[i].sampler_count;
         sampler_view_count += set_layout->stage[i].sampler_view_count;
         image_count += set_layout->stage[i].image_count;
      }
      assert(const_buffer_count <= device->physical_device->device_limits.maxPerStageDescriptorUniformBuffers);
      assert(shader_buffer_count <= device->physical_device->device_limits.maxPerStageDescriptorStorageBuffers);
      assert(sampler_count <= device->physical_device->device_limits.maxPerStageDescriptorSamplers);
      assert(sampler_view_count <= device->physical_device->device_limits.maxPerStageDescriptorSampledImages);
      assert(image_count <= device->physical_device->device_limits.maxPerStageDescriptorStorageImages);
   }
#endif

   free(bindings);

   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = lvp_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_CreatePipelineLayout(
    VkDevice                                    _device,
    const VkPipelineLayoutCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkPipelineLayout*                           pPipelineLayout)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_pipeline_layout *layout;

   layout = vk_pipeline_layout_zalloc(&device->vk, sizeof(*layout),
                                      pCreateInfo);
   if (!layout)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

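   /* Concatenate each set layout's per-stage inline uniform block tables
    * into one flat table for the whole pipeline layout, preserving set
    * order so block indices stay stable across sets.
    */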
   for (uint32_t set = 0; set < layout->vk.set_count; set++) {
      if (layout->vk.set_layouts[set] == NULL)
         continue;

      const struct lvp_descriptor_set_layout *set_layout =
         vk_to_lvp_descriptor_set_layout(layout->vk.set_layouts[set]);

      for (unsigned i = 0; i < MESA_SHADER_STAGES; i++) {
         layout->stage[i].uniform_block_size += set_layout->stage[i].uniform_block_size;
         for (unsigned j = 0; j < set_layout->stage[i].uniform_block_count; j++) {
            assert(layout->stage[i].uniform_block_count + j < MAX_PER_STAGE_DESCRIPTOR_UNIFORM_BLOCKS * MAX_SETS);
            layout->stage[i].uniform_block_sizes[layout->stage[i].uniform_block_count + j] = set_layout->stage[i].uniform_block_sizes[j];
         }
         layout->stage[i].uniform_block_count += set_layout->stage[i].uniform_block_count;
      }
   }

#ifndef NDEBUG
   /* this otherwise crashes later and is annoying to track down */
   unsigned array[] = {
      VK_SHADER_STAGE_VERTEX_BIT,
      VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
      VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
      VK_SHADER_STAGE_GEOMETRY_BIT,
      VK_SHADER_STAGE_FRAGMENT_BIT,
      VK_SHADER_STAGE_COMPUTE_BIT,
   };
   for (unsigned i = 0; i <= MESA_SHADER_COMPUTE; i++) {
      uint16_t const_buffer_count = 0;
      uint16_t shader_buffer_count = 0;
      uint16_t sampler_count = 0;
      uint16_t sampler_view_count = 0;
      uint16_t image_count = 0;
      for (unsigned j = 0; j < layout->vk.set_count; j++) {
         if (layout->vk.set_layouts[j] == NULL)
            continue;

         const struct lvp_descriptor_set_layout *set_layout =
            vk_to_lvp_descriptor_set_layout(layout->vk.set_layouts[j]);

         if (set_layout->shader_stages & array[i]) {
            const_buffer_count += set_layout->stage[i].const_buffer_count;
            shader_buffer_count += set_layout->stage[i].shader_buffer_count;
            sampler_count += set_layout->stage[i].sampler_count;
            sampler_view_count += set_layout->stage[i].sampler_view_count;
            image_count += set_layout->stage[i].image_count;
         }
      }
      assert(const_buffer_count <= device->physical_device->device_limits.maxPerStageDescriptorUniformBuffers);
      assert(shader_buffer_count <= device->physical_device->device_limits.maxPerStageDescriptorStorageBuffers);
      assert(sampler_count <= device->physical_device->device_limits.maxPerStageDescriptorSamplers);
      assert(sampler_view_count <= device->physical_device->device_limits.maxPerStageDescriptorSampledImages);
      assert(image_count <= device->physical_device->device_limits.maxPerStageDescriptorStorageImages);
   }
#endif

   layout->push_constant_size = 0;
   for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
      layout->push_constant_size = MAX2(layout->push_constant_size,
                                        range->offset + range->size);
      layout->push_constant_stages |= (range->stageFlags & BITFIELD_MASK(MESA_SHADER_STAGES));
   }
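   /* Round up to a multiple of 16 bytes; push constants end up in a gallium
    * constant buffer, and vec4 (16-byte) granularity keeps the upload size
    * aligned (the vec4 rationale is an assumption; the padding itself is
    * harmless either way).
    */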
   layout->push_constant_size = align(layout->push_constant_size, 16);
   *pPipelineLayout = lvp_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

VkResult
lvp_descriptor_set_create(struct lvp_device *device,
                          struct lvp_descriptor_set_layout *layout,
                          struct lvp_descriptor_set **out_set)
{
   struct lvp_descriptor_set *set;
   size_t base_size = sizeof(*set) + layout->size * sizeof(set->descriptors[0]);
   size_t size = base_size;
   for (unsigned i = 0; i < MESA_SHADER_STAGES; i++)
      size += layout->stage[i].uniform_block_size;
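   /* The descriptor array is followed by the backing storage for every
    * inline uniform block, carved out below in binding order.
    */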
   set = vk_alloc(&device->vk.alloc /* XXX: Use the pool */, size, 8,
                  VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!set)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   /* A descriptor set may not be 100% filled. Clear the set so we can
    * later detect holes in it.
    */
   memset(set, 0, size);

   vk_object_base_init(&device->vk, &set->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_SET);
   set->layout = layout;
   vk_descriptor_set_layout_ref(&layout->vk);

   /* Go through and fill out immutable samplers if we have any */
   struct lvp_descriptor *desc = set->descriptors;
   uint8_t *uniform_mem = (uint8_t*)(set) + base_size;
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         desc->info.uniform = uniform_mem;
         uniform_mem += layout->binding[b].array_size;
         desc++;
      } else {
         if (layout->binding[b].immutable_samplers) {
            for (uint32_t i = 0; i < layout->binding[b].array_size; i++)
               desc[i].info.sampler = layout->binding[b].immutable_samplers[i];
         }
         desc += layout->binding[b].array_size;
      }
   }

   *out_set = set;

   return VK_SUCCESS;
}

void
lvp_descriptor_set_destroy(struct lvp_device *device,
                           struct lvp_descriptor_set *set)
{
   vk_descriptor_set_layout_unref(&device->vk, &set->layout->vk);
   vk_object_base_finish(&set->base);
   vk_free(&device->vk.alloc, set);
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_AllocateDescriptorSets(
    VkDevice                                    _device,
    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
    VkDescriptorSet*                            pDescriptorSets)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_pool, pool, pAllocateInfo->descriptorPool);
   VkResult result = VK_SUCCESS;
   struct lvp_descriptor_set *set;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      LVP_FROM_HANDLE(lvp_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      result = lvp_descriptor_set_create(device, layout, &set);
      if (result != VK_SUCCESS)
         break;

      list_addtail(&set->link, &pool->sets);
      pDescriptorSets[i] = lvp_descriptor_set_to_handle(set);
   }

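   /* If any allocation failed, release the sets created so far; the
    * handles written to pDescriptorSets up to index i are still valid for
    * lvp_FreeDescriptorSets.
    */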
   if (result != VK_SUCCESS)
      lvp_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);

   return result;
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_FreeDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   for (uint32_t i = 0; i < count; i++) {
      LVP_FROM_HANDLE(lvp_descriptor_set, set, pDescriptorSets[i]);

      if (!set)
         continue;
      list_del(&set->link);
      lvp_descriptor_set_destroy(device, set);
   }
   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL lvp_UpdateDescriptorSets(
    VkDevice                                    _device,
    uint32_t                                    descriptorWriteCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    descriptorCopyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      LVP_FROM_HANDLE(lvp_descriptor_set, set, write->dstSet);
      const struct lvp_descriptor_set_binding_layout *bind_layout =
         &set->layout->binding[write->dstBinding];
      struct lvp_descriptor *desc =
         &set->descriptors[bind_layout->descriptor_index];
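      /* For inline uniform blocks, dstArrayElement and dataSize are a byte
       * offset and byte size into the block's backing storage, so the
       * update is a plain memcpy rather than a per-descriptor loop.
       */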
      if (write->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         const VkWriteDescriptorSetInlineUniformBlock *uniform_data =
            vk_find_struct_const(write->pNext, WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK);
         assert(uniform_data);
         desc->type = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK;
         memcpy(desc->info.uniform + write->dstArrayElement, uniform_data->pData, uniform_data->dataSize);
         continue;
      }
      desc += write->dstArrayElement;

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_sampler, sampler,
                            write->pImageInfo[j].sampler);

            desc[j] = (struct lvp_descriptor) {
               .type = VK_DESCRIPTOR_TYPE_SAMPLER,
               .info.sampler = sampler,
            };
         }
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            write->pImageInfo[j].imageView);
            desc[j].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
            desc[j].info.iview = iview;
            /* All consecutive bindings updated via a single
             * VkWriteDescriptorSet structure, except those with a
             * descriptorCount of zero, must all either use immutable
             * samplers or must all not use immutable samplers.
             */
            if (bind_layout->immutable_samplers) {
               desc[j].info.sampler = bind_layout->immutable_samplers[j];
            } else {
               LVP_FROM_HANDLE(lvp_sampler, sampler,
                               write->pImageInfo[j].sampler);

               desc[j].info.sampler = sampler;
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            write->pImageInfo[j].imageView);

            desc[j] = (struct lvp_descriptor) {
               .type = write->descriptorType,
               .info.iview = iview,
            };
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_buffer_view, bview,
                            write->pTexelBufferView[j]);

            desc[j] = (struct lvp_descriptor) {
               .type = write->descriptorType,
               .info.buffer_view = bview,
            };
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_buffer, buffer, write->pBufferInfo[j].buffer);
            desc[j] = (struct lvp_descriptor) {
               .type = write->descriptorType,
               .info.offset = write->pBufferInfo[j].offset,
               .info.buffer = buffer,
               .info.range = write->pBufferInfo[j].range,
            };
         }
         break;

      default:
         break;
      }
   }

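   /* Copies move already-resolved descriptors, so no handle translation is
    * needed; inline uniform blocks again treat array elements and the
    * descriptor count as byte offsets and a byte size.
    */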
   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      LVP_FROM_HANDLE(lvp_descriptor_set, src, copy->srcSet);
      LVP_FROM_HANDLE(lvp_descriptor_set, dst, copy->dstSet);

      const struct lvp_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct lvp_descriptor *src_desc =
         &src->descriptors[src_layout->descriptor_index];

      const struct lvp_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct lvp_descriptor *dst_desc =
         &dst->descriptors[dst_layout->descriptor_index];

      if (src_desc->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         dst_desc->type = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK;
         memcpy(dst_desc->info.uniform + copy->dstArrayElement,
                src_desc->info.uniform + copy->srcArrayElement,
                copy->descriptorCount);
      } else {
         src_desc += copy->srcArrayElement;
         dst_desc += copy->dstArrayElement;

         for (uint32_t j = 0; j < copy->descriptorCount; j++)
            dst_desc[j] = src_desc[j];
      }
   }
}

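/* The pool does not preallocate: each set is heap-allocated on demand and
 * tracked on a list so reset/destroy can reclaim it. The size hints in
 * pCreateInfo are consequently unused (a reasonable trade-off for a CPU
 * rasterizer).
 */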
VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateDescriptorPool(
    VkDevice                                    _device,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorPool*                           pDescriptorPool)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_descriptor_pool *pool;
   size_t size = sizeof(struct lvp_descriptor_pool);
   pool = vk_zalloc2(&device->vk.alloc, pAllocator, size, 8,
                     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &pool->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_POOL);
   pool->flags = pCreateInfo->flags;
   list_inithead(&pool->sets);
   *pDescriptorPool = lvp_descriptor_pool_to_handle(pool);
   return VK_SUCCESS;
}

static void lvp_reset_descriptor_pool(struct lvp_device *device,
                                      struct lvp_descriptor_pool *pool)
{
   struct lvp_descriptor_set *set, *tmp;
   LIST_FOR_EACH_ENTRY_SAFE(set, tmp, &pool->sets, link) {
      vk_descriptor_set_layout_unref(&device->vk, &set->layout->vk);
      list_del(&set->link);
      vk_free(&device->vk.alloc, set);
   }
}

VKAPI_ATTR void VKAPI_CALL lvp_DestroyDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool,
    const VkAllocationCallbacks*                pAllocator)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_pool, pool, _pool);

   if (!_pool)
      return;

   lvp_reset_descriptor_pool(device, pool);
   vk_object_base_finish(&pool->base);
   vk_free2(&device->vk.alloc, pAllocator, pool);
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_ResetDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool,
    VkDescriptorPoolResetFlags                  flags)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_pool, pool, _pool);

   lvp_reset_descriptor_pool(device, pool);
   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL lvp_GetDescriptorSetLayoutSupport(VkDevice device,
                                                             const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
                                                             VkDescriptorSetLayoutSupport* pSupport)
{
   pSupport->supported = true;
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateDescriptorUpdateTemplate(VkDevice _device,
                                                                  const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
                                                                  const VkAllocationCallbacks *pAllocator,
                                                                  VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   const uint32_t entry_count = pCreateInfo->descriptorUpdateEntryCount;
   const size_t size = sizeof(struct lvp_descriptor_update_template) +
                       sizeof(VkDescriptorUpdateTemplateEntry) * entry_count;

   struct lvp_descriptor_update_template *templ;

   templ = vk_alloc(&device->vk.alloc, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!templ)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &templ->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);

   templ->ref_cnt = 1;
   templ->type = pCreateInfo->templateType;
   templ->bind_point = pCreateInfo->pipelineBindPoint;
   templ->set = pCreateInfo->set;
   /* pipelineLayout is ignored unless templateType is
    * VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR.
    */
   if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR)
      templ->pipeline_layout = lvp_pipeline_layout_from_handle(pCreateInfo->pipelineLayout);
   else
      templ->pipeline_layout = NULL;
   templ->entry_count = entry_count;

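   /* Copy the entries into the trailing storage so the template does not
    * depend on pCreateInfo outliving this call.
    */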
   VkDescriptorUpdateTemplateEntry *entries = (VkDescriptorUpdateTemplateEntry *)(templ + 1);
   for (unsigned i = 0; i < entry_count; i++) {
      entries[i] = pCreateInfo->pDescriptorUpdateEntries[i];
   }

   *pDescriptorUpdateTemplate = lvp_descriptor_update_template_to_handle(templ);
   return VK_SUCCESS;
}

void
lvp_descriptor_template_destroy(struct lvp_device *device, struct lvp_descriptor_update_template *templ)
{
   if (!templ)
      return;

   vk_object_base_finish(&templ->base);
   vk_free(&device->vk.alloc, templ);
}

VKAPI_ATTR void VKAPI_CALL lvp_DestroyDescriptorUpdateTemplate(VkDevice _device,
                                                               VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                               const VkAllocationCallbacks *pAllocator)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_update_template, templ, descriptorUpdateTemplate);
   lvp_descriptor_template_templ_unref(device, templ);
}

VKAPI_ATTR void VKAPI_CALL lvp_UpdateDescriptorSetWithTemplate(VkDevice _device,
                                                               VkDescriptorSet descriptorSet,
                                                               VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                               const void *pData)
{
   LVP_FROM_HANDLE(lvp_descriptor_set, set, descriptorSet);
   LVP_FROM_HANDLE(lvp_descriptor_update_template, templ, descriptorUpdateTemplate);
   uint32_t i, j;

   for (i = 0; i < templ->entry_count; ++i) {
      VkDescriptorUpdateTemplateEntry *entry = &templ->entry[i];
      const uint8_t *pSrc = ((const uint8_t *) pData) + entry->offset;
      const struct lvp_descriptor_set_binding_layout *bind_layout =
         &set->layout->binding[entry->dstBinding];
      struct lvp_descriptor *desc =
         &set->descriptors[bind_layout->descriptor_index];
      if (entry->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         desc->type = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK;
         memcpy(desc->info.uniform + entry->dstArrayElement, pSrc, entry->descriptorCount);
         continue;
      }
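      /* Each array element lives entry->stride bytes apart in the
       * application-provided pData blob; walk it element by element.
       */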
      for (j = 0; j < entry->descriptorCount; ++j) {
         unsigned idx = j + entry->dstArrayElement;
         switch (entry->descriptorType) {
         case VK_DESCRIPTOR_TYPE_SAMPLER: {
            LVP_FROM_HANDLE(lvp_sampler, sampler,
                            *(const VkSampler *)pSrc);
            desc[idx] = (struct lvp_descriptor) {
               .type = VK_DESCRIPTOR_TYPE_SAMPLER,
               .info.sampler = sampler,
            };
            break;
         }
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
            const VkDescriptorImageInfo *info = (const VkDescriptorImageInfo *)pSrc;
            desc[idx] = (struct lvp_descriptor) {
               .type = entry->descriptorType,
               .info.iview = lvp_image_view_from_handle(info->imageView),
               .info.sampler = lvp_sampler_from_handle(info->sampler),
            };
            break;
         }
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            ((const VkDescriptorImageInfo *)pSrc)->imageView);
            desc[idx] = (struct lvp_descriptor) {
               .type = entry->descriptorType,
               .info.iview = iview,
            };
            break;
         }
         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
            LVP_FROM_HANDLE(lvp_buffer_view, bview,
                            *(const VkBufferView *)pSrc);
            desc[idx] = (struct lvp_descriptor) {
               .type = entry->descriptorType,
               .info.buffer_view = bview,
            };
            break;
         }

         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            const VkDescriptorBufferInfo *info = (const VkDescriptorBufferInfo *)pSrc;
            desc[idx] = (struct lvp_descriptor) {
               .type = entry->descriptorType,
               .info.offset = info->offset,
               .info.buffer = lvp_buffer_from_handle(info->buffer),
               .info.range = info->range,
            };
            break;
         }
         default:
            break;
         }
         pSrc += entry->stride;
      }
   }
}