/*
 * Copyright © 2019 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "lvp_private.h"
#include "vk_acceleration_structure.h"
#include "vk_descriptor_update_template.h"
#include "vk_descriptors.h"
#include "vk_util.h"
#include "util/u_math.h"
#include "util/u_inlines.h"
#include "lp_texture.h"

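/* Per the Vulkan spec, pImmutableSamplers is only meaningful for SAMPLER and
 * COMBINED_IMAGE_SAMPLER bindings and must be ignored for every other type
 * (see the spec quote in lvp_CreateDescriptorSetLayout below).
 */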
static bool
binding_has_immutable_samplers(const VkDescriptorSetLayoutBinding *binding)
{
   switch (binding->descriptorType) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      return binding->pImmutableSamplers != NULL;

   default:
      return false;
   }
}

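/* Destructor installed on layouts that own an immutable_set (see the end of
 * lvp_CreateDescriptorSetLayout). Pinning ref_cnt to UINT32_MAX before
 * destroying the set keeps the layout unref done inside
 * lvp_descriptor_set_destroy() from re-entering this destructor.
 */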
static void
lvp_descriptor_set_layout_destroy(struct vk_device *_device, struct vk_descriptor_set_layout *_layout)
{
   struct lvp_device *device = container_of(_device, struct lvp_device, vk);
   struct lvp_descriptor_set_layout *set_layout = (void *)vk_to_lvp_descriptor_set_layout(_layout);

   _layout->ref_cnt = UINT32_MAX;
   lvp_descriptor_set_destroy(device, set_layout->immutable_set);

   vk_descriptor_set_layout_destroy(_device, _layout);
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateDescriptorSetLayout(
   VkDevice _device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorSetLayout *pSetLayout)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
   uint32_t num_bindings = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);
      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       *    pImmutableSamplers can be used to initialize a set of immutable
       *    samplers. [...] If descriptorType is not one of these descriptor
       *    types, then pImmutableSamplers is ignored."
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      if (binding_has_immutable_samplers(&pCreateInfo->pBindings[j]))
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   size_t size = sizeof(struct lvp_descriptor_set_layout) +
                 num_bindings * sizeof(set_layout->binding[0]) +
                 immutable_sampler_count * sizeof(struct lvp_sampler *);

   set_layout = vk_descriptor_set_layout_zalloc(&device->vk, size);
   if (!set_layout)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->immutable_sampler_count = immutable_sampler_count;
   /* We just allocate all the samplers at the end of the struct */
   struct lvp_sampler **samplers =
      (struct lvp_sampler **)&set_layout->binding[num_bindings];

   set_layout->binding_count = num_bindings;
   set_layout->shader_stages = 0;
   set_layout->size = 0;

   VkDescriptorSetLayoutBinding *bindings = NULL;
   VkResult result = vk_create_sorted_bindings(pCreateInfo->pBindings,
                                               pCreateInfo->bindingCount,
                                               &bindings);
   if (result != VK_SUCCESS) {
      vk_descriptor_set_layout_unref(&device->vk, &set_layout->vk);
      return vk_error(device, result);
   }

   uint32_t uniform_block_size = 0;

   uint32_t dynamic_offset_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + j;
      uint32_t b = binding->binding;

      uint32_t descriptor_count = binding->descriptorCount;
      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
         descriptor_count = 1;

      set_layout->binding[b].array_size = descriptor_count;
      set_layout->binding[b].descriptor_index = set_layout->size;
      set_layout->binding[b].type = binding->descriptorType;
      set_layout->binding[b].valid = true;
      set_layout->binding[b].uniform_block_offset = 0;
      set_layout->binding[b].uniform_block_size = 0;

      if (vk_descriptor_type_is_dynamic(binding->descriptorType)) {
         set_layout->binding[b].dynamic_index = dynamic_offset_count;
         dynamic_offset_count += binding->descriptorCount;
      }

      uint8_t max_plane_count = 1;
      if (binding_has_immutable_samplers(binding)) {
         set_layout->binding[b].immutable_samplers = samplers;
         samplers += binding->descriptorCount;

         for (uint32_t i = 0; i < binding->descriptorCount; i++) {
            VK_FROM_HANDLE(lvp_sampler, sampler, binding->pImmutableSamplers[i]);
            set_layout->binding[b].immutable_samplers[i] = sampler;
            const uint8_t sampler_plane_count = sampler->vk.ycbcr_conversion ?
               vk_format_get_plane_count(sampler->vk.ycbcr_conversion->state.format) : 1;
            if (max_plane_count < sampler_plane_count)
               max_plane_count = sampler_plane_count;
         }
      }

      set_layout->binding[b].stride = max_plane_count;
      set_layout->size += descriptor_count * max_plane_count;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         break;
      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         set_layout->binding[b].uniform_block_offset = uniform_block_size;
         set_layout->binding[b].uniform_block_size = binding->descriptorCount;
         uniform_block_size += binding->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         break;

      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         break;
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         break;
      default:
         break;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++)
      set_layout->binding[i].uniform_block_offset += set_layout->size * sizeof(struct lp_descriptor);

   free(bindings);

   set_layout->dynamic_offset_count = dynamic_offset_count;

   if (set_layout->binding_count == set_layout->immutable_sampler_count) {
      /* create a bindable set with all the immutable samplers */
      lvp_descriptor_set_create(device, set_layout, &set_layout->immutable_set);
      vk_descriptor_set_layout_unref(&device->vk, &set_layout->vk);
      set_layout->vk.destroy = lvp_descriptor_set_layout_destroy;
   }

   *pSetLayout = lvp_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

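/* Split out from lvp_CreatePipelineLayout so other parts of the driver can
 * build layouts directly. Only the push-constant metadata is computed here;
 * vk_pipeline_layout_zalloc() takes care of the per-set layouts.
 */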
struct lvp_pipeline_layout *
lvp_pipeline_layout_create(struct lvp_device *device,
                           const VkPipelineLayoutCreateInfo *pCreateInfo,
                           const VkAllocationCallbacks *pAllocator)
{
   struct lvp_pipeline_layout *layout = vk_pipeline_layout_zalloc(&device->vk, sizeof(*layout),
                                                                  pCreateInfo);

   layout->push_constant_size = 0;
   for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
      layout->push_constant_size = MAX2(layout->push_constant_size,
                                        range->offset + range->size);
      layout->push_constant_stages |= (range->stageFlags & LVP_STAGE_MASK);
   }
   layout->push_constant_size = align(layout->push_constant_size, 16);
   return layout;
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_CreatePipelineLayout(
   VkDevice _device,
   const VkPipelineLayoutCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkPipelineLayout *pPipelineLayout)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_pipeline_layout *layout = lvp_pipeline_layout_create(device, pCreateInfo, pAllocator);
   *pPipelineLayout = lvp_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

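/* Wrap a raw VkDeviceAddress in a PIPE_BUFFER resource. llvmpipe memory is
 * plain host memory, so the address itself can serve as the CPU backing
 * store of an unbacked resource.
 */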
static struct pipe_resource *
get_buffer_resource(struct pipe_context *ctx, VkDeviceAddress address, size_t range)
{
   struct pipe_screen *pscreen = ctx->screen;
   struct pipe_resource templ = {0};

   templ.screen = pscreen;
   templ.target = PIPE_BUFFER;
   templ.format = PIPE_FORMAT_R8_UNORM;
   templ.width0 = range;
   templ.height0 = 1;
   templ.depth0 = 1;
   templ.array_size = 1;
   templ.bind |= PIPE_BIND_SAMPLER_VIEW;
   templ.bind |= PIPE_BIND_SHADER_IMAGE;
   templ.flags = PIPE_RESOURCE_FLAG_DONT_OVER_ALLOCATE;

   uint64_t size;
   struct pipe_resource *pres = pscreen->resource_create_unbacked(pscreen, &templ, &size);
   assert(size == range);

   struct llvmpipe_memory_allocation alloc = {
      .cpu_addr = (void *)(uintptr_t)address,
   };

   pscreen->resource_bind_backing(pscreen, pres, (void *)&alloc, 0, 0, 0);
   return pres;
}

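/* Create a bindless texture handle for a buffer range given only by device
 * address (EXT_descriptor_buffer path). The sampler view and resource are
 * released again right away: descriptors reference only the returned
 * lp_texture_handle, which is tracked in device->bda_texture_handles for
 * later cleanup.
 */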
static struct lp_texture_handle
get_texture_handle_bda(struct lvp_device *device, VkDeviceAddress address, size_t range, enum pipe_format format)
{
   struct pipe_context *ctx = device->queue.ctx;

   struct pipe_resource *pres = get_buffer_resource(ctx, address, range);

   struct pipe_sampler_view templ;
   memset(&templ, 0, sizeof(templ));
   templ.target = PIPE_BUFFER;
   templ.swizzle_r = PIPE_SWIZZLE_X;
   templ.swizzle_g = PIPE_SWIZZLE_Y;
   templ.swizzle_b = PIPE_SWIZZLE_Z;
   templ.swizzle_a = PIPE_SWIZZLE_W;
   templ.format = format;
   templ.u.buf.size = range;
   templ.texture = pres;
   templ.context = ctx;
   struct pipe_sampler_view *view = ctx->create_sampler_view(ctx, pres, &templ);

   simple_mtx_lock(&device->queue.lock);

   struct lp_texture_handle *handle = (void *)(uintptr_t)ctx->create_texture_handle(ctx, view, NULL);
   util_dynarray_append(&device->bda_texture_handles, struct lp_texture_handle *, handle);

   simple_mtx_unlock(&device->queue.lock);

   ctx->sampler_view_destroy(ctx, view);
   pipe_resource_reference(&pres, NULL);

   return *handle;
}

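/* Image-view flavor of get_texture_handle_bda(): a bindless image handle
 * for a raw buffer range, tracked in device->bda_image_handles for later
 * cleanup.
 */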
static struct lp_texture_handle
get_image_handle_bda(struct lvp_device *device, VkDeviceAddress address, size_t range, enum pipe_format format)
{
   struct pipe_context *ctx = device->queue.ctx;

   struct pipe_resource *pres = get_buffer_resource(ctx, address, range);
   struct pipe_image_view view = {0};
   view.resource = pres;
   view.format = format;
   view.u.buf.size = range;

   simple_mtx_lock(&device->queue.lock);

   struct lp_texture_handle *handle = (void *)(uintptr_t)ctx->create_image_handle(ctx, &view);
   util_dynarray_append(&device->bda_image_handles, struct lp_texture_handle *, handle);

   simple_mtx_unlock(&device->queue.lock);

   pipe_resource_reference(&pres, NULL);

   return *handle;
}

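/* Descriptor sets are backed by one host-visible llvmpipe buffer: an array
 * of struct lp_descriptor (layout->size entries, already scaled by the
 * per-binding plane stride), with the byte storage for inline uniform
 * blocks appended at the end. Immutable samplers are written into the map
 * up front so later writes only have to fill in image data.
 */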
VkResult
lvp_descriptor_set_create(struct lvp_device *device,
                          struct lvp_descriptor_set_layout *layout,
                          struct lvp_descriptor_set **out_set)
{
   struct lvp_descriptor_set *set = vk_zalloc(&device->vk.alloc /* XXX: Use the pool */,
                                              sizeof(struct lvp_descriptor_set), 8,
                                              VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!set)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &set->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_SET);
   set->layout = layout;
   vk_descriptor_set_layout_ref(&layout->vk);

   uint64_t bo_size = layout->size * sizeof(struct lp_descriptor);

   for (unsigned i = 0; i < layout->binding_count; i++)
      bo_size += layout->binding[i].uniform_block_size;

   bo_size = MAX2(bo_size, 64);

   struct pipe_resource template = {
      .bind = PIPE_BIND_CONSTANT_BUFFER,
      .screen = device->pscreen,
      .target = PIPE_BUFFER,
      .format = PIPE_FORMAT_R8_UNORM,
      .width0 = bo_size,
      .height0 = 1,
      .depth0 = 1,
      .array_size = 1,
      .flags = PIPE_RESOURCE_FLAG_DONT_OVER_ALLOCATE,
   };

   set->bo = device->pscreen->resource_create_unbacked(device->pscreen, &template, &bo_size);
   set->pmem = device->pscreen->allocate_memory(device->pscreen, bo_size);

   set->map = device->pscreen->map_memory(device->pscreen, set->pmem);
   memset(set->map, 0, bo_size);

   device->pscreen->resource_bind_backing(device->pscreen, set->bo, set->pmem, 0, 0, 0);

   for (uint32_t binding_index = 0; binding_index < layout->binding_count; binding_index++) {
      const struct lvp_descriptor_set_binding_layout *bind_layout = &set->layout->binding[binding_index];
      if (!bind_layout->immutable_samplers)
         continue;

      struct lp_descriptor *desc = set->map;
      desc += bind_layout->descriptor_index;

      for (uint32_t sampler_index = 0; sampler_index < bind_layout->array_size; sampler_index++) {
         if (bind_layout->immutable_samplers[sampler_index]) {
            for (uint32_t s = 0; s < bind_layout->stride; s++) {
               int idx = sampler_index * bind_layout->stride + s;
               desc[idx] = bind_layout->immutable_samplers[sampler_index]->desc;
            }
         }
      }
   }

   *out_set = set;

   return VK_SUCCESS;
}

void
lvp_descriptor_set_destroy(struct lvp_device *device,
                           struct lvp_descriptor_set *set)
{
   pipe_resource_reference(&set->bo, NULL);
   device->pscreen->unmap_memory(device->pscreen, set->pmem);
   device->pscreen->free_memory(device->pscreen, set->pmem);

   vk_descriptor_set_layout_unref(&device->vk, &set->layout->vk);
   vk_object_base_finish(&set->base);
   vk_free(&device->vk.alloc, set);
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_AllocateDescriptorSets(
   VkDevice _device,
   const VkDescriptorSetAllocateInfo *pAllocateInfo,
   VkDescriptorSet *pDescriptorSets)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_pool, pool, pAllocateInfo->descriptorPool);
   VkResult result = VK_SUCCESS;
   struct lvp_descriptor_set *set;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      LVP_FROM_HANDLE(lvp_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      result = lvp_descriptor_set_create(device, layout, &set);
      if (result != VK_SUCCESS)
         break;

      list_addtail(&set->link, &pool->sets);
      pDescriptorSets[i] = lvp_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS)
      lvp_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);

   return result;
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_FreeDescriptorSets(
   VkDevice _device,
   VkDescriptorPool descriptorPool,
   uint32_t count,
   const VkDescriptorSet *pDescriptorSets)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   for (uint32_t i = 0; i < count; i++) {
      LVP_FROM_HANDLE(lvp_descriptor_set, set, pDescriptorSets[i]);

      if (!set)
         continue;
      list_del(&set->link);
      lvp_descriptor_set_destroy(device, set);
   }
   return VK_SUCCESS;
}

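/* Core write path: VkWriteDescriptorSet contents are translated straight
 * into the lp_descriptor array mapped at set->map. Copies between sets are
 * plain struct assignments, since everything the JIT needs is stored by
 * value in struct lp_descriptor.
 */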
VKAPI_ATTR void VKAPI_CALL lvp_UpdateDescriptorSets(
   VkDevice _device,
   uint32_t descriptorWriteCount,
   const VkWriteDescriptorSet *pDescriptorWrites,
   uint32_t descriptorCopyCount,
   const VkCopyDescriptorSet *pDescriptorCopies)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);

   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      LVP_FROM_HANDLE(lvp_descriptor_set, set, write->dstSet);
      const struct lvp_descriptor_set_binding_layout *bind_layout =
         &set->layout->binding[write->dstBinding];

      if (write->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         const VkWriteDescriptorSetInlineUniformBlock *uniform_data =
            vk_find_struct_const(write->pNext, WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK);
         assert(uniform_data);
         memcpy((uint8_t *)set->map + bind_layout->uniform_block_offset + write->dstArrayElement,
                uniform_data->pData, uniform_data->dataSize);
         continue;
      }

      struct lp_descriptor *desc = set->map;
      desc += bind_layout->descriptor_index + (write->dstArrayElement * bind_layout->stride);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         if (!bind_layout->immutable_samplers) {
            for (uint32_t j = 0; j < write->descriptorCount; j++) {
               LVP_FROM_HANDLE(lvp_sampler, sampler, write->pImageInfo[j].sampler);
               uint32_t didx = j * bind_layout->stride;

               for (unsigned k = 0; k < bind_layout->stride; k++) {
                  desc[didx + k].sampler = sampler->desc.sampler;
                  desc[didx + k].texture.sampler_index = sampler->desc.texture.sampler_index;
               }
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            write->pImageInfo[j].imageView);
            uint32_t didx = j * bind_layout->stride;
            if (iview) {
               unsigned plane_count = iview->plane_count;

               for (unsigned p = 0; p < plane_count; p++) {
                  lp_jit_texture_from_pipe(&desc[didx + p].texture, iview->planes[p].sv);
                  desc[didx + p].functions = iview->planes[p].texture_handle->functions;
               }

               if (!bind_layout->immutable_samplers) {
                  LVP_FROM_HANDLE(lvp_sampler, sampler,
                                  write->pImageInfo[j].sampler);

                  for (unsigned p = 0; p < plane_count; p++) {
                     desc[didx + p].sampler = sampler->desc.sampler;
                     desc[didx + p].texture.sampler_index = sampler->desc.texture.sampler_index;
                  }
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++) {
                  desc[didx + k].functions = device->null_texture_handle->functions;
                  desc[didx + k].texture.sampler_index = 0;
               }
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            write->pImageInfo[j].imageView);
            uint32_t didx = j * bind_layout->stride;
            if (iview) {
               unsigned plane_count = iview->plane_count;

               for (unsigned p = 0; p < plane_count; p++) {
                  lp_jit_texture_from_pipe(&desc[didx + p].texture, iview->planes[p].sv);
                  desc[didx + p].functions = iview->planes[p].texture_handle->functions;
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++) {
                  desc[didx + k].functions = device->null_texture_handle->functions;
                  desc[didx + k].texture.sampler_index = 0;
               }
            }
         }
         break;
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            write->pImageInfo[j].imageView);
            uint32_t didx = j * bind_layout->stride;
            if (iview) {
               unsigned plane_count = iview->plane_count;

               for (unsigned p = 0; p < plane_count; p++) {
                  lp_jit_image_from_pipe(&desc[didx + p].image, &iview->planes[p].iv);
                  desc[didx + p].functions = iview->planes[p].image_handle->functions;
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++)
                  desc[didx + k].functions = device->null_image_handle->functions;
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_buffer_view, bview,
                            write->pTexelBufferView[j]);
            assert(bind_layout->stride == 1);
            if (bview) {
               lp_jit_texture_from_pipe(&desc[j].texture, bview->sv);
               desc[j].functions = bview->texture_handle->functions;
            } else {
               desc[j].functions = device->null_texture_handle->functions;
               desc[j].texture.sampler_index = 0;
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_buffer_view, bview,
                            write->pTexelBufferView[j]);
            assert(bind_layout->stride == 1);
            if (bview) {
               lp_jit_image_from_pipe(&desc[j].image, &bview->iv);
               desc[j].functions = bview->image_handle->functions;
            } else {
               desc[j].functions = device->null_image_handle->functions;
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_buffer, buffer, write->pBufferInfo[j].buffer);
            assert(bind_layout->stride == 1);
            if (buffer) {
               struct pipe_constant_buffer ubo = {
                  .buffer = buffer->bo,
                  .buffer_offset = write->pBufferInfo[j].offset,
                  .buffer_size = write->pBufferInfo[j].range,
               };

               if (write->pBufferInfo[j].range == VK_WHOLE_SIZE)
                  ubo.buffer_size = buffer->bo->width0 - ubo.buffer_offset;

               lp_jit_buffer_from_pipe_const(&desc[j].buffer, &ubo, device->pscreen);
            } else {
               lp_jit_buffer_from_pipe_const(&desc[j].buffer, &((struct pipe_constant_buffer){0}), device->pscreen);
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_buffer, buffer, write->pBufferInfo[j].buffer);
            assert(bind_layout->stride == 1);
            if (buffer) {
               struct pipe_shader_buffer ubo = {
                  .buffer = buffer->bo,
                  .buffer_offset = write->pBufferInfo[j].offset,
                  .buffer_size = write->pBufferInfo[j].range,
               };

               if (write->pBufferInfo[j].range == VK_WHOLE_SIZE)
                  ubo.buffer_size = buffer->bo->width0 - ubo.buffer_offset;

               lp_jit_buffer_from_pipe(&desc[j].buffer, &ubo);
            } else {
               lp_jit_buffer_from_pipe(&desc[j].buffer, &((struct pipe_shader_buffer){0}));
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            const VkWriteDescriptorSetAccelerationStructureKHR *accel_structs =
               vk_find_struct_const(write->pNext, WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR);
            VK_FROM_HANDLE(vk_acceleration_structure, accel_struct, accel_structs->pAccelerationStructures[j]);

            desc[j].accel_struct = accel_struct ? vk_acceleration_structure_get_va(accel_struct) : 0;
         }
         break;

      default:
         unreachable("Unsupported descriptor type");
         break;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      LVP_FROM_HANDLE(lvp_descriptor_set, src, copy->srcSet);
      LVP_FROM_HANDLE(lvp_descriptor_set, dst, copy->dstSet);

      const struct lvp_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct lp_descriptor *src_desc = src->map;
      src_desc += src_layout->descriptor_index;

      const struct lvp_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct lp_descriptor *dst_desc = dst->map;
      dst_desc += dst_layout->descriptor_index;

      if (src_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         memcpy((uint8_t *)dst->map + dst_layout->uniform_block_offset + copy->dstArrayElement,
                (uint8_t *)src->map + src_layout->uniform_block_offset + copy->srcArrayElement,
                copy->descriptorCount);
      } else {
         src_desc += copy->srcArrayElement;
         dst_desc += copy->dstArrayElement;

         for (uint32_t j = 0; j < copy->descriptorCount; j++)
            dst_desc[j] = src_desc[j];
      }
   }
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateDescriptorPool(
   VkDevice _device,
   const VkDescriptorPoolCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorPool *pDescriptorPool)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_descriptor_pool *pool;
   size_t size = sizeof(struct lvp_descriptor_pool);
   pool = vk_zalloc2(&device->vk.alloc, pAllocator, size, 8,
                     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &pool->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_POOL);
   pool->flags = pCreateInfo->flags;
   list_inithead(&pool->sets);
   *pDescriptorPool = lvp_descriptor_pool_to_handle(pool);
   return VK_SUCCESS;
}

static void lvp_reset_descriptor_pool(struct lvp_device *device,
                                      struct lvp_descriptor_pool *pool)
{
   struct lvp_descriptor_set *set, *tmp;
   LIST_FOR_EACH_ENTRY_SAFE(set, tmp, &pool->sets, link) {
      list_del(&set->link);
      lvp_descriptor_set_destroy(device, set);
   }
}

VKAPI_ATTR void VKAPI_CALL lvp_DestroyDescriptorPool(
   VkDevice _device,
   VkDescriptorPool _pool,
   const VkAllocationCallbacks *pAllocator)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_pool, pool, _pool);

   if (!_pool)
      return;

   lvp_reset_descriptor_pool(device, pool);
   vk_object_base_finish(&pool->base);
   vk_free2(&device->vk.alloc, pAllocator, pool);
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_ResetDescriptorPool(
   VkDevice _device,
   VkDescriptorPool _pool,
   VkDescriptorPoolResetFlags flags)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_pool, pool, _pool);

   lvp_reset_descriptor_pool(device, pool);
   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL lvp_GetDescriptorSetLayoutSupport(VkDevice device,
                                                             const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                                                             VkDescriptorSetLayoutSupport *pSupport)
{
   const VkDescriptorSetLayoutBindingFlagsCreateInfo *variable_flags =
      vk_find_struct_const(pCreateInfo->pNext, DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);
   VkDescriptorSetVariableDescriptorCountLayoutSupport *variable_count =
      vk_find_struct(pSupport->pNext, DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT);
   if (variable_count) {
      variable_count->maxVariableDescriptorCount = 0;
      if (variable_flags) {
         for (unsigned i = 0; i < variable_flags->bindingCount; i++) {
            if (variable_flags->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)
               variable_count->maxVariableDescriptorCount = MAX_DESCRIPTORS;
         }
      }
   }
   pSupport->supported = true;
}

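/* Size of one element in application memory for the given descriptor type,
 * used when walking tightly packed template update data.
 */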
uint32_t
lvp_descriptor_update_template_entry_size(VkDescriptorType type)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      return sizeof(VkDescriptorImageInfo);
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      return sizeof(VkBufferView);
   case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
      return sizeof(VkAccelerationStructureKHR);
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
   default:
      return sizeof(VkDescriptorBufferInfo);
   }
}

void
lvp_descriptor_set_update_with_template(VkDevice _device, VkDescriptorSet descriptorSet,
                                        VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                        const void *pData)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_set, set, descriptorSet);
   LVP_FROM_HANDLE(vk_descriptor_update_template, templ, descriptorUpdateTemplate);
   uint32_t i, j;

   for (i = 0; i < templ->entry_count; ++i) {
      struct vk_descriptor_template_entry *entry = &templ->entries[i];

      const uint8_t *pSrc = ((const uint8_t *) pData) + entry->offset;

      const struct lvp_descriptor_set_binding_layout *bind_layout =
         &set->layout->binding[entry->binding];

      if (entry->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         memcpy((uint8_t *)set->map + bind_layout->uniform_block_offset + entry->array_element,
                pSrc, entry->array_count);
         continue;
      }

      struct lp_descriptor *desc = set->map;
      desc += bind_layout->descriptor_index;

      for (j = 0; j < entry->array_count; ++j) {
         unsigned idx = j + entry->array_element;

         idx *= bind_layout->stride;
         switch (entry->type) {
         case VK_DESCRIPTOR_TYPE_SAMPLER: {
            VkDescriptorImageInfo *info = (VkDescriptorImageInfo *)pSrc;
            LVP_FROM_HANDLE(lvp_sampler, sampler, info->sampler);

            for (unsigned k = 0; k < bind_layout->stride; k++) {
               desc[idx + k].sampler = sampler->desc.sampler;
               desc[idx + k].texture.sampler_index = sampler->desc.texture.sampler_index;
            }
            break;
         }
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
            VkDescriptorImageInfo *info = (VkDescriptorImageInfo *)pSrc;
            LVP_FROM_HANDLE(lvp_image_view, iview, info->imageView);

            if (iview) {
               for (unsigned p = 0; p < iview->plane_count; p++) {
                  lp_jit_texture_from_pipe(&desc[idx + p].texture, iview->planes[p].sv);
                  desc[idx + p].functions = iview->planes[p].texture_handle->functions;
               }

               if (!bind_layout->immutable_samplers) {
                  LVP_FROM_HANDLE(lvp_sampler, sampler, info->sampler);

                  for (unsigned p = 0; p < iview->plane_count; p++) {
                     desc[idx + p].sampler = sampler->desc.sampler;
                     desc[idx + p].texture.sampler_index = sampler->desc.texture.sampler_index;
                  }
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++) {
                  desc[idx + k].functions = device->null_texture_handle->functions;
                  desc[idx + k].texture.sampler_index = 0;
               }
            }
            break;
         }
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: {
            VkDescriptorImageInfo *info = (VkDescriptorImageInfo *)pSrc;
            LVP_FROM_HANDLE(lvp_image_view, iview, info->imageView);

            if (iview) {
               for (unsigned p = 0; p < iview->plane_count; p++) {
                  lp_jit_texture_from_pipe(&desc[idx + p].texture, iview->planes[p].sv);
                  desc[idx + p].functions = iview->planes[p].texture_handle->functions;
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++) {
                  desc[idx + k].functions = device->null_texture_handle->functions;
                  desc[idx + k].texture.sampler_index = 0;
               }
            }
            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            ((VkDescriptorImageInfo *)pSrc)->imageView);

            if (iview) {
               for (unsigned p = 0; p < iview->plane_count; p++) {
                  lp_jit_image_from_pipe(&desc[idx + p].image, &iview->planes[p].iv);
                  desc[idx + p].functions = iview->planes[p].image_handle->functions;
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++)
                  desc[idx + k].functions = device->null_image_handle->functions;
            }
            break;
         }
         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
            LVP_FROM_HANDLE(lvp_buffer_view, bview,
                            *(VkBufferView *)pSrc);
            assert(bind_layout->stride == 1);
            if (bview) {
               lp_jit_texture_from_pipe(&desc[idx].texture, bview->sv);
               desc[idx].functions = bview->texture_handle->functions;
            } else {
               desc[idx].functions = device->null_texture_handle->functions;
               desc[idx].texture.sampler_index = 0;
            }
            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
            LVP_FROM_HANDLE(lvp_buffer_view, bview,
                            *(VkBufferView *)pSrc);
            assert(bind_layout->stride == 1);
            if (bview) {
               lp_jit_image_from_pipe(&desc[idx].image, &bview->iv);
               desc[idx].functions = bview->image_handle->functions;
            } else {
               desc[idx].functions = device->null_image_handle->functions;
            }
            break;
         }

         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: {
            VkDescriptorBufferInfo *info = (VkDescriptorBufferInfo *)pSrc;
            LVP_FROM_HANDLE(lvp_buffer, buffer, info->buffer);
            assert(bind_layout->stride == 1);
            if (buffer) {
               struct pipe_constant_buffer ubo = {
                  .buffer = buffer->bo,
                  .buffer_offset = info->offset,
                  .buffer_size = info->range,
               };

               if (info->range == VK_WHOLE_SIZE)
                  ubo.buffer_size = buffer->bo->width0 - ubo.buffer_offset;

               lp_jit_buffer_from_pipe_const(&desc[idx].buffer, &ubo, device->pscreen);
            } else {
               lp_jit_buffer_from_pipe_const(&desc[idx].buffer, &((struct pipe_constant_buffer){0}), device->pscreen);
            }
            break;
         }

         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            VkDescriptorBufferInfo *info = (VkDescriptorBufferInfo *)pSrc;
            LVP_FROM_HANDLE(lvp_buffer, buffer, info->buffer);
            assert(bind_layout->stride == 1);

            if (buffer) {
               struct pipe_shader_buffer ubo = {
                  .buffer = buffer->bo,
                  .buffer_offset = info->offset,
                  .buffer_size = info->range,
               };

               if (info->range == VK_WHOLE_SIZE)
                  ubo.buffer_size = buffer->bo->width0 - ubo.buffer_offset;

               lp_jit_buffer_from_pipe(&desc[idx].buffer, &ubo);
            } else {
               lp_jit_buffer_from_pipe(&desc[idx].buffer, &((struct pipe_shader_buffer){0}));
            }
            break;
         }

         case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {
            VK_FROM_HANDLE(vk_acceleration_structure, accel_struct, *(VkAccelerationStructureKHR *)pSrc);
            desc[idx].accel_struct = accel_struct ? vk_acceleration_structure_get_va(accel_struct) : 0;
            break;
         }

         default:
            unreachable("Unsupported descriptor type");
            break;
         }

         pSrc += entry->stride;
      }
   }
}

VKAPI_ATTR void VKAPI_CALL
lvp_UpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                    VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                    const void *pData)
{
   lvp_descriptor_set_update_with_template(device, descriptorSet, descriptorUpdateTemplate, pData);
}

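/* EXT_descriptor_buffer: the reported size mirrors the layout used by
 * lvp_descriptor_set_create(), the lp_descriptor array followed by the
 * inline uniform block bytes appended after it.
 */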
VKAPI_ATTR void VKAPI_CALL lvp_GetDescriptorSetLayoutSizeEXT(
   VkDevice _device,
   VkDescriptorSetLayout _layout,
   VkDeviceSize *pSize)
{
   LVP_FROM_HANDLE(lvp_descriptor_set_layout, layout, _layout);

   *pSize = layout->size * sizeof(struct lp_descriptor);

   for (unsigned i = 0; i < layout->binding_count; i++)
      *pSize += layout->binding[i].uniform_block_size;
}

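/* EXT_descriptor_buffer: binding offsets point into the lp_descriptor
 * array, except for inline uniform blocks, whose uniform_block_offset
 * already points past the end of the array at their byte storage.
 */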
VKAPI_ATTR void VKAPI_CALL lvp_GetDescriptorSetLayoutBindingOffsetEXT(
   VkDevice _device,
   VkDescriptorSetLayout _layout,
   uint32_t binding,
   VkDeviceSize *pOffset)
{
   LVP_FROM_HANDLE(lvp_descriptor_set_layout, layout, _layout);
   assert(binding < layout->binding_count);

   const struct lvp_descriptor_set_binding_layout *bind_layout = &layout->binding[binding];
   if (bind_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
      *pOffset = bind_layout->uniform_block_offset;
   else
      *pOffset = bind_layout->descriptor_index * sizeof(struct lp_descriptor);
}

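/* EXT_descriptor_buffer: write descriptor payloads directly into
 * application memory. The default sampler state below backs sampler-less
 * combined-image-sampler writes (e.g. when the sampler is immutable).
 */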
VKAPI_ATTR void VKAPI_CALL lvp_GetDescriptorEXT(
   VkDevice _device,
   const VkDescriptorGetInfoEXT *pCreateInfo,
   size_t size,
   void *pDescriptor)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);

   struct lp_descriptor *desc = pDescriptor;

   struct pipe_sampler_state sampler = {
      .seamless_cube_map = 1,
      .max_lod = 0.25,
   };

   switch (pCreateInfo->type) {
   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK: {
      unreachable("this is a spec violation");
      break;
   }
   case VK_DESCRIPTOR_TYPE_SAMPLER: {
      if (pCreateInfo->data.pSampler) {
         LVP_FROM_HANDLE(lvp_sampler, sampler, pCreateInfo->data.pSampler[0]);
         desc->sampler = sampler->desc.sampler;
         desc->texture.sampler_index = sampler->desc.texture.sampler_index;
      } else {
         lp_jit_sampler_from_pipe(&desc->sampler, &sampler);
         desc->texture.sampler_index = 0;
      }
      break;
   }

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
      const VkDescriptorImageInfo *info = pCreateInfo->data.pCombinedImageSampler;
      if (info && info->imageView) {
         LVP_FROM_HANDLE(lvp_image_view, iview, info->imageView);

         unsigned plane_count = iview->plane_count;

         for (unsigned p = 0; p < plane_count; p++) {
            lp_jit_texture_from_pipe(&desc[p].texture, iview->planes[p].sv);
            desc[p].functions = iview->planes[p].texture_handle->functions;

            if (info->sampler) {
               LVP_FROM_HANDLE(lvp_sampler, sampler, info->sampler);
               desc[p].sampler = sampler->desc.sampler;
               desc[p].texture.sampler_index = sampler->desc.texture.sampler_index;
            } else {
               lp_jit_sampler_from_pipe(&desc[p].sampler, &sampler);
               desc[p].texture.sampler_index = 0;
            }
         }
      } else {
         unsigned plane_count = size / sizeof(struct lp_descriptor);

         for (unsigned p = 0; p < plane_count; p++) {
            desc[p].functions = device->null_texture_handle->functions;
            desc[p].texture.sampler_index = 0;
         }
      }

      break;
   }

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: {
      if (pCreateInfo->data.pSampledImage && pCreateInfo->data.pSampledImage->imageView) {
         LVP_FROM_HANDLE(lvp_image_view, iview, pCreateInfo->data.pSampledImage->imageView);

         unsigned plane_count = iview->plane_count;

         for (unsigned p = 0; p < plane_count; p++) {
            lp_jit_texture_from_pipe(&desc[p].texture, iview->planes[p].sv);
            desc[p].functions = iview->planes[p].texture_handle->functions;
         }
      } else {
         unsigned plane_count = size / sizeof(struct lp_descriptor);

         for (unsigned p = 0; p < plane_count; p++) {
            desc[p].functions = device->null_texture_handle->functions;
            desc[p].texture.sampler_index = 0;
         }
      }
      break;
   }

   /* technically these use different pointers, but it's a union, so they're all the same */
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
      if (pCreateInfo->data.pStorageImage && pCreateInfo->data.pStorageImage->imageView) {
         LVP_FROM_HANDLE(lvp_image_view, iview, pCreateInfo->data.pStorageImage->imageView);

         unsigned plane_count = iview->plane_count;

         for (unsigned p = 0; p < plane_count; p++) {
            lp_jit_image_from_pipe(&desc[p].image, &iview->planes[p].iv);
            desc[p].functions = iview->planes[p].image_handle->functions;
         }
      } else {
         unsigned plane_count = size / sizeof(struct lp_descriptor);

         for (unsigned p = 0; p < plane_count; p++)
            desc[p].functions = device->null_image_handle->functions;
      }
      break;
   }
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
      const VkDescriptorAddressInfoEXT *bda = pCreateInfo->data.pUniformTexelBuffer;
      if (bda && bda->address) {
         enum pipe_format pformat = vk_format_to_pipe_format(bda->format);
         lp_jit_texture_buffer_from_bda(&desc->texture, (void *)(uintptr_t)bda->address, bda->range, pformat);
         desc->functions = get_texture_handle_bda(device, bda->address, bda->range, pformat).functions;
      } else {
         desc->functions = device->null_texture_handle->functions;
         desc->texture.sampler_index = 0;
      }
      break;
   }
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
      const VkDescriptorAddressInfoEXT *bda = pCreateInfo->data.pStorageTexelBuffer;
      if (bda && bda->address) {
         enum pipe_format pformat = vk_format_to_pipe_format(bda->format);
         lp_jit_image_buffer_from_bda(&desc->image, (void *)(uintptr_t)bda->address, bda->range, pformat);
         desc->functions = get_image_handle_bda(device, bda->address, bda->range, pformat).functions;
      } else {
         desc->functions = device->null_image_handle->functions;
      }
      break;
   }
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: {
      const VkDescriptorAddressInfoEXT *bda = pCreateInfo->data.pUniformBuffer;
      if (bda && bda->address) {
         struct pipe_constant_buffer ubo = {
            .user_buffer = (void *)(uintptr_t)bda->address,
            .buffer_size = bda->range,
         };

         lp_jit_buffer_from_pipe_const(&desc->buffer, &ubo, device->pscreen);
      } else {
         lp_jit_buffer_from_pipe_const(&desc->buffer, &((struct pipe_constant_buffer){0}), device->pscreen);
      }
      break;
   }
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: {
      const VkDescriptorAddressInfoEXT *bda = pCreateInfo->data.pStorageBuffer;
      if (bda && bda->address) {
         lp_jit_buffer_from_bda(&desc->buffer, (void *)(uintptr_t)bda->address, bda->range);
      } else {
         lp_jit_buffer_from_pipe(&desc->buffer, &((struct pipe_shader_buffer){0}));
      }
      break;
   }
   case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {
      desc->accel_struct = pCreateInfo->data.accelerationStructure;
      break;
   }
   default:
      unreachable("Unsupported descriptor type");
      break;
   }
}