/*
 * Copyright © 2019 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "lvp_private.h"
#include "vk_descriptors.h"
#include "vk_util.h"
#include "util/u_math.h"
#include "util/u_inlines.h"

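/* Per the spec, pImmutableSamplers is only defined for SAMPLER and
 * COMBINED_IMAGE_SAMPLER bindings; for every other type the pointer may be
 * uninitialized and must not be read.
 */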
static bool
binding_has_immutable_samplers(const VkDescriptorSetLayoutBinding *binding)
{
   switch (binding->descriptorType) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      return binding->pImmutableSamplers != NULL;

   default:
      return false;
   }
}

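/* Destroy hook installed only on layouts that own an immutable-sampler set.
 * Pinning ref_cnt at UINT32_MAX keeps the set's own reference on this layout
 * from re-entering destruction while the immutable set is torn down.
 */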
static void
lvp_descriptor_set_layout_destroy(struct vk_device *_device, struct vk_descriptor_set_layout *_layout)
{
   struct lvp_device *device = container_of(_device, struct lvp_device, vk);
   struct lvp_descriptor_set_layout *set_layout = (void*)vk_to_lvp_descriptor_set_layout(_layout);

   _layout->ref_cnt = UINT32_MAX;
   lvp_descriptor_set_destroy(device, set_layout->immutable_set);

   vk_descriptor_set_layout_destroy(_device, _layout);
}

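/* Bindings may be sparse, so the layout is sized for the highest binding
 * number plus one; the immutable sampler pointers are packed into the same
 * allocation, directly after the binding array.
 */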
VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateDescriptorSetLayout(
   VkDevice                                    _device,
   const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
   const VkAllocationCallbacks*                pAllocator,
   VkDescriptorSetLayout*                      pSetLayout)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
   uint32_t num_bindings = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);
      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       *    pImmutableSamplers can be used to initialize a set of immutable
       *    samplers. [...] If descriptorType is not one of these descriptor
       *    types, then pImmutableSamplers is ignored."
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      if (binding_has_immutable_samplers(&pCreateInfo->pBindings[j]))
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   size_t size = sizeof(struct lvp_descriptor_set_layout) +
                 num_bindings * sizeof(set_layout->binding[0]) +
                 immutable_sampler_count * sizeof(struct lvp_sampler *);

   set_layout = vk_descriptor_set_layout_zalloc(&device->vk, size);
   if (!set_layout)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->immutable_sampler_count = immutable_sampler_count;
   /* We just allocate all the samplers at the end of the struct */
   struct lvp_sampler **samplers =
      (struct lvp_sampler **)&set_layout->binding[num_bindings];

   set_layout->binding_count = num_bindings;
   set_layout->shader_stages = 0;
   set_layout->size = 0;

   VkDescriptorSetLayoutBinding *bindings = NULL;
   VkResult result = vk_create_sorted_bindings(pCreateInfo->pBindings,
                                               pCreateInfo->bindingCount,
                                               &bindings);
   if (result != VK_SUCCESS) {
      vk_descriptor_set_layout_unref(&device->vk, &set_layout->vk);
      return vk_error(device, result);
   }

   uint32_t uniform_block_size = 0;

   uint32_t dynamic_offset_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + j;
      uint32_t b = binding->binding;

      uint32_t descriptor_count = binding->descriptorCount;
      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
         descriptor_count = 1;

      set_layout->binding[b].array_size = descriptor_count;
      set_layout->binding[b].descriptor_index = set_layout->size;
      set_layout->binding[b].type = binding->descriptorType;
      set_layout->binding[b].valid = true;
      set_layout->binding[b].uniform_block_offset = 0;
      set_layout->binding[b].uniform_block_size = 0;

      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
          binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
         set_layout->binding[b].dynamic_index = dynamic_offset_count;
         dynamic_offset_count += binding->descriptorCount;
      }

      uint8_t max_plane_count = 1;
      if (binding_has_immutable_samplers(binding)) {
         set_layout->binding[b].immutable_samplers = samplers;
         samplers += binding->descriptorCount;

         for (uint32_t i = 0; i < binding->descriptorCount; i++) {
            VK_FROM_HANDLE(lvp_sampler, sampler, binding->pImmutableSamplers[i]);
            set_layout->binding[b].immutable_samplers[i] = sampler;
            const uint8_t sampler_plane_count = sampler->vk.ycbcr_conversion ?
               vk_format_get_plane_count(sampler->vk.ycbcr_conversion->state.format) : 1;
            if (max_plane_count < sampler_plane_count)
               max_plane_count = sampler_plane_count;
         }
      }

      set_layout->binding[b].stride = max_plane_count;
      set_layout->size += descriptor_count * max_plane_count;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         break;
      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         set_layout->binding[b].uniform_block_offset = uniform_block_size;
         set_layout->binding[b].uniform_block_size = binding->descriptorCount;
         uniform_block_size += binding->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         break;

      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         break;
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         break;
      default:
         break;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }
   /* Inline uniform block data lives after the lp_descriptor array, so shift
    * every binding's block offset past it. Walk the full binding table
    * (indexed by binding number, which may be sparse), not just the first
    * pCreateInfo->bindingCount entries.
    */
   for (uint32_t i = 0; i < num_bindings; i++)
      set_layout->binding[i].uniform_block_offset += set_layout->size * sizeof(struct lp_descriptor);

   free(bindings);

   set_layout->dynamic_offset_count = dynamic_offset_count;

   if (set_layout->binding_count == set_layout->immutable_sampler_count) {
      /* create a bindable set with all the immutable samplers */
      lvp_descriptor_set_create(device, set_layout, &set_layout->immutable_set);
      vk_descriptor_set_layout_unref(&device->vk, &set_layout->vk);
      set_layout->vk.destroy = lvp_descriptor_set_layout_destroy;
   }

   *pSetLayout = lvp_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

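/* lavapipe flattens push constants into one contiguous block: only the total
 * size (16-byte aligned) and the union of stages that use any range are
 * tracked.
 */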
struct lvp_pipeline_layout *
lvp_pipeline_layout_create(struct lvp_device *device,
                           const VkPipelineLayoutCreateInfo*           pCreateInfo,
                           const VkAllocationCallbacks*                pAllocator)
{
   struct lvp_pipeline_layout *layout = vk_pipeline_layout_zalloc(&device->vk, sizeof(*layout),
                                                                  pCreateInfo);

   layout->push_constant_size = 0;
   for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
      layout->push_constant_size = MAX2(layout->push_constant_size,
                                        range->offset + range->size);
      layout->push_constant_stages |= (range->stageFlags & LVP_STAGE_MASK);
   }
   layout->push_constant_size = align(layout->push_constant_size, 16);
   return layout;
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_CreatePipelineLayout(
   VkDevice                                    _device,
   const VkPipelineLayoutCreateInfo*           pCreateInfo,
   const VkAllocationCallbacks*                pAllocator,
   VkPipelineLayout*                           pPipelineLayout)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_pipeline_layout *layout = lvp_pipeline_layout_create(device, pCreateInfo, pAllocator);
   *pPipelineLayout = lvp_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

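/* Wrap a VkDescriptorAddressInfoEXT in an unbacked pipe_resource whose
 * backing is bound directly to the device address. This works because
 * lavapipe "device addresses" are plain host pointers.
 */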
static struct pipe_resource *
get_buffer_resource(struct pipe_context *ctx, const VkDescriptorAddressInfoEXT *bda)
{
   struct pipe_screen *pscreen = ctx->screen;
   struct pipe_resource templ = {0};

   templ.screen = pscreen;
   templ.target = PIPE_BUFFER;
   templ.format = PIPE_FORMAT_R8_UNORM;
   templ.width0 = bda->range;
   templ.height0 = 1;
   templ.depth0 = 1;
   templ.array_size = 1;
   templ.bind |= PIPE_BIND_SAMPLER_VIEW;
   templ.bind |= PIPE_BIND_SHADER_IMAGE;
   templ.flags = PIPE_RESOURCE_FLAG_DONT_OVER_ALLOCATE;

   uint64_t size;
   struct pipe_resource *pres = pscreen->resource_create_unbacked(pscreen, &templ, &size);
   assert(size == bda->range);
   pscreen->resource_bind_backing(pscreen, pres, (void *)(uintptr_t)bda->address, 0);
   return pres;
}

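/* Build a transient buffer sampler view and bake it into a texture handle.
 * The handle is returned by value; the heap allocation is appended to
 * device->bda_texture_handles, presumably so the device can free it at
 * teardown rather than leak it.
 */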
static struct lp_texture_handle
get_texture_handle_bda(struct lvp_device *device, const VkDescriptorAddressInfoEXT *bda, enum pipe_format format)
{
   struct pipe_context *ctx = device->queue.ctx;

   struct pipe_resource *pres = get_buffer_resource(ctx, bda);

   struct pipe_sampler_view templ;
   memset(&templ, 0, sizeof(templ));
   templ.target = PIPE_BUFFER;
   templ.swizzle_r = PIPE_SWIZZLE_X;
   templ.swizzle_g = PIPE_SWIZZLE_Y;
   templ.swizzle_b = PIPE_SWIZZLE_Z;
   templ.swizzle_a = PIPE_SWIZZLE_W;
   templ.format = format;
   templ.u.buf.size = bda->range;
   templ.texture = pres;
   templ.context = ctx;
   struct pipe_sampler_view *view = ctx->create_sampler_view(ctx, pres, &templ);

   simple_mtx_lock(&device->queue.lock);

   struct lp_texture_handle *handle = (void *)(uintptr_t)ctx->create_texture_handle(ctx, view, NULL);
   util_dynarray_append(&device->bda_texture_handles, struct lp_texture_handle *, handle);

   simple_mtx_unlock(&device->queue.lock);

   ctx->sampler_view_destroy(ctx, view);
   pipe_resource_reference(&pres, NULL);

   return *handle;
}

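/* Same scheme as get_texture_handle_bda, but producing a writable image
 * handle for storage texel buffers.
 */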
static struct lp_texture_handle
get_image_handle_bda(struct lvp_device *device, const VkDescriptorAddressInfoEXT *bda, enum pipe_format format)
{
   struct pipe_context *ctx = device->queue.ctx;

   struct pipe_resource *pres = get_buffer_resource(ctx, bda);
   struct pipe_image_view view = {0};
   view.resource = pres;
   view.format = format;
   view.u.buf.size = bda->range;

   simple_mtx_lock(&device->queue.lock);

   struct lp_texture_handle *handle = (void *)(uintptr_t)ctx->create_image_handle(ctx, &view);
   util_dynarray_append(&device->bda_image_handles, struct lp_texture_handle *, handle);

   simple_mtx_unlock(&device->queue.lock);

   pipe_resource_reference(&pres, NULL);

   return *handle;
}

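/* Descriptor set memory layout: an array of struct lp_descriptor (one slot
 * per descriptor element, times the per-binding plane stride), followed by
 * the raw bytes of any inline uniform blocks. The whole set lives in a
 * single host-mapped buffer, pre-seeded with any immutable samplers.
 */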
VkResult
lvp_descriptor_set_create(struct lvp_device *device,
                          struct lvp_descriptor_set_layout *layout,
                          struct lvp_descriptor_set **out_set)
{
   struct lvp_descriptor_set *set = vk_zalloc(&device->vk.alloc /* XXX: Use the pool */,
                                              sizeof(struct lvp_descriptor_set), 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!set)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &set->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_SET);
   set->layout = layout;
   vk_descriptor_set_layout_ref(&layout->vk);

   uint64_t bo_size = layout->size * sizeof(struct lp_descriptor);

   for (unsigned i = 0; i < layout->binding_count; i++)
      bo_size += layout->binding[i].uniform_block_size;

   struct pipe_resource template = {
      .bind = PIPE_BIND_CONSTANT_BUFFER,
      .screen = device->pscreen,
      .target = PIPE_BUFFER,
      .format = PIPE_FORMAT_R8_UNORM,
      .width0 = bo_size,
      .height0 = 1,
      .depth0 = 1,
      .array_size = 1,
      .flags = PIPE_RESOURCE_FLAG_DONT_OVER_ALLOCATE,
   };

   set->bo = device->pscreen->resource_create_unbacked(device->pscreen, &template, &bo_size);
   set->pmem = device->pscreen->allocate_memory(device->pscreen, bo_size);

   set->map = device->pscreen->map_memory(device->pscreen, set->pmem);
   memset(set->map, 0, bo_size);

   device->pscreen->resource_bind_backing(device->pscreen, set->bo, set->pmem, 0);

   for (uint32_t binding_index = 0; binding_index < layout->binding_count; binding_index++) {
      const struct lvp_descriptor_set_binding_layout *bind_layout = &set->layout->binding[binding_index];
      if (!bind_layout->immutable_samplers)
         continue;

      struct lp_descriptor *desc = set->map;
      desc += bind_layout->descriptor_index;

      for (uint32_t sampler_index = 0; sampler_index < bind_layout->array_size; sampler_index++) {
         if (bind_layout->immutable_samplers[sampler_index]) {
            for (uint32_t s = 0; s < bind_layout->stride; s++) {
               int idx = sampler_index * bind_layout->stride + s;
               desc[idx] = bind_layout->immutable_samplers[sampler_index]->desc;
            }
         }
      }
   }

   *out_set = set;

   return VK_SUCCESS;
}

void
lvp_descriptor_set_destroy(struct lvp_device *device,
                           struct lvp_descriptor_set *set)
{
   pipe_resource_reference(&set->bo, NULL);
   device->pscreen->unmap_memory(device->pscreen, set->pmem);
   device->pscreen->free_memory(device->pscreen, set->pmem);

   vk_descriptor_set_layout_unref(&device->vk, &set->layout->vk);
   vk_object_base_finish(&set->base);
   vk_free(&device->vk.alloc, set);
}

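/* The pool doesn't sub-allocate: sets are individually heap-allocated (see
 * the XXX in lvp_descriptor_set_create) and the pool just tracks them on a
 * list for reset/destroy.
 */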
VKAPI_ATTR VkResult VKAPI_CALL lvp_AllocateDescriptorSets(
   VkDevice                                    _device,
   const VkDescriptorSetAllocateInfo*          pAllocateInfo,
   VkDescriptorSet*                            pDescriptorSets)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_pool, pool, pAllocateInfo->descriptorPool);
   VkResult result = VK_SUCCESS;
   struct lvp_descriptor_set *set;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      LVP_FROM_HANDLE(lvp_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      result = lvp_descriptor_set_create(device, layout, &set);
      if (result != VK_SUCCESS)
         break;

      list_addtail(&set->link, &pool->sets);
      pDescriptorSets[i] = lvp_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS)
      lvp_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);

   return result;
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_FreeDescriptorSets(
   VkDevice                                    _device,
   VkDescriptorPool                            descriptorPool,
   uint32_t                                    count,
   const VkDescriptorSet*                      pDescriptorSets)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   for (uint32_t i = 0; i < count; i++) {
      LVP_FROM_HANDLE(lvp_descriptor_set, set, pDescriptorSets[i]);

      if (!set)
         continue;
      list_del(&set->link);
      lvp_descriptor_set_destroy(device, set);
   }
   return VK_SUCCESS;
}

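/* Writes translate Vulkan descriptors directly into lp_descriptor slots in
 * the set's mapped BO. Multi-planar (YCbCr) views consume
 * bind_layout->stride consecutive slots per array element; NULL views fall
 * back to the device's null texture/image handles. Inline uniform blocks
 * bypass the descriptor array and memcpy into the trailing uniform area.
 */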
VKAPI_ATTR void VKAPI_CALL lvp_UpdateDescriptorSets(
   VkDevice                                    _device,
   uint32_t                                    descriptorWriteCount,
   const VkWriteDescriptorSet*                 pDescriptorWrites,
   uint32_t                                    descriptorCopyCount,
   const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);

   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      LVP_FROM_HANDLE(lvp_descriptor_set, set, write->dstSet);
      const struct lvp_descriptor_set_binding_layout *bind_layout =
         &set->layout->binding[write->dstBinding];

      if (write->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         const VkWriteDescriptorSetInlineUniformBlock *uniform_data =
            vk_find_struct_const(write->pNext, WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK);
         assert(uniform_data);
         memcpy((uint8_t *)set->map + bind_layout->uniform_block_offset + write->dstArrayElement,
                uniform_data->pData, uniform_data->dataSize);
         continue;
      }

      struct lp_descriptor *desc = set->map;
      desc += bind_layout->descriptor_index + (write->dstArrayElement * bind_layout->stride);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         if (!bind_layout->immutable_samplers) {
            for (uint32_t j = 0; j < write->descriptorCount; j++) {
               LVP_FROM_HANDLE(lvp_sampler, sampler, write->pImageInfo[j].sampler);
               uint32_t didx = j * bind_layout->stride;

               for (unsigned k = 0; k < bind_layout->stride; k++) {
                  desc[didx + k].sampler = sampler->desc.sampler;
                  desc[didx + k].texture.sampler_index = sampler->desc.texture.sampler_index;
               }
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            write->pImageInfo[j].imageView);
            uint32_t didx = j * bind_layout->stride;
            if (iview) {
               unsigned plane_count = iview->plane_count;

               for (unsigned p = 0; p < plane_count; p++) {
                  lp_jit_texture_from_pipe(&desc[didx + p].texture, iview->planes[p].sv);
                  desc[didx + p].functions = iview->planes[p].texture_handle->functions;
               }

               if (!bind_layout->immutable_samplers) {
                  LVP_FROM_HANDLE(lvp_sampler, sampler,
                                  write->pImageInfo[j].sampler);

                  for (unsigned p = 0; p < plane_count; p++) {
                     desc[didx + p].sampler = sampler->desc.sampler;
                     desc[didx + p].texture.sampler_index = sampler->desc.texture.sampler_index;
                  }
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++) {
                  desc[didx + k].functions = device->null_texture_handle->functions;
                  desc[didx + k].texture.sampler_index = 0;
               }
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            write->pImageInfo[j].imageView);
            uint32_t didx = j * bind_layout->stride;
            if (iview) {
               unsigned plane_count = iview->plane_count;

               for (unsigned p = 0; p < plane_count; p++) {
                  lp_jit_texture_from_pipe(&desc[didx + p].texture, iview->planes[p].sv);
                  desc[didx + p].functions = iview->planes[p].texture_handle->functions;
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++) {
                  desc[didx + k].functions = device->null_texture_handle->functions;
                  desc[didx + k].texture.sampler_index = 0;
               }
            }
         }
         break;
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            write->pImageInfo[j].imageView);
            uint32_t didx = j * bind_layout->stride;
            if (iview) {
               unsigned plane_count = iview->plane_count;

               for (unsigned p = 0; p < plane_count; p++) {
                  lp_jit_image_from_pipe(&desc[didx + p].image, &iview->planes[p].iv);
                  desc[didx + p].functions = iview->planes[p].image_handle->functions;
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++)
                  desc[didx + k].functions = device->null_image_handle->functions;
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_buffer_view, bview,
                            write->pTexelBufferView[j]);
            assert(bind_layout->stride == 1);
            if (bview) {
               lp_jit_texture_from_pipe(&desc[j].texture, bview->sv);
               desc[j].functions = bview->texture_handle->functions;
            } else {
               desc[j].functions = device->null_texture_handle->functions;
               desc[j].texture.sampler_index = 0;
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_buffer_view, bview,
                            write->pTexelBufferView[j]);
            assert(bind_layout->stride == 1);
            if (bview) {
               lp_jit_image_from_pipe(&desc[j].image, &bview->iv);
               desc[j].functions = bview->image_handle->functions;
            } else {
               desc[j].functions = device->null_image_handle->functions;
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_buffer, buffer, write->pBufferInfo[j].buffer);
            assert(bind_layout->stride == 1);
            if (buffer) {
               struct pipe_constant_buffer ubo = {
                  .buffer = buffer->bo,
                  .buffer_offset = write->pBufferInfo[j].offset,
                  .buffer_size = write->pBufferInfo[j].range,
               };

               if (write->pBufferInfo[j].range == VK_WHOLE_SIZE)
                  ubo.buffer_size = buffer->bo->width0 - ubo.buffer_offset;

               lp_jit_buffer_from_pipe_const(&desc[j].buffer, &ubo, device->pscreen);
            } else {
               lp_jit_buffer_from_pipe_const(&desc[j].buffer, &((struct pipe_constant_buffer){0}), device->pscreen);
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_buffer, buffer, write->pBufferInfo[j].buffer);
            assert(bind_layout->stride == 1);
            if (buffer) {
               struct pipe_shader_buffer ubo = {
                  .buffer = buffer->bo,
                  .buffer_offset = write->pBufferInfo[j].offset,
                  .buffer_size = write->pBufferInfo[j].range,
               };

               if (write->pBufferInfo[j].range == VK_WHOLE_SIZE)
                  ubo.buffer_size = buffer->bo->width0 - ubo.buffer_offset;

               lp_jit_buffer_from_pipe(&desc[j].buffer, &ubo);
            } else {
               lp_jit_buffer_from_pipe(&desc[j].buffer, &((struct pipe_shader_buffer){0}));
            }
         }
         break;

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      LVP_FROM_HANDLE(lvp_descriptor_set, src, copy->srcSet);
      LVP_FROM_HANDLE(lvp_descriptor_set, dst, copy->dstSet);

      const struct lvp_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct lp_descriptor *src_desc = src->map;
      src_desc += src_layout->descriptor_index;

      const struct lvp_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct lp_descriptor *dst_desc = dst->map;
      dst_desc += dst_layout->descriptor_index;

      if (src_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         memcpy((uint8_t *)dst->map + dst_layout->uniform_block_offset + copy->dstArrayElement,
                (uint8_t *)src->map + src_layout->uniform_block_offset + copy->srcArrayElement,
                copy->descriptorCount);
      } else {
         src_desc += copy->srcArrayElement;
         dst_desc += copy->dstArrayElement;

         for (uint32_t j = 0; j < copy->descriptorCount; j++)
            dst_desc[j] = src_desc[j];
      }
   }
}

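/* Pools don't preallocate descriptor memory; they only record the create
 * flags and keep a list of live sets so reset/destroy can tear them down.
 */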
VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateDescriptorPool(
   VkDevice                                    _device,
   const VkDescriptorPoolCreateInfo*           pCreateInfo,
   const VkAllocationCallbacks*                pAllocator,
   VkDescriptorPool*                           pDescriptorPool)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_descriptor_pool *pool;
   size_t size = sizeof(struct lvp_descriptor_pool);
   pool = vk_zalloc2(&device->vk.alloc, pAllocator, size, 8,
                     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &pool->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_POOL);
   pool->flags = pCreateInfo->flags;
   list_inithead(&pool->sets);
   *pDescriptorPool = lvp_descriptor_pool_to_handle(pool);
   return VK_SUCCESS;
}

static void lvp_reset_descriptor_pool(struct lvp_device *device,
                                      struct lvp_descriptor_pool *pool)
{
   struct lvp_descriptor_set *set, *tmp;
   LIST_FOR_EACH_ENTRY_SAFE(set, tmp, &pool->sets, link) {
      list_del(&set->link);
      lvp_descriptor_set_destroy(device, set);
   }
}

VKAPI_ATTR void VKAPI_CALL lvp_DestroyDescriptorPool(
   VkDevice                                    _device,
   VkDescriptorPool                            _pool,
   const VkAllocationCallbacks*                pAllocator)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_pool, pool, _pool);

   if (!_pool)
      return;

   lvp_reset_descriptor_pool(device, pool);
   vk_object_base_finish(&pool->base);
   vk_free2(&device->vk.alloc, pAllocator, pool);
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_ResetDescriptorPool(
   VkDevice                                    _device,
   VkDescriptorPool                            _pool,
   VkDescriptorPoolResetFlags                  flags)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_pool, pool, _pool);

   lvp_reset_descriptor_pool(device, pool);
   return VK_SUCCESS;
}

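/* Everything is supported; variable-count bindings are capped at the
 * generic MAX_DESCRIPTORS limit.
 */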
VKAPI_ATTR void VKAPI_CALL lvp_GetDescriptorSetLayoutSupport(VkDevice device,
                                                             const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
                                                             VkDescriptorSetLayoutSupport* pSupport)
{
   const VkDescriptorSetLayoutBindingFlagsCreateInfo *variable_flags =
      vk_find_struct_const(pCreateInfo->pNext, DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);
   VkDescriptorSetVariableDescriptorCountLayoutSupport *variable_count =
      vk_find_struct(pSupport->pNext, DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT);
   if (variable_count) {
      variable_count->maxVariableDescriptorCount = 0;
      if (variable_flags) {
         for (unsigned i = 0; i < variable_flags->bindingCount; i++) {
            if (variable_flags->pBindingFlags[i] & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)
               variable_count->maxVariableDescriptorCount = MAX_DESCRIPTORS;
         }
      }
   }
   pSupport->supported = true;
}

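/* The template snapshots the entry array right after the struct. The
 * pipeline layout is only captured for push-descriptor templates, the one
 * case where the spec requires pipelineLayout to be valid.
 */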
VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateDescriptorUpdateTemplate(VkDevice _device,
                                                                  const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
                                                                  const VkAllocationCallbacks *pAllocator,
                                                                  VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   const uint32_t entry_count = pCreateInfo->descriptorUpdateEntryCount;
   const size_t size = sizeof(struct lvp_descriptor_update_template) +
                       sizeof(VkDescriptorUpdateTemplateEntry) * entry_count;

   struct lvp_descriptor_update_template *templ;

   templ = vk_alloc(&device->vk.alloc, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!templ)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &templ->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);

   templ->ref_cnt = 1;
   templ->type = pCreateInfo->templateType;
   templ->bind_point = pCreateInfo->pipelineBindPoint;
   templ->set = pCreateInfo->set;
   /* This parameter is ignored if templateType is not VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR */
   if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR)
      templ->pipeline_layout = lvp_pipeline_layout_from_handle(pCreateInfo->pipelineLayout);
   else
      templ->pipeline_layout = NULL;
   templ->entry_count = entry_count;

   VkDescriptorUpdateTemplateEntry *entries = (VkDescriptorUpdateTemplateEntry *)(templ + 1);
   for (unsigned i = 0; i < entry_count; i++) {
      entries[i] = pCreateInfo->pDescriptorUpdateEntries[i];
   }

   *pDescriptorUpdateTemplate = lvp_descriptor_update_template_to_handle(templ);
   return VK_SUCCESS;
}

void
lvp_descriptor_template_destroy(struct lvp_device *device, struct lvp_descriptor_update_template *templ)
{
   if (!templ)
      return;

   vk_object_base_finish(&templ->base);
   vk_free(&device->vk.alloc, templ);
}

VKAPI_ATTR void VKAPI_CALL lvp_DestroyDescriptorUpdateTemplate(VkDevice _device,
                                                               VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                               const VkAllocationCallbacks *pAllocator)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_update_template, templ, descriptorUpdateTemplate);
   lvp_descriptor_template_templ_unref(device, templ);
}

uint32_t
lvp_descriptor_update_template_entry_size(VkDescriptorType type)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      return sizeof(VkDescriptorImageInfo);
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      return sizeof(VkBufferView);
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
   default:
      return sizeof(VkDescriptorBufferInfo);
   }
}

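/* Shared worker for UpdateDescriptorSetWithTemplate and push descriptors.
 * For a template update each entry reads at pData + entry->offset and steps
 * by entry->stride per element; push data is tightly packed, so the cursor
 * instead advances by the descriptor type's natural size.
 */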
void
lvp_descriptor_set_update_with_template(VkDevice _device, VkDescriptorSet descriptorSet,
                                        VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                        const void *pData, bool push)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_set, set, descriptorSet);
   LVP_FROM_HANDLE(lvp_descriptor_update_template, templ, descriptorUpdateTemplate);
   uint32_t i, j;

   const uint8_t *pSrc = pData;

   for (i = 0; i < templ->entry_count; ++i) {
      VkDescriptorUpdateTemplateEntry *entry = &templ->entry[i];

      if (!push)
         pSrc = ((const uint8_t *) pData) + entry->offset;

      const struct lvp_descriptor_set_binding_layout *bind_layout =
         &set->layout->binding[entry->dstBinding];

      if (entry->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         memcpy((uint8_t *)set->map + bind_layout->uniform_block_offset + entry->dstArrayElement,
                pSrc, entry->descriptorCount);
         continue;
      }

      struct lp_descriptor *desc = set->map;
      desc += bind_layout->descriptor_index;

      for (j = 0; j < entry->descriptorCount; ++j) {
         unsigned idx = j + entry->dstArrayElement;

         idx *= bind_layout->stride;
         switch (entry->descriptorType) {
         case VK_DESCRIPTOR_TYPE_SAMPLER: {
            LVP_FROM_HANDLE(lvp_sampler, sampler,
                            *(VkSampler *)pSrc);

            for (unsigned k = 0; k < bind_layout->stride; k++) {
               desc[idx + k].sampler = sampler->desc.sampler;
               desc[idx + k].texture.sampler_index = sampler->desc.texture.sampler_index;
            }
            break;
         }
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
            VkDescriptorImageInfo *info = (VkDescriptorImageInfo *)pSrc;
            LVP_FROM_HANDLE(lvp_image_view, iview, info->imageView);

            if (iview) {
               for (unsigned p = 0; p < iview->plane_count; p++) {
                  lp_jit_texture_from_pipe(&desc[idx + p].texture, iview->planes[p].sv);
                  desc[idx + p].functions = iview->planes[p].texture_handle->functions;
               }

               if (!bind_layout->immutable_samplers) {
                  LVP_FROM_HANDLE(lvp_sampler, sampler, info->sampler);

                  for (unsigned p = 0; p < iview->plane_count; p++) {
                     desc[idx + p].sampler = sampler->desc.sampler;
                     desc[idx + p].texture.sampler_index = sampler->desc.texture.sampler_index;
                  }
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++) {
                  desc[idx + k].functions = device->null_texture_handle->functions;
                  desc[idx + k].texture.sampler_index = 0;
               }
            }
            break;
         }
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: {
            VkDescriptorImageInfo *info = (VkDescriptorImageInfo *)pSrc;
            LVP_FROM_HANDLE(lvp_image_view, iview, info->imageView);

            if (iview) {
               for (unsigned p = 0; p < iview->plane_count; p++) {
                  lp_jit_texture_from_pipe(&desc[idx + p].texture, iview->planes[p].sv);
                  desc[idx + p].functions = iview->planes[p].texture_handle->functions;
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++) {
                  desc[idx + k].functions = device->null_texture_handle->functions;
                  desc[idx + k].texture.sampler_index = 0;
               }
            }
            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            ((VkDescriptorImageInfo *)pSrc)->imageView);

            if (iview) {
               for (unsigned p = 0; p < iview->plane_count; p++) {
                  lp_jit_image_from_pipe(&desc[idx + p].image, &iview->planes[p].iv);
                  desc[idx + p].functions = iview->planes[p].image_handle->functions;
               }
            } else {
               for (unsigned k = 0; k < bind_layout->stride; k++)
                  desc[idx + k].functions = device->null_image_handle->functions;
            }
            break;
         }
         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
            LVP_FROM_HANDLE(lvp_buffer_view, bview,
                            *(VkBufferView *)pSrc);
            assert(bind_layout->stride == 1);
            if (bview) {
               lp_jit_texture_from_pipe(&desc[idx].texture, bview->sv);
               desc[idx].functions = bview->texture_handle->functions;
            } else {
               desc[idx].functions = device->null_texture_handle->functions;
               desc[idx].texture.sampler_index = 0;
            }
            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
            LVP_FROM_HANDLE(lvp_buffer_view, bview,
                            *(VkBufferView *)pSrc);
            assert(bind_layout->stride == 1);
            if (bview) {
               lp_jit_image_from_pipe(&desc[idx].image, &bview->iv);
               desc[idx].functions = bview->image_handle->functions;
            } else {
               desc[idx].functions = device->null_image_handle->functions;
            }
            break;
         }

         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: {
            VkDescriptorBufferInfo *info = (VkDescriptorBufferInfo *)pSrc;
            LVP_FROM_HANDLE(lvp_buffer, buffer, info->buffer);
            assert(bind_layout->stride == 1);
            if (buffer) {
               struct pipe_constant_buffer ubo = {
                  .buffer = buffer->bo,
                  .buffer_offset = info->offset,
                  .buffer_size = info->range,
               };

               if (info->range == VK_WHOLE_SIZE)
                  ubo.buffer_size = buffer->bo->width0 - ubo.buffer_offset;

               lp_jit_buffer_from_pipe_const(&desc[idx].buffer, &ubo, device->pscreen);
            } else {
               lp_jit_buffer_from_pipe_const(&desc[idx].buffer, &((struct pipe_constant_buffer){0}), device->pscreen);
            }
            break;
         }

         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            VkDescriptorBufferInfo *info = (VkDescriptorBufferInfo *)pSrc;
            LVP_FROM_HANDLE(lvp_buffer, buffer, info->buffer);
            assert(bind_layout->stride == 1);

            if (buffer) {
               struct pipe_shader_buffer ubo = {
                  .buffer = buffer->bo,
                  .buffer_offset = info->offset,
                  .buffer_size = info->range,
               };

               if (info->range == VK_WHOLE_SIZE)
                  ubo.buffer_size = buffer->bo->width0 - ubo.buffer_offset;

               lp_jit_buffer_from_pipe(&desc[idx].buffer, &ubo);
            } else {
               lp_jit_buffer_from_pipe(&desc[idx].buffer, &((struct pipe_shader_buffer){0}));
            }
            break;
         }
         default:
            break;
         }

         if (push)
            pSrc += lvp_descriptor_update_template_entry_size(entry->descriptorType);
         else
            pSrc += entry->stride;
      }
   }
}

VKAPI_ATTR void VKAPI_CALL
lvp_UpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet,
                                    VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                    const void *pData)
{
   lvp_descriptor_set_update_with_template(device, descriptorSet, descriptorUpdateTemplate, pData, false);
}

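/* VK_EXT_descriptor_buffer: the reported size is the lp_descriptor array
 * plus the trailing inline uniform block bytes, matching the layout used by
 * lvp_descriptor_set_create.
 */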
VKAPI_ATTR void VKAPI_CALL lvp_GetDescriptorSetLayoutSizeEXT(
   VkDevice                                    _device,
   VkDescriptorSetLayout                       _layout,
   VkDeviceSize*                               pSize)
{
   LVP_FROM_HANDLE(lvp_descriptor_set_layout, layout, _layout);

   *pSize = layout->size * sizeof(struct lp_descriptor);

   for (unsigned i = 0; i < layout->binding_count; i++)
      *pSize += layout->binding[i].uniform_block_size;
}

VKAPI_ATTR void VKAPI_CALL lvp_GetDescriptorSetLayoutBindingOffsetEXT(
   VkDevice                                    _device,
   VkDescriptorSetLayout                       _layout,
   uint32_t                                    binding,
   VkDeviceSize*                               pOffset)
{
   LVP_FROM_HANDLE(lvp_descriptor_set_layout, layout, _layout);
   assert(binding < layout->binding_count);

   const struct lvp_descriptor_set_binding_layout *bind_layout = &layout->binding[binding];
   if (bind_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
      *pOffset = bind_layout->uniform_block_offset;
   else
      *pOffset = bind_layout->descriptor_index * sizeof(struct lp_descriptor);
}

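/* VK_EXT_descriptor_buffer: emit a single lp_descriptor for the app to place
 * in a descriptor buffer. The pipe_sampler_state below is a default fallback
 * for when no sampler is supplied; note that it is shadowed by the
 * lvp_sampler handle of the same name inside the SAMPLER cases.
 */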
VKAPI_ATTR void VKAPI_CALL lvp_GetDescriptorEXT(
   VkDevice                                    _device,
   const VkDescriptorGetInfoEXT*               pCreateInfo,
   size_t                                      size,
   void*                                       pDescriptor)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);

   struct lp_descriptor *desc = pDescriptor;

   struct pipe_sampler_state sampler = {
      .seamless_cube_map = 1,
      .max_lod = 0.25,
   };

   switch (pCreateInfo->type) {
   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK: {
      unreachable("this is a spec violation");
      break;
   }
   case VK_DESCRIPTOR_TYPE_SAMPLER: {
      if (pCreateInfo->data.pSampler) {
         LVP_FROM_HANDLE(lvp_sampler, sampler, pCreateInfo->data.pSampler[0]);
         desc->sampler = sampler->desc.sampler;
         desc->texture.sampler_index = sampler->desc.texture.sampler_index;
      } else {
         lp_jit_sampler_from_pipe(&desc->sampler, &sampler);
         desc->texture.sampler_index = 0;
      }
      break;
   }

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
      const VkDescriptorImageInfo *info = pCreateInfo->data.pCombinedImageSampler;
      if (info && info->imageView) {
         LVP_FROM_HANDLE(lvp_image_view, iview, info->imageView);

         lp_jit_texture_from_pipe(&desc->texture, iview->planes[0].sv);
         desc->functions = iview->planes[0].texture_handle->functions;

         if (info->sampler) {
            LVP_FROM_HANDLE(lvp_sampler, sampler, info->sampler);
            desc->sampler = sampler->desc.sampler;
            desc->texture.sampler_index = sampler->desc.texture.sampler_index;
         } else {
            lp_jit_sampler_from_pipe(&desc->sampler, &sampler);
            desc->texture.sampler_index = 0;
         }
      } else {
         desc->functions = device->null_texture_handle->functions;
         desc->texture.sampler_index = 0;
      }

      break;
   }

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: {
      if (pCreateInfo->data.pSampledImage && pCreateInfo->data.pSampledImage->imageView) {
         LVP_FROM_HANDLE(lvp_image_view, iview, pCreateInfo->data.pSampledImage->imageView);
         lp_jit_texture_from_pipe(&desc->texture, iview->planes[0].sv);
         desc->functions = iview->planes[0].texture_handle->functions;
      } else {
         desc->functions = device->null_texture_handle->functions;
         desc->texture.sampler_index = 0;
      }
      break;
   }

   /* technically these use different pointers, but it's a union, so they're all the same */
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
      if (pCreateInfo->data.pStorageImage && pCreateInfo->data.pStorageImage->imageView) {
         LVP_FROM_HANDLE(lvp_image_view, iview, pCreateInfo->data.pStorageImage->imageView);
         lp_jit_image_from_pipe(&desc->image, &iview->planes[0].iv);
         desc->functions = iview->planes[0].image_handle->functions;
      } else {
         desc->functions = device->null_image_handle->functions;
      }
      break;
   }
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
      const VkDescriptorAddressInfoEXT *bda = pCreateInfo->data.pUniformTexelBuffer;
      if (bda && bda->address) {
         enum pipe_format pformat = vk_format_to_pipe_format(bda->format);
         lp_jit_texture_buffer_from_bda(&desc->texture, (void*)(uintptr_t)bda->address, bda->range, pformat);
         desc->functions = get_texture_handle_bda(device, bda, pformat).functions;
      } else {
         desc->functions = device->null_texture_handle->functions;
         desc->texture.sampler_index = 0;
      }
      break;
   }
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
      const VkDescriptorAddressInfoEXT *bda = pCreateInfo->data.pStorageTexelBuffer;
      if (bda && bda->address) {
         enum pipe_format pformat = vk_format_to_pipe_format(bda->format);
         lp_jit_image_buffer_from_bda(&desc->image, (void *)(uintptr_t)bda->address, bda->range, pformat);
         desc->functions = get_image_handle_bda(device, bda, pformat).functions;
      } else {
         desc->functions = device->null_image_handle->functions;
      }
      break;
   }
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: {
      const VkDescriptorAddressInfoEXT *bda = pCreateInfo->data.pUniformBuffer;
      if (bda && bda->address) {
         struct pipe_constant_buffer ubo = {
            .user_buffer = (void *)(uintptr_t)bda->address,
            .buffer_size = bda->range,
         };

         lp_jit_buffer_from_pipe_const(&desc->buffer, &ubo, device->pscreen);
      } else {
         lp_jit_buffer_from_pipe_const(&desc->buffer, &((struct pipe_constant_buffer){0}), device->pscreen);
      }
      break;
   }
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: {
      const VkDescriptorAddressInfoEXT *bda = pCreateInfo->data.pStorageBuffer;
      if (bda && bda->address) {
         lp_jit_buffer_from_bda(&desc->buffer, (void *)(uintptr_t)bda->address, bda->range);
      } else {
         lp_jit_buffer_from_pipe(&desc->buffer, &((struct pipe_shader_buffer){0}));
      }
      break;
   }
   default:
      break;
   }
}