1 /*
2 * Copyright 2024 Valve Corporation
3 * Copyright 2024 Alyssa Rosenzweig
4 * Copyright 2022-2023 Collabora Ltd. and Red Hat Inc.
5 * SPDX-License-Identifier: MIT
6 */
7 #include "hk_descriptor_set_layout.h"
8
9 #include "hk_descriptor_set.h"
10 #include "hk_device.h"
11 #include "hk_entrypoints.h"
12 #include "hk_physical_device.h"
13 #include "hk_sampler.h"
14
15 #include "vk_pipeline_layout.h"
16
17 static bool
binding_has_immutable_samplers(const VkDescriptorSetLayoutBinding * binding)18 binding_has_immutable_samplers(const VkDescriptorSetLayoutBinding *binding)
19 {
20 switch (binding->descriptorType) {
21 case VK_DESCRIPTOR_TYPE_SAMPLER:
22 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
23 return binding->pImmutableSamplers != NULL;
24
25 default:
26 return false;
27 }
28 }
29
/*
 * Return the per-element stride and required alignment, in bytes, of a
 * descriptor of the given type as laid out in the descriptor set buffer.
 *
 * For VK_DESCRIPTOR_TYPE_MUTABLE_EXT the result is the maximum over all
 * types listed in type_list; a NULL type_list means "any type" and yields
 * HK_MAX_DESCRIPTOR_SIZE.  type_list is ignored for every other type.
 * Dynamic buffer descriptors report 0 because they live outside the
 * descriptor buffer.
 */
void
hk_descriptor_stride_align_for_type(
   const struct hk_physical_device *pdev, VkDescriptorType type,
   const VkMutableDescriptorTypeListEXT *type_list, uint32_t *stride,
   uint32_t *alignment)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      /* TODO: How do samplers work? */
      /* fallthrough: samplers share the sampled-image descriptor layout */
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      *stride = *alignment = sizeof(struct hk_sampled_image_descriptor);
      break;

   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      *stride = *alignment = sizeof(struct hk_storage_image_descriptor);
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      *stride = *alignment = sizeof(struct hk_buffer_view_descriptor);
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      *stride = *alignment = sizeof(struct hk_buffer_address);
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      *stride = *alignment = 0; /* These don't take up buffer space */
      break;

   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
      *stride = 1; /* Array size is bytes */
      *alignment = HK_MIN_UBO_ALIGNMENT;
      break;

   case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
      *stride = *alignment = 0;
      /* No type list means the mutable descriptor may hold anything, so
       * reserve the worst-case size.
       */
      if (type_list == NULL)
         *stride = *alignment = HK_MAX_DESCRIPTOR_SIZE;
      /* Otherwise, take the max stride/alignment over the allowed types. */
      for (unsigned i = 0; type_list && i < type_list->descriptorTypeCount;
           i++) {
         /* This shouldn't recurse */
         assert(type_list->pDescriptorTypes[i] !=
                VK_DESCRIPTOR_TYPE_MUTABLE_EXT);
         uint32_t desc_stride, desc_align;
         hk_descriptor_stride_align_for_type(pdev,
                                             type_list->pDescriptorTypes[i],
                                             NULL, &desc_stride, &desc_align);
         *stride = MAX2(*stride, desc_stride);
         *alignment = MAX2(*alignment, desc_align);
      }
      /* Round the stride up so consecutive array elements stay aligned. */
      *stride = ALIGN(*stride, *alignment);
      break;

   default:
      unreachable("Invalid descriptor type");
   }

   assert(*stride <= HK_MAX_DESCRIPTOR_SIZE);
}
94
95 static const VkMutableDescriptorTypeListEXT *
hk_descriptor_get_type_list(VkDescriptorType type,const VkMutableDescriptorTypeCreateInfoEXT * info,const uint32_t info_idx)96 hk_descriptor_get_type_list(VkDescriptorType type,
97 const VkMutableDescriptorTypeCreateInfoEXT *info,
98 const uint32_t info_idx)
99 {
100 const VkMutableDescriptorTypeListEXT *type_list = NULL;
101 if (type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT) {
102 assert(info != NULL);
103 assert(info_idx < info->mutableDescriptorTypeListCount);
104 type_list = &info->pMutableDescriptorTypeLists[info_idx];
105 }
106 return type_list;
107 }
108
/*
 * Create a descriptor set layout: compute the buffer-space offset/stride of
 * every binding, capture immutable samplers, count dynamic buffers, and
 * hash the resulting layout (BLAKE3) so that compatible layouts compare
 * equal by hash.
 */
VKAPI_ATTR VkResult VKAPI_CALL
hk_CreateDescriptorSetLayout(VkDevice device,
                             const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                             const VkAllocationCallbacks *pAllocator,
                             VkDescriptorSetLayout *pSetLayout)
{
   VK_FROM_HANDLE(hk_device, dev, device);
   struct hk_physical_device *pdev = hk_device_physical(dev);

   /* pBindings may be sparse and in any order, so size the binding array by
    * the highest binding number seen, and count immutable samplers so they
    * can be allocated in the same multialloc.
    */
   uint32_t num_bindings = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      num_bindings = MAX2(num_bindings, binding->binding + 1);

      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       *    pImmutableSamplers can be used to initialize a set of immutable
       *    samplers. [...] If descriptorType is not one of these descriptor
       *    types, then pImmutableSamplers is ignored.
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      if (binding_has_immutable_samplers(binding))
         immutable_sampler_count += binding->descriptorCount;
   }

   /* Layout, binding array, and immutable-sampler pointers in one zeroed
    * allocation.
    */
   VK_MULTIALLOC(ma);
   VK_MULTIALLOC_DECL(&ma, struct hk_descriptor_set_layout, layout, 1);
   VK_MULTIALLOC_DECL(&ma, struct hk_descriptor_set_binding_layout, bindings,
                      num_bindings);
   VK_MULTIALLOC_DECL(&ma, struct hk_sampler *, samplers,
                      immutable_sampler_count);

   if (!vk_descriptor_set_layout_multizalloc(&dev->vk, &ma))
      return vk_error(dev, VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->binding_count = num_bindings;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;
      /* We temporarily store pCreateInfo->pBindings[] index (plus one) in the
       * immutable_samplers pointer.  This provides us with a quick-and-dirty
       * way to sort the bindings by binding number.
       */
      layout->binding[b].immutable_samplers = (void *)(uintptr_t)(j + 1);
   }

   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);
   const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);

   /* Walk bindings in binding-number order, accumulating buffer offsets and
    * dynamic-buffer indices.
    */
   uint32_t buffer_size = 0;
   uint8_t dynamic_buffer_count = 0;
   for (uint32_t b = 0; b < num_bindings; b++) {
      /* We stashed the pCreateInfo->pBindings[] index (plus one) in the
       * immutable_samplers pointer.  Check for NULL (empty binding) and then
       * reset it and compute the index.
       */
      if (layout->binding[b].immutable_samplers == NULL)
         continue;
      const uint32_t info_idx =
         (uintptr_t)(void *)layout->binding[b].immutable_samplers - 1;
      layout->binding[b].immutable_samplers = NULL;

      const VkDescriptorSetLayoutBinding *binding =
         &pCreateInfo->pBindings[info_idx];

      if (binding->descriptorCount == 0)
         continue;

      layout->binding[b].type = binding->descriptorType;

      /* bindingCount may be zero (flags absent) or must match
       * pCreateInfo->bindingCount, indexed in pBindings order.
       */
      if (binding_flags_info && binding_flags_info->bindingCount > 0) {
         assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
         layout->binding[b].flags = binding_flags_info->pBindingFlags[info_idx];
      }

      layout->binding[b].array_size = binding->descriptorCount;

      /* Dynamic buffers get contiguous indices rather than buffer space. */
      if (vk_descriptor_type_is_dynamic(binding->descriptorType)) {
         layout->binding[b].dynamic_buffer_index = dynamic_buffer_count;
         dynamic_buffer_count += binding->descriptorCount;
      }

      const VkMutableDescriptorTypeListEXT *type_list =
         hk_descriptor_get_type_list(binding->descriptorType, mutable_info,
                                     info_idx);

      uint32_t stride, alignment;
      hk_descriptor_stride_align_for_type(pdev, binding->descriptorType,
                                          type_list, &stride, &alignment);

      uint8_t max_plane_count = 1;

      /* Capture immutable samplers and, for YCbCr conversions, the largest
       * plane count seen among them.
       */
      if (binding_has_immutable_samplers(binding)) {
         layout->binding[b].immutable_samplers = samplers;
         samplers += binding->descriptorCount;
         for (uint32_t i = 0; i < binding->descriptorCount; i++) {
            VK_FROM_HANDLE(hk_sampler, sampler, binding->pImmutableSamplers[i]);
            layout->binding[b].immutable_samplers[i] = sampler;
            const uint8_t sampler_plane_count =
               sampler->vk.ycbcr_conversion
                  ? vk_format_get_plane_count(
                       sampler->vk.ycbcr_conversion->state.format)
                  : 1;
            if (max_plane_count < sampler_plane_count)
               max_plane_count = sampler_plane_count;
         }
      }

      /* Multi-plane formats consume one descriptor slot per plane. */
      stride *= max_plane_count;

      if (stride > 0) {
         assert(stride <= UINT8_MAX);
         assert(util_is_power_of_two_nonzero(alignment));

         buffer_size = align64(buffer_size, alignment);
         layout->binding[b].offset = buffer_size;
         layout->binding[b].stride = stride;

         if (layout->binding[b].flags &
             VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT) {
            /* From the Vulkan 1.3.256 spec:
             *
             *    VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-pBindingFlags-03004
             *    "If an element of pBindingFlags includes
             *    VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT, then
             *    all other elements of
             *    VkDescriptorSetLayoutCreateInfo::pBindings must have a
             *    smaller value of binding"
             *
             * In other words, it has to be the last binding.
             */
            assert(b == num_bindings - 1);
         } else {
            /* the allocation size will be computed at descriptor allocation,
             * but the buffer size will be already aligned as this binding will
             * be the last
             */
            buffer_size += stride * binding->descriptorCount;
         }
      }
   }

   layout->non_variable_descriptor_buffer_size = buffer_size;
   layout->dynamic_buffer_count = dynamic_buffer_count;

   /* Hash everything that affects layout compatibility, including YCbCr
    * conversion state of immutable samplers.
    */
   struct mesa_blake3 blake3_ctx;
   _mesa_blake3_init(&blake3_ctx);

#define BLAKE3_UPDATE_VALUE(x)                                                 \
   _mesa_blake3_update(&blake3_ctx, &(x), sizeof(x));
   BLAKE3_UPDATE_VALUE(layout->non_variable_descriptor_buffer_size);
   BLAKE3_UPDATE_VALUE(layout->dynamic_buffer_count);
   BLAKE3_UPDATE_VALUE(layout->binding_count);

   for (uint32_t b = 0; b < num_bindings; b++) {
      BLAKE3_UPDATE_VALUE(layout->binding[b].type);
      BLAKE3_UPDATE_VALUE(layout->binding[b].flags);
      BLAKE3_UPDATE_VALUE(layout->binding[b].array_size);
      BLAKE3_UPDATE_VALUE(layout->binding[b].offset);
      BLAKE3_UPDATE_VALUE(layout->binding[b].stride);
      BLAKE3_UPDATE_VALUE(layout->binding[b].dynamic_buffer_index);

      if (layout->binding[b].immutable_samplers != NULL) {
         for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
            const struct hk_sampler *sampler =
               layout->binding[b].immutable_samplers[i];

            /* We zalloc the object, so it's safe to hash the whole thing */
            if (sampler != NULL && sampler->vk.ycbcr_conversion != NULL)
               BLAKE3_UPDATE_VALUE(sampler->vk.ycbcr_conversion->state);
         }
      }
   }
#undef BLAKE3_UPDATE_VALUE

   _mesa_blake3_final(&blake3_ctx, layout->vk.blake3);

   *pSetLayout = hk_descriptor_set_layout_to_handle(layout);

   return VK_SUCCESS;
}
300
/*
 * Check whether a proposed descriptor set layout fits in the hardware's
 * descriptor buffer and dynamic-buffer limits, without actually creating
 * it.  Also fills out VkDescriptorSetVariableDescriptorCountLayoutSupport
 * if the application chains it.
 */
VKAPI_ATTR void VKAPI_CALL
hk_GetDescriptorSetLayoutSupport(
   VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   VkDescriptorSetLayoutSupport *pSupport)
{
   VK_FROM_HANDLE(hk_device, dev, device);
   struct hk_physical_device *pdev = hk_device_physical(dev);

   const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);
   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);

   /* Figure out the maximum alignment up-front.  Otherwise, we need to sort
    * the list of descriptors by binding number in order to get the size
    * accumulation right.
    */
   uint32_t max_align = 0;
   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[i];
      const VkMutableDescriptorTypeListEXT *type_list =
         hk_descriptor_get_type_list(binding->descriptorType, mutable_info, i);

      uint32_t stride, alignment;
      hk_descriptor_stride_align_for_type(pdev, binding->descriptorType,
                                          type_list, &stride, &alignment);
      max_align = MAX2(max_align, alignment);
   }

   uint64_t non_variable_size = 0;
   uint32_t variable_stride = 0;
   uint32_t variable_count = 0;
   uint8_t dynamic_buffer_count = 0;

   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[i];

      VkDescriptorBindingFlags flags = 0;
      if (binding_flags != NULL && binding_flags->bindingCount > 0)
         flags = binding_flags->pBindingFlags[i];

      if (vk_descriptor_type_is_dynamic(binding->descriptorType))
         dynamic_buffer_count += binding->descriptorCount;

      const VkMutableDescriptorTypeListEXT *type_list =
         hk_descriptor_get_type_list(binding->descriptorType, mutable_info, i);

      uint32_t stride, alignment;
      hk_descriptor_stride_align_for_type(pdev, binding->descriptorType,
                                          type_list, &stride, &alignment);

      if (stride > 0) {
         assert(stride <= UINT8_MAX);
         assert(util_is_power_of_two_nonzero(alignment));

         if (flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT) {
            /* From the Vulkan 1.3.256 spec:
             *
             *    "For the purposes of this command, a variable-sized
             *    descriptor binding with a descriptorCount of zero is treated
             *    as if the descriptorCount is one"
             */
            variable_count = MAX2(1, binding->descriptorCount);
            variable_stride = stride;
         } else {
            /* Since we're aligning to the maximum and since this is just a
             * check for whether or not the max buffer size is big enough, we
             * keep non_variable_size aligned to max_align.
             */
            non_variable_size += stride * binding->descriptorCount;
            non_variable_size = align64(non_variable_size, max_align);
         }
      }
   }

   /* Total: non-variable space plus the variable binding at its full count. */
   uint64_t buffer_size = non_variable_size;
   if (variable_stride > 0) {
      buffer_size += variable_stride * variable_count;
      buffer_size = align64(buffer_size, max_align);
   }

   /* Push descriptor sets have a smaller size budget. */
   uint32_t max_buffer_size;
   if (pCreateInfo->flags &
       VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR)
      max_buffer_size = HK_PUSH_DESCRIPTOR_SET_SIZE;
   else
      max_buffer_size = HK_MAX_DESCRIPTOR_SET_SIZE;

   pSupport->supported = dynamic_buffer_count <= HK_MAX_DYNAMIC_BUFFERS &&
                         buffer_size <= max_buffer_size;

   vk_foreach_struct(ext, pSupport->pNext) {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT: {
         VkDescriptorSetVariableDescriptorCountLayoutSupport *vs = (void *)ext;
         if (variable_stride > 0) {
            /* NOTE(review): if non_variable_size ever exceeds
             * max_buffer_size, this unsigned subtraction wraps and reports a
             * huge count; presumably apps only consume this when supported is
             * VK_TRUE — confirm.
             */
            vs->maxVariableDescriptorCount =
               (max_buffer_size - non_variable_size) / variable_stride;
         } else {
            vs->maxVariableDescriptorCount = 0;
         }
         break;
      }

      default:
         vk_debug_ignored_stype(ext->sType);
         break;
      }
   }
}
413