/*
 * Copyright © 2022 Collabora Ltd. and Red Hat Inc.
 * SPDX-License-Identifier: MIT
 */
#include "nvk_descriptor_set_layout.h"

#include "nvk_descriptor_set.h"
#include "nvk_device.h"
#include "nvk_entrypoints.h"
#include "nvk_physical_device.h"
#include "nvk_sampler.h"

#include "vk_pipeline_layout.h"

static bool
binding_has_immutable_samplers(const VkDescriptorSetLayoutBinding *binding)
{
   switch (binding->descriptorType) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      return binding->pImmutableSamplers != NULL;

   default:
      return false;
   }
}

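/* Returns, in bytes, the stride and required alignment of a single
 * descriptor of the given type in the descriptor buffer.  Dynamic buffer
 * descriptors don't take up descriptor buffer space and report a stride of
 * zero.  For VK_DESCRIPTOR_TYPE_MUTABLE_EXT, the worst case over the
 * permitted type list is used (or NVK_MAX_DESCRIPTOR_SIZE when no list is
 * provided).
 */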
void
nvk_descriptor_stride_align_for_type(const struct nvk_physical_device *pdev,
                                     VkDescriptorType type,
                                     const VkMutableDescriptorTypeListEXT *type_list,
                                     uint32_t *stride, uint32_t *alignment)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      /* TODO: How do samplers work? */
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      *stride = *alignment = sizeof(struct nvk_image_descriptor);
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      *stride = *alignment = sizeof(struct nvk_buffer_address);
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      *stride = *alignment = 0; /* These don't take up buffer space */
      break;

   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
      *stride = 1; /* Array size is bytes */
      *alignment = nvk_min_cbuf_alignment(&pdev->info);
      break;

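   /* A mutable descriptor has to be able to hold any of the types in its
    * type list, so take the worst-case stride and alignment over the list.
    * If no type list is provided, fall back to the largest descriptor size
    * we support.
    */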
   case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
      *stride = *alignment = 0;
      if (type_list == NULL)
         *stride = *alignment = NVK_MAX_DESCRIPTOR_SIZE;
      for (unsigned i = 0; type_list && i < type_list->descriptorTypeCount; i++) {
         /* This shouldn't recurse */
         assert(type_list->pDescriptorTypes[i] !=
                VK_DESCRIPTOR_TYPE_MUTABLE_EXT);
         uint32_t desc_stride, desc_align;
         nvk_descriptor_stride_align_for_type(pdev,
                                              type_list->pDescriptorTypes[i],
                                              NULL, &desc_stride, &desc_align);
         *stride = MAX2(*stride, desc_stride);
         *alignment = MAX2(*alignment, desc_align);
      }
      *stride = ALIGN(*stride, *alignment);
      break;

   default:
      unreachable("Invalid descriptor type");
   }

   assert(*stride <= NVK_MAX_DESCRIPTOR_SIZE);
}

static const VkMutableDescriptorTypeListEXT *
nvk_descriptor_get_type_list(VkDescriptorType type,
                             const VkMutableDescriptorTypeCreateInfoEXT *info,
                             const uint32_t info_idx)
{
   const VkMutableDescriptorTypeListEXT *type_list = NULL;
   if (type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT) {
      assert(info != NULL);
      assert(info_idx < info->mutableDescriptorTypeListCount);
      type_list = &info->pMutableDescriptorTypeLists[info_idx];
   }
   return type_list;
}

VKAPI_ATTR VkResult VKAPI_CALL
nvk_CreateDescriptorSetLayout(VkDevice device,
                              const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                              const VkAllocationCallbacks *pAllocator,
                              VkDescriptorSetLayout *pSetLayout)
{
   VK_FROM_HANDLE(nvk_device, dev, device);
   struct nvk_physical_device *pdev = nvk_device_physical(dev);

   uint32_t num_bindings = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      num_bindings = MAX2(num_bindings, binding->binding + 1);

      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       *    pImmutableSamplers can be used to initialize a set of immutable
       *    samplers. [...] If descriptorType is not one of these descriptor
       *    types, then pImmutableSamplers is ignored."
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      if (binding_has_immutable_samplers(binding))
         immutable_sampler_count += binding->descriptorCount;
   }

   VK_MULTIALLOC(ma);
   VK_MULTIALLOC_DECL(&ma, struct nvk_descriptor_set_layout, layout, 1);
   VK_MULTIALLOC_DECL(&ma, struct nvk_descriptor_set_binding_layout, bindings,
                      num_bindings);
   VK_MULTIALLOC_DECL(&ma, struct nvk_sampler *, samplers,
                      immutable_sampler_count);

   if (!vk_descriptor_set_layout_multizalloc(&dev->vk, &ma))
      return vk_error(dev, VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->binding_count = num_bindings;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;
      /* We temporarily store pCreateInfo->pBindings[] index (plus one) in the
       * immutable_samplers pointer. This provides us with a quick-and-dirty
       * way to sort the bindings by binding number.
       */
      layout->binding[b].immutable_samplers = (void *)(uintptr_t)(j + 1);
   }

   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);
   const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);

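   /* Walk the bindings in binding-number order and lay each one out in the
    * descriptor buffer, recording its offset and stride as we go.
    */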
   uint32_t buffer_size = 0;
   uint8_t dynamic_buffer_count = 0;
   for (uint32_t b = 0; b < num_bindings; b++) {
      /* We stashed the pCreateInfo->pBindings[] index (plus one) in the
       * immutable_samplers pointer. Check for NULL (empty binding) and then
       * reset it and compute the index.
       */
      if (layout->binding[b].immutable_samplers == NULL)
         continue;
      const uint32_t info_idx =
         (uintptr_t)(void *)layout->binding[b].immutable_samplers - 1;
      layout->binding[b].immutable_samplers = NULL;

      const VkDescriptorSetLayoutBinding *binding =
         &pCreateInfo->pBindings[info_idx];

      if (binding->descriptorCount == 0)
         continue;

      layout->binding[b].type = binding->descriptorType;

      if (binding_flags_info && binding_flags_info->bindingCount > 0) {
         assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
         layout->binding[b].flags = binding_flags_info->pBindingFlags[info_idx];
      }

      layout->binding[b].array_size = binding->descriptorCount;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         layout->binding[b].dynamic_buffer_index = dynamic_buffer_count;
         dynamic_buffer_count += binding->descriptorCount;
         break;
      default:
         break;
      }

      const VkMutableDescriptorTypeListEXT *type_list =
         nvk_descriptor_get_type_list(binding->descriptorType,
                                      mutable_info, info_idx);

      uint32_t stride, alignment;
      nvk_descriptor_stride_align_for_type(pdev, binding->descriptorType,
                                           type_list, &stride, &alignment);

      uint8_t max_plane_count = 1;

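      /* Samplers with a YCbCr conversion may reference a multi-planar
       * format, in which case each plane needs its own descriptor.  Scan the
       * immutable samplers for the worst-case plane count and scale the
       * binding stride by it below.
       */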
      if (binding_has_immutable_samplers(binding)) {
         layout->binding[b].immutable_samplers = samplers;
         samplers += binding->descriptorCount;
         for (uint32_t i = 0; i < binding->descriptorCount; i++) {
            VK_FROM_HANDLE(nvk_sampler, sampler, binding->pImmutableSamplers[i]);
            layout->binding[b].immutable_samplers[i] = sampler;
            const uint8_t sampler_plane_count = sampler->vk.ycbcr_conversion ?
               vk_format_get_plane_count(sampler->vk.ycbcr_conversion->state.format) : 1;
            if (max_plane_count < sampler_plane_count)
               max_plane_count = sampler_plane_count;
         }
      }

      stride *= max_plane_count;

      if (stride > 0) {
         assert(stride <= UINT8_MAX);
         assert(util_is_power_of_two_nonzero(alignment));

         buffer_size = align64(buffer_size, alignment);
         layout->binding[b].offset = buffer_size;
         layout->binding[b].stride = stride;

         if (layout->binding[b].flags &
             VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT) {
            /* From the Vulkan 1.3.256 spec:
             *
             *    VUID-VkDescriptorSetLayoutBindingFlagsCreateInfo-pBindingFlags-03004
             *    "If an element of pBindingFlags includes
             *    VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT, then
             *    all other elements of
             *    VkDescriptorSetLayoutCreateInfo::pBindings must have a
             *    smaller value of binding"
             *
             * In other words, it has to be the last binding.  Its actual
             * allocation size is only computed at descriptor set allocation
             * time, so we don't add it to buffer_size here; since it is the
             * last binding, buffer_size is already aligned for it.
             */
            assert(b == num_bindings - 1);
         } else {
            buffer_size += stride * binding->descriptorCount;
         }
      }
   }

   layout->non_variable_descriptor_buffer_size = buffer_size;
   layout->dynamic_buffer_count = dynamic_buffer_count;

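   /* Hash the layout description so that two layouts with identical contents
    * hash the same.  Immutable samplers are not included for now.
    */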
   struct mesa_blake3 blake3_ctx;
   _mesa_blake3_init(&blake3_ctx);

#define BLAKE3_UPDATE_VALUE(x) _mesa_blake3_update(&blake3_ctx, &(x), sizeof(x));
   BLAKE3_UPDATE_VALUE(layout->non_variable_descriptor_buffer_size);
   BLAKE3_UPDATE_VALUE(layout->dynamic_buffer_count);
   BLAKE3_UPDATE_VALUE(layout->binding_count);

   for (uint32_t b = 0; b < num_bindings; b++) {
      BLAKE3_UPDATE_VALUE(layout->binding[b].type);
      BLAKE3_UPDATE_VALUE(layout->binding[b].flags);
      BLAKE3_UPDATE_VALUE(layout->binding[b].array_size);
      BLAKE3_UPDATE_VALUE(layout->binding[b].offset);
      BLAKE3_UPDATE_VALUE(layout->binding[b].stride);
      BLAKE3_UPDATE_VALUE(layout->binding[b].dynamic_buffer_index);
      /* Immutable samplers are ignored for now */
   }
#undef BLAKE3_UPDATE_VALUE

   _mesa_blake3_final(&blake3_ctx, layout->vk.blake3);

   *pSetLayout = nvk_descriptor_set_layout_to_handle(layout);

   return VK_SUCCESS;
}

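/* Implements vkGetDescriptorSetLayoutSupport().  This mirrors the sizing
 * logic in nvk_CreateDescriptorSetLayout() without allocating anything: it
 * checks that the dynamic buffer count and the descriptor buffer size fit
 * within the device limits.
 */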
VKAPI_ATTR void VKAPI_CALL
nvk_GetDescriptorSetLayoutSupport(VkDevice device,
                                  const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                                  VkDescriptorSetLayoutSupport *pSupport)
{
   VK_FROM_HANDLE(nvk_device, dev, device);
   struct nvk_physical_device *pdev = nvk_device_physical(dev);

   const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);
   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);

   /* Figure out the maximum alignment up-front. Otherwise, we need to sort
    * the list of descriptors by binding number in order to get the size
    * accumulation right.
    */
   uint32_t max_align = 0;
   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[i];
      const VkMutableDescriptorTypeListEXT *type_list =
         nvk_descriptor_get_type_list(binding->descriptorType,
                                      mutable_info, i);

      uint32_t stride, alignment;
      nvk_descriptor_stride_align_for_type(pdev, binding->descriptorType,
                                           type_list, &stride, &alignment);
      max_align = MAX2(max_align, alignment);
   }

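   /* Accumulate the non-variable descriptor size, keeping it aligned to
    * max_align so the result doesn't depend on binding order.  A binding
    * with VARIABLE_DESCRIPTOR_COUNT (there can be at most one, and it must
    * have the highest binding number) is tracked separately so we can also
    * report maxVariableDescriptorCount below.
    */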
   uint64_t non_variable_size = 0;
   uint32_t variable_stride = 0;
   uint32_t variable_count = 0;
   uint8_t dynamic_buffer_count = 0;

   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[i];

      VkDescriptorBindingFlags flags = 0;
      if (binding_flags != NULL && binding_flags->bindingCount > 0)
         flags = binding_flags->pBindingFlags[i];

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         dynamic_buffer_count += binding->descriptorCount;
         break;
      default:
         break;
      }

      const VkMutableDescriptorTypeListEXT *type_list =
         nvk_descriptor_get_type_list(binding->descriptorType,
                                      mutable_info, i);

      uint32_t stride, alignment;
      nvk_descriptor_stride_align_for_type(pdev, binding->descriptorType,
                                           type_list, &stride, &alignment);

      if (stride > 0) {
         assert(stride <= UINT8_MAX);
         assert(util_is_power_of_two_nonzero(alignment));

         if (flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT) {
            /* From the Vulkan 1.3.256 spec:
             *
             *    "For the purposes of this command, a variable-sized
             *    descriptor binding with a descriptorCount of zero is treated
             *    as if the descriptorCount is one"
             */
            variable_count = MAX2(1, binding->descriptorCount);
            variable_stride = stride;
         } else {
            /* Since we're aligning to the maximum and since this is just a
             * check for whether or not the max buffer size is big enough, we
             * keep non_variable_size aligned to max_align.
             */
            non_variable_size += stride * binding->descriptorCount;
            non_variable_size = align64(non_variable_size, max_align);
         }
      }
   }

   uint64_t buffer_size = non_variable_size;
   if (variable_stride > 0) {
      buffer_size += variable_stride * variable_count;
      buffer_size = align64(buffer_size, max_align);
   }

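   /* Push descriptor sets have a smaller, fixed size limit than regular
    * descriptor sets, so compare against the right one.
    */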
   uint32_t max_buffer_size;
   if (pCreateInfo->flags &
       VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR)
      max_buffer_size = NVK_PUSH_DESCRIPTOR_SET_SIZE;
   else
      max_buffer_size = NVK_MAX_DESCRIPTOR_SET_SIZE;

   pSupport->supported = dynamic_buffer_count <= NVK_MAX_DYNAMIC_BUFFERS &&
                         buffer_size <= max_buffer_size;

   vk_foreach_struct(ext, pSupport->pNext) {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT: {
         VkDescriptorSetVariableDescriptorCountLayoutSupport *vs = (void *)ext;
         if (variable_stride > 0) {
            vs->maxVariableDescriptorCount =
               (max_buffer_size - non_variable_size) / variable_stride;
         } else {
            vs->maxVariableDescriptorCount = 0;
         }
         break;
      }

      default:
         nvk_debug_ignored_stype(ext->sType);
         break;
      }
   }
}