1 /*
2 * Copyright © 2015 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include <assert.h>
25 #include <stdbool.h>
26 #include <string.h>
27 #include <unistd.h>
28 #include <fcntl.h>
29
30 #include "util/mesa-sha1.h"
31 #include "vk_util.h"
32
33 #include "anv_private.h"
34
35 /*
36 * Descriptor set layouts.
37 */
38
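/* Compute the required alignment, in bytes, of the surface and sampler parts
 * of a descriptor with the given data flags under the given layout type.
 */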
39 static void
40 anv_descriptor_data_alignment(enum anv_descriptor_data data,
41 enum anv_descriptor_set_layout_type layout_type,
42 unsigned *out_surface_align,
43 unsigned *out_sampler_align)
44 {
45 unsigned surface_align = 1, sampler_align = 1;
46
47 if (data & (ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE |
48 ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE |
49 ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE))
50 surface_align = MAX2(surface_align, 8);
51
52 if (data & ANV_DESCRIPTOR_SURFACE)
53 surface_align = MAX2(surface_align, ANV_SURFACE_STATE_SIZE);
54
55 if (data & ANV_DESCRIPTOR_SURFACE_SAMPLER) {
56 surface_align = MAX2(surface_align, ANV_SURFACE_STATE_SIZE);
57 if (layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT)
58 sampler_align = MAX2(sampler_align, ANV_SAMPLER_STATE_SIZE);
59 }
60
61 if (data & ANV_DESCRIPTOR_SAMPLER) {
62 if (layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT)
63 sampler_align = MAX2(sampler_align, ANV_SAMPLER_STATE_SIZE);
64 else
65 surface_align = MAX2(surface_align, ANV_SAMPLER_STATE_SIZE);
66 }
67
68 if (data & ANV_DESCRIPTOR_INLINE_UNIFORM)
69 surface_align = MAX2(surface_align, ANV_UBO_ALIGNMENT);
70
71 *out_surface_align = surface_align;
72 *out_sampler_align = sampler_align;
73 }
74
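/* Map a Vulkan descriptor type to the anv_descriptor_data flags used when
 * the device relies on indirect descriptors.
 */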
75 static enum anv_descriptor_data
76 anv_indirect_descriptor_data_for_type(VkDescriptorType type)
77 {
78 enum anv_descriptor_data data = 0;
79
80 switch (type) {
81 case VK_DESCRIPTOR_TYPE_SAMPLER:
82 data = ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
83 ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE;
84 break;
85
86 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
87 data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
88 ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
89 ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE;
90 break;
91
92 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
93 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
94 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
95 data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
96 ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE;
97 break;
98
99 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
100 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
101 data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
102 ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE;
103 break;
104
105 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
106 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
107 data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
108 ANV_DESCRIPTOR_BUFFER_VIEW;
109 break;
110
111 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
112 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
113 data = ANV_DESCRIPTOR_BTI_SURFACE_STATE;
114 break;
115
116 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
117 data = ANV_DESCRIPTOR_INLINE_UNIFORM;
118 break;
119
120 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
121 data = ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE;
122 break;
123
124 default:
125 unreachable("Unsupported descriptor type");
126 }
127
128 /* We also need to push SSBO address ranges so that we can use A64
129 * messages in the shader.
130 */
131 if (type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ||
132 type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
133 type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
134 type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)
135 data |= ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE;
136
137 return data;
138 }
139
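/* Map a Vulkan descriptor type to the anv_descriptor_data flags used with
 * direct descriptors, taking the set creation flags (push descriptors,
 * embedded samplers, descriptor buffers) into account.
 */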
140 static enum anv_descriptor_data
141 anv_direct_descriptor_data_for_type(const struct anv_physical_device *device,
142 enum anv_descriptor_set_layout_type layout_type,
143 VkDescriptorSetLayoutCreateFlags set_flags,
144 VkDescriptorType type)
145 {
146 enum anv_descriptor_data data = 0;
147
148 switch (type) {
149 case VK_DESCRIPTOR_TYPE_SAMPLER:
150 if (set_flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT)
151 return 0;
152 data = ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
153 ANV_DESCRIPTOR_SAMPLER;
154 break;
155
156 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
157 if (layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT) {
158 data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
159 ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
160 ANV_DESCRIPTOR_SURFACE |
161 ANV_DESCRIPTOR_SAMPLER;
162 } else {
163 data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
164 ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
165 ANV_DESCRIPTOR_SURFACE_SAMPLER;
166 }
167 break;
168
169 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
170 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
171 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
172 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
173 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
174 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
175 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
176 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
177 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
178 data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
179 ANV_DESCRIPTOR_SURFACE;
180 break;
181
182 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
183 data = ANV_DESCRIPTOR_INLINE_UNIFORM;
184 break;
185
186 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
187 data = ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE;
188 break;
189
190 default:
191 unreachable("Unsupported descriptor type");
192 }
193
194 if (layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_BUFFER) {
195 if (set_flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR) {
196 /* Push descriptors are special with descriptor buffers. On Gfx12.5+
197 * they have their own pool and are not reachable by the binding
198 * table. On previous generations, they are only reachable through
199 * the binding table.
200 */
201 if (device->uses_ex_bso) {
202 data &= ~(ANV_DESCRIPTOR_BTI_SURFACE_STATE |
203 ANV_DESCRIPTOR_BTI_SAMPLER_STATE);
204 }
205 } else {
206 /* Non-push descriptor buffers cannot be accessed through the binding
207 * table on any platform.
208 */
209 data &= ~(ANV_DESCRIPTOR_BTI_SURFACE_STATE |
210 ANV_DESCRIPTOR_BTI_SAMPLER_STATE);
211 }
212 }
213
214 return data;
215 }
216
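/* Dispatch to the direct or indirect variant based on the layout type and
 * whether the physical device uses indirect descriptors.
 */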
217 static enum anv_descriptor_data
218 anv_descriptor_data_for_type(const struct anv_physical_device *device,
219 enum anv_descriptor_set_layout_type layout_type,
220 VkDescriptorSetLayoutCreateFlags set_flags,
221 VkDescriptorType type)
222 {
223 if (layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_BUFFER)
224 return anv_direct_descriptor_data_for_type(device, layout_type, set_flags, type);
225 else if (device->indirect_descriptors)
226 return anv_indirect_descriptor_data_for_type(type);
227 else
228 return anv_direct_descriptor_data_for_type(device, layout_type, set_flags, type);
229 }
230
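/* For VK_DESCRIPTOR_TYPE_MUTABLE_EXT bindings, return the union of the data
 * flags of every type the binding can hold. If no type list is provided for
 * this binding, assume every type allowed for mutable descriptors.
 */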
231 static enum anv_descriptor_data
232 anv_descriptor_data_for_mutable_type(const struct anv_physical_device *device,
233 enum anv_descriptor_set_layout_type layout_type,
234 VkDescriptorSetLayoutCreateFlags set_flags,
235 const VkMutableDescriptorTypeCreateInfoEXT *mutable_info,
236 int binding)
237 {
238 enum anv_descriptor_data desc_data = 0;
239
240 if (!mutable_info || mutable_info->mutableDescriptorTypeListCount <= binding) {
241 for (VkDescriptorType i = 0; i <= VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT; i++) {
242 if (i == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
243 i == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
244 i == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
245 continue;
246
247 desc_data |= anv_descriptor_data_for_type(device, layout_type, set_flags, i);
248 }
249
250 desc_data |= anv_descriptor_data_for_type(
251 device, layout_type, set_flags, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
252
253 return desc_data;
254 }
255
256 const VkMutableDescriptorTypeListEXT *type_list =
257 &mutable_info->pMutableDescriptorTypeLists[binding];
258 for (uint32_t i = 0; i < type_list->descriptorTypeCount; i++) {
259 desc_data |=
260 anv_descriptor_data_for_type(device, layout_type, set_flags,
261 type_list->pDescriptorTypes[i]);
262 }
263
264 return desc_data;
265 }
266
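/* Compute how many bytes a descriptor with the given data flags occupies in
 * the surface and sampler parts of the descriptor buffer.
 */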
267 static void
268 anv_descriptor_data_size(enum anv_descriptor_data data,
269 enum anv_descriptor_set_layout_type layout_type,
270 uint16_t *out_surface_size,
271 uint16_t *out_sampler_size)
272 {
273 unsigned surface_size = 0;
274 unsigned sampler_size = 0;
275
276 if (data & ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE)
277 surface_size += sizeof(struct anv_sampled_image_descriptor);
278
279 if (data & ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE)
280 surface_size += sizeof(struct anv_storage_image_descriptor);
281
282 if (data & ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE)
283 surface_size += sizeof(struct anv_address_range_descriptor);
284
285 if (data & ANV_DESCRIPTOR_SURFACE)
286 surface_size += ANV_SURFACE_STATE_SIZE;
287
288 /* Direct descriptors have sampler states stored separately */
289 if (layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT) {
290 if (data & ANV_DESCRIPTOR_SAMPLER)
291 sampler_size += ANV_SAMPLER_STATE_SIZE;
292
293 if (data & ANV_DESCRIPTOR_SURFACE_SAMPLER) {
294 surface_size += ANV_SURFACE_STATE_SIZE;
295 sampler_size += ANV_SAMPLER_STATE_SIZE;
296 }
297 } else {
298 if (data & ANV_DESCRIPTOR_SAMPLER)
299 surface_size += ANV_SAMPLER_STATE_SIZE;
300
301 if (data & ANV_DESCRIPTOR_SURFACE_SAMPLER) {
302 surface_size += ALIGN(ANV_SURFACE_STATE_SIZE + ANV_SAMPLER_STATE_SIZE,
303 ANV_SURFACE_STATE_SIZE);
304 }
305 }
306
307 *out_surface_size = surface_size;
308 *out_sampler_size = sampler_size;
309 }
310
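/* Whether a descriptor of this type consumes any space in the descriptor
 * buffer (inline uniform blocks always do).
 */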
311 static bool
312 anv_needs_descriptor_buffer(VkDescriptorType desc_type,
313 enum anv_descriptor_set_layout_type layout_type,
314 enum anv_descriptor_data desc_data)
315 {
316 if (desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
317 return true;
318
319 uint16_t surface_size, sampler_size;
320 anv_descriptor_data_size(desc_data, layout_type,
321 &surface_size, &sampler_size);
322 return surface_size > 0 || sampler_size > 0;
323 }
324
325 /** Returns the size in bytes of each descriptor with the given layout */
326 static void
327 anv_descriptor_size(const struct anv_descriptor_set_binding_layout *layout,
328 enum anv_descriptor_set_layout_type layout_type,
329 uint16_t *out_surface_stride,
330 uint16_t *out_sampler_stride)
331 {
332 if (layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM) {
333 assert(layout->data == ANV_DESCRIPTOR_INLINE_UNIFORM);
334 assert(layout->array_size <= UINT16_MAX);
335 *out_surface_stride = layout->array_size;
336 *out_sampler_stride = 0;
337 return;
338 }
339
340 anv_descriptor_data_size(layout->data, layout_type,
341 out_surface_stride,
342 out_sampler_stride);
343 }
344
345 /** Returns size in bytes of the biggest descriptor in the given layout */
346 static void
347 anv_descriptor_size_for_mutable_type(const struct anv_physical_device *device,
348 enum anv_descriptor_set_layout_type layout_type,
349 VkDescriptorSetLayoutCreateFlags set_flags,
350 const VkMutableDescriptorTypeCreateInfoEXT *mutable_info,
351 int binding,
352 uint16_t *out_surface_stride,
353 uint16_t *out_sampler_stride)
354 {
355 *out_surface_stride = 0;
356 *out_sampler_stride = 0;
357
358 if (!mutable_info ||
359 mutable_info->mutableDescriptorTypeListCount <= binding ||
360 binding >= mutable_info->mutableDescriptorTypeListCount) {
361 for (VkDescriptorType i = 0; i <= VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT; i++) {
362
363 if (i == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
364 i == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
365 i == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
366 continue;
367
368 enum anv_descriptor_data desc_data =
369 anv_descriptor_data_for_type(device, layout_type, set_flags, i);
370 uint16_t surface_stride, sampler_stride;
371 anv_descriptor_data_size(desc_data, layout_type,
372 &surface_stride, &sampler_stride);
373
374 *out_surface_stride = MAX2(*out_surface_stride, surface_stride);
375 *out_sampler_stride = MAX2(*out_sampler_stride, sampler_stride);
376 }
377
378 enum anv_descriptor_data desc_data =
379 anv_descriptor_data_for_type(device, layout_type, set_flags,
380 VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
381 uint16_t surface_stride, sampler_stride;
382 anv_descriptor_data_size(desc_data, layout_type,
383 &surface_stride, &sampler_stride);
384
385 *out_surface_stride = MAX2(*out_surface_stride, surface_stride);
386 *out_sampler_stride = MAX2(*out_sampler_stride, sampler_stride);
387
388 return;
389 }
390
391 const VkMutableDescriptorTypeListEXT *type_list =
392 &mutable_info->pMutableDescriptorTypeLists[binding];
393 for (uint32_t i = 0; i < type_list->descriptorTypeCount; i++) {
394 enum anv_descriptor_data desc_data =
395 anv_descriptor_data_for_type(device, layout_type, set_flags,
396 type_list->pDescriptorTypes[i]);
397
398 uint16_t surface_stride, sampler_stride;
399 anv_descriptor_data_size(desc_data, layout_type,
400 &surface_stride, &sampler_stride);
401
402 *out_surface_stride = MAX2(*out_surface_stride, surface_stride);
403 *out_sampler_stride = MAX2(*out_sampler_stride, sampler_stride);
404 }
405 }
406
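/* Whether descriptors with the given data flags can be accessed bindlessly
 * (i.e. without going through the binding table), given the set flags and
 * the platform capabilities.
 */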
407 static bool
408 anv_descriptor_data_supports_bindless(const struct anv_physical_device *pdevice,
409 VkDescriptorSetLayoutCreateFlags set_flags,
410 enum anv_descriptor_data data)
411 {
412 if (set_flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT) {
413 /* When using descriptor buffers, on platforms that don't have extended
414 * bindless offset, all push descriptors have to go through the binding
415 * tables.
416 */
417 if (!pdevice->uses_ex_bso &&
418 (set_flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR)) {
419 return data & (ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE |
420 ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE |
421 ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE);
422 }
423
424 /* Otherwise we can do bindless for everything */
425 return true;
426 } else {
427 if (pdevice->indirect_descriptors) {
428 return data & (ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE |
429 ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE |
430 ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE);
431 }
432
433 /* Direct descriptors support bindless for everything */
434 return true;
435 }
436 }
437
438 bool
439 anv_descriptor_supports_bindless(const struct anv_physical_device *pdevice,
440 const struct anv_descriptor_set_layout *set,
441 const struct anv_descriptor_set_binding_layout *binding)
442 {
443 return anv_descriptor_data_supports_bindless(pdevice, set->flags, binding->data);
444 }
445
446 bool
447 anv_descriptor_requires_bindless(const struct anv_physical_device *pdevice,
448 const struct anv_descriptor_set_layout *set,
449 const struct anv_descriptor_set_binding_layout *binding)
450 {
451 if (pdevice->always_use_bindless)
452 return anv_descriptor_supports_bindless(pdevice, set, binding);
453
454 if (set->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR)
455 return false;
456
457 if (set->flags & (VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT |
458 VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT))
459 return true;
460
461 static const VkDescriptorBindingFlagBits flags_requiring_bindless =
462 VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT |
463 VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT |
464 VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT;
465
466 return (binding->flags & flags_requiring_bindless) != 0;
467 }
468
469 static enum anv_descriptor_set_layout_type
470 anv_descriptor_set_layout_type_for_flags(const struct anv_physical_device *device,
471 const VkDescriptorSetLayoutCreateInfo *pCreateInfo)
472 {
473 if (pCreateInfo->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT)
474 return ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_BUFFER;
475 else if (device->indirect_descriptors)
476 return ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_INDIRECT;
477 else
478 return ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT;
479 }
480
481 static bool
482 mutable_list_includes_type(const VkMutableDescriptorTypeCreateInfoEXT *mutable_info,
483 uint32_t binding, VkDescriptorType type)
484 {
485 if (!mutable_info || mutable_info->mutableDescriptorTypeListCount == 0)
486 return true;
487
488 const VkMutableDescriptorTypeListEXT *type_list =
489 &mutable_info->pMutableDescriptorTypeLists[binding];
490 for (uint32_t i = 0; i < type_list->descriptorTypeCount; i++) {
491 if (type_list->pDescriptorTypes[i] == type)
492 return true;
493 }
494
495 return false;
496 }
497
498 void anv_GetDescriptorSetLayoutSupport(
499 VkDevice _device,
500 const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
501 VkDescriptorSetLayoutSupport* pSupport)
502 {
503 ANV_FROM_HANDLE(anv_device, device, _device);
504 const struct anv_physical_device *pdevice = device->physical;
505
506 uint32_t surface_count[MESA_VULKAN_SHADER_STAGES] = { 0, };
507 VkDescriptorType varying_desc_type = VK_DESCRIPTOR_TYPE_MAX_ENUM;
508 bool needs_descriptor_buffer = false;
509
510 const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
511 vk_find_struct_const(pCreateInfo->pNext,
512 DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);
513 const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
514 vk_find_struct_const(pCreateInfo->pNext,
515 MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);
516
517 enum anv_descriptor_set_layout_type layout_type =
518 anv_descriptor_set_layout_type_for_flags(pdevice, pCreateInfo);
519
520 for (uint32_t b = 0; b < pCreateInfo->bindingCount; b++) {
521 const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[b];
522
523 VkDescriptorBindingFlags flags = 0;
524 if (binding_flags_info && binding_flags_info->bindingCount > 0) {
525 assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
526 flags = binding_flags_info->pBindingFlags[b];
527 }
528
529 /* Combined image/sampler descriptors are not supported with descriptor
530 * buffers & mutable descriptor types because we cannot know from the
531 * shader where to find the sampler structure. It can be written to the
532 * beginning of the descriptor (at offset 0) or in the second part (at
533 * offset 64 bytes).
534 */
535 if ((pCreateInfo->flags &
536 VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT) &&
537 binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT &&
538 mutable_list_includes_type(mutable_info, b,
539 VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)) {
540 pSupport->supported = false;
541 return;
542 }
543
544 enum anv_descriptor_data desc_data =
545 binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
546 anv_descriptor_data_for_mutable_type(pdevice, layout_type,
547 pCreateInfo->flags,
548 mutable_info, b) :
549 anv_descriptor_data_for_type(pdevice, layout_type,
550 pCreateInfo->flags,
551 binding->descriptorType);
552
553 if (anv_needs_descriptor_buffer(binding->descriptorType,
554 layout_type, desc_data))
555 needs_descriptor_buffer = true;
556
557 if (flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)
558 varying_desc_type = binding->descriptorType;
559
560 switch (binding->descriptorType) {
561 case VK_DESCRIPTOR_TYPE_SAMPLER:
562 /* There is no real limit on samplers */
563 break;
564
565 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
566 /* Inline uniforms don't use a binding */
567 break;
568
569 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
570 if (anv_descriptor_data_supports_bindless(pdevice,
571 pCreateInfo->flags,
572 desc_data))
573 break;
574
575 if (binding->pImmutableSamplers) {
576 for (uint32_t i = 0; i < binding->descriptorCount; i++) {
577 ANV_FROM_HANDLE(anv_sampler, sampler,
578 binding->pImmutableSamplers[i]);
579 anv_foreach_stage(s, binding->stageFlags)
580 surface_count[s] += sampler->n_planes;
581 }
582 } else {
583 anv_foreach_stage(s, binding->stageFlags)
584 surface_count[s] += binding->descriptorCount;
585 }
586 break;
587
588 default:
589 if (anv_descriptor_data_supports_bindless(pdevice,
590 pCreateInfo->flags,
591 desc_data))
592 break;
593
594 anv_foreach_stage(s, binding->stageFlags)
595 surface_count[s] += binding->descriptorCount;
596 break;
597 }
598 }
599
600 for (unsigned s = 0; s < ARRAY_SIZE(surface_count); s++) {
601 if (needs_descriptor_buffer)
602 surface_count[s] += 1;
603 }
604
605 VkDescriptorSetVariableDescriptorCountLayoutSupport *vdcls =
606 vk_find_struct(pSupport->pNext,
607 DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT);
608 if (vdcls != NULL) {
609 if (varying_desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
610 vdcls->maxVariableDescriptorCount = MAX_INLINE_UNIFORM_BLOCK_SIZE;
611 } else if (varying_desc_type != VK_DESCRIPTOR_TYPE_MAX_ENUM) {
612 vdcls->maxVariableDescriptorCount = UINT16_MAX;
613 } else {
614 vdcls->maxVariableDescriptorCount = 0;
615 }
616 }
617
618 bool supported = true;
619 for (unsigned s = 0; s < ARRAY_SIZE(surface_count); s++) {
620 /* Our maximum binding table size is 240 and we need to reserve 8 for
621 * render targets.
622 */
623 if (surface_count[s] > MAX_BINDING_TABLE_SIZE - MAX_RTS)
624 supported = false;
625 }
626
627 pSupport->supported = supported;
628 }
629
630 VkResult anv_CreateDescriptorSetLayout(
631 VkDevice _device,
632 const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
633 const VkAllocationCallbacks* pAllocator,
634 VkDescriptorSetLayout* pSetLayout)
635 {
636 ANV_FROM_HANDLE(anv_device, device, _device);
637
638 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
639
640 uint32_t num_bindings = 0;
641 uint32_t immutable_sampler_count = 0;
642 for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
643 num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);
644
645 /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
646 *
647 * "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
648 * VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
649 * pImmutableSamplers can be used to initialize a set of immutable
650 * samplers. [...] If descriptorType is not one of these descriptor
651 * types, then pImmutableSamplers is ignored."
652 *
653 * We need to be careful here and only parse pImmutableSamplers if we
654 * have one of the right descriptor types.
655 */
656 VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;
657 if ((desc_type == VK_DESCRIPTOR_TYPE_SAMPLER ||
658 desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
659 pCreateInfo->pBindings[j].pImmutableSamplers)
660 immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
661 }
662
663 /* We need to allocate descriptor set layouts off the device allocator
664 * with DEVICE scope because they are reference counted and may not be
665 * destroyed when vkDestroyDescriptorSetLayout is called.
666 */
667 VK_MULTIALLOC(ma);
668 VK_MULTIALLOC_DECL(&ma, struct anv_descriptor_set_layout, set_layout, 1);
669 VK_MULTIALLOC_DECL(&ma, struct anv_descriptor_set_binding_layout,
670 bindings, num_bindings);
671 VK_MULTIALLOC_DECL(&ma, struct anv_sampler *, samplers,
672 immutable_sampler_count);
673
674 if (!vk_object_multizalloc(&device->vk, &ma, NULL,
675 VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT))
676 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
677
678 set_layout->ref_cnt = 1;
679 set_layout->binding_count = num_bindings;
680 set_layout->flags = pCreateInfo->flags;
681 set_layout->type = anv_descriptor_set_layout_type_for_flags(device->physical,
682 pCreateInfo);
683
684 for (uint32_t b = 0; b < num_bindings; b++) {
685 /* Initialize all binding_layout entries to -1 */
686 memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));
687
688 set_layout->binding[b].flags = 0;
689 set_layout->binding[b].data = 0;
690 set_layout->binding[b].max_plane_count = 0;
691 set_layout->binding[b].array_size = 0;
692 set_layout->binding[b].immutable_samplers = NULL;
693 }
694
695 /* Initialize all samplers to 0 */
696 assert((samplers == NULL) == (immutable_sampler_count == 0));
697 if (samplers != NULL) {
698 memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));
699 }
700
701 uint32_t buffer_view_count = 0;
702 uint32_t dynamic_offset_count = 0;
703 uint32_t descriptor_buffer_surface_size = 0;
704 uint32_t descriptor_buffer_sampler_size = 0;
705 uint32_t sampler_count = 0;
706
707 for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
708 const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
709 uint32_t b = binding->binding;
710 /* We temporarily store pCreateInfo->pBindings[] index (plus one) in the
711 * immutable_samplers pointer. This provides us with a quick-and-dirty
712 * way to sort the bindings by binding number.
713 */
714 set_layout->binding[b].immutable_samplers = (void *)(uintptr_t)(j + 1);
715 }
716
717 const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
718 vk_find_struct_const(pCreateInfo->pNext,
719 DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);
720
721 const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
722 vk_find_struct_const(pCreateInfo->pNext,
723 MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);
724
725 for (uint32_t b = 0; b < num_bindings; b++) {
726 /* We stashed the pCreateInfo->pBindings[] index (plus one) in the
727 * immutable_samplers pointer. Check for NULL (empty binding) and then
728 * reset it and compute the index.
729 */
730 if (set_layout->binding[b].immutable_samplers == NULL)
731 continue;
732 const uint32_t info_idx =
733 (uintptr_t)(void *)set_layout->binding[b].immutable_samplers - 1;
734 set_layout->binding[b].immutable_samplers = NULL;
735
736 const VkDescriptorSetLayoutBinding *binding =
737 &pCreateInfo->pBindings[info_idx];
738
739 if (binding->descriptorCount == 0)
740 continue;
741
742 set_layout->binding[b].type = binding->descriptorType;
743
744 if (binding_flags_info && binding_flags_info->bindingCount > 0) {
745 assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
746 set_layout->binding[b].flags =
747 binding_flags_info->pBindingFlags[info_idx];
748
749 /* From the Vulkan spec:
750 *
751 * "If VkDescriptorSetLayoutCreateInfo::flags includes
752 * VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, then
753 * all elements of pBindingFlags must not include
754 * VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
755 * VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, or
756 * VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT"
757 */
758 if (pCreateInfo->flags &
759 VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR) {
760 assert(!(set_layout->binding[b].flags &
761 (VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT |
762 VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT |
763 VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)));
764 }
765 }
766
767 set_layout->binding[b].data =
768 binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
769 anv_descriptor_data_for_mutable_type(device->physical,
770 set_layout->type,
771 pCreateInfo->flags,
772 mutable_info, b) :
773 anv_descriptor_data_for_type(device->physical,
774 set_layout->type,
775 pCreateInfo->flags,
776 binding->descriptorType);
777
778 set_layout->binding[b].array_size = binding->descriptorCount;
779 set_layout->binding[b].descriptor_index = set_layout->descriptor_count;
780 set_layout->descriptor_count += binding->descriptorCount;
781
782 if (set_layout->binding[b].data & ANV_DESCRIPTOR_BUFFER_VIEW) {
783 set_layout->binding[b].buffer_view_index = buffer_view_count;
784 buffer_view_count += binding->descriptorCount;
785 }
786
787 set_layout->binding[b].max_plane_count = 1;
788 switch (binding->descriptorType) {
789 case VK_DESCRIPTOR_TYPE_SAMPLER:
790 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
791 case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
792 if (binding->pImmutableSamplers) {
793 set_layout->binding[b].immutable_samplers = samplers;
794 samplers += binding->descriptorCount;
795
796 for (uint32_t i = 0; i < binding->descriptorCount; i++) {
797 ANV_FROM_HANDLE(anv_sampler, sampler,
798 binding->pImmutableSamplers[i]);
799
800 set_layout->binding[b].immutable_samplers[i] = sampler;
801 if (set_layout->binding[b].max_plane_count < sampler->n_planes)
802 set_layout->binding[b].max_plane_count = sampler->n_planes;
803 }
804 }
805 break;
806
807 default:
808 break;
809 }
810
811 switch (binding->descriptorType) {
812 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
813 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
814 set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
815 set_layout->dynamic_offset_stages[dynamic_offset_count] = binding->stageFlags;
816 dynamic_offset_count += binding->descriptorCount;
817 assert(dynamic_offset_count < MAX_DYNAMIC_BUFFERS);
818 break;
819
820 default:
821 break;
822 }
823
824 if (binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT) {
825 anv_descriptor_size_for_mutable_type(
826 device->physical, set_layout->type,
827 pCreateInfo->flags, mutable_info, b,
828 &set_layout->binding[b].descriptor_data_surface_size,
829 &set_layout->binding[b].descriptor_data_sampler_size);
830 } else {
831 anv_descriptor_size(&set_layout->binding[b],
832 set_layout->type,
833 &set_layout->binding[b].descriptor_data_surface_size,
834 &set_layout->binding[b].descriptor_data_sampler_size);
835 }
836
837 /* For multi-planar bindings, we make every descriptor consume the maximum
838 * number of planes so we don't have to bother with walking arrays and
839 * adding things up every time. Fortunately, YCbCr samplers aren't all
840 * that common and likely won't be in the middle of big arrays.
841 */
842 set_layout->binding[b].descriptor_surface_stride =
843 set_layout->binding[b].max_plane_count *
844 set_layout->binding[b].descriptor_data_surface_size;
845 set_layout->binding[b].descriptor_sampler_stride =
846 set_layout->binding[b].max_plane_count *
847 set_layout->binding[b].descriptor_data_sampler_size;
848
849 if (binding->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) {
850 sampler_count += binding->descriptorCount *
851 set_layout->binding[b].max_plane_count;
852 }
853
854 unsigned surface_align, sampler_align;
855 anv_descriptor_data_alignment(set_layout->binding[b].data,
856 set_layout->type,
857 &surface_align,
858 &sampler_align);
859 descriptor_buffer_surface_size =
860 align(descriptor_buffer_surface_size, surface_align);
861 descriptor_buffer_sampler_size =
862 align(descriptor_buffer_sampler_size, sampler_align);
863
864 if (binding->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
865 set_layout->binding[b].descriptor_surface_offset = descriptor_buffer_surface_size;
866 descriptor_buffer_surface_size += binding->descriptorCount;
867 } else {
868 set_layout->binding[b].descriptor_surface_offset = descriptor_buffer_surface_size;
869 descriptor_buffer_surface_size +=
870 set_layout->binding[b].descriptor_surface_stride * binding->descriptorCount;
871 }
872
873 set_layout->binding[b].descriptor_sampler_offset = descriptor_buffer_sampler_size;
874 descriptor_buffer_sampler_size +=
875 set_layout->binding[b].descriptor_sampler_stride * binding->descriptorCount;
876
877 set_layout->shader_stages |= binding->stageFlags;
878 }
879
880 /* Sanity checks */
881 assert(descriptor_buffer_sampler_size == 0 ||
882 set_layout->type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT);
883
884 set_layout->buffer_view_count = buffer_view_count;
885 set_layout->dynamic_offset_count = dynamic_offset_count;
886 set_layout->descriptor_buffer_surface_size = descriptor_buffer_surface_size;
887 set_layout->descriptor_buffer_sampler_size = descriptor_buffer_sampler_size;
888
889 if (pCreateInfo->flags &
890 VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT) {
891 assert(set_layout->descriptor_buffer_surface_size == 0);
892 assert(set_layout->descriptor_buffer_sampler_size == 0);
893 set_layout->embedded_sampler_count = sampler_count;
894 }
895
896 *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);
897
898 return VK_SUCCESS;
899 }
900
901 void
902 anv_descriptor_set_layout_destroy(struct anv_device *device,
903 struct anv_descriptor_set_layout *layout)
904 {
905 assert(layout->ref_cnt == 0);
906 vk_object_free(&device->vk, NULL, layout);
907 }
908
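/* Return the last binding of the layout if it was created with
 * VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT, NULL otherwise.
 */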
909 static const struct anv_descriptor_set_binding_layout *
910 set_layout_dynamic_binding(const struct anv_descriptor_set_layout *set_layout)
911 {
912 if (set_layout->binding_count == 0)
913 return NULL;
914
915 const struct anv_descriptor_set_binding_layout *last_binding =
916 &set_layout->binding[set_layout->binding_count - 1];
917 if (!(last_binding->flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT))
918 return NULL;
919
920 return last_binding;
921 }
922
923 static uint32_t
924 set_layout_descriptor_count(const struct anv_descriptor_set_layout *set_layout,
925 uint32_t var_desc_count)
926 {
927 const struct anv_descriptor_set_binding_layout *dynamic_binding =
928 set_layout_dynamic_binding(set_layout);
929 if (dynamic_binding == NULL)
930 return set_layout->descriptor_count;
931
932 assert(var_desc_count <= dynamic_binding->array_size);
933 uint32_t shrink = dynamic_binding->array_size - var_desc_count;
934
935 if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
936 return set_layout->descriptor_count;
937
938 return set_layout->descriptor_count - shrink;
939 }
940
941 static uint32_t
942 set_layout_buffer_view_count(const struct anv_descriptor_set_layout *set_layout,
943 uint32_t var_desc_count)
944 {
945 const struct anv_descriptor_set_binding_layout *dynamic_binding =
946 set_layout_dynamic_binding(set_layout);
947 if (dynamic_binding == NULL)
948 return set_layout->buffer_view_count;
949
950 assert(var_desc_count <= dynamic_binding->array_size);
951 uint32_t shrink = dynamic_binding->array_size - var_desc_count;
952
953 if (!(dynamic_binding->data & ANV_DESCRIPTOR_BUFFER_VIEW))
954 return set_layout->buffer_view_count;
955
956 return set_layout->buffer_view_count - shrink;
957 }
958
959 static bool
960 anv_descriptor_set_layout_empty(const struct anv_descriptor_set_layout *set_layout)
961 {
962 return set_layout->binding_count == 0;
963 }
964
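/* Compute the surface and sampler descriptor buffer sizes needed for a set,
 * shrinking the last binding according to the variable descriptor count.
 */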
965 static void
966 anv_descriptor_set_layout_descriptor_buffer_size(const struct anv_descriptor_set_layout *set_layout,
967 uint32_t var_desc_count,
968 uint32_t *out_surface_size,
969 uint32_t *out_sampler_size)
970 {
971 const struct anv_descriptor_set_binding_layout *dynamic_binding =
972 set_layout_dynamic_binding(set_layout);
973 if (dynamic_binding == NULL) {
974 *out_surface_size = ALIGN(set_layout->descriptor_buffer_surface_size,
975 ANV_UBO_ALIGNMENT);
976 *out_sampler_size = set_layout->descriptor_buffer_sampler_size;
977 return;
978 }
979
980 assert(var_desc_count <= dynamic_binding->array_size);
981 uint32_t shrink = dynamic_binding->array_size - var_desc_count;
982 uint32_t set_surface_size, set_sampler_size;
983
984 if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
985 /* Inline uniform blocks are specified to use the descriptor array
986 * size as the size in bytes of the block.
987 */
988 set_surface_size = set_layout->descriptor_buffer_surface_size - shrink;
989 set_sampler_size = 0;
990 } else {
991 set_surface_size =
992 set_layout->descriptor_buffer_surface_size > 0 ?
993 (set_layout->descriptor_buffer_surface_size -
994 shrink * dynamic_binding->descriptor_surface_stride) : 0;
995 set_sampler_size =
996 set_layout->descriptor_buffer_sampler_size > 0 ?
997 (set_layout->descriptor_buffer_sampler_size -
998 shrink * dynamic_binding->descriptor_sampler_stride) : 0;
999 }
1000
1001 *out_surface_size = ALIGN(set_surface_size, ANV_UBO_ALIGNMENT);
1002 *out_sampler_size = set_sampler_size;
1003 }
1004
1005 void anv_DestroyDescriptorSetLayout(
1006 VkDevice _device,
1007 VkDescriptorSetLayout _set_layout,
1008 const VkAllocationCallbacks* pAllocator)
1009 {
1010 ANV_FROM_HANDLE(anv_device, device, _device);
1011 ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);
1012
1013 if (!set_layout)
1014 return;
1015
1016 anv_descriptor_set_layout_unref(device, set_layout);
1017 }
1018
1019 void
1020 anv_descriptor_set_layout_print(const struct anv_descriptor_set_layout *layout)
1021 {
1022 fprintf(stderr, "set layout:\n");
1023 for (uint32_t b = 0; b < layout->binding_count; b++) {
1024 fprintf(stderr, " binding%03u: offsets=0x%08x/0x%08x sizes=%04u/%04u strides=%03u/%03u planes=%hhu count=%03u\n",
1025 b,
1026 layout->binding[b].descriptor_surface_offset,
1027 layout->binding[b].descriptor_sampler_offset,
1028 layout->binding[b].descriptor_data_surface_size,
1029 layout->binding[b].descriptor_data_sampler_size,
1030 layout->binding[b].descriptor_surface_stride,
1031 layout->binding[b].descriptor_sampler_stride,
1032 layout->binding[b].max_plane_count,
1033 layout->binding[b].array_size);
1034 }
1035 }
1036
1037 #define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, &(x), sizeof(x));
1038
1039 static void
1040 sha1_update_immutable_sampler(struct mesa_sha1 *ctx,
1041 bool embedded_sampler,
1042 const struct anv_sampler *sampler)
1043 {
1044 if (!sampler->vk.ycbcr_conversion)
1045 return;
1046
1047 /* Hash the conversion, if any, as it affects placement of descriptors in
1048 * the set due to the number of planes.
1049 */
1050 SHA1_UPDATE_VALUE(ctx, sampler->vk.ycbcr_conversion->state);
1051
1052 /* For embedded samplers, we need to hash the sampler parameters as the
1053 * sampler handle is baked into the shader and this ultimately is part of
1054 * the shader hash key. We can only consider 2 shaders identical if all
1055 * their embedded samplers parameters are identical.
1056 */
1057 if (embedded_sampler)
1058 SHA1_UPDATE_VALUE(ctx, sampler->sha1);
1059 }
1060
1061 static void
1062 sha1_update_descriptor_set_binding_layout(struct mesa_sha1 *ctx,
1063 bool embedded_samplers,
1064 const struct anv_descriptor_set_binding_layout *layout)
1065 {
1066 SHA1_UPDATE_VALUE(ctx, layout->flags);
1067 SHA1_UPDATE_VALUE(ctx, layout->data);
1068 SHA1_UPDATE_VALUE(ctx, layout->max_plane_count);
1069 SHA1_UPDATE_VALUE(ctx, layout->array_size);
1070 SHA1_UPDATE_VALUE(ctx, layout->descriptor_index);
1071 SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_index);
1072 SHA1_UPDATE_VALUE(ctx, layout->buffer_view_index);
1073 SHA1_UPDATE_VALUE(ctx, layout->descriptor_surface_offset);
1074 SHA1_UPDATE_VALUE(ctx, layout->descriptor_sampler_offset);
1075
1076 if (layout->immutable_samplers) {
1077 for (uint16_t i = 0; i < layout->array_size; i++) {
1078 sha1_update_immutable_sampler(ctx, embedded_samplers,
1079 layout->immutable_samplers[i]);
1080 }
1081 }
1082 }
1083
1084 static void
1085 sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
1086 const struct anv_descriptor_set_layout *layout)
1087 {
1088 SHA1_UPDATE_VALUE(ctx, layout->flags);
1089 SHA1_UPDATE_VALUE(ctx, layout->binding_count);
1090 SHA1_UPDATE_VALUE(ctx, layout->descriptor_count);
1091 SHA1_UPDATE_VALUE(ctx, layout->shader_stages);
1092 SHA1_UPDATE_VALUE(ctx, layout->buffer_view_count);
1093 SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_count);
1094 SHA1_UPDATE_VALUE(ctx, layout->descriptor_buffer_surface_size);
1095 SHA1_UPDATE_VALUE(ctx, layout->descriptor_buffer_sampler_size);
1096
1097 bool embedded_samplers =
1098 layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT;
1099
1100 for (uint16_t i = 0; i < layout->binding_count; i++) {
1101 sha1_update_descriptor_set_binding_layout(ctx, embedded_samplers,
1102 &layout->binding[i]);
1103 }
1104 }
1105
1106 /*
1107 * Pipeline layouts. These have nothing to do with the pipeline. They are
1108 * just multiple descriptor set layouts pasted together
1109 */
1110
1111 void
1112 anv_pipeline_sets_layout_init(struct anv_pipeline_sets_layout *layout,
1113 struct anv_device *device,
1114 bool independent_sets)
1115 {
1116 memset(layout, 0, sizeof(*layout));
1117
1118 layout->device = device;
1119 layout->push_descriptor_set_index = -1;
1120 layout->independent_sets = independent_sets;
1121 }
1122
1123 void
1124 anv_pipeline_sets_layout_add(struct anv_pipeline_sets_layout *layout,
1125 uint32_t set_idx,
1126 struct anv_descriptor_set_layout *set_layout)
1127 {
1128 if (layout->set[set_idx].layout)
1129 return;
1130
1131 /* Workaround for internal CTS issue 3584 */
1132 if (layout->independent_sets && anv_descriptor_set_layout_empty(set_layout))
1133 return;
1134
1135 if (layout->type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_UNKNOWN)
1136 layout->type = set_layout->type;
1137 else
1138 assert(layout->type == set_layout->type);
1139
1140 layout->num_sets = MAX2(set_idx + 1, layout->num_sets);
1141
1142 layout->set[set_idx].layout =
1143 anv_descriptor_set_layout_ref(set_layout);
1144
1145 layout->set[set_idx].dynamic_offset_start = layout->num_dynamic_buffers;
1146 layout->num_dynamic_buffers += set_layout->dynamic_offset_count;
1147
1148 assert(layout->num_dynamic_buffers < MAX_DYNAMIC_BUFFERS);
1149
1150 if (set_layout->flags &
1151 VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR) {
1152 assert(layout->push_descriptor_set_index == -1);
1153 layout->push_descriptor_set_index = set_idx;
1154 }
1155 }
1156
1157 uint32_t
1158 anv_pipeline_sets_layout_embedded_sampler_count(const struct anv_pipeline_sets_layout *layout)
1159 {
1160 uint32_t count = 0;
1161 for (unsigned s = 0; s < layout->num_sets; s++) {
1162 if (!layout->set[s].layout)
1163 continue;
1164 count += layout->set[s].layout->embedded_sampler_count;
1165 }
1166 return count;
1167 }
1168
1169 void
1170 anv_pipeline_sets_layout_hash(struct anv_pipeline_sets_layout *layout)
1171 {
1172 struct mesa_sha1 ctx;
1173 _mesa_sha1_init(&ctx);
1174 for (unsigned s = 0; s < layout->num_sets; s++) {
1175 if (!layout->set[s].layout)
1176 continue;
1177 sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
1178 _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
1179 sizeof(layout->set[s].dynamic_offset_start));
1180 }
1181 _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
1182 _mesa_sha1_final(&ctx, layout->sha1);
1183 }
1184
1185 void
1186 anv_pipeline_sets_layout_fini(struct anv_pipeline_sets_layout *layout)
1187 {
1188 for (unsigned s = 0; s < layout->num_sets; s++) {
1189 if (!layout->set[s].layout)
1190 continue;
1191
1192 anv_descriptor_set_layout_unref(layout->device, layout->set[s].layout);
1193 }
1194 }
1195
1196 void
1197 anv_pipeline_sets_layout_print(const struct anv_pipeline_sets_layout *layout)
1198 {
1199 fprintf(stderr, "layout: dyn_count=%u sets=%u ind=%u\n",
1200 layout->num_dynamic_buffers,
1201 layout->num_sets,
1202 layout->independent_sets);
1203 for (unsigned s = 0; s < layout->num_sets; s++) {
1204 if (!layout->set[s].layout)
1205 continue;
1206
1207 fprintf(stderr, " set%i: dyn_start=%u flags=0x%x\n",
1208 s, layout->set[s].dynamic_offset_start, layout->set[s].layout->flags);
1209 }
1210 }
1211
1212 VkResult anv_CreatePipelineLayout(
1213 VkDevice _device,
1214 const VkPipelineLayoutCreateInfo* pCreateInfo,
1215 const VkAllocationCallbacks* pAllocator,
1216 VkPipelineLayout* pPipelineLayout)
1217 {
1218 ANV_FROM_HANDLE(anv_device, device, _device);
1219 struct anv_pipeline_layout *layout;
1220
1221 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);
1222
1223 layout = vk_object_zalloc(&device->vk, pAllocator, sizeof(*layout),
1224 VK_OBJECT_TYPE_PIPELINE_LAYOUT);
1225 if (layout == NULL)
1226 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
1227
1228 anv_pipeline_sets_layout_init(&layout->sets_layout, device,
1229 pCreateInfo->flags & VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT);
1230
1231 for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
1232 ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
1233 pCreateInfo->pSetLayouts[set]);
1234
1235 /* VUID-VkPipelineLayoutCreateInfo-graphicsPipelineLibrary-06753
1236 *
1237 * "If graphicsPipelineLibrary is not enabled, elements of
1238 * pSetLayouts must be valid VkDescriptorSetLayout objects"
1239 *
1240 * As a result of supporting graphicsPipelineLibrary, we need to allow
1241 * null descriptor set layouts.
1242 */
1243 if (set_layout == NULL)
1244 continue;
1245
1246 anv_pipeline_sets_layout_add(&layout->sets_layout, set, set_layout);
1247 }
1248
1249 anv_pipeline_sets_layout_hash(&layout->sets_layout);
1250
1251 *pPipelineLayout = anv_pipeline_layout_to_handle(layout);
1252
1253 return VK_SUCCESS;
1254 }
1255
1256 void anv_DestroyPipelineLayout(
1257 VkDevice _device,
1258 VkPipelineLayout _pipelineLayout,
1259 const VkAllocationCallbacks* pAllocator)
1260 {
1261 ANV_FROM_HANDLE(anv_device, device, _device);
1262 ANV_FROM_HANDLE(anv_pipeline_layout, layout, _pipelineLayout);
1263
1264 if (!layout)
1265 return;
1266
1267 anv_pipeline_sets_layout_fini(&layout->sets_layout);
1268
1269 vk_object_free(&device->vk, pAllocator, layout);
1270 }
1271
1272 /*
1273 * Descriptor pools.
1274 *
1275 * These are implemented using a big pool of memory and a vma heap for the
1276 * host memory allocations and a state_stream and a free list for the buffer
1277 * view surface state. The spec allows us to fail to allocate due to
1278 * fragmentation in all cases but two: 1) after pool reset, allocating up
1279 * until the pool size with no freeing must succeed and 2) allocating and
1280 * freeing only descriptor sets with the same layout. Case 1) is easy enough,
1281 * and the vma heap ensures case 2).
1282 */
1283
1284 /* The vma heap reserves 0 to mean NULL; we have to offset by some amount to
1285 * ensure we can allocate the entire BO without hitting zero. The actual
1286 * amount doesn't matter.
1287 */
1288 #define POOL_HEAP_OFFSET 64
1289
1290 #define EMPTY 1
1291
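/* Initialize a descriptor pool heap: host memory for host-only pools,
 * otherwise a mapped BO, with a vma heap to sub-allocate descriptor sets.
 */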
1292 static VkResult
1293 anv_descriptor_pool_heap_init(struct anv_device *device,
1294 struct anv_descriptor_pool_heap *heap,
1295 uint32_t size,
1296 bool host_only,
1297 bool samplers)
1298 {
1299 if (size == 0)
1300 return VK_SUCCESS;
1301
1302 if (host_only) {
1303 heap->size = size;
1304 heap->host_mem = vk_zalloc(&device->vk.alloc, size, 8,
1305 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1306 if (heap->host_mem == NULL)
1307 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
1308 } else {
1309 const char *bo_name =
1310 device->physical->indirect_descriptors ? "indirect descriptors" :
1311 samplers ? "direct sampler" : "direct surfaces";
1312
1313 heap->size = align(size, 4096);
1314
1315 VkResult result = anv_device_alloc_bo(device,
1316 bo_name, heap->size,
1317 ANV_BO_ALLOC_CAPTURE |
1318 ANV_BO_ALLOC_MAPPED |
1319 ANV_BO_ALLOC_HOST_CACHED_COHERENT |
1320 (samplers ?
1321 ANV_BO_ALLOC_DYNAMIC_VISIBLE_POOL :
1322 ANV_BO_ALLOC_DESCRIPTOR_POOL),
1323 0 /* explicit_address */,
1324 &heap->bo);
1325 if (result != VK_SUCCESS)
1326 return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
1327 }
1328
1329 util_vma_heap_init(&heap->heap, POOL_HEAP_OFFSET, heap->size);
1330
1331 return VK_SUCCESS;
1332 }
1333
1334 static void
1335 anv_descriptor_pool_heap_fini(struct anv_device *device,
1336 struct anv_descriptor_pool_heap *heap)
1337 {
1338 if (heap->size == 0)
1339 return;
1340
1341 util_vma_heap_finish(&heap->heap);
1342
1343 if (heap->bo)
1344 anv_device_release_bo(device, heap->bo);
1345
1346 if (heap->host_mem)
1347 vk_free(&device->vk.alloc, heap->host_mem);
1348 }
1349
1350 static void
1351 anv_descriptor_pool_heap_reset(struct anv_device *device,
1352 struct anv_descriptor_pool_heap *heap)
1353 {
1354 if (heap->size == 0)
1355 return;
1356
1357 util_vma_heap_finish(&heap->heap);
1358 util_vma_heap_init(&heap->heap, POOL_HEAP_OFFSET, heap->size);
1359 }
1360
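/* Sub-allocate size bytes from the pool heap and fill out an anv_state
 * pointing at it. Distinguishes VK_ERROR_OUT_OF_POOL_MEMORY from
 * VK_ERROR_FRAGMENTED_POOL based on the remaining space.
 */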
1361 static VkResult
1362 anv_descriptor_pool_heap_alloc(struct anv_descriptor_pool *pool,
1363 struct anv_descriptor_pool_heap *heap,
1364 uint32_t size, uint32_t alignment,
1365 struct anv_state *state)
1366 {
1367 uint64_t pool_vma_offset =
1368 util_vma_heap_alloc(&heap->heap, size, alignment);
1369 if (pool_vma_offset == 0) {
1370 if (size > heap->size - heap->alloc_size)
1371 return vk_error(pool, VK_ERROR_OUT_OF_POOL_MEMORY);
1372 else
1373 return vk_error(pool, VK_ERROR_FRAGMENTED_POOL);
1374 }
1375
1376 assert(pool_vma_offset >= POOL_HEAP_OFFSET &&
1377 pool_vma_offset - POOL_HEAP_OFFSET <= INT32_MAX);
1378
1379 state->offset = pool_vma_offset - POOL_HEAP_OFFSET;
1380 state->alloc_size = size;
1381 if (heap->host_mem)
1382 state->map = heap->host_mem + state->offset;
1383 else
1384 state->map = heap->bo->map + state->offset;
1385 heap->alloc_size += size;
1386
1387 return VK_SUCCESS;
1388 }
1389
1390 static void
1391 anv_descriptor_pool_heap_free(struct anv_descriptor_pool_heap *heap,
1392 struct anv_state state)
1393 {
1394 heap->alloc_size -= state.alloc_size;
1395 util_vma_heap_free(&heap->heap,
1396 (uint64_t)state.offset + POOL_HEAP_OFFSET,
1397 state.alloc_size);
1398 }
1399
1400 VkResult anv_CreateDescriptorPool(
1401 VkDevice _device,
1402 const VkDescriptorPoolCreateInfo* pCreateInfo,
1403 const VkAllocationCallbacks* pAllocator,
1404 VkDescriptorPool* pDescriptorPool)
1405 {
1406 ANV_FROM_HANDLE(anv_device, device, _device);
1407 struct anv_descriptor_pool *pool;
1408
1409 const VkDescriptorPoolInlineUniformBlockCreateInfo *inline_info =
1410 vk_find_struct_const(pCreateInfo->pNext,
1411 DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO);
1412 const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
1413 vk_find_struct_const(pCreateInfo->pNext,
1414 MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);
1415
1416 uint32_t descriptor_count = 0;
1417 uint32_t buffer_view_count = 0;
1418 uint32_t descriptor_bo_surface_size = 0;
1419 uint32_t descriptor_bo_sampler_size = 0;
1420
1421 const enum anv_descriptor_set_layout_type layout_type =
1422 device->physical->indirect_descriptors ?
1423 ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_INDIRECT :
1424 ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT;
1425
1426 /* Workaround application bugs when we're allocating surfaces & samplers in
1427 * separate heaps (!indirect_descriptors). Some applications will specify a
1428 * count of samplers too small and we might fail allocations in
1429 * vkAllocateDescriptorSets().
1430 *
1431 * Find the highest count across all descriptor types and use that for
1432 * samplers.
1433 */
1434 uint32_t max_descriptor_count = 0;
1435 if (device->physical->instance->anv_upper_bound_descriptor_pool_sampler &&
1436 !device->physical->indirect_descriptors) {
1437 for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
1438 max_descriptor_count = MAX2(pCreateInfo->pPoolSizes[i].descriptorCount,
1439 max_descriptor_count);
1440 }
1441 }
1442
1443 for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
1444 enum anv_descriptor_data desc_data =
1445 pCreateInfo->pPoolSizes[i].type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
1446 anv_descriptor_data_for_mutable_type(device->physical, layout_type,
1447 pCreateInfo->flags,
1448 mutable_info, i) :
1449 anv_descriptor_data_for_type(device->physical, layout_type,
1450 pCreateInfo->flags,
1451 pCreateInfo->pPoolSizes[i].type);
1452
1453 if (desc_data & ANV_DESCRIPTOR_BUFFER_VIEW)
1454 buffer_view_count += pCreateInfo->pPoolSizes[i].descriptorCount;
1455
1456 uint16_t desc_surface_size, desc_sampler_size;
1457 if (pCreateInfo->pPoolSizes[i].type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT) {
1458 anv_descriptor_size_for_mutable_type(device->physical, layout_type,
1459 pCreateInfo->flags, mutable_info, i,
1460 &desc_surface_size, &desc_sampler_size);
1461 } else {
1462 anv_descriptor_data_size(desc_data, layout_type,
1463 &desc_surface_size, &desc_sampler_size);
1464 }
1465
1466 uint32_t desc_data_surface_size =
1467 desc_surface_size * pCreateInfo->pPoolSizes[i].descriptorCount;
1468 uint32_t desc_data_sampler_size =
1469 desc_sampler_size * MAX2(max_descriptor_count,
1470 pCreateInfo->pPoolSizes[i].descriptorCount);
1471
1472 /* Combined image sampler descriptors can take up to 3 slots if they
1473 * hold a YCbCr image.
1474 */
1475 if (pCreateInfo->pPoolSizes[i].type ==
1476 VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
1477 desc_data_surface_size *= 3;
1478 desc_data_sampler_size *= 3;
1479 }
1480
1481 if (pCreateInfo->pPoolSizes[i].type ==
1482 VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
1483 /* Inline uniform blocks are specified to use the descriptor array
1484 * size as the size in bytes of the block.
1485 */
1486 assert(inline_info);
1487 desc_data_surface_size += pCreateInfo->pPoolSizes[i].descriptorCount;
1488 }
1489
1490 descriptor_bo_surface_size += desc_data_surface_size;
1491 descriptor_bo_sampler_size += desc_data_sampler_size;
1492
1493 descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
1494 }
1495 /* We have to align descriptor buffer allocations to 32B so that we can
1496 * push descriptor buffers. This means that each descriptor buffer
1497 * allocated may burn up to 32B of extra space to get the right alignment.
1498 * (Technically, it's at most 28B because we're always going to start at
1499 * least 4B aligned but we're being conservative here.) Allocate enough
1500 * extra space that we can chop it into maxSets pieces and align each one
1501 * of them to 32B.
1502 */
1503 descriptor_bo_surface_size += ANV_UBO_ALIGNMENT * pCreateInfo->maxSets;
1504 /* We align inline uniform blocks to ANV_UBO_ALIGNMENT */
1505 if (inline_info) {
1506 descriptor_bo_surface_size +=
1507 ANV_UBO_ALIGNMENT * inline_info->maxInlineUniformBlockBindings;
1508 }
1509
1510 const bool host_only =
1511 pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT;
1512
1513 /* For host_only pools, allocate some memory to hold the written surface
1514 * states of the internal anv_buffer_views. With normal pools, the memory
1515 * holding surface states is allocated from the device surface_state_pool.
1516 */
1517 const size_t host_mem_size =
1518 pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +
1519 descriptor_count * sizeof(struct anv_descriptor) +
1520 buffer_view_count * sizeof(struct anv_buffer_view) +
1521 (host_only ? buffer_view_count * ANV_SURFACE_STATE_SIZE : 0);
1522
1523 pool = vk_object_zalloc(&device->vk, pAllocator,
1524 sizeof(*pool) + host_mem_size,
1525 VK_OBJECT_TYPE_DESCRIPTOR_POOL);
1526 if (!pool)
1527 return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
1528
1529 pool->host_mem_size = host_mem_size;
1530 util_vma_heap_init(&pool->host_heap, POOL_HEAP_OFFSET, host_mem_size);
1531
1532 pool->host_only = host_only;
1533
1534 VkResult result = anv_descriptor_pool_heap_init(device,
1535 &pool->surfaces,
1536 descriptor_bo_surface_size,
1537 pool->host_only,
1538 false /* samplers */);
1539 if (result != VK_SUCCESS) {
1540 vk_object_free(&device->vk, pAllocator, pool);
1541 return result;
1542 }
1543
1544 result = anv_descriptor_pool_heap_init(device,
1545 &pool->samplers,
1546 descriptor_bo_sampler_size,
1547 pool->host_only,
1548 true /* samplers */);
1549 if (result != VK_SUCCESS) {
1550 anv_descriptor_pool_heap_fini(device, &pool->surfaces);
1551 vk_object_free(&device->vk, pAllocator, pool);
1552 return result;
1553 }
1554
1555 /* All the surface states allocated by the descriptor pool are internal. We
1556 * have to allocate them to handle the fact that we do not have surface
1557 * states for VkBuffers.
1558 */
1559 anv_state_stream_init(&pool->surface_state_stream,
1560 &device->internal_surface_state_pool, 4096);
1561 pool->surface_state_free_list = NULL;
1562
1563 list_inithead(&pool->desc_sets);
1564
1565 ANV_RMV(descriptor_pool_create, device, pCreateInfo, pool, false);
1566
1567 *pDescriptorPool = anv_descriptor_pool_to_handle(pool);
1568
1569 return VK_SUCCESS;
1570 }
1571
1572 void anv_DestroyDescriptorPool(
1573 VkDevice _device,
1574 VkDescriptorPool _pool,
1575 const VkAllocationCallbacks* pAllocator)
1576 {
1577 ANV_FROM_HANDLE(anv_device, device, _device);
1578 ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);
1579
1580 if (!pool)
1581 return;
1582
1583 ANV_RMV(resource_destroy, device, pool);
1584
1585 list_for_each_entry_safe(struct anv_descriptor_set, set,
1586 &pool->desc_sets, pool_link) {
1587 anv_descriptor_set_layout_unref(device, set->layout);
1588 }
1589
1590 util_vma_heap_finish(&pool->host_heap);
1591
1592 anv_state_stream_finish(&pool->surface_state_stream);
1593
1594 anv_descriptor_pool_heap_fini(device, &pool->surfaces);
1595 anv_descriptor_pool_heap_fini(device, &pool->samplers);
1596
1597 vk_object_free(&device->vk, pAllocator, pool);
1598 }
1599
1600 VkResult anv_ResetDescriptorPool(
1601 VkDevice _device,
1602 VkDescriptorPool descriptorPool,
1603 VkDescriptorPoolResetFlags flags)
1604 {
1605 ANV_FROM_HANDLE(anv_device, device, _device);
1606 ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);
1607
1608 list_for_each_entry_safe(struct anv_descriptor_set, set,
1609 &pool->desc_sets, pool_link) {
1610 anv_descriptor_set_layout_unref(device, set->layout);
1611 }
1612 list_inithead(&pool->desc_sets);
1613
1614 util_vma_heap_finish(&pool->host_heap);
1615 util_vma_heap_init(&pool->host_heap, POOL_HEAP_OFFSET, pool->host_mem_size);
1616
1617 anv_descriptor_pool_heap_reset(device, &pool->surfaces);
1618 anv_descriptor_pool_heap_reset(device, &pool->samplers);
1619
1620 anv_state_stream_finish(&pool->surface_state_stream);
1621 anv_state_stream_init(&pool->surface_state_stream,
1622 &device->internal_surface_state_pool, 4096);
1623 pool->surface_state_free_list = NULL;
1624
1625 return VK_SUCCESS;
1626 }
1627
1628 static VkResult
1629 anv_descriptor_pool_alloc_set(struct anv_descriptor_pool *pool,
1630 uint32_t size,
1631 struct anv_descriptor_set **set)
1632 {
1633 uint64_t vma_offset = util_vma_heap_alloc(&pool->host_heap, size, 1);
1634
1635 if (vma_offset == 0) {
1636 if (size <= pool->host_heap.free_size) {
1637 return VK_ERROR_FRAGMENTED_POOL;
1638 } else {
1639 return VK_ERROR_OUT_OF_POOL_MEMORY;
1640 }
1641 }
1642
1643 assert(vma_offset >= POOL_HEAP_OFFSET);
1644 uint64_t host_mem_offset = vma_offset - POOL_HEAP_OFFSET;
1645
1646 *set = (struct anv_descriptor_set *) (pool->host_mem + host_mem_offset);
1647 (*set)->size = size;
1648
1649 return VK_SUCCESS;
1650 }
1651
1652 static void
1653 anv_descriptor_pool_free_set(struct anv_descriptor_pool *pool,
1654 struct anv_descriptor_set *set)
1655 {
1656 util_vma_heap_free(&pool->host_heap,
1657 ((char *) set - pool->host_mem) + POOL_HEAP_OFFSET,
1658 set->size);
1659 }
1660
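/* Free-list node for recycled surface states. The node is stored directly
 * in the CPU mapping of the freed surface state (see
 * anv_descriptor_pool_free_state()), so no extra host allocation is needed.
 */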
1661 struct surface_state_free_list_entry {
1662 void *next;
1663 struct anv_state state;
1664 };
1665
1666 static struct anv_state
1667 anv_descriptor_pool_alloc_state(struct anv_descriptor_pool *pool)
1668 {
1669 assert(!pool->host_only);
1670
1671 struct surface_state_free_list_entry *entry =
1672 pool->surface_state_free_list;
1673
1674 if (entry) {
1675 struct anv_state state = entry->state;
1676 pool->surface_state_free_list = entry->next;
1677 assert(state.alloc_size == ANV_SURFACE_STATE_SIZE);
1678 return state;
1679 } else {
1680 struct anv_state state =
1681 anv_state_stream_alloc(&pool->surface_state_stream,
1682 ANV_SURFACE_STATE_SIZE, 64);
1683 return state;
1684 }
1685 }
1686
1687 static void
1688 anv_descriptor_pool_free_state(struct anv_descriptor_pool *pool,
1689 struct anv_state state)
1690 {
1691 assert(state.alloc_size);
1692 /* Put the buffer view surface state back on the free list. */
1693 struct surface_state_free_list_entry *entry = state.map;
1694 entry->next = pool->surface_state_free_list;
1695 entry->state = state;
1696 pool->surface_state_free_list = entry;
1697 }
1698
1699 static size_t
1700 anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout *layout,
1701 bool host_only, uint32_t var_desc_count)
1702 {
1703 const uint32_t descriptor_count =
1704 set_layout_descriptor_count(layout, var_desc_count);
1705 const uint32_t buffer_view_count =
1706 set_layout_buffer_view_count(layout, var_desc_count);
1707
1708 return sizeof(struct anv_descriptor_set) +
1709 descriptor_count * sizeof(struct anv_descriptor) +
1710 buffer_view_count * sizeof(struct anv_buffer_view) +
1711 (host_only ? buffer_view_count * ANV_SURFACE_STATE_SIZE : 0);
1712 }
1713
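/* Create a descriptor set for the given layout.
 *
 * The host allocation returned by anv_descriptor_pool_alloc_set() is laid
 * out as computed by anv_descriptor_set_layout_size():
 *
 *    struct anv_descriptor_set
 *    struct anv_descriptor   [descriptor_count]
 *    struct anv_buffer_view  [buffer_view_count]
 *    surface state storage   [buffer_view_count]  (host-only pools only)
 *
 * Descriptor buffer data (surfaces and, with direct descriptors, samplers)
 * is allocated separately from the pool's surface/sampler heaps.
 */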
1714 static VkResult
1715 anv_descriptor_set_create(struct anv_device *device,
1716 struct anv_descriptor_pool *pool,
1717 struct anv_descriptor_set_layout *layout,
1718 uint32_t var_desc_count,
1719 struct anv_descriptor_set **out_set)
1720 {
1721 struct anv_descriptor_set *set;
1722 const size_t size = anv_descriptor_set_layout_size(layout,
1723 pool->host_only,
1724 var_desc_count);
1725
1726 VkResult result = anv_descriptor_pool_alloc_set(pool, size, &set);
1727 if (result != VK_SUCCESS)
1728 return result;
1729
1730 uint32_t descriptor_buffer_surface_size, descriptor_buffer_sampler_size;
1731 anv_descriptor_set_layout_descriptor_buffer_size(layout, var_desc_count,
1732 &descriptor_buffer_surface_size,
1733 &descriptor_buffer_sampler_size);
1734
1735 set->desc_surface_state = ANV_STATE_NULL;
1736 set->is_push = false;
1737
1738 if (descriptor_buffer_surface_size) {
1739 result = anv_descriptor_pool_heap_alloc(pool, &pool->surfaces,
1740 descriptor_buffer_surface_size,
1741 ANV_UBO_ALIGNMENT,
1742 &set->desc_surface_mem);
1743 if (result != VK_SUCCESS) {
1744 anv_descriptor_pool_free_set(pool, set);
1745 return result;
1746 }
1747
1748 set->desc_surface_addr = (struct anv_address) {
1749 .bo = pool->surfaces.bo,
1750 .offset = set->desc_surface_mem.offset,
1751 };
1752 set->desc_offset = anv_address_physical(set->desc_surface_addr) -
1753 device->physical->va.internal_surface_state_pool.addr;
1754
1755 enum isl_format format =
1756 anv_isl_format_for_descriptor_type(device,
1757 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
1758
1759 if (!pool->host_only) {
1760 set->desc_surface_state = anv_descriptor_pool_alloc_state(pool);
1761 if (set->desc_surface_state.map == NULL) {
1762 anv_descriptor_pool_free_set(pool, set);
1763 return vk_error(pool, VK_ERROR_OUT_OF_DEVICE_MEMORY);
1764 }
1765
1766 anv_fill_buffer_surface_state(device, set->desc_surface_state.map,
1767 format, ISL_SWIZZLE_IDENTITY,
1768 ISL_SURF_USAGE_CONSTANT_BUFFER_BIT,
1769 set->desc_surface_addr,
1770 descriptor_buffer_surface_size, 1);
1771 }
1772 } else {
1773 set->desc_surface_mem = ANV_STATE_NULL;
1774 set->desc_surface_addr = ANV_NULL_ADDRESS;
1775 }
1776
1777 if (descriptor_buffer_sampler_size) {
1778 result = anv_descriptor_pool_heap_alloc(pool, &pool->samplers,
1779 descriptor_buffer_sampler_size,
1780 ANV_SAMPLER_STATE_SIZE,
1781 &set->desc_sampler_mem);
1782 if (result != VK_SUCCESS) {
1783 anv_descriptor_pool_free_set(pool, set);
1784 return result;
1785 }
1786
1787 set->desc_sampler_addr = (struct anv_address) {
1788 .bo = pool->samplers.bo,
1789 .offset = set->desc_sampler_mem.offset,
1790 };
1791 } else {
1792 set->desc_sampler_mem = ANV_STATE_NULL;
1793 set->desc_sampler_addr = ANV_NULL_ADDRESS;
1794 }
1795
1796 vk_object_base_init(&device->vk, &set->base,
1797 VK_OBJECT_TYPE_DESCRIPTOR_SET);
1798 set->pool = pool;
1799 set->layout = layout;
1800 anv_descriptor_set_layout_ref(layout);
1801
1802 set->buffer_view_count =
1803 set_layout_buffer_view_count(layout, var_desc_count);
1804 set->descriptor_count =
1805 set_layout_descriptor_count(layout, var_desc_count);
1806
1807 set->buffer_views =
1808 (struct anv_buffer_view *) &set->descriptors[set->descriptor_count];
1809
1810 /* By zeroing the descriptors now, we can later verify that a descriptor
1811 * has not been populated with user data.
1812 */
1813 memset(set->descriptors, 0,
1814 sizeof(struct anv_descriptor) * set->descriptor_count);
1815
1816 /* Go through and fill out immutable samplers if we have any */
1817 for (uint32_t b = 0; b < layout->binding_count; b++) {
1818 if (layout->binding[b].immutable_samplers) {
1819 for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
1820 /* The type will get changed to COMBINED_IMAGE_SAMPLER in
1821 * UpdateDescriptorSets if needed. However, if the descriptor
1822 * set has an immutable sampler, UpdateDescriptorSets may never
1823 * touch it, so we need to make sure it's 100% valid now.
1824 *
1825 * We don't need to actually provide a sampler because the helper
1826 * will always write in the immutable sampler regardless of what
1827 * is in the sampler parameter.
1828 */
1829 VkDescriptorImageInfo info = { };
1830 anv_descriptor_set_write_image_view(device, set, &info,
1831 VK_DESCRIPTOR_TYPE_SAMPLER,
1832 b, i);
1833 }
1834 }
1835 }
1836
1837 /* Allocate surface states for real descriptor sets if we're using indirect
1838 * descriptors. For host-only sets, we just store the surface state data in
1839 * host memory at the end of the set's allocation.
1840 */
1841 if (device->physical->indirect_descriptors) {
1842 if (!pool->host_only) {
1843 for (uint32_t b = 0; b < set->buffer_view_count; b++) {
1844 set->buffer_views[b].general.state =
1845 anv_descriptor_pool_alloc_state(pool);
1846 }
1847 } else {
1848 void *host_surface_states =
1849 set->buffer_views + set->buffer_view_count;
1850 memset(host_surface_states, 0,
1851 set->buffer_view_count * ANV_SURFACE_STATE_SIZE);
1852 for (uint32_t b = 0; b < set->buffer_view_count; b++) {
1853 set->buffer_views[b].general.state = (struct anv_state) {
1854 .alloc_size = ANV_SURFACE_STATE_SIZE,
1855 .map = host_surface_states + b * ANV_SURFACE_STATE_SIZE,
1856 };
1857 }
1858 }
1859 }
1860
1861 list_addtail(&set->pool_link, &pool->desc_sets);
1862
1863 *out_set = set;
1864
1865 return VK_SUCCESS;
1866 }
1867
1868 static void
1869 anv_descriptor_set_destroy(struct anv_device *device,
1870 struct anv_descriptor_pool *pool,
1871 struct anv_descriptor_set *set)
1872 {
1873 anv_descriptor_set_layout_unref(device, set->layout);
1874
1875 if (set->desc_surface_mem.alloc_size) {
1876 anv_descriptor_pool_heap_free(&pool->surfaces, set->desc_surface_mem);
1877 if (set->desc_surface_state.alloc_size)
1878 anv_descriptor_pool_free_state(pool, set->desc_surface_state);
1879 }
1880
1881 if (set->desc_sampler_mem.alloc_size)
1882 anv_descriptor_pool_heap_free(&pool->samplers, set->desc_sampler_mem);
1883
1884 if (device->physical->indirect_descriptors) {
1885 if (!pool->host_only) {
1886 for (uint32_t b = 0; b < set->buffer_view_count; b++) {
1887 if (set->buffer_views[b].general.state.alloc_size) {
1888 anv_descriptor_pool_free_state(
1889 pool, set->buffer_views[b].general.state);
1890 }
1891 }
1892 }
1893 }
1894
1895 list_del(&set->pool_link);
1896
1897 vk_object_base_finish(&set->base);
1898 anv_descriptor_pool_free_set(pool, set);
1899 }
1900
1901 VkResult anv_AllocateDescriptorSets(
1902 VkDevice _device,
1903 const VkDescriptorSetAllocateInfo* pAllocateInfo,
1904 VkDescriptorSet* pDescriptorSets)
1905 {
1906 ANV_FROM_HANDLE(anv_device, device, _device);
1907 ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);
1908
1909 VkResult result = VK_SUCCESS;
1910 struct anv_descriptor_set *set = NULL;
1911 uint32_t i;
1912
1913 const VkDescriptorSetVariableDescriptorCountAllocateInfo *vdcai =
1914 vk_find_struct_const(pAllocateInfo->pNext,
1915 DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO);
1916
1917 for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
1918 ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
1919 pAllocateInfo->pSetLayouts[i]);
1920
1921 uint32_t var_desc_count = 0;
1922 if (vdcai != NULL && vdcai->descriptorSetCount > 0) {
1923 assert(vdcai->descriptorSetCount == pAllocateInfo->descriptorSetCount);
1924 var_desc_count = vdcai->pDescriptorCounts[i];
1925 }
1926
1927 result = anv_descriptor_set_create(device, pool, layout,
1928 var_desc_count, &set);
1929 if (result != VK_SUCCESS)
1930 break;
1931
1932 pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
1933 }
1934
1935 if (result != VK_SUCCESS) {
1936 anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
1937 i, pDescriptorSets);
1938 /* The Vulkan 1.3.228 spec, section 14.2.3. Allocation of Descriptor Sets:
1939 *
1940 * "If the creation of any of those descriptor sets fails, then the
1941 * implementation must destroy all successfully created descriptor
1942 * set objects from this command, set all entries of the
1943 * pDescriptorSets array to VK_NULL_HANDLE and return the error."
1944 */
1945 for (i = 0; i < pAllocateInfo->descriptorSetCount; i++)
1946 pDescriptorSets[i] = VK_NULL_HANDLE;
1947
1948 }
1949
1950 return result;
1951 }
1952
1953 VkResult anv_FreeDescriptorSets(
1954 VkDevice _device,
1955 VkDescriptorPool descriptorPool,
1956 uint32_t count,
1957 const VkDescriptorSet* pDescriptorSets)
1958 {
1959 ANV_FROM_HANDLE(anv_device, device, _device);
1960 ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);
1961
1962 for (uint32_t i = 0; i < count; i++) {
1963 ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);
1964
1965 if (!set)
1966 continue;
1967
1968 anv_descriptor_set_destroy(device, pool, set);
1969 }
1970
1971 return VK_SUCCESS;
1972 }
1973
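/* (Re)initialize a push descriptor set for the given layout.
 *
 * Push descriptor sets are owned by the command buffer, so their descriptor
 * buffers are carved out of command-buffer state streams rather than a
 * descriptor pool. The previous buffer is reused when it is large enough and
 * has not yet been consumed by the GPU; otherwise a new one is allocated
 * and, when the layout is unchanged, the old contents are copied over.
 * Returns false if a stream allocation fails.
 */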
1974 bool
1975 anv_push_descriptor_set_init(struct anv_cmd_buffer *cmd_buffer,
1976 struct anv_push_descriptor_set *push_set,
1977 struct anv_descriptor_set_layout *layout)
1978 {
1979 struct anv_descriptor_set *set = &push_set->set;
1980 /* Only copy the old descriptor data if needed:
1981 * - not if there was no previous layout
1982 * - not if the layout is different (descriptor set data becomes
1983 * undefined)
1984 * - not if there is only one descriptor, since we know the entire data
1985 * will be replaced
1986 *
1987 * TODO: we could optimize further: keep a copy of the old data on the
1988 * host, or copy only the bits that were not newly written, ...
1989 */
1990 const bool copy_old_descriptors = set->layout != NULL &&
1991 set->layout == layout &&
1992 layout->descriptor_count > 1;
1993
1994 if (set->layout != layout) {
1995 if (set->layout) {
1996 anv_descriptor_set_layout_unref(cmd_buffer->device, set->layout);
1997 } else {
1998 /* one-time initialization */
1999 vk_object_base_init(&cmd_buffer->device->vk, &set->base,
2000 VK_OBJECT_TYPE_DESCRIPTOR_SET);
2001 set->is_push = true;
2002 set->buffer_views = push_set->buffer_views;
2003 }
2004
2005 anv_descriptor_set_layout_ref(layout);
2006 set->layout = layout;
2007 set->generate_surface_states = 0;
2008 }
2009
2010 assert(set->is_push && set->buffer_views);
2011 set->size = anv_descriptor_set_layout_size(layout, false /* host_only */, 0);
2012 set->buffer_view_count = layout->buffer_view_count;
2013 set->descriptor_count = layout->descriptor_count;
2014
2015 if (layout->descriptor_buffer_surface_size &&
2016 (push_set->set_used_on_gpu ||
2017 set->desc_surface_mem.alloc_size < layout->descriptor_buffer_surface_size)) {
2018 struct anv_physical_device *pdevice = cmd_buffer->device->physical;
2019 struct anv_state_stream *push_stream;
2020 uint64_t push_base_address;
2021
2022 if (layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT) {
2023 push_stream = pdevice->uses_ex_bso ?
2024 &cmd_buffer->push_descriptor_buffer_stream :
2025 &cmd_buffer->surface_state_stream;
2026 push_base_address = pdevice->uses_ex_bso ?
2027 pdevice->va.push_descriptor_buffer_pool.addr :
2028 pdevice->va.internal_surface_state_pool.addr;
2029 } else {
2030 push_stream = pdevice->indirect_descriptors ?
2031 &cmd_buffer->indirect_push_descriptor_stream :
2032 &cmd_buffer->surface_state_stream;
2033 push_base_address = pdevice->indirect_descriptors ?
2034 pdevice->va.indirect_push_descriptor_pool.addr :
2035 pdevice->va.internal_surface_state_pool.addr;
2036 }
2037
2038 uint32_t surface_size, sampler_size;
2039 anv_descriptor_set_layout_descriptor_buffer_size(layout, 0,
2040 &surface_size,
2041 &sampler_size);
2042
2043 /* The previous buffer is either actively used by some GPU command (so
2044 * we can't modify it) or is too small. Allocate a new one.
2045 */
2046 struct anv_state desc_surface_mem =
2047 anv_state_stream_alloc(push_stream, surface_size, ANV_UBO_ALIGNMENT);
2048 if (desc_surface_mem.map == NULL)
2049 return false;
2050
2051 if (copy_old_descriptors) {
2052 memcpy(desc_surface_mem.map, set->desc_surface_mem.map,
2053 MIN2(desc_surface_mem.alloc_size,
2054 set->desc_surface_mem.alloc_size));
2055 }
2056 set->desc_surface_mem = desc_surface_mem;
2057
2058 set->desc_surface_addr = anv_state_pool_state_address(
2059 push_stream->state_pool,
2060 set->desc_surface_mem);
2061 set->desc_offset = anv_address_physical(set->desc_surface_addr) -
2062 push_base_address;
2063 }
2064
2065 if (layout->descriptor_buffer_sampler_size &&
2066 (push_set->set_used_on_gpu ||
2067 set->desc_sampler_mem.alloc_size < layout->descriptor_buffer_sampler_size)) {
2068 struct anv_physical_device *pdevice = cmd_buffer->device->physical;
2069 assert(!pdevice->indirect_descriptors);
2070 struct anv_state_stream *push_stream = &cmd_buffer->dynamic_state_stream;
2071
2072 uint32_t surface_size, sampler_size;
2073 anv_descriptor_set_layout_descriptor_buffer_size(layout, 0,
2074 &surface_size,
2075 &sampler_size);
2076
2077 /* The previous buffer is either actively used by some GPU command (so
2078 * we can't modify it) or is too small. Allocate a new one.
2079 */
2080 struct anv_state desc_sampler_mem =
2081 anv_state_stream_alloc(push_stream, sampler_size, ANV_SAMPLER_STATE_SIZE);
2082 if (desc_sampler_mem.map == NULL)
2083 return false;
2084
2085 if (copy_old_descriptors) {
2086 memcpy(desc_sampler_mem.map, set->desc_sampler_mem.map,
2087 MIN2(desc_sampler_mem.alloc_size,
2088 set->desc_sampler_mem.alloc_size));
2089 }
2090 set->desc_sampler_mem = desc_sampler_mem;
2091
2092 set->desc_sampler_addr = anv_state_pool_state_address(
2093 push_stream->state_pool,
2094 set->desc_sampler_mem);
2095 }
2096
2097 if (push_set->set_used_on_gpu) {
2098 set->desc_surface_state = ANV_STATE_NULL;
2099 push_set->set_used_on_gpu = false;
2100 }
2101
2102 return true;
2103 }
2104
2105 void
2106 anv_push_descriptor_set_finish(struct anv_push_descriptor_set *push_set)
2107 {
2108 struct anv_descriptor_set *set = &push_set->set;
2109 if (set->layout) {
2110 struct anv_device *device =
2111 container_of(set->base.device, struct anv_device, vk);
2112 anv_descriptor_set_layout_unref(device, set->layout);
2113 }
2114 }
2115
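/* Convert a bindless surface state offset into the handle written into the
 * descriptor buffer. With extended bindless surface offsets (uses_ex_bso)
 * the byte offset is used directly; otherwise it is shifted into bits 31:12
 * of the extended message descriptor. For example, a 64B-aligned offset of
 * 0x1000 becomes 0x1000 << 6 = 0x40000 in that case.
 */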
2116 static uint32_t
2117 anv_surface_state_to_handle(struct anv_physical_device *device,
2118 struct anv_state state)
2119 {
2120 /* Bits 31:12 of the bindless surface offset in the extended message
2121 * descriptor are bits 25:6 of the byte-based address.
2122 */
2123 assert(state.offset >= 0);
2124 uint32_t offset = state.offset;
2125 if (device->uses_ex_bso) {
2126 assert((offset & 0x3f) == 0);
2127 return offset;
2128 } else {
2129 assert((offset & 0x3f) == 0 && offset < (1 << 26));
2130 return offset << 6;
2131 }
2132 }
2133
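/* Return the pre-baked RENDER_SURFACE_STATE data for one plane of an image
 * view. Sampled/combined/input-attachment descriptors pick the general or
 * optimal variant based on the image layout; storage descriptors use the
 * storage surface state.
 */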
2134 static const void *
2135 anv_image_view_surface_data_for_plane_layout(struct anv_image_view *image_view,
2136 VkDescriptorType desc_type,
2137 unsigned plane,
2138 VkImageLayout layout)
2139 {
2140 if (desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
2141 desc_type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
2142 desc_type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) {
2143 return (layout == VK_IMAGE_LAYOUT_GENERAL ||
2144 layout == VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ_KHR) ?
2145 &image_view->planes[plane].general_sampler.state_data :
2146 &image_view->planes[plane].optimal_sampler.state_data;
2147 }
2148
2149 if (desc_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2150 return &image_view->planes[plane].storage.state_data;
2151
2152 unreachable("Invalid descriptor type");
2153 }
2154
2155 static const uint32_t *
2156 anv_sampler_state_for_descriptor_set(const struct anv_sampler *sampler,
2157 const struct anv_descriptor_set *set,
2158 uint32_t plane)
2159 {
2160 return sampler->state[plane];
2161 }
2162
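/* Write an image/sampler descriptor into a set.
 *
 * Depending on the layout's anv_descriptor_data bits, this writes one or
 * more of: anv_sampled_image_descriptor / anv_storage_image_descriptor
 * handles (indirect descriptors), raw RENDER_SURFACE_STATE data, and/or raw
 * SAMPLER_STATE data (direct descriptors), at the binding's offset within
 * the set's descriptor buffer(s).
 */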
2163 void
2164 anv_descriptor_set_write_image_view(struct anv_device *device,
2165 struct anv_descriptor_set *set,
2166 const VkDescriptorImageInfo * const info,
2167 VkDescriptorType type,
2168 uint32_t binding,
2169 uint32_t element)
2170 {
2171 const struct anv_descriptor_set_binding_layout *bind_layout =
2172 &set->layout->binding[binding];
2173 struct anv_descriptor *desc =
2174 &set->descriptors[bind_layout->descriptor_index + element];
2175 struct anv_image_view *image_view = NULL;
2176 struct anv_sampler *sampler = NULL;
2177
2178 /* We get called with just VK_DESCRIPTOR_TYPE_SAMPLER as part of descriptor
2179 * set initialization to set the bindless samplers.
2180 */
2181 assert(type == bind_layout->type ||
2182 type == VK_DESCRIPTOR_TYPE_SAMPLER ||
2183 bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT);
2184
2185 switch (type) {
2186 case VK_DESCRIPTOR_TYPE_SAMPLER:
2187 sampler = bind_layout->immutable_samplers ?
2188 bind_layout->immutable_samplers[element] :
2189 anv_sampler_from_handle(info->sampler);
2190 break;
2191
2192 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2193 image_view = anv_image_view_from_handle(info->imageView);
2194 sampler = bind_layout->immutable_samplers ?
2195 bind_layout->immutable_samplers[element] :
2196 anv_sampler_from_handle(info->sampler);
2197 break;
2198
2199 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2200 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2201 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2202 image_view = anv_image_view_from_handle(info->imageView);
2203 break;
2204
2205 default:
2206 unreachable("invalid descriptor type");
2207 }
2208
2209 *desc = (struct anv_descriptor) {
2210 .type = type,
2211 .layout = info->imageLayout,
2212 .image_view = image_view,
2213 .sampler = sampler,
2214 };
2215
2216 void *desc_surface_map = set->desc_surface_mem.map +
2217 bind_layout->descriptor_surface_offset +
2218 element * bind_layout->descriptor_surface_stride;
2219
2220 enum anv_descriptor_data data =
2221 bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
2222 anv_descriptor_data_for_type(device->physical, set->layout->type,
2223 set->layout->flags, type) :
2224 bind_layout->data;
2225
2226 if (data & ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE) {
2227 struct anv_sampled_image_descriptor desc_data[3];
2228 memset(desc_data, 0, sizeof(desc_data));
2229
2230 if (image_view) {
2231 for (unsigned p = 0; p < image_view->n_planes; p++) {
2232 const struct anv_surface_state *sstate =
2233 anv_image_view_texture_surface_state(image_view, p,
2234 desc->layout);
2235 desc_data[p].image =
2236 anv_surface_state_to_handle(device->physical, sstate->state);
2237 }
2238 }
2239
2240 if (sampler) {
2241 for (unsigned p = 0; p < sampler->n_planes; p++)
2242 desc_data[p].sampler = sampler->bindless_state.offset + p * 32;
2243 }
2244
2245 /* We may have max_plane_count == 0 if this isn't a sampled image, but it
2246 * can be no more than the size of our array of handles.
2247 */
2248 assert(bind_layout->max_plane_count <= ARRAY_SIZE(desc_data));
2249 memcpy(desc_surface_map, desc_data,
2250 bind_layout->max_plane_count * sizeof(desc_data[0]));
2251 }
2252
2253 if (data & ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE) {
2254 if (image_view) {
2255 assert(image_view->n_planes == 1);
2256 struct anv_storage_image_descriptor desc_data = {
2257 .vanilla = anv_surface_state_to_handle(
2258 device->physical,
2259 anv_image_view_storage_surface_state(image_view)->state),
2260 .image_depth = image_view->vk.storage.z_slice_count,
2261 };
2262 memcpy(desc_surface_map, &desc_data, sizeof(desc_data));
2263 } else {
2264 memset(desc_surface_map, 0, bind_layout->descriptor_surface_stride);
2265 }
2266 }
2267
2268 if (data & ANV_DESCRIPTOR_SAMPLER) {
2269 void *sampler_map =
2270 set->layout->type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT ?
2271 (set->desc_sampler_mem.map +
2272 bind_layout->descriptor_sampler_offset +
2273 element * bind_layout->descriptor_sampler_stride) : desc_surface_map;
2274 if (sampler) {
2275 for (unsigned p = 0; p < sampler->n_planes; p++) {
2276 memcpy(sampler_map + p * ANV_SAMPLER_STATE_SIZE,
2277 anv_sampler_state_for_descriptor_set(sampler, set, p),
2278 ANV_SAMPLER_STATE_SIZE);
2279 }
2280 } else {
2281 memset(sampler_map, 0, bind_layout->descriptor_sampler_stride);
2282 }
2283 }
2284
2285 if (data & ANV_DESCRIPTOR_SURFACE) {
2286 unsigned max_plane_count = image_view ? image_view->n_planes : 1;
2287
2288 for (unsigned p = 0; p < max_plane_count; p++) {
2289 void *plane_map = desc_surface_map + p * ANV_SURFACE_STATE_SIZE;
2290
2291 if (image_view) {
2292 memcpy(plane_map,
2293 anv_image_view_surface_data_for_plane_layout(image_view, type,
2294 p, desc->layout),
2295 ANV_SURFACE_STATE_SIZE);
2296 } else {
2297 memcpy(plane_map, &device->host_null_surface_state, ANV_SURFACE_STATE_SIZE);
2298 }
2299 }
2300 }
2301
2302 if (data & ANV_DESCRIPTOR_SURFACE_SAMPLER) {
2303 unsigned max_plane_count =
2304 MAX2(image_view ? image_view->n_planes : 1,
2305 sampler ? sampler->n_planes : 1);
2306
2307 for (unsigned p = 0; p < max_plane_count; p++) {
2308 void *plane_map = desc_surface_map + p * 2 * ANV_SURFACE_STATE_SIZE;
2309
2310 if (image_view) {
2311 memcpy(plane_map,
2312 anv_image_view_surface_data_for_plane_layout(image_view, type,
2313 p, desc->layout),
2314 ANV_SURFACE_STATE_SIZE);
2315 } else {
2316 memcpy(plane_map, &device->host_null_surface_state, ANV_SURFACE_STATE_SIZE);
2317 }
2318
2319 if (sampler) {
2320 memcpy(plane_map + ANV_SURFACE_STATE_SIZE,
2321 anv_sampler_state_for_descriptor_set(sampler, set, p),
2322 ANV_SAMPLER_STATE_SIZE);
2323 } else {
2324 memset(plane_map + ANV_SURFACE_STATE_SIZE, 0,
2325 ANV_SAMPLER_STATE_SIZE);
2326 }
2327 }
2328 }
2329 }
2330
2331 static const void *
2332 anv_buffer_view_surface_data(struct anv_buffer_view *buffer_view,
2333 VkDescriptorType desc_type)
2334 {
2335 if (desc_type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER)
2336 return &buffer_view->general.state_data;
2337
2338 if (desc_type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)
2339 return &buffer_view->storage.state_data;
2340
2341 unreachable("Invalid descriptor type");
2342 }
2343
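/* Write a texel buffer view descriptor. The surface data comes straight
 * from the anv_buffer_view (general state for uniform texel buffers,
 * storage state for storage texel buffers); a NULL view gets either the
 * device's null surface state or zeros, depending on the descriptor data
 * layout.
 */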
2344 void
2345 anv_descriptor_set_write_buffer_view(struct anv_device *device,
2346 struct anv_descriptor_set *set,
2347 VkDescriptorType type,
2348 struct anv_buffer_view *buffer_view,
2349 uint32_t binding,
2350 uint32_t element)
2351 {
2352 const struct anv_descriptor_set_binding_layout *bind_layout =
2353 &set->layout->binding[binding];
2354 struct anv_descriptor *desc =
2355 &set->descriptors[bind_layout->descriptor_index + element];
2356
2357 assert(type == bind_layout->type ||
2358 bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT);
2359
2360 *desc = (struct anv_descriptor) {
2361 .type = type,
2362 .buffer_view = buffer_view,
2363 };
2364
2365 enum anv_descriptor_data data =
2366 bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
2367 anv_descriptor_data_for_type(device->physical, set->layout->type,
2368 set->layout->flags, type) :
2369 bind_layout->data;
2370
2371 void *desc_map = set->desc_surface_mem.map +
2372 bind_layout->descriptor_surface_offset +
2373 element * bind_layout->descriptor_surface_stride;
2374
2375 if (buffer_view == NULL) {
2376 if (data & ANV_DESCRIPTOR_SURFACE)
2377 memcpy(desc_map, &device->host_null_surface_state, ANV_SURFACE_STATE_SIZE);
2378 else
2379 memset(desc_map, 0, bind_layout->descriptor_surface_stride);
2380 return;
2381 }
2382
2383 if (data & ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE) {
2384 struct anv_sampled_image_descriptor desc_data = {
2385 .image = anv_surface_state_to_handle(
2386 device->physical, buffer_view->general.state),
2387 };
2388 memcpy(desc_map, &desc_data, sizeof(desc_data));
2389 }
2390
2391 if (data & ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE) {
2392 struct anv_storage_image_descriptor desc_data = {
2393 .vanilla = anv_surface_state_to_handle(
2394 device->physical, buffer_view->storage.state),
2395 };
2396 memcpy(desc_map, &desc_data, sizeof(desc_data));
2397 }
2398
2399 if (data & ANV_DESCRIPTOR_SURFACE) {
2400 memcpy(desc_map,
2401 anv_buffer_view_surface_data(buffer_view, type),
2402 ANV_SURFACE_STATE_SIZE);
2403 }
2404 }
2405
2406 void
2407 anv_descriptor_write_surface_state(struct anv_device *device,
2408 struct anv_descriptor *desc,
2409 struct anv_state surface_state)
2410 {
2411 assert(surface_state.alloc_size);
2412
2413 struct anv_buffer_view *bview = desc->buffer_view;
2414
2415 bview->general.state = surface_state;
2416
2417 isl_surf_usage_flags_t usage =
2418 anv_isl_usage_for_descriptor_type(desc->type);
2419
2420 enum isl_format format =
2421 anv_isl_format_for_descriptor_type(device, desc->type);
2422 anv_fill_buffer_surface_state(device, bview->general.state.map,
2423 format, ISL_SWIZZLE_IDENTITY,
2424 usage, bview->address, bview->vk.range, 1);
2425 }
2426
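/* Write a uniform/storage buffer descriptor.
 *
 * For indirect descriptors this is an anv_address_range_descriptor (address
 * plus bounds-checked range); for direct descriptors a RENDER_SURFACE_STATE
 * is filled in place with isl_buffer_fill_state(). Dynamic buffer
 * descriptors skip the buffer-view path below, since their final address
 * depends on the dynamic offset supplied at bind time.
 */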
2427 void
2428 anv_descriptor_set_write_buffer(struct anv_device *device,
2429 struct anv_descriptor_set *set,
2430 VkDescriptorType type,
2431 struct anv_buffer *buffer,
2432 uint32_t binding,
2433 uint32_t element,
2434 VkDeviceSize offset,
2435 VkDeviceSize range)
2436 {
2437 const struct anv_descriptor_set_binding_layout *bind_layout =
2438 &set->layout->binding[binding];
2439 const uint32_t descriptor_index = bind_layout->descriptor_index + element;
2440 struct anv_descriptor *desc = &set->descriptors[descriptor_index];
2441
2442 assert(type == bind_layout->type ||
2443 bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT);
2444
2445 *desc = (struct anv_descriptor) {
2446 .type = type,
2447 .offset = offset,
2448 .range = range,
2449 .buffer = buffer,
2450 };
2451
2452 enum anv_descriptor_data data =
2453 bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
2454 anv_descriptor_data_for_type(device->physical, set->layout->type,
2455 set->layout->flags, type) :
2456 bind_layout->data;
2457
2458 void *desc_map = set->desc_surface_mem.map +
2459 bind_layout->descriptor_surface_offset +
2460 element * bind_layout->descriptor_surface_stride;
2461
2462 if (buffer == NULL) {
2463 if (data & ANV_DESCRIPTOR_SURFACE)
2464 memcpy(desc_map, &device->host_null_surface_state, ANV_SURFACE_STATE_SIZE);
2465 else
2466 memset(desc_map, 0, bind_layout->descriptor_surface_stride);
2467 return;
2468 }
2469
2470 struct anv_address bind_addr = anv_address_add(buffer->address, offset);
2471 desc->bind_range = vk_buffer_range(&buffer->vk, offset, range);
2472
2473 /* We report a bounds-checking alignment of ANV_UBO_ALIGNMENT in
2474 * VkPhysicalDeviceRobustness2PropertiesEXT::robustUniformBufferAccessSizeAlignment,
2475 * so align the range to that.
2476 */
2477 if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
2478 type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)
2479 desc->bind_range = align64(desc->bind_range, ANV_UBO_ALIGNMENT);
2480
2481 if (data & ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE) {
2482 struct anv_address_range_descriptor desc_data = {
2483 .address = anv_address_physical(bind_addr),
2484 .range = desc->bind_range,
2485 };
2486 memcpy(desc_map, &desc_data, sizeof(desc_data));
2487 }
2488
2489 if (data & ANV_DESCRIPTOR_SURFACE) {
2490 isl_surf_usage_flags_t usage =
2491 anv_isl_usage_for_descriptor_type(desc->type);
2492
2493 enum isl_format format =
2494 anv_isl_format_for_descriptor_type(device, desc->type);
2495
2496 if (bind_addr.bo && bind_addr.bo->alloc_flags & ANV_BO_ALLOC_PROTECTED)
2497 usage |= ISL_SURF_USAGE_PROTECTED_BIT;
2498 isl_buffer_fill_state(&device->isl_dev, desc_map,
2499 .address = anv_address_physical(bind_addr),
2500 .mocs = isl_mocs(&device->isl_dev, usage,
2501 bind_addr.bo && anv_bo_is_external(bind_addr.bo)),
2502 .size_B = desc->bind_range,
2503 .format = format,
2504 .swizzle = ISL_SWIZZLE_IDENTITY,
2505 .stride_B = 1);
2506 }
2507
2508 if (vk_descriptor_type_is_dynamic(type))
2509 return;
2510
2511 if (data & ANV_DESCRIPTOR_BUFFER_VIEW) {
2512 struct anv_buffer_view *bview =
2513 &set->buffer_views[bind_layout->buffer_view_index + element];
2514
2515 desc->set_buffer_view = bview;
2516
2517 bview->vk.range = desc->bind_range;
2518 bview->address = bind_addr;
2519
2520 if (set->is_push) {
2521 set->generate_surface_states |= BITFIELD_BIT(descriptor_index);
2522 /* Reset the surface state to make sure
2523 * genX(cmd_buffer_emit_push_descriptor_surfaces) generates a new
2524 * one.
2525 */
2526 bview->general.state = ANV_STATE_NULL;
2527 } else {
2528 anv_descriptor_write_surface_state(device, desc, bview->general.state);
2529 }
2530 }
2531 }
2532
2533 void
2534 anv_descriptor_set_write_inline_uniform_data(struct anv_device *device,
2535 struct anv_descriptor_set *set,
2536 uint32_t binding,
2537 const void *data,
2538 size_t offset,
2539 size_t size)
2540 {
2541 const struct anv_descriptor_set_binding_layout *bind_layout =
2542 &set->layout->binding[binding];
2543
2544 assert(bind_layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM);
2545
2546 void *desc_map = set->desc_surface_mem.map +
2547 bind_layout->descriptor_surface_offset;
2548
2549 memcpy(desc_map + offset, data, size);
2550 }
2551
2552 void
2553 anv_descriptor_set_write_acceleration_structure(struct anv_device *device,
2554 struct anv_descriptor_set *set,
2555 struct vk_acceleration_structure *accel,
2556 uint32_t binding,
2557 uint32_t element)
2558 {
2559 const struct anv_descriptor_set_binding_layout *bind_layout =
2560 &set->layout->binding[binding];
2561 struct anv_descriptor *desc =
2562 &set->descriptors[bind_layout->descriptor_index + element];
2563
2564 assert(bind_layout->data & ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE);
2565 *desc = (struct anv_descriptor) {
2566 .type = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
2567 .accel_struct = accel,
2568 };
2569
2570 struct anv_address_range_descriptor desc_data = { };
2571 if (accel != NULL) {
2572 desc_data.address = vk_acceleration_structure_get_va(accel);
2573 desc_data.range = accel->size;
2574 }
2575 assert(sizeof(desc_data) <= bind_layout->descriptor_surface_stride);
2576
2577 void *desc_map = set->desc_surface_mem.map +
2578 bind_layout->descriptor_surface_offset +
2579 element * bind_layout->descriptor_surface_stride;
2580 memcpy(desc_map, &desc_data, sizeof(desc_data));
2581 }
2582
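/* Apply an array of VkWriteDescriptorSet to a set (or to set_override for
 * push descriptors, in which case write->dstSet is ignored).
 *
 * Illustrative host-side usage that ends up in this function; the "buf" and
 * "set" handles are hypothetical:
 *
 *    VkDescriptorBufferInfo info = { .buffer = buf, .offset = 0,
 *                                    .range = VK_WHOLE_SIZE };
 *    VkWriteDescriptorSet write = {
 *       .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
 *       .dstSet = set, .dstBinding = 0, .dstArrayElement = 0,
 *       .descriptorCount = 1,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .pBufferInfo = &info,
 *    };
 *    vkUpdateDescriptorSets(device, 1, &write, 0, NULL);
 */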
2583 void
2584 anv_descriptor_set_write(struct anv_device *device,
2585 struct anv_descriptor_set *set_override,
2586 uint32_t write_count,
2587 const VkWriteDescriptorSet *writes)
2588 {
2589 for (uint32_t i = 0; i < write_count; i++) {
2590 const VkWriteDescriptorSet *write = &writes[i];
2591 struct anv_descriptor_set *set = unlikely(set_override) ?
2592 set_override :
2593 anv_descriptor_set_from_handle(write->dstSet);
2594
2595 switch (write->descriptorType) {
2596 case VK_DESCRIPTOR_TYPE_SAMPLER:
2597 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2598 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2599 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2600 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2601 for (uint32_t j = 0; j < write->descriptorCount; j++) {
2602 anv_descriptor_set_write_image_view(device, set,
2603 write->pImageInfo + j,
2604 write->descriptorType,
2605 write->dstBinding,
2606 write->dstArrayElement + j);
2607 }
2608 break;
2609
2610 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
2611 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
2612 for (uint32_t j = 0; j < write->descriptorCount; j++) {
2613 ANV_FROM_HANDLE(anv_buffer_view, bview,
2614 write->pTexelBufferView[j]);
2615
2616 anv_descriptor_set_write_buffer_view(device, set,
2617 write->descriptorType,
2618 bview,
2619 write->dstBinding,
2620 write->dstArrayElement + j);
2621 }
2622 break;
2623
2624 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
2625 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
2626 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
2627 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
2628 for (uint32_t j = 0; j < write->descriptorCount; j++) {
2629 ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);
2630
2631 anv_descriptor_set_write_buffer(device, set,
2632 write->descriptorType,
2633 buffer,
2634 write->dstBinding,
2635 write->dstArrayElement + j,
2636 write->pBufferInfo[j].offset,
2637 write->pBufferInfo[j].range);
2638 }
2639 break;
2640
2641 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK: {
2642 const VkWriteDescriptorSetInlineUniformBlock *inline_write =
2643 vk_find_struct_const(write->pNext,
2644 WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK);
2645 assert(inline_write->dataSize == write->descriptorCount);
2646 anv_descriptor_set_write_inline_uniform_data(device, set,
2647 write->dstBinding,
2648 inline_write->pData,
2649 write->dstArrayElement,
2650 inline_write->dataSize);
2651 break;
2652 }
2653
2654 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {
2655 const VkWriteDescriptorSetAccelerationStructureKHR *accel_write =
2656 vk_find_struct_const(write, WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR);
2657 assert(accel_write->accelerationStructureCount ==
2658 write->descriptorCount);
2659 for (uint32_t j = 0; j < write->descriptorCount; j++) {
2660 ANV_FROM_HANDLE(vk_acceleration_structure, accel,
2661 accel_write->pAccelerationStructures[j]);
2662 anv_descriptor_set_write_acceleration_structure(device, set, accel,
2663 write->dstBinding,
2664 write->dstArrayElement + j);
2665 }
2666 break;
2667 }
2668
2669 default:
2670 break;
2671 }
2672 }
2673 }
2674
2675 void anv_UpdateDescriptorSets(
2676 VkDevice _device,
2677 uint32_t descriptorWriteCount,
2678 const VkWriteDescriptorSet* pDescriptorWrites,
2679 uint32_t descriptorCopyCount,
2680 const VkCopyDescriptorSet* pDescriptorCopies)
2681 {
2682 ANV_FROM_HANDLE(anv_device, device, _device);
2683
2684 anv_descriptor_set_write(device, NULL, descriptorWriteCount,
2685 pDescriptorWrites);
2686
2687 for (uint32_t i = 0; i < descriptorCopyCount; i++) {
2688 const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
2689 ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
2690 ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);
2691
2692 const struct anv_descriptor_set_binding_layout *src_layout =
2693 &src->layout->binding[copy->srcBinding];
2694 const struct anv_descriptor_set_binding_layout *dst_layout =
2695 &dst->layout->binding[copy->dstBinding];
2696
2697 if (src_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
2698 anv_descriptor_set_write_inline_uniform_data(device, dst,
2699 copy->dstBinding,
2700 src->desc_surface_mem.map +
2701 src_layout->descriptor_surface_offset + copy->srcArrayElement,
2702 copy->dstArrayElement,
2703 copy->descriptorCount);
2704 continue;
2705 }
2706
2707 uint32_t copy_surface_element_size =
2708 MIN2(src_layout->descriptor_surface_stride,
2709 dst_layout->descriptor_surface_stride);
2710 uint32_t copy_sampler_element_size =
2711 MIN2(src_layout->descriptor_sampler_stride,
2712 dst_layout->descriptor_sampler_stride);
2713 for (uint32_t j = 0; j < copy->descriptorCount; j++) {
2714 struct anv_descriptor *src_desc =
2715 &src->descriptors[src_layout->descriptor_index +
2716 copy->srcArrayElement + j];
2717 struct anv_descriptor *dst_desc =
2718 &dst->descriptors[dst_layout->descriptor_index +
2719 copy->dstArrayElement + j];
2720
2721 /* Copy the memory containing one of the following structures read by
2722 * the shaders:
2723 * - anv_sampled_image_descriptor
2724 * - anv_storage_image_descriptor
2725 * - anv_address_range_descriptor
2726 * - RENDER_SURFACE_STATE
2727 * - SAMPLER_STATE
2728 */
2729 if (copy_surface_element_size > 0) {
2730 assert(dst->desc_surface_mem.map != NULL);
2731 assert(src->desc_surface_mem.map != NULL);
2732 memcpy(dst->desc_surface_mem.map +
2733 dst_layout->descriptor_surface_offset +
2734 (copy->dstArrayElement + j) * dst_layout->descriptor_surface_stride,
2735 src->desc_surface_mem.map +
2736 src_layout->descriptor_surface_offset +
2737 (copy->srcArrayElement + j) * src_layout->descriptor_surface_stride,
2738 copy_surface_element_size);
2739 }
2740
2741 if (copy_sampler_element_size) {
2742 assert(dst->desc_sampler_mem.map != NULL);
2743 assert(src->desc_sampler_mem.map != NULL);
2744 memcpy(dst->desc_sampler_mem.map +
2745 dst_layout->descriptor_sampler_offset +
2746 (copy->dstArrayElement + j) * dst_layout->descriptor_sampler_stride,
2747 src->desc_sampler_mem.map +
2748 src_layout->descriptor_sampler_offset +
2749 (copy->srcArrayElement + j) * src_layout->descriptor_sampler_stride,
2750 copy_sampler_element_size);
2751 }
2752
2753 /* Copy the CPU side data anv_descriptor */
2754 *dst_desc = *src_desc;
2755
2756 /* If the CPU-side data may contain a buffer view, we need to copy that
2757 * as well.
2758 */
2759 const enum anv_descriptor_data data =
2760 src_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
2761 anv_descriptor_data_for_type(device->physical,
2762 src->layout->type,
2763 src->layout->flags,
2764 src_desc->type) :
2765 src_layout->data;
2766 if (data & ANV_DESCRIPTOR_BUFFER_VIEW) {
2767 struct anv_buffer_view *src_bview =
2768 &src->buffer_views[src_layout->buffer_view_index +
2769 copy->srcArrayElement + j];
2770 struct anv_buffer_view *dst_bview =
2771 &dst->buffer_views[dst_layout->buffer_view_index +
2772 copy->dstArrayElement + j];
2773
2774 dst_desc->set_buffer_view = dst_bview;
2775
2776 dst_bview->vk.range = src_bview->vk.range;
2777 dst_bview->address = src_bview->address;
2778
2779 memcpy(dst_bview->general.state.map,
2780 src_bview->general.state.map,
2781 ANV_SURFACE_STATE_SIZE);
2782 }
2783 }
2784 }
2785 }
2786
2787 /*
2788 * Descriptor update templates.
2789 */
2790
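/* Apply a vk_descriptor_update_template to a set. Each template entry
 * describes a binding, an array range, and the offset/stride with which the
 * corresponding VkDescriptorImageInfo / VkDescriptorBufferInfo / handle
 * data is laid out in the application-provided pData blob.
 */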
2791 void
2792 anv_descriptor_set_write_template(struct anv_device *device,
2793 struct anv_descriptor_set *set,
2794 const struct vk_descriptor_update_template *template,
2795 const void *data)
2796 {
2797 for (uint32_t i = 0; i < template->entry_count; i++) {
2798 const struct vk_descriptor_template_entry *entry =
2799 &template->entries[i];
2800
2801 switch (entry->type) {
2802 case VK_DESCRIPTOR_TYPE_SAMPLER:
2803 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2804 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2805 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2806 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2807 for (uint32_t j = 0; j < entry->array_count; j++) {
2808 const VkDescriptorImageInfo *info =
2809 data + entry->offset + j * entry->stride;
2810 anv_descriptor_set_write_image_view(device, set,
2811 info, entry->type,
2812 entry->binding,
2813 entry->array_element + j);
2814 }
2815 break;
2816
2817 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
2818 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
2819 for (uint32_t j = 0; j < entry->array_count; j++) {
2820 const VkBufferView *_bview =
2821 data + entry->offset + j * entry->stride;
2822 ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);
2823
2824 anv_descriptor_set_write_buffer_view(device, set,
2825 entry->type,
2826 bview,
2827 entry->binding,
2828 entry->array_element + j);
2829 }
2830 break;
2831
2832 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
2833 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
2834 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
2835 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
2836 for (uint32_t j = 0; j < entry->array_count; j++) {
2837 const VkDescriptorBufferInfo *info =
2838 data + entry->offset + j * entry->stride;
2839 ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);
2840
2841 anv_descriptor_set_write_buffer(device, set,
2842 entry->type,
2843 buffer,
2844 entry->binding,
2845 entry->array_element + j,
2846 info->offset, info->range);
2847 }
2848 break;
2849
2850 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
2851 anv_descriptor_set_write_inline_uniform_data(device, set,
2852 entry->binding,
2853 data + entry->offset,
2854 entry->array_element,
2855 entry->array_count);
2856 break;
2857
2858 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
2859 for (uint32_t j = 0; j < entry->array_count; j++) {
2860 VkAccelerationStructureKHR *accel_obj =
2861 (VkAccelerationStructureKHR *)(data + entry->offset + j * entry->stride);
2862 ANV_FROM_HANDLE(vk_acceleration_structure, accel, *accel_obj);
2863
2864 anv_descriptor_set_write_acceleration_structure(device, set,
2865 accel,
2866 entry->binding,
2867 entry->array_element + j);
2868 }
2869 break;
2870
2871 default:
2872 break;
2873 }
2874 }
2875 }
2876
2877 void anv_UpdateDescriptorSetWithTemplate(
2878 VkDevice _device,
2879 VkDescriptorSet descriptorSet,
2880 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
2881 const void* pData)
2882 {
2883 ANV_FROM_HANDLE(anv_device, device, _device);
2884 ANV_FROM_HANDLE(anv_descriptor_set, set, descriptorSet);
2885 VK_FROM_HANDLE(vk_descriptor_update_template, template,
2886 descriptorUpdateTemplate);
2887
2888 anv_descriptor_set_write_template(device, set, template, pData);
2889 }
2890
2891 void anv_GetDescriptorSetLayoutSizeEXT(
2892 VkDevice device,
2893 VkDescriptorSetLayout layout,
2894 VkDeviceSize* pLayoutSizeInBytes)
2895 {
2896 ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, layout);
2897
2898 *pLayoutSizeInBytes = set_layout->descriptor_buffer_surface_size;
2899 }
2900
2901 void anv_GetDescriptorSetLayoutBindingOffsetEXT(
2902 VkDevice device,
2903 VkDescriptorSetLayout layout,
2904 uint32_t binding,
2905 VkDeviceSize* pOffset)
2906 {
2907 ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, layout);
2908 assert(binding < set_layout->binding_count);
2909 const struct anv_descriptor_set_binding_layout *bind_layout =
2910 &set_layout->binding[binding];
2911
2912 *pOffset = bind_layout->descriptor_surface_offset;
2913 }
2914
2915 static bool
2916 address_info_is_null(const VkDescriptorAddressInfoEXT *addr_info)
2917 {
2918 return addr_info == NULL || addr_info->address == 0 || addr_info->range == 0;
2919 }
2920
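/* vkGetDescriptorEXT() from VK_EXT_descriptor_buffer: write raw descriptor
 * data (SAMPLER_STATE, RENDER_SURFACE_STATE or anv_address_range_descriptor)
 * directly into application memory. dataSize is typically the size the
 * application queried for this descriptor type; for combined image/samplers
 * it covers one surface+sampler pair per plane.
 */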
2921 void anv_GetDescriptorEXT(
2922 VkDevice _device,
2923 const VkDescriptorGetInfoEXT* pDescriptorInfo,
2924 size_t dataSize,
2925 void* pDescriptor)
2926 {
2927 ANV_FROM_HANDLE(anv_device, device, _device);
2928 struct anv_sampler *sampler;
2929 struct anv_image_view *image_view;
2930
2931 switch (pDescriptorInfo->type) {
2932 case VK_DESCRIPTOR_TYPE_SAMPLER:
2933 if (pDescriptorInfo->data.pSampler &&
2934 (sampler = anv_sampler_from_handle(*pDescriptorInfo->data.pSampler))) {
2935 memcpy(pDescriptor, sampler->state[0], ANV_SAMPLER_STATE_SIZE);
2936 } else {
2937 memset(pDescriptor, 0, ANV_SAMPLER_STATE_SIZE);
2938 }
2939 break;
2940
2941 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2942 for (uint32_t i = 0; i < dataSize / (2 * ANV_SURFACE_STATE_SIZE); i++) {
2943 uint32_t desc_offset = i * 2 * ANV_SURFACE_STATE_SIZE;
2944
2945 if (pDescriptorInfo->data.pCombinedImageSampler &&
2946 (image_view = anv_image_view_from_handle(
2947 pDescriptorInfo->data.pCombinedImageSampler->imageView))) {
2948 const VkImageLayout layout =
2949 pDescriptorInfo->data.pCombinedImageSampler->imageLayout;
2950 memcpy(pDescriptor + desc_offset,
2951 anv_image_view_surface_data_for_plane_layout(image_view,
2952 pDescriptorInfo->type,
2953 i,
2954 layout),
2955 ANV_SURFACE_STATE_SIZE);
2956 } else {
2957 memcpy(pDescriptor + desc_offset,
2958 device->host_null_surface_state,
2959 ANV_SURFACE_STATE_SIZE);
2960 }
2961
2962 if (pDescriptorInfo->data.pCombinedImageSampler &&
2963 (sampler = anv_sampler_from_handle(
2964 pDescriptorInfo->data.pCombinedImageSampler->sampler))) {
2965 memcpy(pDescriptor + desc_offset + ANV_SURFACE_STATE_SIZE,
2966 sampler->state[i], ANV_SAMPLER_STATE_SIZE);
2967 } else {
2968 memset(pDescriptor + desc_offset + ANV_SURFACE_STATE_SIZE,
2969 0, ANV_SAMPLER_STATE_SIZE);
2970 }
2971 }
2972 break;
2973
2974 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2975 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2976 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2977 if (pDescriptorInfo->data.pSampledImage &&
2978 (image_view = anv_image_view_from_handle(
2979 pDescriptorInfo->data.pSampledImage->imageView))) {
2980 const VkImageLayout layout =
2981 pDescriptorInfo->data.pSampledImage->imageLayout;
2982
2983 memcpy(pDescriptor,
2984 anv_image_view_surface_data_for_plane_layout(image_view,
2985 pDescriptorInfo->type,
2986 0,
2987 layout),
2988 ANV_SURFACE_STATE_SIZE);
2989 } else {
2990 memcpy(pDescriptor, device->host_null_surface_state,
2991 ANV_SURFACE_STATE_SIZE);
2992 }
2993 break;
2994
2995 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
2996 const VkDescriptorAddressInfoEXT *addr_info =
2997 pDescriptorInfo->data.pUniformTexelBuffer;
2998
2999 if (!address_info_is_null(addr_info)) {
3000 struct anv_format_plane format =
3001 anv_get_format_plane(device->info,
3002 addr_info->format,
3003 0, VK_IMAGE_TILING_LINEAR);
3004 const uint32_t format_bs =
3005 isl_format_get_layout(format.isl_format)->bpb / 8;
3006
3007 anv_fill_buffer_surface_state(device, pDescriptor,
3008 format.isl_format, format.swizzle,
3009 ISL_SURF_USAGE_TEXTURE_BIT,
3010 anv_address_from_u64(addr_info->address),
3011 align_down_npot_u32(addr_info->range, format_bs),
3012 format_bs);
3013 } else {
3014 memcpy(pDescriptor, device->host_null_surface_state,
3015 ANV_SURFACE_STATE_SIZE);
3016 }
3017 break;
3018 }
3019
3020 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
3021 const VkDescriptorAddressInfoEXT *addr_info =
3022 pDescriptorInfo->data.pStorageTexelBuffer;
3023
3024 if (!address_info_is_null(addr_info)) {
3025 struct anv_format_plane format =
3026 anv_get_format_plane(device->info,
3027 addr_info->format,
3028 0, VK_IMAGE_TILING_LINEAR);
3029 const uint32_t format_bs =
3030 isl_format_get_layout(format.isl_format)->bpb / 8;
3031
3032 anv_fill_buffer_surface_state(device, pDescriptor,
3033 format.isl_format, format.swizzle,
3034 ISL_SURF_USAGE_STORAGE_BIT,
3035 anv_address_from_u64(addr_info->address),
3036 align_down_npot_u32(addr_info->range, format_bs),
3037 format_bs);
3038 } else {
3039 memcpy(pDescriptor, device->host_null_surface_state,
3040 ANV_SURFACE_STATE_SIZE);
3041 }
3042 break;
3043 }
3044
3045 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
3046 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: {
3047 const VkDescriptorAddressInfoEXT *addr_info =
3048 pDescriptorInfo->data.pStorageBuffer;
3049
3050 if (!address_info_is_null(addr_info)) {
3051 VkDeviceSize range = addr_info->range;
3052
3053 /* We report a bounds-checking alignment of 32B for the sake of block
3054 * messages, which read an entire register's worth at a time.
3055 */
3056 if (pDescriptorInfo->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER)
3057 range = align64(range, ANV_UBO_ALIGNMENT);
3058
3059 isl_surf_usage_flags_t usage =
3060 pDescriptorInfo->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ?
3061 ISL_SURF_USAGE_CONSTANT_BUFFER_BIT :
3062 ISL_SURF_USAGE_STORAGE_BIT;
3063
3064 enum isl_format format =
3065 anv_isl_format_for_descriptor_type(device, pDescriptorInfo->type);
3066
3067 isl_buffer_fill_state(&device->isl_dev, pDescriptor,
3068 .address = addr_info->address,
3069 .mocs = isl_mocs(&device->isl_dev, usage, false),
3070 .size_B = range,
3071 .format = format,
3072 .swizzle = ISL_SWIZZLE_IDENTITY,
3073 .stride_B = 1);
3074 } else {
3075 memcpy(pDescriptor, device->host_null_surface_state,
3076 ANV_SURFACE_STATE_SIZE);
3077 }
3078 break;
3079 }
3080
3081 case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {
3082 struct anv_address_range_descriptor desc_data = {
3083 .address = pDescriptorInfo->data.accelerationStructure,
3084 .range = 0,
3085 };
3086
3087 memcpy(pDescriptor, &desc_data, sizeof(desc_data));
3088 break;
3089 }
3090
3091 default:
3092 unreachable("Invalid descriptor type");
3093 }
3094 }
3095