/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

#include "util/mesa-sha1.h"
#include "vk_util.h"

#include "anv_private.h"

/*
 * Descriptor set layouts.
 */

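/* Returns, through the out parameters, the byte alignment required by the
 * surface-state and sampler-state portions of a descriptor with the given
 * data flags under the given layout type.
 */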
static void
anv_descriptor_data_alignment(enum anv_descriptor_data data,
                              enum anv_descriptor_set_layout_type layout_type,
                              unsigned *out_surface_align,
                              unsigned *out_sampler_align)
{
   unsigned surface_align = 1, sampler_align = 1;

   if (data & (ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE |
               ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE |
               ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE))
      surface_align = MAX2(surface_align, 8);

   if (data & ANV_DESCRIPTOR_SURFACE)
      surface_align = MAX2(surface_align, ANV_SURFACE_STATE_SIZE);

   if (data & ANV_DESCRIPTOR_SURFACE_SAMPLER) {
      surface_align = MAX2(surface_align, ANV_SURFACE_STATE_SIZE);
      if (layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT)
         sampler_align = MAX2(sampler_align, ANV_SAMPLER_STATE_SIZE);
   }

   if (data & ANV_DESCRIPTOR_SAMPLER) {
      if (layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT)
         sampler_align = MAX2(sampler_align, ANV_SAMPLER_STATE_SIZE);
      else
         surface_align = MAX2(surface_align, ANV_SAMPLER_STATE_SIZE);
   }

   if (data & ANV_DESCRIPTOR_INLINE_UNIFORM)
      surface_align = MAX2(surface_align, ANV_UBO_ALIGNMENT);

   *out_surface_align = surface_align;
   *out_sampler_align = sampler_align;
}

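/* With indirect descriptors, the descriptor buffer holds small
 * driver-defined structs (anv_*_descriptor) that shaders read to locate the
 * actual data; with direct descriptors, it holds hardware surface/sampler
 * state directly.  The two helpers below pick the data flags for each
 * scheme.
 */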
static enum anv_descriptor_data
anv_indirect_descriptor_data_for_type(VkDescriptorType type)
{
   enum anv_descriptor_data data = 0;

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      data = ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
             ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
             ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
             ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
             ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
             ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
             ANV_DESCRIPTOR_BUFFER_VIEW;
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE;
      break;

   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
      data = ANV_DESCRIPTOR_INLINE_UNIFORM;
      break;

   case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
      data = ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE;
      break;

   default:
      unreachable("Unsupported descriptor type");
   }

   /* We also need to push SSBO address ranges so that we can use A64
    * messages in the shader.
    */
   if (type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ||
       type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
       type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
       type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)
      data |= ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE;

   return data;
}

static enum anv_descriptor_data
anv_direct_descriptor_data_for_type(enum anv_descriptor_set_layout_type layout_type,
                                    VkDescriptorType type)
{
   enum anv_descriptor_data data = 0;

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      data = ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
             ANV_DESCRIPTOR_SAMPLER;
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      if (layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT) {
         data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
                ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
                ANV_DESCRIPTOR_SURFACE |
                ANV_DESCRIPTOR_SAMPLER;
      } else {
         data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
                ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
                ANV_DESCRIPTOR_SURFACE_SAMPLER;
      }
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
             ANV_DESCRIPTOR_SURFACE;
      break;

   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
      data = ANV_DESCRIPTOR_INLINE_UNIFORM;
      break;

   case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
      data = ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE;
      break;

   default:
      unreachable("Unsupported descriptor type");
   }

   return data;
}

static enum anv_descriptor_data
anv_descriptor_data_for_type(const struct anv_physical_device *device,
                             enum anv_descriptor_set_layout_type layout_type,
                             VkDescriptorType type)
{
   if (device->indirect_descriptors)
      return anv_indirect_descriptor_data_for_type(type);
   else
      return anv_direct_descriptor_data_for_type(layout_type, type);
}

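/* For VK_DESCRIPTOR_TYPE_MUTABLE_EXT: when no explicit type list is
 * provided, the binding must be able to hold any of the supported
 * non-dynamic types, so the data flags of all of them (plus acceleration
 * structures) are OR'd together; otherwise only the listed types
 * contribute.
 */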
static enum anv_descriptor_data
anv_descriptor_data_for_mutable_type(const struct anv_physical_device *device,
                                     enum anv_descriptor_set_layout_type layout_type,
                                     const VkMutableDescriptorTypeCreateInfoEXT *mutable_info,
                                     int binding)
{
   enum anv_descriptor_data desc_data = 0;

   if (!mutable_info || mutable_info->mutableDescriptorTypeListCount == 0) {
      for (VkDescriptorType i = 0; i <= VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT; i++) {
         if (i == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
             i == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
             i == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
            continue;

         desc_data |= anv_descriptor_data_for_type(device, layout_type, i);
      }

      desc_data |= anv_descriptor_data_for_type(
         device, layout_type, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);

      return desc_data;
   }

   const VkMutableDescriptorTypeListEXT *type_list =
      &mutable_info->pMutableDescriptorTypeLists[binding];
   for (uint32_t i = 0; i < type_list->descriptorTypeCount; i++) {
      desc_data |=
         anv_descriptor_data_for_type(device, layout_type,
                                      type_list->pDescriptorTypes[i]);
   }

   return desc_data;
}

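/* Returns, through the out parameters, the number of bytes a descriptor
 * with the given data flags consumes in the surface and sampler portions of
 * the descriptor buffer.
 */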
static void
anv_descriptor_data_size(enum anv_descriptor_data data,
                         enum anv_descriptor_set_layout_type layout_type,
                         uint16_t *out_surface_size,
                         uint16_t *out_sampler_size)
{
   unsigned surface_size = 0;
   unsigned sampler_size = 0;

   if (data & ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE)
      surface_size += sizeof(struct anv_sampled_image_descriptor);

   if (data & ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE)
      surface_size += sizeof(struct anv_storage_image_descriptor);

   if (data & ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE)
      surface_size += sizeof(struct anv_address_range_descriptor);

   if (data & ANV_DESCRIPTOR_SURFACE)
      surface_size += ANV_SURFACE_STATE_SIZE;

   /* Direct descriptors have sampler states stored separately */
   if (layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT) {
      if (data & ANV_DESCRIPTOR_SAMPLER)
         sampler_size += ANV_SAMPLER_STATE_SIZE;

      if (data & ANV_DESCRIPTOR_SURFACE_SAMPLER) {
         surface_size += ANV_SURFACE_STATE_SIZE;
         sampler_size += ANV_SAMPLER_STATE_SIZE;
      }
   } else {
      if (data & ANV_DESCRIPTOR_SAMPLER)
         surface_size += ANV_SAMPLER_STATE_SIZE;

      if (data & ANV_DESCRIPTOR_SURFACE_SAMPLER) {
         surface_size += ALIGN(ANV_SURFACE_STATE_SIZE + ANV_SAMPLER_STATE_SIZE,
                               ANV_SURFACE_STATE_SIZE);
      }
   }

   *out_surface_size = surface_size;
   *out_sampler_size = sampler_size;
}

static bool
anv_needs_descriptor_buffer(VkDescriptorType desc_type,
                            enum anv_descriptor_set_layout_type layout_type,
                            enum anv_descriptor_data desc_data)
{
   if (desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
      return true;

   uint16_t surface_size, sampler_size;
   anv_descriptor_data_size(desc_data, layout_type,
                            &surface_size, &sampler_size);
   return surface_size > 0 || sampler_size > 0;
}

/** Returns the size in bytes of each descriptor with the given layout */
static void
anv_descriptor_size(const struct anv_descriptor_set_binding_layout *layout,
                    enum anv_descriptor_set_layout_type layout_type,
                    uint16_t *out_surface_stride,
                    uint16_t *out_sampler_stride)
{
   if (layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM) {
      assert(layout->data == ANV_DESCRIPTOR_INLINE_UNIFORM);
      assert(layout->array_size <= UINT16_MAX);
      *out_surface_stride = layout->array_size;
      *out_sampler_stride = 0;
      return;
   }

   anv_descriptor_data_size(layout->data, layout_type,
                            out_surface_stride,
                            out_sampler_stride);
}

/** Returns the size in bytes of the biggest descriptor in the given layout */
static void
anv_descriptor_size_for_mutable_type(const struct anv_physical_device *device,
                                     enum anv_descriptor_set_layout_type layout_type,
                                     const VkMutableDescriptorTypeCreateInfoEXT *mutable_info,
                                     int binding,
                                     uint16_t *out_surface_stride,
                                     uint16_t *out_sampler_stride)
{
   *out_surface_stride = 0;
   *out_sampler_stride = 0;

   if (!mutable_info ||
       mutable_info->mutableDescriptorTypeListCount == 0 ||
       binding >= mutable_info->mutableDescriptorTypeListCount) {
      for (VkDescriptorType i = 0; i <= VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT; i++) {
         if (i == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
             i == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
             i == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
            continue;

         enum anv_descriptor_data desc_data =
            anv_descriptor_data_for_type(device, layout_type, i);
         uint16_t surface_stride, sampler_stride;
         anv_descriptor_data_size(desc_data, layout_type,
                                  &surface_stride, &sampler_stride);

         *out_surface_stride = MAX2(*out_surface_stride, surface_stride);
         *out_sampler_stride = MAX2(*out_sampler_stride, sampler_stride);
      }

      enum anv_descriptor_data desc_data = anv_descriptor_data_for_type(
         device, layout_type, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
      uint16_t surface_stride, sampler_stride;
      anv_descriptor_data_size(desc_data, layout_type,
                               &surface_stride, &sampler_stride);

      *out_surface_stride = MAX2(*out_surface_stride, surface_stride);
      *out_sampler_stride = MAX2(*out_sampler_stride, sampler_stride);

      return;
   }

   const VkMutableDescriptorTypeListEXT *type_list =
      &mutable_info->pMutableDescriptorTypeLists[binding];
   for (uint32_t i = 0; i < type_list->descriptorTypeCount; i++) {
      enum anv_descriptor_data desc_data =
         anv_descriptor_data_for_type(device, layout_type,
                                      type_list->pDescriptorTypes[i]);

      uint16_t surface_stride, sampler_stride;
      anv_descriptor_data_size(desc_data, layout_type,
                               &surface_stride, &sampler_stride);

      *out_surface_stride = MAX2(*out_surface_stride, surface_stride);
      *out_sampler_stride = MAX2(*out_sampler_stride, sampler_stride);
   }
}

static bool
anv_descriptor_data_supports_bindless(const struct anv_physical_device *pdevice,
                                      enum anv_descriptor_data data,
                                      bool sampler)
{
   if (pdevice->indirect_descriptors) {
      return data & (ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE |
                     ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE |
                     ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE);
   }

   /* Direct descriptors support bindless for everything */
   return true;
}

bool
anv_descriptor_supports_bindless(const struct anv_physical_device *pdevice,
                                 const struct anv_descriptor_set_binding_layout *binding,
                                 bool sampler)
{
   return anv_descriptor_data_supports_bindless(pdevice, binding->data,
                                                sampler);
}

bool
anv_descriptor_requires_bindless(const struct anv_physical_device *pdevice,
                                 const struct anv_descriptor_set_binding_layout *binding,
                                 bool sampler)
{
   if (pdevice->always_use_bindless)
      return anv_descriptor_supports_bindless(pdevice, binding, sampler);

   static const VkDescriptorBindingFlagBits flags_requiring_bindless =
      VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT |
      VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT |
      VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT;

   return (binding->flags & flags_requiring_bindless) != 0;
}

static enum anv_descriptor_set_layout_type
anv_descriptor_set_layout_type_for_flags(const struct anv_physical_device *device,
                                         const VkDescriptorSetLayoutCreateInfo *pCreateInfo)
{
   if (pCreateInfo->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT)
      return ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_BUFFER;
   else if (device->indirect_descriptors)
      return ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_INDIRECT;
   else
      return ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT;
}

void anv_GetDescriptorSetLayoutSupport(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    VkDescriptorSetLayoutSupport*               pSupport)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   const struct anv_physical_device *pdevice = device->physical;

   uint32_t surface_count[MESA_VULKAN_SHADER_STAGES] = { 0, };
   VkDescriptorType varying_desc_type = VK_DESCRIPTOR_TYPE_MAX_ENUM;
   bool needs_descriptor_buffer = false;

   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);
   const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);

   enum anv_descriptor_set_layout_type layout_type =
      anv_descriptor_set_layout_type_for_flags(pdevice, pCreateInfo);

   for (uint32_t b = 0; b < pCreateInfo->bindingCount; b++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[b];

      VkDescriptorBindingFlags flags = 0;
      if (binding_flags_info && binding_flags_info->bindingCount > 0) {
         assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
         flags = binding_flags_info->pBindingFlags[b];
      }

      enum anv_descriptor_data desc_data =
         binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
         anv_descriptor_data_for_mutable_type(pdevice, layout_type, mutable_info, b) :
         anv_descriptor_data_for_type(pdevice, layout_type, binding->descriptorType);

      if (anv_needs_descriptor_buffer(binding->descriptorType,
                                      layout_type, desc_data))
         needs_descriptor_buffer = true;

      if (flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)
         varying_desc_type = binding->descriptorType;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         /* There is no real limit on samplers */
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         /* Inline uniforms don't use a binding */
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         if (anv_descriptor_data_supports_bindless(pdevice, desc_data, false))
            break;

         if (binding->pImmutableSamplers) {
            for (uint32_t i = 0; i < binding->descriptorCount; i++) {
               ANV_FROM_HANDLE(anv_sampler, sampler,
                               binding->pImmutableSamplers[i]);
               anv_foreach_stage(s, binding->stageFlags)
                  surface_count[s] += sampler->n_planes;
            }
         } else {
            anv_foreach_stage(s, binding->stageFlags)
               surface_count[s] += binding->descriptorCount;
         }
         break;

      default:
         if (anv_descriptor_data_supports_bindless(pdevice, desc_data, false))
            break;

         anv_foreach_stage(s, binding->stageFlags)
            surface_count[s] += binding->descriptorCount;
         break;
      }
   }

   for (unsigned s = 0; s < ARRAY_SIZE(surface_count); s++) {
      if (needs_descriptor_buffer)
         surface_count[s] += 1;
   }

   VkDescriptorSetVariableDescriptorCountLayoutSupport *vdcls =
      vk_find_struct(pSupport->pNext,
                     DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT);
   if (vdcls != NULL) {
      if (varying_desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         vdcls->maxVariableDescriptorCount = MAX_INLINE_UNIFORM_BLOCK_SIZE;
      } else if (varying_desc_type != VK_DESCRIPTOR_TYPE_MAX_ENUM) {
         vdcls->maxVariableDescriptorCount = UINT16_MAX;
      } else {
         vdcls->maxVariableDescriptorCount = 0;
      }
   }

   bool supported = true;
   for (unsigned s = 0; s < ARRAY_SIZE(surface_count); s++) {
      /* Our maximum binding table size is 240 and we need to reserve 8 for
       * render targets.
       */
      if (surface_count[s] > MAX_BINDING_TABLE_SIZE - MAX_RTS)
         supported = false;
   }

   pSupport->supported = supported;
}

VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorSetLayout*                      pSetLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t num_bindings = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);

      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       *    pImmutableSamplers can be used to initialize a set of immutable
       *    samplers. [...]  If descriptorType is not one of these descriptor
       *    types, then pImmutableSamplers is ignored."
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;
      if ((desc_type == VK_DESCRIPTOR_TYPE_SAMPLER ||
           desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
          pCreateInfo->pBindings[j].pImmutableSamplers)
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   /* We need to allocate descriptor set layouts off the device allocator
    * with DEVICE scope because they are reference counted and may not be
    * destroyed when vkDestroyDescriptorSetLayout is called.
    */
   VK_MULTIALLOC(ma);
   VK_MULTIALLOC_DECL(&ma, struct anv_descriptor_set_layout, set_layout, 1);
   VK_MULTIALLOC_DECL(&ma, struct anv_descriptor_set_binding_layout,
                           bindings, num_bindings);
   VK_MULTIALLOC_DECL(&ma, struct anv_sampler *, samplers,
                           immutable_sampler_count);

   if (!vk_object_multizalloc(&device->vk, &ma, NULL,
                              VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT))
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->ref_cnt = 1;
   set_layout->binding_count = num_bindings;
   set_layout->flags = pCreateInfo->flags;
   set_layout->type = anv_descriptor_set_layout_type_for_flags(device->physical,
                                                               pCreateInfo);

   for (uint32_t b = 0; b < num_bindings; b++) {
      /* Initialize all binding_layout entries to -1 */
      memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));

      set_layout->binding[b].flags = 0;
      set_layout->binding[b].data = 0;
      set_layout->binding[b].max_plane_count = 0;
      set_layout->binding[b].array_size = 0;
      set_layout->binding[b].immutable_samplers = NULL;
   }

   /* Initialize all samplers to 0 */
   memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));

   uint32_t buffer_view_count = 0;
   uint32_t dynamic_offset_count = 0;
   uint32_t descriptor_buffer_surface_size = 0;
   uint32_t descriptor_buffer_sampler_size = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;
      /* We temporarily store the pCreateInfo->pBindings[] index (plus one)
       * in the immutable_samplers pointer.  This provides us with a
       * quick-and-dirty way to sort the bindings by binding number.
       */
      set_layout->binding[b].immutable_samplers = (void *)(uintptr_t)(j + 1);
   }
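
   /* Illustrative example: pBindings[] = { {binding = 2, ...},
    * {binding = 0, ...} } leaves binding[0].immutable_samplers == (void *)2
    * and binding[2].immutable_samplers == (void *)1, while binding[1] stays
    * NULL and is skipped as an empty binding by the second pass below.
    */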

   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);

   const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);

   for (uint32_t b = 0; b < num_bindings; b++) {
      /* We stashed the pCreateInfo->pBindings[] index (plus one) in the
       * immutable_samplers pointer.  Check for NULL (empty binding) and then
       * reset it and compute the index.
       */
      if (set_layout->binding[b].immutable_samplers == NULL)
         continue;
      const uint32_t info_idx =
         (uintptr_t)(void *)set_layout->binding[b].immutable_samplers - 1;
      set_layout->binding[b].immutable_samplers = NULL;

      const VkDescriptorSetLayoutBinding *binding =
         &pCreateInfo->pBindings[info_idx];

      if (binding->descriptorCount == 0)
         continue;

      set_layout->binding[b].type = binding->descriptorType;

      if (binding_flags_info && binding_flags_info->bindingCount > 0) {
         assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
         set_layout->binding[b].flags =
            binding_flags_info->pBindingFlags[info_idx];

         /* From the Vulkan spec:
          *
          *    "If VkDescriptorSetLayoutCreateInfo::flags includes
          *    VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, then
          *    all elements of pBindingFlags must not include
          *    VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
          *    VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, or
          *    VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT"
          */
         if (pCreateInfo->flags &
             VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR) {
            assert(!(set_layout->binding[b].flags &
               (VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT |
                VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT |
                VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)));
         }
      }

      set_layout->binding[b].data =
         binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
         anv_descriptor_data_for_mutable_type(device->physical,
                                              set_layout->type,
                                              mutable_info, b) :
         anv_descriptor_data_for_type(device->physical, set_layout->type,
                                      binding->descriptorType);

      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].descriptor_index = set_layout->descriptor_count;
      set_layout->descriptor_count += binding->descriptorCount;

      if (set_layout->binding[b].data & ANV_DESCRIPTOR_BUFFER_VIEW) {
         set_layout->binding[b].buffer_view_index = buffer_view_count;
         buffer_view_count += binding->descriptorCount;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
         set_layout->binding[b].max_plane_count = 1;
         if (binding->pImmutableSamplers) {
            set_layout->binding[b].immutable_samplers = samplers;
            samplers += binding->descriptorCount;

            for (uint32_t i = 0; i < binding->descriptorCount; i++) {
               ANV_FROM_HANDLE(anv_sampler, sampler,
                               binding->pImmutableSamplers[i]);

               set_layout->binding[b].immutable_samplers[i] = sampler;
               if (set_layout->binding[b].max_plane_count < sampler->n_planes)
                  set_layout->binding[b].max_plane_count = sampler->n_planes;
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         set_layout->binding[b].max_plane_count = 1;
         break;

      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
         set_layout->dynamic_offset_stages[dynamic_offset_count] = binding->stageFlags;
         dynamic_offset_count += binding->descriptorCount;
         assert(dynamic_offset_count < MAX_DYNAMIC_BUFFERS);
         break;

      default:
         break;
      }

      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT) {
         anv_descriptor_size_for_mutable_type(
            device->physical, set_layout->type, mutable_info, b,
            &set_layout->binding[b].descriptor_data_surface_size,
            &set_layout->binding[b].descriptor_data_sampler_size);
      } else {
         anv_descriptor_size(&set_layout->binding[b],
                             set_layout->type,
                             &set_layout->binding[b].descriptor_data_surface_size,
                             &set_layout->binding[b].descriptor_data_sampler_size);
      }

      /* For multi-planar bindings, we make every descriptor consume the maximum
       * number of planes so we don't have to bother with walking arrays and
       * adding things up every time.  Fortunately, YCbCr samplers aren't all
       * that common and likely won't be in the middle of big arrays.
       */
      set_layout->binding[b].descriptor_surface_stride =
         MAX2(set_layout->binding[b].max_plane_count, 1) *
         set_layout->binding[b].descriptor_data_surface_size;
      set_layout->binding[b].descriptor_sampler_stride =
         MAX2(set_layout->binding[b].max_plane_count, 1) *
         set_layout->binding[b].descriptor_data_sampler_size;
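
      /* In other words, stride = max(plane count, 1) * per-descriptor data
       * size: an (illustrative) 3-plane YCbCr immutable sampler makes each
       * array element consume three single-plane descriptors' worth of
       * space in the surface and sampler portions.
       */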

      unsigned surface_align, sampler_align;
      anv_descriptor_data_alignment(set_layout->binding[b].data,
                                    set_layout->type,
                                    &surface_align,
                                    &sampler_align);
      descriptor_buffer_surface_size =
         align(descriptor_buffer_surface_size, surface_align);
      descriptor_buffer_sampler_size =
         align(descriptor_buffer_sampler_size, sampler_align);

      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         set_layout->binding[b].descriptor_surface_offset = descriptor_buffer_surface_size;
         descriptor_buffer_surface_size += binding->descriptorCount;
      } else {
         set_layout->binding[b].descriptor_surface_offset = descriptor_buffer_surface_size;
         descriptor_buffer_surface_size +=
            set_layout->binding[b].descriptor_surface_stride * binding->descriptorCount;
      }

      set_layout->binding[b].descriptor_sampler_offset = descriptor_buffer_sampler_size;
      descriptor_buffer_sampler_size +=
         set_layout->binding[b].descriptor_sampler_stride * binding->descriptorCount;

      set_layout->shader_stages |= binding->stageFlags;
   }

   /* Sanity checks */
   assert(descriptor_buffer_sampler_size == 0 ||
          set_layout->type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT);

   set_layout->buffer_view_count = buffer_view_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;
   set_layout->descriptor_buffer_surface_size = descriptor_buffer_surface_size;
   set_layout->descriptor_buffer_sampler_size = descriptor_buffer_sampler_size;

   *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

void
anv_descriptor_set_layout_destroy(struct anv_device *device,
                                  struct anv_descriptor_set_layout *layout)
{
   assert(layout->ref_cnt == 0);
   vk_object_free(&device->vk, NULL, layout);
}

static const struct anv_descriptor_set_binding_layout *
set_layout_dynamic_binding(const struct anv_descriptor_set_layout *set_layout)
{
   if (set_layout->binding_count == 0)
      return NULL;

   const struct anv_descriptor_set_binding_layout *last_binding =
      &set_layout->binding[set_layout->binding_count - 1];
   if (!(last_binding->flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT))
      return NULL;

   return last_binding;
}

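/* Returns the layout's total descriptor count, shrunk when the last binding
 * uses VARIABLE_DESCRIPTOR_COUNT.  E.g. (illustrative) a last binding
 * declared with array_size = 1024 but allocated with var_desc_count = 16
 * shrinks the count by 1008.  Inline uniform blocks are exempt because
 * their array size is a byte size, not a descriptor count.
 */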
static uint32_t
set_layout_descriptor_count(const struct anv_descriptor_set_layout *set_layout,
                            uint32_t var_desc_count)
{
   const struct anv_descriptor_set_binding_layout *dynamic_binding =
      set_layout_dynamic_binding(set_layout);
   if (dynamic_binding == NULL)
      return set_layout->descriptor_count;

   assert(var_desc_count <= dynamic_binding->array_size);
   uint32_t shrink = dynamic_binding->array_size - var_desc_count;

   if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
      return set_layout->descriptor_count;

   return set_layout->descriptor_count - shrink;
}

static uint32_t
set_layout_buffer_view_count(const struct anv_descriptor_set_layout *set_layout,
                             uint32_t var_desc_count)
{
   const struct anv_descriptor_set_binding_layout *dynamic_binding =
      set_layout_dynamic_binding(set_layout);
   if (dynamic_binding == NULL)
      return set_layout->buffer_view_count;

   assert(var_desc_count <= dynamic_binding->array_size);
   uint32_t shrink = dynamic_binding->array_size - var_desc_count;

   if (!(dynamic_binding->data & ANV_DESCRIPTOR_BUFFER_VIEW))
      return set_layout->buffer_view_count;

   return set_layout->buffer_view_count - shrink;
}

static bool
anv_descriptor_set_layout_empty(const struct anv_descriptor_set_layout *set_layout)
{
   return set_layout->binding_count == 0;
}

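/* Computes the surface and sampler descriptor buffer sizes needed for a set
 * allocated with var_desc_count entries in its variable-count binding (if
 * any), aligning the surface portion to ANV_UBO_ALIGNMENT.
 */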
static void
anv_descriptor_set_layout_descriptor_buffer_size(const struct anv_descriptor_set_layout *set_layout,
                                                 uint32_t var_desc_count,
                                                 uint32_t *out_surface_size,
                                                 uint32_t *out_sampler_size)
{
   const struct anv_descriptor_set_binding_layout *dynamic_binding =
      set_layout_dynamic_binding(set_layout);
   if (dynamic_binding == NULL) {
      *out_surface_size = ALIGN(set_layout->descriptor_buffer_surface_size,
                                ANV_UBO_ALIGNMENT);
      *out_sampler_size = set_layout->descriptor_buffer_sampler_size;
      return;
   }

   assert(var_desc_count <= dynamic_binding->array_size);
   uint32_t shrink = dynamic_binding->array_size - var_desc_count;
   uint32_t set_surface_size, set_sampler_size;

   if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
      /* Inline uniform blocks are specified to use the descriptor array
       * size as the size in bytes of the block.
       */
      set_surface_size = set_layout->descriptor_buffer_surface_size - shrink;
      set_sampler_size = 0;
   } else {
      set_surface_size =
         set_layout->descriptor_buffer_surface_size > 0 ?
         (set_layout->descriptor_buffer_surface_size -
          shrink * dynamic_binding->descriptor_surface_stride) : 0;
      set_sampler_size =
         set_layout->descriptor_buffer_sampler_size > 0 ?
         (set_layout->descriptor_buffer_sampler_size -
          shrink * dynamic_binding->descriptor_sampler_stride) : 0;
   }

   *out_surface_size = ALIGN(set_surface_size, ANV_UBO_ALIGNMENT);
   *out_sampler_size = set_sampler_size;
}

void anv_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
    VkDescriptorSetLayout                       _set_layout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   anv_descriptor_set_layout_unref(device, set_layout);
}

void
anv_descriptor_set_layout_print(const struct anv_descriptor_set_layout *layout)
{
   fprintf(stderr, "set layout:\n");
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      fprintf(stderr, "  binding%03u: offsets=0x%08x/0x%08x sizes=%04u/%04u strides=%03u/%03u planes=%hhu count=%03u\n",
              b,
              layout->binding[b].descriptor_surface_offset,
              layout->binding[b].descriptor_sampler_offset,
              layout->binding[b].descriptor_data_surface_size,
              layout->binding[b].descriptor_data_sampler_size,
              layout->binding[b].descriptor_surface_stride,
              layout->binding[b].descriptor_sampler_stride,
              layout->binding[b].max_plane_count,
              layout->binding[b].array_size);
   }
}

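/* SHA-1 hashing of set layouts.  Only state that can affect the compiled
 * shaders is hashed; for immutable samplers that is just the YCbCr
 * conversion state.  The digest produced by anv_pipeline_sets_layout_hash()
 * below presumably feeds pipeline hashing elsewhere in the driver.
 */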
#define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, &(x), sizeof(x));

static void
sha1_update_immutable_sampler(struct mesa_sha1 *ctx,
                              const struct anv_sampler *sampler)
{
   if (!sampler->vk.ycbcr_conversion)
      return;

   /* The only thing that affects the shader is ycbcr conversion */
   SHA1_UPDATE_VALUE(ctx, sampler->vk.ycbcr_conversion->state);
}

static void
sha1_update_descriptor_set_binding_layout(struct mesa_sha1 *ctx,
   const struct anv_descriptor_set_binding_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->flags);
   SHA1_UPDATE_VALUE(ctx, layout->data);
   SHA1_UPDATE_VALUE(ctx, layout->max_plane_count);
   SHA1_UPDATE_VALUE(ctx, layout->array_size);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_index);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_index);
   SHA1_UPDATE_VALUE(ctx, layout->buffer_view_index);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_surface_offset);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_sampler_offset);

   if (layout->immutable_samplers) {
      for (uint16_t i = 0; i < layout->array_size; i++)
         sha1_update_immutable_sampler(ctx, layout->immutable_samplers[i]);
   }
}

static void
sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
                                  const struct anv_descriptor_set_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->flags);
   SHA1_UPDATE_VALUE(ctx, layout->binding_count);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_count);
   SHA1_UPDATE_VALUE(ctx, layout->shader_stages);
   SHA1_UPDATE_VALUE(ctx, layout->buffer_view_count);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_count);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_buffer_surface_size);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_buffer_sampler_size);

   for (uint16_t i = 0; i < layout->binding_count; i++)
      sha1_update_descriptor_set_binding_layout(ctx, &layout->binding[i]);
}

/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */
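
/* Typical call sequence, as used by anv_CreatePipelineLayout() and
 * anv_DestroyPipelineLayout() below:
 *
 *    anv_pipeline_sets_layout_init(&layout, device, independent_sets);
 *    anv_pipeline_sets_layout_add(&layout, set_idx, set_layout); // per set
 *    anv_pipeline_sets_layout_hash(&layout);
 *    ...
 *    anv_pipeline_sets_layout_fini(&layout); // drops set layout references
 */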

void
anv_pipeline_sets_layout_init(struct anv_pipeline_sets_layout *layout,
                              struct anv_device *device,
                              bool independent_sets)
{
   memset(layout, 0, sizeof(*layout));

   layout->device = device;
   layout->push_descriptor_set_index = -1;
   layout->independent_sets = independent_sets;
}

void
anv_pipeline_sets_layout_add(struct anv_pipeline_sets_layout *layout,
                             uint32_t set_idx,
                             struct anv_descriptor_set_layout *set_layout)
{
   if (layout->set[set_idx].layout)
      return;

   /* Workaround for CTS: Internal CTS issue 3584 */
   if (layout->independent_sets && anv_descriptor_set_layout_empty(set_layout))
      return;

   if (layout->type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_UNKNOWN)
      layout->type = set_layout->type;
   else
      assert(layout->type == set_layout->type);

   layout->num_sets = MAX2(set_idx + 1, layout->num_sets);

   layout->set[set_idx].layout =
      anv_descriptor_set_layout_ref(set_layout);

   layout->set[set_idx].dynamic_offset_start = layout->num_dynamic_buffers;
   layout->num_dynamic_buffers += set_layout->dynamic_offset_count;

   assert(layout->num_dynamic_buffers < MAX_DYNAMIC_BUFFERS);

   if (set_layout->flags &
       VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR) {
      assert(layout->push_descriptor_set_index == -1);
      layout->push_descriptor_set_index = set_idx;
   }
}

void
anv_pipeline_sets_layout_hash(struct anv_pipeline_sets_layout *layout)
{
   struct mesa_sha1 ctx;
   _mesa_sha1_init(&ctx);
   for (unsigned s = 0; s < layout->num_sets; s++) {
      if (!layout->set[s].layout)
         continue;
      sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
      _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
                        sizeof(layout->set[s].dynamic_offset_start));
   }
   _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
   _mesa_sha1_final(&ctx, layout->sha1);
}

void
anv_pipeline_sets_layout_fini(struct anv_pipeline_sets_layout *layout)
{
   for (unsigned s = 0; s < layout->num_sets; s++) {
      if (!layout->set[s].layout)
         continue;

      anv_descriptor_set_layout_unref(layout->device, layout->set[s].layout);
   }
}

void
anv_pipeline_sets_layout_print(const struct anv_pipeline_sets_layout *layout)
{
   fprintf(stderr, "layout: dyn_count=%u sets=%u ind=%u\n",
           layout->num_dynamic_buffers,
           layout->num_sets,
           layout->independent_sets);
   for (unsigned s = 0; s < layout->num_sets; s++) {
      if (!layout->set[s].layout)
         continue;

      fprintf(stderr, "   set%u: dyn_start=%u flags=0x%x\n",
              s, layout->set[s].dynamic_offset_start, layout->set[s].layout->flags);
   }
}

VkResult anv_CreatePipelineLayout(
    VkDevice                                    _device,
    const VkPipelineLayoutCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkPipelineLayout*                           pPipelineLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_pipeline_layout *layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_object_zalloc(&device->vk, pAllocator, sizeof(*layout),
                             VK_OBJECT_TYPE_PIPELINE_LAYOUT);
   if (layout == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   anv_pipeline_sets_layout_init(&layout->sets_layout, device,
                                 pCreateInfo->flags & VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT);

   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
                      pCreateInfo->pSetLayouts[set]);

      /* VUID-VkPipelineLayoutCreateInfo-graphicsPipelineLibrary-06753
       *
       *    "If graphicsPipelineLibrary is not enabled, elements of
       *     pSetLayouts must be valid VkDescriptorSetLayout objects"
       *
       * As a result of supporting graphicsPipelineLibrary, we need to allow
       * null descriptor set layouts.
       */
      if (set_layout == NULL)
         continue;

      anv_pipeline_sets_layout_add(&layout->sets_layout, set, set_layout);
   }

   anv_pipeline_sets_layout_hash(&layout->sets_layout);

   *pPipelineLayout = anv_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void anv_DestroyPipelineLayout(
    VkDevice                                    _device,
    VkPipelineLayout                            _pipelineLayout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_pipeline_layout, layout, _pipelineLayout);

   if (!layout)
      return;

   anv_pipeline_sets_layout_fini(&layout->sets_layout);

   vk_object_free(&device->vk, pAllocator, layout);
}

/*
 * Descriptor pools.
 *
 * These are implemented using a big pool of memory and a vma heap for the
 * host memory allocations and a state_stream and a free list for the buffer
 * view surface state. The spec allows us to fail to allocate due to
 * fragmentation in all cases but two: 1) after pool reset, allocating up
 * until the pool size with no freeing must succeed and 2) allocating and
 * freeing only descriptor sets with the same layout. Case 1) is easy enough,
 * and the vma heap ensures case 2).
 */

/* The vma heap reserves 0 to mean NULL; we have to offset by some amount to
 * ensure we can allocate the entire BO without hitting zero.  The actual
 * amount doesn't matter.
 */
#define POOL_HEAP_OFFSET 64

#define EMPTY 1

static VkResult
anv_descriptor_pool_heap_init(struct anv_device *device,
                              struct anv_descriptor_pool_heap *heap,
                              uint32_t size,
                              bool host_only,
                              bool samplers)
{
   if (size == 0)
      return VK_SUCCESS;

   if (host_only) {
      heap->size = size;
      heap->host_mem = vk_zalloc(&device->vk.alloc, size, 8,
                                 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
      if (heap->host_mem == NULL)
         return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
   } else {
      const char *bo_name =
         device->physical->indirect_descriptors ? "indirect descriptors" :
         samplers ? "direct sampler" : "direct surfaces";

      heap->size = align(size, 4096);

      VkResult result = anv_device_alloc_bo(device,
                                            bo_name, heap->size,
                                            ANV_BO_ALLOC_CAPTURE |
                                            ANV_BO_ALLOC_MAPPED |
                                            ANV_BO_ALLOC_HOST_CACHED_COHERENT |
                                            (samplers ?
                                             ANV_BO_ALLOC_SAMPLER_POOL :
                                             ANV_BO_ALLOC_DESCRIPTOR_POOL),
                                            0 /* explicit_address */,
                                            &heap->bo);
      if (result != VK_SUCCESS)
         return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
   }

   util_vma_heap_init(&heap->heap, POOL_HEAP_OFFSET, heap->size);

   return VK_SUCCESS;
}

static void
anv_descriptor_pool_heap_fini(struct anv_device *device,
                              struct anv_descriptor_pool_heap *heap)
{
   if (heap->size == 0)
      return;

   util_vma_heap_finish(&heap->heap);

   if (heap->bo)
      anv_device_release_bo(device, heap->bo);

   if (heap->host_mem)
      vk_free(&device->vk.alloc, heap->host_mem);
}

static void
anv_descriptor_pool_heap_reset(struct anv_device *device,
                               struct anv_descriptor_pool_heap *heap)
{
   if (heap->size == 0)
      return;

   util_vma_heap_finish(&heap->heap);
   util_vma_heap_init(&heap->heap, POOL_HEAP_OFFSET, heap->size);
}

static VkResult
anv_descriptor_pool_heap_alloc(struct anv_descriptor_pool *pool,
                               struct anv_descriptor_pool_heap *heap,
                               uint32_t size, uint32_t alignment,
                               struct anv_state *state)
{
   uint64_t pool_vma_offset =
      util_vma_heap_alloc(&heap->heap, size, alignment);
   if (pool_vma_offset == 0)
      return vk_error(pool, VK_ERROR_FRAGMENTED_POOL);

   assert(pool_vma_offset >= POOL_HEAP_OFFSET &&
          pool_vma_offset - POOL_HEAP_OFFSET <= INT32_MAX);

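   /* The vma heap hands back offsets in [POOL_HEAP_OFFSET,
    * POOL_HEAP_OFFSET + heap->size); subtracting POOL_HEAP_OFFSET turns
    * that into a plain byte offset into the BO or host memory block.
    */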
   state->offset = pool_vma_offset - POOL_HEAP_OFFSET;
   state->alloc_size = size;
   if (heap->host_mem)
      state->map = heap->host_mem + state->offset;
   else
      state->map = heap->bo->map + state->offset;

   return VK_SUCCESS;
}

1246 static void
1247 anv_descriptor_pool_heap_free(struct anv_descriptor_pool_heap *heap,
1248                               struct anv_state state)
1249 {
1250    util_vma_heap_free(&heap->heap,
1251                       (uint64_t)state.offset + POOL_HEAP_OFFSET,
1252                       state.alloc_size);
1253 }
1254 
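/* Creating a pool sizes everything up front from pPoolSizes: the host
 * allocation (set structs, descriptors, buffer views) and the GPU-visible
 * surface/sampler heaps, including worst-case YCbCr plane counts and
 * alignment padding.
 */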
1255 VkResult anv_CreateDescriptorPool(
1256     VkDevice                                    _device,
1257     const VkDescriptorPoolCreateInfo*           pCreateInfo,
1258     const VkAllocationCallbacks*                pAllocator,
1259     VkDescriptorPool*                           pDescriptorPool)
1260 {
1261    ANV_FROM_HANDLE(anv_device, device, _device);
1262    struct anv_descriptor_pool *pool;
1263 
1264    const VkDescriptorPoolInlineUniformBlockCreateInfo *inline_info =
1265       vk_find_struct_const(pCreateInfo->pNext,
1266                            DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO);
1267    const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
1268       vk_find_struct_const(pCreateInfo->pNext,
1269                            MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);
1270 
1271    uint32_t descriptor_count = 0;
1272    uint32_t buffer_view_count = 0;
1273    uint32_t descriptor_bo_surface_size = 0;
1274    uint32_t descriptor_bo_sampler_size = 0;
1275 
1276    const enum anv_descriptor_set_layout_type layout_type =
1277       device->physical->indirect_descriptors ?
1278       ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_INDIRECT :
1279       ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT;
1280 
1281    for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
1282       enum anv_descriptor_data desc_data =
1283          pCreateInfo->pPoolSizes[i].type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
1284          anv_descriptor_data_for_mutable_type(device->physical, layout_type,
1285                                               mutable_info, i) :
1286          anv_descriptor_data_for_type(device->physical, layout_type,
1287                                       pCreateInfo->pPoolSizes[i].type);
1288 
1289       if (desc_data & ANV_DESCRIPTOR_BUFFER_VIEW)
1290          buffer_view_count += pCreateInfo->pPoolSizes[i].descriptorCount;
1291 
1292       uint16_t desc_surface_size, desc_sampler_size;
1293       if (pCreateInfo->pPoolSizes[i].type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT) {
1294          anv_descriptor_size_for_mutable_type(device->physical, layout_type, mutable_info, i,
1295                                               &desc_surface_size, &desc_sampler_size);
1296       } else {
1297          anv_descriptor_data_size(desc_data, layout_type,
1298                                   &desc_surface_size, &desc_sampler_size);
1299       }
1300 
1301       uint32_t desc_data_surface_size =
1302          desc_surface_size * pCreateInfo->pPoolSizes[i].descriptorCount;
1303       uint32_t desc_data_sampler_size =
1304          desc_sampler_size * pCreateInfo->pPoolSizes[i].descriptorCount;
1305 
1306       /* Combined image sampler descriptors can take up to 3 slots if they
1307        * hold a YCbCr image.
1308        */
1309       if (pCreateInfo->pPoolSizes[i].type ==
1310           VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
1311          desc_data_surface_size *= 3;
1312          desc_data_sampler_size *= 3;
1313       }
1314 
1315       if (pCreateInfo->pPoolSizes[i].type ==
1316           VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
1317          /* Inline uniform blocks are specified to use the descriptor array
1318           * size as the size in bytes of the block.
1319           */
1320          assert(inline_info);
1321          desc_data_surface_size += pCreateInfo->pPoolSizes[i].descriptorCount;
1322       }
1323 
1324       descriptor_bo_surface_size += desc_data_surface_size;
1325       descriptor_bo_sampler_size += desc_data_sampler_size;
1326 
1327       descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
1328    }
1329    /* We have to align descriptor buffer allocations to 32B so that we can
1330     * push descriptor buffers.  This means that each descriptor buffer
1331     * allocated may burn up to 32B of extra space to get the right alignment.
1332     * (Technically, it's at most 28B because we're always going to start at
1333     * least 4B aligned but we're being conservative here.)  Allocate enough
1334     * extra space that we can chop it into maxSets pieces and align each one
1335     * of them to 32B.
1336     */
1337    descriptor_bo_surface_size += ANV_UBO_ALIGNMENT * pCreateInfo->maxSets;
1338    /* We align inline uniform blocks to ANV_UBO_ALIGNMENT */
1339    if (inline_info) {
1340       descriptor_bo_surface_size +=
1341          ANV_UBO_ALIGNMENT * inline_info->maxInlineUniformBlockBindings;
1342    }
1343 
1344    const bool host_only =
1345       pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT;
1346 
1347    /* For host_only pools, allocate some memory to hold the written surface
1348     * states of the internal anv_buffer_views. With normal pools, the memory
1349     * holding surface state is allocated from the device surface_state_pool.
1350     */
1351    const size_t host_mem_size =
1352       pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +
1353       descriptor_count * sizeof(struct anv_descriptor) +
1354       buffer_view_count * sizeof(struct anv_buffer_view) +
1355       (host_only ? buffer_view_count * ANV_SURFACE_STATE_SIZE : 0);
1356 
1357    pool = vk_object_zalloc(&device->vk, pAllocator,
1358                            sizeof(*pool) + host_mem_size,
1359                            VK_OBJECT_TYPE_DESCRIPTOR_POOL);
1360    if (!pool)
1361       return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
1362 
1363    pool->host_mem_size = host_mem_size;
1364    util_vma_heap_init(&pool->host_heap, POOL_HEAP_OFFSET, host_mem_size);
1365 
1366    pool->host_only = host_only;
1367 
1368    VkResult result = anv_descriptor_pool_heap_init(device,
1369                                                    &pool->surfaces,
1370                                                    descriptor_bo_surface_size,
1371                                                    pool->host_only,
1372                                                    false /* samplers */);
1373    if (result != VK_SUCCESS) {
1374       vk_object_free(&device->vk, pAllocator, pool);
1375       return result;
1376    }
1377 
1378    result = anv_descriptor_pool_heap_init(device,
1379                                           &pool->samplers,
1380                                           descriptor_bo_sampler_size,
1381                                           pool->host_only,
1382                                           true /* samplers */);
1383    if (result != VK_SUCCESS) {
1384       anv_descriptor_pool_heap_fini(device, &pool->surfaces);
1385       vk_object_free(&device->vk, pAllocator, pool);
1386       return result;
1387    }
1388 
1389    /* All the surface states allocated by the descriptor pool are internal. We
1390     * have to allocate them to handle the fact that we do not have surface
1391     * states for VkBuffers.
1392     */
1393    anv_state_stream_init(&pool->surface_state_stream,
1394                          &device->internal_surface_state_pool, 4096);
1395    pool->surface_state_free_list = NULL;
1396 
1397    list_inithead(&pool->desc_sets);
1398 
1399    ANV_RMV(descriptor_pool_create, device, pCreateInfo, pool, false);
1400 
1401    *pDescriptorPool = anv_descriptor_pool_to_handle(pool);
1402 
1403    return VK_SUCCESS;
1404 }
1405 
1406 void anv_DestroyDescriptorPool(
1407     VkDevice                                    _device,
1408     VkDescriptorPool                            _pool,
1409     const VkAllocationCallbacks*                pAllocator)
1410 {
1411    ANV_FROM_HANDLE(anv_device, device, _device);
1412    ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);
1413 
1414    if (!pool)
1415       return;
1416 
1417    ANV_RMV(resource_destroy, device, pool);
1418 
1419    list_for_each_entry_safe(struct anv_descriptor_set, set,
1420                             &pool->desc_sets, pool_link) {
1421       anv_descriptor_set_layout_unref(device, set->layout);
1422    }
1423 
1424    util_vma_heap_finish(&pool->host_heap);
1425 
1426    anv_state_stream_finish(&pool->surface_state_stream);
1427 
1428    anv_descriptor_pool_heap_fini(device, &pool->surfaces);
1429    anv_descriptor_pool_heap_fini(device, &pool->samplers);
1430 
1431    vk_object_free(&device->vk, pAllocator, pool);
1432 }
1433 
1434 VkResult anv_ResetDescriptorPool(
1435     VkDevice                                    _device,
1436     VkDescriptorPool                            descriptorPool,
1437     VkDescriptorPoolResetFlags                  flags)
1438 {
1439    ANV_FROM_HANDLE(anv_device, device, _device);
1440    ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);
1441 
1442    list_for_each_entry_safe(struct anv_descriptor_set, set,
1443                             &pool->desc_sets, pool_link) {
1444       anv_descriptor_set_layout_unref(device, set->layout);
1445    }
1446    list_inithead(&pool->desc_sets);
1447 
1448    util_vma_heap_finish(&pool->host_heap);
1449    util_vma_heap_init(&pool->host_heap, POOL_HEAP_OFFSET, pool->host_mem_size);
1450 
1451    anv_descriptor_pool_heap_reset(device, &pool->surfaces);
1452    anv_descriptor_pool_heap_reset(device, &pool->samplers);
1453 
1454    anv_state_stream_finish(&pool->surface_state_stream);
1455    anv_state_stream_init(&pool->surface_state_stream,
1456                          &device->internal_surface_state_pool, 4096);
1457    pool->surface_state_free_list = NULL;
1458 
1459    return VK_SUCCESS;
1460 }
1461 
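/* Distinguish the two failure modes required by the spec: if the request
 * would fit in the remaining free space but no contiguous range is
 * available, the pool is fragmented; otherwise it is simply exhausted.
 */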
1462 static VkResult
1463 anv_descriptor_pool_alloc_set(struct anv_descriptor_pool *pool,
1464                               uint32_t size,
1465                               struct anv_descriptor_set **set)
1466 {
1467    uint64_t vma_offset = util_vma_heap_alloc(&pool->host_heap, size, 1);
1468 
1469    if (vma_offset == 0) {
1470       if (size <= pool->host_heap.free_size) {
1471          return VK_ERROR_FRAGMENTED_POOL;
1472       } else {
1473          return VK_ERROR_OUT_OF_POOL_MEMORY;
1474       }
1475    }
1476 
1477    assert(vma_offset >= POOL_HEAP_OFFSET);
1478    uint64_t host_mem_offset = vma_offset - POOL_HEAP_OFFSET;
1479 
1480    *set = (struct anv_descriptor_set *) (pool->host_mem + host_mem_offset);
1481    (*set)->size = size;
1482 
1483    return VK_SUCCESS;
1484 }
1485 
1486 static void
1487 anv_descriptor_pool_free_set(struct anv_descriptor_pool *pool,
1488                              struct anv_descriptor_set *set)
1489 {
1490    util_vma_heap_free(&pool->host_heap,
1491                       ((char *) set - pool->host_mem) + POOL_HEAP_OFFSET,
1492                       set->size);
1493 }
1494 
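/* Freed surface states are chained through their own mapped memory: the
 * first bytes of a recycled state hold the next pointer and the anv_state
 * needed to hand it back out later.
 */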
1495 struct surface_state_free_list_entry {
1496    void *next;
1497    struct anv_state state;
1498 };
1499 
1500 static struct anv_state
1501 anv_descriptor_pool_alloc_state(struct anv_descriptor_pool *pool)
1502 {
1503    assert(!pool->host_only);
1504 
1505    struct surface_state_free_list_entry *entry =
1506       pool->surface_state_free_list;
1507 
1508    if (entry) {
1509       struct anv_state state = entry->state;
1510       pool->surface_state_free_list = entry->next;
1511       assert(state.alloc_size == ANV_SURFACE_STATE_SIZE);
1512       return state;
1513    } else {
1514       struct anv_state state =
1515          anv_state_stream_alloc(&pool->surface_state_stream,
1516                                 ANV_SURFACE_STATE_SIZE, 64);
1517       return state;
1518    }
1519 }
1520 
1521 static void
1522 anv_descriptor_pool_free_state(struct anv_descriptor_pool *pool,
1523                                struct anv_state state)
1524 {
1525    assert(state.alloc_size);
1526    /* Put the buffer view surface state back on the free list. */
1527    struct surface_state_free_list_entry *entry = state.map;
1528    entry->next = pool->surface_state_free_list;
1529    entry->state = state;
1530    pool->surface_state_free_list = entry;
1531 }
1532 
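/* Computes the host memory footprint of one descriptor set.  This must
 * mirror the per-set portion of host_mem_size in anv_CreateDescriptorPool():
 * set struct, descriptors, buffer views and, for host-only pools, space for
 * the buffer views' surface state data.
 */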
1533 static size_t
1534 anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout *layout,
1535                                bool host_only, uint32_t var_desc_count)
1536 {
1537    const uint32_t descriptor_count =
1538       set_layout_descriptor_count(layout, var_desc_count);
1539    const uint32_t buffer_view_count =
1540       set_layout_buffer_view_count(layout, var_desc_count);
1541 
1542    return sizeof(struct anv_descriptor_set) +
1543           descriptor_count * sizeof(struct anv_descriptor) +
1544           buffer_view_count * sizeof(struct anv_buffer_view) +
1545           (host_only ? buffer_view_count * ANV_SURFACE_STATE_SIZE : 0);
1546 }
1547 
1548 static VkResult
1549 anv_descriptor_set_create(struct anv_device *device,
1550                           struct anv_descriptor_pool *pool,
1551                           struct anv_descriptor_set_layout *layout,
1552                           uint32_t var_desc_count,
1553                           struct anv_descriptor_set **out_set)
1554 {
1555    struct anv_descriptor_set *set;
1556    const size_t size = anv_descriptor_set_layout_size(layout,
1557                                                       pool->host_only,
1558                                                       var_desc_count);
1559 
1560    VkResult result = anv_descriptor_pool_alloc_set(pool, size, &set);
1561    if (result != VK_SUCCESS)
1562       return result;
1563 
1564    uint32_t descriptor_buffer_surface_size, descriptor_buffer_sampler_size;
1565    anv_descriptor_set_layout_descriptor_buffer_size(layout, var_desc_count,
1566                                                     &descriptor_buffer_surface_size,
1567                                                     &descriptor_buffer_sampler_size);
1568 
1569    set->desc_surface_state = ANV_STATE_NULL;
1570    set->is_push = false;
1571 
1572    if (descriptor_buffer_surface_size) {
1573       result = anv_descriptor_pool_heap_alloc(pool, &pool->surfaces,
1574                                               descriptor_buffer_surface_size,
1575                                               ANV_UBO_ALIGNMENT,
1576                                               &set->desc_surface_mem);
1577       if (result != VK_SUCCESS) {
1578          anv_descriptor_pool_free_set(pool, set);
1579          return result;
1580       }
1581 
1582       set->desc_surface_addr = (struct anv_address) {
1583          .bo = pool->surfaces.bo,
1584          .offset = set->desc_surface_mem.offset,
1585       };
1586       set->desc_offset = anv_address_physical(set->desc_surface_addr) -
1587                          device->physical->va.internal_surface_state_pool.addr;
1588 
1589       enum isl_format format =
1590          anv_isl_format_for_descriptor_type(device,
1591                                             VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
1592 
1593       if (!pool->host_only) {
1594          set->desc_surface_state = anv_descriptor_pool_alloc_state(pool);
1595          if (set->desc_surface_state.map == NULL) {
1596             anv_descriptor_pool_free_set(pool, set);
1597             return vk_error(pool, VK_ERROR_OUT_OF_DEVICE_MEMORY);
1598          }
1599 
1600          anv_fill_buffer_surface_state(device, set->desc_surface_state.map,
1601                                        format, ISL_SWIZZLE_IDENTITY,
1602                                        ISL_SURF_USAGE_CONSTANT_BUFFER_BIT,
1603                                        set->desc_surface_addr,
1604                                        descriptor_buffer_surface_size, 1);
1605       }
1606    } else {
1607       set->desc_surface_mem = ANV_STATE_NULL;
1608       set->desc_surface_addr = ANV_NULL_ADDRESS;
1609    }
1610 
1611    if (descriptor_buffer_sampler_size) {
1612       result = anv_descriptor_pool_heap_alloc(pool, &pool->samplers,
1613                                               descriptor_buffer_sampler_size,
1614                                               ANV_SAMPLER_STATE_SIZE,
1615                                               &set->desc_sampler_mem);
1616       if (result != VK_SUCCESS) {
1617          anv_descriptor_pool_free_set(pool, set);
1618          return result;
1619       }
1620 
1621       set->desc_sampler_addr = (struct anv_address) {
1622          .bo = pool->samplers.bo,
1623          .offset = set->desc_sampler_mem.offset,
1624       };
1625    } else {
1626       set->desc_sampler_mem = ANV_STATE_NULL;
1627       set->desc_sampler_addr = ANV_NULL_ADDRESS;
1628    }
1629 
1630    vk_object_base_init(&device->vk, &set->base,
1631                        VK_OBJECT_TYPE_DESCRIPTOR_SET);
1632    set->pool = pool;
1633    set->layout = layout;
1634    anv_descriptor_set_layout_ref(layout);
1635 
1636    set->buffer_view_count =
1637       set_layout_buffer_view_count(layout, var_desc_count);
1638    set->descriptor_count =
1639       set_layout_descriptor_count(layout, var_desc_count);
1640 
1641    set->buffer_views =
1642       (struct anv_buffer_view *) &set->descriptors[set->descriptor_count];
1643 
1644    /* By zeroing the descriptors now, we can later verify that a
1645     * descriptor has not been populated with user data.
1646     */
1647    memset(set->descriptors, 0,
1648           sizeof(struct anv_descriptor) * set->descriptor_count);
1649 
1650    /* Go through and fill out immutable samplers if we have any */
1651    for (uint32_t b = 0; b < layout->binding_count; b++) {
1652       if (layout->binding[b].immutable_samplers) {
1653          for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
1654             /* The type will get changed to COMBINED_IMAGE_SAMPLER in
1655              * UpdateDescriptorSets if needed.  However, if the descriptor
1656              * set has an immutable sampler, UpdateDescriptorSets may never
1657              * touch it, so we need to make sure it's 100% valid now.
1658              *
1659              * We don't need to actually provide a sampler because the helper
1660              * will always write in the immutable sampler regardless of what
1661              * is in the sampler parameter.
1662              */
1663             VkDescriptorImageInfo info = { };
1664             anv_descriptor_set_write_image_view(device, set, &info,
1665                                                 VK_DESCRIPTOR_TYPE_SAMPLER,
1666                                                 b, i);
1667          }
1668       }
1669    }
1670 
1671    /* Allocate surface states for real descriptor sets if we're using indirect
1672     * descriptors. For host only sets, we just store the surface state data in
1673     * malloc memory.
1674     */
1675    if (device->physical->indirect_descriptors) {
1676       if (!pool->host_only) {
1677          for (uint32_t b = 0; b < set->buffer_view_count; b++) {
1678             set->buffer_views[b].general.state =
1679                anv_descriptor_pool_alloc_state(pool);
1680          }
1681       } else {
1682          void *host_surface_states =
1683             set->buffer_views + set->buffer_view_count;
1684          memset(host_surface_states, 0,
1685                 set->buffer_view_count * ANV_SURFACE_STATE_SIZE);
1686          for (uint32_t b = 0; b < set->buffer_view_count; b++) {
1687             set->buffer_views[b].general.state = (struct anv_state) {
1688                .alloc_size = ANV_SURFACE_STATE_SIZE,
1689                .map = host_surface_states + b * ANV_SURFACE_STATE_SIZE,
1690             };
1691          }
1692       }
1693    }
1694 
1695    list_addtail(&set->pool_link, &pool->desc_sets);
1696 
1697    *out_set = set;
1698 
1699    return VK_SUCCESS;
1700 }
1701 
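/* Returns all pool-side allocations owned by the set.  Host-only pools
 * never allocate real surface states, so only the indirect, non-host-only
 * path has states to put back on the free list.
 */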
1702 static void
1703 anv_descriptor_set_destroy(struct anv_device *device,
1704                            struct anv_descriptor_pool *pool,
1705                            struct anv_descriptor_set *set)
1706 {
1707    anv_descriptor_set_layout_unref(device, set->layout);
1708 
1709    if (set->desc_surface_mem.alloc_size) {
1710       anv_descriptor_pool_heap_free(&pool->surfaces, set->desc_surface_mem);
1711       if (set->desc_surface_state.alloc_size)
1712          anv_descriptor_pool_free_state(pool, set->desc_surface_state);
1713    }
1714 
1715    if (set->desc_sampler_mem.alloc_size)
1716       anv_descriptor_pool_heap_free(&pool->samplers, set->desc_sampler_mem);
1717 
1718    if (device->physical->indirect_descriptors) {
1719       if (!pool->host_only) {
1720          for (uint32_t b = 0; b < set->buffer_view_count; b++) {
1721             if (set->buffer_views[b].general.state.alloc_size) {
1722                anv_descriptor_pool_free_state(
1723                   pool, set->buffer_views[b].general.state);
1724             }
1725          }
1726       }
1727    }
1728 
1729    list_del(&set->pool_link);
1730 
1731    vk_object_base_finish(&set->base);
1732    anv_descriptor_pool_free_set(pool, set);
1733 }
1734 
1735 VkResult anv_AllocateDescriptorSets(
1736     VkDevice                                    _device,
1737     const VkDescriptorSetAllocateInfo*          pAllocateInfo,
1738     VkDescriptorSet*                            pDescriptorSets)
1739 {
1740    ANV_FROM_HANDLE(anv_device, device, _device);
1741    ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);
1742 
1743    VkResult result = VK_SUCCESS;
1744    struct anv_descriptor_set *set = NULL;
1745    uint32_t i;
1746 
1747    const VkDescriptorSetVariableDescriptorCountAllocateInfo *vdcai =
1748       vk_find_struct_const(pAllocateInfo->pNext,
1749                            DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO);
1750 
1751    for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
1752       ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
1753                       pAllocateInfo->pSetLayouts[i]);
1754 
1755       uint32_t var_desc_count = 0;
1756       if (vdcai != NULL && vdcai->descriptorSetCount > 0) {
1757          assert(vdcai->descriptorSetCount == pAllocateInfo->descriptorSetCount);
1758          var_desc_count = vdcai->pDescriptorCounts[i];
1759       }
1760 
1761       result = anv_descriptor_set_create(device, pool, layout,
1762                                          var_desc_count, &set);
1763       if (result != VK_SUCCESS)
1764          break;
1765 
1766       pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
1767    }
1768 
1769    if (result != VK_SUCCESS) {
1770       anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
1771                              i, pDescriptorSets);
1772       /* The Vulkan 1.3.228 spec, section 14.2.3. Allocation of Descriptor Sets:
1773        *
1774        *   "If the creation of any of those descriptor sets fails, then the
1775        *    implementation must destroy all successfully created descriptor
1776        *    set objects from this command, set all entries of the
1777        *    pDescriptorSets array to VK_NULL_HANDLE and return the error."
1778        */
1779       for (i = 0; i < pAllocateInfo->descriptorSetCount; i++)
1780          pDescriptorSets[i] = VK_NULL_HANDLE;
1781 
1782    }
1783 
1784    return result;
1785 }
1786 
1787 VkResult anv_FreeDescriptorSets(
1788     VkDevice                                    _device,
1789     VkDescriptorPool                            descriptorPool,
1790     uint32_t                                    count,
1791     const VkDescriptorSet*                      pDescriptorSets)
1792 {
1793    ANV_FROM_HANDLE(anv_device, device, _device);
1794    ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);
1795 
1796    for (uint32_t i = 0; i < count; i++) {
1797       ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);
1798 
1799       if (!set)
1800          continue;
1801 
1802       anv_descriptor_set_destroy(device, pool, set);
1803    }
1804 
1805    return VK_SUCCESS;
1806 }
1807 
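/* (Re)initializes the descriptor set embedded in a push descriptor set.
 * A layout change drops the reference to the previous layout, while the
 * descriptor buffers are only reallocated when the current ones are too
 * small or have already been consumed by the GPU.
 */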
1808 bool
1809 anv_push_descriptor_set_init(struct anv_cmd_buffer *cmd_buffer,
1810                              struct anv_push_descriptor_set *push_set,
1811                              struct anv_descriptor_set_layout *layout)
1812 {
1813    struct anv_descriptor_set *set = &push_set->set;
1814 
1815    if (set->layout != layout) {
1816       if (set->layout) {
1817          anv_descriptor_set_layout_unref(cmd_buffer->device, set->layout);
1818       } else {
1819          /* one-time initialization */
1820          vk_object_base_init(&cmd_buffer->device->vk, &set->base,
1821                              VK_OBJECT_TYPE_DESCRIPTOR_SET);
1822          set->is_push = true;
1823          set->buffer_views = push_set->buffer_views;
1824       }
1825 
1826       anv_descriptor_set_layout_ref(layout);
1827       set->layout = layout;
1828       set->generate_surface_states = 0;
1829    }
1830 
1831    assert(set->is_push && set->buffer_views);
1832    set->size = anv_descriptor_set_layout_size(layout, false /* host_only */, 0);
1833    set->buffer_view_count = layout->buffer_view_count;
1834    set->descriptor_count = layout->descriptor_count;
1835 
1836    if (layout->descriptor_buffer_surface_size &&
1837        (push_set->set_used_on_gpu ||
1838         set->desc_surface_mem.alloc_size < layout->descriptor_buffer_surface_size)) {
1839       struct anv_physical_device *pdevice = cmd_buffer->device->physical;
1840       struct anv_state_stream *push_stream =
1841          pdevice->indirect_descriptors ?
1842          &cmd_buffer->indirect_push_descriptor_stream :
1843          &cmd_buffer->surface_state_stream;
1844       uint64_t push_base_address = pdevice->indirect_descriptors ?
1845          pdevice->va.indirect_push_descriptor_pool.addr :
1846          pdevice->va.internal_surface_state_pool.addr;
1847 
1848       uint32_t surface_size, sampler_size;
1849       anv_descriptor_set_layout_descriptor_buffer_size(layout, 0,
1850                                                        &surface_size,
1851                                                        &sampler_size);
1852 
1853       /* The previous buffer is either actively used by some GPU command (so
1854        * we can't modify it) or is too small.  Allocate a new one.
1855        */
1856       struct anv_state desc_surface_mem =
1857          anv_state_stream_alloc(push_stream, surface_size, ANV_UBO_ALIGNMENT);
1858       if (desc_surface_mem.map == NULL)
1859          return false;
1860 
1861       if (set->desc_surface_mem.alloc_size) {
1862          /* TODO: Do we really need to copy all the time? */
1863          memcpy(desc_surface_mem.map, set->desc_surface_mem.map,
1864                 MIN2(desc_surface_mem.alloc_size,
1865                      set->desc_surface_mem.alloc_size));
1866       }
1867       set->desc_surface_mem = desc_surface_mem;
1868 
1869       set->desc_surface_addr = anv_state_pool_state_address(
1870          push_stream->state_pool,
1871          set->desc_surface_mem);
1872       set->desc_offset = anv_address_physical(set->desc_surface_addr) -
1873                          push_base_address;
1874    }
1875 
1876    if (layout->descriptor_buffer_sampler_size &&
1877        (push_set->set_used_on_gpu ||
1878         set->desc_sampler_mem.alloc_size < layout->descriptor_buffer_sampler_size)) {
1879       struct anv_physical_device *pdevice = cmd_buffer->device->physical;
1880       assert(!pdevice->indirect_descriptors);
1881       struct anv_state_stream *push_stream = &cmd_buffer->dynamic_state_stream;
1882 
1883       uint32_t surface_size, sampler_size;
1884       anv_descriptor_set_layout_descriptor_buffer_size(layout, 0,
1885                                                        &surface_size,
1886                                                        &sampler_size);
1887 
1888       /* The previous buffer is either actively used by some GPU command (so
1889        * we can't modify it) or is too small.  Allocate a new one.
1890        */
1891       struct anv_state desc_sampler_mem =
1892          anv_state_stream_alloc(push_stream, sampler_size, ANV_SAMPLER_STATE_SIZE);
1893       if (desc_sampler_mem.map == NULL)
1894          return false;
1895 
1896       if (set->desc_sampler_mem.alloc_size) {
1897          /* TODO: Do we really need to copy all the time? */
1898          memcpy(desc_sampler_mem.map, set->desc_sampler_mem.map,
1899                 MIN2(desc_sampler_mem.alloc_size,
1900                      set->desc_sampler_mem.alloc_size));
1901       }
1902       set->desc_sampler_mem = desc_sampler_mem;
1903 
1904       set->desc_sampler_addr = anv_state_pool_state_address(
1905          push_stream->state_pool,
1906          set->desc_sampler_mem);
1907    }
1908 
1909    if (push_set->set_used_on_gpu) {
1910       set->desc_surface_state = ANV_STATE_NULL;
1911       push_set->set_used_on_gpu = false;
1912    }
1913 
1914    return true;
1915 }
1916 
1917 void
1918 anv_push_descriptor_set_finish(struct anv_push_descriptor_set *push_set)
1919 {
1920    struct anv_descriptor_set *set = &push_set->set;
1921    if (set->layout) {
1922       struct anv_device *device =
1923          container_of(set->base.device, struct anv_device, vk);
1924       anv_descriptor_set_layout_unref(device, set->layout);
1925    }
1926 }
1927 
1928 static uint32_t
1929 anv_surface_state_to_handle(struct anv_physical_device *device,
1930                             struct anv_state state)
1931 {
1932    /* Bits 31:12 of the bindless surface offset in the extended message
1933     * descriptor are bits 25:6 of the byte-based address.
1934     */
1935    assert(state.offset >= 0);
1936    uint32_t offset = state.offset;
1937    if (device->uses_ex_bso) {
1938       assert((offset & 0x3f) == 0);
1939       return offset;
1940    } else {
1941       assert((offset & 0x3f) == 0 && offset < (1 << 26));
1942       return offset << 6;
1943    }
1944 }
1945 
1946 static const void *
1947 anv_image_view_surface_data_for_plane_layout(struct anv_image_view *image_view,
1948                                              VkDescriptorType desc_type,
1949                                              unsigned plane,
1950                                              VkImageLayout layout)
1951 {
1952    if (desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
1953        desc_type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
1954        desc_type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) {
1955       return layout == VK_IMAGE_LAYOUT_GENERAL ?
1956          &image_view->planes[plane].general_sampler.state_data :
1957          &image_view->planes[plane].optimal_sampler.state_data;
1958    }
1959 
1960    if (desc_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
1961       return &image_view->planes[plane].storage.state_data;
1962 
1963    unreachable("Invalid descriptor type");
1964 }
1965 
1966 void
1967 anv_descriptor_set_write_image_view(struct anv_device *device,
1968                                     struct anv_descriptor_set *set,
1969                                     const VkDescriptorImageInfo * const info,
1970                                     VkDescriptorType type,
1971                                     uint32_t binding,
1972                                     uint32_t element)
1973 {
1974    const struct anv_descriptor_set_binding_layout *bind_layout =
1975       &set->layout->binding[binding];
1976    struct anv_descriptor *desc =
1977       &set->descriptors[bind_layout->descriptor_index + element];
1978    struct anv_image_view *image_view = NULL;
1979    struct anv_sampler *sampler = NULL;
1980 
1981    /* We get called with just VK_DESCRIPTOR_TYPE_SAMPLER as part of descriptor
1982     * set initialization to set the bindless samplers.
1983     */
1984    assert(type == bind_layout->type ||
1985           type == VK_DESCRIPTOR_TYPE_SAMPLER ||
1986           bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT);
1987 
1988    switch (type) {
1989    case VK_DESCRIPTOR_TYPE_SAMPLER:
1990       sampler = bind_layout->immutable_samplers ?
1991                 bind_layout->immutable_samplers[element] :
1992                 anv_sampler_from_handle(info->sampler);
1993       break;
1994 
1995    case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1996       image_view = anv_image_view_from_handle(info->imageView);
1997       sampler = bind_layout->immutable_samplers ?
1998                 bind_layout->immutable_samplers[element] :
1999                 anv_sampler_from_handle(info->sampler);
2000       break;
2001 
2002    case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2003    case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2004    case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2005       image_view = anv_image_view_from_handle(info->imageView);
2006       break;
2007 
2008    default:
2009       unreachable("invalid descriptor type");
2010    }
2011 
2012    *desc = (struct anv_descriptor) {
2013       .type = type,
2014       .layout = info->imageLayout,
2015       .image_view = image_view,
2016       .sampler = sampler,
2017    };
2018 
2019    void *desc_surface_map = set->desc_surface_mem.map +
2020       bind_layout->descriptor_surface_offset +
2021       element * bind_layout->descriptor_surface_stride;
2022    void *desc_sampler_map = set->desc_sampler_mem.map +
2023       bind_layout->descriptor_sampler_offset +
2024       element * bind_layout->descriptor_sampler_stride;
2025 
2026    enum anv_descriptor_data data =
2027       bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
2028       anv_descriptor_data_for_type(device->physical, set->layout->type, type) :
2029       bind_layout->data;
2030 
2031    if (data & ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE) {
2032       struct anv_sampled_image_descriptor desc_data[3];
2033       memset(desc_data, 0, sizeof(desc_data));
2034 
2035       if (image_view) {
2036          for (unsigned p = 0; p < image_view->n_planes; p++) {
2037             const struct anv_surface_state *sstate =
2038                anv_image_view_texture_surface_state(image_view, p,
2039                                                     desc->layout);
2040             desc_data[p].image =
2041                anv_surface_state_to_handle(device->physical, sstate->state);
2042          }
2043       }
2044 
2045       if (sampler) {
2046          for (unsigned p = 0; p < sampler->n_planes; p++)
2047             desc_data[p].sampler = sampler->bindless_state.offset + p * 32;
2048       }
2049 
2050       /* We may have max_plane_count == 0 if this isn't a sampled image,
2051        * but it can be no more than the size of our array of handles.
2052        */
2053       assert(bind_layout->max_plane_count <= ARRAY_SIZE(desc_data));
2054       memcpy(desc_surface_map, desc_data,
2055              MAX2(1, bind_layout->max_plane_count) * sizeof(desc_data[0]));
2056    }
2057 
2058    if (data & ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE) {
2059       if (image_view) {
2060          assert(image_view->n_planes == 1);
2061          struct anv_storage_image_descriptor desc_data = {
2062             .vanilla = anv_surface_state_to_handle(
2063                device->physical,
2064                anv_image_view_storage_surface_state(image_view)->state),
2065             .image_depth = image_view->vk.storage.z_slice_count,
2066          };
2067          memcpy(desc_surface_map, &desc_data, sizeof(desc_data));
2068       } else {
2069          memset(desc_surface_map, 0, bind_layout->descriptor_surface_stride);
2070       }
2071    }
2072 
2073    if (data & ANV_DESCRIPTOR_SAMPLER) {
2074       if (sampler) {
2075          for (unsigned p = 0; p < sampler->n_planes; p++) {
2076             memcpy(desc_sampler_map + p * ANV_SAMPLER_STATE_SIZE,
2077                    sampler->state[p], ANV_SAMPLER_STATE_SIZE);
2078          }
2079       } else {
2080          memset(desc_sampler_map, 0, bind_layout->descriptor_sampler_stride);
2081       }
2082    }
2083 
2084    if (data & ANV_DESCRIPTOR_SURFACE) {
2085       unsigned max_plane_count = image_view ? image_view->n_planes : 1;
2086 
2087       for (unsigned p = 0; p < max_plane_count; p++) {
2088          void *plane_map = desc_surface_map + p * ANV_SURFACE_STATE_SIZE;
2089 
2090          if (image_view) {
2091             memcpy(plane_map,
2092                    anv_image_view_surface_data_for_plane_layout(image_view, type,
2093                                                                 p, desc->layout),
2094                    ANV_SURFACE_STATE_SIZE);
2095          } else {
2096             memcpy(plane_map, &device->host_null_surface_state, ANV_SURFACE_STATE_SIZE);
2097          }
2098       }
2099    }
2100 
2101    if (data & ANV_DESCRIPTOR_SURFACE_SAMPLER) {
2102       unsigned max_plane_count =
2103          MAX2(image_view ? image_view->n_planes : 1,
2104               sampler ? sampler->n_planes : 1);
2105 
2106       for (unsigned p = 0; p < max_plane_count; p++) {
2107          void *plane_map = desc_surface_map + p * 2 * ANV_SURFACE_STATE_SIZE;
2108 
2109          if (image_view) {
2110             memcpy(plane_map,
2111                    anv_image_view_surface_data_for_plane_layout(image_view, type,
2112                                                                 p, desc->layout),
2113                    ANV_SURFACE_STATE_SIZE);
2114          } else {
2115             memcpy(plane_map, &device->host_null_surface_state, ANV_SURFACE_STATE_SIZE);
2116          }
2117 
2118          if (sampler) {
2119             memcpy(plane_map + ANV_SURFACE_STATE_SIZE,
2120                    sampler->state[p], ANV_SAMPLER_STATE_SIZE);
2121          } else {
2122             memset(plane_map + ANV_SURFACE_STATE_SIZE, 0,
2123                    ANV_SAMPLER_STATE_SIZE);
2124          }
2125       }
2126    }
2127 }
2128 
2129 static const void *
2130 anv_buffer_view_surface_data(struct anv_buffer_view *buffer_view,
2131                              VkDescriptorType desc_type)
2132 {
2133    if (desc_type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER)
2134       return &buffer_view->general.state_data;
2135 
2136    if (desc_type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)
2137       return &buffer_view->storage.state_data;
2138 
2139    unreachable("Invalid descriptor type");
2140 }
2141 
2142 void
2143 anv_descriptor_set_write_buffer_view(struct anv_device *device,
2144                                      struct anv_descriptor_set *set,
2145                                      VkDescriptorType type,
2146                                      struct anv_buffer_view *buffer_view,
2147                                      uint32_t binding,
2148                                      uint32_t element)
2149 {
2150    const struct anv_descriptor_set_binding_layout *bind_layout =
2151       &set->layout->binding[binding];
2152    struct anv_descriptor *desc =
2153       &set->descriptors[bind_layout->descriptor_index + element];
2154 
2155    assert(type == bind_layout->type ||
2156           bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT);
2157 
2158    *desc = (struct anv_descriptor) {
2159       .type = type,
2160       .buffer_view = buffer_view,
2161    };
2162 
2163    enum anv_descriptor_data data =
2164       bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
2165       anv_descriptor_data_for_type(device->physical, set->layout->type, type) :
2166       bind_layout->data;
2167 
2168    void *desc_map = set->desc_surface_mem.map +
2169                     bind_layout->descriptor_surface_offset +
2170                     element * bind_layout->descriptor_surface_stride;
2171 
2172    if (buffer_view == NULL) {
2173       if (data & ANV_DESCRIPTOR_SURFACE)
2174          memcpy(desc_map, &device->host_null_surface_state, ANV_SURFACE_STATE_SIZE);
2175       else
2176          memset(desc_map, 0, bind_layout->descriptor_surface_stride);
2177       return;
2178    }
2179 
2180    if (data & ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE) {
2181       struct anv_sampled_image_descriptor desc_data = {
2182          .image = anv_surface_state_to_handle(
2183             device->physical, buffer_view->general.state),
2184       };
2185       memcpy(desc_map, &desc_data, sizeof(desc_data));
2186    }
2187 
2188    if (data & ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE) {
2189       struct anv_storage_image_descriptor desc_data = {
2190          .vanilla = anv_surface_state_to_handle(
2191             device->physical, buffer_view->storage.state),
2192       };
2193       memcpy(desc_map, &desc_data, sizeof(desc_data));
2194    }
2195 
2196    if (data & ANV_DESCRIPTOR_SURFACE) {
2197       memcpy(desc_map,
2198              anv_buffer_view_surface_data(buffer_view, type),
2199              ANV_SURFACE_STATE_SIZE);
2200    }
2201 }
2202 
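/* Writes the RENDER_SURFACE_STATE for a descriptor's buffer view into the
 * given surface state allocation.  Push descriptor sets skip this at write
 * time and instead generate their states when flushed (see the is_push
 * handling in anv_descriptor_set_write_buffer() below).
 */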
2203 void
2204 anv_descriptor_write_surface_state(struct anv_device *device,
2205                                    struct anv_descriptor *desc,
2206                                    struct anv_state surface_state)
2207 {
2208    assert(surface_state.alloc_size);
2209 
2210    struct anv_buffer_view *bview = desc->buffer_view;
2211 
2212    bview->general.state = surface_state;
2213 
2214    isl_surf_usage_flags_t usage =
2215       anv_isl_usage_for_descriptor_type(desc->type);
2216 
2217    enum isl_format format =
2218       anv_isl_format_for_descriptor_type(device, desc->type);
2219    anv_fill_buffer_surface_state(device, bview->general.state.map,
2220                                  format, ISL_SWIZZLE_IDENTITY,
2221                                  usage, bview->address, bview->vk.range, 1);
2222 }
2223 
2224 void
2225 anv_descriptor_set_write_buffer(struct anv_device *device,
2226                                 struct anv_descriptor_set *set,
2227                                 VkDescriptorType type,
2228                                 struct anv_buffer *buffer,
2229                                 uint32_t binding,
2230                                 uint32_t element,
2231                                 VkDeviceSize offset,
2232                                 VkDeviceSize range)
2233 {
2234    const struct anv_descriptor_set_binding_layout *bind_layout =
2235       &set->layout->binding[binding];
2236    const uint32_t descriptor_index = bind_layout->descriptor_index + element;
2237    struct anv_descriptor *desc = &set->descriptors[descriptor_index];
2238 
2239    assert(type == bind_layout->type ||
2240           bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT);
2241 
2242    *desc = (struct anv_descriptor) {
2243       .type = type,
2244       .offset = offset,
2245       .range = range,
2246       .buffer = buffer,
2247    };
2248 
2249    enum anv_descriptor_data data =
2250       bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
2251       anv_descriptor_data_for_type(device->physical, set->layout->type, type) :
2252       bind_layout->data;
2253 
2254    void *desc_map = set->desc_surface_mem.map +
2255                     bind_layout->descriptor_surface_offset +
2256                     element * bind_layout->descriptor_surface_stride;
2257 
2258    if (buffer == NULL) {
2259       if (data & ANV_DESCRIPTOR_SURFACE)
2260          memcpy(desc_map, &device->host_null_surface_state, ANV_SURFACE_STATE_SIZE);
2261       else
2262          memset(desc_map, 0, bind_layout->descriptor_surface_stride);
2263       return;
2264    }
2265 
2266    struct anv_address bind_addr = anv_address_add(buffer->address, offset);
2267    desc->bind_range = vk_buffer_range(&buffer->vk, offset, range);
2268 
2269    /* We report a bounds checking alignment of 32B for the sake of block
2270     * messages which read an entire register's worth at a time.
2271     */
2272    if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
2273        type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)
2274       desc->bind_range = align64(desc->bind_range, ANV_UBO_ALIGNMENT);
2275 
2276    if (data & ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE) {
2277       struct anv_address_range_descriptor desc_data = {
2278          .address = anv_address_physical(bind_addr),
2279          .range = desc->bind_range,
2280       };
2281       memcpy(desc_map, &desc_data, sizeof(desc_data));
2282    }
2283 
2284    if (data & ANV_DESCRIPTOR_SURFACE) {
2285       isl_surf_usage_flags_t usage =
2286          anv_isl_usage_for_descriptor_type(desc->type);
2287 
2288       enum isl_format format =
2289          anv_isl_format_for_descriptor_type(device, desc->type);
2290 
2291       isl_buffer_fill_state(&device->isl_dev, desc_map,
2292                             .address = anv_address_physical(bind_addr),
2293                             .mocs = isl_mocs(&device->isl_dev, usage,
2294                                              bind_addr.bo && anv_bo_is_external(bind_addr.bo)),
2295                             .size_B = desc->bind_range,
2296                             .format = format,
2297                             .swizzle = ISL_SWIZZLE_IDENTITY,
2298                             .stride_B = 1);
2299    }
2300 
2301    if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
2302        type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)
2303       return;
2304 
2305    if (data & ANV_DESCRIPTOR_BUFFER_VIEW) {
2306       struct anv_buffer_view *bview =
2307          &set->buffer_views[bind_layout->buffer_view_index + element];
2308 
2309       desc->set_buffer_view = bview;
2310 
2311       bview->vk.range = desc->bind_range;
2312       bview->address = bind_addr;
2313 
2314       if (set->is_push) {
2315          set->generate_surface_states |= BITFIELD_BIT(descriptor_index);
2316          /* Reset the surface state to make sure
2317           * genX(cmd_buffer_emit_push_descriptor_surfaces) generates a new
2318           * one.
2319           */
2320          bview->general.state = ANV_STATE_NULL;
2321       } else {
2322          anv_descriptor_write_surface_state(device, desc, bview->general.state);
2323       }
2324    }
2325 }
2326 
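/* Inline uniform data lives directly in the set's surface buffer; offset
 * and size are in bytes, matching the inline uniform block convention of
 * using the descriptor count as a byte count.
 */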
2327 void
2328 anv_descriptor_set_write_inline_uniform_data(struct anv_device *device,
2329                                              struct anv_descriptor_set *set,
2330                                              uint32_t binding,
2331                                              const void *data,
2332                                              size_t offset,
2333                                              size_t size)
2334 {
2335    const struct anv_descriptor_set_binding_layout *bind_layout =
2336       &set->layout->binding[binding];
2337 
2338    assert(bind_layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM);
2339 
2340    void *desc_map = set->desc_surface_mem.map +
2341                     bind_layout->descriptor_surface_offset;
2342 
2343    memcpy(desc_map + offset, data, size);
2344 }
2345 
2346 void
2347 anv_descriptor_set_write_acceleration_structure(struct anv_device *device,
2348                                                 struct anv_descriptor_set *set,
2349                                                 struct vk_acceleration_structure *accel,
2350                                                 uint32_t binding,
2351                                                 uint32_t element)
2352 {
2353    const struct anv_descriptor_set_binding_layout *bind_layout =
2354       &set->layout->binding[binding];
2355    struct anv_descriptor *desc =
2356       &set->descriptors[bind_layout->descriptor_index + element];
2357 
2358    assert(bind_layout->data & ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE);
2359    *desc = (struct anv_descriptor) {
2360       .type = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
2361       .accel_struct = accel,
2362    };
2363 
2364    struct anv_address_range_descriptor desc_data = { };
2365    if (accel != NULL) {
2366       desc_data.address = vk_acceleration_structure_get_va(accel);
2367       desc_data.range = accel->size;
2368    }
2369    assert(sizeof(desc_data) <= bind_layout->descriptor_surface_stride);
2370 
2371    void *desc_map = set->desc_surface_mem.map +
2372                     bind_layout->descriptor_surface_offset +
2373                     element * bind_layout->descriptor_surface_stride;
2374    memcpy(desc_map, &desc_data, sizeof(desc_data));
2375 }
2376 
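/* Dispatches an array of VkWriteDescriptorSet to the type-specific helpers
 * above.  When set_override is non-NULL, every write lands in that set
 * instead of write->dstSet (the push descriptor path, for instance, writes
 * into its own embedded set).
 */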
2377 void
2378 anv_descriptor_set_write(struct anv_device *device,
2379                          struct anv_descriptor_set *set_override,
2380                          uint32_t write_count,
2381                          const VkWriteDescriptorSet *writes)
2382 {
2383    for (uint32_t i = 0; i < write_count; i++) {
2384       const VkWriteDescriptorSet *write = &writes[i];
2385       struct anv_descriptor_set *set = unlikely(set_override) ?
2386          set_override :
2387          anv_descriptor_set_from_handle(write->dstSet);
2388 
2389       switch (write->descriptorType) {
2390       case VK_DESCRIPTOR_TYPE_SAMPLER:
2391       case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2392       case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2393       case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2394       case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2395          for (uint32_t j = 0; j < write->descriptorCount; j++) {
2396             anv_descriptor_set_write_image_view(device, set,
2397                                                 write->pImageInfo + j,
2398                                                 write->descriptorType,
2399                                                 write->dstBinding,
2400                                                 write->dstArrayElement + j);
2401          }
2402          break;
2403 
2404       case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
2405       case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
2406          for (uint32_t j = 0; j < write->descriptorCount; j++) {
2407             ANV_FROM_HANDLE(anv_buffer_view, bview,
2408                             write->pTexelBufferView[j]);
2409 
2410             anv_descriptor_set_write_buffer_view(device, set,
2411                                                  write->descriptorType,
2412                                                  bview,
2413                                                  write->dstBinding,
2414                                                  write->dstArrayElement + j);
2415          }
2416          break;
2417 
2418       case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
2419       case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
2420       case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
2421       case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
2422          for (uint32_t j = 0; j < write->descriptorCount; j++) {
2423             ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);
2424 
2425             anv_descriptor_set_write_buffer(device, set,
2426                                             write->descriptorType,
2427                                             buffer,
2428                                             write->dstBinding,
2429                                             write->dstArrayElement + j,
2430                                             write->pBufferInfo[j].offset,
2431                                             write->pBufferInfo[j].range);
2432          }
2433          break;
2434 
2435       case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK: {
2436          const VkWriteDescriptorSetInlineUniformBlock *inline_write =
2437             vk_find_struct_const(write->pNext,
2438                                  WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK);
2439          assert(inline_write->dataSize == write->descriptorCount);
2440          anv_descriptor_set_write_inline_uniform_data(device, set,
2441                                                       write->dstBinding,
2442                                                       inline_write->pData,
2443                                                       write->dstArrayElement,
2444                                                       inline_write->dataSize);
2445          break;
2446       }
2447 
2448       case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {
2449          const VkWriteDescriptorSetAccelerationStructureKHR *accel_write =
2450             vk_find_struct_const(write, WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR);
2451          assert(accel_write->accelerationStructureCount ==
2452                 write->descriptorCount);
2453          for (uint32_t j = 0; j < write->descriptorCount; j++) {
2454             ANV_FROM_HANDLE(vk_acceleration_structure, accel,
2455                             accel_write->pAccelerationStructures[j]);
2456             anv_descriptor_set_write_acceleration_structure(device, set, accel,
2457                                                             write->dstBinding,
2458                                                             write->dstArrayElement + j);
2459          }
2460          break;
2461       }
2462 
2463       default:
2464          break;
2465       }
2466    }
2467 }
2468 
2469 void anv_UpdateDescriptorSets(
2470     VkDevice                                    _device,
2471     uint32_t                                    descriptorWriteCount,
2472     const VkWriteDescriptorSet*                 pDescriptorWrites,
2473     uint32_t                                    descriptorCopyCount,
2474     const VkCopyDescriptorSet*                  pDescriptorCopies)
2475 {
2476    ANV_FROM_HANDLE(anv_device, device, _device);
2477 
2478    anv_descriptor_set_write(device, NULL, descriptorWriteCount,
2479                             pDescriptorWrites);
2480 
2481    for (uint32_t i = 0; i < descriptorCopyCount; i++) {
2482       const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
2483       ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
2484       ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);
2485 
2486       const struct anv_descriptor_set_binding_layout *src_layout =
2487          &src->layout->binding[copy->srcBinding];
2488       const struct anv_descriptor_set_binding_layout *dst_layout =
2489          &dst->layout->binding[copy->dstBinding];
2490 
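      /* Inline uniform blocks are stored as raw bytes in the descriptor
       * surface memory, so the copy is a plain byte copy and
       * srcArrayElement / dstArrayElement are byte offsets.
       */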
      if (src_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         anv_descriptor_set_write_inline_uniform_data(device, dst,
                                                      copy->dstBinding,
                                                      src->desc_surface_mem.map +
                                                      src_layout->descriptor_surface_offset + copy->srcArrayElement,
                                                      copy->dstArrayElement,
                                                      copy->descriptorCount);
         continue;
      }

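      /* The source and destination bindings can have different per-element
       * strides (e.g. with VK_DESCRIPTOR_TYPE_MUTABLE_EXT), so only copy the
       * smaller of the two element sizes.
       */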
      uint32_t copy_surface_element_size =
         MIN2(src_layout->descriptor_surface_stride,
              dst_layout->descriptor_surface_stride);
      uint32_t copy_sampler_element_size =
         MIN2(src_layout->descriptor_sampler_stride,
              dst_layout->descriptor_sampler_stride);
      for (uint32_t j = 0; j < copy->descriptorCount; j++) {
         struct anv_descriptor *src_desc =
            &src->descriptors[src_layout->descriptor_index +
                              copy->srcArrayElement + j];
         struct anv_descriptor *dst_desc =
            &dst->descriptors[dst_layout->descriptor_index +
                              copy->dstArrayElement + j];

         /* Copy the memory containing one of the following structures read
          * by the shaders:
          *    - anv_sampled_image_descriptor
          *    - anv_storage_image_descriptor
          *    - anv_address_range_descriptor
          *    - RENDER_SURFACE_STATE
          *    - SAMPLER_STATE
          */
         memcpy(dst->desc_surface_mem.map +
                dst_layout->descriptor_surface_offset +
                (copy->dstArrayElement + j) * dst_layout->descriptor_surface_stride,
                src->desc_surface_mem.map +
                src_layout->descriptor_surface_offset +
                (copy->srcArrayElement + j) * src_layout->descriptor_surface_stride,
                copy_surface_element_size);
         memcpy(dst->desc_sampler_mem.map +
                dst_layout->descriptor_sampler_offset +
                (copy->dstArrayElement + j) * dst_layout->descriptor_sampler_stride,
                src->desc_sampler_mem.map +
                src_layout->descriptor_sampler_offset +
                (copy->srcArrayElement + j) * src_layout->descriptor_sampler_stride,
                copy_sampler_element_size);

         /* Copy the CPU-side anv_descriptor data. */
         *dst_desc = *src_desc;

         /* If the CPU-side data may contain a buffer view, we need to copy
          * that as well.
          */
         const enum anv_descriptor_data data =
            src_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
            anv_descriptor_data_for_type(device->physical,
                                         src->layout->type,
                                         src_desc->type) :
            src_layout->data;
         if (data & ANV_DESCRIPTOR_BUFFER_VIEW) {
            struct anv_buffer_view *src_bview =
               &src->buffer_views[src_layout->buffer_view_index +
                                  copy->srcArrayElement + j];
            struct anv_buffer_view *dst_bview =
               &dst->buffer_views[dst_layout->buffer_view_index +
                                  copy->dstArrayElement + j];

            dst_desc->set_buffer_view = dst_bview;

            dst_bview->vk.range = src_bview->vk.range;
            dst_bview->address = src_bview->address;

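            /* Duplicate the packed RENDER_SURFACE_STATE so the destination
             * view can be used independently of the source.
             */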
            memcpy(dst_bview->general.state.map,
                   src_bview->general.state.map,
                   ANV_SURFACE_STATE_SIZE);
         }
      }
   }
}
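
/* Illustrative sketch (not part of the driver): what a typical application
 * call into this path looks like. The buffer, descriptor set and binding
 * index below are hypothetical.
 *
 *    VkDescriptorBufferInfo info = {
 *       .buffer = my_uniform_buffer,          // hypothetical VkBuffer
 *       .offset = 0,
 *       .range  = VK_WHOLE_SIZE,
 *    };
 *    VkWriteDescriptorSet write = {
 *       .sType           = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
 *       .dstSet          = my_set,            // hypothetical VkDescriptorSet
 *       .dstBinding      = 0,
 *       .dstArrayElement = 0,
 *       .descriptorCount = 1,
 *       .descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .pBufferInfo     = &info,
 *    };
 *    vkUpdateDescriptorSets(device, 1, &write, 0, NULL);
 */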

/*
 * Descriptor update templates.
 */

void
anv_descriptor_set_write_template(struct anv_device *device,
                                  struct anv_descriptor_set *set,
                                  const struct vk_descriptor_update_template *template,
                                  const void *data)
{
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct vk_descriptor_template_entry *entry =
         &template->entries[i];

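      /* Each template entry describes a run of descriptors of a single type:
       * the data for array element j is read at
       * data + entry->offset + j * entry->stride.
       */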
      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;
            anv_descriptor_set_write_image_view(device, set,
                                                info, entry->type,
                                                entry->binding,
                                                entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *_bview =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);

            anv_descriptor_set_write_buffer_view(device, set,
                                                 entry->type,
                                                 bview,
                                                 entry->binding,
                                                 entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);

            anv_descriptor_set_write_buffer(device, set,
                                            entry->type,
                                            buffer,
                                            entry->binding,
                                            entry->array_element + j,
                                            info->offset, info->range);
         }
         break;

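      /* For inline uniform blocks the template data at entry->offset is the
       * uniform data itself, and array_element / array_count are a byte
       * offset and byte count.
       */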
      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         anv_descriptor_set_write_inline_uniform_data(device, set,
                                                      entry->binding,
                                                      data + entry->offset,
                                                      entry->array_element,
                                                      entry->array_count);
         break;

      case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            VkAccelerationStructureKHR *accel_obj =
               (VkAccelerationStructureKHR *)(data + entry->offset + j * entry->stride);
            ANV_FROM_HANDLE(vk_acceleration_structure, accel, *accel_obj);

            anv_descriptor_set_write_acceleration_structure(device, set,
                                                            accel,
                                                            entry->binding,
                                                            entry->array_element + j);
         }
         break;

      default:
         break;
      }
   }
}

void anv_UpdateDescriptorSetWithTemplate(
    VkDevice                                    _device,
    VkDescriptorSet                             descriptorSet,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const void*                                 pData)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set, set, descriptorSet);
   VK_FROM_HANDLE(vk_descriptor_update_template, template,
                  descriptorUpdateTemplate);

   anv_descriptor_set_write_template(device, set, template, pData);
}
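
/* Illustrative sketch (not part of the driver): updating a set through a
 * template, which lands in anv_descriptor_set_write_template() above. The
 * struct layout, set layout and binding index below are hypothetical.
 *
 *    struct MyShaderData { VkDescriptorBufferInfo ubo; };  // app-side layout
 *
 *    VkDescriptorUpdateTemplateEntry entry = {
 *       .dstBinding      = 0,
 *       .dstArrayElement = 0,
 *       .descriptorCount = 1,
 *       .descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .offset          = offsetof(struct MyShaderData, ubo),
 *       .stride          = sizeof(struct MyShaderData),
 *    };
 *    VkDescriptorUpdateTemplateCreateInfo info = {
 *       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
 *       .descriptorUpdateEntryCount = 1,
 *       .pDescriptorUpdateEntries   = &entry,
 *       .templateType        = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
 *       .descriptorSetLayout = my_set_layout,               // hypothetical
 *    };
 *    VkDescriptorUpdateTemplate template;
 *    vkCreateDescriptorUpdateTemplate(device, &info, NULL, &template);
 *
 *    struct MyShaderData data = {
 *       .ubo = { .buffer = my_uniform_buffer,               // hypothetical
 *                .offset = 0, .range = VK_WHOLE_SIZE },
 *    };
 *    vkUpdateDescriptorSetWithTemplate(device, my_set, template, &data);
 */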