/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

#include "util/mesa-sha1.h"
#include "vk_util.h"

#include "anv_private.h"

/*
 * Descriptor set layouts.
 */

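/** Returns the descriptor data needed to represent a descriptor type.
 *
 * The result depends on the physical device because bindless samplers and
 * A64 buffer access change what data a descriptor has to carry.
 */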
static enum anv_descriptor_data
anv_descriptor_data_for_type(const struct anv_physical_device *device,
                             VkDescriptorType type)
{
   enum anv_descriptor_data data = 0;

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      data = ANV_DESCRIPTOR_SAMPLER_STATE;
      if (device->has_bindless_samplers)
         data |= ANV_DESCRIPTOR_SAMPLED_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      data = ANV_DESCRIPTOR_SURFACE_STATE |
             ANV_DESCRIPTOR_SAMPLER_STATE;
      if (device->has_bindless_samplers)
         data |= ANV_DESCRIPTOR_SAMPLED_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      data = ANV_DESCRIPTOR_SURFACE_STATE;
      break;

   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      data = ANV_DESCRIPTOR_SURFACE_STATE;
      break;

   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      data = ANV_DESCRIPTOR_SURFACE_STATE;
      data |= ANV_DESCRIPTOR_IMAGE_PARAM;
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      data = ANV_DESCRIPTOR_SURFACE_STATE |
             ANV_DESCRIPTOR_BUFFER_VIEW;
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      data = ANV_DESCRIPTOR_SURFACE_STATE;
      break;

   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
      data = ANV_DESCRIPTOR_INLINE_UNIFORM;
      break;

   default:
      unreachable("Unsupported descriptor type");
   }

   /* On gfx8 and above, when we have softpin enabled, we also need to push
    * SSBO address ranges so that we can use A64 messages in the shader.
    */
   if (device->has_a64_buffer_access &&
       (type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ||
        type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
        type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
        type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC))
      data |= ANV_DESCRIPTOR_ADDRESS_RANGE;

   /* On Ivy Bridge and Bay Trail, we need to swizzle textures in the shader.
    * We do not handle VK_DESCRIPTOR_TYPE_STORAGE_IMAGE and
    * VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT because they already must have
    * identity swizzle.
    *
    * TODO: We need to handle swizzle on buffer views too for those same
    *       platforms.
    */
   if (device->info.verx10 == 70 &&
       (type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
        type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER))
      data |= ANV_DESCRIPTOR_TEXTURE_SWIZZLE;

   return data;
}

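/** Returns the descriptor data for a VK_DESCRIPTOR_TYPE_MUTABLE_EXT binding.
 *
 * This is the union of the data for every type the binding can mutate to.
 * If no type list is provided for the binding, we have to assume it can
 * hold any supported type except dynamic buffers and inline uniform blocks,
 * which cannot appear in a mutable type list.
 */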
static enum anv_descriptor_data
anv_descriptor_data_for_mutable_type(const struct anv_physical_device *device,
                                     const VkMutableDescriptorTypeCreateInfoEXT *mutable_info,
                                     int binding)
{
   enum anv_descriptor_data desc_data = 0;

   if (!mutable_info || mutable_info->mutableDescriptorTypeListCount <= binding) {
      for (VkDescriptorType i = 0; i <= VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT; i++) {
         if (i == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
             i == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
             i == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
            continue;

         desc_data |= anv_descriptor_data_for_type(device, i);
      }

      return desc_data;
   }

   const VkMutableDescriptorTypeListEXT *type_list =
      &mutable_info->pMutableDescriptorTypeLists[binding];
   for (uint32_t i = 0; i < type_list->descriptorTypeCount; i++) {
      desc_data |=
         anv_descriptor_data_for_type(device, type_list->pDescriptorTypes[i]);
   }

   return desc_data;
}

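/** Returns the size in bytes of the descriptor-buffer data for the given
 * descriptor data flags.  Inline uniform data is not counted here; its size
 * comes from the binding's array size instead.
 */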
static unsigned
anv_descriptor_data_size(enum anv_descriptor_data data)
{
   unsigned size = 0;

   if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE)
      size += sizeof(struct anv_sampled_image_descriptor);

   if (data & ANV_DESCRIPTOR_STORAGE_IMAGE)
      size += sizeof(struct anv_storage_image_descriptor);

   if (data & ANV_DESCRIPTOR_IMAGE_PARAM)
      size += ISL_IMAGE_PARAM_SIZE * 4;

   if (data & ANV_DESCRIPTOR_ADDRESS_RANGE)
      size += sizeof(struct anv_address_range_descriptor);

   if (data & ANV_DESCRIPTOR_TEXTURE_SWIZZLE)
      size += sizeof(struct anv_texture_swizzle_descriptor);

   return size;
}

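/** Returns true if descriptors of this type consume space in the set's
 * descriptor buffer, in which case the shader needs the buffer bound
 * through an extra binding table surface.
 */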
static bool
anv_needs_descriptor_buffer(VkDescriptorType desc_type,
                            enum anv_descriptor_data desc_data)
{
   if (desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK ||
       anv_descriptor_data_size(desc_data) > 0)
      return true;
   return false;
}

/** Returns the size in bytes of each descriptor with the given layout */
static unsigned
anv_descriptor_size(const struct anv_descriptor_set_binding_layout *layout)
{
   if (layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM) {
      assert(layout->data == ANV_DESCRIPTOR_INLINE_UNIFORM);
      return layout->array_size;
   }

   unsigned size = anv_descriptor_data_size(layout->data);

   /* For multi-planar bindings, we make every descriptor consume the maximum
    * number of planes so we don't have to bother with walking arrays and
    * adding things up every time.  Fortunately, YCbCr samplers aren't all
    * that common and likely won't be in the middle of big arrays.
    */
   if (layout->max_plane_count > 1)
      size *= layout->max_plane_count;

   return size;
}

/** Returns size in bytes of the biggest descriptor in the given layout */
static unsigned
anv_descriptor_size_for_mutable_type(const struct anv_physical_device *device,
                                     const VkMutableDescriptorTypeCreateInfoEXT *mutable_info,
                                     int binding)
{
   unsigned size = 0;

   if (!mutable_info || mutable_info->mutableDescriptorTypeListCount <= binding) {
      for (VkDescriptorType i = 0; i <= VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT; i++) {
         if (i == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
             i == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
             i == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
            continue;

         enum anv_descriptor_data desc_data =
            anv_descriptor_data_for_type(device, i);
         size = MAX2(size, anv_descriptor_data_size(desc_data));
      }

      return size;
   }

   const VkMutableDescriptorTypeListEXT *type_list =
      &mutable_info->pMutableDescriptorTypeLists[binding];
   for (uint32_t i = 0; i < type_list->descriptorTypeCount; i++) {
      enum anv_descriptor_data desc_data =
         anv_descriptor_data_for_type(device, type_list->pDescriptorTypes[i]);
      size = MAX2(size, anv_descriptor_data_size(desc_data));
   }

   return size;
}

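/** Returns true if the given descriptor data can be accessed bindlessly,
 * i.e. without going through a binding table entry, on this device.
 */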
static bool
anv_descriptor_data_supports_bindless(const struct anv_physical_device *pdevice,
                                      enum anv_descriptor_data data,
                                      bool sampler)
{
   if (data & ANV_DESCRIPTOR_ADDRESS_RANGE) {
      assert(pdevice->has_a64_buffer_access);
      return true;
   }

   if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
      assert(pdevice->has_bindless_samplers);
      return sampler && pdevice->has_bindless_samplers;
   }

   return false;
}

bool
anv_descriptor_supports_bindless(const struct anv_physical_device *pdevice,
                                 const struct anv_descriptor_set_binding_layout *binding,
                                 bool sampler)
{
   return anv_descriptor_data_supports_bindless(pdevice, binding->data,
                                                sampler);
}

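/** Returns true if the binding must be accessed bindlessly.
 *
 * Update-after-bind, update-unused-while-pending, and partially-bound
 * bindings can change after the binding table has been recorded into the
 * command buffer, so they cannot go through a binding table entry.
 */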
bool
anv_descriptor_requires_bindless(const struct anv_physical_device *pdevice,
                                 const struct anv_descriptor_set_binding_layout *binding,
                                 bool sampler)
{
   if (pdevice->always_use_bindless)
      return anv_descriptor_supports_bindless(pdevice, binding, sampler);

   static const VkDescriptorBindingFlagBits flags_requiring_bindless =
      VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT |
      VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT |
      VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT;

   return (binding->flags & flags_requiring_bindless) != 0;
}

void anv_GetDescriptorSetLayoutSupport(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    VkDescriptorSetLayoutSupport*               pSupport)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   const struct anv_physical_device *pdevice = device->physical;

   uint32_t surface_count[MESA_VULKAN_SHADER_STAGES] = { 0, };
   VkDescriptorType varying_desc_type = VK_DESCRIPTOR_TYPE_MAX_ENUM;
   bool needs_descriptor_buffer = false;

   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);
   const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);

   for (uint32_t b = 0; b < pCreateInfo->bindingCount; b++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[b];

      VkDescriptorBindingFlags flags = 0;
      if (binding_flags_info && binding_flags_info->bindingCount > 0) {
         assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
         flags = binding_flags_info->pBindingFlags[b];
      }

      enum anv_descriptor_data desc_data =
         binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
         anv_descriptor_data_for_mutable_type(pdevice, mutable_info, b) :
         anv_descriptor_data_for_type(pdevice, binding->descriptorType);

      if (anv_needs_descriptor_buffer(binding->descriptorType, desc_data))
         needs_descriptor_buffer = true;

      if (flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)
         varying_desc_type = binding->descriptorType;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         /* There is no real limit on samplers */
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         /* Inline uniforms don't use a binding */
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         if (anv_descriptor_data_supports_bindless(pdevice, desc_data, false))
            break;

         if (binding->pImmutableSamplers) {
            for (uint32_t i = 0; i < binding->descriptorCount; i++) {
               ANV_FROM_HANDLE(anv_sampler, sampler,
                               binding->pImmutableSamplers[i]);
               anv_foreach_stage(s, binding->stageFlags)
                  surface_count[s] += sampler->n_planes;
            }
         } else {
            anv_foreach_stage(s, binding->stageFlags)
               surface_count[s] += binding->descriptorCount;
         }
         break;

      default:
         if (anv_descriptor_data_supports_bindless(pdevice, desc_data, false))
            break;

         anv_foreach_stage(s, binding->stageFlags)
            surface_count[s] += binding->descriptorCount;
         break;
      }
   }

   for (unsigned s = 0; s < ARRAY_SIZE(surface_count); s++) {
      if (needs_descriptor_buffer)
         surface_count[s] += 1;
   }

   VkDescriptorSetVariableDescriptorCountLayoutSupport *vdcls =
      vk_find_struct(pSupport->pNext,
                     DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT);
   if (vdcls != NULL) {
      if (varying_desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         vdcls->maxVariableDescriptorCount = MAX_INLINE_UNIFORM_BLOCK_SIZE;
      } else if (varying_desc_type != VK_DESCRIPTOR_TYPE_MAX_ENUM) {
         vdcls->maxVariableDescriptorCount = UINT16_MAX;
      } else {
         vdcls->maxVariableDescriptorCount = 0;
      }
   }

   bool supported = true;
   for (unsigned s = 0; s < ARRAY_SIZE(surface_count); s++) {
      /* Our maximum binding table size is 240 and we need to reserve 8 for
       * render targets.
       */
      if (surface_count[s] > MAX_BINDING_TABLE_SIZE - MAX_RTS)
         supported = false;
   }

   pSupport->supported = supported;
}

VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorSetLayout*                      pSetLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t num_bindings = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);

      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       *    pImmutableSamplers can be used to initialize a set of immutable
       *    samplers. [...]  If descriptorType is not one of these descriptor
       *    types, then pImmutableSamplers is ignored."
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;
      if ((desc_type == VK_DESCRIPTOR_TYPE_SAMPLER ||
           desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
          pCreateInfo->pBindings[j].pImmutableSamplers)
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   /* We need to allocate descriptor set layouts off the device allocator
    * with DEVICE scope because they are reference counted and may not be
    * destroyed when vkDestroyDescriptorSetLayout is called.
    */
   VK_MULTIALLOC(ma);
   VK_MULTIALLOC_DECL(&ma, struct anv_descriptor_set_layout, set_layout, 1);
   VK_MULTIALLOC_DECL(&ma, struct anv_descriptor_set_binding_layout,
                           bindings, num_bindings);
   VK_MULTIALLOC_DECL(&ma, struct anv_sampler *, samplers,
                           immutable_sampler_count);

   if (!vk_object_multizalloc(&device->vk, &ma, NULL,
                              VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT))
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->ref_cnt = 1;
   set_layout->binding_count = num_bindings;

   for (uint32_t b = 0; b < num_bindings; b++) {
      /* Initialize all binding_layout entries to -1 */
      memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));

      set_layout->binding[b].flags = 0;
      set_layout->binding[b].data = 0;
      set_layout->binding[b].max_plane_count = 0;
      set_layout->binding[b].array_size = 0;
      set_layout->binding[b].immutable_samplers = NULL;
   }

   /* Initialize all samplers to 0 */
   memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));

   uint32_t buffer_view_count = 0;
   uint32_t dynamic_offset_count = 0;
   uint32_t descriptor_buffer_size = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;
      /* We temporarily store pCreateInfo->pBindings[] index (plus one) in the
       * immutable_samplers pointer.  This provides us with a quick-and-dirty
       * way to sort the bindings by binding number.
       */
      set_layout->binding[b].immutable_samplers = (void *)(uintptr_t)(j + 1);
   }

   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);

   const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);

   for (uint32_t b = 0; b < num_bindings; b++) {
      /* We stashed the pCreateInfo->pBindings[] index (plus one) in the
       * immutable_samplers pointer.  Check for NULL (empty binding) and then
       * reset it and compute the index.
       */
      if (set_layout->binding[b].immutable_samplers == NULL)
         continue;
      const uint32_t info_idx =
         (uintptr_t)(void *)set_layout->binding[b].immutable_samplers - 1;
      set_layout->binding[b].immutable_samplers = NULL;

      const VkDescriptorSetLayoutBinding *binding =
         &pCreateInfo->pBindings[info_idx];

      if (binding->descriptorCount == 0)
         continue;

      set_layout->binding[b].type = binding->descriptorType;

      if (binding_flags_info && binding_flags_info->bindingCount > 0) {
         assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
         set_layout->binding[b].flags =
            binding_flags_info->pBindingFlags[info_idx];

         /* From the Vulkan spec:
          *
          *    "If VkDescriptorSetLayoutCreateInfo::flags includes
          *    VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, then
          *    all elements of pBindingFlags must not include
          *    VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
          *    VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, or
          *    VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT"
          */
         if (pCreateInfo->flags &
             VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR) {
            assert(!(set_layout->binding[b].flags &
               (VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT |
                VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT |
                VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)));
         }
      }

      set_layout->binding[b].data =
         binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
         anv_descriptor_data_for_mutable_type(device->physical, mutable_info, b) :
         anv_descriptor_data_for_type(device->physical, binding->descriptorType);

      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].descriptor_index = set_layout->descriptor_count;
      set_layout->descriptor_count += binding->descriptorCount;

      if (set_layout->binding[b].data & ANV_DESCRIPTOR_BUFFER_VIEW) {
         set_layout->binding[b].buffer_view_index = buffer_view_count;
         buffer_view_count += binding->descriptorCount;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
         set_layout->binding[b].max_plane_count = 1;
         if (binding->pImmutableSamplers) {
            set_layout->binding[b].immutable_samplers = samplers;
            samplers += binding->descriptorCount;

            for (uint32_t i = 0; i < binding->descriptorCount; i++) {
               ANV_FROM_HANDLE(anv_sampler, sampler,
                               binding->pImmutableSamplers[i]);

               set_layout->binding[b].immutable_samplers[i] = sampler;
               if (set_layout->binding[b].max_plane_count < sampler->n_planes)
                  set_layout->binding[b].max_plane_count = sampler->n_planes;
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         set_layout->binding[b].max_plane_count = 1;
         break;

      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
         set_layout->dynamic_offset_stages[dynamic_offset_count] = binding->stageFlags;
         dynamic_offset_count += binding->descriptorCount;
         assert(dynamic_offset_count < MAX_DYNAMIC_BUFFERS);
         break;

      default:
         break;
      }

      set_layout->binding[b].descriptor_stride =
         binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
         anv_descriptor_size_for_mutable_type(device->physical, mutable_info, b) :
         anv_descriptor_size(&set_layout->binding[b]);

      if (binding->descriptorType ==
          VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         /* Inline uniform blocks are specified to use the descriptor array
          * size as the size in bytes of the block.
          */
         descriptor_buffer_size = align(descriptor_buffer_size,
                                        ANV_UBO_ALIGNMENT);
         set_layout->binding[b].descriptor_offset = descriptor_buffer_size;
         descriptor_buffer_size += binding->descriptorCount;
      } else {
         set_layout->binding[b].descriptor_offset = descriptor_buffer_size;
         descriptor_buffer_size +=
            set_layout->binding[b].descriptor_stride * binding->descriptorCount;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

   set_layout->buffer_view_count = buffer_view_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;
   set_layout->descriptor_buffer_size = descriptor_buffer_size;

   *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

void
anv_descriptor_set_layout_destroy(struct anv_device *device,
                                  struct anv_descriptor_set_layout *layout)
{
   assert(layout->ref_cnt == 0);
   vk_object_free(&device->vk, NULL, layout);
}

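/** Returns the layout's variable-count binding, if any.
 *
 * Only the binding with the largest binding number may have
 * VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT set, so we only have
 * to look at the last one.
 */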
static const struct anv_descriptor_set_binding_layout *
set_layout_dynamic_binding(const struct anv_descriptor_set_layout *set_layout)
{
   if (set_layout->binding_count == 0)
      return NULL;

   const struct anv_descriptor_set_binding_layout *last_binding =
      &set_layout->binding[set_layout->binding_count - 1];
   if (!(last_binding->flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT))
      return NULL;

   return last_binding;
}

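/** Returns the total descriptor count for a set allocated with the given
 * variable descriptor count, shrinking the variable-count binding's array
 * accordingly.  Inline uniform blocks are left as-is since their count is a
 * size in bytes rather than an array length.
 */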
static uint32_t
set_layout_descriptor_count(const struct anv_descriptor_set_layout *set_layout,
                            uint32_t var_desc_count)
{
   const struct anv_descriptor_set_binding_layout *dynamic_binding =
      set_layout_dynamic_binding(set_layout);
   if (dynamic_binding == NULL)
      return set_layout->descriptor_count;

   assert(var_desc_count <= dynamic_binding->array_size);
   uint32_t shrink = dynamic_binding->array_size - var_desc_count;

   if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
      return set_layout->descriptor_count;

   return set_layout->descriptor_count - shrink;
}

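/** Returns the number of buffer views needed for a set allocated with the
 * given variable descriptor count.
 */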
static uint32_t
set_layout_buffer_view_count(const struct anv_descriptor_set_layout *set_layout,
                             uint32_t var_desc_count)
{
   const struct anv_descriptor_set_binding_layout *dynamic_binding =
      set_layout_dynamic_binding(set_layout);
   if (dynamic_binding == NULL)
      return set_layout->buffer_view_count;

   assert(var_desc_count <= dynamic_binding->array_size);
   uint32_t shrink = dynamic_binding->array_size - var_desc_count;

   if (!(dynamic_binding->data & ANV_DESCRIPTOR_BUFFER_VIEW))
      return set_layout->buffer_view_count;

   return set_layout->buffer_view_count - shrink;
}

uint32_t
anv_descriptor_set_layout_descriptor_buffer_size(const struct anv_descriptor_set_layout *set_layout,
                                                 uint32_t var_desc_count)
{
   const struct anv_descriptor_set_binding_layout *dynamic_binding =
      set_layout_dynamic_binding(set_layout);
   if (dynamic_binding == NULL)
      return ALIGN(set_layout->descriptor_buffer_size, ANV_UBO_ALIGNMENT);

   assert(var_desc_count <= dynamic_binding->array_size);
   uint32_t shrink = dynamic_binding->array_size - var_desc_count;
   uint32_t set_size;

   if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
      /* Inline uniform blocks are specified to use the descriptor array
       * size as the size in bytes of the block.
       */
      set_size = set_layout->descriptor_buffer_size - shrink;
   } else {
      set_size = set_layout->descriptor_buffer_size -
                 shrink * dynamic_binding->descriptor_stride;
   }

   return ALIGN(set_size, ANV_UBO_ALIGNMENT);
}

void anv_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
    VkDescriptorSetLayout                       _set_layout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   anv_descriptor_set_layout_unref(device, set_layout);
}

#define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, &(x), sizeof(x));

static void
sha1_update_immutable_sampler(struct mesa_sha1 *ctx,
                              const struct anv_sampler *sampler)
{
   if (!sampler->conversion)
      return;

   /* The only thing that affects the shader is ycbcr conversion */
   _mesa_sha1_update(ctx, sampler->conversion,
                     sizeof(*sampler->conversion));
}

static void
sha1_update_descriptor_set_binding_layout(struct mesa_sha1 *ctx,
   const struct anv_descriptor_set_binding_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->flags);
   SHA1_UPDATE_VALUE(ctx, layout->data);
   SHA1_UPDATE_VALUE(ctx, layout->max_plane_count);
   SHA1_UPDATE_VALUE(ctx, layout->array_size);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_index);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_index);
   SHA1_UPDATE_VALUE(ctx, layout->buffer_view_index);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_offset);

   if (layout->immutable_samplers) {
      for (uint16_t i = 0; i < layout->array_size; i++)
         sha1_update_immutable_sampler(ctx, layout->immutable_samplers[i]);
   }
}

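/* Hash everything about the set layout that can affect the compiled
 * shaders; the per-set hashes feed the pipeline layout's sha1 below.
 */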
static void
sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
                                  const struct anv_descriptor_set_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->binding_count);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_count);
   SHA1_UPDATE_VALUE(ctx, layout->shader_stages);
   SHA1_UPDATE_VALUE(ctx, layout->buffer_view_count);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_count);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_buffer_size);

   for (uint16_t i = 0; i < layout->binding_count; i++)
      sha1_update_descriptor_set_binding_layout(ctx, &layout->binding[i]);
}

/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */

VkResult anv_CreatePipelineLayout(
    VkDevice                                    _device,
    const VkPipelineLayoutCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkPipelineLayout*                           pPipelineLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_pipeline_layout *layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_object_alloc(&device->vk, pAllocator, sizeof(*layout),
                            VK_OBJECT_TYPE_PIPELINE_LAYOUT);
   if (layout == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   unsigned dynamic_offset_count = 0;

   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
                      pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;
      anv_descriptor_set_layout_ref(set_layout);

      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      dynamic_offset_count += set_layout->dynamic_offset_count;
   }
   assert(dynamic_offset_count < MAX_DYNAMIC_BUFFERS);

   struct mesa_sha1 ctx;
   _mesa_sha1_init(&ctx);
   for (unsigned s = 0; s < layout->num_sets; s++) {
      sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
      _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
                        sizeof(layout->set[s].dynamic_offset_start));
   }
   _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
   _mesa_sha1_final(&ctx, layout->sha1);

   *pPipelineLayout = anv_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void anv_DestroyPipelineLayout(
    VkDevice                                    _device,
    VkPipelineLayout                            _pipelineLayout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;

   for (uint32_t i = 0; i < pipeline_layout->num_sets; i++)
      anv_descriptor_set_layout_unref(device, pipeline_layout->set[i].layout);

   vk_object_free(&device->vk, pAllocator, pipeline_layout);
}

/*
 * Descriptor pools.
 *
 * These are implemented using a big pool of memory and a free-list for the
 * host memory allocations, and a state_stream and a free list for the buffer
 * view surface state. The spec allows us to fail to allocate due to
 * fragmentation in all cases but two: 1) after pool reset, allocating up
 * until the pool size with no freeing must succeed and 2) allocating and
 * freeing only descriptor sets with the same layout. Case 1) is easy enough,
 * and the free list lets us recycle blocks for case 2).
 */

/* The vma heap reserves 0 to mean NULL; we have to offset by some amount to
 * ensure we can allocate the entire BO without hitting zero.  The actual
 * amount doesn't matter.
 */
#define POOL_HEAP_OFFSET 64

#define EMPTY 1

VkResult anv_CreateDescriptorPool(
    VkDevice                                    _device,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorPool*                           pDescriptorPool)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_pool *pool;

   const VkDescriptorPoolInlineUniformBlockCreateInfo *inline_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO);
   const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);

   uint32_t descriptor_count = 0;
   uint32_t buffer_view_count = 0;
   uint32_t descriptor_bo_size = 0;

   for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
      enum anv_descriptor_data desc_data =
         pCreateInfo->pPoolSizes[i].type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
         anv_descriptor_data_for_mutable_type(device->physical, mutable_info, i) :
         anv_descriptor_data_for_type(device->physical, pCreateInfo->pPoolSizes[i].type);

      if (desc_data & ANV_DESCRIPTOR_BUFFER_VIEW)
         buffer_view_count += pCreateInfo->pPoolSizes[i].descriptorCount;

      unsigned desc_data_size =
         pCreateInfo->pPoolSizes[i].type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
         anv_descriptor_size_for_mutable_type(device->physical, mutable_info, i) :
         anv_descriptor_data_size(desc_data);

      desc_data_size *= pCreateInfo->pPoolSizes[i].descriptorCount;

      /* Combined image sampler descriptors can take up to 3 slots if they
       * hold a YCbCr image.
       */
      if (pCreateInfo->pPoolSizes[i].type ==
          VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
         desc_data_size *= 3;

      if (pCreateInfo->pPoolSizes[i].type ==
          VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         /* Inline uniform blocks are specified to use the descriptor array
          * size as the size in bytes of the block.
          */
         assert(inline_info);
         desc_data_size += pCreateInfo->pPoolSizes[i].descriptorCount;
      }

      descriptor_bo_size += desc_data_size;

      descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
   }
   /* We have to align descriptor buffer allocations to 32B so that we can
    * push descriptor buffers.  This means that each descriptor buffer
    * allocated may burn up to 32B of extra space to get the right alignment.
    * (Technically, it's at most 28B because we're always going to start at
    * least 4B aligned but we're being conservative here.)  Allocate enough
    * extra space that we can chop it into maxSets pieces and align each one
    * of them to 32B.
    */
   descriptor_bo_size += ANV_UBO_ALIGNMENT * pCreateInfo->maxSets;
   /* We align inline uniform blocks to ANV_UBO_ALIGNMENT */
   if (inline_info) {
      descriptor_bo_size +=
         ANV_UBO_ALIGNMENT * inline_info->maxInlineUniformBlockBindings;
   }
   descriptor_bo_size = ALIGN(descriptor_bo_size, 4096);

   const size_t pool_size =
      pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +
      descriptor_count * sizeof(struct anv_descriptor) +
      buffer_view_count * sizeof(struct anv_buffer_view);
   const size_t total_size = sizeof(*pool) + pool_size;

   pool = vk_object_alloc(&device->vk, pAllocator, total_size,
                          VK_OBJECT_TYPE_DESCRIPTOR_POOL);
   if (!pool)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   pool->size = pool_size;
   pool->next = 0;
   pool->free_list = EMPTY;
   pool->host_only = pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT;

   if (descriptor_bo_size > 0) {
      VkResult result = anv_device_alloc_bo(device,
                                            "descriptors",
                                            descriptor_bo_size,
                                            ANV_BO_ALLOC_MAPPED |
                                            ANV_BO_ALLOC_SNOOPED,
                                            0 /* explicit_address */,
                                            &pool->bo);
      if (result != VK_SUCCESS) {
         vk_object_free(&device->vk, pAllocator, pool);
         return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
      }

      util_vma_heap_init(&pool->bo_heap, POOL_HEAP_OFFSET, descriptor_bo_size);
   } else {
      pool->bo = NULL;
   }

   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   list_inithead(&pool->desc_sets);

   *pDescriptorPool = anv_descriptor_pool_to_handle(pool);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   list_for_each_entry_safe(struct anv_descriptor_set, set,
                            &pool->desc_sets, pool_link) {
      anv_descriptor_set_layout_unref(device, set->layout);
   }

   if (pool->bo) {
      util_vma_heap_finish(&pool->bo_heap);
      anv_device_release_bo(device, pool->bo);
   }
   anv_state_stream_finish(&pool->surface_state_stream);

   vk_object_free(&device->vk, pAllocator, pool);
}

VkResult anv_ResetDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    VkDescriptorPoolResetFlags                  flags)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   list_for_each_entry_safe(struct anv_descriptor_set, set,
                            &pool->desc_sets, pool_link) {
      anv_descriptor_set_layout_unref(device, set->layout);
   }
   list_inithead(&pool->desc_sets);

   pool->next = 0;
   pool->free_list = EMPTY;

   if (pool->bo) {
      util_vma_heap_finish(&pool->bo_heap);
      util_vma_heap_init(&pool->bo_heap, POOL_HEAP_OFFSET, pool->bo->size);
   }

   anv_state_stream_finish(&pool->surface_state_stream);
   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   return VK_SUCCESS;
}

struct pool_free_list_entry {
   uint32_t next;
   uint32_t size;
};

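/* Try to bump-allocate from the end of the pool first; failing that, walk
 * the free list for a block that is big enough.  Per the spec we return
 * VK_ERROR_FRAGMENTED_POOL when there is free space that just isn't
 * contiguous and VK_ERROR_OUT_OF_POOL_MEMORY when the pool is simply full.
 */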
static VkResult
anv_descriptor_pool_alloc_set(struct anv_descriptor_pool *pool,
                              uint32_t size,
                              struct anv_descriptor_set **set)
{
   if (size <= pool->size - pool->next) {
      *set = (struct anv_descriptor_set *) (pool->data + pool->next);
      (*set)->size = size;
      pool->next += size;
      return VK_SUCCESS;
   } else {
      struct pool_free_list_entry *entry;
      uint32_t *link = &pool->free_list;
      for (uint32_t f = pool->free_list; f != EMPTY; f = entry->next) {
         entry = (struct pool_free_list_entry *) (pool->data + f);
         if (size <= entry->size) {
            *link = entry->next;
            *set = (struct anv_descriptor_set *) entry;
            (*set)->size = entry->size;
            return VK_SUCCESS;
         }
         link = &entry->next;
      }

      if (pool->free_list != EMPTY) {
         return VK_ERROR_FRAGMENTED_POOL;
      } else {
         return VK_ERROR_OUT_OF_POOL_MEMORY;
      }
   }
}

static void
anv_descriptor_pool_free_set(struct anv_descriptor_pool *pool,
                             struct anv_descriptor_set *set)
{
   /* Put the descriptor set allocation back on the free list. */
   const uint32_t index = (char *) set - pool->data;
   if (index + set->size == pool->next) {
      pool->next = index;
   } else {
      struct pool_free_list_entry *entry = (struct pool_free_list_entry *) set;
      entry->next = pool->free_list;
      entry->size = set->size;
      pool->free_list = (char *) entry - pool->data;
   }
}

struct surface_state_free_list_entry {
   void *next;
   struct anv_state state;
};

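/* Surface states are all 64B here, so we keep a simple free list of
 * recycled states and fall back to the pool's state stream when it runs
 * empty.
 */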
static struct anv_state
anv_descriptor_pool_alloc_state(struct anv_descriptor_pool *pool)
{
   assert(!pool->host_only);

   struct surface_state_free_list_entry *entry =
      pool->surface_state_free_list;

   if (entry) {
      struct anv_state state = entry->state;
      pool->surface_state_free_list = entry->next;
      assert(state.alloc_size == 64);
      return state;
   } else {
      return anv_state_stream_alloc(&pool->surface_state_stream, 64, 64);
   }
}

static void
anv_descriptor_pool_free_state(struct anv_descriptor_pool *pool,
                               struct anv_state state)
{
   assert(state.alloc_size);
   /* Put the buffer view surface state back on the free list. */
   struct surface_state_free_list_entry *entry = state.map;
   entry->next = pool->surface_state_free_list;
   entry->state = state;
   pool->surface_state_free_list = entry;
}

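/** Returns the host memory size of a descriptor set with the given layout
 * and variable descriptor count: the set struct itself followed by its
 * descriptor and buffer view arrays.
 */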
size_t
anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout *layout,
                               uint32_t var_desc_count)
{
   const uint32_t descriptor_count =
      set_layout_descriptor_count(layout, var_desc_count);
   const uint32_t buffer_view_count =
      set_layout_buffer_view_count(layout, var_desc_count);

   return sizeof(struct anv_descriptor_set) +
          descriptor_count * sizeof(struct anv_descriptor) +
          buffer_view_count * sizeof(struct anv_buffer_view);
}

static VkResult
anv_descriptor_set_create(struct anv_device *device,
                          struct anv_descriptor_pool *pool,
                          struct anv_descriptor_set_layout *layout,
                          uint32_t var_desc_count,
                          struct anv_descriptor_set **out_set)
{
   struct anv_descriptor_set *set;
   const size_t size = anv_descriptor_set_layout_size(layout, var_desc_count);

   VkResult result = anv_descriptor_pool_alloc_set(pool, size, &set);
   if (result != VK_SUCCESS)
      return result;

   uint32_t descriptor_buffer_size =
      anv_descriptor_set_layout_descriptor_buffer_size(layout, var_desc_count);

   set->desc_surface_state = ANV_STATE_NULL;

   if (descriptor_buffer_size) {
      uint64_t pool_vma_offset =
         util_vma_heap_alloc(&pool->bo_heap, descriptor_buffer_size,
                             ANV_UBO_ALIGNMENT);
      if (pool_vma_offset == 0) {
         anv_descriptor_pool_free_set(pool, set);
         return vk_error(pool, VK_ERROR_FRAGMENTED_POOL);
      }
      assert(pool_vma_offset >= POOL_HEAP_OFFSET &&
             pool_vma_offset - POOL_HEAP_OFFSET <= INT32_MAX);
      set->desc_mem.offset = pool_vma_offset - POOL_HEAP_OFFSET;
      set->desc_mem.alloc_size = descriptor_buffer_size;
      set->desc_mem.map = pool->bo->map + set->desc_mem.offset;

      set->desc_addr = (struct anv_address) {
         .bo = pool->bo,
         .offset = set->desc_mem.offset,
      };

      enum isl_format format =
         anv_isl_format_for_descriptor_type(device,
                                            VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);

      if (!pool->host_only) {
         set->desc_surface_state = anv_descriptor_pool_alloc_state(pool);
         anv_fill_buffer_surface_state(device, set->desc_surface_state,
                                       format, ISL_SWIZZLE_IDENTITY,
                                       ISL_SURF_USAGE_CONSTANT_BUFFER_BIT,
                                       set->desc_addr,
                                       descriptor_buffer_size, 1);
      }
   } else {
      set->desc_mem = ANV_STATE_NULL;
      set->desc_addr = (struct anv_address) { .bo = NULL, .offset = 0 };
   }

   vk_object_base_init(&device->vk, &set->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_SET);
   set->pool = pool;
   set->layout = layout;
   anv_descriptor_set_layout_ref(layout);

   set->buffer_view_count =
      set_layout_buffer_view_count(layout, var_desc_count);
   set->descriptor_count =
      set_layout_descriptor_count(layout, var_desc_count);

   set->buffer_views =
      (struct anv_buffer_view *) &set->descriptors[set->descriptor_count];

   /* By defining the descriptors to be zero now, we can later verify that
    * a descriptor has not been populated with user data.
    */
   memset(set->descriptors, 0,
          sizeof(struct anv_descriptor) * set->descriptor_count);

   /* Go through and fill out immutable samplers if we have any */
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers) {
         for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
            /* The type will get changed to COMBINED_IMAGE_SAMPLER in
             * UpdateDescriptorSets if needed.  However, if the descriptor
             * set has an immutable sampler, UpdateDescriptorSets may never
             * touch it, so we need to make sure it's 100% valid now.
             *
             * We don't need to actually provide a sampler because the helper
             * will always write in the immutable sampler regardless of what
             * is in the sampler parameter.
             */
            VkDescriptorImageInfo info = { };
            anv_descriptor_set_write_image_view(device, set, &info,
                                                VK_DESCRIPTOR_TYPE_SAMPLER,
                                                b, i);
         }
      }
   }

   /* Allocate the (as yet unfilled) surface states for the buffer views up
    * front since we fill them in lazily in the write anyway.
    */
   if (!pool->host_only) {
      for (uint32_t b = 0; b < set->buffer_view_count; b++) {
         set->buffer_views[b].surface_state =
            anv_descriptor_pool_alloc_state(pool);
      }
   }

   list_addtail(&set->pool_link, &pool->desc_sets);

   *out_set = set;

   return VK_SUCCESS;
}

static void
anv_descriptor_set_destroy(struct anv_device *device,
                           struct anv_descriptor_pool *pool,
                           struct anv_descriptor_set *set)
{
   anv_descriptor_set_layout_unref(device, set->layout);

   if (set->desc_mem.alloc_size) {
      util_vma_heap_free(&pool->bo_heap,
                         (uint64_t)set->desc_mem.offset + POOL_HEAP_OFFSET,
                         set->desc_mem.alloc_size);
      if (set->desc_surface_state.alloc_size)
         anv_descriptor_pool_free_state(pool, set->desc_surface_state);
   }

   if (!pool->host_only) {
      for (uint32_t b = 0; b < set->buffer_view_count; b++) {
         if (set->buffer_views[b].surface_state.alloc_size)
            anv_descriptor_pool_free_state(pool, set->buffer_views[b].surface_state);
      }
   }

   list_del(&set->pool_link);

   vk_object_base_finish(&set->base);
   anv_descriptor_pool_free_set(pool, set);
}

VkResult anv_AllocateDescriptorSets(
    VkDevice                                    _device,
    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
    VkDescriptorSet*                            pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   struct anv_descriptor_set *set = NULL;
   uint32_t i;

   const VkDescriptorSetVariableDescriptorCountAllocateInfo *vdcai =
      vk_find_struct_const(pAllocateInfo->pNext,
                           DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO);

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      uint32_t var_desc_count = 0;
      if (vdcai != NULL && vdcai->descriptorSetCount > 0) {
         assert(vdcai->descriptorSetCount == pAllocateInfo->descriptorSetCount);
         var_desc_count = vdcai->pDescriptorCounts[i];
      }

      result = anv_descriptor_set_create(device, pool, layout,
                                         var_desc_count, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS)
      anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);

   return result;
}

VkResult anv_FreeDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);

      if (!set)
         continue;

      anv_descriptor_set_destroy(device, pool, set);
   }

   return VK_SUCCESS;
}

1314 static void
anv_descriptor_set_write_image_param(uint32_t * param_desc_map,const struct isl_image_param * param)1315 anv_descriptor_set_write_image_param(uint32_t *param_desc_map,
1316                                      const struct isl_image_param *param)
1317 {
1318 #define WRITE_PARAM_FIELD(field, FIELD) \
1319    for (unsigned i = 0; i < ARRAY_SIZE(param->field); i++) \
1320       param_desc_map[ISL_IMAGE_PARAM_##FIELD##_OFFSET + i] = param->field[i]
1321 
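   /* Each invocation copies one parameter array into its slot of the
    * descriptor map, e.g. WRITE_PARAM_FIELD(offset, OFFSET) expands to:
    *
    *    for (unsigned i = 0; i < ARRAY_SIZE(param->offset); i++)
    *       param_desc_map[ISL_IMAGE_PARAM_OFFSET_OFFSET + i] = param->offset[i];
    */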
   WRITE_PARAM_FIELD(offset, OFFSET);
   WRITE_PARAM_FIELD(size, SIZE);
   WRITE_PARAM_FIELD(stride, STRIDE);
   WRITE_PARAM_FIELD(tiling, TILING);
   WRITE_PARAM_FIELD(swizzling, SWIZZLING);

#undef WRITE_PARAM_FIELD
}

static uint32_t
anv_surface_state_to_handle(struct anv_state state)
{
   /* Bits 31:12 of the bindless surface offset in the extended message
    * descriptor are bits 25:6 of the byte-based address.
    */
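   /* For example, a 64-byte-aligned byte offset of 0x1000 yields the handle
    * 0x40000 (0x1000 << 6), i.e. address bits 25:6 land in handle bits 31:12.
    */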
   assert(state.offset >= 0);
   uint32_t offset = state.offset;
   assert((offset & 0x3f) == 0 && offset < (1 << 26));
   return offset << 6;
}

void
anv_descriptor_set_write_image_view(struct anv_device *device,
                                    struct anv_descriptor_set *set,
                                    const VkDescriptorImageInfo * const info,
                                    VkDescriptorType type,
                                    uint32_t binding,
                                    uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];
   struct anv_image_view *image_view = NULL;
   struct anv_sampler *sampler = NULL;

   /* We get called with just VK_DESCRIPTOR_TYPE_SAMPLER as part of descriptor
    * set initialization to set the bindless samplers.
    */
   assert(type == bind_layout->type ||
          type == VK_DESCRIPTOR_TYPE_SAMPLER ||
          bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT);

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      sampler = bind_layout->immutable_samplers ?
                bind_layout->immutable_samplers[element] :
                anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      image_view = anv_image_view_from_handle(info->imageView);
      sampler = bind_layout->immutable_samplers ?
                bind_layout->immutable_samplers[element] :
                anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      image_view = anv_image_view_from_handle(info->imageView);
      break;

   default:
      unreachable("invalid descriptor type");
   }

   *desc = (struct anv_descriptor) {
      .type = type,
      .layout = info->imageLayout,
      .image_view = image_view,
      .sampler = sampler,
   };

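   /* Host-only pools have no GPU-visible descriptor memory, so only the
    * CPU-side anv_descriptor above needs updating.
    */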
   if (set->pool && set->pool->host_only)
      return;

   void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
                    element * bind_layout->descriptor_stride;
   memset(desc_map, 0, bind_layout->descriptor_stride);

   if (image_view == NULL && sampler == NULL)
      return;

   enum anv_descriptor_data data =
      bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
      anv_descriptor_data_for_type(device->physical, type) :
      bind_layout->data;

   if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
      struct anv_sampled_image_descriptor desc_data[3];
      memset(desc_data, 0, sizeof(desc_data));

      if (image_view) {
         for (unsigned p = 0; p < image_view->n_planes; p++) {
            struct anv_surface_state sstate =
               (desc->layout == VK_IMAGE_LAYOUT_GENERAL) ?
               image_view->planes[p].general_sampler_surface_state :
               image_view->planes[p].optimal_sampler_surface_state;
            desc_data[p].image = anv_surface_state_to_handle(sstate.state);
         }
      }

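      /* Each plane's SAMPLER_STATE sits in its own 32-byte slot of the
       * sampler's bindless state allocation.
       */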
      if (sampler) {
         for (unsigned p = 0; p < sampler->n_planes; p++)
            desc_data[p].sampler = sampler->bindless_state.offset + p * 32;
      }

      /* We may have max_plane_count == 0 if this isn't a sampled image but
       * it can be no more than the size of our array of handles.
       */
      assert(bind_layout->max_plane_count <= ARRAY_SIZE(desc_data));
      memcpy(desc_map, desc_data,
             MAX2(1, bind_layout->max_plane_count) * sizeof(desc_data[0]));
   }

   if (image_view == NULL)
      return;

   if (data & ANV_DESCRIPTOR_STORAGE_IMAGE) {
      assert(!(data & ANV_DESCRIPTOR_IMAGE_PARAM));
      assert(image_view->n_planes == 1);
      struct anv_storage_image_descriptor desc_data = {
         .vanilla = anv_surface_state_to_handle(
                           image_view->planes[0].storage_surface_state.state),
         .lowered = anv_surface_state_to_handle(
                           image_view->planes[0].lowered_storage_surface_state.state),
      };
      memcpy(desc_map, &desc_data, sizeof(desc_data));
   }

   if (data & ANV_DESCRIPTOR_IMAGE_PARAM) {
      /* Storage images can only ever have one plane */
      assert(image_view->n_planes == 1);
      const struct isl_image_param *image_param =
         &image_view->planes[0].lowered_storage_image_param;

      anv_descriptor_set_write_image_param(desc_map, image_param);
   }

   if (data & ANV_DESCRIPTOR_TEXTURE_SWIZZLE) {
      assert(!(data & ANV_DESCRIPTOR_SAMPLED_IMAGE));
      assert(image_view);
      struct anv_texture_swizzle_descriptor desc_data[3];
      memset(desc_data, 0, sizeof(desc_data));

      for (unsigned p = 0; p < image_view->n_planes; p++) {
         desc_data[p] = (struct anv_texture_swizzle_descriptor) {
            .swizzle = {
               (uint8_t)image_view->planes[p].isl.swizzle.r,
               (uint8_t)image_view->planes[p].isl.swizzle.g,
               (uint8_t)image_view->planes[p].isl.swizzle.b,
               (uint8_t)image_view->planes[p].isl.swizzle.a,
            },
         };
      }
      memcpy(desc_map, desc_data,
             MAX2(1, bind_layout->max_plane_count) * sizeof(desc_data[0]));
   }
}

void
anv_descriptor_set_write_buffer_view(struct anv_device *device,
                                     struct anv_descriptor_set *set,
                                     VkDescriptorType type,
                                     struct anv_buffer_view *buffer_view,
                                     uint32_t binding,
                                     uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type ||
          bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT);

   *desc = (struct anv_descriptor) {
      .type = type,
      .buffer_view = buffer_view,
   };

   if (set->pool && set->pool->host_only)
      return;

   enum anv_descriptor_data data =
      bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
      anv_descriptor_data_for_type(device->physical, type) :
      bind_layout->data;

   void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
                    element * bind_layout->descriptor_stride;

   if (buffer_view == NULL) {
      memset(desc_map, 0, bind_layout->descriptor_stride);
      return;
   }

   if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
      struct anv_sampled_image_descriptor desc_data = {
         .image = anv_surface_state_to_handle(buffer_view->surface_state),
      };
      memcpy(desc_map, &desc_data, sizeof(desc_data));
   }

   if (data & ANV_DESCRIPTOR_STORAGE_IMAGE) {
      assert(!(data & ANV_DESCRIPTOR_IMAGE_PARAM));
      struct anv_storage_image_descriptor desc_data = {
         .vanilla = anv_surface_state_to_handle(
                           buffer_view->storage_surface_state),
         .lowered = anv_surface_state_to_handle(
                           buffer_view->lowered_storage_surface_state),
      };
      memcpy(desc_map, &desc_data, sizeof(desc_data));
   }

   if (data & ANV_DESCRIPTOR_IMAGE_PARAM) {
      anv_descriptor_set_write_image_param(desc_map,
         &buffer_view->lowered_storage_image_param);
   }
}

void
anv_descriptor_set_write_buffer(struct anv_device *device,
                                struct anv_descriptor_set *set,
                                struct anv_state_stream *alloc_stream,
                                VkDescriptorType type,
                                struct anv_buffer *buffer,
                                uint32_t binding,
                                uint32_t element,
                                VkDeviceSize offset,
                                VkDeviceSize range)
{
   assert(alloc_stream || set->pool);

   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type ||
          bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT);

   *desc = (struct anv_descriptor) {
      .type = type,
      .offset = offset,
      .range = range,
      .buffer = buffer,
   };

   if (set->pool && set->pool->host_only)
      return;

   void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
                    element * bind_layout->descriptor_stride;

   if (buffer == NULL) {
      memset(desc_map, 0, bind_layout->descriptor_stride);
      return;
   }

   struct anv_address bind_addr = anv_address_add(buffer->address, offset);
   uint64_t bind_range = vk_buffer_range(&buffer->vk, offset, range);
   enum anv_descriptor_data data =
      bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
      anv_descriptor_data_for_type(device->physical, type) :
      bind_layout->data;

   /* We report a bounds checking alignment of 32B for the sake of block
    * messages which read an entire register worth at a time.
    */
   if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
       type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)
      bind_range = align64(bind_range, ANV_UBO_ALIGNMENT);

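   /* The address-range descriptor packs the buffer's raw 64-bit GPU address
    * together with the bound byte range, which shaders accessing the buffer
    * through A64 messages bounds-check against.
    */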
   if (data & ANV_DESCRIPTOR_ADDRESS_RANGE) {
      struct anv_address_range_descriptor desc_data = {
         .address = anv_address_physical(bind_addr),
         .range = bind_range,
      };
      memcpy(desc_map, &desc_data, sizeof(desc_data));
   }

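   /* Dynamic buffer descriptors keep only offset/range on the CPU side; the
    * surface state is expected to be emitted when the set is bound with its
    * dynamic offsets, so no buffer view is filled out here.
    */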
   if (vk_descriptor_type_is_dynamic(type))
      return;

   assert(data & ANV_DESCRIPTOR_BUFFER_VIEW);
   struct anv_buffer_view *bview =
      &set->buffer_views[bind_layout->buffer_view_index + element];

   bview->range = bind_range;
   bview->address = bind_addr;

   /* If we're writing descriptors through a push command, we need to
    * allocate the surface state from the command buffer. Otherwise it will
    * be allocated by the descriptor pool when calling
    * vkAllocateDescriptorSets.
    */
   if (alloc_stream) {
      bview->surface_state = anv_state_stream_alloc(alloc_stream, 64, 64);
   }

   assert(bview->surface_state.alloc_size);

   isl_surf_usage_flags_t usage =
      (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
       type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ?
      ISL_SURF_USAGE_CONSTANT_BUFFER_BIT :
      ISL_SURF_USAGE_STORAGE_BIT;

   enum isl_format format = anv_isl_format_for_descriptor_type(device, type);
   anv_fill_buffer_surface_state(device, bview->surface_state,
                                 format, ISL_SWIZZLE_IDENTITY,
                                 usage, bind_addr, bind_range, 1);
   desc->set_buffer_view = bview;
}

void
anv_descriptor_set_write_inline_uniform_data(struct anv_device *device,
                                             struct anv_descriptor_set *set,
                                             uint32_t binding,
                                             const void *data,
                                             size_t offset,
                                             size_t size)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];

   assert(bind_layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM);

   void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset;

   memcpy(desc_map + offset, data, size);
}

void anv_UpdateDescriptorSets(
    VkDevice                                    _device,
    uint32_t                                    descriptorWriteCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    descriptorCopyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      ANV_FROM_HANDLE(anv_descriptor_set, set, write->dstSet);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            anv_descriptor_set_write_image_view(device, set,
                                                write->pImageInfo + j,
                                                write->descriptorType,
                                                write->dstBinding,
                                                write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer_view, bview,
                            write->pTexelBufferView[j]);

            anv_descriptor_set_write_buffer_view(device, set,
                                                 write->descriptorType,
                                                 bview,
                                                 write->dstBinding,
                                                 write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);

            anv_descriptor_set_write_buffer(device, set,
                                            NULL,
                                            write->descriptorType,
                                            buffer,
                                            write->dstBinding,
                                            write->dstArrayElement + j,
                                            write->pBufferInfo[j].offset,
                                            write->pBufferInfo[j].range);
         }
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK: {
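         /* For inline uniform blocks, dstArrayElement and descriptorCount
          * are a byte offset and byte size rather than array indices.
          */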
         const VkWriteDescriptorSetInlineUniformBlock *inline_write =
            vk_find_struct_const(write->pNext,
                                 WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK);
         assert(inline_write->dataSize == write->descriptorCount);
         anv_descriptor_set_write_inline_uniform_data(device, set,
                                                      write->dstBinding,
                                                      inline_write->pData,
                                                      write->dstArrayElement,
                                                      inline_write->dataSize);
         break;
      }

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
      ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);

      const struct anv_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct anv_descriptor *src_desc =
         &src->descriptors[src_layout->descriptor_index];
      src_desc += copy->srcArrayElement;

      if (src_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         anv_descriptor_set_write_inline_uniform_data(device, dst,
                                                      copy->dstBinding,
                                                      src->desc_mem.map + src_layout->descriptor_offset + copy->srcArrayElement,
                                                      copy->dstArrayElement,
                                                      copy->descriptorCount);
         continue;
      }

      /* Copy through the write helpers so both the CPU-side anv_descriptor
       * and the GPU-visible descriptor memory get updated.
       */
      for (uint32_t j = 0; j < copy->descriptorCount; j++) {
         switch (src_desc[j].type) {
         case VK_DESCRIPTOR_TYPE_SAMPLER:
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
            VkDescriptorImageInfo info = {
               .sampler = anv_sampler_to_handle(src_desc[j].sampler),
               .imageView = anv_image_view_to_handle(src_desc[j].image_view),
               .imageLayout = src_desc[j].layout
            };
            anv_descriptor_set_write_image_view(device, dst,
                                                &info,
                                                src_desc[j].type,
                                                copy->dstBinding,
                                                copy->dstArrayElement + j);
            break;
         }

         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
            anv_descriptor_set_write_buffer_view(device, dst,
                                                 src_desc[j].type,
                                                 src_desc[j].buffer_view,
                                                 copy->dstBinding,
                                                 copy->dstArrayElement + j);
            break;
         }

         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            anv_descriptor_set_write_buffer(device, dst,
                                            NULL,
                                            src_desc[j].type,
                                            src_desc[j].buffer,
                                            copy->dstBinding,
                                            copy->dstArrayElement + j,
                                            src_desc[j].offset,
                                            src_desc[j].range);
            break;
         }

         default:
            break;
         }
      }
   }
}

/*
 * Descriptor update templates.
 */
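
/* Each template entry records the descriptor type, binding, starting array
 * element, count, and the offset/stride at which the corresponding
 * VkDescriptorImageInfo/VkDescriptorBufferInfo/VkBufferView structs sit in
 * the application-provided pData blob.
 */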

void
anv_descriptor_set_write_template(struct anv_device *device,
                                  struct anv_descriptor_set *set,
                                  struct anv_state_stream *alloc_stream,
                                  const struct vk_descriptor_update_template *template,
                                  const void *data)
{
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct vk_descriptor_template_entry *entry =
         &template->entries[i];

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;
            anv_descriptor_set_write_image_view(device, set,
                                                info, entry->type,
                                                entry->binding,
                                                entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *_bview =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);

            anv_descriptor_set_write_buffer_view(device, set,
                                                 entry->type,
                                                 bview,
                                                 entry->binding,
                                                 entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);

            anv_descriptor_set_write_buffer(device, set,
                                            alloc_stream,
                                            entry->type,
                                            buffer,
                                            entry->binding,
                                            entry->array_element + j,
                                            info->offset, info->range);
         }
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         anv_descriptor_set_write_inline_uniform_data(device, set,
                                                      entry->binding,
                                                      data + entry->offset,
                                                      entry->array_element,
                                                      entry->array_count);
         break;

      default:
         break;
      }
   }
}

void anv_UpdateDescriptorSetWithTemplate(
    VkDevice                                    _device,
    VkDescriptorSet                             descriptorSet,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const void*                                 pData)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set, set, descriptorSet);
   VK_FROM_HANDLE(vk_descriptor_update_template, template,
                  descriptorUpdateTemplate);

   anv_descriptor_set_write_template(device, set, NULL, template, pData);
}