/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

#include "util/mesa-sha1.h"
#include "vk_util.h"

#include "anv_private.h"

/*
 * Descriptor set layouts.
 */

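/* Compute the set of internal data chunks (anv_descriptor_data flags) needed
 * to represent one descriptor of the given type, based on the physical
 * device's capabilities (bindless support, hardware generation, etc.).
 */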
static enum anv_descriptor_data
anv_descriptor_data_for_type(const struct anv_physical_device *device,
                             VkDescriptorType type)
{
   enum anv_descriptor_data data = 0;

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      data = ANV_DESCRIPTOR_SAMPLER_STATE;
      if (device->has_bindless_samplers)
         data |= ANV_DESCRIPTOR_SAMPLED_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      data = ANV_DESCRIPTOR_SURFACE_STATE |
             ANV_DESCRIPTOR_SAMPLER_STATE;
      if (device->has_bindless_images || device->has_bindless_samplers)
         data |= ANV_DESCRIPTOR_SAMPLED_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      data = ANV_DESCRIPTOR_SURFACE_STATE;
      if (device->has_bindless_images)
         data |= ANV_DESCRIPTOR_SAMPLED_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      data = ANV_DESCRIPTOR_SURFACE_STATE;
      break;

   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      data = ANV_DESCRIPTOR_SURFACE_STATE;
      if (device->info.ver < 9)
         data |= ANV_DESCRIPTOR_IMAGE_PARAM;
      if (device->has_bindless_images)
         data |= ANV_DESCRIPTOR_STORAGE_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      data = ANV_DESCRIPTOR_SURFACE_STATE |
             ANV_DESCRIPTOR_BUFFER_VIEW;
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      data = ANV_DESCRIPTOR_SURFACE_STATE;
      break;

   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
      data = ANV_DESCRIPTOR_INLINE_UNIFORM;
      break;

   case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
      data = ANV_DESCRIPTOR_ADDRESS_RANGE;
      break;

   default:
      unreachable("Unsupported descriptor type");
   }

   /* On gfx8 and above when we have softpin enabled, we also need to push
    * SSBO address ranges so that we can use A64 messages in the shader.
    */
   if (device->has_a64_buffer_access &&
       (type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ||
        type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
        type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
        type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC))
      data |= ANV_DESCRIPTOR_ADDRESS_RANGE;

   /* On Ivy Bridge and Bay Trail, we need to swizzle textures in the shader.
    * We do not handle VK_DESCRIPTOR_TYPE_STORAGE_IMAGE and
    * VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT because they already must have
    * identity swizzle.
    *
    * TODO: We need to handle swizzle on buffer views too for those same
    *       platforms.
    */
   if (device->info.verx10 == 70 &&
       (type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
        type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER))
      data |= ANV_DESCRIPTOR_TEXTURE_SWIZZLE;

   return data;
}

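/* For VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, the required data is the union of
 * the requirements of every type the binding may assume.  Without an
 * explicit type list, that is every supported type except the
 * dynamic-buffer and inline-uniform ones, plus acceleration structures.
 */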
static enum anv_descriptor_data
anv_descriptor_data_for_mutable_type(const struct anv_physical_device *device,
                                     const VkMutableDescriptorTypeCreateInfoVALVE *mutable_info,
                                     int binding)
{
   enum anv_descriptor_data desc_data = 0;

   if (!mutable_info || mutable_info->mutableDescriptorTypeListCount == 0) {
      for (VkDescriptorType i = 0; i <= VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT; i++) {
         if (i == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
             i == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
             i == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
            continue;

         desc_data |= anv_descriptor_data_for_type(device, i);
      }

      desc_data |= anv_descriptor_data_for_type(
         device, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);

      return desc_data;
   }

   const VkMutableDescriptorTypeListVALVE *type_list =
      &mutable_info->pMutableDescriptorTypeLists[binding];
   for (uint32_t i = 0; i < type_list->descriptorTypeCount; i++) {
      desc_data |=
         anv_descriptor_data_for_type(device, type_list->pDescriptorTypes[i]);
   }

   return desc_data;
}

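/* Size in bytes that one descriptor with the given data chunks consumes in
 * the descriptor buffer.
 */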
static unsigned
anv_descriptor_data_size(enum anv_descriptor_data data)
{
   unsigned size = 0;

   if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE)
      size += sizeof(struct anv_sampled_image_descriptor);

   if (data & ANV_DESCRIPTOR_STORAGE_IMAGE)
      size += sizeof(struct anv_storage_image_descriptor);

   if (data & ANV_DESCRIPTOR_IMAGE_PARAM)
      size += BRW_IMAGE_PARAM_SIZE * 4;

   if (data & ANV_DESCRIPTOR_ADDRESS_RANGE)
      size += sizeof(struct anv_address_range_descriptor);

   if (data & ANV_DESCRIPTOR_TEXTURE_SWIZZLE)
      size += sizeof(struct anv_texture_swizzle_descriptor);

   return size;
}

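/* A binding needs space in the descriptor buffer if it is an inline uniform
 * block (whose payload lives in the buffer) or if any of its data chunks
 * have a non-zero size.
 */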
static bool
anv_needs_descriptor_buffer(VkDescriptorType desc_type,
                            enum anv_descriptor_data desc_data)
{
   if (desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK ||
       anv_descriptor_data_size(desc_data) > 0)
      return true;
   return false;
}

/** Returns the size in bytes of each descriptor with the given layout */
static unsigned
anv_descriptor_size(const struct anv_descriptor_set_binding_layout *layout)
{
   if (layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM) {
      assert(layout->data == ANV_DESCRIPTOR_INLINE_UNIFORM);
      return layout->array_size;
   }

   unsigned size = anv_descriptor_data_size(layout->data);

   /* For multi-planar bindings, we make every descriptor consume the maximum
    * number of planes so we don't have to bother with walking arrays and
    * adding things up every time.  Fortunately, YCbCr samplers aren't all
    * that common and likely won't be in the middle of big arrays.
    */
   if (layout->max_plane_count > 1)
      size *= layout->max_plane_count;

   return size;
}

/** Returns size in bytes of the biggest descriptor in the given layout */
static unsigned
anv_descriptor_size_for_mutable_type(const struct anv_physical_device *device,
                                     const VkMutableDescriptorTypeCreateInfoVALVE *mutable_info,
                                     int binding)
{
   unsigned size = 0;

   if (!mutable_info || mutable_info->mutableDescriptorTypeListCount == 0) {
      for (VkDescriptorType i = 0; i <= VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT; i++) {
         if (i == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
             i == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
             i == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
            continue;

         enum anv_descriptor_data desc_data =
            anv_descriptor_data_for_type(device, i);
         size = MAX2(size, anv_descriptor_data_size(desc_data));
      }

      enum anv_descriptor_data desc_data = anv_descriptor_data_for_type(
         device, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
      size = MAX2(size, anv_descriptor_data_size(desc_data));

      return size;
   }

   const VkMutableDescriptorTypeListVALVE *type_list =
      &mutable_info->pMutableDescriptorTypeLists[binding];
   for (uint32_t i = 0; i < type_list->descriptorTypeCount; i++) {
      enum anv_descriptor_data desc_data =
         anv_descriptor_data_for_type(device, type_list->pDescriptorTypes[i]);
      size = MAX2(size, anv_descriptor_data_size(desc_data));
   }

   return size;
}

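/* Returns true if descriptors with the given data can be accessed bindlessly
 * rather than through a binding table slot.  For sampled images, the sampler
 * and surface halves are checked independently via the 'sampler' flag.
 */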
static bool
anv_descriptor_data_supports_bindless(const struct anv_physical_device *pdevice,
                                      enum anv_descriptor_data data,
                                      bool sampler)
{
   if (data & ANV_DESCRIPTOR_ADDRESS_RANGE) {
      assert(pdevice->has_a64_buffer_access);
      return true;
   }

   if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
      assert(pdevice->has_bindless_images || pdevice->has_bindless_samplers);
      return sampler ? pdevice->has_bindless_samplers :
                       pdevice->has_bindless_images;
   }

   if (data & ANV_DESCRIPTOR_STORAGE_IMAGE) {
      assert(pdevice->has_bindless_images);
      return true;
   }

   return false;
}

bool
anv_descriptor_supports_bindless(const struct anv_physical_device *pdevice,
                                 const struct anv_descriptor_set_binding_layout *binding,
                                 bool sampler)
{
   return anv_descriptor_data_supports_bindless(pdevice, binding->data,
                                                sampler);
}

bool
anv_descriptor_requires_bindless(const struct anv_physical_device *pdevice,
                                 const struct anv_descriptor_set_binding_layout *binding,
                                 bool sampler)
{
   if (pdevice->always_use_bindless)
      return anv_descriptor_supports_bindless(pdevice, binding, sampler);

   static const VkDescriptorBindingFlagBits flags_requiring_bindless =
      VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT |
      VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT |
      VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT;

   return (binding->flags & flags_requiring_bindless) != 0;
}

void anv_GetDescriptorSetLayoutSupport(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    VkDescriptorSetLayoutSupport*               pSupport)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   const struct anv_physical_device *pdevice = device->physical;

   uint32_t surface_count[MESA_VULKAN_SHADER_STAGES] = { 0, };
   VkDescriptorType varying_desc_type = VK_DESCRIPTOR_TYPE_MAX_ENUM;
   bool needs_descriptor_buffer = false;

   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);
   const VkMutableDescriptorTypeCreateInfoVALVE *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE);

   for (uint32_t b = 0; b < pCreateInfo->bindingCount; b++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[b];

      VkDescriptorBindingFlags flags = 0;
      if (binding_flags_info && binding_flags_info->bindingCount > 0) {
         assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
         flags = binding_flags_info->pBindingFlags[b];
      }

      enum anv_descriptor_data desc_data =
         binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE ?
         anv_descriptor_data_for_mutable_type(pdevice, mutable_info, b) :
         anv_descriptor_data_for_type(pdevice, binding->descriptorType);

      if (anv_needs_descriptor_buffer(binding->descriptorType, desc_data))
         needs_descriptor_buffer = true;

      if (flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)
         varying_desc_type = binding->descriptorType;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         /* There is no real limit on samplers */
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         /* Inline uniforms don't use a binding */
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         if (anv_descriptor_data_supports_bindless(pdevice, desc_data, false))
            break;

         if (binding->pImmutableSamplers) {
            for (uint32_t i = 0; i < binding->descriptorCount; i++) {
               ANV_FROM_HANDLE(anv_sampler, sampler,
                               binding->pImmutableSamplers[i]);
               anv_foreach_stage(s, binding->stageFlags)
                  surface_count[s] += sampler->n_planes;
            }
         } else {
            anv_foreach_stage(s, binding->stageFlags)
               surface_count[s] += binding->descriptorCount;
         }
         break;

      default:
         if (anv_descriptor_data_supports_bindless(pdevice, desc_data, false))
            break;

         anv_foreach_stage(s, binding->stageFlags)
            surface_count[s] += binding->descriptorCount;
         break;
      }
   }

   for (unsigned s = 0; s < ARRAY_SIZE(surface_count); s++) {
      if (needs_descriptor_buffer)
         surface_count[s] += 1;
   }

   VkDescriptorSetVariableDescriptorCountLayoutSupport *vdcls =
      vk_find_struct(pSupport->pNext,
                     DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT);
   if (vdcls != NULL) {
      if (varying_desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         vdcls->maxVariableDescriptorCount = MAX_INLINE_UNIFORM_BLOCK_SIZE;
      } else if (varying_desc_type != VK_DESCRIPTOR_TYPE_MAX_ENUM) {
         vdcls->maxVariableDescriptorCount = UINT16_MAX;
      } else {
         vdcls->maxVariableDescriptorCount = 0;
      }
   }

   bool supported = true;
   for (unsigned s = 0; s < ARRAY_SIZE(surface_count); s++) {
      /* Our maximum binding table size is 240 and we need to reserve 8 for
       * render targets.
       */
      if (surface_count[s] > MAX_BINDING_TABLE_SIZE - MAX_RTS)
         supported = false;
   }

   pSupport->supported = supported;
}

VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorSetLayout*                      pSetLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t num_bindings = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);

      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       *    pImmutableSamplers can be used to initialize a set of immutable
       *    samplers. [...]  If descriptorType is not one of these descriptor
       *    types, then pImmutableSamplers is ignored."
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;
      if ((desc_type == VK_DESCRIPTOR_TYPE_SAMPLER ||
           desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
          pCreateInfo->pBindings[j].pImmutableSamplers)
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   /* We need to allocate descriptor set layouts off the device allocator
    * with DEVICE scope because they are reference counted and may not be
    * destroyed when vkDestroyDescriptorSetLayout is called.
    */
   VK_MULTIALLOC(ma);
   VK_MULTIALLOC_DECL(&ma, struct anv_descriptor_set_layout, set_layout, 1);
   VK_MULTIALLOC_DECL(&ma, struct anv_descriptor_set_binding_layout,
                           bindings, num_bindings);
   VK_MULTIALLOC_DECL(&ma, struct anv_sampler *, samplers,
                           immutable_sampler_count);

   if (!vk_object_multizalloc(&device->vk, &ma, NULL,
                              VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT))
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->ref_cnt = 1;
   set_layout->binding_count = num_bindings;

   for (uint32_t b = 0; b < num_bindings; b++) {
      /* Initialize all binding_layout entries to -1 */
      memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));

      set_layout->binding[b].flags = 0;
      set_layout->binding[b].data = 0;
      set_layout->binding[b].max_plane_count = 0;
      set_layout->binding[b].array_size = 0;
      set_layout->binding[b].immutable_samplers = NULL;
   }

   /* Initialize all samplers to 0 */
   memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));

   uint32_t buffer_view_count = 0;
   uint32_t dynamic_offset_count = 0;
   uint32_t descriptor_buffer_size = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;
      /* We temporarily store pCreateInfo->pBindings[] index (plus one) in the
       * immutable_samplers pointer.  This provides us with a quick-and-dirty
       * way to sort the bindings by binding number.
       */
      set_layout->binding[b].immutable_samplers = (void *)(uintptr_t)(j + 1);
   }

   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);

   const VkMutableDescriptorTypeCreateInfoVALVE *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE);

   for (uint32_t b = 0; b < num_bindings; b++) {
      /* We stashed the pCreateInfo->pBindings[] index (plus one) in the
       * immutable_samplers pointer.  Check for NULL (empty binding) and then
       * reset it and compute the index.
       */
      if (set_layout->binding[b].immutable_samplers == NULL)
         continue;
      const uint32_t info_idx =
         (uintptr_t)(void *)set_layout->binding[b].immutable_samplers - 1;
      set_layout->binding[b].immutable_samplers = NULL;

      const VkDescriptorSetLayoutBinding *binding =
         &pCreateInfo->pBindings[info_idx];

      if (binding->descriptorCount == 0)
         continue;

      set_layout->binding[b].type = binding->descriptorType;

      if (binding_flags_info && binding_flags_info->bindingCount > 0) {
         assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
         set_layout->binding[b].flags =
            binding_flags_info->pBindingFlags[info_idx];

         /* From the Vulkan spec:
          *
          *    "If VkDescriptorSetLayoutCreateInfo::flags includes
          *    VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, then
          *    all elements of pBindingFlags must not include
          *    VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
          *    VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, or
          *    VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT"
          */
         if (pCreateInfo->flags &
             VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR) {
            assert(!(set_layout->binding[b].flags &
               (VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT |
                VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT |
                VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)));
         }
      }

      set_layout->binding[b].data =
         binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE ?
         anv_descriptor_data_for_mutable_type(device->physical, mutable_info, b) :
         anv_descriptor_data_for_type(device->physical, binding->descriptorType);

      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].descriptor_index = set_layout->descriptor_count;
      set_layout->descriptor_count += binding->descriptorCount;

      if (set_layout->binding[b].data & ANV_DESCRIPTOR_BUFFER_VIEW) {
         set_layout->binding[b].buffer_view_index = buffer_view_count;
         buffer_view_count += binding->descriptorCount;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_MUTABLE_VALVE:
         set_layout->binding[b].max_plane_count = 1;
         if (binding->pImmutableSamplers) {
            set_layout->binding[b].immutable_samplers = samplers;
            samplers += binding->descriptorCount;

            for (uint32_t i = 0; i < binding->descriptorCount; i++) {
               ANV_FROM_HANDLE(anv_sampler, sampler,
                               binding->pImmutableSamplers[i]);

               set_layout->binding[b].immutable_samplers[i] = sampler;
               if (set_layout->binding[b].max_plane_count < sampler->n_planes)
                  set_layout->binding[b].max_plane_count = sampler->n_planes;
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         set_layout->binding[b].max_plane_count = 1;
         break;

      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
         set_layout->dynamic_offset_stages[dynamic_offset_count] = binding->stageFlags;
         dynamic_offset_count += binding->descriptorCount;
         assert(dynamic_offset_count < MAX_DYNAMIC_BUFFERS);
         break;

      default:
         break;
      }

      set_layout->binding[b].descriptor_stride =
         binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE ?
         anv_descriptor_size_for_mutable_type(device->physical, mutable_info, b) :
         anv_descriptor_size(&set_layout->binding[b]);

      if (binding->descriptorType ==
          VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         /* Inline uniform blocks are specified to use the descriptor array
          * size as the size in bytes of the block.
          */
         descriptor_buffer_size = align_u32(descriptor_buffer_size,
                                            ANV_UBO_ALIGNMENT);
         set_layout->binding[b].descriptor_offset = descriptor_buffer_size;
         descriptor_buffer_size += binding->descriptorCount;
      } else {
         set_layout->binding[b].descriptor_offset = descriptor_buffer_size;
         descriptor_buffer_size +=
            set_layout->binding[b].descriptor_stride * binding->descriptorCount;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

   set_layout->buffer_view_count = buffer_view_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;
   set_layout->descriptor_buffer_size = descriptor_buffer_size;

   *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

void
anv_descriptor_set_layout_destroy(struct anv_device *device,
                                  struct anv_descriptor_set_layout *layout)
{
   assert(layout->ref_cnt == 0);
   vk_object_free(&device->vk, NULL, layout);
}

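/* Return the last binding of the layout if it was created with
 * VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT, NULL otherwise.
 * Vulkan only allows the variable count flag on the highest-numbered
 * binding, so the last one is the only candidate.
 */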
static const struct anv_descriptor_set_binding_layout *
set_layout_dynamic_binding(const struct anv_descriptor_set_layout *set_layout)
{
   if (set_layout->binding_count == 0)
      return NULL;

   const struct anv_descriptor_set_binding_layout *last_binding =
      &set_layout->binding[set_layout->binding_count - 1];
   if (!(last_binding->flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT))
      return NULL;

   return last_binding;
}

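/* Number of descriptors in a set allocated from this layout, taking the
 * variable descriptor count of the last binding (if any) into account.
 */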
static uint32_t
set_layout_descriptor_count(const struct anv_descriptor_set_layout *set_layout,
                            uint32_t var_desc_count)
{
   const struct anv_descriptor_set_binding_layout *dynamic_binding =
      set_layout_dynamic_binding(set_layout);
   if (dynamic_binding == NULL)
      return set_layout->descriptor_count;

   assert(var_desc_count <= dynamic_binding->array_size);
   uint32_t shrink = dynamic_binding->array_size - var_desc_count;

   if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
      return set_layout->descriptor_count;

   return set_layout->descriptor_count - shrink;
}

static uint32_t
set_layout_buffer_view_count(const struct anv_descriptor_set_layout *set_layout,
                             uint32_t var_desc_count)
{
   const struct anv_descriptor_set_binding_layout *dynamic_binding =
      set_layout_dynamic_binding(set_layout);
   if (dynamic_binding == NULL)
      return set_layout->buffer_view_count;

   assert(var_desc_count <= dynamic_binding->array_size);
   uint32_t shrink = dynamic_binding->array_size - var_desc_count;

   if (!(dynamic_binding->data & ANV_DESCRIPTOR_BUFFER_VIEW))
      return set_layout->buffer_view_count;

   return set_layout->buffer_view_count - shrink;
}

uint32_t
anv_descriptor_set_layout_descriptor_buffer_size(const struct anv_descriptor_set_layout *set_layout,
                                                 uint32_t var_desc_count)
{
   const struct anv_descriptor_set_binding_layout *dynamic_binding =
      set_layout_dynamic_binding(set_layout);
   if (dynamic_binding == NULL)
      return ALIGN(set_layout->descriptor_buffer_size, ANV_UBO_ALIGNMENT);

   assert(var_desc_count <= dynamic_binding->array_size);
   uint32_t shrink = dynamic_binding->array_size - var_desc_count;
   uint32_t set_size;

   if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
      /* Inline uniform blocks are specified to use the descriptor array
       * size as the size in bytes of the block.
       */
      set_size = set_layout->descriptor_buffer_size - shrink;
   } else {
      set_size = set_layout->descriptor_buffer_size -
                 shrink * dynamic_binding->descriptor_stride;
   }

   return ALIGN(set_size, ANV_UBO_ALIGNMENT);
}

void anv_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
    VkDescriptorSetLayout                       _set_layout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   anv_descriptor_set_layout_unref(device, set_layout);
}

#define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, &(x), sizeof(x));

static void
sha1_update_immutable_sampler(struct mesa_sha1 *ctx,
                              const struct anv_sampler *sampler)
{
   if (!sampler->conversion)
      return;

   /* The only thing that affects the shader is ycbcr conversion */
   _mesa_sha1_update(ctx, sampler->conversion,
                     sizeof(*sampler->conversion));
}

static void
sha1_update_descriptor_set_binding_layout(struct mesa_sha1 *ctx,
   const struct anv_descriptor_set_binding_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->flags);
   SHA1_UPDATE_VALUE(ctx, layout->data);
   SHA1_UPDATE_VALUE(ctx, layout->max_plane_count);
   SHA1_UPDATE_VALUE(ctx, layout->array_size);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_index);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_index);
   SHA1_UPDATE_VALUE(ctx, layout->buffer_view_index);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_offset);

   if (layout->immutable_samplers) {
      for (uint16_t i = 0; i < layout->array_size; i++)
         sha1_update_immutable_sampler(ctx, layout->immutable_samplers[i]);
   }
}

static void
sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
                                  const struct anv_descriptor_set_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->binding_count);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_count);
   SHA1_UPDATE_VALUE(ctx, layout->shader_stages);
   SHA1_UPDATE_VALUE(ctx, layout->buffer_view_count);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_count);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_buffer_size);

   for (uint16_t i = 0; i < layout->binding_count; i++)
      sha1_update_descriptor_set_binding_layout(ctx, &layout->binding[i]);
}

/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */

VkResult anv_CreatePipelineLayout(
    VkDevice                                    _device,
    const VkPipelineLayoutCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkPipelineLayout*                           pPipelineLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_pipeline_layout *layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_object_zalloc(&device->vk, pAllocator, sizeof(*layout),
                             VK_OBJECT_TYPE_PIPELINE_LAYOUT);
   if (layout == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   unsigned dynamic_offset_count = 0;

   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
                      pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;
      anv_descriptor_set_layout_ref(set_layout);

      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      dynamic_offset_count += set_layout->dynamic_offset_count;
   }
   assert(dynamic_offset_count < MAX_DYNAMIC_BUFFERS);

   struct mesa_sha1 ctx;
   _mesa_sha1_init(&ctx);
   for (unsigned s = 0; s < layout->num_sets; s++) {
      sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
      _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
                        sizeof(layout->set[s].dynamic_offset_start));
   }
   _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
   _mesa_sha1_final(&ctx, layout->sha1);

   *pPipelineLayout = anv_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void anv_DestroyPipelineLayout(
    VkDevice                                    _device,
    VkPipelineLayout                            _pipelineLayout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;

   for (uint32_t i = 0; i < pipeline_layout->num_sets; i++)
      anv_descriptor_set_layout_unref(device, pipeline_layout->set[i].layout);

   vk_object_free(&device->vk, pAllocator, pipeline_layout);
}

/*
 * Descriptor pools.
 *
 * These are implemented using a big pool of memory and a free-list for the
 * host memory allocations and a state_stream and a free list for the buffer
 * view surface state. The spec allows us to fail to allocate due to
 * fragmentation in all cases but two: 1) after pool reset, allocating up
 * until the pool size with no freeing must succeed and 2) allocating and
 * freeing only descriptor sets with the same layout. Case 1) is easy enough,
 * and the free lists let us recycle blocks for case 2).
 */

/* The vma heap reserves 0 to mean NULL; we have to offset by some amount to
 * ensure we can allocate the entire BO without hitting zero.  The actual
 * amount doesn't matter.
 */
#define POOL_HEAP_OFFSET 64

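/* Sentinel for an empty descriptor set free list.  Offset 0 into the pool's
 * data is a valid allocation, so it cannot serve as the end-of-list marker.
 */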
#define EMPTY 1

VkResult anv_CreateDescriptorPool(
    VkDevice                                    _device,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorPool*                           pDescriptorPool)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_pool *pool;

   const VkDescriptorPoolInlineUniformBlockCreateInfo *inline_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO);
   const VkMutableDescriptorTypeCreateInfoVALVE *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE);

   uint32_t descriptor_count = 0;
   uint32_t buffer_view_count = 0;
   uint32_t descriptor_bo_size = 0;

   for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
      enum anv_descriptor_data desc_data =
         pCreateInfo->pPoolSizes[i].type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE ?
         anv_descriptor_data_for_mutable_type(device->physical, mutable_info, i) :
         anv_descriptor_data_for_type(device->physical, pCreateInfo->pPoolSizes[i].type);

      if (desc_data & ANV_DESCRIPTOR_BUFFER_VIEW)
         buffer_view_count += pCreateInfo->pPoolSizes[i].descriptorCount;

      unsigned desc_data_size =
         pCreateInfo->pPoolSizes[i].type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE ?
         anv_descriptor_size_for_mutable_type(device->physical, mutable_info, i) :
         anv_descriptor_data_size(desc_data);

      desc_data_size *= pCreateInfo->pPoolSizes[i].descriptorCount;

      /* Combined image sampler descriptors can take up to 3 slots if they
       * hold a YCbCr image.
       */
      if (pCreateInfo->pPoolSizes[i].type ==
          VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
         desc_data_size *= 3;

      if (pCreateInfo->pPoolSizes[i].type ==
          VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         /* Inline uniform blocks are specified to use the descriptor array
          * size as the size in bytes of the block.
          */
         assert(inline_info);
         desc_data_size += pCreateInfo->pPoolSizes[i].descriptorCount;
      }

      descriptor_bo_size += desc_data_size;

      descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
   }
   /* We have to align descriptor buffer allocations to 32B so that we can
    * push descriptor buffers.  This means that each descriptor buffer
    * allocated may burn up to 32B of extra space to get the right alignment.
    * (Technically, it's at most 28B because we're always going to start at
    * least 4B aligned but we're being conservative here.)  Allocate enough
    * extra space that we can chop it into maxSets pieces and align each one
    * of them to 32B.
    */
   descriptor_bo_size += ANV_UBO_ALIGNMENT * pCreateInfo->maxSets;
   /* We align inline uniform blocks to ANV_UBO_ALIGNMENT */
   if (inline_info) {
      descriptor_bo_size +=
         ANV_UBO_ALIGNMENT * inline_info->maxInlineUniformBlockBindings;
   }
   descriptor_bo_size = ALIGN(descriptor_bo_size, 4096);

   const size_t pool_size =
      pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +
      descriptor_count * sizeof(struct anv_descriptor) +
      buffer_view_count * sizeof(struct anv_buffer_view);
   const size_t total_size = sizeof(*pool) + pool_size;

   pool = vk_object_alloc(&device->vk, pAllocator, total_size,
                          VK_OBJECT_TYPE_DESCRIPTOR_POOL);
   if (!pool)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   pool->size = pool_size;
   pool->next = 0;
   pool->free_list = EMPTY;
   pool->host_only = pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE;

   if (descriptor_bo_size > 0) {
      VkResult result = anv_device_alloc_bo(device,
                                            "descriptors",
                                            descriptor_bo_size,
                                            ANV_BO_ALLOC_MAPPED |
                                            ANV_BO_ALLOC_SNOOPED,
                                            0 /* explicit_address */,
                                            &pool->bo);
      if (result != VK_SUCCESS) {
         vk_object_free(&device->vk, pAllocator, pool);
         return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
      }

      util_vma_heap_init(&pool->bo_heap, POOL_HEAP_OFFSET, descriptor_bo_size);
   } else {
      pool->bo = NULL;
   }

   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   list_inithead(&pool->desc_sets);

   *pDescriptorPool = anv_descriptor_pool_to_handle(pool);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   list_for_each_entry_safe(struct anv_descriptor_set, set,
                            &pool->desc_sets, pool_link) {
      anv_descriptor_set_layout_unref(device, set->layout);
   }

   if (pool->bo) {
      util_vma_heap_finish(&pool->bo_heap);
      anv_device_release_bo(device, pool->bo);
   }
   anv_state_stream_finish(&pool->surface_state_stream);

   vk_object_free(&device->vk, pAllocator, pool);
}

VkResult anv_ResetDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    VkDescriptorPoolResetFlags                  flags)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   list_for_each_entry_safe(struct anv_descriptor_set, set,
                            &pool->desc_sets, pool_link) {
      anv_descriptor_set_layout_unref(device, set->layout);
   }
   list_inithead(&pool->desc_sets);

   pool->next = 0;
   pool->free_list = EMPTY;

   if (pool->bo) {
      util_vma_heap_finish(&pool->bo_heap);
      util_vma_heap_init(&pool->bo_heap, POOL_HEAP_OFFSET, pool->bo->size);
   }

   anv_state_stream_finish(&pool->surface_state_stream);
   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   return VK_SUCCESS;
}

struct pool_free_list_entry {
   uint32_t next;
   uint32_t size;
};

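/* Allocate host memory for a descriptor set: first try bump-allocating from
 * pool->next, then fall back to a first-fit walk of the free list.  On
 * failure we return VK_ERROR_FRAGMENTED_POOL if the free list was non-empty
 * (previous frees fragmented the pool) and VK_ERROR_OUT_OF_POOL_MEMORY
 * otherwise.
 */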
static VkResult
anv_descriptor_pool_alloc_set(struct anv_descriptor_pool *pool,
                              uint32_t size,
                              struct anv_descriptor_set **set)
{
   if (size <= pool->size - pool->next) {
      *set = (struct anv_descriptor_set *) (pool->data + pool->next);
      (*set)->size = size;
      pool->next += size;
      return VK_SUCCESS;
   } else {
      struct pool_free_list_entry *entry;
      uint32_t *link = &pool->free_list;
      for (uint32_t f = pool->free_list; f != EMPTY; f = entry->next) {
         entry = (struct pool_free_list_entry *) (pool->data + f);
         if (size <= entry->size) {
            *link = entry->next;
            *set = (struct anv_descriptor_set *) entry;
            (*set)->size = entry->size;
            return VK_SUCCESS;
         }
         link = &entry->next;
      }

      if (pool->free_list != EMPTY) {
         return VK_ERROR_FRAGMENTED_POOL;
      } else {
         return VK_ERROR_OUT_OF_POOL_MEMORY;
      }
   }
}

static void
anv_descriptor_pool_free_set(struct anv_descriptor_pool *pool,
                             struct anv_descriptor_set *set)
{
   /* Put the descriptor set allocation back on the free list. */
   const uint32_t index = (char *) set - pool->data;
   if (index + set->size == pool->next) {
      pool->next = index;
   } else {
      struct pool_free_list_entry *entry = (struct pool_free_list_entry *) set;
      entry->next = pool->free_list;
      entry->size = set->size;
      pool->free_list = (char *) entry - pool->data;
   }
}

struct surface_state_free_list_entry {
   void *next;
   struct anv_state state;
};

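/* Surface states for descriptor buffers and buffer views are fixed 64-byte
 * allocations from the pool's state stream; freed ones are recycled through
 * a free list linked through the states' own CPU mappings.
 */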
static struct anv_state
anv_descriptor_pool_alloc_state(struct anv_descriptor_pool *pool)
{
   assert(!pool->host_only);

   struct surface_state_free_list_entry *entry =
      pool->surface_state_free_list;

   if (entry) {
      struct anv_state state = entry->state;
      pool->surface_state_free_list = entry->next;
      assert(state.alloc_size == 64);
      return state;
   } else {
      return anv_state_stream_alloc(&pool->surface_state_stream, 64, 64);
   }
}

static void
anv_descriptor_pool_free_state(struct anv_descriptor_pool *pool,
                               struct anv_state state)
{
   assert(state.alloc_size);
   /* Put the buffer view surface state back on the free list. */
   struct surface_state_free_list_entry *entry = state.map;
   entry->next = pool->surface_state_free_list;
   entry->state = state;
   pool->surface_state_free_list = entry;
}

size_t
anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout *layout,
                               uint32_t var_desc_count)
{
   const uint32_t descriptor_count =
      set_layout_descriptor_count(layout, var_desc_count);
   const uint32_t buffer_view_count =
      set_layout_buffer_view_count(layout, var_desc_count);

   return sizeof(struct anv_descriptor_set) +
          descriptor_count * sizeof(struct anv_descriptor) +
          buffer_view_count * sizeof(struct anv_buffer_view);
}

static VkResult
anv_descriptor_set_create(struct anv_device *device,
                          struct anv_descriptor_pool *pool,
                          struct anv_descriptor_set_layout *layout,
                          uint32_t var_desc_count,
                          struct anv_descriptor_set **out_set)
{
   struct anv_descriptor_set *set;
   const size_t size = anv_descriptor_set_layout_size(layout, var_desc_count);

   VkResult result = anv_descriptor_pool_alloc_set(pool, size, &set);
   if (result != VK_SUCCESS)
      return result;

   uint32_t descriptor_buffer_size =
      anv_descriptor_set_layout_descriptor_buffer_size(layout, var_desc_count);

   set->desc_surface_state = ANV_STATE_NULL;

   if (descriptor_buffer_size) {
      uint64_t pool_vma_offset =
         util_vma_heap_alloc(&pool->bo_heap, descriptor_buffer_size,
                             ANV_UBO_ALIGNMENT);
      if (pool_vma_offset == 0) {
         anv_descriptor_pool_free_set(pool, set);
         return vk_error(pool, VK_ERROR_FRAGMENTED_POOL);
      }
      assert(pool_vma_offset >= POOL_HEAP_OFFSET &&
             pool_vma_offset - POOL_HEAP_OFFSET <= INT32_MAX);
      set->desc_mem.offset = pool_vma_offset - POOL_HEAP_OFFSET;
      set->desc_mem.alloc_size = descriptor_buffer_size;
      set->desc_mem.map = pool->bo->map + set->desc_mem.offset;

      set->desc_addr = (struct anv_address) {
         .bo = pool->bo,
         .offset = set->desc_mem.offset,
      };

      enum isl_format format =
         anv_isl_format_for_descriptor_type(device,
                                            VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);

      if (!pool->host_only) {
         set->desc_surface_state = anv_descriptor_pool_alloc_state(pool);
         anv_fill_buffer_surface_state(device, set->desc_surface_state,
                                       format, ISL_SWIZZLE_IDENTITY,
                                       ISL_SURF_USAGE_CONSTANT_BUFFER_BIT,
                                       set->desc_addr,
                                       descriptor_buffer_size, 1);
      }
   } else {
      set->desc_mem = ANV_STATE_NULL;
      set->desc_addr = (struct anv_address) { .bo = NULL, .offset = 0 };
   }

   vk_object_base_init(&device->vk, &set->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_SET);
   set->pool = pool;
   set->layout = layout;
   anv_descriptor_set_layout_ref(layout);

   set->buffer_view_count =
      set_layout_buffer_view_count(layout, var_desc_count);
   set->descriptor_count =
      set_layout_descriptor_count(layout, var_desc_count);

   set->buffer_views =
      (struct anv_buffer_view *) &set->descriptors[set->descriptor_count];

   /* By defining the descriptors to be zero now, we can later verify that
    * a descriptor has not been populated with user data.
    */
   memset(set->descriptors, 0,
          sizeof(struct anv_descriptor) * set->descriptor_count);

   /* Go through and fill out immutable samplers if we have any */
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers) {
         for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
            /* The type will get changed to COMBINED_IMAGE_SAMPLER in
             * UpdateDescriptorSets if needed.  However, if the descriptor
             * set has an immutable sampler, UpdateDescriptorSets may never
             * touch it, so we need to make sure it's 100% valid now.
             *
             * We don't need to actually provide a sampler because the helper
             * will always write in the immutable sampler regardless of what
             * is in the sampler parameter.
             */
            VkDescriptorImageInfo info = { };
            anv_descriptor_set_write_image_view(device, set, &info,
                                                VK_DESCRIPTOR_TYPE_SAMPLER,
                                                b, i);
         }
      }
   }

   /* Allocate surface states for the buffer views up front, since we fill
    * them out lazily in the write anyway.
    */
   if (!pool->host_only) {
      for (uint32_t b = 0; b < set->buffer_view_count; b++) {
         set->buffer_views[b].surface_state =
            anv_descriptor_pool_alloc_state(pool);
      }
   }

   list_addtail(&set->pool_link, &pool->desc_sets);

   *out_set = set;

   return VK_SUCCESS;
}

1246 static void
anv_descriptor_set_destroy(struct anv_device * device,struct anv_descriptor_pool * pool,struct anv_descriptor_set * set)1247 anv_descriptor_set_destroy(struct anv_device *device,
1248                            struct anv_descriptor_pool *pool,
1249                            struct anv_descriptor_set *set)
1250 {
1251    anv_descriptor_set_layout_unref(device, set->layout);
1252 
1253    if (set->desc_mem.alloc_size) {
1254       util_vma_heap_free(&pool->bo_heap,
1255                          (uint64_t)set->desc_mem.offset + POOL_HEAP_OFFSET,
1256                          set->desc_mem.alloc_size);
1257       if (set->desc_surface_state.alloc_size)
1258          anv_descriptor_pool_free_state(pool, set->desc_surface_state);
1259    }
1260 
1261    if (!pool->host_only) {
1262       for (uint32_t b = 0; b < set->buffer_view_count; b++) {
1263          if (set->buffer_views[b].surface_state.alloc_size)
1264             anv_descriptor_pool_free_state(pool, set->buffer_views[b].surface_state);
1265       }
1266    }
1267 
1268    list_del(&set->pool_link);
1269 
1270    vk_object_base_finish(&set->base);
1271    anv_descriptor_pool_free_set(pool, set);
1272 }
1273 
anv_AllocateDescriptorSets(VkDevice _device,const VkDescriptorSetAllocateInfo * pAllocateInfo,VkDescriptorSet * pDescriptorSets)1274 VkResult anv_AllocateDescriptorSets(
1275     VkDevice                                    _device,
1276     const VkDescriptorSetAllocateInfo*          pAllocateInfo,
1277     VkDescriptorSet*                            pDescriptorSets)
1278 {
1279    ANV_FROM_HANDLE(anv_device, device, _device);
1280    ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);
1281 
1282    VkResult result = VK_SUCCESS;
1283    struct anv_descriptor_set *set = NULL;
1284    uint32_t i;
1285 
   const VkDescriptorSetVariableDescriptorCountAllocateInfo *vdcai =
      vk_find_struct_const(pAllocateInfo->pNext,
                           DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO);

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      uint32_t var_desc_count = 0;
      if (vdcai != NULL && vdcai->descriptorSetCount > 0) {
         assert(vdcai->descriptorSetCount == pAllocateInfo->descriptorSetCount);
         var_desc_count = vdcai->pDescriptorCounts[i];
      }

      result = anv_descriptor_set_create(device, pool, layout,
                                         var_desc_count, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS) {
      anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);
      /* The Vulkan 1.3.228 spec, section 14.2.3. Allocation of Descriptor Sets:
       *
       *   "If the creation of any of those descriptor sets fails, then the
       *    implementation must destroy all successfully created descriptor
       *    set objects from this command, set all entries of the
       *    pDescriptorSets array to VK_NULL_HANDLE and return the error."
       */
      for (i = 0; i < pAllocateInfo->descriptorSetCount; i++)
         pDescriptorSets[i] = VK_NULL_HANDLE;
   }

   return result;
}

VkResult anv_FreeDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);

      if (!set)
         continue;

      anv_descriptor_set_destroy(device, pool, set);
   }

   return VK_SUCCESS;
}

static void
anv_descriptor_set_write_image_param(uint32_t *param_desc_map,
                                     const struct brw_image_param *param)
{
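   /* Copy each array-valued field of brw_image_param into the descriptor
    * map, dword by dword, at the offset the compiler expects for that field
    * (the BRW_IMAGE_PARAM_*_OFFSET constants).
    */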
#define WRITE_PARAM_FIELD(field, FIELD) \
   for (unsigned i = 0; i < ARRAY_SIZE(param->field); i++) \
      param_desc_map[BRW_IMAGE_PARAM_##FIELD##_OFFSET + i] = param->field[i]

   WRITE_PARAM_FIELD(offset, OFFSET);
   WRITE_PARAM_FIELD(size, SIZE);
   WRITE_PARAM_FIELD(stride, STRIDE);
   WRITE_PARAM_FIELD(tiling, TILING);
   WRITE_PARAM_FIELD(swizzling, SWIZZLING);

#undef WRITE_PARAM_FIELD
}

static uint32_t
anv_surface_state_to_handle(struct anv_state state)
{
   /* Bits 31:12 of the bindless surface offset in the extended message
    * descriptor are bits 25:6 of the byte-based address.
    */
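   /* Worked example (illustrative): a surface state at byte offset 0x1000
    * passes the asserts below (64-byte aligned and below 1 << 26) and yields
    * the handle 0x1000 << 6 = 0x40000, i.e. address bit 12 lands in handle
    * bit 18, inside the 31:12 window.
    */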
   assert(state.offset >= 0);
   uint32_t offset = state.offset;
   assert((offset & 0x3f) == 0 && offset < (1 << 26));
   return offset << 6;
}

void
anv_descriptor_set_write_image_view(struct anv_device *device,
                                    struct anv_descriptor_set *set,
                                    const VkDescriptorImageInfo * const info,
                                    VkDescriptorType type,
                                    uint32_t binding,
                                    uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];
   struct anv_image_view *image_view = NULL;
   struct anv_sampler *sampler = NULL;

   /* We get called with just VK_DESCRIPTOR_TYPE_SAMPLER as part of descriptor
    * set initialization to set the bindless samplers.
    */
   assert(type == bind_layout->type ||
          type == VK_DESCRIPTOR_TYPE_SAMPLER ||
          bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE);

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      sampler = bind_layout->immutable_samplers ?
                bind_layout->immutable_samplers[element] :
                anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      image_view = anv_image_view_from_handle(info->imageView);
      sampler = bind_layout->immutable_samplers ?
                bind_layout->immutable_samplers[element] :
                anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      image_view = anv_image_view_from_handle(info->imageView);
      break;

   default:
      unreachable("invalid descriptor type");
   }

   *desc = (struct anv_descriptor) {
      .type = type,
      .layout = info->imageLayout,
      .image_view = image_view,
      .sampler = sampler,
   };

   if (set->pool && set->pool->host_only)
      return;

   void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
                    element * bind_layout->descriptor_stride;
   memset(desc_map, 0, bind_layout->descriptor_stride);
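   /* VK_DESCRIPTOR_TYPE_MUTABLE_VALVE bindings carry no fixed descriptor
    * data in the layout, so it is derived here from the type actually being
    * written instead.
    */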
   enum anv_descriptor_data data =
      bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE ?
      anv_descriptor_data_for_type(device->physical, type) :
      bind_layout->data;

   if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
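      /* One image/sampler handle pair per plane; multi-planar (e.g. YCbCr)
       * formats can have up to three planes, hence the array of three.
       */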
      struct anv_sampled_image_descriptor desc_data[3];
      memset(desc_data, 0, sizeof(desc_data));

      if (image_view) {
         for (unsigned p = 0; p < image_view->n_planes; p++) {
            struct anv_surface_state sstate =
               (desc->layout == VK_IMAGE_LAYOUT_GENERAL) ?
               image_view->planes[p].general_sampler_surface_state :
               image_view->planes[p].optimal_sampler_surface_state;
            desc_data[p].image = anv_surface_state_to_handle(sstate.state);
         }
      }

      if (sampler) {
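         /* The bindless SAMPLER_STATE objects for the planes of a
          * multi-plane sampler are presumably laid out back to back,
          * 32 bytes apart, so plane p lives at
          * bindless_state.offset + p * 32.
          */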
         for (unsigned p = 0; p < sampler->n_planes; p++)
            desc_data[p].sampler = sampler->bindless_state.offset + p * 32;
      }

      /* We may have max_plane_count == 0 if this isn't a sampled image, but
       * it can be no more than the size of our array of handles.
       */
      assert(bind_layout->max_plane_count <= ARRAY_SIZE(desc_data));
      memcpy(desc_map, desc_data,
             MAX2(1, bind_layout->max_plane_count) * sizeof(desc_data[0]));
   }

   if (image_view == NULL)
      return;

   if (data & ANV_DESCRIPTOR_STORAGE_IMAGE) {
      assert(!(data & ANV_DESCRIPTOR_IMAGE_PARAM));
      assert(image_view->n_planes == 1);
      struct anv_storage_image_descriptor desc_data = {
         .vanilla = anv_surface_state_to_handle(
                           image_view->planes[0].storage_surface_state.state),
         .lowered = anv_surface_state_to_handle(
                           image_view->planes[0].lowered_storage_surface_state.state),
      };
      memcpy(desc_map, &desc_data, sizeof(desc_data));
   }

   if (data & ANV_DESCRIPTOR_IMAGE_PARAM) {
      /* Storage images can only ever have one plane */
      assert(image_view->n_planes == 1);
      const struct brw_image_param *image_param =
         &image_view->planes[0].lowered_storage_image_param;

      anv_descriptor_set_write_image_param(desc_map, image_param);
   }

   if (data & ANV_DESCRIPTOR_TEXTURE_SWIZZLE) {
      assert(!(data & ANV_DESCRIPTOR_SAMPLED_IMAGE));
      assert(image_view);
      struct anv_texture_swizzle_descriptor desc_data[3];
      memset(desc_data, 0, sizeof(desc_data));

      for (unsigned p = 0; p < image_view->n_planes; p++) {
         desc_data[p] = (struct anv_texture_swizzle_descriptor) {
            .swizzle = {
               (uint8_t)image_view->planes[p].isl.swizzle.r,
               (uint8_t)image_view->planes[p].isl.swizzle.g,
               (uint8_t)image_view->planes[p].isl.swizzle.b,
               (uint8_t)image_view->planes[p].isl.swizzle.a,
            },
         };
      }
      memcpy(desc_map, desc_data,
             MAX2(1, bind_layout->max_plane_count) * sizeof(desc_data[0]));
   }
}

void
anv_descriptor_set_write_buffer_view(struct anv_device *device,
                                     struct anv_descriptor_set *set,
                                     VkDescriptorType type,
                                     struct anv_buffer_view *buffer_view,
                                     uint32_t binding,
                                     uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type ||
          bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE);

   *desc = (struct anv_descriptor) {
      .type = type,
      .buffer_view = buffer_view,
   };

   if (set->pool && set->pool->host_only)
      return;

   enum anv_descriptor_data data =
      bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE ?
      anv_descriptor_data_for_type(device->physical, type) :
      bind_layout->data;

   void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
                    element * bind_layout->descriptor_stride;

   if (buffer_view == NULL) {
      memset(desc_map, 0, bind_layout->descriptor_stride);
      return;
   }

   if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
      struct anv_sampled_image_descriptor desc_data = {
         .image = anv_surface_state_to_handle(buffer_view->surface_state),
      };
      memcpy(desc_map, &desc_data, sizeof(desc_data));
   }

   if (data & ANV_DESCRIPTOR_STORAGE_IMAGE) {
      assert(!(data & ANV_DESCRIPTOR_IMAGE_PARAM));
      struct anv_storage_image_descriptor desc_data = {
         .vanilla = anv_surface_state_to_handle(
                           buffer_view->storage_surface_state),
         .lowered = anv_surface_state_to_handle(
                           buffer_view->lowered_storage_surface_state),
      };
      memcpy(desc_map, &desc_data, sizeof(desc_data));
   }

   if (data & ANV_DESCRIPTOR_IMAGE_PARAM) {
      anv_descriptor_set_write_image_param(desc_map,
         &buffer_view->lowered_storage_image_param);
   }
}

void
anv_descriptor_set_write_buffer(struct anv_device *device,
                                struct anv_descriptor_set *set,
                                struct anv_state_stream *alloc_stream,
                                VkDescriptorType type,
                                struct anv_buffer *buffer,
                                uint32_t binding,
                                uint32_t element,
                                VkDeviceSize offset,
                                VkDeviceSize range)
{
   assert(alloc_stream || set->pool);

   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type ||
          bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE);

   *desc = (struct anv_descriptor) {
      .type = type,
      .offset = offset,
      .range = range,
      .buffer = buffer,
   };

   if (set->pool && set->pool->host_only)
      return;

   void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
                    element * bind_layout->descriptor_stride;

   if (buffer == NULL) {
      memset(desc_map, 0, bind_layout->descriptor_stride);
      return;
   }

   struct anv_address bind_addr = anv_address_add(buffer->address, offset);
   uint64_t bind_range = vk_buffer_range(&buffer->vk, offset, range);
   enum anv_descriptor_data data =
      bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE ?
      anv_descriptor_data_for_type(device->physical, type) :
      bind_layout->data;

   /* We report a bounds checking alignment of 32B for the sake of block
    * messages which read an entire register worth at a time.
    */
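   /* E.g. (illustrative) a 20-byte UBO range is padded up to the next
    * multiple of ANV_UBO_ALIGNMENT below, so out-of-bounds checks land on a
    * boundary the hardware's block loads can handle.
    */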
   if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
       type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)
      bind_range = align_u64(bind_range, ANV_UBO_ALIGNMENT);

   if (data & ANV_DESCRIPTOR_ADDRESS_RANGE) {
      struct anv_address_range_descriptor desc_data = {
         .address = anv_address_physical(bind_addr),
         .range = bind_range,
      };
      memcpy(desc_map, &desc_data, sizeof(desc_data));
   }

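   /* Dynamic buffer descriptors don't get a buffer view here: their surface
    * state depends on the dynamic offset and is presumably set up at bind
    * time in the command buffer instead.
    */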
   if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
       type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)
      return;

   assert(data & ANV_DESCRIPTOR_BUFFER_VIEW);
   struct anv_buffer_view *bview =
      &set->buffer_views[bind_layout->buffer_view_index + element];

   bview->range = bind_range;
   bview->address = bind_addr;

   /* If we're writing descriptors through a push command, we need to
    * allocate the surface state from the command buffer. Otherwise it will
    * be allocated by the descriptor pool when calling
    * vkAllocateDescriptorSets.
    */
   if (alloc_stream) {
      bview->surface_state = anv_state_stream_alloc(alloc_stream, 64, 64);
   }

   assert(bview->surface_state.alloc_size);

   isl_surf_usage_flags_t usage =
      (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
       type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ?
      ISL_SURF_USAGE_CONSTANT_BUFFER_BIT :
      ISL_SURF_USAGE_STORAGE_BIT;

   enum isl_format format = anv_isl_format_for_descriptor_type(device, type);
   anv_fill_buffer_surface_state(device, bview->surface_state,
                                 format, ISL_SWIZZLE_IDENTITY,
                                 usage, bind_addr, bind_range, 1);
   desc->set_buffer_view = bview;
}

void
anv_descriptor_set_write_inline_uniform_data(struct anv_device *device,
                                             struct anv_descriptor_set *set,
                                             uint32_t binding,
                                             const void *data,
                                             size_t offset,
                                             size_t size)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];

   assert(bind_layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM);

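   /* For inline uniform blocks, offset and size are in bytes within the
    * block (the Vulkan spec defines dstArrayElement and descriptorCount in
    * bytes for this type), so the data can be copied directly.
    */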
   void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset;

   memcpy(desc_map + offset, data, size);
}

void
anv_descriptor_set_write_acceleration_structure(struct anv_device *device,
                                                struct anv_descriptor_set *set,
                                                struct anv_acceleration_structure *accel,
                                                uint32_t binding,
                                                uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(bind_layout->data & ANV_DESCRIPTOR_ADDRESS_RANGE);
   *desc = (struct anv_descriptor) {
      .type = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
      .accel_struct = accel,
   };

   if (set->pool && set->pool->host_only)
      return;

   struct anv_address_range_descriptor desc_data = { };
   if (accel != NULL) {
      desc_data.address = anv_address_physical(accel->address);
      desc_data.range = accel->size;
   }
   assert(sizeof(desc_data) <= bind_layout->descriptor_stride);

   void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
                    element * bind_layout->descriptor_stride;
   memcpy(desc_map, &desc_data, sizeof(desc_data));
}

void anv_UpdateDescriptorSets(
    VkDevice                                    _device,
    uint32_t                                    descriptorWriteCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    descriptorCopyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      ANV_FROM_HANDLE(anv_descriptor_set, set, write->dstSet);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            anv_descriptor_set_write_image_view(device, set,
                                                write->pImageInfo + j,
                                                write->descriptorType,
                                                write->dstBinding,
                                                write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer_view, bview,
                            write->pTexelBufferView[j]);

            anv_descriptor_set_write_buffer_view(device, set,
                                                 write->descriptorType,
                                                 bview,
                                                 write->dstBinding,
                                                 write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);

            anv_descriptor_set_write_buffer(device, set,
                                            NULL,
                                            write->descriptorType,
                                            buffer,
                                            write->dstBinding,
                                            write->dstArrayElement + j,
                                            write->pBufferInfo[j].offset,
                                            write->pBufferInfo[j].range);
         }
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK: {
         const VkWriteDescriptorSetInlineUniformBlock *inline_write =
            vk_find_struct_const(write->pNext,
                                 WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK);
         assert(inline_write->dataSize == write->descriptorCount);
         anv_descriptor_set_write_inline_uniform_data(device, set,
                                                      write->dstBinding,
                                                      inline_write->pData,
                                                      write->dstArrayElement,
                                                      inline_write->dataSize);
         break;
      }

      case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {
         const VkWriteDescriptorSetAccelerationStructureKHR *accel_write =
            vk_find_struct_const(write, WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR);
         assert(accel_write->accelerationStructureCount ==
                write->descriptorCount);
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_acceleration_structure, accel,
                            accel_write->pAccelerationStructures[j]);
            anv_descriptor_set_write_acceleration_structure(device, set, accel,
                                                            write->dstBinding,
                                                            write->dstArrayElement + j);
         }
         break;
      }

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
      ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);

      const struct anv_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct anv_descriptor *src_desc =
         &src->descriptors[src_layout->descriptor_index];
      src_desc += copy->srcArrayElement;

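      /* Inline uniform blocks copy raw bytes: srcArrayElement,
       * dstArrayElement and descriptorCount are all byte quantities for this
       * type, so the copy goes straight through the descriptor memory.
       */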
      if (src_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         anv_descriptor_set_write_inline_uniform_data(device, dst,
                                                      copy->dstBinding,
                                                      src->desc_mem.map + src_layout->descriptor_offset + copy->srcArrayElement,
                                                      copy->dstArrayElement,
                                                      copy->descriptorCount);
         continue;
      }

      /* Copy CPU side data */
      for (uint32_t j = 0; j < copy->descriptorCount; j++) {
         switch (src_desc[j].type) {
         case VK_DESCRIPTOR_TYPE_SAMPLER:
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
            VkDescriptorImageInfo info = {
               .sampler = anv_sampler_to_handle(src_desc[j].sampler),
               .imageView = anv_image_view_to_handle(src_desc[j].image_view),
               .imageLayout = src_desc[j].layout
            };
            anv_descriptor_set_write_image_view(device, dst,
                                                &info,
                                                src_desc[j].type,
                                                copy->dstBinding,
                                                copy->dstArrayElement + j);
            break;
         }

         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
            anv_descriptor_set_write_buffer_view(device, dst,
                                                 src_desc[j].type,
                                                 src_desc[j].buffer_view,
                                                 copy->dstBinding,
                                                 copy->dstArrayElement + j);
            break;
         }

         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            anv_descriptor_set_write_buffer(device, dst,
                                            NULL,
                                            src_desc[j].type,
                                            src_desc[j].buffer,
                                            copy->dstBinding,
                                            copy->dstArrayElement + j,
                                            src_desc[j].offset,
                                            src_desc[j].range);
            break;
         }

         case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {
            anv_descriptor_set_write_acceleration_structure(device, dst,
                                                            src_desc[j].accel_struct,
                                                            copy->dstBinding,
                                                            copy->dstArrayElement + j);
            break;
         }

         default:
            break;
         }
      }
   }
}

/*
 * Descriptor update templates.
 */

void
anv_descriptor_set_write_template(struct anv_device *device,
                                  struct anv_descriptor_set *set,
                                  struct anv_state_stream *alloc_stream,
                                  const struct anv_descriptor_update_template *template,
                                  const void *data)
{
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct anv_descriptor_template_entry *entry =
         &template->entries[i];

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;
            anv_descriptor_set_write_image_view(device, set,
                                                info, entry->type,
                                                entry->binding,
                                                entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *_bview =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);

            anv_descriptor_set_write_buffer_view(device, set,
                                                 entry->type,
                                                 bview,
                                                 entry->binding,
                                                 entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);

            anv_descriptor_set_write_buffer(device, set,
                                            alloc_stream,
                                            entry->type,
                                            buffer,
                                            entry->binding,
                                            entry->array_element + j,
                                            info->offset, info->range);
         }
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         anv_descriptor_set_write_inline_uniform_data(device, set,
                                                      entry->binding,
                                                      data + entry->offset,
                                                      entry->array_element,
                                                      entry->array_count);
         break;

      case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            VkAccelerationStructureKHR *accel_obj =
               (VkAccelerationStructureKHR *)(data + entry->offset + j * entry->stride);
            ANV_FROM_HANDLE(anv_acceleration_structure, accel, *accel_obj);

            anv_descriptor_set_write_acceleration_structure(device, set,
                                                            accel,
                                                            entry->binding,
                                                            entry->array_element + j);
         }
         break;

      default:
         break;
      }
   }
}

VkResult anv_CreateDescriptorUpdateTemplate(
    VkDevice                                    _device,
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_update_template *template;

   size_t size = sizeof(*template) +
      pCreateInfo->descriptorUpdateEntryCount * sizeof(template->entries[0]);
   template = vk_object_alloc(&device->vk, pAllocator, size,
                              VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);
   if (template == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   template->bind_point = pCreateInfo->pipelineBindPoint;

   if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
      template->set = pCreateInfo->set;

   template->entry_count = pCreateInfo->descriptorUpdateEntryCount;
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const VkDescriptorUpdateTemplateEntry *pEntry =
         &pCreateInfo->pDescriptorUpdateEntries[i];

      template->entries[i] = (struct anv_descriptor_template_entry) {
         .type = pEntry->descriptorType,
         .binding = pEntry->dstBinding,
         .array_element = pEntry->dstArrayElement,
         .array_count = pEntry->descriptorCount,
         .offset = pEntry->offset,
         .stride = pEntry->stride,
      };
   }

   *pDescriptorUpdateTemplate =
      anv_descriptor_update_template_to_handle(template);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorUpdateTemplate(
    VkDevice                                    _device,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   if (!template)
      return;

   vk_object_free(&device->vk, pAllocator, template);
}

void anv_UpdateDescriptorSetWithTemplate(
    VkDevice                                    _device,
    VkDescriptorSet                             descriptorSet,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const void*                                 pData)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set, set, descriptorSet);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   anv_descriptor_set_write_template(device, set, NULL, template, pData);
}
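
/*
 * Illustrative usage sketch (hypothetical application-side struct, not part
 * of this driver): how a descriptor update template drives the write
 * helpers above.  Assuming
 *
 *    struct my_descriptors {
 *       VkDescriptorBufferInfo ubo;         // binding 0
 *       VkDescriptorImageInfo  textures[4]; // binding 1
 *    };
 *
 * a template entry with dstBinding = 1, descriptorCount = 4,
 * offset = offsetof(struct my_descriptors, textures) and
 * stride = sizeof(VkDescriptorImageInfo) makes
 * anv_descriptor_set_write_template() read each element at
 * pData + offset + j * stride and route it to
 * anv_descriptor_set_write_image_view() at entry->array_element + j.
 */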