/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

#include "util/mesa-sha1.h"
#include "radv_private.h"
#include "sid.h"

VkResult radv_CreateDescriptorSetLayout(
	VkDevice                                    _device,
	const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
	const VkAllocationCallbacks*                pAllocator,
	VkDescriptorSetLayout*                      pSetLayout)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	struct radv_descriptor_set_layout *set_layout;

	assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

	uint32_t max_binding = 0;
	uint32_t immutable_sampler_count = 0;
	for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
		max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
		if (pCreateInfo->pBindings[j].pImmutableSamplers)
			immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
	}

	uint32_t samplers_offset = sizeof(struct radv_descriptor_set_layout) +
		(max_binding + 1) * sizeof(set_layout->binding[0]);
	size_t size = samplers_offset + immutable_sampler_count * 4 * sizeof(uint32_t);
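	/* Illustrative sizing example (values hypothetical): a layout whose
	 * highest binding index is 2 and which carries 4 immutable samplers
	 * allocates sizeof(*set_layout) + 3 * sizeof(binding[0]) bytes for
	 * the header and binding array, plus 4 * 16 bytes of sampler words,
	 * since each sampler is stored as 4 uint32_t dwords. */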

	set_layout = vk_alloc2(&device->alloc, pAllocator, size, 8,
				 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if (!set_layout)
		return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

	set_layout->flags = pCreateInfo->flags;

	/* We just allocate all the samplers at the end of the struct */
	uint32_t *samplers = (uint32_t*)&set_layout->binding[max_binding + 1];

	set_layout->binding_count = max_binding + 1;
	set_layout->shader_stages = 0;
	set_layout->dynamic_shader_stages = 0;
	set_layout->has_immutable_samplers = false;
	set_layout->size = 0;

	memset(set_layout->binding, 0, size - sizeof(struct radv_descriptor_set_layout));

	uint32_t buffer_count = 0;
	uint32_t dynamic_offset_count = 0;

	for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
		const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
		uint32_t b = binding->binding;
		uint32_t alignment;
		unsigned binding_buffer_count = 0;

		switch (binding->descriptorType) {
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
			assert(!(pCreateInfo->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
			set_layout->binding[b].dynamic_offset_count = 1;
			set_layout->dynamic_shader_stages |= binding->stageFlags;
			set_layout->binding[b].size = 0;
			binding_buffer_count = 1;
			alignment = 1;
			break;
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
		case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
			set_layout->binding[b].size = 16;
			binding_buffer_count = 1;
			alignment = 16;
			break;
		case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
		case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
		case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
			/* main descriptor + fmask descriptor */
			set_layout->binding[b].size = 64;
			binding_buffer_count = 1;
			alignment = 32;
			break;
		case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
			/* main descriptor + fmask descriptor + sampler */
			set_layout->binding[b].size = 96;
			binding_buffer_count = 1;
			alignment = 32;
			break;
		case VK_DESCRIPTOR_TYPE_SAMPLER:
			set_layout->binding[b].size = 16;
			alignment = 16;
			break;
		default:
			unreachable("unknown descriptor type\n");
			break;
		}

		set_layout->size = align(set_layout->size, alignment);
		assert(binding->descriptorCount > 0);
		set_layout->binding[b].type = binding->descriptorType;
		set_layout->binding[b].array_size = binding->descriptorCount;
		set_layout->binding[b].offset = set_layout->size;
		set_layout->binding[b].buffer_offset = buffer_count;
		set_layout->binding[b].dynamic_offset_offset = dynamic_offset_count;

		if (binding->pImmutableSamplers) {
			set_layout->binding[b].immutable_samplers_offset = samplers_offset;
			set_layout->binding[b].immutable_samplers_equal = true;
			set_layout->has_immutable_samplers = true;

			for (uint32_t i = 0; i < binding->descriptorCount; i++)
				memcpy(samplers + 4 * i, &radv_sampler_from_handle(binding->pImmutableSamplers[i])->state, 16);
			for (uint32_t i = 1; i < binding->descriptorCount; i++)
				if (memcmp(samplers + 4 * i, samplers, 16) != 0)
					set_layout->binding[b].immutable_samplers_equal = false;

			/* If all the immutable samplers in the binding are
			 * identical, they never need to be read back from the
			 * set, so don't reserve space for them. */
			if (set_layout->binding[b].immutable_samplers_equal) {
				if (binding->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
					set_layout->binding[b].size -= 32;
				else if (binding->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER)
					set_layout->binding[b].size -= 16;
			}
			samplers += 4 * binding->descriptorCount;
			samplers_offset += 4 * sizeof(uint32_t) * binding->descriptorCount;
		}

		set_layout->size += binding->descriptorCount * set_layout->binding[b].size;
		buffer_count += binding->descriptorCount * binding_buffer_count;
		dynamic_offset_count += binding->descriptorCount *
			set_layout->binding[b].dynamic_offset_count;
		set_layout->shader_stages |= binding->stageFlags;
	}
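	/* Worked example (hypothetical layout): binding 0 with two uniform
	 * buffers (16 bytes each) lands at offset 0 and advances size to 32;
	 * a following combined-image-sampler binding is then aligned up to a
	 * multiple of 32 and adds 96 bytes per array element. */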

	set_layout->buffer_count = buffer_count;
	set_layout->dynamic_offset_count = dynamic_offset_count;

	*pSetLayout = radv_descriptor_set_layout_to_handle(set_layout);

	return VK_SUCCESS;
}

void radv_DestroyDescriptorSetLayout(
	VkDevice                                    _device,
	VkDescriptorSetLayout                       _set_layout,
	const VkAllocationCallbacks*                pAllocator)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_set_layout, set_layout, _set_layout);

	if (!set_layout)
		return;

	vk_free2(&device->alloc, pAllocator, set_layout);
}

/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */

VkResult radv_CreatePipelineLayout(
	VkDevice                                    _device,
	const VkPipelineLayoutCreateInfo*           pCreateInfo,
	const VkAllocationCallbacks*                pAllocator,
	VkPipelineLayout*                           pPipelineLayout)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	struct radv_pipeline_layout *layout;
	struct mesa_sha1 ctx;

	assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

	layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
			     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if (layout == NULL)
		return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

	layout->num_sets = pCreateInfo->setLayoutCount;

	unsigned dynamic_offset_count = 0;

	_mesa_sha1_init(&ctx);
	for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
		RADV_FROM_HANDLE(radv_descriptor_set_layout, set_layout,
				 pCreateInfo->pSetLayouts[set]);
		layout->set[set].layout = set_layout;

		layout->set[set].dynamic_offset_start = dynamic_offset_count;
		for (uint32_t b = 0; b < set_layout->binding_count; b++) {
			dynamic_offset_count += set_layout->binding[b].array_size * set_layout->binding[b].dynamic_offset_count;
			if (set_layout->binding[b].immutable_samplers_offset)
				_mesa_sha1_update(&ctx, radv_immutable_samplers(set_layout, set_layout->binding + b),
				                  set_layout->binding[b].array_size * 4 * sizeof(uint32_t));
		}
		_mesa_sha1_update(&ctx, set_layout->binding,
				  sizeof(set_layout->binding[0]) * set_layout->binding_count);
	}

	layout->dynamic_offset_count = dynamic_offset_count;
	layout->push_constant_size = 0;

	for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
		const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
		layout->push_constant_size = MAX2(layout->push_constant_size,
						  range->offset + range->size);
	}

	layout->push_constant_size = align(layout->push_constant_size, 16);
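	/* Worked example (hypothetical ranges): with push constant ranges
	 * (offset 0, size 8) and (offset 16, size 4), the maximum extent is
	 * 20 bytes, which align(20, 16) rounds up to 32. */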
	_mesa_sha1_update(&ctx, &layout->push_constant_size,
			  sizeof(layout->push_constant_size));
	_mesa_sha1_final(&ctx, layout->sha1);
	*pPipelineLayout = radv_pipeline_layout_to_handle(layout);

	return VK_SUCCESS;
}

void radv_DestroyPipelineLayout(
	VkDevice                                    _device,
	VkPipelineLayout                            _pipelineLayout,
	const VkAllocationCallbacks*                pAllocator)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_pipeline_layout, pipeline_layout, _pipelineLayout);

	if (!pipeline_layout)
		return;
	vk_free2(&device->alloc, pAllocator, pipeline_layout);
}

#define EMPTY 1

static VkResult
radv_descriptor_set_create(struct radv_device *device,
			   struct radv_descriptor_pool *pool,
			   const struct radv_descriptor_set_layout *layout,
			   struct radv_descriptor_set **out_set)
{
	struct radv_descriptor_set *set;
	unsigned range_offset = sizeof(struct radv_descriptor_set) +
		sizeof(struct radeon_winsys_bo *) * layout->buffer_count;
	unsigned mem_size = range_offset +
		sizeof(struct radv_descriptor_range) * layout->dynamic_offset_count;

	if (pool->host_memory_base) {
		if (pool->host_memory_end - pool->host_memory_ptr < mem_size)
			return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY_KHR);

		set = (struct radv_descriptor_set*)pool->host_memory_ptr;
		pool->host_memory_ptr += mem_size;
	} else {
		set = vk_alloc2(&device->alloc, NULL, mem_size, 8,
		                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);

		if (!set)
			return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
	}

	memset(set, 0, mem_size);

	if (layout->dynamic_offset_count) {
		set->dynamic_descriptors = (struct radv_descriptor_range*)((uint8_t*)set + range_offset);
	}

	set->layout = layout;
	if (layout->size) {
		uint32_t layout_size = align_u32(layout->size, 32);
		set->size = layout->size;

		if (!pool->host_memory_base && pool->entry_count == pool->max_entry_count) {
			vk_free2(&device->alloc, NULL, set);
			return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY_KHR);
		}

		/* Try to allocate linearly first, so that we don't spend time
		 * looking for gaps if the app only allocates & resets via the
		 * pool. */
		if (pool->current_offset + layout_size <= pool->size) {
			set->bo = pool->bo;
			set->mapped_ptr = (uint32_t*)(pool->mapped_ptr + pool->current_offset);
			set->va = radv_buffer_get_va(set->bo) + pool->current_offset;
			if (!pool->host_memory_base) {
				pool->entries[pool->entry_count].offset = pool->current_offset;
				pool->entries[pool->entry_count].size = layout_size;
				pool->entries[pool->entry_count].set = set;
				pool->entry_count++;
			}
			pool->current_offset += layout_size;
		} else if (!pool->host_memory_base) {
			uint64_t offset = 0;
			int index;

			for (index = 0; index < pool->entry_count; ++index) {
				if (pool->entries[index].offset - offset >= layout_size)
					break;
				offset = pool->entries[index].offset + pool->entries[index].size;
			}

			if (pool->size - offset < layout_size) {
				vk_free2(&device->alloc, NULL, set);
				return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY_KHR);
			}
			set->bo = pool->bo;
			set->mapped_ptr = (uint32_t*)(pool->mapped_ptr + offset);
			set->va = radv_buffer_get_va(set->bo) + offset;
			memmove(&pool->entries[index + 1], &pool->entries[index],
				sizeof(pool->entries[0]) * (pool->entry_count - index));
			pool->entries[index].offset = offset;
			pool->entries[index].size = layout_size;
			pool->entries[index].set = set;
			pool->entry_count++;
		} else
			return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY_KHR);
	}
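	/* First-fit example (hypothetical pool state): with live entries at
	 * [0, 64) and [128, 192) in the pool BO, a 64-byte request walks the
	 * offset-sorted entry list, finds the gap starting at offset 64
	 * before the second entry, and inserts a new entry there via the
	 * memmove above. */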

	if (layout->has_immutable_samplers) {
		for (unsigned i = 0; i < layout->binding_count; ++i) {
			if (!layout->binding[i].immutable_samplers_offset ||
			    layout->binding[i].immutable_samplers_equal)
				continue;

			unsigned offset = layout->binding[i].offset / 4;
			if (layout->binding[i].type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
				offset += 16;

			const uint32_t *samplers = (const uint32_t*)((const char*)layout + layout->binding[i].immutable_samplers_offset);
			for (unsigned j = 0; j < layout->binding[i].array_size; ++j) {
				memcpy(set->mapped_ptr + offset, samplers + 4 * j, 16);
				offset += layout->binding[i].size / 4;
			}
		}
	}
	*out_set = set;
	return VK_SUCCESS;
}

static void
radv_descriptor_set_destroy(struct radv_device *device,
			    struct radv_descriptor_pool *pool,
			    struct radv_descriptor_set *set,
			    bool free_bo)
{
	assert(!pool->host_memory_base);

	if (free_bo && set->size && !pool->host_memory_base) {
		uint32_t offset = (uint8_t*)set->mapped_ptr - pool->mapped_ptr;
		for (int i = 0; i < pool->entry_count; ++i) {
			if (pool->entries[i].offset == offset) {
				memmove(&pool->entries[i], &pool->entries[i+1],
					sizeof(pool->entries[i]) * (pool->entry_count - i - 1));
				--pool->entry_count;
				break;
			}
		}
	}
	vk_free2(&device->alloc, NULL, set);
}

VkResult radv_CreateDescriptorPool(
	VkDevice                                    _device,
	const VkDescriptorPoolCreateInfo*           pCreateInfo,
	const VkAllocationCallbacks*                pAllocator,
	VkDescriptorPool*                           pDescriptorPool)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	struct radv_descriptor_pool *pool;
	int size = sizeof(struct radv_descriptor_pool);
	uint64_t bo_size = 0, bo_count = 0, range_count = 0;

	for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
		if (pCreateInfo->pPoolSizes[i].type != VK_DESCRIPTOR_TYPE_SAMPLER)
			bo_count += pCreateInfo->pPoolSizes[i].descriptorCount;

		switch(pCreateInfo->pPoolSizes[i].type) {
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
			range_count += pCreateInfo->pPoolSizes[i].descriptorCount;
			break;
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
		case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
		case VK_DESCRIPTOR_TYPE_SAMPLER:
			/* 32 as we may need to align for images */
			bo_size += 32 * pCreateInfo->pPoolSizes[i].descriptorCount;
			break;
		case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
		case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
		case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
			bo_size += 64 * pCreateInfo->pPoolSizes[i].descriptorCount;
			break;
		case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
			bo_size += 96 * pCreateInfo->pPoolSizes[i].descriptorCount;
			break;
		default:
			unreachable("unknown descriptor type\n");
			break;
		}
	}
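	/* Sizing example (hypothetical pool): declaring 8 uniform buffers,
	 * 4 combined image samplers and 2 sampled images reserves
	 * 8 * 32 + 4 * 96 + 2 * 64 = 768 bytes of GPU memory, and counts
	 * 8 + 4 + 2 = 14 buffer-list slots in bo_count. */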

	if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
		uint64_t host_size = pCreateInfo->maxSets * sizeof(struct radv_descriptor_set);
		host_size += sizeof(struct radeon_winsys_bo*) * bo_count;
		host_size += sizeof(struct radv_descriptor_range) * range_count;
		size += host_size;
	} else {
		size += sizeof(struct radv_descriptor_pool_entry) * pCreateInfo->maxSets;
	}
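	/* Without FREE_DESCRIPTOR_SET_BIT, sets can only be released by
	 * resetting the whole pool, so all per-set host structures are
	 * carved out of one linear allocation (host_memory_base below).
	 * With the bit set, each set is allocated individually and the
	 * pool instead tracks offset/size entries for gap reuse. */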

	pool = vk_alloc2(&device->alloc, pAllocator, size, 8,
	                 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if (!pool)
		return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

	memset(pool, 0, sizeof(*pool));

	if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
		pool->host_memory_base = (uint8_t*)pool + sizeof(struct radv_descriptor_pool);
		pool->host_memory_ptr = pool->host_memory_base;
		pool->host_memory_end = (uint8_t*)pool + size;
	}

	if (bo_size) {
		pool->bo = device->ws->buffer_create(device->ws, bo_size, 32,
						     RADEON_DOMAIN_VRAM,
						     RADEON_FLAG_NO_INTERPROCESS_SHARING |
						     RADEON_FLAG_READ_ONLY);
		pool->mapped_ptr = (uint8_t*)device->ws->buffer_map(pool->bo);
	}
	pool->size = bo_size;
	pool->max_entry_count = pCreateInfo->maxSets;

	*pDescriptorPool = radv_descriptor_pool_to_handle(pool);
	return VK_SUCCESS;
}

void radv_DestroyDescriptorPool(
	VkDevice                                    _device,
	VkDescriptorPool                            _pool,
	const VkAllocationCallbacks*                pAllocator)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_pool, pool, _pool);

	if (!pool)
		return;

	if (!pool->host_memory_base) {
		for(int i = 0; i < pool->entry_count; ++i) {
			radv_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
		}
	}

	if (pool->bo)
		device->ws->buffer_destroy(pool->bo);
	vk_free2(&device->alloc, pAllocator, pool);
}

VkResult radv_ResetDescriptorPool(
	VkDevice                                    _device,
	VkDescriptorPool                            descriptorPool,
	VkDescriptorPoolResetFlags                  flags)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_pool, pool, descriptorPool);

	if (!pool->host_memory_base) {
		for(int i = 0; i < pool->entry_count; ++i) {
			radv_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
		}
		pool->entry_count = 0;
	}

	pool->current_offset = 0;
	pool->host_memory_ptr = pool->host_memory_base;

	return VK_SUCCESS;
}

VkResult radv_AllocateDescriptorSets(
	VkDevice                                    _device,
	const VkDescriptorSetAllocateInfo*          pAllocateInfo,
	VkDescriptorSet*                            pDescriptorSets)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

	VkResult result = VK_SUCCESS;
	uint32_t i;
	struct radv_descriptor_set *set;

	/* allocate a set of buffers for each shader to contain descriptors */
	for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
		RADV_FROM_HANDLE(radv_descriptor_set_layout, layout,
				 pAllocateInfo->pSetLayouts[i]);

		assert(!(layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));

		result = radv_descriptor_set_create(device, pool, layout, &set);
		if (result != VK_SUCCESS)
			break;

		pDescriptorSets[i] = radv_descriptor_set_to_handle(set);
	}

	if (result != VK_SUCCESS)
		radv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
					i, pDescriptorSets);
	return result;
}

VkResult radv_FreeDescriptorSets(
	VkDevice                                    _device,
	VkDescriptorPool                            descriptorPool,
	uint32_t                                    count,
	const VkDescriptorSet*                      pDescriptorSets)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_pool, pool, descriptorPool);

	for (uint32_t i = 0; i < count; i++) {
		RADV_FROM_HANDLE(radv_descriptor_set, set, pDescriptorSets[i]);

		if (set && !pool->host_memory_base)
			radv_descriptor_set_destroy(device, pool, set, true);
	}
	return VK_SUCCESS;
}

static void write_texel_buffer_descriptor(struct radv_device *device,
					  struct radv_cmd_buffer *cmd_buffer,
					  unsigned *dst,
					  struct radeon_winsys_bo **buffer_list,
					  const VkBufferView _buffer_view)
{
	RADV_FROM_HANDLE(radv_buffer_view, buffer_view, _buffer_view);

	memcpy(dst, buffer_view->state, 4 * 4);

	if (cmd_buffer)
		radv_cs_add_buffer(device->ws, cmd_buffer->cs, buffer_view->bo, 7);
	else
		*buffer_list = buffer_view->bo;
}

static void write_buffer_descriptor(struct radv_device *device,
                                    struct radv_cmd_buffer *cmd_buffer,
                                    unsigned *dst,
                                    struct radeon_winsys_bo **buffer_list,
                                    const VkDescriptorBufferInfo *buffer_info)
{
	RADV_FROM_HANDLE(radv_buffer, buffer, buffer_info->buffer);
	uint64_t va = radv_buffer_get_va(buffer->bo);
	uint32_t range = buffer_info->range;

	if (buffer_info->range == VK_WHOLE_SIZE)
		range = buffer->size - buffer_info->offset;

	va += buffer_info->offset + buffer->offset;
	dst[0] = va;
	dst[1] = S_008F04_BASE_ADDRESS_HI(va >> 32);
	dst[2] = range;
	dst[3] = S_008F0C_DST_SEL_X(V_008F0C_SQ_SEL_X) |
		S_008F0C_DST_SEL_Y(V_008F0C_SQ_SEL_Y) |
		S_008F0C_DST_SEL_Z(V_008F0C_SQ_SEL_Z) |
		S_008F0C_DST_SEL_W(V_008F0C_SQ_SEL_W) |
		S_008F0C_NUM_FORMAT(V_008F0C_BUF_NUM_FORMAT_FLOAT) |
		S_008F0C_DATA_FORMAT(V_008F0C_BUF_DATA_FORMAT_32);

	if (cmd_buffer)
		radv_cs_add_buffer(device->ws, cmd_buffer->cs, buffer->bo, 7);
	else
		*buffer_list = buffer->bo;
}
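
/* The four dwords written above form an AMD buffer resource descriptor:
 * dst[0] holds the low 32 bits of the GPU VA, dst[1] the high address
 * bits, dst[2] the range in bytes, and dst[3] the destination swizzle
 * plus number/data format fields. */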

static void write_dynamic_buffer_descriptor(struct radv_device *device,
                                            struct radv_descriptor_range *range,
                                            struct radeon_winsys_bo **buffer_list,
                                            const VkDescriptorBufferInfo *buffer_info)
{
	RADV_FROM_HANDLE(radv_buffer, buffer, buffer_info->buffer);
	uint64_t va = radv_buffer_get_va(buffer->bo);
	unsigned size = buffer_info->range;

	if (buffer_info->range == VK_WHOLE_SIZE)
		size = buffer->size - buffer_info->offset;

	va += buffer_info->offset + buffer->offset;
	range->va = va;
	range->size = size;

	*buffer_list = buffer->bo;
}

static void
write_image_descriptor(struct radv_device *device,
		       struct radv_cmd_buffer *cmd_buffer,
		       unsigned *dst,
		       struct radeon_winsys_bo **buffer_list,
		       VkDescriptorType descriptor_type,
		       const VkDescriptorImageInfo *image_info)
{
	RADV_FROM_HANDLE(radv_image_view, iview, image_info->imageView);
	uint32_t *descriptor;

	if (descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
		descriptor = iview->storage_descriptor;
	} else {
		descriptor = iview->descriptor;
	}

	memcpy(dst, descriptor, 16 * 4);

	if (cmd_buffer)
		radv_cs_add_buffer(device->ws, cmd_buffer->cs, iview->bo, 7);
	else
		*buffer_list = iview->bo;
}
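
/* Image descriptors are copied as 16 dwords (64 bytes), matching the
 * "main descriptor + fmask descriptor" sizing in
 * radv_CreateDescriptorSetLayout above; storage images use the view's
 * storage variant of the descriptor. */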

static void
write_combined_image_sampler_descriptor(struct radv_device *device,
					struct radv_cmd_buffer *cmd_buffer,
					unsigned *dst,
					struct radeon_winsys_bo **buffer_list,
					VkDescriptorType descriptor_type,
					const VkDescriptorImageInfo *image_info,
					bool has_sampler)
{
	RADV_FROM_HANDLE(radv_sampler, sampler, image_info->sampler);

	write_image_descriptor(device, cmd_buffer, dst, buffer_list, descriptor_type, image_info);
	/* copy over sampler state */
	if (has_sampler)
		memcpy(dst + 16, sampler->state, 16);
}

static void
write_sampler_descriptor(struct radv_device *device,
			 unsigned *dst,
			 const VkDescriptorImageInfo *image_info)
{
	RADV_FROM_HANDLE(radv_sampler, sampler, image_info->sampler);

	memcpy(dst, sampler->state, 16);
}
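
/* Sampler state is 4 dwords (16 bytes), which is why immutable samplers
 * are stored and copied in 16-byte chunks throughout this file. */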

void radv_update_descriptor_sets(
	struct radv_device*                         device,
	struct radv_cmd_buffer*                     cmd_buffer,
	VkDescriptorSet                             dstSetOverride,
	uint32_t                                    descriptorWriteCount,
	const VkWriteDescriptorSet*                 pDescriptorWrites,
	uint32_t                                    descriptorCopyCount,
	const VkCopyDescriptorSet*                  pDescriptorCopies)
{
	uint32_t i, j;
	for (i = 0; i < descriptorWriteCount; i++) {
		const VkWriteDescriptorSet *writeset = &pDescriptorWrites[i];
		RADV_FROM_HANDLE(radv_descriptor_set, set,
		                 dstSetOverride ? dstSetOverride : writeset->dstSet);
		const struct radv_descriptor_set_binding_layout *binding_layout =
			set->layout->binding + writeset->dstBinding;
		uint32_t *ptr = set->mapped_ptr;
		struct radeon_winsys_bo **buffer_list = set->descriptors;
		/* Immutable samplers are not copied into push descriptors when they are
		 * allocated, so if we are writing push descriptors we have to copy the
		 * immutable samplers into them now.
		 */
		const bool copy_immutable_samplers = cmd_buffer &&
			binding_layout->immutable_samplers_offset && !binding_layout->immutable_samplers_equal;
		const uint32_t *samplers = radv_immutable_samplers(set->layout, binding_layout);

		ptr += binding_layout->offset / 4;
		ptr += binding_layout->size * writeset->dstArrayElement / 4;
		buffer_list += binding_layout->buffer_offset;
		buffer_list += writeset->dstArrayElement;
		for (j = 0; j < writeset->descriptorCount; ++j) {
			switch(writeset->descriptorType) {
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
				unsigned idx = writeset->dstArrayElement + j;
				idx += binding_layout->dynamic_offset_offset;
				assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
				write_dynamic_buffer_descriptor(device, set->dynamic_descriptors + idx,
								buffer_list, writeset->pBufferInfo + j);
				break;
			}
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
				write_buffer_descriptor(device, cmd_buffer, ptr, buffer_list,
							writeset->pBufferInfo + j);
				break;
			case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
				write_texel_buffer_descriptor(device, cmd_buffer, ptr, buffer_list,
							      writeset->pTexelBufferView[j]);
				break;
			case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
			case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
			case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
				write_image_descriptor(device, cmd_buffer, ptr, buffer_list,
						       writeset->descriptorType,
						       writeset->pImageInfo + j);
				break;
			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
				write_combined_image_sampler_descriptor(device, cmd_buffer, ptr, buffer_list,
									writeset->descriptorType,
									writeset->pImageInfo + j,
									!binding_layout->immutable_samplers_offset);
				if (copy_immutable_samplers) {
					const unsigned idx = writeset->dstArrayElement + j;
					memcpy(ptr + 16, samplers + 4 * idx, 16);
				}
				break;
			case VK_DESCRIPTOR_TYPE_SAMPLER:
				if (!binding_layout->immutable_samplers_offset) {
					write_sampler_descriptor(device, ptr,
					                         writeset->pImageInfo + j);
				} else if (copy_immutable_samplers) {
					unsigned idx = writeset->dstArrayElement + j;
					memcpy(ptr, samplers + 4 * idx, 16);
				}
				break;
			default:
				unreachable("unimplemented descriptor type");
				break;
			}
			ptr += binding_layout->size / 4;
			++buffer_list;
		}
	}

	for (i = 0; i < descriptorCopyCount; i++) {
		const VkCopyDescriptorSet *copyset = &pDescriptorCopies[i];
		RADV_FROM_HANDLE(radv_descriptor_set, src_set,
		                 copyset->srcSet);
		RADV_FROM_HANDLE(radv_descriptor_set, dst_set,
		                 copyset->dstSet);
		const struct radv_descriptor_set_binding_layout *src_binding_layout =
			src_set->layout->binding + copyset->srcBinding;
		const struct radv_descriptor_set_binding_layout *dst_binding_layout =
			dst_set->layout->binding + copyset->dstBinding;
		uint32_t *src_ptr = src_set->mapped_ptr;
		uint32_t *dst_ptr = dst_set->mapped_ptr;
		struct radeon_winsys_bo **src_buffer_list = src_set->descriptors;
		struct radeon_winsys_bo **dst_buffer_list = dst_set->descriptors;

		src_ptr += src_binding_layout->offset / 4;
		dst_ptr += dst_binding_layout->offset / 4;

		src_ptr += src_binding_layout->size * copyset->srcArrayElement / 4;
		dst_ptr += dst_binding_layout->size * copyset->dstArrayElement / 4;

		src_buffer_list += src_binding_layout->buffer_offset;
		src_buffer_list += copyset->srcArrayElement;

		dst_buffer_list += dst_binding_layout->buffer_offset;
		dst_buffer_list += copyset->dstArrayElement;

		for (j = 0; j < copyset->descriptorCount; ++j) {
			switch (src_binding_layout->type) {
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
				unsigned src_idx = copyset->srcArrayElement + j;
				unsigned dst_idx = copyset->dstArrayElement + j;
				struct radv_descriptor_range *src_range, *dst_range;
				src_idx += src_binding_layout->dynamic_offset_offset;
				dst_idx += dst_binding_layout->dynamic_offset_offset;

				src_range = src_set->dynamic_descriptors + src_idx;
				dst_range = dst_set->dynamic_descriptors + dst_idx;
				*dst_range = *src_range;
				break;
			}
			default:
				memcpy(dst_ptr, src_ptr, src_binding_layout->size);
			}
			src_ptr += src_binding_layout->size / 4;
			dst_ptr += dst_binding_layout->size / 4;
			dst_buffer_list[j] = src_buffer_list[j];
			++src_buffer_list;
			++dst_buffer_list;
		}
	}
}

void radv_UpdateDescriptorSets(
	VkDevice                                    _device,
	uint32_t                                    descriptorWriteCount,
	const VkWriteDescriptorSet*                 pDescriptorWrites,
	uint32_t                                    descriptorCopyCount,
	const VkCopyDescriptorSet*                  pDescriptorCopies)
{
	RADV_FROM_HANDLE(radv_device, device, _device);

	radv_update_descriptor_sets(device, NULL, VK_NULL_HANDLE, descriptorWriteCount, pDescriptorWrites,
			            descriptorCopyCount, pDescriptorCopies);
}
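
/* Application-side usage sketch (illustrative only; the handles are
 * hypothetical): writing one uniform buffer into binding 0 of an
 * existing set ends up in the write loop above.
 *
 *     VkDescriptorBufferInfo info = { buffer, 0, VK_WHOLE_SIZE };
 *     VkWriteDescriptorSet write = {
 *         .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
 *         .dstSet = set,
 *         .dstBinding = 0,
 *         .descriptorCount = 1,
 *         .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *         .pBufferInfo = &info,
 *     };
 *     vkUpdateDescriptorSets(device, 1, &write, 0, NULL);
 */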

VkResult radv_CreateDescriptorUpdateTemplateKHR(VkDevice _device,
                                                const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
                                                const VkAllocationCallbacks *pAllocator,
                                                VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_set_layout, set_layout, pCreateInfo->descriptorSetLayout);
	const uint32_t entry_count = pCreateInfo->descriptorUpdateEntryCount;
	const size_t size = sizeof(struct radv_descriptor_update_template) +
		sizeof(struct radv_descriptor_update_template_entry) * entry_count;
	struct radv_descriptor_update_template *templ;
	uint32_t i;

	templ = vk_alloc2(&device->alloc, pAllocator, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if (!templ)
		return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

	templ->entry_count = entry_count;

	for (i = 0; i < entry_count; i++) {
		const VkDescriptorUpdateTemplateEntryKHR *entry = &pCreateInfo->pDescriptorUpdateEntries[i];
		const struct radv_descriptor_set_binding_layout *binding_layout =
			set_layout->binding + entry->dstBinding;
		const uint32_t buffer_offset = binding_layout->buffer_offset + entry->dstArrayElement;
		const uint32_t *immutable_samplers = NULL;
		uint32_t dst_offset;
		uint32_t dst_stride;

		/* dst_offset is an offset into dynamic_descriptors when the descriptor
		 * is dynamic, and an offset into mapped_ptr otherwise */
		switch (entry->descriptorType) {
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
			assert(pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR);
			dst_offset = binding_layout->dynamic_offset_offset + entry->dstArrayElement;
			dst_stride = 0; /* Not used */
			break;
		default:
			switch (entry->descriptorType) {
			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
			case VK_DESCRIPTOR_TYPE_SAMPLER:
				/* Immutable samplers are copied into push descriptors when they are pushed */
				if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR &&
				    binding_layout->immutable_samplers_offset && !binding_layout->immutable_samplers_equal) {
					immutable_samplers = radv_immutable_samplers(set_layout, binding_layout) + entry->dstArrayElement * 4;
				}
				break;
			default:
				break;
			}
			dst_offset = binding_layout->offset / 4 + binding_layout->size * entry->dstArrayElement / 4;
			dst_stride = binding_layout->size / 4;
			break;
		}

		templ->entry[i] = (struct radv_descriptor_update_template_entry) {
			.descriptor_type = entry->descriptorType,
			.descriptor_count = entry->descriptorCount,
			.src_offset = entry->offset,
			.src_stride = entry->stride,
			.dst_offset = dst_offset,
			.dst_stride = dst_stride,
			.buffer_offset = buffer_offset,
			.has_sampler = !binding_layout->immutable_samplers_offset,
			.immutable_samplers = immutable_samplers
		};
	}

	*pDescriptorUpdateTemplate = radv_descriptor_update_template_to_handle(templ);
	return VK_SUCCESS;
}

void radv_DestroyDescriptorUpdateTemplateKHR(VkDevice _device,
                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                             const VkAllocationCallbacks *pAllocator)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_update_template, templ, descriptorUpdateTemplate);

	if (!templ)
		return;

	vk_free2(&device->alloc, pAllocator, templ);
}

void radv_update_descriptor_set_with_template(struct radv_device *device,
                                              struct radv_cmd_buffer *cmd_buffer,
                                              struct radv_descriptor_set *set,
                                              VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                              const void *pData)
{
	RADV_FROM_HANDLE(radv_descriptor_update_template, templ, descriptorUpdateTemplate);
	uint32_t i;

	for (i = 0; i < templ->entry_count; ++i) {
		struct radeon_winsys_bo **buffer_list = set->descriptors + templ->entry[i].buffer_offset;
		uint32_t *pDst = set->mapped_ptr + templ->entry[i].dst_offset;
		const uint8_t *pSrc = ((const uint8_t *) pData) + templ->entry[i].src_offset;
		uint32_t j;

		for (j = 0; j < templ->entry[i].descriptor_count; ++j) {
			switch (templ->entry[i].descriptor_type) {
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
				const unsigned idx = templ->entry[i].dst_offset + j;
				assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
				write_dynamic_buffer_descriptor(device, set->dynamic_descriptors + idx,
								buffer_list, (struct VkDescriptorBufferInfo *) pSrc);
				break;
			}
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
				write_buffer_descriptor(device, cmd_buffer, pDst, buffer_list,
				                        (struct VkDescriptorBufferInfo *) pSrc);
				break;
			case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
				write_texel_buffer_descriptor(device, cmd_buffer, pDst, buffer_list,
						              *(VkBufferView *) pSrc);
				break;
			case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
			case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
			case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
				write_image_descriptor(device, cmd_buffer, pDst, buffer_list,
						       templ->entry[i].descriptor_type,
						       (struct VkDescriptorImageInfo *) pSrc);
				break;
			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
				write_combined_image_sampler_descriptor(device, cmd_buffer, pDst, buffer_list,
									templ->entry[i].descriptor_type,
									(struct VkDescriptorImageInfo *) pSrc,
									templ->entry[i].has_sampler);
				if (templ->entry[i].immutable_samplers)
					memcpy(pDst + 16, templ->entry[i].immutable_samplers + 4 * j, 16);
				break;
			case VK_DESCRIPTOR_TYPE_SAMPLER:
				if (templ->entry[i].has_sampler)
					write_sampler_descriptor(device, pDst,
					                         (struct VkDescriptorImageInfo *) pSrc);
				else if (templ->entry[i].immutable_samplers)
					memcpy(pDst, templ->entry[i].immutable_samplers + 4 * j, 16);
				break;
			default:
				unreachable("unimplemented descriptor type");
				break;
			}
			pSrc += templ->entry[i].src_stride;
			pDst += templ->entry[i].dst_stride;
			++buffer_list;
		}
	}
}

void radv_UpdateDescriptorSetWithTemplateKHR(VkDevice _device,
                                             VkDescriptorSet descriptorSet,
                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
                                             const void *pData)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_set, set, descriptorSet);

	radv_update_descriptor_set_with_template(device, NULL, set, descriptorUpdateTemplate, pData);
}