/*
 * Copyright © 2020 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "vk_device.h"

#include "vk_common_entrypoints.h"
#include "vk_instance.h"
#include "vk_log.h"
#include "vk_physical_device.h"
#include "vk_queue.h"
#include "vk_sync.h"
#include "vk_sync_timeline.h"
#include "vk_util.h"
#include "util/debug.h"
#include "util/hash_table.h"
#include "util/ralloc.h"

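/* Pick the timeline semaphore strategy for a device based on the sync types
 * the physical device exposes:
 *
 *  - NONE:     no sync type advertises VK_SYNC_FEATURE_TIMELINE
 *  - EMULATED: the timeline type is the common vk_sync_timeline emulation
 *  - NATIVE:   the timeline type supports wait-before-signal, so submits can
 *              be handed straight to the kernel
 *  - ASSISTED: the timeline type is real but cannot handle wait-before-signal,
 *              so the runtime may have to re-order submits itself
 */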
static enum vk_device_timeline_mode
get_timeline_mode(struct vk_physical_device *physical_device)
{
   if (physical_device->supported_sync_types == NULL)
      return VK_DEVICE_TIMELINE_MODE_NONE;

   const struct vk_sync_type *timeline_type = NULL;
   for (const struct vk_sync_type *const *t =
        physical_device->supported_sync_types; *t; t++) {
      if ((*t)->features & VK_SYNC_FEATURE_TIMELINE) {
         /* We can only have one timeline mode */
         assert(timeline_type == NULL);
         timeline_type = *t;
      }
   }

   if (timeline_type == NULL)
      return VK_DEVICE_TIMELINE_MODE_NONE;

   if (vk_sync_type_is_vk_sync_timeline(timeline_type))
      return VK_DEVICE_TIMELINE_MODE_EMULATED;

   if (timeline_type->features & VK_SYNC_FEATURE_WAIT_BEFORE_SIGNAL)
      return VK_DEVICE_TIMELINE_MODE_NATIVE;

   /* For assisted mode, we require a few additional things of all sync types
    * which may be used as semaphores.
    */
   for (const struct vk_sync_type *const *t =
        physical_device->supported_sync_types; *t; t++) {
      if ((*t)->features & VK_SYNC_FEATURE_GPU_WAIT) {
         assert((*t)->features & VK_SYNC_FEATURE_WAIT_PENDING);
         if ((*t)->features & VK_SYNC_FEATURE_BINARY)
            assert((*t)->features & VK_SYNC_FEATURE_CPU_RESET);
      }
   }

   return VK_DEVICE_TIMELINE_MODE_ASSISTED;
}

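/* Record the features the application enabled at device creation that the
 * common runtime cares about.  Today that is only robustBufferAccess, taken
 * either from VkDeviceCreateInfo::pEnabledFeatures or from a chained
 * VkPhysicalDeviceFeatures2.
 */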
static void
collect_enabled_features(struct vk_device *device,
                         const VkDeviceCreateInfo *pCreateInfo)
{
   if (pCreateInfo->pEnabledFeatures) {
      if (pCreateInfo->pEnabledFeatures->robustBufferAccess)
         device->enabled_features.robustBufferAccess = true;
   }

   vk_foreach_struct_const(ext, pCreateInfo->pNext) {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2: {
         const VkPhysicalDeviceFeatures2 *features = (const void *)ext;
         if (features->features.robustBufferAccess)
            device->enabled_features.robustBufferAccess = true;
         break;
      }

      default:
         /* Don't warn */
         break;
      }
   }
}

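/* Initialize the common bits of a driver's device: allocation callbacks, the
 * dispatch table (with common entrypoints filled in wherever the driver left
 * gaps), enabled extensions and features, the queue list, and the timeline
 * and queue-submit modes.
 *
 * A rough sketch of how a driver might call this from its vkCreateDevice
 * implementation; the names below are illustrative, not part of this file:
 *
 *    struct vk_device_dispatch_table dispatch_table;
 *    vk_device_dispatch_table_from_entrypoints(
 *       &dispatch_table, &drv_device_entrypoints, true);
 *
 *    VkResult result = vk_device_init(&dev->vk, &pdev->vk, &dispatch_table,
 *                                     pCreateInfo, pAllocator);
 *    if (result != VK_SUCCESS)
 *       return result;
 */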
VkResult
vk_device_init(struct vk_device *device,
               struct vk_physical_device *physical_device,
               const struct vk_device_dispatch_table *dispatch_table,
               const VkDeviceCreateInfo *pCreateInfo,
               const VkAllocationCallbacks *alloc)
{
   memset(device, 0, sizeof(*device));
   vk_object_base_init(device, &device->base, VK_OBJECT_TYPE_DEVICE);
   if (alloc != NULL)
      device->alloc = *alloc;
   else
      device->alloc = physical_device->instance->alloc;

   device->physical = physical_device;

   device->dispatch_table = *dispatch_table;

   /* Add common entrypoints without overwriting driver-provided ones. */
   vk_device_dispatch_table_from_entrypoints(
      &device->dispatch_table, &vk_common_device_entrypoints, false);

   for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
      int idx;
      for (idx = 0; idx < VK_DEVICE_EXTENSION_COUNT; idx++) {
         if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
                    vk_device_extensions[idx].extensionName) == 0)
            break;
      }

      if (idx >= VK_DEVICE_EXTENSION_COUNT)
         return vk_errorf(physical_device, VK_ERROR_EXTENSION_NOT_PRESENT,
                          "%s not supported",
                          pCreateInfo->ppEnabledExtensionNames[i]);

      if (!physical_device->supported_extensions.extensions[idx])
         return vk_errorf(physical_device, VK_ERROR_EXTENSION_NOT_PRESENT,
                          "%s not supported",
                          pCreateInfo->ppEnabledExtensionNames[i]);

#ifdef ANDROID
      if (!vk_android_allowed_device_extensions.extensions[idx])
         return vk_errorf(physical_device, VK_ERROR_EXTENSION_NOT_PRESENT,
                          "%s not supported",
                          pCreateInfo->ppEnabledExtensionNames[i]);
#endif

      device->enabled_extensions.extensions[idx] = true;
   }

   VkResult result =
      vk_physical_device_check_device_features(physical_device,
                                               pCreateInfo);
   if (result != VK_SUCCESS)
      return result;

   collect_enabled_features(device, pCreateInfo);

   p_atomic_set(&device->private_data_next_index, 0);

   list_inithead(&device->queues);

   device->drm_fd = -1;

   device->timeline_mode = get_timeline_mode(physical_device);

   switch (device->timeline_mode) {
   case VK_DEVICE_TIMELINE_MODE_NONE:
   case VK_DEVICE_TIMELINE_MODE_NATIVE:
      device->submit_mode = VK_QUEUE_SUBMIT_MODE_IMMEDIATE;
      break;

   case VK_DEVICE_TIMELINE_MODE_EMULATED:
      device->submit_mode = VK_QUEUE_SUBMIT_MODE_DEFERRED;
      break;

   case VK_DEVICE_TIMELINE_MODE_ASSISTED:
      if (env_var_as_boolean("MESA_VK_ENABLE_SUBMIT_THREAD", false)) {
         device->submit_mode = VK_QUEUE_SUBMIT_MODE_THREADED;
      } else {
         device->submit_mode = VK_QUEUE_SUBMIT_MODE_THREADED_ON_DEMAND;
      }
      break;

   default:
      unreachable("Invalid timeline mode");
   }

#ifdef ANDROID
   mtx_init(&device->swapchain_private_mtx, mtx_plain);
   device->swapchain_private = NULL;
#endif /* ANDROID */

   return VK_SUCCESS;
}

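/* Tear down the common device state initialized by vk_device_init().  The
 * driver must have already torn down all of its queues.
 */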
void
vk_device_finish(struct vk_device *device)
{
   /* Drivers should tear down their own queues */
   assert(list_is_empty(&device->queues));

#ifdef ANDROID
   if (device->swapchain_private) {
      hash_table_foreach(device->swapchain_private, entry)
         util_sparse_array_finish(entry->data);
      ralloc_free(device->swapchain_private);
   }
#endif /* ANDROID */

   vk_object_base_finish(&device->base);
}

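/* Switch the device to threaded queue submission.  Must be called before any
 * queues are created.  If MESA_VK_ENABLE_SUBMIT_THREAD already forced a
 * dedicated submit thread, that choice is kept; otherwise the thread is only
 * spun up on demand.
 */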
void
vk_device_enable_threaded_submit(struct vk_device *device)
{
   /* This must be called before any queues are created */
   assert(list_is_empty(&device->queues));

   /* In order to use threaded submit, we need every sync type that can be
    * used as a wait fence for vkQueueSubmit() to support WAIT_PENDING.
    * It's required for cross-thread/process submit re-ordering.
    */
   for (const struct vk_sync_type *const *t =
        device->physical->supported_sync_types; *t; t++) {
      if ((*t)->features & VK_SYNC_FEATURE_GPU_WAIT)
         assert((*t)->features & VK_SYNC_FEATURE_WAIT_PENDING);
   }

   /* Any binary vk_sync types which will be used as permanent semaphore
    * payloads also need to support vk_sync_type::move, but that's a lot
    * harder to assert since it only applies to permanent semaphore payloads.
    */

   if (device->submit_mode != VK_QUEUE_SUBMIT_MODE_THREADED)
      device->submit_mode = VK_QUEUE_SUBMIT_MODE_THREADED_ON_DEMAND;
}

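/* Flush any deferred submits on all of the device's queues.  This is a no-op
 * unless the device is in VK_QUEUE_SUBMIT_MODE_DEFERRED, in which case we
 * keep looping over the queues until a full pass makes no progress, since
 * flushing one queue may unblock submits on another.
 */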
VkResult
vk_device_flush(struct vk_device *device)
{
   if (device->submit_mode != VK_QUEUE_SUBMIT_MODE_DEFERRED)
      return VK_SUCCESS;

   bool progress;
   do {
      progress = false;

      vk_foreach_queue(queue, device) {
         uint32_t queue_submit_count;
         VkResult result = vk_queue_flush(queue, &queue_submit_count);
         if (unlikely(result != VK_SUCCESS))
            return result;

         if (queue_submit_count)
            progress = true;
      }
   } while (progress);

   return VK_SUCCESS;
}

static const char *
timeline_mode_str(struct vk_device *device)
{
   switch (device->timeline_mode) {
#define CASE(X) case VK_DEVICE_TIMELINE_MODE_##X: return #X;
   CASE(NONE)
   CASE(EMULATED)
   CASE(ASSISTED)
   CASE(NATIVE)
#undef CASE
   default: return "UNKNOWN";
   }
}

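/* Report a device loss that has not been reported to the logs yet, typically
 * because a queue was marked lost first: mark the device as reported and
 * replay each queue's saved error message, then note the timeline mode,
 * which is often relevant when debugging lost devices.
 */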
void
_vk_device_report_lost(struct vk_device *device)
{
   assert(p_atomic_read(&device->_lost.lost) > 0);

   device->_lost.reported = true;

   vk_foreach_queue(queue, device) {
      if (queue->_lost.lost) {
         __vk_errorf(queue, VK_ERROR_DEVICE_LOST,
                     queue->_lost.error_file, queue->_lost.error_line,
                     "%s", queue->_lost.error_msg);
      }
   }

   vk_logd(VK_LOG_OBJS(device), "Timeline mode is %s.",
           timeline_mode_str(device));
}

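/* Mark the device lost.  Drivers normally reach this through the
 * vk_device_set_lost() wrapper, which supplies file and line information.
 * Logs the formatted error message, optionally aborts if
 * MESA_VK_ABORT_ON_DEVICE_LOSS is set, and always returns
 * VK_ERROR_DEVICE_LOST so callers can simply return the result.
 */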
VkResult
_vk_device_set_lost(struct vk_device *device,
                    const char *file, int line,
                    const char *msg, ...)
{
   /* This flushes out any per-queue device lost messages */
   if (vk_device_is_lost(device))
      return VK_ERROR_DEVICE_LOST;

   p_atomic_inc(&device->_lost.lost);
   device->_lost.reported = true;

   va_list ap;
   va_start(ap, msg);
   __vk_errorv(device, VK_ERROR_DEVICE_LOST, file, line, msg, ap);
   va_end(ap);

   vk_logd(VK_LOG_OBJS(device), "Timeline mode is %s.",
           timeline_mode_str(device));

   if (env_var_as_boolean("MESA_VK_ABORT_ON_DEVICE_LOSS", false))
      abort();

   return VK_ERROR_DEVICE_LOST;
}

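/* Look up a device-level entrypoint in the device's dispatch table,
 * returning NULL for entrypoints that are not supported by the instance's
 * API version or by the enabled instance/device extensions.
 */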
PFN_vkVoidFunction
vk_device_get_proc_addr(const struct vk_device *device,
                        const char *name)
{
   if (device == NULL || name == NULL)
      return NULL;

   struct vk_instance *instance = device->physical->instance;
   return vk_device_dispatch_table_get_if_supported(&device->dispatch_table,
                                                    name,
                                                    instance->app_info.api_version,
                                                    &instance->enabled_extensions,
                                                    &device->enabled_extensions);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
vk_common_GetDeviceProcAddr(VkDevice _device,
                            const char *pName)
{
   VK_FROM_HANDLE(vk_device, device, _device);
   return vk_device_get_proc_addr(device, pName);
}

VKAPI_ATTR void VKAPI_CALL
vk_common_GetDeviceQueue(VkDevice _device,
                         uint32_t queueFamilyIndex,
                         uint32_t queueIndex,
                         VkQueue *pQueue)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   const VkDeviceQueueInfo2 info = {
      .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
      .pNext = NULL,
      /* flags = 0 because (Vulkan spec 1.2.170 - vkGetDeviceQueue):
       *
       *    "vkGetDeviceQueue must only be used to get queues that were
       *    created with the flags parameter of VkDeviceQueueCreateInfo set
       *    to zero. To get queues that were created with a non-zero flags
       *    parameter use vkGetDeviceQueue2."
       */
      .flags = 0,
      .queueFamilyIndex = queueFamilyIndex,
      .queueIndex = queueIndex,
   };

   device->dispatch_table.GetDeviceQueue2(_device, &info, pQueue);
}

VKAPI_ATTR void VKAPI_CALL
vk_common_GetDeviceQueue2(VkDevice _device,
                          const VkDeviceQueueInfo2 *pQueueInfo,
                          VkQueue *pQueue)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   struct vk_queue *queue = NULL;
   vk_foreach_queue(iter, device) {
      if (iter->queue_family_index == pQueueInfo->queueFamilyIndex &&
          iter->index_in_family == pQueueInfo->queueIndex) {
         queue = iter;
         break;
      }
   }

   /* From the Vulkan 1.1.70 spec:
    *
    *    "The queue returned by vkGetDeviceQueue2 must have the same flags
    *    value from this structure as that used at device creation time in a
    *    VkDeviceQueueCreateInfo instance. If no matching flags were specified
    *    at device creation time then pQueue will return VK_NULL_HANDLE."
    */
   if (queue && queue->flags == pQueueInfo->flags)
      *pQueue = vk_queue_to_handle(queue);
   else
      *pQueue = VK_NULL_HANDLE;
}

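/* The entrypoints below implement the original Vulkan 1.0 memory requirement
 * and binding queries in terms of their "2" variants via the dispatch table,
 * so drivers only have to provide the newer entrypoints.
 */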
VKAPI_ATTR void VKAPI_CALL
vk_common_GetBufferMemoryRequirements(VkDevice _device,
                                      VkBuffer buffer,
                                      VkMemoryRequirements *pMemoryRequirements)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   VkBufferMemoryRequirementsInfo2 info = {
      .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
      .buffer = buffer,
   };
   VkMemoryRequirements2 reqs = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
   };
   device->dispatch_table.GetBufferMemoryRequirements2(_device, &info, &reqs);

   *pMemoryRequirements = reqs.memoryRequirements;
}

VKAPI_ATTR VkResult VKAPI_CALL
vk_common_BindBufferMemory(VkDevice _device,
                           VkBuffer buffer,
                           VkDeviceMemory memory,
                           VkDeviceSize memoryOffset)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   VkBindBufferMemoryInfo bind = {
      .sType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
      .buffer = buffer,
      .memory = memory,
      .memoryOffset = memoryOffset,
   };

   return device->dispatch_table.BindBufferMemory2(_device, 1, &bind);
}

VKAPI_ATTR void VKAPI_CALL
vk_common_GetImageMemoryRequirements(VkDevice _device,
                                     VkImage image,
                                     VkMemoryRequirements *pMemoryRequirements)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   VkImageMemoryRequirementsInfo2 info = {
      .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
      .image = image,
   };
   VkMemoryRequirements2 reqs = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
   };
   device->dispatch_table.GetImageMemoryRequirements2(_device, &info, &reqs);

   *pMemoryRequirements = reqs.memoryRequirements;
}

VKAPI_ATTR VkResult VKAPI_CALL
vk_common_BindImageMemory(VkDevice _device,
                          VkImage image,
                          VkDeviceMemory memory,
                          VkDeviceSize memoryOffset)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   VkBindImageMemoryInfo bind = {
      .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
      .image = image,
      .memory = memory,
      .memoryOffset = memoryOffset,
   };

   return device->dispatch_table.BindImageMemory2(_device, 1, &bind);
}

VKAPI_ATTR void VKAPI_CALL
vk_common_GetImageSparseMemoryRequirements(VkDevice _device,
                                           VkImage image,
                                           uint32_t *pSparseMemoryRequirementCount,
                                           VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   VkImageSparseMemoryRequirementsInfo2 info = {
      .sType = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
      .image = image,
   };

   if (!pSparseMemoryRequirements) {
      device->dispatch_table.GetImageSparseMemoryRequirements2(_device,
                                                               &info,
                                                               pSparseMemoryRequirementCount,
                                                               NULL);
      return;
   }

   STACK_ARRAY(VkSparseImageMemoryRequirements2, mem_reqs2, *pSparseMemoryRequirementCount);

   for (unsigned i = 0; i < *pSparseMemoryRequirementCount; ++i) {
      mem_reqs2[i].sType = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2;
      mem_reqs2[i].pNext = NULL;
   }

   device->dispatch_table.GetImageSparseMemoryRequirements2(_device,
                                                            &info,
                                                            pSparseMemoryRequirementCount,
                                                            mem_reqs2);

   for (unsigned i = 0; i < *pSparseMemoryRequirementCount; ++i)
      pSparseMemoryRequirements[i] = mem_reqs2[i].memoryRequirements;

   STACK_ARRAY_FINISH(mem_reqs2);
}

VKAPI_ATTR VkResult VKAPI_CALL
vk_common_DeviceWaitIdle(VkDevice _device)
{
   VK_FROM_HANDLE(vk_device, device, _device);
   const struct vk_device_dispatch_table *disp = &device->dispatch_table;

   vk_foreach_queue(queue, device) {
      VkResult result = disp->QueueWaitIdle(vk_queue_to_handle(queue));
      if (result != VK_SUCCESS)
         return result;
   }

   return VK_SUCCESS;
}

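/* Copy everything after the sType/pNext header from one extension struct to
 * another of the same layout.  Used below to service queries for the
 * VkPhysicalDeviceVulkan1*Features/Properties structs directly from the
 * corresponding core structs.
 */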
static void
copy_vk_struct_guts(VkBaseOutStructure *dst, VkBaseInStructure *src, size_t struct_size)
{
   STATIC_ASSERT(sizeof(*dst) == sizeof(*src));
   memcpy(dst + 1, src + 1, struct_size - sizeof(VkBaseOutStructure));
}

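/* The vk_get_physical_device_core_1_*_feature_ext() helpers below fill in a
 * per-extension feature struct from the corresponding core feature struct.
 * They return true if the struct was recognized and filled in, so callers
 * can fall back to driver-specific handling when they return false.
 */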
#define CORE_FEATURE(feature) features->feature = core->feature

bool
vk_get_physical_device_core_1_1_feature_ext(struct VkBaseOutStructure *ext,
                                            const VkPhysicalDeviceVulkan11Features *core)
{

   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: {
      VkPhysicalDevice16BitStorageFeatures *features = (void *)ext;
      CORE_FEATURE(storageBuffer16BitAccess);
      CORE_FEATURE(uniformAndStorageBuffer16BitAccess);
      CORE_FEATURE(storagePushConstant16);
      CORE_FEATURE(storageInputOutput16);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES: {
      VkPhysicalDeviceMultiviewFeatures *features = (void *)ext;
      CORE_FEATURE(multiview);
      CORE_FEATURE(multiviewGeometryShader);
      CORE_FEATURE(multiviewTessellationShader);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: {
      VkPhysicalDeviceProtectedMemoryFeatures *features = (void *)ext;
      CORE_FEATURE(protectedMemory);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: {
      VkPhysicalDeviceSamplerYcbcrConversionFeatures *features = (void *)ext;
      CORE_FEATURE(samplerYcbcrConversion);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES: {
      VkPhysicalDeviceShaderDrawParametersFeatures *features = (void *)ext;
      CORE_FEATURE(shaderDrawParameters);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES: {
      VkPhysicalDeviceVariablePointersFeatures *features = (void *)ext;
      CORE_FEATURE(variablePointersStorageBuffer);
      CORE_FEATURE(variablePointers);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES:
      copy_vk_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

bool
vk_get_physical_device_core_1_2_feature_ext(struct VkBaseOutStructure *ext,
                                            const VkPhysicalDeviceVulkan12Features *core)
{

   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES: {
      VkPhysicalDevice8BitStorageFeatures *features = (void *)ext;
      CORE_FEATURE(storageBuffer8BitAccess);
      CORE_FEATURE(uniformAndStorageBuffer8BitAccess);
      CORE_FEATURE(storagePushConstant8);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES: {
      VkPhysicalDeviceBufferDeviceAddressFeatures *features = (void *)ext;
      CORE_FEATURE(bufferDeviceAddress);
      CORE_FEATURE(bufferDeviceAddressCaptureReplay);
      CORE_FEATURE(bufferDeviceAddressMultiDevice);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES: {
      VkPhysicalDeviceDescriptorIndexingFeatures *features = (void *)ext;
      CORE_FEATURE(shaderInputAttachmentArrayDynamicIndexing);
      CORE_FEATURE(shaderUniformTexelBufferArrayDynamicIndexing);
      CORE_FEATURE(shaderStorageTexelBufferArrayDynamicIndexing);
      CORE_FEATURE(shaderUniformBufferArrayNonUniformIndexing);
      CORE_FEATURE(shaderSampledImageArrayNonUniformIndexing);
      CORE_FEATURE(shaderStorageBufferArrayNonUniformIndexing);
      CORE_FEATURE(shaderStorageImageArrayNonUniformIndexing);
      CORE_FEATURE(shaderInputAttachmentArrayNonUniformIndexing);
      CORE_FEATURE(shaderUniformTexelBufferArrayNonUniformIndexing);
      CORE_FEATURE(shaderStorageTexelBufferArrayNonUniformIndexing);
      CORE_FEATURE(descriptorBindingUniformBufferUpdateAfterBind);
      CORE_FEATURE(descriptorBindingSampledImageUpdateAfterBind);
      CORE_FEATURE(descriptorBindingStorageImageUpdateAfterBind);
      CORE_FEATURE(descriptorBindingStorageBufferUpdateAfterBind);
      CORE_FEATURE(descriptorBindingUniformTexelBufferUpdateAfterBind);
      CORE_FEATURE(descriptorBindingStorageTexelBufferUpdateAfterBind);
      CORE_FEATURE(descriptorBindingUpdateUnusedWhilePending);
      CORE_FEATURE(descriptorBindingPartiallyBound);
      CORE_FEATURE(descriptorBindingVariableDescriptorCount);
      CORE_FEATURE(runtimeDescriptorArray);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES: {
      VkPhysicalDeviceShaderFloat16Int8Features *features = (void *)ext;
      CORE_FEATURE(shaderFloat16);
      CORE_FEATURE(shaderInt8);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES: {
      VkPhysicalDeviceHostQueryResetFeatures *features = (void *)ext;
      CORE_FEATURE(hostQueryReset);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES: {
      VkPhysicalDeviceImagelessFramebufferFeatures *features = (void *)ext;
      CORE_FEATURE(imagelessFramebuffer);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES: {
      VkPhysicalDeviceScalarBlockLayoutFeatures *features = (void *)ext;
      CORE_FEATURE(scalarBlockLayout);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES: {
      VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures *features = (void *)ext;
      CORE_FEATURE(separateDepthStencilLayouts);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES: {
      VkPhysicalDeviceShaderAtomicInt64Features *features = (void *)ext;
      CORE_FEATURE(shaderBufferInt64Atomics);
      CORE_FEATURE(shaderSharedInt64Atomics);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES: {
      VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures *features = (void *)ext;
      CORE_FEATURE(shaderSubgroupExtendedTypes);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES: {
      VkPhysicalDeviceTimelineSemaphoreFeatures *features = (void *)ext;
      CORE_FEATURE(timelineSemaphore);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES: {
      VkPhysicalDeviceUniformBufferStandardLayoutFeatures *features = (void *)ext;
      CORE_FEATURE(uniformBufferStandardLayout);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES: {
      VkPhysicalDeviceVulkanMemoryModelFeatures *features = (void *)ext;
      CORE_FEATURE(vulkanMemoryModel);
      CORE_FEATURE(vulkanMemoryModelDeviceScope);
      CORE_FEATURE(vulkanMemoryModelAvailabilityVisibilityChains);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES:
      copy_vk_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

bool
vk_get_physical_device_core_1_3_feature_ext(struct VkBaseOutStructure *ext,
                                            const VkPhysicalDeviceVulkan13Features *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES: {
      VkPhysicalDeviceDynamicRenderingFeatures *features = (void *)ext;
      CORE_FEATURE(dynamicRendering);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES: {
      VkPhysicalDeviceImageRobustnessFeatures *features = (void *)ext;
      CORE_FEATURE(robustImageAccess);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES: {
      VkPhysicalDeviceInlineUniformBlockFeatures *features = (void *)ext;
      CORE_FEATURE(inlineUniformBlock);
      CORE_FEATURE(descriptorBindingInlineUniformBlockUpdateAfterBind);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES: {
      VkPhysicalDeviceMaintenance4Features *features = (void *)ext;
      CORE_FEATURE(maintenance4);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES: {
      VkPhysicalDevicePipelineCreationCacheControlFeatures *features = (void *)ext;
      CORE_FEATURE(pipelineCreationCacheControl);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES: {
      VkPhysicalDevicePrivateDataFeatures *features = (void *)ext;
      CORE_FEATURE(privateData);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES: {
      VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *features = (void *)ext;
      CORE_FEATURE(shaderDemoteToHelperInvocation);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES: {
      VkPhysicalDeviceShaderIntegerDotProductFeatures *features = (void *)ext;
      CORE_FEATURE(shaderIntegerDotProduct);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES: {
      VkPhysicalDeviceShaderTerminateInvocationFeatures *features = (void *)ext;
      CORE_FEATURE(shaderTerminateInvocation);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES: {
      VkPhysicalDeviceSubgroupSizeControlFeatures *features = (void *)ext;
      CORE_FEATURE(subgroupSizeControl);
      CORE_FEATURE(computeFullSubgroups);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES: {
      VkPhysicalDeviceSynchronization2Features *features = (void *)ext;
      CORE_FEATURE(synchronization2);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES: {
      VkPhysicalDeviceTextureCompressionASTCHDRFeatures *features = (void *)ext;
      CORE_FEATURE(textureCompressionASTC_HDR);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES: {
      VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *features = (void *)ext;
      CORE_FEATURE(shaderZeroInitializeWorkgroupMemory);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES:
      copy_vk_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

#undef CORE_FEATURE

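/* Same idea as CORE_FEATURE above, but for properties.  A few extension
 * structs renamed their members when they were promoted to core, which is
 * what CORE_RENAMED_PROPERTY is for.
 */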
#define CORE_RENAMED_PROPERTY(ext_property, core_property) \
   memcpy(&properties->ext_property, &core->core_property, sizeof(core->core_property))

#define CORE_PROPERTY(property) CORE_RENAMED_PROPERTY(property, property)

bool
vk_get_physical_device_core_1_1_property_ext(struct VkBaseOutStructure *ext,
                                             const VkPhysicalDeviceVulkan11Properties *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES: {
      VkPhysicalDeviceIDProperties *properties = (void *)ext;
      CORE_PROPERTY(deviceUUID);
      CORE_PROPERTY(driverUUID);
      CORE_PROPERTY(deviceLUID);
      CORE_PROPERTY(deviceNodeMask);
      CORE_PROPERTY(deviceLUIDValid);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES: {
      VkPhysicalDeviceMaintenance3Properties *properties = (void *)ext;
      CORE_PROPERTY(maxPerSetDescriptors);
      CORE_PROPERTY(maxMemoryAllocationSize);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES: {
      VkPhysicalDeviceMultiviewProperties *properties = (void *)ext;
      CORE_PROPERTY(maxMultiviewViewCount);
      CORE_PROPERTY(maxMultiviewInstanceIndex);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES: {
      VkPhysicalDevicePointClippingProperties *properties = (void *)ext;
      CORE_PROPERTY(pointClippingBehavior);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES: {
      VkPhysicalDeviceProtectedMemoryProperties *properties = (void *)ext;
      CORE_PROPERTY(protectedNoFault);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES: {
      VkPhysicalDeviceSubgroupProperties *properties = (void *)ext;
      CORE_PROPERTY(subgroupSize);
      CORE_RENAMED_PROPERTY(supportedStages,
                            subgroupSupportedStages);
      CORE_RENAMED_PROPERTY(supportedOperations,
                            subgroupSupportedOperations);
      CORE_RENAMED_PROPERTY(quadOperationsInAllStages,
                            subgroupQuadOperationsInAllStages);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES:
      copy_vk_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

bool
vk_get_physical_device_core_1_2_property_ext(struct VkBaseOutStructure *ext,
                                             const VkPhysicalDeviceVulkan12Properties *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES: {
      VkPhysicalDeviceDepthStencilResolveProperties *properties = (void *)ext;
      CORE_PROPERTY(supportedDepthResolveModes);
      CORE_PROPERTY(supportedStencilResolveModes);
      CORE_PROPERTY(independentResolveNone);
      CORE_PROPERTY(independentResolve);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES: {
      VkPhysicalDeviceDescriptorIndexingProperties *properties = (void *)ext;
      CORE_PROPERTY(maxUpdateAfterBindDescriptorsInAllPools);
      CORE_PROPERTY(shaderUniformBufferArrayNonUniformIndexingNative);
      CORE_PROPERTY(shaderSampledImageArrayNonUniformIndexingNative);
      CORE_PROPERTY(shaderStorageBufferArrayNonUniformIndexingNative);
      CORE_PROPERTY(shaderStorageImageArrayNonUniformIndexingNative);
      CORE_PROPERTY(shaderInputAttachmentArrayNonUniformIndexingNative);
      CORE_PROPERTY(robustBufferAccessUpdateAfterBind);
      CORE_PROPERTY(quadDivergentImplicitLod);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindSamplers);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindUniformBuffers);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindStorageBuffers);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindSampledImages);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindStorageImages);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindInputAttachments);
      CORE_PROPERTY(maxPerStageUpdateAfterBindResources);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindSamplers);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindUniformBuffers);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindUniformBuffersDynamic);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindStorageBuffers);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindStorageBuffersDynamic);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindSampledImages);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindStorageImages);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindInputAttachments);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES: {
      VkPhysicalDeviceDriverProperties *properties = (void *)ext;
      CORE_PROPERTY(driverID);
      CORE_PROPERTY(driverName);
      CORE_PROPERTY(driverInfo);
      CORE_PROPERTY(conformanceVersion);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES: {
      VkPhysicalDeviceSamplerFilterMinmaxProperties *properties = (void *)ext;
      CORE_PROPERTY(filterMinmaxImageComponentMapping);
      CORE_PROPERTY(filterMinmaxSingleComponentFormats);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES: {
      VkPhysicalDeviceFloatControlsProperties *properties = (void *)ext;
      CORE_PROPERTY(denormBehaviorIndependence);
      CORE_PROPERTY(roundingModeIndependence);
      CORE_PROPERTY(shaderDenormFlushToZeroFloat16);
      CORE_PROPERTY(shaderDenormPreserveFloat16);
      CORE_PROPERTY(shaderRoundingModeRTEFloat16);
      CORE_PROPERTY(shaderRoundingModeRTZFloat16);
      CORE_PROPERTY(shaderSignedZeroInfNanPreserveFloat16);
      CORE_PROPERTY(shaderDenormFlushToZeroFloat32);
      CORE_PROPERTY(shaderDenormPreserveFloat32);
      CORE_PROPERTY(shaderRoundingModeRTEFloat32);
      CORE_PROPERTY(shaderRoundingModeRTZFloat32);
      CORE_PROPERTY(shaderSignedZeroInfNanPreserveFloat32);
      CORE_PROPERTY(shaderDenormFlushToZeroFloat64);
      CORE_PROPERTY(shaderDenormPreserveFloat64);
      CORE_PROPERTY(shaderRoundingModeRTEFloat64);
      CORE_PROPERTY(shaderRoundingModeRTZFloat64);
      CORE_PROPERTY(shaderSignedZeroInfNanPreserveFloat64);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES: {
      VkPhysicalDeviceTimelineSemaphoreProperties *properties = (void *)ext;
      CORE_PROPERTY(maxTimelineSemaphoreValueDifference);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES:
      copy_vk_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

bool
vk_get_physical_device_core_1_3_property_ext(struct VkBaseOutStructure *ext,
                                             const VkPhysicalDeviceVulkan13Properties *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES: {
      VkPhysicalDeviceInlineUniformBlockProperties *properties = (void *)ext;
      CORE_PROPERTY(maxInlineUniformBlockSize);
      CORE_PROPERTY(maxPerStageDescriptorInlineUniformBlocks);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks);
      CORE_PROPERTY(maxDescriptorSetInlineUniformBlocks);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindInlineUniformBlocks);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES: {
      VkPhysicalDeviceMaintenance4Properties *properties = (void *)ext;
      CORE_PROPERTY(maxBufferSize);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES: {
      VkPhysicalDeviceShaderIntegerDotProductProperties *properties = (void *)ext;

#define IDP_PROPERTY(x) CORE_PROPERTY(integerDotProduct##x)
      IDP_PROPERTY(8BitUnsignedAccelerated);
      IDP_PROPERTY(8BitSignedAccelerated);
      IDP_PROPERTY(8BitMixedSignednessAccelerated);
      IDP_PROPERTY(4x8BitPackedUnsignedAccelerated);
      IDP_PROPERTY(4x8BitPackedSignedAccelerated);
      IDP_PROPERTY(4x8BitPackedMixedSignednessAccelerated);
      IDP_PROPERTY(16BitUnsignedAccelerated);
      IDP_PROPERTY(16BitSignedAccelerated);
      IDP_PROPERTY(16BitMixedSignednessAccelerated);
      IDP_PROPERTY(32BitUnsignedAccelerated);
      IDP_PROPERTY(32BitSignedAccelerated);
      IDP_PROPERTY(32BitMixedSignednessAccelerated);
      IDP_PROPERTY(64BitUnsignedAccelerated);
      IDP_PROPERTY(64BitSignedAccelerated);
      IDP_PROPERTY(64BitMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating8BitUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating8BitSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating8BitMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating4x8BitPackedUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating4x8BitPackedSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating4x8BitPackedMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating16BitUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating16BitSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating16BitMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating32BitUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating32BitSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating32BitMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating64BitUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating64BitSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating64BitMixedSignednessAccelerated);
#undef IDP_PROPERTY
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES: {
      VkPhysicalDeviceSubgroupSizeControlProperties *properties = (void *)ext;
      CORE_PROPERTY(minSubgroupSize);
      CORE_PROPERTY(maxSubgroupSize);
      CORE_PROPERTY(maxComputeWorkgroupSubgroups);
      CORE_PROPERTY(requiredSubgroupSizeStages);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES: {
      VkPhysicalDeviceTexelBufferAlignmentProperties *properties = (void *)ext;
      CORE_PROPERTY(storageTexelBufferOffsetAlignmentBytes);
      CORE_PROPERTY(storageTexelBufferOffsetSingleTexelAlignment);
      CORE_PROPERTY(uniformTexelBufferOffsetAlignmentBytes);
      CORE_PROPERTY(uniformTexelBufferOffsetSingleTexelAlignment);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES:
      copy_vk_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

#undef CORE_RENAMED_PROPERTY
#undef CORE_PROPERTY