/*
 * Copyright © 2020 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "vk_device.h"

#include "vk_common_entrypoints.h"
#include "vk_instance.h"
#include "vk_log.h"
#include "vk_physical_device.h"
#include "vk_queue.h"
#include "vk_sync.h"
#include "vk_sync_timeline.h"
#include "vk_util.h"
#include "util/u_debug.h"
#include "util/hash_table.h"
#include "util/perf/cpu_trace.h"
#include "util/ralloc.h"

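/* Determine how timeline semaphores will be handled on this physical
 * device: not at all if no supported sync type has VK_SYNC_FEATURE_TIMELINE,
 * emulated if the timeline type is vk_sync_timeline, natively if the sync
 * type supports wait-before-signal, and otherwise assisted by the common
 * submit code.
 */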
static enum vk_device_timeline_mode
get_timeline_mode(struct vk_physical_device *physical_device)
{
   if (physical_device->supported_sync_types == NULL)
      return VK_DEVICE_TIMELINE_MODE_NONE;

   const struct vk_sync_type *timeline_type = NULL;
   for (const struct vk_sync_type *const *t =
        physical_device->supported_sync_types; *t; t++) {
      if ((*t)->features & VK_SYNC_FEATURE_TIMELINE) {
         /* We can only have one timeline mode */
         assert(timeline_type == NULL);
         timeline_type = *t;
      }
   }

   if (timeline_type == NULL)
      return VK_DEVICE_TIMELINE_MODE_NONE;

   if (vk_sync_type_is_vk_sync_timeline(timeline_type))
      return VK_DEVICE_TIMELINE_MODE_EMULATED;

   if (timeline_type->features & VK_SYNC_FEATURE_WAIT_BEFORE_SIGNAL)
      return VK_DEVICE_TIMELINE_MODE_NATIVE;

   /* For assisted mode, we require a few additional things of all sync types
    * which may be used as semaphores.
    */
   for (const struct vk_sync_type *const *t =
        physical_device->supported_sync_types; *t; t++) {
      if ((*t)->features & VK_SYNC_FEATURE_GPU_WAIT) {
         assert((*t)->features & VK_SYNC_FEATURE_WAIT_PENDING);
         if ((*t)->features & VK_SYNC_FEATURE_BINARY)
            assert((*t)->features & VK_SYNC_FEATURE_CPU_RESET);
      }
   }

   return VK_DEVICE_TIMELINE_MODE_ASSISTED;
}

static void
collect_enabled_features(struct vk_device *device,
                         const VkDeviceCreateInfo *pCreateInfo)
{
   if (pCreateInfo->pEnabledFeatures)
      vk_set_physical_device_features_1_0(&device->enabled_features,
                                          pCreateInfo->pEnabledFeatures);
   vk_set_physical_device_features(&device->enabled_features,
                                   pCreateInfo->pNext);
}

VkResult
vk_device_init(struct vk_device *device,
               struct vk_physical_device *physical_device,
               const struct vk_device_dispatch_table *dispatch_table,
               const VkDeviceCreateInfo *pCreateInfo,
               const VkAllocationCallbacks *alloc)
{
   memset(device, 0, sizeof(*device));
   vk_object_base_init(device, &device->base, VK_OBJECT_TYPE_DEVICE);
   if (alloc != NULL)
      device->alloc = *alloc;
   else
      device->alloc = physical_device->instance->alloc;

   device->physical = physical_device;

   if (dispatch_table) {
      device->dispatch_table = *dispatch_table;

      /* Add common entrypoints without overwriting driver-provided ones. */
      vk_device_dispatch_table_from_entrypoints(
         &device->dispatch_table, &vk_common_device_entrypoints, false);
   }

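   /* Look up each requested extension in the table of all known device
    * extensions and verify that this physical device supports it.
    */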
   for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
      int idx;
      for (idx = 0; idx < VK_DEVICE_EXTENSION_COUNT; idx++) {
         if (strcmp(pCreateInfo->ppEnabledExtensionNames[i],
                    vk_device_extensions[idx].extensionName) == 0)
            break;
      }

      if (idx >= VK_DEVICE_EXTENSION_COUNT)
         return vk_errorf(physical_device, VK_ERROR_EXTENSION_NOT_PRESENT,
                          "%s not supported",
                          pCreateInfo->ppEnabledExtensionNames[i]);

      if (!physical_device->supported_extensions.extensions[idx])
         return vk_errorf(physical_device, VK_ERROR_EXTENSION_NOT_PRESENT,
                          "%s not supported",
                          pCreateInfo->ppEnabledExtensionNames[i]);

#ifdef ANDROID_STRICT
      if (!vk_android_allowed_device_extensions.extensions[idx])
         return vk_errorf(physical_device, VK_ERROR_EXTENSION_NOT_PRESENT,
                          "%s not supported",
                          pCreateInfo->ppEnabledExtensionNames[i]);
#endif

      device->enabled_extensions.extensions[idx] = true;
   }

   VkResult result =
      vk_physical_device_check_device_features(physical_device,
                                               pCreateInfo);
   if (result != VK_SUCCESS)
      return result;

   collect_enabled_features(device, pCreateInfo);

   p_atomic_set(&device->private_data_next_index, 0);

   list_inithead(&device->queues);

   device->drm_fd = -1;

   device->timeline_mode = get_timeline_mode(physical_device);

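   /* Pick a queue submission mode to match: native and no-timeline devices
    * can submit immediately, emulated timelines require deferred submit,
    * and assisted mode uses a submit thread, either always or on demand.
    */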
   switch (device->timeline_mode) {
   case VK_DEVICE_TIMELINE_MODE_NONE:
   case VK_DEVICE_TIMELINE_MODE_NATIVE:
      device->submit_mode = VK_QUEUE_SUBMIT_MODE_IMMEDIATE;
      break;

   case VK_DEVICE_TIMELINE_MODE_EMULATED:
      device->submit_mode = VK_QUEUE_SUBMIT_MODE_DEFERRED;
      break;

   case VK_DEVICE_TIMELINE_MODE_ASSISTED:
      if (debug_get_bool_option("MESA_VK_ENABLE_SUBMIT_THREAD", false)) {
         device->submit_mode = VK_QUEUE_SUBMIT_MODE_THREADED;
      } else {
         device->submit_mode = VK_QUEUE_SUBMIT_MODE_THREADED_ON_DEMAND;
      }
      break;

   default:
      unreachable("Invalid timeline mode");
   }

#if DETECT_OS_ANDROID
   mtx_init(&device->swapchain_private_mtx, mtx_plain);
   device->swapchain_private = NULL;
#endif /* DETECT_OS_ANDROID */

   simple_mtx_init(&device->trace_mtx, mtx_plain);

   return VK_SUCCESS;
}

void
vk_device_finish(struct vk_device *device)
{
   /* Drivers should tear down their own queues */
   assert(list_is_empty(&device->queues));

   vk_memory_trace_finish(device);

#if DETECT_OS_ANDROID
   if (device->swapchain_private) {
      hash_table_foreach(device->swapchain_private, entry)
         util_sparse_array_finish(entry->data);
      ralloc_free(device->swapchain_private);
   }
#endif /* DETECT_OS_ANDROID */

   simple_mtx_destroy(&device->trace_mtx);

   vk_object_base_finish(&device->base);
}

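/* Opt the device in to threaded submit.  Drivers call this before any
 * queues are created when they need a submit thread, e.g. to support
 * wait-before-signal on sync types that can't do it natively.
 */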
void
vk_device_enable_threaded_submit(struct vk_device *device)
{
   /* This must be called before any queues are created */
   assert(list_is_empty(&device->queues));

   /* In order to use threaded submit, we need every sync type that can be
    * used as a wait fence for vkQueueSubmit() to support WAIT_PENDING.
    * It's required for cross-thread/process submit re-ordering.
    */
   for (const struct vk_sync_type *const *t =
        device->physical->supported_sync_types; *t; t++) {
      if ((*t)->features & VK_SYNC_FEATURE_GPU_WAIT)
         assert((*t)->features & VK_SYNC_FEATURE_WAIT_PENDING);
   }

   /* Any binary vk_sync types which will be used as permanent semaphore
    * payloads also need to support vk_sync_type::move, but that's a lot
    * harder to assert since it only applies to permanent semaphore payloads.
    */

   if (device->submit_mode != VK_QUEUE_SUBMIT_MODE_THREADED)
      device->submit_mode = VK_QUEUE_SUBMIT_MODE_THREADED_ON_DEMAND;
}

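/* In deferred submit mode, drain every queue's deferred submits, looping
 * until a full pass over all queues makes no more progress.
 */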
VkResult
vk_device_flush(struct vk_device *device)
{
   if (device->submit_mode != VK_QUEUE_SUBMIT_MODE_DEFERRED)
      return VK_SUCCESS;

   bool progress;
   do {
      progress = false;

      vk_foreach_queue(queue, device) {
         uint32_t queue_submit_count;
         VkResult result = vk_queue_flush(queue, &queue_submit_count);
         if (unlikely(result != VK_SUCCESS))
            return result;

         if (queue_submit_count)
            progress = true;
      }
   } while (progress);

   return VK_SUCCESS;
}

static const char *
timeline_mode_str(struct vk_device *device)
{
   switch (device->timeline_mode) {
#define CASE(X) case VK_DEVICE_TIMELINE_MODE_##X: return #X;
   CASE(NONE)
   CASE(EMULATED)
   CASE(ASSISTED)
   CASE(NATIVE)
#undef CASE
   default: return "UNKNOWN";
   }
}

void
_vk_device_report_lost(struct vk_device *device)
{
   assert(p_atomic_read(&device->_lost.lost) > 0);

   device->_lost.reported = true;

   vk_foreach_queue(queue, device) {
      if (queue->_lost.lost) {
         __vk_errorf(queue, VK_ERROR_DEVICE_LOST,
                     queue->_lost.error_file, queue->_lost.error_line,
                     "%s", queue->_lost.error_msg);
      }
   }

   vk_logd(VK_LOG_OBJS(device), "Timeline mode is %s.",
           timeline_mode_str(device));
}

VkResult
_vk_device_set_lost(struct vk_device *device,
                    const char *file, int line,
                    const char *msg, ...)
{
   /* This flushes out any per-queue device lost messages */
   if (vk_device_is_lost(device))
      return VK_ERROR_DEVICE_LOST;

   p_atomic_inc(&device->_lost.lost);
   device->_lost.reported = true;

   va_list ap;
   va_start(ap, msg);
   __vk_errorv(device, VK_ERROR_DEVICE_LOST, file, line, msg, ap);
   va_end(ap);

   vk_logd(VK_LOG_OBJS(device), "Timeline mode is %s.",
           timeline_mode_str(device));

   if (debug_get_bool_option("MESA_VK_ABORT_ON_DEVICE_LOSS", false))
      abort();

   return VK_ERROR_DEVICE_LOST;
}

PFN_vkVoidFunction
vk_device_get_proc_addr(const struct vk_device *device,
                        const char *name)
{
   if (device == NULL || name == NULL)
      return NULL;

   struct vk_instance *instance = device->physical->instance;
   return vk_device_dispatch_table_get_if_supported(&device->dispatch_table,
                                                    name,
                                                    instance->app_info.api_version,
                                                    &instance->enabled_extensions,
                                                    &device->enabled_extensions);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
vk_common_GetDeviceProcAddr(VkDevice _device,
                            const char *pName)
{
   VK_FROM_HANDLE(vk_device, device, _device);
   return vk_device_get_proc_addr(device, pName);
}

VKAPI_ATTR void VKAPI_CALL
vk_common_GetDeviceQueue(VkDevice _device,
                         uint32_t queueFamilyIndex,
                         uint32_t queueIndex,
                         VkQueue *pQueue)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   const VkDeviceQueueInfo2 info = {
      .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
      .pNext = NULL,
      /* flags = 0 because (Vulkan spec 1.2.170 - vkGetDeviceQueue):
       *
       *    "vkGetDeviceQueue must only be used to get queues that were
       *     created with the flags parameter of VkDeviceQueueCreateInfo set
       *     to zero. To get queues that were created with a non-zero flags
       *     parameter use vkGetDeviceQueue2."
       */
      .flags = 0,
      .queueFamilyIndex = queueFamilyIndex,
      .queueIndex = queueIndex,
   };

   device->dispatch_table.GetDeviceQueue2(_device, &info, pQueue);
}

VKAPI_ATTR void VKAPI_CALL
vk_common_GetDeviceQueue2(VkDevice _device,
                          const VkDeviceQueueInfo2 *pQueueInfo,
                          VkQueue *pQueue)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   struct vk_queue *queue = NULL;
   vk_foreach_queue(iter, device) {
      if (iter->queue_family_index == pQueueInfo->queueFamilyIndex &&
          iter->index_in_family == pQueueInfo->queueIndex) {
         queue = iter;
         break;
      }
   }

   /* From the Vulkan 1.1.70 spec:
    *
    *    "The queue returned by vkGetDeviceQueue2 must have the same flags
    *    value from this structure as that used at device creation time in a
    *    VkDeviceQueueCreateInfo instance. If no matching flags were specified
    *    at device creation time then pQueue will return VK_NULL_HANDLE."
    */
   if (queue && queue->flags == pQueueInfo->flags)
      *pQueue = vk_queue_to_handle(queue);
   else
      *pQueue = VK_NULL_HANDLE;
}

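/* vkMapMemory is implemented in terms of vkMapMemory2KHR so that drivers
 * only have to implement the newer entrypoint.
 */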
VKAPI_ATTR VkResult VKAPI_CALL
vk_common_MapMemory(VkDevice _device,
                    VkDeviceMemory memory,
                    VkDeviceSize offset,
                    VkDeviceSize size,
                    VkMemoryMapFlags flags,
                    void **ppData)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   const VkMemoryMapInfoKHR info = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR,
      .flags = flags,
      .memory = memory,
      .offset = offset,
      .size = size,
   };

   return device->dispatch_table.MapMemory2KHR(_device, &info, ppData);
}

VKAPI_ATTR void VKAPI_CALL
vk_common_UnmapMemory(VkDevice _device,
                      VkDeviceMemory memory)
{
   VK_FROM_HANDLE(vk_device, device, _device);
   ASSERTED VkResult result;

   const VkMemoryUnmapInfoKHR info = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR,
      .memory = memory,
   };

   result = device->dispatch_table.UnmapMemory2KHR(_device, &info);
   assert(result == VK_SUCCESS);
}

VKAPI_ATTR void VKAPI_CALL
vk_common_GetDeviceGroupPeerMemoryFeatures(
   VkDevice device,
   uint32_t heapIndex,
   uint32_t localDeviceIndex,
   uint32_t remoteDeviceIndex,
   VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
{
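   /* This common implementation assumes a single-device group, so device
    * index 0 is the only valid one and every peer memory feature can be
    * reported.
    */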
   assert(localDeviceIndex == 0 && remoteDeviceIndex == 0);
   *pPeerMemoryFeatures = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT |
                          VK_PEER_MEMORY_FEATURE_COPY_DST_BIT |
                          VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT |
                          VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT;
}

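/* The Vulkan 1.0 image memory entrypoints below are implemented on top of
 * their Vulkan 1.1 "2" variants, so drivers only need the newer form.
 */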
VKAPI_ATTR void VKAPI_CALL
vk_common_GetImageMemoryRequirements(VkDevice _device,
                                     VkImage image,
                                     VkMemoryRequirements *pMemoryRequirements)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   VkImageMemoryRequirementsInfo2 info = {
      .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
      .image = image,
   };
   VkMemoryRequirements2 reqs = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
   };
   device->dispatch_table.GetImageMemoryRequirements2(_device, &info, &reqs);

   *pMemoryRequirements = reqs.memoryRequirements;
}

VKAPI_ATTR VkResult VKAPI_CALL
vk_common_BindImageMemory(VkDevice _device,
                          VkImage image,
                          VkDeviceMemory memory,
                          VkDeviceSize memoryOffset)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   VkBindImageMemoryInfo bind = {
      .sType         = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
      .image         = image,
      .memory        = memory,
      .memoryOffset  = memoryOffset,
   };

   return device->dispatch_table.BindImageMemory2(_device, 1, &bind);
}

VKAPI_ATTR void VKAPI_CALL
vk_common_GetImageSparseMemoryRequirements(VkDevice _device,
                                           VkImage image,
                                           uint32_t *pSparseMemoryRequirementCount,
                                           VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
{
   VK_FROM_HANDLE(vk_device, device, _device);

   VkImageSparseMemoryRequirementsInfo2 info = {
      .sType = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
      .image = image,
   };

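   /* Per the usual Vulkan two-call idiom, a NULL pSparseMemoryRequirements
    * means the caller only wants the count.
    */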
   if (!pSparseMemoryRequirements) {
      device->dispatch_table.GetImageSparseMemoryRequirements2(_device,
                                                               &info,
                                                               pSparseMemoryRequirementCount,
                                                               NULL);
      return;
   }

   STACK_ARRAY(VkSparseImageMemoryRequirements2, mem_reqs2,
               *pSparseMemoryRequirementCount);

   for (unsigned i = 0; i < *pSparseMemoryRequirementCount; ++i) {
      mem_reqs2[i].sType = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2;
      mem_reqs2[i].pNext = NULL;
   }

   device->dispatch_table.GetImageSparseMemoryRequirements2(_device,
                                                            &info,
                                                            pSparseMemoryRequirementCount,
                                                            mem_reqs2);

   for (unsigned i = 0; i < *pSparseMemoryRequirementCount; ++i)
      pSparseMemoryRequirements[i] = mem_reqs2[i].memoryRequirements;

   STACK_ARRAY_FINISH(mem_reqs2);
}

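/* vkDeviceWaitIdle is just a wait on every queue in the device. */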
VKAPI_ATTR VkResult VKAPI_CALL
vk_common_DeviceWaitIdle(VkDevice _device)
{
   MESA_TRACE_FUNC();

   VK_FROM_HANDLE(vk_device, device, _device);
   const struct vk_device_dispatch_table *disp = &device->dispatch_table;

   vk_foreach_queue(queue, device) {
      VkResult result = disp->QueueWaitIdle(vk_queue_to_handle(queue));
      if (result != VK_SUCCESS)
         return result;
   }

   return VK_SUCCESS;
}

#ifndef _WIN32

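/* Sample the given clock and return the time in nanoseconds.  If
 * CLOCK_MONOTONIC_RAW is requested but not supported by the OS, fall back
 * to CLOCK_MONOTONIC.  Returns 0 if the clock cannot be read at all.
 */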
uint64_t
vk_clock_gettime(clockid_t clock_id)
{
   struct timespec current;
   int ret;

   ret = clock_gettime(clock_id, &current);
#ifdef CLOCK_MONOTONIC_RAW
   if (ret < 0 && clock_id == CLOCK_MONOTONIC_RAW)
      ret = clock_gettime(CLOCK_MONOTONIC, &current);
#endif
   if (ret < 0)
      return 0;

   return (uint64_t)current.tv_sec * 1000000000ULL + current.tv_nsec;
}

#endif /* !_WIN32 */

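/* Helpers for copying properties out of the core VulkanXYProperties structs
 * into the pre-promotion extension structs.  CORE_RENAMED_PROPERTY covers
 * fields that were renamed when the extension was promoted to core.
 */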
#define CORE_RENAMED_PROPERTY(ext_property, core_property) \
   memcpy(&properties->ext_property, &core->core_property, sizeof(core->core_property))

#define CORE_PROPERTY(property) CORE_RENAMED_PROPERTY(property, property)

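/* Fill a chained property struct from the core 1.1 properties, returning
 * false if the sType isn't one this function knows how to handle.  The
 * 1.2 and 1.3 variants below follow the same pattern.
 */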
bool
vk_get_physical_device_core_1_1_property_ext(struct VkBaseOutStructure *ext,
                                             const VkPhysicalDeviceVulkan11Properties *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES: {
      VkPhysicalDeviceIDProperties *properties = (void *)ext;
      CORE_PROPERTY(deviceUUID);
      CORE_PROPERTY(driverUUID);
      CORE_PROPERTY(deviceLUID);
      CORE_PROPERTY(deviceNodeMask);
      CORE_PROPERTY(deviceLUIDValid);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES: {
      VkPhysicalDeviceMaintenance3Properties *properties = (void *)ext;
      CORE_PROPERTY(maxPerSetDescriptors);
      CORE_PROPERTY(maxMemoryAllocationSize);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES: {
      VkPhysicalDeviceMultiviewProperties *properties = (void *)ext;
      CORE_PROPERTY(maxMultiviewViewCount);
      CORE_PROPERTY(maxMultiviewInstanceIndex);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES: {
      VkPhysicalDevicePointClippingProperties *properties = (void *)ext;
      CORE_PROPERTY(pointClippingBehavior);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES: {
      VkPhysicalDeviceProtectedMemoryProperties *properties = (void *)ext;
      CORE_PROPERTY(protectedNoFault);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES: {
      VkPhysicalDeviceSubgroupProperties *properties = (void *)ext;
      CORE_PROPERTY(subgroupSize);
      CORE_RENAMED_PROPERTY(supportedStages,
                            subgroupSupportedStages);
      CORE_RENAMED_PROPERTY(supportedOperations,
                            subgroupSupportedOperations);
      CORE_RENAMED_PROPERTY(quadOperationsInAllStages,
                            subgroupQuadOperationsInAllStages);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES:
      vk_copy_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

bool
vk_get_physical_device_core_1_2_property_ext(struct VkBaseOutStructure *ext,
                                             const VkPhysicalDeviceVulkan12Properties *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES: {
      VkPhysicalDeviceDepthStencilResolveProperties *properties = (void *)ext;
      CORE_PROPERTY(supportedDepthResolveModes);
      CORE_PROPERTY(supportedStencilResolveModes);
      CORE_PROPERTY(independentResolveNone);
      CORE_PROPERTY(independentResolve);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES: {
      VkPhysicalDeviceDescriptorIndexingProperties *properties = (void *)ext;
      CORE_PROPERTY(maxUpdateAfterBindDescriptorsInAllPools);
      CORE_PROPERTY(shaderUniformBufferArrayNonUniformIndexingNative);
      CORE_PROPERTY(shaderSampledImageArrayNonUniformIndexingNative);
      CORE_PROPERTY(shaderStorageBufferArrayNonUniformIndexingNative);
      CORE_PROPERTY(shaderStorageImageArrayNonUniformIndexingNative);
      CORE_PROPERTY(shaderInputAttachmentArrayNonUniformIndexingNative);
      CORE_PROPERTY(robustBufferAccessUpdateAfterBind);
      CORE_PROPERTY(quadDivergentImplicitLod);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindSamplers);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindUniformBuffers);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindStorageBuffers);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindSampledImages);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindStorageImages);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindInputAttachments);
      CORE_PROPERTY(maxPerStageUpdateAfterBindResources);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindSamplers);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindUniformBuffers);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindUniformBuffersDynamic);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindStorageBuffers);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindStorageBuffersDynamic);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindSampledImages);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindStorageImages);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindInputAttachments);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES: {
      VkPhysicalDeviceDriverProperties *properties = (void *)ext;
      CORE_PROPERTY(driverID);
      CORE_PROPERTY(driverName);
      CORE_PROPERTY(driverInfo);
      CORE_PROPERTY(conformanceVersion);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES: {
      VkPhysicalDeviceSamplerFilterMinmaxProperties *properties = (void *)ext;
      CORE_PROPERTY(filterMinmaxImageComponentMapping);
      CORE_PROPERTY(filterMinmaxSingleComponentFormats);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES: {
      VkPhysicalDeviceFloatControlsProperties *properties = (void *)ext;
      CORE_PROPERTY(denormBehaviorIndependence);
      CORE_PROPERTY(roundingModeIndependence);
      CORE_PROPERTY(shaderDenormFlushToZeroFloat16);
      CORE_PROPERTY(shaderDenormPreserveFloat16);
      CORE_PROPERTY(shaderRoundingModeRTEFloat16);
      CORE_PROPERTY(shaderRoundingModeRTZFloat16);
      CORE_PROPERTY(shaderSignedZeroInfNanPreserveFloat16);
      CORE_PROPERTY(shaderDenormFlushToZeroFloat32);
      CORE_PROPERTY(shaderDenormPreserveFloat32);
      CORE_PROPERTY(shaderRoundingModeRTEFloat32);
      CORE_PROPERTY(shaderRoundingModeRTZFloat32);
      CORE_PROPERTY(shaderSignedZeroInfNanPreserveFloat32);
      CORE_PROPERTY(shaderDenormFlushToZeroFloat64);
      CORE_PROPERTY(shaderDenormPreserveFloat64);
      CORE_PROPERTY(shaderRoundingModeRTEFloat64);
      CORE_PROPERTY(shaderRoundingModeRTZFloat64);
      CORE_PROPERTY(shaderSignedZeroInfNanPreserveFloat64);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES: {
      VkPhysicalDeviceTimelineSemaphoreProperties *properties = (void *)ext;
      CORE_PROPERTY(maxTimelineSemaphoreValueDifference);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES:
      vk_copy_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

bool
vk_get_physical_device_core_1_3_property_ext(struct VkBaseOutStructure *ext,
                                             const VkPhysicalDeviceVulkan13Properties *core)
{
   switch (ext->sType) {
   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES: {
      VkPhysicalDeviceInlineUniformBlockProperties *properties = (void *)ext;
      CORE_PROPERTY(maxInlineUniformBlockSize);
      CORE_PROPERTY(maxPerStageDescriptorInlineUniformBlocks);
      CORE_PROPERTY(maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks);
      CORE_PROPERTY(maxDescriptorSetInlineUniformBlocks);
      CORE_PROPERTY(maxDescriptorSetUpdateAfterBindInlineUniformBlocks);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES: {
      VkPhysicalDeviceMaintenance4Properties *properties = (void *)ext;
      CORE_PROPERTY(maxBufferSize);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES: {
      VkPhysicalDeviceShaderIntegerDotProductProperties *properties = (void *)ext;

#define IDP_PROPERTY(x) CORE_PROPERTY(integerDotProduct##x)
      IDP_PROPERTY(8BitUnsignedAccelerated);
      IDP_PROPERTY(8BitSignedAccelerated);
      IDP_PROPERTY(8BitMixedSignednessAccelerated);
      IDP_PROPERTY(4x8BitPackedUnsignedAccelerated);
      IDP_PROPERTY(4x8BitPackedSignedAccelerated);
      IDP_PROPERTY(4x8BitPackedMixedSignednessAccelerated);
      IDP_PROPERTY(16BitUnsignedAccelerated);
      IDP_PROPERTY(16BitSignedAccelerated);
      IDP_PROPERTY(16BitMixedSignednessAccelerated);
      IDP_PROPERTY(32BitUnsignedAccelerated);
      IDP_PROPERTY(32BitSignedAccelerated);
      IDP_PROPERTY(32BitMixedSignednessAccelerated);
      IDP_PROPERTY(64BitUnsignedAccelerated);
      IDP_PROPERTY(64BitSignedAccelerated);
      IDP_PROPERTY(64BitMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating8BitUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating8BitSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating8BitMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating4x8BitPackedUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating4x8BitPackedSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating4x8BitPackedMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating16BitUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating16BitSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating16BitMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating32BitUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating32BitSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating32BitMixedSignednessAccelerated);
      IDP_PROPERTY(AccumulatingSaturating64BitUnsignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating64BitSignedAccelerated);
      IDP_PROPERTY(AccumulatingSaturating64BitMixedSignednessAccelerated);
#undef IDP_PROPERTY
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES: {
      VkPhysicalDeviceSubgroupSizeControlProperties *properties = (void *)ext;
      CORE_PROPERTY(minSubgroupSize);
      CORE_PROPERTY(maxSubgroupSize);
      CORE_PROPERTY(maxComputeWorkgroupSubgroups);
      CORE_PROPERTY(requiredSubgroupSizeStages);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES: {
      VkPhysicalDeviceTexelBufferAlignmentProperties *properties = (void *)ext;
      CORE_PROPERTY(storageTexelBufferOffsetAlignmentBytes);
      CORE_PROPERTY(storageTexelBufferOffsetSingleTexelAlignment);
      CORE_PROPERTY(uniformTexelBufferOffsetAlignmentBytes);
      CORE_PROPERTY(uniformTexelBufferOffsetSingleTexelAlignment);
      return true;
   }

   case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES:
      vk_copy_struct_guts(ext, (void *)core, sizeof(*core));
      return true;

   default:
      return false;
   }
}

#undef CORE_RENAMED_PROPERTY
#undef CORE_PROPERTY