1 /*
2 *
3 * Copyright (c) 2015-2016 The Khronos Group Inc.
4 * Copyright (c) 2015-2016 Valve Corporation
5 * Copyright (c) 2015-2016 LunarG, Inc.
6 * Copyright (C) 2015 Google Inc.
7 *
8 * Licensed under the Apache License, Version 2.0 (the "License");
9 * you may not use this file except in compliance with the License.
10 * You may obtain a copy of the License at
11 *
12 * http://www.apache.org/licenses/LICENSE-2.0
13 *
14 * Unless required by applicable law or agreed to in writing, software
15 * distributed under the License is distributed on an "AS IS" BASIS,
16 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 * See the License for the specific language governing permissions and
18 * limitations under the License.
19 *
20 * Author: Courtney Goeltzenleuchter <courtney@lunarg.com>
21 * Author: Jon Ashburn <jon@lunarg.com>
22 * Author: Tony Barbour <tony@LunarG.com>
23 * Author: Chia-I Wu <olv@lunarg.com>
24 */
25 #define _GNU_SOURCE
26 #include <stdlib.h>
27 #include <string.h>
28
29 #include "vk_loader_platform.h"
30 #include "loader.h"
31 #include "debug_report.h"
32 #include "wsi.h"
33 #include "extensions.h"
34 #include "gpa_helper.h"
35 #include "table_ops.h"
36
37 /* Trampoline entrypoints are in this file for core Vulkan commands */
38 /**
39 * Get an instance level or global level entry point address.
40 * @param instance
41 * @param pName
42 * @return
43 * If instance == NULL returns a global level functions only
44 * If instance is valid returns a trampoline entry point for all dispatchable
45 * Vulkan
46 * functions both core and extensions.
47 */
48 LOADER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
vkGetInstanceProcAddr(VkInstance instance,const char * pName)49 vkGetInstanceProcAddr(VkInstance instance, const char *pName) {
50
51 void *addr;
52
53 addr = globalGetProcAddr(pName);
54 if (instance == VK_NULL_HANDLE) {
55 // get entrypoint addresses that are global (no dispatchable object)
56
57 return addr;
58 } else {
59 // if a global entrypoint return NULL
60 if (addr)
61 return NULL;
62 }
63
64 struct loader_instance *ptr_instance = loader_get_instance(instance);
65 if (ptr_instance == NULL)
66 return NULL;
67 // Return trampoline code for non-global entrypoints including any
68 // extensions.
69 // Device extensions are returned if a layer or ICD supports the extension.
70 // Instance extensions are returned if the extension is enabled and the
71 // loader
72 // or someone else supports the extension
73 return trampolineGetProcAddr(ptr_instance, pName);
74 }
75
76 /**
77 * Get a device level or global level entry point address.
78 * @param device
79 * @param pName
80 * @return
81 * If device is valid, returns a device relative entry point for device level
82 * entry points both core and extensions.
83 * Device relative means call down the device chain.
84 */
85 LOADER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
vkGetDeviceProcAddr(VkDevice device,const char * pName)86 vkGetDeviceProcAddr(VkDevice device, const char *pName) {
87 void *addr;
88
89 /* for entrypoints that loader must handle (ie non-dispatchable or create
90 object)
91 make sure the loader entrypoint is returned */
92 addr = loader_non_passthrough_gdpa(pName);
93 if (addr) {
94 return addr;
95 }
96
97 /* Although CreateDevice is on device chain it's dispatchable object isn't
98 * a VkDevice or child of VkDevice so return NULL.
99 */
100 if (!strcmp(pName, "CreateDevice"))
101 return NULL;
102
103 /* return the dispatch table entrypoint for the fastest case */
104 const VkLayerDispatchTable *disp_table = *(VkLayerDispatchTable **)device;
105 if (disp_table == NULL)
106 return NULL;
107
108 addr = loader_lookup_device_dispatch_table(disp_table, pName);
109 if (addr)
110 return addr;
111
112 if (disp_table->GetDeviceProcAddr == NULL)
113 return NULL;
114 return disp_table->GetDeviceProcAddr(device, pName);
115 }
116
117 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
vkEnumerateInstanceExtensionProperties(const char * pLayerName,uint32_t * pPropertyCount,VkExtensionProperties * pProperties)118 vkEnumerateInstanceExtensionProperties(const char *pLayerName,
119 uint32_t *pPropertyCount,
120 VkExtensionProperties *pProperties) {
121 struct loader_extension_list *global_ext_list = NULL;
122 struct loader_layer_list instance_layers;
123 struct loader_extension_list local_ext_list;
124 struct loader_icd_libs icd_libs;
125 uint32_t copy_size;
126 VkResult res = VK_SUCCESS;
127
128 tls_instance = NULL;
129 memset(&local_ext_list, 0, sizeof(local_ext_list));
130 memset(&instance_layers, 0, sizeof(instance_layers));
131 loader_platform_thread_once(&once_init, loader_initialize);
132
133 /* get layer libraries if needed */
134 if (pLayerName && strlen(pLayerName) != 0) {
135 if (vk_string_validate(MaxLoaderStringLength, pLayerName) !=
136 VK_STRING_ERROR_NONE) {
137 assert(VK_FALSE && "vkEnumerateInstanceExtensionProperties: "
138 "pLayerName is too long or is badly formed");
139 res = VK_ERROR_EXTENSION_NOT_PRESENT;
140 goto out;
141 }
142
143 loader_layer_scan(NULL, &instance_layers);
144 if (strcmp(pLayerName, std_validation_str) == 0) {
145 struct loader_layer_list local_list;
146 memset(&local_list, 0, sizeof(local_list));
147 for (uint32_t i = 0; i < sizeof(std_validation_names) /
148 sizeof(std_validation_names[0]);
149 i++) {
150 loader_find_layer_name_add_list(NULL, std_validation_names[i],
151 VK_LAYER_TYPE_INSTANCE_EXPLICIT,
152 &instance_layers, &local_list);
153 }
154 for (uint32_t i = 0; i < local_list.count; i++) {
155 struct loader_extension_list *ext_list =
156 &local_list.list[i].instance_extension_list;
157 loader_add_to_ext_list(NULL, &local_ext_list, ext_list->count,
158 ext_list->list);
159 }
160 loader_destroy_layer_list(NULL, NULL, &local_list);
161 global_ext_list = &local_ext_list;
162
163 } else {
164 for (uint32_t i = 0; i < instance_layers.count; i++) {
165 struct loader_layer_properties *props =
166 &instance_layers.list[i];
167 if (strcmp(props->info.layerName, pLayerName) == 0) {
168 global_ext_list = &props->instance_extension_list;
169 break;
170 }
171 }
172 }
173 } else {
174 /* Scan/discover all ICD libraries */
175 memset(&icd_libs, 0, sizeof(struct loader_icd_libs));
176 res = loader_icd_scan(NULL, &icd_libs);
177 if (VK_SUCCESS != res) {
178 goto out;
179 }
180 /* get extensions from all ICD's, merge so no duplicates */
181 res = loader_get_icd_loader_instance_extensions(NULL, &icd_libs,
182 &local_ext_list);
183 if (VK_SUCCESS != res) {
184 goto out;
185 }
186 loader_scanned_icd_clear(NULL, &icd_libs);
187
188 // Append implicit layers.
189 loader_implicit_layer_scan(NULL, &instance_layers);
190 for (uint32_t i = 0; i < instance_layers.count; i++) {
191 struct loader_extension_list *ext_list =
192 &instance_layers.list[i].instance_extension_list;
193 loader_add_to_ext_list(NULL, &local_ext_list, ext_list->count,
194 ext_list->list);
195 }
196
197 global_ext_list = &local_ext_list;
198 }
199
200 if (global_ext_list == NULL) {
201 res = VK_ERROR_LAYER_NOT_PRESENT;
202 goto out;
203 }
204
205 if (pProperties == NULL) {
206 *pPropertyCount = global_ext_list->count;
207 goto out;
208 }
209
210 copy_size = *pPropertyCount < global_ext_list->count
211 ? *pPropertyCount
212 : global_ext_list->count;
213 for (uint32_t i = 0; i < copy_size; i++) {
214 memcpy(&pProperties[i], &global_ext_list->list[i],
215 sizeof(VkExtensionProperties));
216 }
217 *pPropertyCount = copy_size;
218
219 if (copy_size < global_ext_list->count) {
220 res = VK_INCOMPLETE;
221 goto out;
222 }
223
224 out:
225 loader_destroy_generic_list(NULL, (struct loader_generic_list *)&local_ext_list);
226 loader_delete_layer_properties(NULL, &instance_layers);
227 return res;
228 }
229
230 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
vkEnumerateInstanceLayerProperties(uint32_t * pPropertyCount,VkLayerProperties * pProperties)231 vkEnumerateInstanceLayerProperties(uint32_t *pPropertyCount,
232 VkLayerProperties *pProperties) {
233
234 struct loader_layer_list instance_layer_list;
235 tls_instance = NULL;
236
237 loader_platform_thread_once(&once_init, loader_initialize);
238
239 uint32_t copy_size;
240
241 /* get layer libraries */
242 memset(&instance_layer_list, 0, sizeof(instance_layer_list));
243 loader_layer_scan(NULL, &instance_layer_list);
244
245 if (pProperties == NULL) {
246 *pPropertyCount = instance_layer_list.count;
247 loader_destroy_layer_list(NULL, NULL, &instance_layer_list);
248 return VK_SUCCESS;
249 }
250
251 copy_size = (*pPropertyCount < instance_layer_list.count)
252 ? *pPropertyCount
253 : instance_layer_list.count;
254 for (uint32_t i = 0; i < copy_size; i++) {
255 memcpy(&pProperties[i], &instance_layer_list.list[i].info,
256 sizeof(VkLayerProperties));
257 }
258
259 *pPropertyCount = copy_size;
260
261 if (copy_size < instance_layer_list.count) {
262 loader_destroy_layer_list(NULL, NULL, &instance_layer_list);
263 return VK_INCOMPLETE;
264 }
265
266 loader_destroy_layer_list(NULL, NULL, &instance_layer_list);
267
268 return VK_SUCCESS;
269 }
270
/**
 * Create a Vulkan instance through the loader.
 *
 * Allocates the loader's per-instance bookkeeping object, installs any
 * temporary debug-report callbacks found on pCreateInfo->pNext, scans for
 * layers and ICDs, validates and expands the requested layers (including the
 * standard-validation meta layer) and extensions, then builds and activates
 * the instance call chain.
 *
 * @param pCreateInfo application's instance create info (dereferenced
 *                    unconditionally; must be non-NULL per the Vulkan spec).
 * @param pAllocator  optional allocation callbacks; copied onto the instance.
 * @param pInstance   receives the created instance on success.
 * @return VK_SUCCESS, or an allocation/validation/chain-creation error; on
 *         failure all partially-created state is torn down before returning.
 */
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(
    const VkInstanceCreateInfo *pCreateInfo,
    const VkAllocationCallbacks *pAllocator, VkInstance *pInstance) {
    struct loader_instance *ptr_instance = NULL;
    VkInstance created_instance = VK_NULL_HANDLE;
    bool loaderLocked = false;
    VkResult res = VK_ERROR_INITIALIZATION_FAILED;

    loader_platform_thread_once(&once_init, loader_initialize);

    // Allocate the loader's instance object, preferring the application's
    // allocator when one was supplied (unless app allocators are compiled
    // out for debugging).
#if (DEBUG_DISABLE_APP_ALLOCATORS == 1)
    {
#else
    if (pAllocator) {
        ptr_instance = (struct loader_instance *)pAllocator->pfnAllocation(
            pAllocator->pUserData, sizeof(struct loader_instance),
            sizeof(int *), VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
    } else {
#endif
        ptr_instance =
            (struct loader_instance *)malloc(sizeof(struct loader_instance));
    }

    // Local mutable copy of the create info: meta-layer expansion below may
    // replace its enabled-layer fields.
    VkInstanceCreateInfo ici = *pCreateInfo;

    if (ptr_instance == NULL) {
        res = VK_ERROR_OUT_OF_HOST_MEMORY;
        goto out;
    }

    tls_instance = ptr_instance;
    loader_platform_thread_lock_mutex(&loader_lock);
    loaderLocked = true;
    memset(ptr_instance, 0, sizeof(struct loader_instance));
    if (pAllocator) {
        ptr_instance->alloc_callbacks = *pAllocator;
    }

    /*
     * Look for one or more debug report create info structures
     * and setup a callback(s) for each one found.
     */
    ptr_instance->num_tmp_callbacks = 0;
    ptr_instance->tmp_dbg_create_infos = NULL;
    ptr_instance->tmp_callbacks = NULL;
    if (util_CopyDebugReportCreateInfos(pCreateInfo->pNext, pAllocator,
                                        &ptr_instance->num_tmp_callbacks,
                                        &ptr_instance->tmp_dbg_create_infos,
                                        &ptr_instance->tmp_callbacks)) {
        // One or more were found, but allocation failed. Therefore, clean up
        // and fail this function:
        res = VK_ERROR_OUT_OF_HOST_MEMORY;
        goto out;
    } else if (ptr_instance->num_tmp_callbacks > 0) {
        // Setup the temporary callback(s) here to catch early issues:
        if (util_CreateDebugReportCallbacks(ptr_instance, pAllocator,
                                            ptr_instance->num_tmp_callbacks,
                                            ptr_instance->tmp_dbg_create_infos,
                                            ptr_instance->tmp_callbacks)) {
            // Failure of setting up one or more of the callback. Therefore,
            // clean up and fail this function:
            res = VK_ERROR_OUT_OF_HOST_MEMORY;
            goto out;
        }
    }

    /* Due to implicit layers need to get layer list even if
     * enabledLayerCount == 0 and VK_INSTANCE_LAYERS is unset. For now always
     * get layer list via loader_layer_scan(). */
    memset(&ptr_instance->instance_layer_list, 0,
           sizeof(ptr_instance->instance_layer_list));
    loader_layer_scan(ptr_instance, &ptr_instance->instance_layer_list);

    /* validate the app requested layers to be enabled */
    if (pCreateInfo->enabledLayerCount > 0) {
        res =
            loader_validate_layers(ptr_instance, pCreateInfo->enabledLayerCount,
                                   pCreateInfo->ppEnabledLayerNames,
                                   &ptr_instance->instance_layer_list);
        if (res != VK_SUCCESS) {
            goto out;
        }
    }

    /* convert any meta layers to the actual layers makes a copy of layer name*/
    VkResult layerErr = loader_expand_layer_names(
        ptr_instance, std_validation_str,
        sizeof(std_validation_names) / sizeof(std_validation_names[0]),
        std_validation_names, &ici.enabledLayerCount, &ici.ppEnabledLayerNames);
    if (VK_SUCCESS != layerErr) {
        res = layerErr;
        goto out;
    }

    /* Scan/discover all ICD libraries */
    memset(&ptr_instance->icd_libs, 0, sizeof(ptr_instance->icd_libs));
    res = loader_icd_scan(ptr_instance, &ptr_instance->icd_libs);
    if (res != VK_SUCCESS) {
        goto out;
    }

    /* get extensions from all ICD's, merge so no duplicates, then validate */
    res = loader_get_icd_loader_instance_extensions(
        ptr_instance, &ptr_instance->icd_libs, &ptr_instance->ext_list);
    if (res != VK_SUCCESS) {
        goto out;
    }
    res = loader_validate_instance_extensions(
        ptr_instance, &ptr_instance->ext_list,
        &ptr_instance->instance_layer_list, &ici);
    if (res != VK_SUCCESS) {
        goto out;
    }

    // Allocate and fill the instance dispatch table, then publish the new
    // instance at the head of the loader's global instance list.
    ptr_instance->disp = loader_instance_heap_alloc(
        ptr_instance, sizeof(VkLayerInstanceDispatchTable),
        VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
    if (ptr_instance->disp == NULL) {
        res = VK_ERROR_OUT_OF_HOST_MEMORY;
        goto out;
    }
    memcpy(ptr_instance->disp, &instance_disp, sizeof(instance_disp));
    ptr_instance->next = loader.instances;
    loader.instances = ptr_instance;

    /* activate any layers on instance chain */
    res = loader_enable_instance_layers(ptr_instance, &ici,
                                        &ptr_instance->instance_layer_list);
    if (res != VK_SUCCESS) {
        goto out;
    }

    created_instance = (VkInstance)ptr_instance;
    res = loader_create_instance_chain(&ici, pAllocator, ptr_instance,
                                       &created_instance);

    if (res == VK_SUCCESS) {
        // Let the WSI, debug-report, and loader-extension trampolines record
        // which of their extensions this instance enabled.
        wsi_create_instance(ptr_instance, &ici);
        debug_report_create_instance(ptr_instance, &ici);
        extensions_create_instance(ptr_instance, &ici);

        *pInstance = created_instance;

        /*
         * Finally have the layers in place and everyone has seen
         * the CreateInstance command go by. This allows the layer's
         * GetInstanceProcAddr functions to return valid extension functions
         * if enabled.
         */
        loader_activate_instance_layer_extensions(ptr_instance, *pInstance);
    }

out:

    if (NULL != ptr_instance) {
        if (res != VK_SUCCESS) {
            // Unwind everything built so far, roughly in reverse order.
            // NOTE(review): loader.instances is only restored when
            // ptr_instance->next is non-NULL; if this instance was pushed as
            // the sole list entry, the head may be left pointing at freed
            // memory -- confirm against the loader's instance-list handling.
            if (NULL != ptr_instance->next) {
                loader.instances = ptr_instance->next;
            }
            if (NULL != ptr_instance->disp) {
                loader_instance_heap_free(ptr_instance, ptr_instance->disp);
            }
            if (ptr_instance->num_tmp_callbacks > 0) {
                util_DestroyDebugReportCallbacks(
                    ptr_instance, pAllocator, ptr_instance->num_tmp_callbacks,
                    ptr_instance->tmp_callbacks);
                util_FreeDebugReportCreateInfos(
                    pAllocator, ptr_instance->tmp_dbg_create_infos,
                    ptr_instance->tmp_callbacks);
            }

            loader_deactivate_layers(ptr_instance, NULL,
                                     &ptr_instance->activated_layer_list);

            loader_delete_shadow_inst_layer_names(ptr_instance, pCreateInfo,
                                                  &ici);
            loader_delete_layer_properties(ptr_instance,
                                           &ptr_instance->instance_layer_list);
            loader_scanned_icd_clear(ptr_instance, &ptr_instance->icd_libs);
            loader_destroy_generic_list(
                ptr_instance,
                (struct loader_generic_list *)&ptr_instance->ext_list);

            loader_instance_heap_free(ptr_instance, ptr_instance);
        } else {
            /* Remove temporary debug_report callback */
            util_DestroyDebugReportCallbacks(ptr_instance, pAllocator,
                                             ptr_instance->num_tmp_callbacks,
                                             ptr_instance->tmp_callbacks);
            loader_delete_shadow_inst_layer_names(ptr_instance, pCreateInfo,
                                                  &ici);
        }

        if (loaderLocked) {
            loader_platform_thread_unlock_mutex(&loader_lock);
        }
    }

    return res;
}
471
472 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(
473 VkInstance instance, const VkAllocationCallbacks *pAllocator) {
474 const VkLayerInstanceDispatchTable *disp;
475 struct loader_instance *ptr_instance = NULL;
476 bool callback_setup = false;
477
478 if (instance == VK_NULL_HANDLE) {
479 return;
480 }
481
482 disp = loader_get_instance_dispatch(instance);
483
484 loader_platform_thread_lock_mutex(&loader_lock);
485
486 ptr_instance = loader_get_instance(instance);
487
488 if (pAllocator) {
489 ptr_instance->alloc_callbacks = *pAllocator;
490 }
491
492 if (ptr_instance->num_tmp_callbacks > 0) {
493 // Setup the temporary callback(s) here to catch cleanup issues:
494 if (!util_CreateDebugReportCallbacks(ptr_instance, pAllocator,
495 ptr_instance->num_tmp_callbacks,
496 ptr_instance->tmp_dbg_create_infos,
497 ptr_instance->tmp_callbacks)) {
498 callback_setup = true;
499 }
500 }
501
502 disp->DestroyInstance(instance, pAllocator);
503
504 loader_deactivate_layers(ptr_instance, NULL,
505 &ptr_instance->activated_layer_list);
506 if (ptr_instance->phys_devs) {
507 loader_instance_heap_free(ptr_instance, ptr_instance->phys_devs);
508 }
509 if (callback_setup) {
510 util_DestroyDebugReportCallbacks(ptr_instance, pAllocator,
511 ptr_instance->num_tmp_callbacks,
512 ptr_instance->tmp_callbacks);
513 util_FreeDebugReportCreateInfos(pAllocator,
514 ptr_instance->tmp_dbg_create_infos,
515 ptr_instance->tmp_callbacks);
516 }
517 loader_instance_heap_free(ptr_instance, ptr_instance->disp);
518 loader_instance_heap_free(ptr_instance, ptr_instance);
519 loader_platform_thread_unlock_mutex(&loader_lock);
520 }
521
522 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
523 vkEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount,
524 VkPhysicalDevice *pPhysicalDevices) {
525 const VkLayerInstanceDispatchTable *disp;
526 VkResult res;
527 uint32_t count, i;
528 struct loader_instance *inst;
529 disp = loader_get_instance_dispatch(instance);
530
531 loader_platform_thread_lock_mutex(&loader_lock);
532 res = disp->EnumeratePhysicalDevices(instance, pPhysicalDeviceCount,
533 pPhysicalDevices);
534
535 if (res != VK_SUCCESS && res != VK_INCOMPLETE) {
536 loader_platform_thread_unlock_mutex(&loader_lock);
537 return res;
538 }
539
540 if (!pPhysicalDevices) {
541 loader_platform_thread_unlock_mutex(&loader_lock);
542 return res;
543 }
544
545 // wrap the PhysDev object for loader usage, return wrapped objects
546 inst = loader_get_instance(instance);
547 if (!inst) {
548 loader_platform_thread_unlock_mutex(&loader_lock);
549 return VK_ERROR_INITIALIZATION_FAILED;
550 }
551 count = (inst->total_gpu_count < *pPhysicalDeviceCount)
552 ? inst->total_gpu_count
553 : *pPhysicalDeviceCount;
554 *pPhysicalDeviceCount = count;
555 if (!inst->phys_devs) {
556 inst->phys_devs =
557 (struct loader_physical_device_tramp *)loader_instance_heap_alloc(
558 inst, inst->total_gpu_count *
559 sizeof(struct loader_physical_device_tramp),
560 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
561 }
562 if (!inst->phys_devs) {
563 loader_platform_thread_unlock_mutex(&loader_lock);
564 return VK_ERROR_OUT_OF_HOST_MEMORY;
565 }
566
567 for (i = 0; i < count; i++) {
568
569 // initialize the loader's physicalDevice object
570 loader_set_dispatch((void *)&inst->phys_devs[i], inst->disp);
571 inst->phys_devs[i].this_instance = inst;
572 inst->phys_devs[i].phys_dev = pPhysicalDevices[i];
573
574 // copy wrapped object into Application provided array
575 pPhysicalDevices[i] = (VkPhysicalDevice)&inst->phys_devs[i];
576 }
577 loader_platform_thread_unlock_mutex(&loader_lock);
578 return res;
579 }
580
581 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
582 vkGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
583 VkPhysicalDeviceFeatures *pFeatures) {
584 const VkLayerInstanceDispatchTable *disp;
585 VkPhysicalDevice unwrapped_phys_dev =
586 loader_unwrap_physical_device(physicalDevice);
587 disp = loader_get_instance_dispatch(physicalDevice);
588 disp->GetPhysicalDeviceFeatures(unwrapped_phys_dev, pFeatures);
589 }
590
591 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
592 vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice,
593 VkFormat format,
594 VkFormatProperties *pFormatInfo) {
595 const VkLayerInstanceDispatchTable *disp;
596 VkPhysicalDevice unwrapped_pd =
597 loader_unwrap_physical_device(physicalDevice);
598 disp = loader_get_instance_dispatch(physicalDevice);
599 disp->GetPhysicalDeviceFormatProperties(unwrapped_pd, format, pFormatInfo);
600 }
601
602 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
603 vkGetPhysicalDeviceImageFormatProperties(
604 VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type,
605 VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
606 VkImageFormatProperties *pImageFormatProperties) {
607 const VkLayerInstanceDispatchTable *disp;
608 VkPhysicalDevice unwrapped_phys_dev =
609 loader_unwrap_physical_device(physicalDevice);
610 disp = loader_get_instance_dispatch(physicalDevice);
611 return disp->GetPhysicalDeviceImageFormatProperties(
612 unwrapped_phys_dev, format, type, tiling, usage, flags,
613 pImageFormatProperties);
614 }
615
616 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
617 vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
618 VkPhysicalDeviceProperties *pProperties) {
619 const VkLayerInstanceDispatchTable *disp;
620 VkPhysicalDevice unwrapped_phys_dev =
621 loader_unwrap_physical_device(physicalDevice);
622 disp = loader_get_instance_dispatch(physicalDevice);
623 disp->GetPhysicalDeviceProperties(unwrapped_phys_dev, pProperties);
624 }
625
626 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
627 vkGetPhysicalDeviceQueueFamilyProperties(
628 VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount,
629 VkQueueFamilyProperties *pQueueProperties) {
630 const VkLayerInstanceDispatchTable *disp;
631 VkPhysicalDevice unwrapped_phys_dev =
632 loader_unwrap_physical_device(physicalDevice);
633 disp = loader_get_instance_dispatch(physicalDevice);
634 disp->GetPhysicalDeviceQueueFamilyProperties(
635 unwrapped_phys_dev, pQueueFamilyPropertyCount, pQueueProperties);
636 }
637
638 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(
639 VkPhysicalDevice physicalDevice,
640 VkPhysicalDeviceMemoryProperties *pMemoryProperties) {
641 const VkLayerInstanceDispatchTable *disp;
642 VkPhysicalDevice unwrapped_phys_dev =
643 loader_unwrap_physical_device(physicalDevice);
644 disp = loader_get_instance_dispatch(physicalDevice);
645 disp->GetPhysicalDeviceMemoryProperties(unwrapped_phys_dev,
646 pMemoryProperties);
647 }
648
/**
 * Create a logical device on the given (loader-wrapped) physical device.
 *
 * Queries the ICD's device extensions, validates the application's requested
 * extensions against the activated layers and the ICD, clones the instance's
 * activated layer list into the new device, builds the device call chain,
 * and initializes the device's extension dispatch tables.
 *
 * @param physicalDevice loader trampoline object wrapping the real handle.
 * @param pCreateInfo    application's device create info.
 * @param pAllocator     optional allocation callbacks.
 * @param pDevice        receives the created VkDevice on success.
 * @return VK_SUCCESS, or an error from validation/allocation/chain setup;
 *         the partially-built device is destroyed on failure.
 */
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(
    VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
    const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
    VkResult res;
    struct loader_physical_device_tramp *phys_dev = NULL;
    struct loader_device *dev = NULL;
    struct loader_instance *inst = NULL;

    // At least one queue create info is required.
    assert(pCreateInfo->queueCreateInfoCount >= 1);

    loader_platform_thread_lock_mutex(&loader_lock);

    phys_dev = (struct loader_physical_device_tramp *)physicalDevice;
    inst = (struct loader_instance *)phys_dev->this_instance;

    /* Get the physical device (ICD) extensions */
    struct loader_extension_list icd_exts;
    icd_exts.list = NULL;
    res =
        loader_init_generic_list(inst, (struct loader_generic_list *)&icd_exts,
                                 sizeof(VkExtensionProperties));
    if (VK_SUCCESS != res) {
        goto out;
    }

    res = loader_add_device_extensions(
        inst, inst->disp->EnumerateDeviceExtensionProperties,
        phys_dev->phys_dev, "Unknown", &icd_exts);
    if (res != VK_SUCCESS) {
        goto out;
    }

    /* make sure requested extensions to be enabled are supported */
    res = loader_validate_device_extensions(
        phys_dev, &inst->activated_layer_list, &icd_exts, pCreateInfo);
    if (res != VK_SUCCESS) {
        goto out;
    }

    dev = loader_create_logical_device(inst, pAllocator);
    if (dev == NULL) {
        res = VK_ERROR_OUT_OF_HOST_MEMORY;
        goto out;
    }

    /* copy the instance layer list into the device */
    dev->activated_layer_list.capacity = inst->activated_layer_list.capacity;
    dev->activated_layer_list.count = inst->activated_layer_list.count;
    dev->activated_layer_list.list =
        loader_device_heap_alloc(dev, inst->activated_layer_list.capacity,
                                 VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
    if (dev->activated_layer_list.list == NULL) {
        res = VK_ERROR_OUT_OF_HOST_MEMORY;
        goto out;
    }
    memcpy(dev->activated_layer_list.list, inst->activated_layer_list.list,
           sizeof(*dev->activated_layer_list.list) *
               dev->activated_layer_list.count);

    res = loader_create_device_chain(phys_dev, pCreateInfo, pAllocator, inst,
                                     dev);
    if (res != VK_SUCCESS) {
        goto out;
    }

    *pDevice = dev->device;

    // Initialize any device extension dispatch entry's from the instance list
    loader_init_dispatch_dev_ext(inst, dev);

    // Initialize WSI device extensions as part of core dispatch since loader
    // has dedicated trampoline code for these*/
    loader_init_device_extension_dispatch_table(
        &dev->loader_dispatch,
        dev->loader_dispatch.core_dispatch.GetDeviceProcAddr, *pDevice);

    // The loader needs to override some terminating device procs. Usually,
    // these are device procs which need to go through a loader terminator.
    // This needs to occur if the loader needs to perform some work prior
    // to passing the work along to the ICD.
    loader_override_terminating_device_proc(*pDevice, &dev->loader_dispatch);

out:

    // Failure cleanup
    if (VK_SUCCESS != res) {
        if (NULL != dev) {
            loader_destroy_logical_device(inst, dev, pAllocator);
        }
    }

    // The ICD extension list is only a scratch copy; always release it.
    if (NULL != icd_exts.list) {
        loader_destroy_generic_list(inst,
                                    (struct loader_generic_list *)&icd_exts);
    }
    loader_platform_thread_unlock_mutex(&loader_lock);
    return res;
}
747
748 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
749 vkDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) {
750 const VkLayerDispatchTable *disp;
751 struct loader_device *dev;
752
753 if (device == VK_NULL_HANDLE) {
754 return;
755 }
756
757 loader_platform_thread_lock_mutex(&loader_lock);
758
759 struct loader_icd *icd = loader_get_icd_and_device(device, &dev, NULL);
760 const struct loader_instance *inst = icd->this_instance;
761 disp = loader_get_dispatch(device);
762
763 disp->DestroyDevice(device, pAllocator);
764 dev->device = NULL;
765 loader_remove_logical_device(inst, icd, dev, pAllocator);
766
767 loader_platform_thread_unlock_mutex(&loader_lock);
768 }
769
/**
 * Enumerate device extensions for a physical device.
 *
 * With pLayerName NULL or empty, the query is dispatched down the instance
 * chain (terminating in the ICD), letting layers filter the result. With a
 * non-empty pLayerName, the extensions come from the named layer's manifest;
 * the standard-validation meta layer is expanded into its component layers.
 *
 * @param physicalDevice loader trampoline object wrapping the real handle.
 * @param pLayerName     optional layer name, or NULL/"" for ICD extensions.
 * @param pPropertyCount in: capacity of pProperties; out: number written, or
 *                       the total available when pProperties is NULL.
 * @param pProperties    destination array, or NULL for a count-only query.
 * @return VK_SUCCESS, VK_INCOMPLETE when the array was too small, or
 *         VK_ERROR_EXTENSION_NOT_PRESENT for a malformed pLayerName.
 */
LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
                                     const char *pLayerName,
                                     uint32_t *pPropertyCount,
                                     VkExtensionProperties *pProperties) {
    VkResult res = VK_SUCCESS;
    struct loader_physical_device_tramp *phys_dev;
    phys_dev = (struct loader_physical_device_tramp *)physicalDevice;

    loader_platform_thread_lock_mutex(&loader_lock);

    /* If pLayerName == NULL, then querying ICD extensions, pass this call
       down the instance chain which will terminate in the ICD. This allows
       layers to filter the extensions coming back up the chain.
       If pLayerName != NULL then get layer extensions from manifest file. */
    if (pLayerName == NULL || strlen(pLayerName) == 0) {
        const VkLayerInstanceDispatchTable *disp;

        disp = loader_get_instance_dispatch(physicalDevice);
        res = disp->EnumerateDeviceExtensionProperties(
            phys_dev->phys_dev, NULL, pPropertyCount, pProperties);
    } else {

        uint32_t count;
        uint32_t copy_size;
        const struct loader_instance *inst = phys_dev->this_instance;
        struct loader_device_extension_list *dev_ext_list = NULL;
        struct loader_device_extension_list local_ext_list;
        memset(&local_ext_list, 0, sizeof(local_ext_list));
        if (vk_string_validate(MaxLoaderStringLength, pLayerName) ==
            VK_STRING_ERROR_NONE) {
            if (strcmp(pLayerName, std_validation_str) == 0) {
                // Meta layer: collect the device extensions of every layer
                // that makes up standard validation.
                struct loader_layer_list local_list;
                memset(&local_list, 0, sizeof(local_list));
                for (uint32_t i = 0; i < sizeof(std_validation_names) /
                                             sizeof(std_validation_names[0]);
                     i++) {
                    loader_find_layer_name_add_list(
                        NULL, std_validation_names[i],
                        VK_LAYER_TYPE_INSTANCE_EXPLICIT, &inst->instance_layer_list,
                        &local_list);
                }
                for (uint32_t i = 0; i < local_list.count; i++) {
                    struct loader_device_extension_list *ext_list =
                        &local_list.list[i].device_extension_list;
                    for (uint32_t j = 0; j < ext_list->count; j++) {
                        loader_add_to_dev_ext_list(NULL, &local_ext_list,
                                                   &ext_list->list[j].props, 0,
                                                   NULL);
                    }
                }
                // NOTE(review): unlike the instance-extension path, local_list
                // is not destroyed before leaving this scope -- confirm whether
                // loader_find_layer_name_add_list transfers ownership or this
                // leaks the copied layer properties.
                dev_ext_list = &local_ext_list;

            } else {
                // Plain layer: take its manifest's device extensions.
                // No break on match, so with duplicate layer names the last
                // match wins.
                for (uint32_t i = 0; i < inst->instance_layer_list.count; i++) {
                    struct loader_layer_properties *props =
                        &inst->instance_layer_list.list[i];
                    if (strcmp(props->info.layerName, pLayerName) == 0) {
                        dev_ext_list = &props->device_extension_list;
                    }
                }
            }

            count = (dev_ext_list == NULL) ? 0 : dev_ext_list->count;
            if (pProperties == NULL) {
                // Count-only query.
                *pPropertyCount = count;
                loader_destroy_generic_list(
                    inst, (struct loader_generic_list *)&local_ext_list);
                loader_platform_thread_unlock_mutex(&loader_lock);
                return VK_SUCCESS;
            }

            copy_size = *pPropertyCount < count ? *pPropertyCount : count;
            for (uint32_t i = 0; i < copy_size; i++) {
                memcpy(&pProperties[i], &dev_ext_list->list[i].props,
                       sizeof(VkExtensionProperties));
            }
            *pPropertyCount = copy_size;

            loader_destroy_generic_list(
                inst, (struct loader_generic_list *)&local_ext_list);
            if (copy_size < count) {
                loader_platform_thread_unlock_mutex(&loader_lock);
                return VK_INCOMPLETE;
            }
        } else {
            loader_log(inst, VK_DEBUG_REPORT_ERROR_BIT_EXT, 0,
                       "vkEnumerateDeviceExtensionProperties: pLayerName "
                       "is too long or is badly formed");
            loader_platform_thread_unlock_mutex(&loader_lock);
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }

    loader_platform_thread_unlock_mutex(&loader_lock);
    return res;
}
867
868 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
869 vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,
870 uint32_t *pPropertyCount,
871 VkLayerProperties *pProperties) {
872 uint32_t copy_size;
873 struct loader_physical_device_tramp *phys_dev;
874 struct loader_layer_list *enabled_layers, layers_list;
875 uint32_t std_val_count = sizeof(std_validation_names) /
876 sizeof(std_validation_names[0]);
877 memset(&layers_list, 0, sizeof(layers_list));
878 loader_platform_thread_lock_mutex(&loader_lock);
879
880 /* Don't dispatch this call down the instance chain, want all device layers
881 enumerated and instance chain may not contain all device layers */
882 // TODO re-evaluate the above statement we maybe able to start calling
883 // down the chain
884
885 phys_dev = (struct loader_physical_device_tramp *)physicalDevice;
886 const struct loader_instance *inst = phys_dev->this_instance;
887
888 uint32_t count = inst->activated_layer_list.count;
889 if (inst->activated_layers_are_std_val)
890 count = count - std_val_count + 1;
891 if (pProperties == NULL) {
892 *pPropertyCount = count;
893 loader_platform_thread_unlock_mutex(&loader_lock);
894 return VK_SUCCESS;
895 }
896 /* make sure to enumerate standard_validation if that is what was used
897 at the instance layer enablement */
898 if (inst->activated_layers_are_std_val) {
899 enabled_layers = &layers_list;
900 enabled_layers->count = count;
901 enabled_layers->capacity = enabled_layers->count *
902 sizeof(struct loader_layer_properties);
903 enabled_layers->list = loader_instance_heap_alloc(inst, enabled_layers->capacity,
904 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
905 if (!enabled_layers->list)
906 return VK_ERROR_OUT_OF_HOST_MEMORY;
907
908 uint32_t j = 0;
909 for (uint32_t i = 0; i < inst->activated_layer_list.count; j++) {
910
911 if (loader_find_layer_name_array(
912 inst->activated_layer_list.list[i].info.layerName,
913 std_val_count, std_validation_names)) {
914 struct loader_layer_properties props;
915 loader_init_std_validation_props(&props);
916 VkResult err = loader_copy_layer_properties(inst,
917 &enabled_layers->list[j],
918 &props);
919 if (err != VK_SUCCESS) {
920 return err;
921 }
922 i += std_val_count;
923 }
924 else {
925 VkResult err = loader_copy_layer_properties(inst,
926 &enabled_layers->list[j],
927 &inst->activated_layer_list.list[i++]);
928 if (err != VK_SUCCESS) {
929 return err;
930 }
931 }
932 }
933 }
934 else {
935 enabled_layers = (struct loader_layer_list *) &inst->activated_layer_list;
936 }
937
938
939 copy_size = (*pPropertyCount < count) ? *pPropertyCount : count;
940 for (uint32_t i = 0; i < copy_size; i++) {
941 memcpy(&pProperties[i], &(enabled_layers->list[i].info),
942 sizeof(VkLayerProperties));
943 }
944 *pPropertyCount = copy_size;
945
946 if (inst->activated_layers_are_std_val) {
947 loader_delete_layer_properties(inst, enabled_layers);
948 }
949 if (copy_size < count) {
950 loader_platform_thread_unlock_mutex(&loader_lock);
951 return VK_INCOMPLETE;
952 }
953
954 loader_platform_thread_unlock_mutex(&loader_lock);
955 return VK_SUCCESS;
956 }
957
958 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
959 vkGetDeviceQueue(VkDevice device, uint32_t queueNodeIndex, uint32_t queueIndex,
960 VkQueue *pQueue) {
961 const VkLayerDispatchTable *disp;
962
963 disp = loader_get_dispatch(device);
964
965 disp->GetDeviceQueue(device, queueNodeIndex, queueIndex, pQueue);
966 loader_set_dispatch(*pQueue, disp);
967 }
968
969 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
970 vkQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits,
971 VkFence fence) {
972 const VkLayerDispatchTable *disp;
973
974 disp = loader_get_dispatch(queue);
975
976 return disp->QueueSubmit(queue, submitCount, pSubmits, fence);
977 }
978
979 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(VkQueue queue) {
980 const VkLayerDispatchTable *disp;
981
982 disp = loader_get_dispatch(queue);
983
984 return disp->QueueWaitIdle(queue);
985 }
986
987 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(VkDevice device) {
988 const VkLayerDispatchTable *disp;
989
990 disp = loader_get_dispatch(device);
991
992 return disp->DeviceWaitIdle(device);
993 }
994
995 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
996 vkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo,
997 const VkAllocationCallbacks *pAllocator,
998 VkDeviceMemory *pMemory) {
999 const VkLayerDispatchTable *disp;
1000
1001 disp = loader_get_dispatch(device);
1002
1003 return disp->AllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
1004 }
1005
1006 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1007 vkFreeMemory(VkDevice device, VkDeviceMemory mem,
1008 const VkAllocationCallbacks *pAllocator) {
1009 const VkLayerDispatchTable *disp;
1010
1011 disp = loader_get_dispatch(device);
1012
1013 disp->FreeMemory(device, mem, pAllocator);
1014 }
1015
1016 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1017 vkMapMemory(VkDevice device, VkDeviceMemory mem, VkDeviceSize offset,
1018 VkDeviceSize size, VkFlags flags, void **ppData) {
1019 const VkLayerDispatchTable *disp;
1020
1021 disp = loader_get_dispatch(device);
1022
1023 return disp->MapMemory(device, mem, offset, size, flags, ppData);
1024 }
1025
1026 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1027 vkUnmapMemory(VkDevice device, VkDeviceMemory mem) {
1028 const VkLayerDispatchTable *disp;
1029
1030 disp = loader_get_dispatch(device);
1031
1032 disp->UnmapMemory(device, mem);
1033 }
1034
1035 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1036 vkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
1037 const VkMappedMemoryRange *pMemoryRanges) {
1038 const VkLayerDispatchTable *disp;
1039
1040 disp = loader_get_dispatch(device);
1041
1042 return disp->FlushMappedMemoryRanges(device, memoryRangeCount,
1043 pMemoryRanges);
1044 }
1045
1046 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1047 vkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount,
1048 const VkMappedMemoryRange *pMemoryRanges) {
1049 const VkLayerDispatchTable *disp;
1050
1051 disp = loader_get_dispatch(device);
1052
1053 return disp->InvalidateMappedMemoryRanges(device, memoryRangeCount,
1054 pMemoryRanges);
1055 }
1056
1057 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1058 vkGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory,
1059 VkDeviceSize *pCommittedMemoryInBytes) {
1060 const VkLayerDispatchTable *disp;
1061
1062 disp = loader_get_dispatch(device);
1063
1064 disp->GetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
1065 }
1066
1067 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1068 vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem,
1069 VkDeviceSize offset) {
1070 const VkLayerDispatchTable *disp;
1071
1072 disp = loader_get_dispatch(device);
1073
1074 return disp->BindBufferMemory(device, buffer, mem, offset);
1075 }
1076
1077 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1078 vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem,
1079 VkDeviceSize offset) {
1080 const VkLayerDispatchTable *disp;
1081
1082 disp = loader_get_dispatch(device);
1083
1084 return disp->BindImageMemory(device, image, mem, offset);
1085 }
1086
1087 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1088 vkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer,
1089 VkMemoryRequirements *pMemoryRequirements) {
1090 const VkLayerDispatchTable *disp;
1091
1092 disp = loader_get_dispatch(device);
1093
1094 disp->GetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
1095 }
1096
1097 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1098 vkGetImageMemoryRequirements(VkDevice device, VkImage image,
1099 VkMemoryRequirements *pMemoryRequirements) {
1100 const VkLayerDispatchTable *disp;
1101
1102 disp = loader_get_dispatch(device);
1103
1104 disp->GetImageMemoryRequirements(device, image, pMemoryRequirements);
1105 }
1106
1107 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements(
1108 VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount,
1109 VkSparseImageMemoryRequirements *pSparseMemoryRequirements) {
1110 const VkLayerDispatchTable *disp;
1111
1112 disp = loader_get_dispatch(device);
1113
1114 disp->GetImageSparseMemoryRequirements(device, image,
1115 pSparseMemoryRequirementCount,
1116 pSparseMemoryRequirements);
1117 }
1118
1119 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1120 vkGetPhysicalDeviceSparseImageFormatProperties(
1121 VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type,
1122 VkSampleCountFlagBits samples, VkImageUsageFlags usage,
1123 VkImageTiling tiling, uint32_t *pPropertyCount,
1124 VkSparseImageFormatProperties *pProperties) {
1125 const VkLayerInstanceDispatchTable *disp;
1126 VkPhysicalDevice unwrapped_phys_dev =
1127 loader_unwrap_physical_device(physicalDevice);
1128 disp = loader_get_instance_dispatch(physicalDevice);
1129
1130 disp->GetPhysicalDeviceSparseImageFormatProperties(
1131 unwrapped_phys_dev, format, type, samples, usage, tiling,
1132 pPropertyCount, pProperties);
1133 }
1134
1135 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1136 vkQueueBindSparse(VkQueue queue, uint32_t bindInfoCount,
1137 const VkBindSparseInfo *pBindInfo, VkFence fence) {
1138 const VkLayerDispatchTable *disp;
1139
1140 disp = loader_get_dispatch(queue);
1141
1142 return disp->QueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
1143 }
1144
1145 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1146 vkCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo,
1147 const VkAllocationCallbacks *pAllocator, VkFence *pFence) {
1148 const VkLayerDispatchTable *disp;
1149
1150 disp = loader_get_dispatch(device);
1151
1152 return disp->CreateFence(device, pCreateInfo, pAllocator, pFence);
1153 }
1154
1155 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1156 vkDestroyFence(VkDevice device, VkFence fence,
1157 const VkAllocationCallbacks *pAllocator) {
1158 const VkLayerDispatchTable *disp;
1159
1160 disp = loader_get_dispatch(device);
1161
1162 disp->DestroyFence(device, fence, pAllocator);
1163 }
1164
1165 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1166 vkResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) {
1167 const VkLayerDispatchTable *disp;
1168
1169 disp = loader_get_dispatch(device);
1170
1171 return disp->ResetFences(device, fenceCount, pFences);
1172 }
1173
1174 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1175 vkGetFenceStatus(VkDevice device, VkFence fence) {
1176 const VkLayerDispatchTable *disp;
1177
1178 disp = loader_get_dispatch(device);
1179
1180 return disp->GetFenceStatus(device, fence);
1181 }
1182
1183 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1184 vkWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences,
1185 VkBool32 waitAll, uint64_t timeout) {
1186 const VkLayerDispatchTable *disp;
1187
1188 disp = loader_get_dispatch(device);
1189
1190 return disp->WaitForFences(device, fenceCount, pFences, waitAll, timeout);
1191 }
1192
1193 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1194 vkCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
1195 const VkAllocationCallbacks *pAllocator,
1196 VkSemaphore *pSemaphore) {
1197 const VkLayerDispatchTable *disp;
1198
1199 disp = loader_get_dispatch(device);
1200
1201 return disp->CreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
1202 }
1203
1204 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1205 vkDestroySemaphore(VkDevice device, VkSemaphore semaphore,
1206 const VkAllocationCallbacks *pAllocator) {
1207 const VkLayerDispatchTable *disp;
1208
1209 disp = loader_get_dispatch(device);
1210
1211 disp->DestroySemaphore(device, semaphore, pAllocator);
1212 }
1213
1214 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1215 vkCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo,
1216 const VkAllocationCallbacks *pAllocator, VkEvent *pEvent) {
1217 const VkLayerDispatchTable *disp;
1218
1219 disp = loader_get_dispatch(device);
1220
1221 return disp->CreateEvent(device, pCreateInfo, pAllocator, pEvent);
1222 }
1223
1224 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1225 vkDestroyEvent(VkDevice device, VkEvent event,
1226 const VkAllocationCallbacks *pAllocator) {
1227 const VkLayerDispatchTable *disp;
1228
1229 disp = loader_get_dispatch(device);
1230
1231 disp->DestroyEvent(device, event, pAllocator);
1232 }
1233
1234 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1235 vkGetEventStatus(VkDevice device, VkEvent event) {
1236 const VkLayerDispatchTable *disp;
1237
1238 disp = loader_get_dispatch(device);
1239
1240 return disp->GetEventStatus(device, event);
1241 }
1242
1243 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1244 vkSetEvent(VkDevice device, VkEvent event) {
1245 const VkLayerDispatchTable *disp;
1246
1247 disp = loader_get_dispatch(device);
1248
1249 return disp->SetEvent(device, event);
1250 }
1251
1252 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1253 vkResetEvent(VkDevice device, VkEvent event) {
1254 const VkLayerDispatchTable *disp;
1255
1256 disp = loader_get_dispatch(device);
1257
1258 return disp->ResetEvent(device, event);
1259 }
1260
1261 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1262 vkCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo,
1263 const VkAllocationCallbacks *pAllocator,
1264 VkQueryPool *pQueryPool) {
1265 const VkLayerDispatchTable *disp;
1266
1267 disp = loader_get_dispatch(device);
1268
1269 return disp->CreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
1270 }
1271
1272 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1273 vkDestroyQueryPool(VkDevice device, VkQueryPool queryPool,
1274 const VkAllocationCallbacks *pAllocator) {
1275 const VkLayerDispatchTable *disp;
1276
1277 disp = loader_get_dispatch(device);
1278
1279 disp->DestroyQueryPool(device, queryPool, pAllocator);
1280 }
1281
1282 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1283 vkGetQueryPoolResults(VkDevice device, VkQueryPool queryPool,
1284 uint32_t firstQuery, uint32_t queryCount, size_t dataSize,
1285 void *pData, VkDeviceSize stride,
1286 VkQueryResultFlags flags) {
1287 const VkLayerDispatchTable *disp;
1288
1289 disp = loader_get_dispatch(device);
1290
1291 return disp->GetQueryPoolResults(device, queryPool, firstQuery, queryCount,
1292 dataSize, pData, stride, flags);
1293 }
1294
1295 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1296 vkCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo,
1297 const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer) {
1298 const VkLayerDispatchTable *disp;
1299
1300 disp = loader_get_dispatch(device);
1301
1302 return disp->CreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
1303 }
1304
1305 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1306 vkDestroyBuffer(VkDevice device, VkBuffer buffer,
1307 const VkAllocationCallbacks *pAllocator) {
1308 const VkLayerDispatchTable *disp;
1309
1310 disp = loader_get_dispatch(device);
1311
1312 disp->DestroyBuffer(device, buffer, pAllocator);
1313 }
1314
1315 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1316 vkCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo,
1317 const VkAllocationCallbacks *pAllocator,
1318 VkBufferView *pView) {
1319 const VkLayerDispatchTable *disp;
1320
1321 disp = loader_get_dispatch(device);
1322
1323 return disp->CreateBufferView(device, pCreateInfo, pAllocator, pView);
1324 }
1325
1326 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1327 vkDestroyBufferView(VkDevice device, VkBufferView bufferView,
1328 const VkAllocationCallbacks *pAllocator) {
1329 const VkLayerDispatchTable *disp;
1330
1331 disp = loader_get_dispatch(device);
1332
1333 disp->DestroyBufferView(device, bufferView, pAllocator);
1334 }
1335
1336 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1337 vkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo,
1338 const VkAllocationCallbacks *pAllocator, VkImage *pImage) {
1339 const VkLayerDispatchTable *disp;
1340
1341 disp = loader_get_dispatch(device);
1342
1343 return disp->CreateImage(device, pCreateInfo, pAllocator, pImage);
1344 }
1345
1346 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1347 vkDestroyImage(VkDevice device, VkImage image,
1348 const VkAllocationCallbacks *pAllocator) {
1349 const VkLayerDispatchTable *disp;
1350
1351 disp = loader_get_dispatch(device);
1352
1353 disp->DestroyImage(device, image, pAllocator);
1354 }
1355
1356 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1357 vkGetImageSubresourceLayout(VkDevice device, VkImage image,
1358 const VkImageSubresource *pSubresource,
1359 VkSubresourceLayout *pLayout) {
1360 const VkLayerDispatchTable *disp;
1361
1362 disp = loader_get_dispatch(device);
1363
1364 disp->GetImageSubresourceLayout(device, image, pSubresource, pLayout);
1365 }
1366
1367 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1368 vkCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo,
1369 const VkAllocationCallbacks *pAllocator, VkImageView *pView) {
1370 const VkLayerDispatchTable *disp;
1371
1372 disp = loader_get_dispatch(device);
1373
1374 return disp->CreateImageView(device, pCreateInfo, pAllocator, pView);
1375 }
1376
1377 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1378 vkDestroyImageView(VkDevice device, VkImageView imageView,
1379 const VkAllocationCallbacks *pAllocator) {
1380 const VkLayerDispatchTable *disp;
1381
1382 disp = loader_get_dispatch(device);
1383
1384 disp->DestroyImageView(device, imageView, pAllocator);
1385 }
1386
1387 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1388 vkCreateShaderModule(VkDevice device,
1389 const VkShaderModuleCreateInfo *pCreateInfo,
1390 const VkAllocationCallbacks *pAllocator,
1391 VkShaderModule *pShader) {
1392 const VkLayerDispatchTable *disp;
1393
1394 disp = loader_get_dispatch(device);
1395
1396 return disp->CreateShaderModule(device, pCreateInfo, pAllocator, pShader);
1397 }
1398
1399 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1400 vkDestroyShaderModule(VkDevice device, VkShaderModule shaderModule,
1401 const VkAllocationCallbacks *pAllocator) {
1402 const VkLayerDispatchTable *disp;
1403
1404 disp = loader_get_dispatch(device);
1405
1406 disp->DestroyShaderModule(device, shaderModule, pAllocator);
1407 }
1408
1409 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1410 vkCreatePipelineCache(VkDevice device,
1411 const VkPipelineCacheCreateInfo *pCreateInfo,
1412 const VkAllocationCallbacks *pAllocator,
1413 VkPipelineCache *pPipelineCache) {
1414 const VkLayerDispatchTable *disp;
1415
1416 disp = loader_get_dispatch(device);
1417
1418 return disp->CreatePipelineCache(device, pCreateInfo, pAllocator,
1419 pPipelineCache);
1420 }
1421
1422 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1423 vkDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache,
1424 const VkAllocationCallbacks *pAllocator) {
1425 const VkLayerDispatchTable *disp;
1426
1427 disp = loader_get_dispatch(device);
1428
1429 disp->DestroyPipelineCache(device, pipelineCache, pAllocator);
1430 }
1431
1432 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1433 vkGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache,
1434 size_t *pDataSize, void *pData) {
1435 const VkLayerDispatchTable *disp;
1436
1437 disp = loader_get_dispatch(device);
1438
1439 return disp->GetPipelineCacheData(device, pipelineCache, pDataSize, pData);
1440 }
1441
1442 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1443 vkMergePipelineCaches(VkDevice device, VkPipelineCache dstCache,
1444 uint32_t srcCacheCount,
1445 const VkPipelineCache *pSrcCaches) {
1446 const VkLayerDispatchTable *disp;
1447
1448 disp = loader_get_dispatch(device);
1449
1450 return disp->MergePipelineCaches(device, dstCache, srcCacheCount,
1451 pSrcCaches);
1452 }
1453
1454 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1455 vkCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache,
1456 uint32_t createInfoCount,
1457 const VkGraphicsPipelineCreateInfo *pCreateInfos,
1458 const VkAllocationCallbacks *pAllocator,
1459 VkPipeline *pPipelines) {
1460 const VkLayerDispatchTable *disp;
1461
1462 disp = loader_get_dispatch(device);
1463
1464 return disp->CreateGraphicsPipelines(device, pipelineCache, createInfoCount,
1465 pCreateInfos, pAllocator, pPipelines);
1466 }
1467
1468 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1469 vkCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache,
1470 uint32_t createInfoCount,
1471 const VkComputePipelineCreateInfo *pCreateInfos,
1472 const VkAllocationCallbacks *pAllocator,
1473 VkPipeline *pPipelines) {
1474 const VkLayerDispatchTable *disp;
1475
1476 disp = loader_get_dispatch(device);
1477
1478 return disp->CreateComputePipelines(device, pipelineCache, createInfoCount,
1479 pCreateInfos, pAllocator, pPipelines);
1480 }
1481
1482 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1483 vkDestroyPipeline(VkDevice device, VkPipeline pipeline,
1484 const VkAllocationCallbacks *pAllocator) {
1485 const VkLayerDispatchTable *disp;
1486
1487 disp = loader_get_dispatch(device);
1488
1489 disp->DestroyPipeline(device, pipeline, pAllocator);
1490 }
1491
1492 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1493 vkCreatePipelineLayout(VkDevice device,
1494 const VkPipelineLayoutCreateInfo *pCreateInfo,
1495 const VkAllocationCallbacks *pAllocator,
1496 VkPipelineLayout *pPipelineLayout) {
1497 const VkLayerDispatchTable *disp;
1498
1499 disp = loader_get_dispatch(device);
1500
1501 return disp->CreatePipelineLayout(device, pCreateInfo, pAllocator,
1502 pPipelineLayout);
1503 }
1504
1505 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1506 vkDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout,
1507 const VkAllocationCallbacks *pAllocator) {
1508 const VkLayerDispatchTable *disp;
1509
1510 disp = loader_get_dispatch(device);
1511
1512 disp->DestroyPipelineLayout(device, pipelineLayout, pAllocator);
1513 }
1514
1515 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1516 vkCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo,
1517 const VkAllocationCallbacks *pAllocator, VkSampler *pSampler) {
1518 const VkLayerDispatchTable *disp;
1519
1520 disp = loader_get_dispatch(device);
1521
1522 return disp->CreateSampler(device, pCreateInfo, pAllocator, pSampler);
1523 }
1524
1525 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1526 vkDestroySampler(VkDevice device, VkSampler sampler,
1527 const VkAllocationCallbacks *pAllocator) {
1528 const VkLayerDispatchTable *disp;
1529
1530 disp = loader_get_dispatch(device);
1531
1532 disp->DestroySampler(device, sampler, pAllocator);
1533 }
1534
1535 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1536 vkCreateDescriptorSetLayout(VkDevice device,
1537 const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
1538 const VkAllocationCallbacks *pAllocator,
1539 VkDescriptorSetLayout *pSetLayout) {
1540 const VkLayerDispatchTable *disp;
1541
1542 disp = loader_get_dispatch(device);
1543
1544 return disp->CreateDescriptorSetLayout(device, pCreateInfo, pAllocator,
1545 pSetLayout);
1546 }
1547
1548 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1549 vkDestroyDescriptorSetLayout(VkDevice device,
1550 VkDescriptorSetLayout descriptorSetLayout,
1551 const VkAllocationCallbacks *pAllocator) {
1552 const VkLayerDispatchTable *disp;
1553
1554 disp = loader_get_dispatch(device);
1555
1556 disp->DestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
1557 }
1558
1559 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1560 vkCreateDescriptorPool(VkDevice device,
1561 const VkDescriptorPoolCreateInfo *pCreateInfo,
1562 const VkAllocationCallbacks *pAllocator,
1563 VkDescriptorPool *pDescriptorPool) {
1564 const VkLayerDispatchTable *disp;
1565
1566 disp = loader_get_dispatch(device);
1567
1568 return disp->CreateDescriptorPool(device, pCreateInfo, pAllocator,
1569 pDescriptorPool);
1570 }
1571
1572 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1573 vkDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
1574 const VkAllocationCallbacks *pAllocator) {
1575 const VkLayerDispatchTable *disp;
1576
1577 disp = loader_get_dispatch(device);
1578
1579 disp->DestroyDescriptorPool(device, descriptorPool, pAllocator);
1580 }
1581
1582 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1583 vkResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool,
1584 VkDescriptorPoolResetFlags flags) {
1585 const VkLayerDispatchTable *disp;
1586
1587 disp = loader_get_dispatch(device);
1588
1589 return disp->ResetDescriptorPool(device, descriptorPool, flags);
1590 }
1591
1592 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1593 vkAllocateDescriptorSets(VkDevice device,
1594 const VkDescriptorSetAllocateInfo *pAllocateInfo,
1595 VkDescriptorSet *pDescriptorSets) {
1596 const VkLayerDispatchTable *disp;
1597
1598 disp = loader_get_dispatch(device);
1599
1600 return disp->AllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
1601 }
1602
1603 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1604 vkFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool,
1605 uint32_t descriptorSetCount,
1606 const VkDescriptorSet *pDescriptorSets) {
1607 const VkLayerDispatchTable *disp;
1608
1609 disp = loader_get_dispatch(device);
1610
1611 return disp->FreeDescriptorSets(device, descriptorPool, descriptorSetCount,
1612 pDescriptorSets);
1613 }
1614
1615 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1616 vkUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount,
1617 const VkWriteDescriptorSet *pDescriptorWrites,
1618 uint32_t descriptorCopyCount,
1619 const VkCopyDescriptorSet *pDescriptorCopies) {
1620 const VkLayerDispatchTable *disp;
1621
1622 disp = loader_get_dispatch(device);
1623
1624 disp->UpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites,
1625 descriptorCopyCount, pDescriptorCopies);
1626 }
1627
1628 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1629 vkCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo,
1630 const VkAllocationCallbacks *pAllocator,
1631 VkFramebuffer *pFramebuffer) {
1632 const VkLayerDispatchTable *disp;
1633
1634 disp = loader_get_dispatch(device);
1635
1636 return disp->CreateFramebuffer(device, pCreateInfo, pAllocator,
1637 pFramebuffer);
1638 }
1639
1640 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1641 vkDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer,
1642 const VkAllocationCallbacks *pAllocator) {
1643 const VkLayerDispatchTable *disp;
1644
1645 disp = loader_get_dispatch(device);
1646
1647 disp->DestroyFramebuffer(device, framebuffer, pAllocator);
1648 }
1649
1650 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1651 vkCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo,
1652 const VkAllocationCallbacks *pAllocator,
1653 VkRenderPass *pRenderPass) {
1654 const VkLayerDispatchTable *disp;
1655
1656 disp = loader_get_dispatch(device);
1657
1658 return disp->CreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
1659 }
1660
1661 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1662 vkDestroyRenderPass(VkDevice device, VkRenderPass renderPass,
1663 const VkAllocationCallbacks *pAllocator) {
1664 const VkLayerDispatchTable *disp;
1665
1666 disp = loader_get_dispatch(device);
1667
1668 disp->DestroyRenderPass(device, renderPass, pAllocator);
1669 }
1670
1671 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1672 vkGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass,
1673 VkExtent2D *pGranularity) {
1674 const VkLayerDispatchTable *disp;
1675
1676 disp = loader_get_dispatch(device);
1677
1678 disp->GetRenderAreaGranularity(device, renderPass, pGranularity);
1679 }
1680
1681 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1682 vkCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo,
1683 const VkAllocationCallbacks *pAllocator,
1684 VkCommandPool *pCommandPool) {
1685 const VkLayerDispatchTable *disp;
1686
1687 disp = loader_get_dispatch(device);
1688
1689 return disp->CreateCommandPool(device, pCreateInfo, pAllocator,
1690 pCommandPool);
1691 }
1692
1693 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1694 vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool,
1695 const VkAllocationCallbacks *pAllocator) {
1696 const VkLayerDispatchTable *disp;
1697
1698 disp = loader_get_dispatch(device);
1699
1700 disp->DestroyCommandPool(device, commandPool, pAllocator);
1701 }
1702
1703 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1704 vkResetCommandPool(VkDevice device, VkCommandPool commandPool,
1705 VkCommandPoolResetFlags flags) {
1706 const VkLayerDispatchTable *disp;
1707
1708 disp = loader_get_dispatch(device);
1709
1710 return disp->ResetCommandPool(device, commandPool, flags);
1711 }
1712
1713 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1714 vkAllocateCommandBuffers(VkDevice device,
1715 const VkCommandBufferAllocateInfo *pAllocateInfo,
1716 VkCommandBuffer *pCommandBuffers) {
1717 const VkLayerDispatchTable *disp;
1718 VkResult res;
1719
1720 disp = loader_get_dispatch(device);
1721
1722 res = disp->AllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
1723 if (res == VK_SUCCESS) {
1724 for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
1725 if (pCommandBuffers[i]) {
1726 loader_init_dispatch(pCommandBuffers[i], disp);
1727 }
1728 }
1729 }
1730
1731 return res;
1732 }
1733
1734 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1735 vkFreeCommandBuffers(VkDevice device, VkCommandPool commandPool,
1736 uint32_t commandBufferCount,
1737 const VkCommandBuffer *pCommandBuffers) {
1738 const VkLayerDispatchTable *disp;
1739
1740 disp = loader_get_dispatch(device);
1741
1742 disp->FreeCommandBuffers(device, commandPool, commandBufferCount,
1743 pCommandBuffers);
1744 }
1745
1746 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1747 vkBeginCommandBuffer(VkCommandBuffer commandBuffer,
1748 const VkCommandBufferBeginInfo *pBeginInfo) {
1749 const VkLayerDispatchTable *disp;
1750
1751 disp = loader_get_dispatch(commandBuffer);
1752
1753 return disp->BeginCommandBuffer(commandBuffer, pBeginInfo);
1754 }
1755
1756 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1757 vkEndCommandBuffer(VkCommandBuffer commandBuffer) {
1758 const VkLayerDispatchTable *disp;
1759
1760 disp = loader_get_dispatch(commandBuffer);
1761
1762 return disp->EndCommandBuffer(commandBuffer);
1763 }
1764
1765 LOADER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL
1766 vkResetCommandBuffer(VkCommandBuffer commandBuffer,
1767 VkCommandBufferResetFlags flags) {
1768 const VkLayerDispatchTable *disp;
1769
1770 disp = loader_get_dispatch(commandBuffer);
1771
1772 return disp->ResetCommandBuffer(commandBuffer, flags);
1773 }
1774
1775 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1776 vkCmdBindPipeline(VkCommandBuffer commandBuffer,
1777 VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
1778 const VkLayerDispatchTable *disp;
1779
1780 disp = loader_get_dispatch(commandBuffer);
1781
1782 disp->CmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
1783 }
1784
1785 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1786 vkCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport,
1787 uint32_t viewportCount, const VkViewport *pViewports) {
1788 const VkLayerDispatchTable *disp;
1789
1790 disp = loader_get_dispatch(commandBuffer);
1791
1792 disp->CmdSetViewport(commandBuffer, firstViewport, viewportCount,
1793 pViewports);
1794 }
1795
1796 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1797 vkCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor,
1798 uint32_t scissorCount, const VkRect2D *pScissors) {
1799 const VkLayerDispatchTable *disp;
1800
1801 disp = loader_get_dispatch(commandBuffer);
1802
1803 disp->CmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
1804 }
1805
1806 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1807 vkCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) {
1808 const VkLayerDispatchTable *disp;
1809
1810 disp = loader_get_dispatch(commandBuffer);
1811
1812 disp->CmdSetLineWidth(commandBuffer, lineWidth);
1813 }
1814
1815 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1816 vkCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor,
1817 float depthBiasClamp, float depthBiasSlopeFactor) {
1818 const VkLayerDispatchTable *disp;
1819
1820 disp = loader_get_dispatch(commandBuffer);
1821
1822 disp->CmdSetDepthBias(commandBuffer, depthBiasConstantFactor,
1823 depthBiasClamp, depthBiasSlopeFactor);
1824 }
1825
1826 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1827 vkCmdSetBlendConstants(VkCommandBuffer commandBuffer,
1828 const float blendConstants[4]) {
1829 const VkLayerDispatchTable *disp;
1830
1831 disp = loader_get_dispatch(commandBuffer);
1832
1833 disp->CmdSetBlendConstants(commandBuffer, blendConstants);
1834 }
1835
1836 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1837 vkCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds,
1838 float maxDepthBounds) {
1839 const VkLayerDispatchTable *disp;
1840
1841 disp = loader_get_dispatch(commandBuffer);
1842
1843 disp->CmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
1844 }
1845
1846 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1847 vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer,
1848 VkStencilFaceFlags faceMask, uint32_t compareMask) {
1849 const VkLayerDispatchTable *disp;
1850
1851 disp = loader_get_dispatch(commandBuffer);
1852
1853 disp->CmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
1854 }
1855
1856 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1857 vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer,
1858 VkStencilFaceFlags faceMask, uint32_t writeMask) {
1859 const VkLayerDispatchTable *disp;
1860
1861 disp = loader_get_dispatch(commandBuffer);
1862
1863 disp->CmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
1864 }
1865
1866 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1867 vkCmdSetStencilReference(VkCommandBuffer commandBuffer,
1868 VkStencilFaceFlags faceMask, uint32_t reference) {
1869 const VkLayerDispatchTable *disp;
1870
1871 disp = loader_get_dispatch(commandBuffer);
1872
1873 disp->CmdSetStencilReference(commandBuffer, faceMask, reference);
1874 }
1875
1876 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(
1877 VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint,
1878 VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount,
1879 const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount,
1880 const uint32_t *pDynamicOffsets) {
1881 const VkLayerDispatchTable *disp;
1882
1883 disp = loader_get_dispatch(commandBuffer);
1884
1885 disp->CmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout,
1886 firstSet, descriptorSetCount, pDescriptorSets,
1887 dynamicOffsetCount, pDynamicOffsets);
1888 }
1889
1890 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1891 vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer,
1892 VkDeviceSize offset, VkIndexType indexType) {
1893 const VkLayerDispatchTable *disp;
1894
1895 disp = loader_get_dispatch(commandBuffer);
1896
1897 disp->CmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
1898 }
1899
1900 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1901 vkCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding,
1902 uint32_t bindingCount, const VkBuffer *pBuffers,
1903 const VkDeviceSize *pOffsets) {
1904 const VkLayerDispatchTable *disp;
1905
1906 disp = loader_get_dispatch(commandBuffer);
1907
1908 disp->CmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount,
1909 pBuffers, pOffsets);
1910 }
1911
1912 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1913 vkCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount,
1914 uint32_t instanceCount, uint32_t firstVertex,
1915 uint32_t firstInstance) {
1916 const VkLayerDispatchTable *disp;
1917
1918 disp = loader_get_dispatch(commandBuffer);
1919
1920 disp->CmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex,
1921 firstInstance);
1922 }
1923
1924 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1925 vkCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount,
1926 uint32_t instanceCount, uint32_t firstIndex,
1927 int32_t vertexOffset, uint32_t firstInstance) {
1928 const VkLayerDispatchTable *disp;
1929
1930 disp = loader_get_dispatch(commandBuffer);
1931
1932 disp->CmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex,
1933 vertexOffset, firstInstance);
1934 }
1935
1936 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1937 vkCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
1938 VkDeviceSize offset, uint32_t drawCount, uint32_t stride) {
1939 const VkLayerDispatchTable *disp;
1940
1941 disp = loader_get_dispatch(commandBuffer);
1942
1943 disp->CmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
1944 }
1945
1946 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1947 vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
1948 VkDeviceSize offset, uint32_t drawCount,
1949 uint32_t stride) {
1950 const VkLayerDispatchTable *disp;
1951
1952 disp = loader_get_dispatch(commandBuffer);
1953
1954 disp->CmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount,
1955 stride);
1956 }
1957
1958 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1959 vkCmdDispatch(VkCommandBuffer commandBuffer, uint32_t x, uint32_t y,
1960 uint32_t z) {
1961 const VkLayerDispatchTable *disp;
1962
1963 disp = loader_get_dispatch(commandBuffer);
1964
1965 disp->CmdDispatch(commandBuffer, x, y, z);
1966 }
1967
1968 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1969 vkCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer,
1970 VkDeviceSize offset) {
1971 const VkLayerDispatchTable *disp;
1972
1973 disp = loader_get_dispatch(commandBuffer);
1974
1975 disp->CmdDispatchIndirect(commandBuffer, buffer, offset);
1976 }
1977
1978 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1979 vkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer,
1980 VkBuffer dstBuffer, uint32_t regionCount,
1981 const VkBufferCopy *pRegions) {
1982 const VkLayerDispatchTable *disp;
1983
1984 disp = loader_get_dispatch(commandBuffer);
1985
1986 disp->CmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount,
1987 pRegions);
1988 }
1989
1990 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
1991 vkCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage,
1992 VkImageLayout srcImageLayout, VkImage dstImage,
1993 VkImageLayout dstImageLayout, uint32_t regionCount,
1994 const VkImageCopy *pRegions) {
1995 const VkLayerDispatchTable *disp;
1996
1997 disp = loader_get_dispatch(commandBuffer);
1998
1999 disp->CmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage,
2000 dstImageLayout, regionCount, pRegions);
2001 }
2002
2003 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2004 vkCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage,
2005 VkImageLayout srcImageLayout, VkImage dstImage,
2006 VkImageLayout dstImageLayout, uint32_t regionCount,
2007 const VkImageBlit *pRegions, VkFilter filter) {
2008 const VkLayerDispatchTable *disp;
2009
2010 disp = loader_get_dispatch(commandBuffer);
2011
2012 disp->CmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage,
2013 dstImageLayout, regionCount, pRegions, filter);
2014 }
2015
2016 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2017 vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer,
2018 VkImage dstImage, VkImageLayout dstImageLayout,
2019 uint32_t regionCount,
2020 const VkBufferImageCopy *pRegions) {
2021 const VkLayerDispatchTable *disp;
2022
2023 disp = loader_get_dispatch(commandBuffer);
2024
2025 disp->CmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage,
2026 dstImageLayout, regionCount, pRegions);
2027 }
2028
2029 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2030 vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage,
2031 VkImageLayout srcImageLayout, VkBuffer dstBuffer,
2032 uint32_t regionCount,
2033 const VkBufferImageCopy *pRegions) {
2034 const VkLayerDispatchTable *disp;
2035
2036 disp = loader_get_dispatch(commandBuffer);
2037
2038 disp->CmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout,
2039 dstBuffer, regionCount, pRegions);
2040 }
2041
2042 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2043 vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
2044 VkDeviceSize dstOffset, VkDeviceSize dataSize,
2045 const void *pData) {
2046 const VkLayerDispatchTable *disp;
2047
2048 disp = loader_get_dispatch(commandBuffer);
2049
2050 disp->CmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
2051 }
2052
2053 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2054 vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer,
2055 VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data) {
2056 const VkLayerDispatchTable *disp;
2057
2058 disp = loader_get_dispatch(commandBuffer);
2059
2060 disp->CmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
2061 }
2062
2063 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2064 vkCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image,
2065 VkImageLayout imageLayout, const VkClearColorValue *pColor,
2066 uint32_t rangeCount,
2067 const VkImageSubresourceRange *pRanges) {
2068 const VkLayerDispatchTable *disp;
2069
2070 disp = loader_get_dispatch(commandBuffer);
2071
2072 disp->CmdClearColorImage(commandBuffer, image, imageLayout, pColor,
2073 rangeCount, pRanges);
2074 }
2075
2076 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2077 vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image,
2078 VkImageLayout imageLayout,
2079 const VkClearDepthStencilValue *pDepthStencil,
2080 uint32_t rangeCount,
2081 const VkImageSubresourceRange *pRanges) {
2082 const VkLayerDispatchTable *disp;
2083
2084 disp = loader_get_dispatch(commandBuffer);
2085
2086 disp->CmdClearDepthStencilImage(commandBuffer, image, imageLayout,
2087 pDepthStencil, rangeCount, pRanges);
2088 }
2089
2090 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2091 vkCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount,
2092 const VkClearAttachment *pAttachments, uint32_t rectCount,
2093 const VkClearRect *pRects) {
2094 const VkLayerDispatchTable *disp;
2095
2096 disp = loader_get_dispatch(commandBuffer);
2097
2098 disp->CmdClearAttachments(commandBuffer, attachmentCount, pAttachments,
2099 rectCount, pRects);
2100 }
2101
2102 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2103 vkCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage,
2104 VkImageLayout srcImageLayout, VkImage dstImage,
2105 VkImageLayout dstImageLayout, uint32_t regionCount,
2106 const VkImageResolve *pRegions) {
2107 const VkLayerDispatchTable *disp;
2108
2109 disp = loader_get_dispatch(commandBuffer);
2110
2111 disp->CmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage,
2112 dstImageLayout, regionCount, pRegions);
2113 }
2114
2115 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2116 vkCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event,
2117 VkPipelineStageFlags stageMask) {
2118 const VkLayerDispatchTable *disp;
2119
2120 disp = loader_get_dispatch(commandBuffer);
2121
2122 disp->CmdSetEvent(commandBuffer, event, stageMask);
2123 }
2124
2125 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2126 vkCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event,
2127 VkPipelineStageFlags stageMask) {
2128 const VkLayerDispatchTable *disp;
2129
2130 disp = loader_get_dispatch(commandBuffer);
2131
2132 disp->CmdResetEvent(commandBuffer, event, stageMask);
2133 }
2134
2135 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2136 vkCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount,
2137 const VkEvent *pEvents, VkPipelineStageFlags sourceStageMask,
2138 VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount,
2139 const VkMemoryBarrier *pMemoryBarriers,
2140 uint32_t bufferMemoryBarrierCount,
2141 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2142 uint32_t imageMemoryBarrierCount,
2143 const VkImageMemoryBarrier *pImageMemoryBarriers) {
2144 const VkLayerDispatchTable *disp;
2145
2146 disp = loader_get_dispatch(commandBuffer);
2147
2148 disp->CmdWaitEvents(commandBuffer, eventCount, pEvents, sourceStageMask,
2149 dstStageMask, memoryBarrierCount, pMemoryBarriers,
2150 bufferMemoryBarrierCount, pBufferMemoryBarriers,
2151 imageMemoryBarrierCount, pImageMemoryBarriers);
2152 }
2153
2154 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier(
2155 VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
2156 VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
2157 uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers,
2158 uint32_t bufferMemoryBarrierCount,
2159 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
2160 uint32_t imageMemoryBarrierCount,
2161 const VkImageMemoryBarrier *pImageMemoryBarriers) {
2162 const VkLayerDispatchTable *disp;
2163
2164 disp = loader_get_dispatch(commandBuffer);
2165
2166 disp->CmdPipelineBarrier(
2167 commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
2168 memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
2169 pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2170 }
2171
2172 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2173 vkCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
2174 uint32_t slot, VkFlags flags) {
2175 const VkLayerDispatchTable *disp;
2176
2177 disp = loader_get_dispatch(commandBuffer);
2178
2179 disp->CmdBeginQuery(commandBuffer, queryPool, slot, flags);
2180 }
2181
2182 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2183 vkCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
2184 uint32_t slot) {
2185 const VkLayerDispatchTable *disp;
2186
2187 disp = loader_get_dispatch(commandBuffer);
2188
2189 disp->CmdEndQuery(commandBuffer, queryPool, slot);
2190 }
2191
2192 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2193 vkCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
2194 uint32_t firstQuery, uint32_t queryCount) {
2195 const VkLayerDispatchTable *disp;
2196
2197 disp = loader_get_dispatch(commandBuffer);
2198
2199 disp->CmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
2200 }
2201
2202 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2203 vkCmdWriteTimestamp(VkCommandBuffer commandBuffer,
2204 VkPipelineStageFlagBits pipelineStage,
2205 VkQueryPool queryPool, uint32_t slot) {
2206 const VkLayerDispatchTable *disp;
2207
2208 disp = loader_get_dispatch(commandBuffer);
2209
2210 disp->CmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, slot);
2211 }
2212
2213 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2214 vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool,
2215 uint32_t firstQuery, uint32_t queryCount,
2216 VkBuffer dstBuffer, VkDeviceSize dstOffset,
2217 VkDeviceSize stride, VkFlags flags) {
2218 const VkLayerDispatchTable *disp;
2219
2220 disp = loader_get_dispatch(commandBuffer);
2221
2222 disp->CmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery,
2223 queryCount, dstBuffer, dstOffset, stride,
2224 flags);
2225 }
2226
2227 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2228 vkCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout,
2229 VkShaderStageFlags stageFlags, uint32_t offset,
2230 uint32_t size, const void *pValues) {
2231 const VkLayerDispatchTable *disp;
2232
2233 disp = loader_get_dispatch(commandBuffer);
2234
2235 disp->CmdPushConstants(commandBuffer, layout, stageFlags, offset, size,
2236 pValues);
2237 }
2238
2239 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2240 vkCmdBeginRenderPass(VkCommandBuffer commandBuffer,
2241 const VkRenderPassBeginInfo *pRenderPassBegin,
2242 VkSubpassContents contents) {
2243 const VkLayerDispatchTable *disp;
2244
2245 disp = loader_get_dispatch(commandBuffer);
2246
2247 disp->CmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
2248 }
2249
2250 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2251 vkCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) {
2252 const VkLayerDispatchTable *disp;
2253
2254 disp = loader_get_dispatch(commandBuffer);
2255
2256 disp->CmdNextSubpass(commandBuffer, contents);
2257 }
2258
2259 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2260 vkCmdEndRenderPass(VkCommandBuffer commandBuffer) {
2261 const VkLayerDispatchTable *disp;
2262
2263 disp = loader_get_dispatch(commandBuffer);
2264
2265 disp->CmdEndRenderPass(commandBuffer);
2266 }
2267
2268 LOADER_EXPORT VKAPI_ATTR void VKAPI_CALL
2269 vkCmdExecuteCommands(VkCommandBuffer commandBuffer,
2270 uint32_t commandBuffersCount,
2271 const VkCommandBuffer *pCommandBuffers) {
2272 const VkLayerDispatchTable *disp;
2273
2274 disp = loader_get_dispatch(commandBuffer);
2275
2276 disp->CmdExecuteCommands(commandBuffer, commandBuffersCount,
2277 pCommandBuffers);
2278 }
2279