1 // Copyright 2018 The SwiftShader Authors. All Rights Reserved.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 #include "VkBuffer.hpp"
16 #include "VkBufferView.hpp"
17 #include "VkCommandBuffer.hpp"
18 #include "VkCommandPool.hpp"
19 #include "VkConfig.h"
20 #include "VkDescriptorPool.hpp"
21 #include "VkDescriptorSetLayout.hpp"
22 #include "VkDescriptorUpdateTemplate.hpp"
23 #include "VkDestroy.h"
24 #include "VkDevice.hpp"
25 #include "VkDeviceMemory.hpp"
26 #include "VkEvent.hpp"
27 #include "VkFence.hpp"
28 #include "VkFramebuffer.hpp"
29 #include "VkGetProcAddress.h"
30 #include "VkImage.hpp"
31 #include "VkImageView.hpp"
32 #include "VkInstance.hpp"
33 #include "VkPhysicalDevice.hpp"
34 #include "VkPipeline.hpp"
35 #include "VkPipelineCache.hpp"
36 #include "VkPipelineLayout.hpp"
37 #include "VkQueryPool.hpp"
38 #include "VkQueue.hpp"
39 #include "VkRenderPass.hpp"
40 #include "VkSampler.hpp"
41 #include "VkSemaphore.hpp"
42 #include "VkShaderModule.hpp"
43 #include "VkStringify.hpp"
44
45 #include "System/Debug.hpp"
46
47 #if defined(VK_USE_PLATFORM_METAL_EXT) || defined(VK_USE_PLATFORM_MACOS_MVK)
48 # include "WSI/MetalSurface.h"
49 #endif
50
51 #ifdef VK_USE_PLATFORM_XCB_KHR
52 # include "WSI/XcbSurfaceKHR.hpp"
53 #endif
54
55 #ifdef VK_USE_PLATFORM_XLIB_KHR
56 # include "WSI/XlibSurfaceKHR.hpp"
57 #endif
58
59 #ifdef VK_USE_PLATFORM_WIN32_KHR
60 # include "WSI/Win32SurfaceKHR.hpp"
61 #endif
62
63 #ifdef __ANDROID__
64 # include "commit.h"
65 # include "System/GrallocAndroid.hpp"
66 # include <android/log.h>
67 # include <hardware/gralloc1.h>
68 # include <sync/sync.h>
69 #endif
70
71 #include "WSI/VkSwapchainKHR.hpp"
72
73 #include "Reactor/Nucleus.hpp"
74
75 #include "marl/scheduler.h"
76 #include "marl/thread.h"
77
78 #include "System/CPUID.hpp"
79
80 #include <algorithm>
81 #include <cstring>
82 #include <map>
83 #include <string>
84
85 namespace {
86
87 // Enable commit_id.py and #include commit.h for other platforms.
88 #if defined(__ANDROID__) && defined(ENABLE_BUILD_VERSION_OUTPUT)
// Emits the SwiftShader version string to the Android system log.
// Only compiled on Android builds with ENABLE_BUILD_VERSION_OUTPUT defined.
void logBuildVersionInformation()
{
	// TODO(b/144093703): Don't call __android_log_print() directly
	__android_log_print(ANDROID_LOG_INFO, "SwiftShader", "SwiftShader Version: %s", SWIFTSHADER_VERSION_STRING);
}
94 #endif // __ANDROID__ && ENABLE_BUILD_VERSION_OUTPUT
95
HasExtensionProperty(const char * extensionName,const VkExtensionProperties * extensionProperties,uint32_t extensionPropertiesCount)96 bool HasExtensionProperty(const char *extensionName, const VkExtensionProperties *extensionProperties, uint32_t extensionPropertiesCount)
97 {
98 for(uint32_t j = 0; j < extensionPropertiesCount; ++j)
99 {
100 if(strcmp(extensionName, extensionProperties[j].extensionName) == 0)
101 {
102 return true;
103 }
104 }
105
106 return false;
107 }
108
109 // setReactorDefaultConfig() sets the default configuration for Vulkan's use of
110 // Reactor.
void setReactorDefaultConfig()
{
	// Optimization passes run in the listed order. CFGSimplification is
	// scheduled twice: once after SCCP and again after EarlyCSE.
	auto cfg = rr::Config::Edit()
	               .set(rr::Optimization::Level::Default)
	               .clearOptimizationPasses()  // drop Reactor's built-in pass list before adding ours
	               .add(rr::Optimization::Pass::ScalarReplAggregates)
	               .add(rr::Optimization::Pass::SCCP)
	               .add(rr::Optimization::Pass::CFGSimplification)
	               .add(rr::Optimization::Pass::EarlyCSEPass)
	               .add(rr::Optimization::Pass::CFGSimplification)
	               .add(rr::Optimization::Pass::InstructionCombining);

	rr::Nucleus::adjustDefaultConfig(cfg);
}
125
// Enables the SSE instruction-set family flags (SSE through SSE4.1) used by
// SwiftShader's CPU code paths.
void setCPUDefaults()
{
	sw::CPUID::setEnableSSE4_1(true);
	sw::CPUID::setEnableSSSE3(true);
	sw::CPUID::setEnableSSE3(true);
	sw::CPUID::setEnableSSE2(true);
	sw::CPUID::setEnableSSE(true);
}
134
// Returns the shared marl scheduler, creating it on first use.
// Only a weak_ptr is cached, so the scheduler is destroyed when the last
// shared_ptr owner releases it, and lazily recreated on the next call.
std::shared_ptr<marl::Scheduler> getOrCreateScheduler()
{
	static std::mutex mutex;                             // guards schedulerWeak
	static std::weak_ptr<marl::Scheduler> schedulerWeak;
	std::unique_lock<std::mutex> lock(mutex);
	auto scheduler = schedulerWeak.lock();
	if(!scheduler)
	{
		scheduler = std::make_shared<marl::Scheduler>();
		// Every worker thread runs with flush-to-zero and denormals-are-zero
		// enabled.
		scheduler->setThreadInitializer([] {
			sw::CPUID::setFlushToZero(true);
			sw::CPUID::setDenormalsAreZero(true);
		});
		// Cap the worker count at 16 threads.
		scheduler->setWorkerThreadCount(std::min<size_t>(marl::Thread::numLogicalCPUs(), 16));
		schedulerWeak = scheduler;
	}
	return scheduler;
}
153
154 // initializeLibrary() is called by vkCreateInstance() to perform one-off global
155 // initialization of the swiftshader driver.
initializeLibrary()156 void initializeLibrary()
157 {
158 static bool doOnce = [] {
159 #if defined(__ANDROID__) && defined(ENABLE_BUILD_VERSION_OUTPUT)
160 logBuildVersionInformation();
161 #endif // __ANDROID__ && ENABLE_BUILD_VERSION_OUTPUT
162 setReactorDefaultConfig();
163 setCPUDefaults();
164 return true;
165 }();
166 (void)doOnce;
167 }
168
// Walks the pNext chain of a render pass create-info structure (the template
// parameter T allows reuse across the render pass create-info variants) and
// ASSERTs the valid-usage rules for VkRenderPassInputAttachmentAspectCreateInfo
// and VkRenderPassMultiviewCreateInfo. Unrecognized structures are logged via
// LOG_TRAP. Debug-checking only: no state is modified.
template<class T>
void ValidateRenderPassPNextChain(VkDevice device, const T *pCreateInfo)
{
	const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);

	while(extensionCreateInfo)
	{
		switch(extensionCreateInfo->sType)
		{
			case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO:
			{
				const VkRenderPassInputAttachmentAspectCreateInfo *inputAttachmentAspectCreateInfo = reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo *>(extensionCreateInfo);

				for(uint32_t i = 0; i < inputAttachmentAspectCreateInfo->aspectReferenceCount; i++)
				{
					const auto &aspectReference = inputAttachmentAspectCreateInfo->pAspectReferences[i];
					// Each aspect reference must point at a valid subpass and a valid
					// input attachment slot within that subpass.
					ASSERT(aspectReference.subpass < pCreateInfo->subpassCount);
					const auto &subpassDescription = pCreateInfo->pSubpasses[aspectReference.subpass];
					ASSERT(aspectReference.inputAttachmentIndex < subpassDescription.inputAttachmentCount);
					const auto &attachmentReference = subpassDescription.pInputAttachments[aspectReference.inputAttachmentIndex];
					if(attachmentReference.attachment != VK_ATTACHMENT_UNUSED)
					{
						// If the pNext chain includes an instance of VkRenderPassInputAttachmentAspectCreateInfo, for any
						// element of the pInputAttachments member of any element of pSubpasses where the attachment member
						// is not VK_ATTACHMENT_UNUSED, the aspectMask member of the corresponding element of
						// VkRenderPassInputAttachmentAspectCreateInfo::pAspectReferences must only include aspects that are
						// present in images of the format specified by the element of pAttachments at attachment
						vk::Format format(pCreateInfo->pAttachments[attachmentReference.attachment].format);
						bool isDepth = format.isDepth();
						bool isStencil = format.isStencil();
						ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) || (!isDepth && !isStencil));
						ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) || isDepth);
						ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) || isStencil);
					}
				}
			}
			break;
			case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO:
			{
				const VkRenderPassMultiviewCreateInfo *multiviewCreateInfo = reinterpret_cast<const VkRenderPassMultiviewCreateInfo *>(extensionCreateInfo);
				// When present, the per-subpass and per-dependency arrays must either
				// be empty or match the create info's own counts.
				ASSERT((multiviewCreateInfo->subpassCount == 0) || (multiviewCreateInfo->subpassCount == pCreateInfo->subpassCount));
				ASSERT((multiviewCreateInfo->dependencyCount == 0) || (multiviewCreateInfo->dependencyCount == pCreateInfo->dependencyCount));

				// All view masks must be uniformly zero or uniformly non-zero.
				bool zeroMask = (multiviewCreateInfo->pViewMasks[0] == 0);
				for(uint32_t i = 1; i < multiviewCreateInfo->subpassCount; i++)
				{
					ASSERT((multiviewCreateInfo->pViewMasks[i] == 0) == zeroMask);
				}

				if(zeroMask)
				{
					ASSERT(multiviewCreateInfo->correlationMaskCount == 0);
				}

				for(uint32_t i = 0; i < multiviewCreateInfo->dependencyCount; i++)
				{
					const auto &dependency = pCreateInfo->pDependencies[i];
					// A non-zero view offset requires a view-local dependency between
					// two distinct subpasses.
					if(multiviewCreateInfo->pViewOffsets[i] != 0)
					{
						ASSERT(dependency.srcSubpass != dependency.dstSubpass);
						ASSERT(dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT);
					}
					if(zeroMask)
					{
						ASSERT(!(dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT));
					}
				}

				// If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo,
				// each element of its pViewMask member must not include a bit at a position
				// greater than the value of VkPhysicalDeviceLimits::maxFramebufferLayers
				// pViewMask is a 32 bit value. If maxFramebufferLayers > 32, it's impossible
				// for pViewMask to contain a bit at an illegal position
				// Note: Verify pViewMask values instead if we hit this assert
				ASSERT(vk::Cast(device)->getPhysicalDevice()->getProperties().limits.maxFramebufferLayers >= 32);
			}
			break;
			default:
				LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
				break;
		}

		extensionCreateInfo = extensionCreateInfo->pNext;
	}
}
254
255 } // namespace
256
257 extern "C" {
// Loader-facing entry point (ICD interface >= 2): resolves instance-level
// function pointers. Delegates directly to vk::GetInstanceProcAddr.
VK_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char *pName)
{
	TRACE("(VkInstance instance = %p, const char* pName = %p)", instance, pName);

	return vk::GetInstanceProcAddr(vk::Cast(instance), pName);
}
264
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t * pSupportedVersion)265 VK_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion)
266 {
267 *pSupportedVersion = 3;
268 return VK_SUCCESS;
269 }
270
// Instance-level extensions supported by this ICD. vkCreateInstance rejects
// any enabled extension that is not listed here (per-platform WSI surface
// extensions are included only when the corresponding platform is compiled in).
static const VkExtensionProperties instanceExtensionProperties[] = {
	{ VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME, VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION },
	{ VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION },
	{ VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION },
	{ VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION },
	{ VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION },
#ifndef __ANDROID__
	{ VK_KHR_SURFACE_EXTENSION_NAME, VK_KHR_SURFACE_SPEC_VERSION },
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
	{ VK_KHR_XCB_SURFACE_EXTENSION_NAME, VK_KHR_XCB_SURFACE_SPEC_VERSION },
#endif
#ifdef VK_USE_PLATFORM_XLIB_KHR
	{ VK_KHR_XLIB_SURFACE_EXTENSION_NAME, VK_KHR_XLIB_SURFACE_SPEC_VERSION },
#endif
#ifdef VK_USE_PLATFORM_MACOS_MVK
	{ VK_MVK_MACOS_SURFACE_EXTENSION_NAME, VK_MVK_MACOS_SURFACE_SPEC_VERSION },
#endif
#ifdef VK_USE_PLATFORM_METAL_EXT
	{ VK_EXT_METAL_SURFACE_EXTENSION_NAME, VK_EXT_METAL_SURFACE_SPEC_VERSION },
#endif
#ifdef VK_USE_PLATFORM_WIN32_KHR
	{ VK_KHR_WIN32_SURFACE_EXTENSION_NAME, VK_KHR_WIN32_SURFACE_SPEC_VERSION },
#endif
};
296
// Device-level extensions supported by this ICD. vkCreateDevice rejects any
// enabled extension that is not listed here.
static const VkExtensionProperties deviceExtensionProperties[] = {
	{ VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME, VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION },
	// Vulkan 1.1 promoted extensions
	{ VK_KHR_16BIT_STORAGE_EXTENSION_NAME, VK_KHR_16BIT_STORAGE_SPEC_VERSION },
	{ VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, VK_KHR_BIND_MEMORY_2_SPEC_VERSION },
	{ VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME, VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION },
	{ VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION },
	{ VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME, VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION },
	{ VK_KHR_DEVICE_GROUP_EXTENSION_NAME, VK_KHR_DEVICE_GROUP_SPEC_VERSION },
	{ VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME, VK_KHR_EXTERNAL_FENCE_SPEC_VERSION },
	{ VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION },
	{ VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION },
	{ VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION },
	{ VK_KHR_MAINTENANCE1_EXTENSION_NAME, VK_KHR_MAINTENANCE1_SPEC_VERSION },
	{ VK_KHR_MAINTENANCE2_EXTENSION_NAME, VK_KHR_MAINTENANCE2_SPEC_VERSION },
	{ VK_KHR_MAINTENANCE3_EXTENSION_NAME, VK_KHR_MAINTENANCE3_SPEC_VERSION },
	{ VK_KHR_MULTIVIEW_EXTENSION_NAME, VK_KHR_MULTIVIEW_SPEC_VERSION },
	{ VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME, VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION },
	{ VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION },
	{ VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME, VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION },
	// Only 1.1 core version of this is supported. The extension has additional requirements
	//{ VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME, VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION },
	{ VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME, VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION },
	// Only 1.1 core version of this is supported. The extension has additional requirements
	//{ VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME, VK_KHR_VARIABLE_POINTERS_SPEC_VERSION },
	{ VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION },
	// The following extension is only used to add support for Bresenham lines
	{ VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME, VK_EXT_LINE_RASTERIZATION_SPEC_VERSION },
	// The following extension is used by ANGLE to emulate blitting the stencil buffer
	{ VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION },
#ifndef __ANDROID__
	// We fully support the KHR_swapchain v70 additions, so just track the spec version.
	{ VK_KHR_SWAPCHAIN_EXTENSION_NAME, VK_KHR_SWAPCHAIN_SPEC_VERSION },
#else
	// We only support V7 of this extension. Missing functionality: in V8,
	// it becomes possible to pass a VkNativeBufferANDROID structure to
	// vkBindImageMemory2. Android's swapchain implementation does this in
	// order to support passing VkBindImageMemorySwapchainInfoKHR
	// (from KHR_swapchain v70) to vkBindImageMemory2.
	{ VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME, 7 },
#endif
#if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
	{ VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION },
#endif
#if SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
	{ VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION },
#endif
#if SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
	{ VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION },
#endif

	{ VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME, VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION },

#if VK_USE_PLATFORM_FUCHSIA
	{ VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME, VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION },
#endif
	{ VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME, VK_EXT_PROVOKING_VERTEX_SPEC_VERSION },
};
355
vkCreateInstance(const VkInstanceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkInstance * pInstance)356 VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkInstance *pInstance)
357 {
358 TRACE("(const VkInstanceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkInstance* pInstance = %p)",
359 pCreateInfo, pAllocator, pInstance);
360
361 initializeLibrary();
362
363 if(pCreateInfo->flags != 0)
364 {
365 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
366 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
367 }
368
369 if(pCreateInfo->enabledLayerCount != 0)
370 {
371 UNIMPLEMENTED("b/148240133: pCreateInfo->enabledLayerCount != 0"); // FIXME(b/148240133)
372 }
373
374 uint32_t extensionPropertiesCount = sizeof(instanceExtensionProperties) / sizeof(instanceExtensionProperties[0]);
375 for(uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; ++i)
376 {
377 if(!HasExtensionProperty(pCreateInfo->ppEnabledExtensionNames[i], instanceExtensionProperties, extensionPropertiesCount))
378 {
379 return VK_ERROR_EXTENSION_NOT_PRESENT;
380 }
381 }
382
383 if(pCreateInfo->pNext)
384 {
385 const VkBaseInStructure *createInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
386 switch(createInfo->sType)
387 {
388 case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO:
389 // According to the Vulkan spec, section 2.7.2. Implicit Valid Usage:
390 // "The values VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO and
391 // VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO are reserved for
392 // internal use by the loader, and do not have corresponding
393 // Vulkan structures in this Specification."
394 break;
395 default:
396 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(createInfo->sType).c_str());
397 break;
398 }
399 }
400
401 *pInstance = VK_NULL_HANDLE;
402 VkPhysicalDevice physicalDevice = VK_NULL_HANDLE;
403
404 VkResult result = vk::DispatchablePhysicalDevice::Create(pAllocator, pCreateInfo, &physicalDevice);
405 if(result != VK_SUCCESS)
406 {
407 return result;
408 }
409
410 result = vk::DispatchableInstance::Create(pAllocator, pCreateInfo, pInstance, physicalDevice);
411 if(result != VK_SUCCESS)
412 {
413 vk::destroy(physicalDevice, pAllocator);
414 return result;
415 }
416
417 return result;
418 }
419
// Destroys an instance previously created by vkCreateInstance.
// Delegates to vk::destroy, which handles a VK_NULL_HANDLE instance.
VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkInstance instance = %p, const VkAllocationCallbacks* pAllocator = %p)", instance, pAllocator);

	vk::destroy(instance, pAllocator);
}
426
// Enumerates the physical devices of the instance. Delegates the standard
// count-query / fill two-call protocol to vk::Instance::getPhysicalDevices.
VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount, VkPhysicalDevice *pPhysicalDevices)
{
	TRACE("(VkInstance instance = %p, uint32_t* pPhysicalDeviceCount = %p, VkPhysicalDevice* pPhysicalDevices = %p)",
	      instance, pPhysicalDeviceCount, pPhysicalDevices);

	return vk::Cast(instance)->getPhysicalDevices(pPhysicalDeviceCount, pPhysicalDevices);
}
434
// Copies the physical device's supported feature set into *pFeatures.
VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceFeatures* pFeatures = %p)",
	      physicalDevice, pFeatures);

	*pFeatures = vk::Cast(physicalDevice)->getFeatures();
}
442
vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice,VkFormat format,VkFormatProperties * pFormatProperties)443 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties *pFormatProperties)
444 {
445 TRACE("GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkFormatProperties* pFormatProperties = %p)",
446 physicalDevice, (int)format, pFormatProperties);
447
448 vk::Cast(physicalDevice)->getFormatProperties(format, pFormatProperties);
449 }
450
// Reports the image capabilities (max extent, mip levels, array layers,
// sample counts, resource size) for the given format/type/tiling/usage/flags
// combination, or VK_ERROR_FORMAT_NOT_SUPPORTED when the combination cannot be
// used with vkCreateImage.
VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties *pImageFormatProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkImageType type = %d, VkImageTiling tiling = %d, VkImageUsageFlags usage = %d, VkImageCreateFlags flags = %d, VkImageFormatProperties* pImageFormatProperties = %p)",
	      physicalDevice, (int)format, (int)type, (int)tiling, usage, flags, pImageFormatProperties);

	// "If the combination of parameters to vkGetPhysicalDeviceImageFormatProperties is not supported by the implementation
	// for use in vkCreateImage, then all members of VkImageFormatProperties will be filled with zero."
	memset(pImageFormatProperties, 0, sizeof(VkImageFormatProperties));

	VkFormatProperties properties;
	vk::Cast(physicalDevice)->getFormatProperties(format, &properties);

	// Select the feature set that matches the requested tiling.
	VkFormatFeatureFlags features;
	switch(tiling)
	{
		case VK_IMAGE_TILING_LINEAR:
			features = properties.linearTilingFeatures;
			break;

		case VK_IMAGE_TILING_OPTIMAL:
			features = properties.optimalTilingFeatures;
			break;

		default:
			UNSUPPORTED("VkImageTiling %d", int(tiling));
			features = 0;
	}

	// No features at all means the format is unusable with this tiling.
	if(features == 0)
	{
		return VK_ERROR_FORMAT_NOT_SUPPORTED;
	}

	// Check for usage conflict with features: every requested usage bit must be
	// backed by the corresponding format feature.
	if((usage & VK_IMAGE_USAGE_SAMPLED_BIT) && !(features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT))
	{
		return VK_ERROR_FORMAT_NOT_SUPPORTED;
	}

	if((usage & VK_IMAGE_USAGE_STORAGE_BIT) && !(features & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT))
	{
		return VK_ERROR_FORMAT_NOT_SUPPORTED;
	}

	if((usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
	{
		return VK_ERROR_FORMAT_NOT_SUPPORTED;
	}

	if((usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))
	{
		return VK_ERROR_FORMAT_NOT_SUPPORTED;
	}

	// Input attachments may be either color or depth/stencil, so either
	// attachment feature bit satisfies the requirement.
	if((usage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT) && !(features & (VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT | VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)))
	{
		return VK_ERROR_FORMAT_NOT_SUPPORTED;
	}

	if((usage & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT))
	{
		return VK_ERROR_FORMAT_NOT_SUPPORTED;
	}

	if((usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_DST_BIT))
	{
		return VK_ERROR_FORMAT_NOT_SUPPORTED;
	}

	// Catch (in debug builds) any usage bits this function does not know about.
	auto allRecognizedUsageBits = VK_IMAGE_USAGE_SAMPLED_BIT |
	                              VK_IMAGE_USAGE_STORAGE_BIT |
	                              VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
	                              VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
	                              VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT |
	                              VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
	                              VK_IMAGE_USAGE_TRANSFER_DST_BIT |
	                              VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
	ASSERT(!(usage & ~(allRecognizedUsageBits)));

	// "Images created with tiling equal to VK_IMAGE_TILING_LINEAR have further restrictions on their limits and capabilities
	// compared to images created with tiling equal to VK_IMAGE_TILING_OPTIMAL."
	if(tiling == VK_IMAGE_TILING_LINEAR)
	{
		if(type != VK_IMAGE_TYPE_2D)
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}

		if(vk::Format(format).isDepth() || vk::Format(format).isStencil())
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}
	}

	// "Images created with a format from one of those listed in Formats requiring sampler Y'CBCR conversion for VK_IMAGE_ASPECT_COLOR_BIT image views
	// have further restrictions on their limits and capabilities compared to images created with other formats."
	if(vk::Format(format).isYcbcrFormat())
	{
		if(type != VK_IMAGE_TYPE_2D)
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}
	}

	// All checks passed; fill in the actual limits for this combination.
	vk::Cast(physicalDevice)->getImageFormatProperties(format, type, tiling, usage, flags, pImageFormatProperties);

	return VK_SUCCESS;
}
559
// Copies the physical device's properties (limits, IDs, API version, etc.)
// into *pProperties.
VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceProperties* pProperties = %p)",
	      physicalDevice, pProperties);

	*pProperties = vk::Cast(physicalDevice)->getProperties();
}
567
vkGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,uint32_t * pQueueFamilyPropertyCount,VkQueueFamilyProperties * pQueueFamilyProperties)568 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties *pQueueFamilyProperties)
569 {
570 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pQueueFamilyPropertyCount = %p, VkQueueFamilyProperties* pQueueFamilyProperties = %p))", physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
571
572 if(!pQueueFamilyProperties)
573 {
574 *pQueueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();
575 }
576 else
577 {
578 vk::Cast(physicalDevice)->getQueueFamilyProperties(*pQueueFamilyPropertyCount, pQueueFamilyProperties);
579 }
580 }
581
// Copies the physical device's memory heap/type description into
// *pMemoryProperties.
VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties *pMemoryProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceMemoryProperties* pMemoryProperties = %p)", physicalDevice, pMemoryProperties);

	*pMemoryProperties = vk::Cast(physicalDevice)->getMemoryProperties();
}
588
// Core entry point for resolving instance-level function pointers.
// Same implementation as vk_icdGetInstanceProcAddr.
VK_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *pName)
{
	TRACE("(VkInstance instance = %p, const char* pName = %p)", instance, pName);

	return vk::GetInstanceProcAddr(vk::Cast(instance), pName);
}
595
// Resolves device-level function pointers; delegates to vk::GetDeviceProcAddr.
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice device, const char *pName)
{
	TRACE("(VkDevice device = %p, const char* pName = %p)", device, pName);

	return vk::GetDeviceProcAddr(vk::Cast(device), pName);
}
602
vkCreateDevice(VkPhysicalDevice physicalDevice,const VkDeviceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDevice * pDevice)603 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDevice *pDevice)
604 {
605 TRACE("(VkPhysicalDevice physicalDevice = %p, const VkDeviceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDevice* pDevice = %p)",
606 physicalDevice, pCreateInfo, pAllocator, pDevice);
607
608 if(pCreateInfo->flags != 0)
609 {
610 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
611 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
612 }
613
614 if(pCreateInfo->enabledLayerCount != 0)
615 {
616 // "The ppEnabledLayerNames and enabledLayerCount members of VkDeviceCreateInfo are deprecated and their values must be ignored by implementations."
617 UNSUPPORTED("pCreateInfo->enabledLayerCount != 0");
618 }
619
620 uint32_t extensionPropertiesCount = sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0]);
621 for(uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; ++i)
622 {
623 if(!HasExtensionProperty(pCreateInfo->ppEnabledExtensionNames[i], deviceExtensionProperties, extensionPropertiesCount))
624 {
625 return VK_ERROR_EXTENSION_NOT_PRESENT;
626 }
627 }
628
629 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
630
631 const VkPhysicalDeviceFeatures *enabledFeatures = pCreateInfo->pEnabledFeatures;
632
633 while(extensionCreateInfo)
634 {
635 // Casting to a long since some structures, such as
636 // VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT
637 // are not enumerated in the official Vulkan header
638 switch((long)(extensionCreateInfo->sType))
639 {
640 case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO:
641 // According to the Vulkan spec, section 2.7.2. Implicit Valid Usage:
642 // "The values VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO and
643 // VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO are reserved for
644 // internal use by the loader, and do not have corresponding
645 // Vulkan structures in this Specification."
646 break;
647 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
648 {
649 ASSERT(!pCreateInfo->pEnabledFeatures); // "If the pNext chain includes a VkPhysicalDeviceFeatures2 structure, then pEnabledFeatures must be NULL"
650
651 const VkPhysicalDeviceFeatures2 *physicalDeviceFeatures2 = reinterpret_cast<const VkPhysicalDeviceFeatures2 *>(extensionCreateInfo);
652
653 enabledFeatures = &physicalDeviceFeatures2->features;
654 }
655 break;
656 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES:
657 {
658 const VkPhysicalDeviceSamplerYcbcrConversionFeatures *samplerYcbcrConversionFeatures = reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures *>(extensionCreateInfo);
659
660 // YCbCr conversion is supported.
661 // samplerYcbcrConversionFeatures->samplerYcbcrConversion can be VK_TRUE or VK_FALSE.
662 // No action needs to be taken on our end in either case; it's the apps responsibility that
663 // "To create a sampler Y'CbCr conversion, the samplerYcbcrConversion feature must be enabled."
664 (void)samplerYcbcrConversionFeatures->samplerYcbcrConversion;
665 }
666 break;
667 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES:
668 {
669 const VkPhysicalDevice16BitStorageFeatures *storage16BitFeatures = reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures *>(extensionCreateInfo);
670
671 if(storage16BitFeatures->storageBuffer16BitAccess != VK_FALSE ||
672 storage16BitFeatures->uniformAndStorageBuffer16BitAccess != VK_FALSE ||
673 storage16BitFeatures->storagePushConstant16 != VK_FALSE ||
674 storage16BitFeatures->storageInputOutput16 != VK_FALSE)
675 {
676 return VK_ERROR_FEATURE_NOT_PRESENT;
677 }
678 }
679 break;
680 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES:
681 {
682 const VkPhysicalDeviceVariablePointerFeatures *variablePointerFeatures = reinterpret_cast<const VkPhysicalDeviceVariablePointerFeatures *>(extensionCreateInfo);
683
684 if(variablePointerFeatures->variablePointersStorageBuffer != VK_FALSE ||
685 variablePointerFeatures->variablePointers != VK_FALSE)
686 {
687 return VK_ERROR_FEATURE_NOT_PRESENT;
688 }
689 }
690 break;
691 case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO:
692 {
693 const VkDeviceGroupDeviceCreateInfo *groupDeviceCreateInfo = reinterpret_cast<const VkDeviceGroupDeviceCreateInfo *>(extensionCreateInfo);
694
695 if((groupDeviceCreateInfo->physicalDeviceCount != 1) ||
696 (groupDeviceCreateInfo->pPhysicalDevices[0] != physicalDevice))
697 {
698 return VK_ERROR_FEATURE_NOT_PRESENT;
699 }
700 }
701 break;
702 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
703 {
704 const VkPhysicalDeviceMultiviewFeatures *multiviewFeatures = reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures *>(extensionCreateInfo);
705
706 if(multiviewFeatures->multiviewGeometryShader ||
707 multiviewFeatures->multiviewTessellationShader)
708 {
709 return VK_ERROR_FEATURE_NOT_PRESENT;
710 }
711 }
712 break;
713 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES:
714 {
715 const VkPhysicalDeviceShaderDrawParametersFeatures *shaderDrawParametersFeatures = reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures *>(extensionCreateInfo);
716
717 if(shaderDrawParametersFeatures->shaderDrawParameters)
718 {
719 return VK_ERROR_FEATURE_NOT_PRESENT;
720 }
721 }
722 break;
723 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR:
724 {
725 const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *shaderDrawParametersFeatures = reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *>(extensionCreateInfo);
726
727 // Separate depth and stencil layouts is already supported
728 (void)(shaderDrawParametersFeatures->separateDepthStencilLayouts);
729 }
730 break;
731 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT:
732 {
733 const VkPhysicalDeviceLineRasterizationFeaturesEXT *lineRasterizationFeatures = reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT *>(extensionCreateInfo);
734 if((lineRasterizationFeatures->smoothLines != VK_FALSE) ||
735 (lineRasterizationFeatures->stippledBresenhamLines != VK_FALSE) ||
736 (lineRasterizationFeatures->stippledRectangularLines != VK_FALSE) ||
737 (lineRasterizationFeatures->stippledSmoothLines != VK_FALSE))
738 {
739 return VK_ERROR_FEATURE_NOT_PRESENT;
740 }
741 }
742 break;
743 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT:
744 {
745 const VkPhysicalDeviceProvokingVertexFeaturesEXT *provokingVertexFeatures = reinterpret_cast<const VkPhysicalDeviceProvokingVertexFeaturesEXT *>(extensionCreateInfo);
746
747 // Provoking vertex is supported.
748 // provokingVertexFeatures->provokingVertexLast can be VK_TRUE or VK_FALSE.
749 // No action needs to be taken on our end in either case; it's the apps responsibility to check
750 // that the provokingVertexLast feature is enabled before using the provoking vertex convention.
751 (void)provokingVertexFeatures->provokingVertexLast;
752 }
753 break;
754 default:
755 // "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extenions]"
756 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
757 break;
758 }
759
760 extensionCreateInfo = extensionCreateInfo->pNext;
761 }
762
763 ASSERT(pCreateInfo->queueCreateInfoCount > 0);
764
765 if(enabledFeatures)
766 {
767 if(!vk::Cast(physicalDevice)->hasFeatures(*enabledFeatures))
768 {
769 return VK_ERROR_FEATURE_NOT_PRESENT;
770 }
771 }
772
773 uint32_t queueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();
774
775 for(uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; i++)
776 {
777 const VkDeviceQueueCreateInfo &queueCreateInfo = pCreateInfo->pQueueCreateInfos[i];
778 if(queueCreateInfo.flags != 0)
779 {
780 UNSUPPORTED("pCreateInfo->pQueueCreateInfos[%d]->flags %d", i, queueCreateInfo.flags);
781 }
782
783 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(queueCreateInfo.pNext);
784 while(extInfo)
785 {
786 LOG_TRAP("pCreateInfo->pQueueCreateInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
787 extInfo = extInfo->pNext;
788 }
789
790 ASSERT(queueCreateInfo.queueFamilyIndex < queueFamilyPropertyCount);
791 (void)queueFamilyPropertyCount; // Silence unused variable warning
792 }
793
794 auto scheduler = getOrCreateScheduler();
795 return vk::DispatchableDevice::Create(pAllocator, pCreateInfo, pDevice, vk::Cast(physicalDevice), enabledFeatures, scheduler);
796 }
797
vkDestroyDevice(VkDevice device,const VkAllocationCallbacks * pAllocator)798 VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator)
799 {
800 TRACE("(VkDevice device = %p, const VkAllocationCallbacks* pAllocator = %p)", device, pAllocator);
801
802 vk::destroy(device, pAllocator);
803 }
804
vkEnumerateInstanceExtensionProperties(const char * pLayerName,uint32_t * pPropertyCount,VkExtensionProperties * pProperties)805 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties)
806 {
807 TRACE("(const char* pLayerName = %p, uint32_t* pPropertyCount = %p, VkExtensionProperties* pProperties = %p)",
808 pLayerName, pPropertyCount, pProperties);
809
810 uint32_t extensionPropertiesCount = sizeof(instanceExtensionProperties) / sizeof(instanceExtensionProperties[0]);
811
812 if(!pProperties)
813 {
814 *pPropertyCount = extensionPropertiesCount;
815 return VK_SUCCESS;
816 }
817
818 auto toCopy = std::min(*pPropertyCount, extensionPropertiesCount);
819 for(uint32_t i = 0; i < toCopy; i++)
820 {
821 pProperties[i] = instanceExtensionProperties[i];
822 }
823
824 *pPropertyCount = toCopy;
825 return (toCopy < extensionPropertiesCount) ? VK_INCOMPLETE : VK_SUCCESS;
826 }
827
vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,const char * pLayerName,uint32_t * pPropertyCount,VkExtensionProperties * pProperties)828 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties)
829 {
830 TRACE("(VkPhysicalDevice physicalDevice = %p, const char* pLayerName, uint32_t* pPropertyCount = %p, VkExtensionProperties* pProperties = %p)", physicalDevice, pPropertyCount, pProperties);
831
832 uint32_t extensionPropertiesCount = sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0]);
833
834 if(!pProperties)
835 {
836 *pPropertyCount = extensionPropertiesCount;
837 return VK_SUCCESS;
838 }
839
840 auto toCopy = std::min(*pPropertyCount, extensionPropertiesCount);
841 for(uint32_t i = 0; i < toCopy; i++)
842 {
843 pProperties[i] = deviceExtensionProperties[i];
844 }
845
846 *pPropertyCount = toCopy;
847 return (toCopy < extensionPropertiesCount) ? VK_INCOMPLETE : VK_SUCCESS;
848 }
849
vkEnumerateInstanceLayerProperties(uint32_t * pPropertyCount,VkLayerProperties * pProperties)850 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pPropertyCount, VkLayerProperties *pProperties)
851 {
852 TRACE("(uint32_t* pPropertyCount = %p, VkLayerProperties* pProperties = %p)", pPropertyCount, pProperties);
853
854 if(!pProperties)
855 {
856 *pPropertyCount = 0;
857 return VK_SUCCESS;
858 }
859
860 return VK_SUCCESS;
861 }
862
vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,uint32_t * pPropertyCount,VkLayerProperties * pProperties)863 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkLayerProperties *pProperties)
864 {
865 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pPropertyCount = %p, VkLayerProperties* pProperties = %p)", physicalDevice, pPropertyCount, pProperties);
866
867 if(!pProperties)
868 {
869 *pPropertyCount = 0;
870 return VK_SUCCESS;
871 }
872
873 return VK_SUCCESS;
874 }
875
vkGetDeviceQueue(VkDevice device,uint32_t queueFamilyIndex,uint32_t queueIndex,VkQueue * pQueue)876 VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue)
877 {
878 TRACE("(VkDevice device = %p, uint32_t queueFamilyIndex = %d, uint32_t queueIndex = %d, VkQueue* pQueue = %p)",
879 device, queueFamilyIndex, queueIndex, pQueue);
880
881 *pQueue = vk::Cast(device)->getQueue(queueFamilyIndex, queueIndex);
882 }
883
vkQueueSubmit(VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmits,VkFence fence)884 VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence)
885 {
886 TRACE("(VkQueue queue = %p, uint32_t submitCount = %d, const VkSubmitInfo* pSubmits = %p, VkFence fence = %p)",
887 queue, submitCount, pSubmits, static_cast<void *>(fence));
888
889 return vk::Cast(queue)->submit(submitCount, pSubmits, vk::Cast(fence));
890 }
891
vkQueueWaitIdle(VkQueue queue)892 VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(VkQueue queue)
893 {
894 TRACE("(VkQueue queue = %p)", queue);
895
896 return vk::Cast(queue)->waitIdle();
897 }
898
vkDeviceWaitIdle(VkDevice device)899 VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(VkDevice device)
900 {
901 TRACE("(VkDevice device = %p)", device);
902
903 return vk::Cast(device)->waitIdle();
904 }
905
VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo, const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory)
{
	TRACE("(VkDevice device = %p, const VkMemoryAllocateInfo* pAllocateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDeviceMemory* pMemory = %p)",
	      device, pAllocateInfo, pAllocator, pMemory);

	// Validate the extension structures in the pNext chain before creating the
	// memory object. Import/export structures are rejected if they request a
	// handle type this build does not support; unrecognized sTypes are logged
	// and skipped, per the Vulkan "ignore unknown pNext" rule.
	const VkBaseInStructure *allocationInfo = reinterpret_cast<const VkBaseInStructure *>(pAllocateInfo->pNext);
	while(allocationInfo)
	{
		switch(allocationInfo->sType)
		{
		case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO:
			// This can safely be ignored, as the Vulkan spec mentions:
			// "If the pNext chain includes a VkMemoryDedicatedAllocateInfo structure, then that structure
			// includes a handle of the sole buffer or image resource that the memory *can* be bound to."
			break;
		case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO:
			// This extension controls on which physical devices the memory gets allocated.
			// SwiftShader only has a single physical device, so this extension does nothing in this case.
			break;
#if SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
		case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR:
		{
			auto *importInfo = reinterpret_cast<const VkImportMemoryFdInfoKHR *>(allocationInfo);
			// Only opaque POSIX file descriptors can be imported in this build.
			if(importInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT)
			{
				UNSUPPORTED("importInfo->handleType %u", importInfo->handleType);
				return VK_ERROR_INVALID_EXTERNAL_HANDLE;
			}
			break;
		}
#endif  // SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
		case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO:
		{
			auto *exportInfo = reinterpret_cast<const VkExportMemoryAllocateInfo *>(allocationInfo);
			// Accept only the export handle types enabled at build time.
			switch(exportInfo->handleTypes)
			{
#if SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
			case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT:
				break;
#endif
#if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
			case VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID:
				break;
#endif
			default:
				UNSUPPORTED("exportInfo->handleTypes %u", exportInfo->handleTypes);
				return VK_ERROR_INVALID_EXTERNAL_HANDLE;
			}
			break;
		}
#if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
		case VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID:
			// Ignore
			break;
#endif  // SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
		case VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT:
		{
			auto *importInfo = reinterpret_cast<const VkImportMemoryHostPointerInfoEXT *>(allocationInfo);
			// Host-pointer imports must use one of the two host-memory handle types.
			if(importInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT && importInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT)
			{
				UNSUPPORTED("importInfo->handleType %u", importInfo->handleType);
				return VK_ERROR_INVALID_EXTERNAL_HANDLE;
			}
			break;
		}
		default:
			LOG_TRAP("pAllocateInfo->pNext sType = %s", vk::Stringify(allocationInfo->sType).c_str());
			break;
		}

		allocationInfo = allocationInfo->pNext;
	}

	// Create the VkDeviceMemory object itself; the backing storage is allocated below.
	VkResult result = vk::DeviceMemory::Create(pAllocator, pAllocateInfo, pMemory);
	if(result != VK_SUCCESS)
	{
		return result;
	}

	// Make sure the memory allocation is done now so that OOM errors can be checked now
	result = vk::Cast(*pMemory)->allocate();
	if(result != VK_SUCCESS)
	{
		// Allocation failed: destroy the partially-constructed object before reporting.
		vk::destroy(*pMemory, pAllocator);
		*pMemory = VK_NULL_HANDLE;
	}

	return result;
}
995
vkFreeMemory(VkDevice device,VkDeviceMemory memory,const VkAllocationCallbacks * pAllocator)996 VKAPI_ATTR void VKAPI_CALL vkFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator)
997 {
998 TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, const VkAllocationCallbacks* pAllocator = %p)",
999 device, static_cast<void *>(memory), pAllocator);
1000
1001 vk::destroy(memory, pAllocator);
1002 }
1003
1004 #if SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
vkGetMemoryFdKHR(VkDevice device,const VkMemoryGetFdInfoKHR * getFdInfo,int * pFd)1005 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR(VkDevice device, const VkMemoryGetFdInfoKHR *getFdInfo, int *pFd)
1006 {
1007 TRACE("(VkDevice device = %p, const VkMemoryGetFdInfoKHR* getFdInfo = %p, int* pFd = %p",
1008 device, getFdInfo, pFd);
1009
1010 if(getFdInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT)
1011 {
1012 UNSUPPORTED("pGetFdInfo->handleType %u", getFdInfo->handleType);
1013 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1014 }
1015 return vk::Cast(getFdInfo->memory)->exportFd(pFd);
1016 }
1017
vkGetMemoryFdPropertiesKHR(VkDevice device,VkExternalMemoryHandleTypeFlagBits handleType,int fd,VkMemoryFdPropertiesKHR * pMemoryFdProperties)1018 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR *pMemoryFdProperties)
1019 {
1020 TRACE("(VkDevice device = %p, VkExternalMemoryHandleTypeFlagBits handleType = %x, int fd = %d, VkMemoryFdPropertiesKHR* pMemoryFdProperties = %p)",
1021 device, handleType, fd, pMemoryFdProperties);
1022
1023 if(handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT)
1024 {
1025 UNSUPPORTED("handleType %u", handleType);
1026 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1027 }
1028
1029 if(fd < 0)
1030 {
1031 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1032 }
1033
1034 const VkPhysicalDeviceMemoryProperties &memoryProperties =
1035 vk::Cast(device)->getPhysicalDevice()->getMemoryProperties();
1036
1037 // All SwiftShader memory types support this!
1038 pMemoryFdProperties->memoryTypeBits = (1U << memoryProperties.memoryTypeCount) - 1U;
1039
1040 return VK_SUCCESS;
1041 }
1042 #endif // SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
1043
vkGetMemoryHostPointerPropertiesEXT(VkDevice device,VkExternalMemoryHandleTypeFlagBits handleType,const void * pHostPointer,VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties)1044 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void *pHostPointer, VkMemoryHostPointerPropertiesEXT *pMemoryHostPointerProperties)
1045 {
1046 TRACE("(VkDevice device = %p, VkExternalMemoryHandleTypeFlagBits handleType = %x, const void *pHostPointer = %p, VkMemoryHostPointerPropertiesEXT *pMemoryHostPointerProperties = %p)",
1047 device, handleType, pHostPointer, pMemoryHostPointerProperties);
1048
1049 if(handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT && handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT)
1050 {
1051 UNSUPPORTED("handleType %u", handleType);
1052 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1053 }
1054 pMemoryHostPointerProperties->memoryTypeBits = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
1055
1056 return VK_SUCCESS;
1057 }
1058
1059 #if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
vkGetMemoryAndroidHardwareBufferANDROID(VkDevice device,const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo,struct AHardwareBuffer ** pBuffer)1060 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo, struct AHardwareBuffer **pBuffer)
1061 {
1062 TRACE("(VkDevice device = %p, const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo = %p, struct AHardwareBuffer **pBuffer = %p)",
1063 device, pInfo, pBuffer);
1064
1065 return vk::Cast(pInfo->memory)->exportAhb(pBuffer);
1066 }
1067
vkGetAndroidHardwareBufferPropertiesANDROID(VkDevice device,const struct AHardwareBuffer * buffer,VkAndroidHardwareBufferPropertiesANDROID * pProperties)1068 VKAPI_ATTR VkResult VKAPI_CALL vkGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties)
1069 {
1070 TRACE("(VkDevice device = %p, const struct AHardwareBuffer *buffer = %p, VkAndroidHardwareBufferPropertiesANDROID *pProperties = %p)",
1071 device, buffer, pProperties);
1072
1073 return vk::DeviceMemory::getAhbProperties(buffer, pProperties);
1074 }
1075 #endif // SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
1076
vkMapMemory(VkDevice device,VkDeviceMemory memory,VkDeviceSize offset,VkDeviceSize size,VkMemoryMapFlags flags,void ** ppData)1077 VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void **ppData)
1078 {
1079 TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, VkDeviceSize offset = %d, VkDeviceSize size = %d, VkMemoryMapFlags flags = %d, void** ppData = %p)",
1080 device, static_cast<void *>(memory), int(offset), int(size), flags, ppData);
1081
1082 if(flags != 0)
1083 {
1084 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1085 UNSUPPORTED("flags %d", int(flags));
1086 }
1087
1088 return vk::Cast(memory)->map(offset, size, ppData);
1089 }
1090
VKAPI_ATTR void VKAPI_CALL vkUnmapMemory(VkDevice device, VkDeviceMemory memory)
{
	TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p)", device, static_cast<void *>(memory));

	// Noop, memory will be released when the DeviceMemory object is released
	// (device memory is plain host memory in SwiftShader, so there is no
	// mapping to tear down here).
}
1097
VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
{
	TRACE("(VkDevice device = %p, uint32_t memoryRangeCount = %d, const VkMappedMemoryRange* pMemoryRanges = %p)",
	      device, memoryRangeCount, pMemoryRanges);

	// Noop, host and device memory are the same to SwiftShader
	// (no caches to flush; writes through the mapping are immediately visible).

	return VK_SUCCESS;
}
1107
VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
{
	TRACE("(VkDevice device = %p, uint32_t memoryRangeCount = %d, const VkMappedMemoryRange* pMemoryRanges = %p)",
	      device, memoryRangeCount, pMemoryRanges);

	// Noop, host and device memory are the same to SwiftShader
	// (no caches to invalidate; device writes are immediately visible to the host).

	return VK_SUCCESS;
}
1117
vkGetDeviceMemoryCommitment(VkDevice pDevice,VkDeviceMemory pMemory,VkDeviceSize * pCommittedMemoryInBytes)1118 VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment(VkDevice pDevice, VkDeviceMemory pMemory, VkDeviceSize *pCommittedMemoryInBytes)
1119 {
1120 TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, VkDeviceSize* pCommittedMemoryInBytes = %p)",
1121 pDevice, static_cast<void *>(pMemory), pCommittedMemoryInBytes);
1122
1123 auto memory = vk::Cast(pMemory);
1124
1125 #if !defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)
1126 const auto &memoryProperties = vk::Cast(pDevice)->getPhysicalDevice()->getMemoryProperties();
1127 uint32_t typeIndex = memory->getMemoryTypeIndex();
1128 ASSERT(typeIndex < memoryProperties.memoryTypeCount);
1129 ASSERT(memoryProperties.memoryTypes[typeIndex].propertyFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT);
1130 #endif
1131
1132 *pCommittedMemoryInBytes = memory->getCommittedMemoryInBytes();
1133 }
1134
vkBindBufferMemory(VkDevice device,VkBuffer buffer,VkDeviceMemory memory,VkDeviceSize memoryOffset)1135 VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset)
1136 {
1137 TRACE("(VkDevice device = %p, VkBuffer buffer = %p, VkDeviceMemory memory = %p, VkDeviceSize memoryOffset = %d)",
1138 device, static_cast<void *>(buffer), static_cast<void *>(memory), int(memoryOffset));
1139
1140 if(!vk::Cast(buffer)->canBindToMemory(vk::Cast(memory)))
1141 {
1142 UNSUPPORTED("vkBindBufferMemory with invalid external memory");
1143 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1144 }
1145 vk::Cast(buffer)->bind(vk::Cast(memory), memoryOffset);
1146 return VK_SUCCESS;
1147 }
1148
vkBindImageMemory(VkDevice device,VkImage image,VkDeviceMemory memory,VkDeviceSize memoryOffset)1149 VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset)
1150 {
1151 TRACE("(VkDevice device = %p, VkImage image = %p, VkDeviceMemory memory = %p, VkDeviceSize memoryOffset = %d)",
1152 device, static_cast<void *>(image), static_cast<void *>(memory), int(memoryOffset));
1153
1154 if(!vk::Cast(image)->canBindToMemory(vk::Cast(memory)))
1155 {
1156 UNSUPPORTED("vkBindImageMemory with invalid external memory");
1157 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1158 }
1159 vk::Cast(image)->bind(vk::Cast(memory), memoryOffset);
1160 return VK_SUCCESS;
1161 }
1162
vkGetBufferMemoryRequirements(VkDevice device,VkBuffer buffer,VkMemoryRequirements * pMemoryRequirements)1163 VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements)
1164 {
1165 TRACE("(VkDevice device = %p, VkBuffer buffer = %p, VkMemoryRequirements* pMemoryRequirements = %p)",
1166 device, static_cast<void *>(buffer), pMemoryRequirements);
1167
1168 *pMemoryRequirements = vk::Cast(buffer)->getMemoryRequirements();
1169 }
1170
vkGetImageMemoryRequirements(VkDevice device,VkImage image,VkMemoryRequirements * pMemoryRequirements)1171 VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements)
1172 {
1173 TRACE("(VkDevice device = %p, VkImage image = %p, VkMemoryRequirements* pMemoryRequirements = %p)",
1174 device, static_cast<void *>(image), pMemoryRequirements);
1175
1176 *pMemoryRequirements = vk::Cast(image)->getMemoryRequirements();
1177 }
1178
vkGetImageSparseMemoryRequirements(VkDevice device,VkImage image,uint32_t * pSparseMemoryRequirementCount,VkSparseImageMemoryRequirements * pSparseMemoryRequirements)1179 VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
1180 {
1181 TRACE("(VkDevice device = %p, VkImage image = %p, uint32_t* pSparseMemoryRequirementCount = %p, VkSparseImageMemoryRequirements* pSparseMemoryRequirements = %p)",
1182 device, static_cast<void *>(image), pSparseMemoryRequirementCount, pSparseMemoryRequirements);
1183
1184 // The 'sparseBinding' feature is not supported, so images can not be created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag.
1185 // "If the image was not created with VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT then pSparseMemoryRequirementCount will be set to zero and pSparseMemoryRequirements will not be written to."
1186 *pSparseMemoryRequirementCount = 0;
1187 }
1188
vkGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice,VkFormat format,VkImageType type,VkSampleCountFlagBits samples,VkImageUsageFlags usage,VkImageTiling tiling,uint32_t * pPropertyCount,VkSparseImageFormatProperties * pProperties)1189 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t *pPropertyCount, VkSparseImageFormatProperties *pProperties)
1190 {
1191 TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkImageType type = %d, VkSampleCountFlagBits samples = %d, VkImageUsageFlags usage = %d, VkImageTiling tiling = %d, uint32_t* pPropertyCount = %p, VkSparseImageFormatProperties* pProperties = %p)",
1192 physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
1193
1194 // We do not support sparse images.
1195 *pPropertyCount = 0;
1196 }
1197
VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo, VkFence fence)
{
	TRACE("()");
	// Sparse binding is not supported; the 'sparseBinding' feature is not
	// exposed, so a conformant application never reaches this entry point.
	UNSUPPORTED("vkQueueBindSparse");
	return VK_SUCCESS;
}
1204
vkCreateFence(VkDevice device,const VkFenceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFence * pFence)1205 VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkFence *pFence)
1206 {
1207 TRACE("(VkDevice device = %p, const VkFenceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkFence* pFence = %p)",
1208 device, pCreateInfo, pAllocator, pFence);
1209
1210 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1211 while(nextInfo)
1212 {
1213 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
1214 nextInfo = nextInfo->pNext;
1215 }
1216
1217 return vk::Fence::Create(pAllocator, pCreateInfo, pFence);
1218 }
1219
vkDestroyFence(VkDevice device,VkFence fence,const VkAllocationCallbacks * pAllocator)1220 VKAPI_ATTR void VKAPI_CALL vkDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator)
1221 {
1222 TRACE("(VkDevice device = %p, VkFence fence = %p, const VkAllocationCallbacks* pAllocator = %p)",
1223 device, static_cast<void *>(fence), pAllocator);
1224
1225 vk::destroy(fence, pAllocator);
1226 }
1227
vkResetFences(VkDevice device,uint32_t fenceCount,const VkFence * pFences)1228 VKAPI_ATTR VkResult VKAPI_CALL vkResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences)
1229 {
1230 TRACE("(VkDevice device = %p, uint32_t fenceCount = %d, const VkFence* pFences = %p)",
1231 device, fenceCount, pFences);
1232
1233 for(uint32_t i = 0; i < fenceCount; i++)
1234 {
1235 vk::Cast(pFences[i])->reset();
1236 }
1237
1238 return VK_SUCCESS;
1239 }
1240
vkGetFenceStatus(VkDevice device,VkFence fence)1241 VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus(VkDevice device, VkFence fence)
1242 {
1243 TRACE("(VkDevice device = %p, VkFence fence = %p)", device, static_cast<void *>(fence));
1244
1245 return vk::Cast(fence)->getStatus();
1246 }
1247
vkWaitForFences(VkDevice device,uint32_t fenceCount,const VkFence * pFences,VkBool32 waitAll,uint64_t timeout)1248 VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll, uint64_t timeout)
1249 {
1250 TRACE("(VkDevice device = %p, uint32_t fenceCount = %d, const VkFence* pFences = %p, VkBool32 waitAll = %d, uint64_t timeout = %d)",
1251 device, int(fenceCount), pFences, int(waitAll), int(timeout));
1252
1253 return vk::Cast(device)->waitForFences(fenceCount, pFences, waitAll, timeout);
1254 }
1255
vkCreateSemaphore(VkDevice device,const VkSemaphoreCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSemaphore * pSemaphore)1256 VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore)
1257 {
1258 TRACE("(VkDevice device = %p, const VkSemaphoreCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSemaphore* pSemaphore = %p)",
1259 device, pCreateInfo, pAllocator, pSemaphore);
1260
1261 if(pCreateInfo->flags != 0)
1262 {
1263 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1264 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1265 }
1266
1267 return vk::Semaphore::Create(pAllocator, pCreateInfo, pSemaphore, pAllocator);
1268 }
1269
vkDestroySemaphore(VkDevice device,VkSemaphore semaphore,const VkAllocationCallbacks * pAllocator)1270 VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator)
1271 {
1272 TRACE("(VkDevice device = %p, VkSemaphore semaphore = %p, const VkAllocationCallbacks* pAllocator = %p)",
1273 device, static_cast<void *>(semaphore), pAllocator);
1274
1275 vk::destroy(semaphore, pAllocator);
1276 }
1277
1278 #if SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
vkGetSemaphoreFdKHR(VkDevice device,const VkSemaphoreGetFdInfoKHR * pGetFdInfo,int * pFd)1279 VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd)
1280 {
1281 TRACE("(VkDevice device = %p, const VkSemaphoreGetFdInfoKHR* pGetFdInfo = %p, int* pFd = %p)",
1282 device, static_cast<const void *>(pGetFdInfo), static_cast<void *>(pFd));
1283
1284 if(pGetFdInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT)
1285 {
1286 UNSUPPORTED("pGetFdInfo->handleType %d", int(pGetFdInfo->handleType));
1287 }
1288
1289 return vk::Cast(pGetFdInfo->semaphore)->exportFd(pFd);
1290 }
1291
vkImportSemaphoreFdKHR(VkDevice device,const VkImportSemaphoreFdInfoKHR * pImportSemaphoreInfo)1292 VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR *pImportSemaphoreInfo)
1293 {
1294 TRACE("(VkDevice device = %p, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreInfo = %p",
1295 device, static_cast<const void *>(pImportSemaphoreInfo));
1296
1297 if(pImportSemaphoreInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT)
1298 {
1299 UNSUPPORTED("pImportSemaphoreInfo->handleType %d", int(pImportSemaphoreInfo->handleType));
1300 }
1301 bool temporaryImport = (pImportSemaphoreInfo->flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) != 0;
1302
1303 return vk::Cast(pImportSemaphoreInfo->semaphore)->importFd(pImportSemaphoreInfo->fd, temporaryImport);
1304 }
1305 #endif // SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
1306
1307 #if VK_USE_PLATFORM_FUCHSIA
vkImportSemaphoreZirconHandleFUCHSIA(VkDevice device,const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo)1308 VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreZirconHandleFUCHSIA(
1309 VkDevice device,
1310 const VkImportSemaphoreZirconHandleInfoFUCHSIA *pImportSemaphoreZirconHandleInfo)
1311 {
1312 TRACE("(VkDevice device = %p, const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo = %p)",
1313 device, pImportSemaphoreZirconHandleInfo);
1314
1315 if(pImportSemaphoreZirconHandleInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_TEMP_ZIRCON_EVENT_BIT_FUCHSIA)
1316 {
1317 UNSUPPORTED("pImportSemaphoreZirconHandleInfo->handleType %d", int(pImportSemaphoreZirconHandleInfo->handleType));
1318 }
1319 bool temporaryImport = (pImportSemaphoreZirconHandleInfo->flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) != 0;
1320
1321 return vk::Cast(pImportSemaphoreZirconHandleInfo->semaphore)->importHandle(pImportSemaphoreZirconHandleInfo->handle, temporaryImport);
1322 }
1323
vkGetSemaphoreZirconHandleFUCHSIA(VkDevice device,const VkSemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,zx_handle_t * pZirconHandle)1324 VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreZirconHandleFUCHSIA(
1325 VkDevice device,
1326 const VkSemaphoreGetZirconHandleInfoFUCHSIA *pGetZirconHandleInfo,
1327 zx_handle_t *pZirconHandle)
1328 {
1329 TRACE("(VkDevice device = %p, const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo = %p, zx_handle_t* pZirconHandle = %p)",
1330 device, static_cast<const void *>(pGetZirconHandleInfo), static_cast<void *>(pZirconHandle));
1331
1332 if(pGetZirconHandleInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_TEMP_ZIRCON_EVENT_BIT_FUCHSIA)
1333 {
1334 UNSUPPORTED("pGetZirconHandleInfo->handleType %d", int(pGetZirconHandleInfo->handleType));
1335 }
1336
1337 return vk::Cast(pGetZirconHandleInfo->semaphore)->exportHandle(pZirconHandle);
1338 }
1339 #endif // VK_USE_PLATFORM_FUCHSIA
1340
vkCreateEvent(VkDevice device,const VkEventCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkEvent * pEvent)1341 VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkEvent *pEvent)
1342 {
1343 TRACE("(VkDevice device = %p, const VkEventCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkEvent* pEvent = %p)",
1344 device, pCreateInfo, pAllocator, pEvent);
1345
1346 if(pCreateInfo->flags != 0)
1347 {
1348 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1349 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1350 }
1351
1352 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
1353 while(extInfo)
1354 {
1355 // Vulkan 1.2: "pNext must be NULL"
1356 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1357 extInfo = extInfo->pNext;
1358 }
1359
1360 return vk::Event::Create(pAllocator, pCreateInfo, pEvent);
1361 }
1362
// Destroys a VkEvent, returning its storage through the allocator-aware destroy helper.
VKAPI_ATTR void VKAPI_CALL vkDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkEvent event = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(event), pAllocator);

	vk::destroy(event, pAllocator);
}
1370
// Queries the current signaled/unsignaled state of an event; forwards to vk::Event::getStatus().
VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus(VkDevice device, VkEvent event)
{
	TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));

	return vk::Cast(event)->getStatus();
}
1377
// Sets (signals) an event from the host. Signaling cannot fail here, so VK_SUCCESS
// is returned unconditionally.
VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent(VkDevice device, VkEvent event)
{
	TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));

	vk::Cast(event)->signal();

	return VK_SUCCESS;
}
1386
// Resets an event to the unsignaled state from the host. Resetting cannot fail here,
// so VK_SUCCESS is returned unconditionally.
VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent(VkDevice device, VkEvent event)
{
	TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));

	vk::Cast(event)->reset();

	return VK_SUCCESS;
}
1395
vkCreateQueryPool(VkDevice device,const VkQueryPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkQueryPool * pQueryPool)1396 VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool)
1397 {
1398 TRACE("(VkDevice device = %p, const VkQueryPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkQueryPool* pQueryPool = %p)",
1399 device, pCreateInfo, pAllocator, pQueryPool);
1400
1401 if(pCreateInfo->flags != 0)
1402 {
1403 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1404 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1405 }
1406
1407 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
1408 while(extInfo)
1409 {
1410 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1411 extInfo = extInfo->pNext;
1412 }
1413
1414 return vk::QueryPool::Create(pAllocator, pCreateInfo, pQueryPool);
1415 }
1416
// Destroys a VkQueryPool, returning its storage through the allocator-aware destroy helper.
VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkQueryPool queryPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(queryPool), pAllocator);

	vk::destroy(queryPool, pAllocator);
}
1424
// Copies the results of queries [firstQuery, firstQuery + queryCount) into pData,
// stride bytes apart, honoring the VkQueryResultFlags; forwards to vk::QueryPool::getResults().
VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void *pData, VkDeviceSize stride, VkQueryResultFlags flags)
{
	TRACE("(VkDevice device = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d, size_t dataSize = %d, void* pData = %p, VkDeviceSize stride = %d, VkQueryResultFlags flags = %d)",
	      device, static_cast<void *>(queryPool), int(firstQuery), int(queryCount), int(dataSize), pData, int(stride), flags);

	return vk::Cast(queryPool)->getResults(firstQuery, queryCount, dataSize, pData, stride, flags);
}
1432
vkCreateBuffer(VkDevice device,const VkBufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBuffer * pBuffer)1433 VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer)
1434 {
1435 TRACE("(VkDevice device = %p, const VkBufferCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkBuffer* pBuffer = %p)",
1436 device, pCreateInfo, pAllocator, pBuffer);
1437
1438 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1439 while(nextInfo)
1440 {
1441 switch(nextInfo->sType)
1442 {
1443 case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO:
1444 // Do nothing. Should be handled by vk::Buffer::Create().
1445 break;
1446 default:
1447 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
1448 break;
1449 }
1450 nextInfo = nextInfo->pNext;
1451 }
1452
1453 return vk::Buffer::Create(pAllocator, pCreateInfo, pBuffer);
1454 }
1455
// Destroys a VkBuffer, returning its storage through the allocator-aware destroy helper.
VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkBuffer buffer = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(buffer), pAllocator);

	vk::destroy(buffer, pAllocator);
}
1463
vkCreateBufferView(VkDevice device,const VkBufferViewCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBufferView * pView)1464 VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBufferView *pView)
1465 {
1466 TRACE("(VkDevice device = %p, const VkBufferViewCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkBufferView* pView = %p)",
1467 device, pCreateInfo, pAllocator, pView);
1468
1469 if(pCreateInfo->flags != 0)
1470 {
1471 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1472 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1473 }
1474
1475 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
1476 while(extInfo)
1477 {
1478 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1479 extInfo = extInfo->pNext;
1480 }
1481
1482 return vk::BufferView::Create(pAllocator, pCreateInfo, pView);
1483 }
1484
// Destroys a VkBufferView, returning its storage through the allocator-aware destroy helper.
VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkBufferView bufferView = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(bufferView), pAllocator);

	vk::destroy(bufferView, pAllocator);
}
1492
vkCreateImage(VkDevice device,const VkImageCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImage * pImage)1493 VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkImage *pImage)
1494 {
1495 TRACE("(VkDevice device = %p, const VkImageCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkImage* pImage = %p)",
1496 device, pCreateInfo, pAllocator, pImage);
1497
1498 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1499
1500 #ifdef __ANDROID__
1501 vk::BackingMemory backmem;
1502 bool swapchainImage = false;
1503 #endif
1504
1505 while(extensionCreateInfo)
1506 {
1507 switch((long)(extensionCreateInfo->sType))
1508 {
1509 #ifdef __ANDROID__
1510 case VK_STRUCTURE_TYPE_SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID:
1511 {
1512 const VkSwapchainImageCreateInfoANDROID *swapImageCreateInfo = reinterpret_cast<const VkSwapchainImageCreateInfoANDROID *>(extensionCreateInfo);
1513 backmem.androidUsage = swapImageCreateInfo->usage;
1514 }
1515 break;
1516 case VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID:
1517 {
1518 const VkNativeBufferANDROID *nativeBufferInfo = reinterpret_cast<const VkNativeBufferANDROID *>(extensionCreateInfo);
1519 backmem.nativeHandle = nativeBufferInfo->handle;
1520 backmem.stride = nativeBufferInfo->stride;
1521 swapchainImage = true;
1522 }
1523 break;
1524 #endif
1525 case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
1526 // Do nothing. Should be handled by vk::Image::Create()
1527 break;
1528 case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
1529 /* Do nothing. We don't actually need the swapchain handle yet; we'll do all the work in vkBindImageMemory2. */
1530 break;
1531 default:
1532 // "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extenions]"
1533 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
1534 break;
1535 }
1536
1537 extensionCreateInfo = extensionCreateInfo->pNext;
1538 }
1539
1540 VkResult result = vk::Image::Create(pAllocator, pCreateInfo, pImage, vk::Cast(device));
1541
1542 #ifdef __ANDROID__
1543 if(swapchainImage)
1544 {
1545 if(result != VK_SUCCESS)
1546 {
1547 return result;
1548 }
1549
1550 vk::Image *image = vk::Cast(*pImage);
1551 VkMemoryRequirements memRequirements = image->getMemoryRequirements();
1552
1553 VkMemoryAllocateInfo allocInfo = {};
1554 allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1555 allocInfo.allocationSize = memRequirements.size;
1556 allocInfo.memoryTypeIndex = 0;
1557
1558 VkDeviceMemory devmem = { VK_NULL_HANDLE };
1559 result = vkAllocateMemory(device, &allocInfo, pAllocator, &devmem);
1560 if(result != VK_SUCCESS)
1561 {
1562 return result;
1563 }
1564
1565 vkBindImageMemory(device, *pImage, devmem, 0);
1566 backmem.externalMemory = true;
1567
1568 image->setBackingMemory(backmem);
1569 }
1570 #endif
1571
1572 return result;
1573 }
1574
// Destroys a VkImage. On Android, a swapchain image may own a device-memory
// allocation made in vkCreateImage(); that allocation is released first.
VKAPI_ATTR void VKAPI_CALL vkDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkImage image = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(image), pAllocator);

#ifdef __ANDROID__
	// Release the backing memory bound in vkCreateImage() before destroying the image.
	vk::Image *img = vk::Cast(image);
	if(img && img->hasExternalMemory())
	{
		vk::destroy(img->getExternalMemory(), pAllocator);
	}
#endif

	vk::destroy(image, pAllocator);
}
1590
// Retrieves the memory layout of one image subresource; forwards to
// vk::Image::getSubresourceLayout().
VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource, VkSubresourceLayout *pLayout)
{
	TRACE("(VkDevice device = %p, VkImage image = %p, const VkImageSubresource* pSubresource = %p, VkSubresourceLayout* pLayout = %p)",
	      device, static_cast<void *>(image), pSubresource, pLayout);

	vk::Cast(image)->getSubresourceLayout(pSubresource, pLayout);
}
1598
vkCreateImageView(VkDevice device,const VkImageViewCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImageView * pView)1599 VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkImageView *pView)
1600 {
1601 TRACE("(VkDevice device = %p, const VkImageViewCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkImageView* pView = %p)",
1602 device, pCreateInfo, pAllocator, pView);
1603
1604 if(pCreateInfo->flags != 0)
1605 {
1606 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1607 }
1608
1609 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1610 const vk::SamplerYcbcrConversion *ycbcrConversion = nullptr;
1611
1612 while(extensionCreateInfo)
1613 {
1614 switch(extensionCreateInfo->sType)
1615 {
1616 case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR:
1617 {
1618 const VkImageViewUsageCreateInfo *multiviewCreateInfo = reinterpret_cast<const VkImageViewUsageCreateInfo *>(extensionCreateInfo);
1619 ASSERT(!(~vk::Cast(pCreateInfo->image)->getUsage() & multiviewCreateInfo->usage));
1620 }
1621 break;
1622 case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
1623 {
1624 const VkSamplerYcbcrConversionInfo *samplerYcbcrConversionInfo = reinterpret_cast<const VkSamplerYcbcrConversionInfo *>(extensionCreateInfo);
1625 ycbcrConversion = vk::Cast(samplerYcbcrConversionInfo->conversion);
1626
1627 if(ycbcrConversion)
1628 {
1629 ASSERT((pCreateInfo->components.r == VK_COMPONENT_SWIZZLE_IDENTITY) &&
1630 (pCreateInfo->components.g == VK_COMPONENT_SWIZZLE_IDENTITY) &&
1631 (pCreateInfo->components.b == VK_COMPONENT_SWIZZLE_IDENTITY) &&
1632 (pCreateInfo->components.a == VK_COMPONENT_SWIZZLE_IDENTITY));
1633 }
1634 }
1635 break;
1636 default:
1637 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
1638 break;
1639 }
1640
1641 extensionCreateInfo = extensionCreateInfo->pNext;
1642 }
1643
1644 return vk::ImageView::Create(pAllocator, pCreateInfo, pView, ycbcrConversion);
1645 }
1646
// Destroys a VkImageView, returning its storage through the allocator-aware destroy helper.
VKAPI_ATTR void VKAPI_CALL vkDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkImageView imageView = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(imageView), pAllocator);

	vk::destroy(imageView, pAllocator);
}
1654
vkCreateShaderModule(VkDevice device,const VkShaderModuleCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkShaderModule * pShaderModule)1655 VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule)
1656 {
1657 TRACE("(VkDevice device = %p, const VkShaderModuleCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkShaderModule* pShaderModule = %p)",
1658 device, pCreateInfo, pAllocator, pShaderModule);
1659
1660 if(pCreateInfo->flags != 0)
1661 {
1662 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1663 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1664 }
1665
1666 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1667 while(nextInfo)
1668 {
1669 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
1670 nextInfo = nextInfo->pNext;
1671 }
1672
1673 return vk::ShaderModule::Create(pAllocator, pCreateInfo, pShaderModule);
1674 }
1675
// Destroys a VkShaderModule, returning its storage through the allocator-aware destroy helper.
VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkShaderModule shaderModule = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(shaderModule), pAllocator);

	vk::destroy(shaderModule, pAllocator);
}
1683
vkCreatePipelineCache(VkDevice device,const VkPipelineCacheCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkPipelineCache * pPipelineCache)1684 VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPipelineCache *pPipelineCache)
1685 {
1686 TRACE("(VkDevice device = %p, const VkPipelineCacheCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipelineCache* pPipelineCache = %p)",
1687 device, pCreateInfo, pAllocator, pPipelineCache);
1688
1689 if(pCreateInfo->flags != 0)
1690 {
1691 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1692 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1693 }
1694
1695 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
1696 while(extInfo)
1697 {
1698 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1699 extInfo = extInfo->pNext;
1700 }
1701
1702 return vk::PipelineCache::Create(pAllocator, pCreateInfo, pPipelineCache);
1703 }
1704
// Destroys a VkPipelineCache, returning its storage through the allocator-aware destroy helper.
VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(pipelineCache), pAllocator);

	vk::destroy(pipelineCache, pAllocator);
}
1712
// Retrieves serialized pipeline cache data. Follows the standard two-call idiom:
// with pData null, *pDataSize receives the required size; otherwise up to *pDataSize
// bytes are written. Forwards to vk::PipelineCache::getData().
VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize, void *pData)
{
	TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, size_t* pDataSize = %p, void* pData = %p)",
	      device, static_cast<void *>(pipelineCache), pDataSize, pData);

	return vk::Cast(pipelineCache)->getData(pDataSize, pData);
}
1720
// Merges the contents of srcCacheCount source caches into dstCache; forwards to
// vk::PipelineCache::merge().
VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache *pSrcCaches)
{
	TRACE("(VkDevice device = %p, VkPipelineCache dstCache = %p, uint32_t srcCacheCount = %d, const VkPipelineCache* pSrcCaches = %p)",
	      device, static_cast<void *>(dstCache), int(srcCacheCount), pSrcCaches);

	return vk::Cast(dstCache)->merge(srcCacheCount, pSrcCaches);
}
1728
vkCreateGraphicsPipelines(VkDevice device,VkPipelineCache pipelineCache,uint32_t createInfoCount,const VkGraphicsPipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines)1729 VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines)
1730 {
1731 TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, uint32_t createInfoCount = %d, const VkGraphicsPipelineCreateInfo* pCreateInfos = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipeline* pPipelines = %p)",
1732 device, static_cast<void *>(pipelineCache), int(createInfoCount), pCreateInfos, pAllocator, pPipelines);
1733
1734 VkResult errorResult = VK_SUCCESS;
1735 for(uint32_t i = 0; i < createInfoCount; i++)
1736 {
1737 VkResult result = vk::GraphicsPipeline::Create(pAllocator, &pCreateInfos[i], &pPipelines[i], vk::Cast(device));
1738
1739 if(result == VK_SUCCESS)
1740 {
1741 static_cast<vk::GraphicsPipeline *>(vk::Cast(pPipelines[i]))->compileShaders(pAllocator, &pCreateInfos[i], vk::Cast(pipelineCache));
1742 }
1743 else
1744 {
1745 // According to the Vulkan spec, section 9.4. Multiple Pipeline Creation
1746 // "When an application attempts to create many pipelines in a single command,
1747 // it is possible that some subset may fail creation. In that case, the
1748 // corresponding entries in the pPipelines output array will be filled with
1749 // VK_NULL_HANDLE values. If any pipeline fails creation (for example, due to
1750 // out of memory errors), the vkCreate*Pipelines commands will return an
1751 // error code. The implementation will attempt to create all pipelines, and
1752 // only return VK_NULL_HANDLE values for those that actually failed."
1753 pPipelines[i] = VK_NULL_HANDLE;
1754 errorResult = result;
1755 }
1756 }
1757
1758 return errorResult;
1759 }
1760
vkCreateComputePipelines(VkDevice device,VkPipelineCache pipelineCache,uint32_t createInfoCount,const VkComputePipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines)1761 VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines)
1762 {
1763 TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, uint32_t createInfoCount = %d, const VkComputePipelineCreateInfo* pCreateInfos = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipeline* pPipelines = %p)",
1764 device, static_cast<void *>(pipelineCache), int(createInfoCount), pCreateInfos, pAllocator, pPipelines);
1765
1766 VkResult errorResult = VK_SUCCESS;
1767 for(uint32_t i = 0; i < createInfoCount; i++)
1768 {
1769 VkResult result = vk::ComputePipeline::Create(pAllocator, &pCreateInfos[i], &pPipelines[i], vk::Cast(device));
1770
1771 if(result == VK_SUCCESS)
1772 {
1773 static_cast<vk::ComputePipeline *>(vk::Cast(pPipelines[i]))->compileShaders(pAllocator, &pCreateInfos[i], vk::Cast(pipelineCache));
1774 }
1775 else
1776 {
1777 // According to the Vulkan spec, section 9.4. Multiple Pipeline Creation
1778 // "When an application attempts to create many pipelines in a single command,
1779 // it is possible that some subset may fail creation. In that case, the
1780 // corresponding entries in the pPipelines output array will be filled with
1781 // VK_NULL_HANDLE values. If any pipeline fails creation (for example, due to
1782 // out of memory errors), the vkCreate*Pipelines commands will return an
1783 // error code. The implementation will attempt to create all pipelines, and
1784 // only return VK_NULL_HANDLE values for those that actually failed."
1785 pPipelines[i] = VK_NULL_HANDLE;
1786 errorResult = result;
1787 }
1788 }
1789
1790 return errorResult;
1791 }
1792
// Destroys a VkPipeline, returning its storage through the allocator-aware destroy helper.
VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkPipeline pipeline = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(pipeline), pAllocator);

	vk::destroy(pipeline, pAllocator);
}
1800
vkCreatePipelineLayout(VkDevice device,const VkPipelineLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkPipelineLayout * pPipelineLayout)1801 VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout)
1802 {
1803 TRACE("(VkDevice device = %p, const VkPipelineLayoutCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipelineLayout* pPipelineLayout = %p)",
1804 device, pCreateInfo, pAllocator, pPipelineLayout);
1805
1806 if(pCreateInfo->flags != 0)
1807 {
1808 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1809 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1810 }
1811
1812 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1813 while(nextInfo)
1814 {
1815 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
1816 nextInfo = nextInfo->pNext;
1817 }
1818
1819 return vk::PipelineLayout::Create(pAllocator, pCreateInfo, pPipelineLayout);
1820 }
1821
// Destroys a VkPipelineLayout, returning its storage through the allocator-aware destroy helper.
VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkPipelineLayout pipelineLayout = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(pipelineLayout), pAllocator);

	vk::destroy(pipelineLayout, pAllocator);
}
1829
vkCreateSampler(VkDevice device,const VkSamplerCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSampler * pSampler)1830 VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSampler *pSampler)
1831 {
1832 TRACE("(VkDevice device = %p, const VkSamplerCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSampler* pSampler = %p)",
1833 device, pCreateInfo, pAllocator, pSampler);
1834
1835 if(pCreateInfo->flags != 0)
1836 {
1837 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1838 }
1839
1840 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1841 const vk::SamplerYcbcrConversion *ycbcrConversion = nullptr;
1842
1843 while(extensionCreateInfo)
1844 {
1845 switch(extensionCreateInfo->sType)
1846 {
1847 case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
1848 {
1849 const VkSamplerYcbcrConversionInfo *samplerYcbcrConversionInfo = reinterpret_cast<const VkSamplerYcbcrConversionInfo *>(extensionCreateInfo);
1850 ycbcrConversion = vk::Cast(samplerYcbcrConversionInfo->conversion);
1851 }
1852 break;
1853 default:
1854 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
1855 break;
1856 }
1857
1858 extensionCreateInfo = extensionCreateInfo->pNext;
1859 }
1860
1861 return vk::Sampler::Create(pAllocator, pCreateInfo, pSampler, ycbcrConversion);
1862 }
1863
// Destroys a VkSampler, returning its storage through the allocator-aware destroy helper.
VKAPI_ATTR void VKAPI_CALL vkDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkSampler sampler = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(sampler), pAllocator);

	vk::destroy(sampler, pAllocator);
}
1871
vkCreateDescriptorSetLayout(VkDevice device,const VkDescriptorSetLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorSetLayout * pSetLayout)1872 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorSetLayout *pSetLayout)
1873 {
1874 TRACE("(VkDevice device = %p, const VkDescriptorSetLayoutCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorSetLayout* pSetLayout = %p)",
1875 device, pCreateInfo, pAllocator, pSetLayout);
1876
1877 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1878
1879 while(extensionCreateInfo)
1880 {
1881 switch(extensionCreateInfo->sType)
1882 {
1883 case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT:
1884 ASSERT(!vk::Cast(device)->hasExtension(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME));
1885 break;
1886 default:
1887 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
1888 break;
1889 }
1890
1891 extensionCreateInfo = extensionCreateInfo->pNext;
1892 }
1893
1894 return vk::DescriptorSetLayout::Create(pAllocator, pCreateInfo, pSetLayout);
1895 }
1896
// Destroys a VkDescriptorSetLayout, returning its storage through the allocator-aware destroy helper.
VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkDescriptorSetLayout descriptorSetLayout = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(descriptorSetLayout), pAllocator);

	vk::destroy(descriptorSetLayout, pAllocator);
}
1904
vkCreateDescriptorPool(VkDevice device,const VkDescriptorPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorPool * pDescriptorPool)1905 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool)
1906 {
1907 TRACE("(VkDevice device = %p, const VkDescriptorPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorPool* pDescriptorPool = %p)",
1908 device, pCreateInfo, pAllocator, pDescriptorPool);
1909
1910 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
1911 while(extInfo)
1912 {
1913 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1914 extInfo = extInfo->pNext;
1915 }
1916
1917 return vk::DescriptorPool::Create(pAllocator, pCreateInfo, pDescriptorPool);
1918 }
1919
// Destroys a VkDescriptorPool, returning its storage through the allocator-aware destroy helper.
VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(descriptorPool), pAllocator);

	vk::destroy(descriptorPool, pAllocator);
}
1927
// Returns all descriptor sets allocated from the pool back to it; forwards to
// vk::DescriptorPool::reset().
VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags)
{
	TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, VkDescriptorPoolResetFlags flags = 0x%x)",
	      device, static_cast<void *>(descriptorPool), int(flags));

	if(flags != 0)
	{
		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
		UNSUPPORTED("flags %d", int(flags));
	}

	return vk::Cast(descriptorPool)->reset();
}
1941
vkAllocateDescriptorSets(VkDevice device,const VkDescriptorSetAllocateInfo * pAllocateInfo,VkDescriptorSet * pDescriptorSets)1942 VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo, VkDescriptorSet *pDescriptorSets)
1943 {
1944 TRACE("(VkDevice device = %p, const VkDescriptorSetAllocateInfo* pAllocateInfo = %p, VkDescriptorSet* pDescriptorSets = %p)",
1945 device, pAllocateInfo, pDescriptorSets);
1946
1947 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pAllocateInfo->pNext);
1948 while(extInfo)
1949 {
1950 LOG_TRAP("pAllocateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1951 extInfo = extInfo->pNext;
1952 }
1953
1954 return vk::Cast(pAllocateInfo->descriptorPool)->allocateSets(pAllocateInfo->descriptorSetCount, pAllocateInfo->pSetLayouts, pDescriptorSets);
1955 }
1956
// Vulkan API entry point: returns the given descriptor sets to their pool.
// Always succeeds; the pool handles the actual bookkeeping.
VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets)
{
	TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, uint32_t descriptorSetCount = %d, const VkDescriptorSet* pDescriptorSets = %p)",
	      device, static_cast<void *>(descriptorPool), descriptorSetCount, pDescriptorSets);

	vk::Cast(descriptorPool)->freeSets(descriptorSetCount, pDescriptorSets);

	return VK_SUCCESS;
}
1966
// Vulkan API entry point: applies the given write and copy operations to
// descriptor sets; the work is delegated to the Device object.
VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet *pDescriptorCopies)
{
	TRACE("(VkDevice device = %p, uint32_t descriptorWriteCount = %d, const VkWriteDescriptorSet* pDescriptorWrites = %p, uint32_t descriptorCopyCount = %d, const VkCopyDescriptorSet* pDescriptorCopies = %p)",
	      device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);

	vk::Cast(device)->updateDescriptorSets(descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
}
1974
vkCreateFramebuffer(VkDevice device,const VkFramebufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFramebuffer * pFramebuffer)1975 VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer)
1976 {
1977 TRACE("(VkDevice device = %p, const VkFramebufferCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkFramebuffer* pFramebuffer = %p)",
1978 device, pCreateInfo, pAllocator, pFramebuffer);
1979
1980 if(pCreateInfo->flags != 0)
1981 {
1982 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1983 }
1984
1985 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1986 while(nextInfo)
1987 {
1988 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
1989 nextInfo = nextInfo->pNext;
1990 }
1991
1992 return vk::Framebuffer::Create(pAllocator, pCreateInfo, pFramebuffer);
1993 }
1994
// Vulkan API entry point: destroys the framebuffer via vk::destroy.
VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkFramebuffer framebuffer = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(framebuffer), pAllocator);

	vk::destroy(framebuffer, pAllocator);
}
2002
// Vulkan API entry point: creates a render pass from the core (non-KHR)
// create info. flags must be 0; the pNext chain is validated separately.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass)
{
	TRACE("(VkDevice device = %p, const VkRenderPassCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkRenderPass* pRenderPass = %p)",
	      device, pCreateInfo, pAllocator, pRenderPass);

	if(pCreateInfo->flags != 0)
	{
		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
	}

	// Shared pNext-chain validation used by both vkCreateRenderPass variants.
	ValidateRenderPassPNextChain(device, pCreateInfo);

	return vk::RenderPass::Create(pAllocator, pCreateInfo, pRenderPass);
}
2018
vkCreateRenderPass2KHR(VkDevice device,const VkRenderPassCreateInfo2KHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass)2019 VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2KHR(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass)
2020 {
2021 TRACE("(VkDevice device = %p, const VkRenderPassCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkRenderPass* pRenderPass = %p)",
2022 device, pCreateInfo, pAllocator, pRenderPass);
2023
2024 if(pCreateInfo->flags != 0)
2025 {
2026 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2027 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
2028 }
2029
2030 ValidateRenderPassPNextChain(device, pCreateInfo);
2031
2032 return vk::RenderPass::Create(pAllocator, pCreateInfo, pRenderPass);
2033 }
2034
// Vulkan API entry point: destroys the render pass via vk::destroy.
VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkRenderPass renderPass = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(renderPass), pAllocator);

	vk::destroy(renderPass, pAllocator);
}
2042
// Vulkan API entry point: queries the render-area granularity for the given
// render pass; the RenderPass object fills in *pGranularity.
VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity)
{
	TRACE("(VkDevice device = %p, VkRenderPass renderPass = %p, VkExtent2D* pGranularity = %p)",
	      device, static_cast<void *>(renderPass), pGranularity);

	vk::Cast(renderPass)->getRenderAreaGranularity(pGranularity);
}
2050
vkCreateCommandPool(VkDevice device,const VkCommandPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkCommandPool * pCommandPool)2051 VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool)
2052 {
2053 TRACE("(VkDevice device = %p, const VkCommandPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkCommandPool* pCommandPool = %p)",
2054 device, pCreateInfo, pAllocator, pCommandPool);
2055
2056 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2057 while(nextInfo)
2058 {
2059 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2060 nextInfo = nextInfo->pNext;
2061 }
2062
2063 return vk::CommandPool::Create(pAllocator, pCreateInfo, pCommandPool);
2064 }
2065
// Vulkan API entry point: destroys the command pool via vk::destroy.
VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(commandPool), pAllocator);

	vk::destroy(commandPool, pAllocator);
}
2073
// Vulkan API entry point: resets the command pool, recycling the resources of
// all command buffers allocated from it; flags are forwarded to the pool.
VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags)
{
	TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, VkCommandPoolResetFlags flags = %d)",
	      device, static_cast<void *>(commandPool), int(flags));

	return vk::Cast(commandPool)->reset(flags);
}
2081
vkAllocateCommandBuffers(VkDevice device,const VkCommandBufferAllocateInfo * pAllocateInfo,VkCommandBuffer * pCommandBuffers)2082 VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo, VkCommandBuffer *pCommandBuffers)
2083 {
2084 TRACE("(VkDevice device = %p, const VkCommandBufferAllocateInfo* pAllocateInfo = %p, VkCommandBuffer* pCommandBuffers = %p)",
2085 device, pAllocateInfo, pCommandBuffers);
2086
2087 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pAllocateInfo->pNext);
2088 while(nextInfo)
2089 {
2090 LOG_TRAP("pAllocateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2091 nextInfo = nextInfo->pNext;
2092 }
2093
2094 return vk::Cast(pAllocateInfo->commandPool)->allocateCommandBuffers(vk::Cast(device), pAllocateInfo->level, pAllocateInfo->commandBufferCount, pCommandBuffers);
2095 }
2096
// Vulkan API entry point: returns the given command buffers to their pool.
VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers)
{
	TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, uint32_t commandBufferCount = %d, const VkCommandBuffer* pCommandBuffers = %p)",
	      device, static_cast<void *>(commandPool), int(commandBufferCount), pCommandBuffers);

	vk::Cast(commandPool)->freeCommandBuffers(commandBufferCount, pCommandBuffers);
}
2104
vkBeginCommandBuffer(VkCommandBuffer commandBuffer,const VkCommandBufferBeginInfo * pBeginInfo)2105 VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo)
2106 {
2107 TRACE("(VkCommandBuffer commandBuffer = %p, const VkCommandBufferBeginInfo* pBeginInfo = %p)",
2108 commandBuffer, pBeginInfo);
2109
2110 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pBeginInfo->pNext);
2111 while(nextInfo)
2112 {
2113 LOG_TRAP("pBeginInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2114 nextInfo = nextInfo->pNext;
2115 }
2116
2117 return vk::Cast(commandBuffer)->begin(pBeginInfo->flags, pBeginInfo->pInheritanceInfo);
2118 }
2119
// Vulkan API entry point: ends recording on the command buffer.
VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer(VkCommandBuffer commandBuffer)
{
	TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);

	return vk::Cast(commandBuffer)->end();
}
2126
vkResetCommandBuffer(VkCommandBuffer commandBuffer,VkCommandBufferResetFlags flags)2127 VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags)
2128 {
2129 TRACE("VkCommandBuffer commandBuffer = %p, VkCommandBufferResetFlags flags = %d", commandBuffer, int(flags));
2130
2131 return vk::Cast(commandBuffer)->reset(flags);
2132 }
2133
// Vulkan API entry point: records a pipeline bind (graphics or compute,
// per pipelineBindPoint) into the command buffer.
VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineBindPoint pipelineBindPoint = %d, VkPipeline pipeline = %p)",
	      commandBuffer, int(pipelineBindPoint), static_cast<void *>(pipeline));

	vk::Cast(commandBuffer)->bindPipeline(pipelineBindPoint, vk::Cast(pipeline));
}
2141
// Vulkan API entry point: records a dynamic-viewport update.
VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport *pViewports)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstViewport = %d, uint32_t viewportCount = %d, const VkViewport* pViewports = %p)",
	      commandBuffer, int(firstViewport), int(viewportCount), pViewports);

	vk::Cast(commandBuffer)->setViewport(firstViewport, viewportCount, pViewports);
}
2149
// Vulkan API entry point: records a dynamic-scissor update.
VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D *pScissors)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstScissor = %d, uint32_t scissorCount = %d, const VkRect2D* pScissors = %p)",
	      commandBuffer, int(firstScissor), int(scissorCount), pScissors);

	vk::Cast(commandBuffer)->setScissor(firstScissor, scissorCount, pScissors);
}
2157
// Vulkan API entry point: records a dynamic line-width update.
VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, float lineWidth = %f)", commandBuffer, lineWidth);

	vk::Cast(commandBuffer)->setLineWidth(lineWidth);
}
2164
// Vulkan API entry point: records a dynamic depth-bias update.
VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, float depthBiasConstantFactor = %f, float depthBiasClamp = %f, float depthBiasSlopeFactor = %f)",
	      commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);

	vk::Cast(commandBuffer)->setDepthBias(depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
}
2172
// Vulkan API entry point: records a dynamic blend-constants (RGBA) update.
VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4])
{
	TRACE("(VkCommandBuffer commandBuffer = %p, const float blendConstants[4] = {%f, %f, %f, %f})",
	      commandBuffer, blendConstants[0], blendConstants[1], blendConstants[2], blendConstants[3]);

	vk::Cast(commandBuffer)->setBlendConstants(blendConstants);
}
2180
// Vulkan API entry point: records a dynamic depth-bounds update.
VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, float minDepthBounds = %f, float maxDepthBounds = %f)",
	      commandBuffer, minDepthBounds, maxDepthBounds);

	vk::Cast(commandBuffer)->setDepthBounds(minDepthBounds, maxDepthBounds);
}
2188
// Vulkan API entry point: records a dynamic stencil compare-mask update for
// the faces selected by faceMask.
VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t compareMask = %d)",
	      commandBuffer, int(faceMask), int(compareMask));

	vk::Cast(commandBuffer)->setStencilCompareMask(faceMask, compareMask);
}
2196
// Vulkan API entry point: records a dynamic stencil write-mask update for
// the faces selected by faceMask.
VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t writeMask = %d)",
	      commandBuffer, int(faceMask), int(writeMask));

	vk::Cast(commandBuffer)->setStencilWriteMask(faceMask, writeMask);
}
2204
// Vulkan API entry point: records a dynamic stencil reference-value update
// for the faces selected by faceMask.
VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t reference = %d)",
	      commandBuffer, int(faceMask), int(reference));

	vk::Cast(commandBuffer)->setStencilReference(faceMask, reference);
}
2212
// Vulkan API entry point: records a descriptor-set bind, including dynamic
// offsets, against the given pipeline layout and bind point.
VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t *pDynamicOffsets)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineBindPoint pipelineBindPoint = %d, VkPipelineLayout layout = %p, uint32_t firstSet = %d, uint32_t descriptorSetCount = %d, const VkDescriptorSet* pDescriptorSets = %p, uint32_t dynamicOffsetCount = %d, const uint32_t* pDynamicOffsets = %p)",
	      commandBuffer, int(pipelineBindPoint), static_cast<void *>(layout), int(firstSet), int(descriptorSetCount), pDescriptorSets, int(dynamicOffsetCount), pDynamicOffsets);

	vk::Cast(commandBuffer)->bindDescriptorSets(pipelineBindPoint, vk::Cast(layout), firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
}
2220
// Vulkan API entry point: records an index-buffer bind at the given offset
// and index type.
VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, VkIndexType indexType = %d)",
	      commandBuffer, static_cast<void *>(buffer), int(offset), int(indexType));

	vk::Cast(commandBuffer)->bindIndexBuffer(vk::Cast(buffer), offset, indexType);
}
2228
// Vulkan API entry point: records vertex-buffer binds for bindingCount
// consecutive bindings starting at firstBinding.
VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer *pBuffers, const VkDeviceSize *pOffsets)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstBinding = %d, uint32_t bindingCount = %d, const VkBuffer* pBuffers = %p, const VkDeviceSize* pOffsets = %p)",
	      commandBuffer, int(firstBinding), int(bindingCount), pBuffers, pOffsets);

	vk::Cast(commandBuffer)->bindVertexBuffers(firstBinding, bindingCount, pBuffers, pOffsets);
}
2236
// Vulkan API entry point: records a non-indexed draw.
VKAPI_ATTR void VKAPI_CALL vkCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t vertexCount = %d, uint32_t instanceCount = %d, uint32_t firstVertex = %d, uint32_t firstInstance = %d)",
	      commandBuffer, int(vertexCount), int(instanceCount), int(firstVertex), int(firstInstance));

	vk::Cast(commandBuffer)->draw(vertexCount, instanceCount, firstVertex, firstInstance);
}
2244
// Vulkan API entry point: records an indexed draw.
VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t indexCount = %d, uint32_t instanceCount = %d, uint32_t firstIndex = %d, int32_t vertexOffset = %d, uint32_t firstInstance = %d)",
	      commandBuffer, int(indexCount), int(instanceCount), int(firstIndex), int(vertexOffset), int(firstInstance));

	vk::Cast(commandBuffer)->drawIndexed(indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
}
2252
// Vulkan API entry point: records an indirect (buffer-driven) draw.
VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, uint32_t drawCount = %d, uint32_t stride = %d)",
	      commandBuffer, static_cast<void *>(buffer), int(offset), int(drawCount), int(stride));

	vk::Cast(commandBuffer)->drawIndirect(vk::Cast(buffer), offset, drawCount, stride);
}
2260
// Vulkan API entry point: records an indirect indexed draw.
VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, uint32_t drawCount = %d, uint32_t stride = %d)",
	      commandBuffer, static_cast<void *>(buffer), int(offset), int(drawCount), int(stride));

	vk::Cast(commandBuffer)->drawIndexedIndirect(vk::Cast(buffer), offset, drawCount, stride);
}
2268
// Vulkan API entry point: records a compute dispatch of the given workgroup
// counts.
VKAPI_ATTR void VKAPI_CALL vkCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t groupCountX = %d, uint32_t groupCountY = %d, uint32_t groupCountZ = %d)",
	      commandBuffer, int(groupCountX), int(groupCountY), int(groupCountZ));

	vk::Cast(commandBuffer)->dispatch(groupCountX, groupCountY, groupCountZ);
}
2276
// Vulkan API entry point: records an indirect compute dispatch whose group
// counts are read from the buffer at the given offset.
VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d)",
	      commandBuffer, static_cast<void *>(buffer), int(offset));

	vk::Cast(commandBuffer)->dispatchIndirect(vk::Cast(buffer), offset);
}
2284
// Vulkan API entry point: records buffer-to-buffer copies for the given
// regions.
VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy *pRegions)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer srcBuffer = %p, VkBuffer dstBuffer = %p, uint32_t regionCount = %d, const VkBufferCopy* pRegions = %p)",
	      commandBuffer, static_cast<void *>(srcBuffer), static_cast<void *>(dstBuffer), int(regionCount), pRegions);

	vk::Cast(commandBuffer)->copyBuffer(vk::Cast(srcBuffer), vk::Cast(dstBuffer), regionCount, pRegions);
}
2292
// Vulkan API entry point: records image-to-image copies for the given
// regions.
VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageCopy* pRegions = %p)",
	      commandBuffer, static_cast<void *>(srcImage), srcImageLayout, static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions);

	vk::Cast(commandBuffer)->copyImage(vk::Cast(srcImage), srcImageLayout, vk::Cast(dstImage), dstImageLayout, regionCount, pRegions);
}
2300
// Vulkan API entry point: records image blits (scaled/filtered copies) for
// the given regions using the requested filter.
VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageBlit* pRegions = %p, VkFilter filter = %d)",
	      commandBuffer, static_cast<void *>(srcImage), srcImageLayout, static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions, filter);

	vk::Cast(commandBuffer)->blitImage(vk::Cast(srcImage), srcImageLayout, vk::Cast(dstImage), dstImageLayout, regionCount, pRegions, filter);
}
2308
// Vulkan API entry point: records buffer-to-image copies for the given
// regions.
VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer srcBuffer = %p, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkBufferImageCopy* pRegions = %p)",
	      commandBuffer, static_cast<void *>(srcBuffer), static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions);

	vk::Cast(commandBuffer)->copyBufferToImage(vk::Cast(srcBuffer), vk::Cast(dstImage), dstImageLayout, regionCount, pRegions);
}
2316
// Vulkan API entry point: records image-to-buffer copies for the given
// regions.
VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkBuffer dstBuffer = %p, uint32_t regionCount = %d, const VkBufferImageCopy* pRegions = %p)",
	      commandBuffer, static_cast<void *>(srcImage), int(srcImageLayout), static_cast<void *>(dstBuffer), int(regionCount), pRegions);

	vk::Cast(commandBuffer)->copyImageToBuffer(vk::Cast(srcImage), srcImageLayout, vk::Cast(dstBuffer), regionCount, pRegions);
}
2324
// Vulkan API entry point: records an inline buffer update of dataSize bytes
// at dstOffset from host memory pData.
VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize dataSize = %d, const void* pData = %p)",
	      commandBuffer, static_cast<void *>(dstBuffer), int(dstOffset), int(dataSize), pData);

	vk::Cast(commandBuffer)->updateBuffer(vk::Cast(dstBuffer), dstOffset, dataSize, pData);
}
2332
// Vulkan API entry point: records a fill of `size` bytes at dstOffset with
// the repeated 32-bit value `data`.
VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize size = %d, uint32_t data = %d)",
	      commandBuffer, static_cast<void *>(dstBuffer), int(dstOffset), int(size), data);

	vk::Cast(commandBuffer)->fillBuffer(vk::Cast(dstBuffer), dstOffset, size, data);
}
2340
// Vulkan API entry point: records a color clear of the given subresource
// ranges of the image.
VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue *pColor, uint32_t rangeCount, const VkImageSubresourceRange *pRanges)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkImage image = %p, VkImageLayout imageLayout = %d, const VkClearColorValue* pColor = %p, uint32_t rangeCount = %d, const VkImageSubresourceRange* pRanges = %p)",
	      commandBuffer, static_cast<void *>(image), int(imageLayout), pColor, int(rangeCount), pRanges);

	vk::Cast(commandBuffer)->clearColorImage(vk::Cast(image), imageLayout, pColor, rangeCount, pRanges);
}
2348
// Vulkan API entry point: records a depth/stencil clear of the given
// subresource ranges of the image.
VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange *pRanges)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkImage image = %p, VkImageLayout imageLayout = %d, const VkClearDepthStencilValue* pDepthStencil = %p, uint32_t rangeCount = %d, const VkImageSubresourceRange* pRanges = %p)",
	      commandBuffer, static_cast<void *>(image), int(imageLayout), pDepthStencil, int(rangeCount), pRanges);

	vk::Cast(commandBuffer)->clearDepthStencilImage(vk::Cast(image), imageLayout, pDepthStencil, rangeCount, pRanges);
}
2356
// Vulkan API entry point: records clears of regions of the current render
// pass attachments.
VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment *pAttachments, uint32_t rectCount, const VkClearRect *pRects)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t attachmentCount = %d, const VkClearAttachment* pAttachments = %p, uint32_t rectCount = %d, const VkClearRect* pRects = %p)",
	      commandBuffer, int(attachmentCount), pAttachments, int(rectCount), pRects);

	vk::Cast(commandBuffer)->clearAttachments(attachmentCount, pAttachments, rectCount, pRects);
}
2364
vkCmdResolveImage(VkCommandBuffer commandBuffer,VkImage srcImage,VkImageLayout srcImageLayout,VkImage dstImage,VkImageLayout dstImageLayout,uint32_t regionCount,const VkImageResolve * pRegions)2365 VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve *pRegions)
2366 {
2367 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageResolve* pRegions = %p)",
2368 commandBuffer, static_cast<void *>(srcImage), int(srcImageLayout), static_cast<void *>(dstImage), int(dstImageLayout), regionCount, pRegions);
2369
2370 vk::Cast(commandBuffer)->resolveImage(vk::Cast(srcImage), srcImageLayout, vk::Cast(dstImage), dstImageLayout, regionCount, pRegions);
2371 }
2372
// Vulkan API entry point: records a signal of `event` once the given pipeline
// stages complete.
VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, VkPipelineStageFlags stageMask = %d)",
	      commandBuffer, static_cast<void *>(event), int(stageMask));

	vk::Cast(commandBuffer)->setEvent(vk::Cast(event), stageMask);
}
2380
vkCmdResetEvent(VkCommandBuffer commandBuffer,VkEvent event,VkPipelineStageFlags stageMask)2381 VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
2382 {
2383 TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, VkPipelineStageFlags stageMask = %d)",
2384 commandBuffer, static_cast<void *>(event), int(stageMask));
2385
2386 vk::Cast(commandBuffer)->resetEvent(vk::Cast(event), stageMask);
2387 }
2388
vkCmdWaitEvents(VkCommandBuffer commandBuffer,uint32_t eventCount,const VkEvent * pEvents,VkPipelineStageFlags srcStageMask,VkPipelineStageFlags dstStageMask,uint32_t memoryBarrierCount,const VkMemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VkBufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrier * pImageMemoryBarriers)2389 VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers)
2390 {
2391 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t eventCount = %d, const VkEvent* pEvents = %p, VkPipelineStageFlags srcStageMask = 0x%x, VkPipelineStageFlags dstStageMask = 0x%x, uint32_t memoryBarrierCount = %d, const VkMemoryBarrier* pMemoryBarriers = %p, uint32_t bufferMemoryBarrierCount = %d, const VkBufferMemoryBarrier* pBufferMemoryBarriers = %p, uint32_t imageMemoryBarrierCount = %d, const VkImageMemoryBarrier* pImageMemoryBarriers = %p)",
2392 commandBuffer, int(eventCount), pEvents, int(srcStageMask), int(dstStageMask), int(memoryBarrierCount), pMemoryBarriers, int(bufferMemoryBarrierCount), pBufferMemoryBarriers, int(imageMemoryBarrierCount), pImageMemoryBarriers);
2393
2394 vk::Cast(commandBuffer)->waitEvents(eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2395 }
2396
vkCmdPipelineBarrier(VkCommandBuffer commandBuffer,VkPipelineStageFlags srcStageMask,VkPipelineStageFlags dstStageMask,VkDependencyFlags dependencyFlags,uint32_t memoryBarrierCount,const VkMemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VkBufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrier * pImageMemoryBarriers)2397 VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers)
2398 {
2399 TRACE(
2400 "(VkCommandBuffer commandBuffer = %p, VkPipelineStageFlags srcStageMask = 0x%x, VkPipelineStageFlags dstStageMask = 0x%x, VkDependencyFlags dependencyFlags = %d, uint32_t memoryBarrierCount = %d, onst VkMemoryBarrier* pMemoryBarriers = %p,"
2401 " uint32_t bufferMemoryBarrierCount = %d, const VkBufferMemoryBarrier* pBufferMemoryBarriers = %p, uint32_t imageMemoryBarrierCount = %d, const VkImageMemoryBarrier* pImageMemoryBarriers = %p)",
2402 commandBuffer, int(srcStageMask), int(dstStageMask), dependencyFlags, int(memoryBarrierCount), pMemoryBarriers, int(bufferMemoryBarrierCount), pBufferMemoryBarriers, int(imageMemoryBarrierCount), pImageMemoryBarriers);
2403
2404 vk::Cast(commandBuffer)->pipelineBarrier(srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2405 }
2406
vkCmdBeginQuery(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t query,VkQueryControlFlags flags)2407 VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags)
2408 {
2409 TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t query = %d, VkQueryControlFlags flags = %d)",
2410 commandBuffer, static_cast<void *>(queryPool), query, int(flags));
2411
2412 vk::Cast(commandBuffer)->beginQuery(vk::Cast(queryPool), query, flags);
2413 }
2414
vkCmdEndQuery(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t query)2415 VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query)
2416 {
2417 TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t query = %d)",
2418 commandBuffer, static_cast<void *>(queryPool), int(query));
2419
2420 vk::Cast(commandBuffer)->endQuery(vk::Cast(queryPool), query);
2421 }
2422
vkCmdResetQueryPool(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount)2423 VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount)
2424 {
2425 TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d)",
2426 commandBuffer, static_cast<void *>(queryPool), int(firstQuery), int(queryCount));
2427
2428 vk::Cast(commandBuffer)->resetQueryPool(vk::Cast(queryPool), firstQuery, queryCount);
2429 }
2430
vkCmdWriteTimestamp(VkCommandBuffer commandBuffer,VkPipelineStageFlagBits pipelineStage,VkQueryPool queryPool,uint32_t query)2431 VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query)
2432 {
2433 TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineStageFlagBits pipelineStage = %d, VkQueryPool queryPool = %p, uint32_t query = %d)",
2434 commandBuffer, int(pipelineStage), static_cast<void *>(queryPool), int(query));
2435
2436 vk::Cast(commandBuffer)->writeTimestamp(pipelineStage, vk::Cast(queryPool), query);
2437 }
2438
vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VkBuffer dstBuffer,VkDeviceSize dstOffset,VkDeviceSize stride,VkQueryResultFlags flags)2439 VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags)
2440 {
2441 TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize stride = %d, VkQueryResultFlags flags = %d)",
2442 commandBuffer, static_cast<void *>(queryPool), int(firstQuery), int(queryCount), static_cast<void *>(dstBuffer), int(dstOffset), int(stride), int(flags));
2443
2444 vk::Cast(commandBuffer)->copyQueryPoolResults(vk::Cast(queryPool), firstQuery, queryCount, vk::Cast(dstBuffer), dstOffset, stride, flags);
2445 }
2446
vkCmdPushConstants(VkCommandBuffer commandBuffer,VkPipelineLayout layout,VkShaderStageFlags stageFlags,uint32_t offset,uint32_t size,const void * pValues)2447 VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void *pValues)
2448 {
2449 TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineLayout layout = %p, VkShaderStageFlags stageFlags = %d, uint32_t offset = %d, uint32_t size = %d, const void* pValues = %p)",
2450 commandBuffer, static_cast<void *>(layout), stageFlags, offset, size, pValues);
2451
2452 vk::Cast(commandBuffer)->pushConstants(vk::Cast(layout), stageFlags, offset, size, pValues);
2453 }
2454
vkCmdBeginRenderPass(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,VkSubpassContents contents)2455 VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, VkSubpassContents contents)
2456 {
2457 TRACE("(VkCommandBuffer commandBuffer = %p, const VkRenderPassBeginInfo* pRenderPassBegin = %p, VkSubpassContents contents = %d)",
2458 commandBuffer, pRenderPassBegin, contents);
2459
2460 const VkBaseInStructure *renderPassBeginInfo = reinterpret_cast<const VkBaseInStructure *>(pRenderPassBegin->pNext);
2461 while(renderPassBeginInfo)
2462 {
2463 switch(renderPassBeginInfo->sType)
2464 {
2465 case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO:
2466 // This extension controls which render area is used on which physical device,
2467 // in order to distribute rendering between multiple physical devices.
2468 // SwiftShader only has a single physical device, so this extension does nothing in this case.
2469 break;
2470 default:
2471 LOG_TRAP("pRenderPassBegin->pNext sType = %s", vk::Stringify(renderPassBeginInfo->sType).c_str());
2472 break;
2473 }
2474
2475 renderPassBeginInfo = renderPassBeginInfo->pNext;
2476 }
2477
2478 vk::Cast(commandBuffer)->beginRenderPass(vk::Cast(pRenderPassBegin->renderPass), vk::Cast(pRenderPassBegin->framebuffer), pRenderPassBegin->renderArea, pRenderPassBegin->clearValueCount, pRenderPassBegin->pClearValues, contents);
2479 }
2480
vkCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,const VkSubpassBeginInfoKHR * pSubpassBeginInfo)2481 VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2KHR(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, const VkSubpassBeginInfoKHR *pSubpassBeginInfo)
2482 {
2483 TRACE("(VkCommandBuffer commandBuffer = %p, const VkRenderPassBeginInfo* pRenderPassBegin = %p, const VkSubpassBeginInfoKHR* pSubpassBeginInfo = %p)",
2484 commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
2485
2486 vk::Cast(commandBuffer)->beginRenderPass(vk::Cast(pRenderPassBegin->renderPass), vk::Cast(pRenderPassBegin->framebuffer), pRenderPassBegin->renderArea, pRenderPassBegin->clearValueCount, pRenderPassBegin->pClearValues, pSubpassBeginInfo->contents);
2487 }
2488
vkCmdNextSubpass(VkCommandBuffer commandBuffer,VkSubpassContents contents)2489 VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents)
2490 {
2491 TRACE("(VkCommandBuffer commandBuffer = %p, VkSubpassContents contents = %d)",
2492 commandBuffer, contents);
2493
2494 vk::Cast(commandBuffer)->nextSubpass(contents);
2495 }
2496
vkCmdNextSubpass2KHR(VkCommandBuffer commandBuffer,const VkSubpassBeginInfoKHR * pSubpassBeginInfo,const VkSubpassEndInfoKHR * pSubpassEndInfo)2497 VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2KHR(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo, const VkSubpassEndInfoKHR *pSubpassEndInfo)
2498 {
2499 TRACE("(VkCommandBuffer commandBuffer = %p, const VkSubpassBeginInfoKHR* pSubpassBeginInfo = %p, const VkSubpassEndInfoKHR* pSubpassEndInfo = %p)",
2500 commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
2501
2502 vk::Cast(commandBuffer)->nextSubpass(pSubpassBeginInfo->contents);
2503 }
2504
vkCmdEndRenderPass(VkCommandBuffer commandBuffer)2505 VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass(VkCommandBuffer commandBuffer)
2506 {
2507 TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);
2508
2509 vk::Cast(commandBuffer)->endRenderPass();
2510 }
2511
vkCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer,const VkSubpassEndInfoKHR * pSubpassEndInfo)2512 VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2KHR(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo)
2513 {
2514 TRACE("(VkCommandBuffer commandBuffer = %p, const VkSubpassEndInfoKHR* pSubpassEndInfo = %p)", commandBuffer, pSubpassEndInfo);
2515
2516 vk::Cast(commandBuffer)->endRenderPass();
2517 }
2518
vkCmdExecuteCommands(VkCommandBuffer commandBuffer,uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers)2519 VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers)
2520 {
2521 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t commandBufferCount = %d, const VkCommandBuffer* pCommandBuffers = %p)",
2522 commandBuffer, commandBufferCount, pCommandBuffers);
2523
2524 vk::Cast(commandBuffer)->executeCommands(commandBufferCount, pCommandBuffers);
2525 }
2526
vkEnumerateInstanceVersion(uint32_t * pApiVersion)2527 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion(uint32_t *pApiVersion)
2528 {
2529 TRACE("(uint32_t* pApiVersion = %p)", pApiVersion);
2530 *pApiVersion = vk::API_VERSION;
2531 return VK_SUCCESS;
2532 }
2533
vkBindBufferMemory2(VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)2534 VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos)
2535 {
2536 TRACE("(VkDevice device = %p, uint32_t bindInfoCount = %d, const VkBindBufferMemoryInfo* pBindInfos = %p)",
2537 device, bindInfoCount, pBindInfos);
2538
2539 for(uint32_t i = 0; i < bindInfoCount; i++)
2540 {
2541 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pBindInfos[i].pNext);
2542 while(extInfo)
2543 {
2544 LOG_TRAP("pBindInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
2545 extInfo = extInfo->pNext;
2546 }
2547
2548 if(!vk::Cast(pBindInfos[i].buffer)->canBindToMemory(vk::Cast(pBindInfos[i].memory)))
2549 {
2550 UNSUPPORTED("vkBindBufferMemory2 with invalid external memory");
2551 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
2552 }
2553 }
2554
2555 for(uint32_t i = 0; i < bindInfoCount; i++)
2556 {
2557 vk::Cast(pBindInfos[i].buffer)->bind(vk::Cast(pBindInfos[i].memory), pBindInfos[i].memoryOffset);
2558 }
2559
2560 return VK_SUCCESS;
2561 }
2562
vkBindImageMemory2(VkDevice device,uint32_t bindInfoCount,const VkBindImageMemoryInfo * pBindInfos)2563 VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo *pBindInfos)
2564 {
2565 TRACE("(VkDevice device = %p, uint32_t bindInfoCount = %d, const VkBindImageMemoryInfo* pBindInfos = %p)",
2566 device, bindInfoCount, pBindInfos);
2567
2568 for(uint32_t i = 0; i < bindInfoCount; i++)
2569 {
2570 if(!vk::Cast(pBindInfos[i].image)->canBindToMemory(vk::Cast(pBindInfos[i].memory)))
2571 {
2572 UNSUPPORTED("vkBindImageMemory2 with invalid external memory");
2573 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2574 }
2575 }
2576
2577 for(uint32_t i = 0; i < bindInfoCount; i++)
2578 {
2579 vk::DeviceMemory *memory = vk::Cast(pBindInfos[i].memory);
2580 VkDeviceSize offset = pBindInfos[i].memoryOffset;
2581
2582 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pBindInfos[i].pNext);
2583 while(extInfo)
2584 {
2585 switch(extInfo->sType)
2586 {
2587 case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO:
2588 /* Do nothing */
2589 break;
2590
2591 #ifndef __ANDROID__
2592 case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR:
2593 {
2594 auto swapchainInfo = reinterpret_cast<VkBindImageMemorySwapchainInfoKHR const *>(extInfo);
2595 memory = vk::Cast(swapchainInfo->swapchain)->getImage(swapchainInfo->imageIndex).getImageMemory();
2596 offset = 0;
2597 }
2598 break;
2599 #endif
2600
2601 default:
2602 LOG_TRAP("pBindInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
2603 break;
2604 }
2605 extInfo = extInfo->pNext;
2606 }
2607
2608 vk::Cast(pBindInfos[i].image)->bind(memory, offset);
2609 }
2610
2611 return VK_SUCCESS;
2612 }
2613
vkGetDeviceGroupPeerMemoryFeatures(VkDevice device,uint32_t heapIndex,uint32_t localDeviceIndex,uint32_t remoteDeviceIndex,VkPeerMemoryFeatureFlags * pPeerMemoryFeatures)2614 VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeatures(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
2615 {
2616 TRACE("(VkDevice device = %p, uint32_t heapIndex = %d, uint32_t localDeviceIndex = %d, uint32_t remoteDeviceIndex = %d, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures = %p)",
2617 device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
2618
2619 ASSERT(localDeviceIndex != remoteDeviceIndex); // "localDeviceIndex must not equal remoteDeviceIndex"
2620 UNSUPPORTED("remoteDeviceIndex: %d", int(remoteDeviceIndex)); // Only one physical device is supported, and since the device indexes can't be equal, this should never be called.
2621 }
2622
vkCmdSetDeviceMask(VkCommandBuffer commandBuffer,uint32_t deviceMask)2623 VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask)
2624 {
2625 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t deviceMask = %d", commandBuffer, deviceMask);
2626
2627 vk::Cast(commandBuffer)->setDeviceMask(deviceMask);
2628 }
2629
vkCmdDispatchBase(VkCommandBuffer commandBuffer,uint32_t baseGroupX,uint32_t baseGroupY,uint32_t baseGroupZ,uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ)2630 VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ)
2631 {
2632 TRACE("(VkCommandBuffer commandBuffer = %p, baseGroupX = %u, baseGroupY = %u, baseGroupZ = %u, groupCountX = %u, groupCountY = %u, groupCountZ = %u)",
2633 commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
2634
2635 vk::Cast(commandBuffer)->dispatchBase(baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
2636 }
2637
vkEnumeratePhysicalDeviceGroups(VkInstance instance,uint32_t * pPhysicalDeviceGroupCount,VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties)2638 VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
2639 {
2640 TRACE("VkInstance instance = %p, uint32_t* pPhysicalDeviceGroupCount = %p, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties = %p",
2641 instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
2642
2643 return vk::Cast(instance)->getPhysicalDeviceGroups(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
2644 }
2645
vkGetImageMemoryRequirements2(VkDevice device,const VkImageMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)2646 VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
2647 {
2648 TRACE("(VkDevice device = %p, const VkImageMemoryRequirementsInfo2* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
2649 device, pInfo, pMemoryRequirements);
2650
2651 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pInfo->pNext);
2652 while(extInfo)
2653 {
2654 LOG_TRAP("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2655 extInfo = extInfo->pNext;
2656 }
2657
2658 VkBaseOutStructure *extensionRequirements = reinterpret_cast<VkBaseOutStructure *>(pMemoryRequirements->pNext);
2659 while(extensionRequirements)
2660 {
2661 switch(extensionRequirements->sType)
2662 {
2663 case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
2664 {
2665 auto requirements = reinterpret_cast<VkMemoryDedicatedRequirements *>(extensionRequirements);
2666 vk::Cast(device)->getRequirements(requirements);
2667 }
2668 break;
2669 default:
2670 LOG_TRAP("pMemoryRequirements->pNext sType = %s", vk::Stringify(extensionRequirements->sType).c_str());
2671 break;
2672 }
2673
2674 extensionRequirements = extensionRequirements->pNext;
2675 }
2676
2677 vkGetImageMemoryRequirements(device, pInfo->image, &(pMemoryRequirements->memoryRequirements));
2678 }
2679
vkGetBufferMemoryRequirements2(VkDevice device,const VkBufferMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)2680 VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2 *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
2681 {
2682 TRACE("(VkDevice device = %p, const VkBufferMemoryRequirementsInfo2* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
2683 device, pInfo, pMemoryRequirements);
2684
2685 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pInfo->pNext);
2686 while(extInfo)
2687 {
2688 LOG_TRAP("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2689 extInfo = extInfo->pNext;
2690 }
2691
2692 VkBaseOutStructure *extensionRequirements = reinterpret_cast<VkBaseOutStructure *>(pMemoryRequirements->pNext);
2693 while(extensionRequirements)
2694 {
2695 switch(extensionRequirements->sType)
2696 {
2697 case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
2698 {
2699 auto requirements = reinterpret_cast<VkMemoryDedicatedRequirements *>(extensionRequirements);
2700 vk::Cast(device)->getRequirements(requirements);
2701 }
2702 break;
2703 default:
2704 LOG_TRAP("pMemoryRequirements->pNext sType = %s", vk::Stringify(extensionRequirements->sType).c_str());
2705 break;
2706 }
2707
2708 extensionRequirements = extensionRequirements->pNext;
2709 }
2710
2711 vkGetBufferMemoryRequirements(device, pInfo->buffer, &(pMemoryRequirements->memoryRequirements));
2712 }
2713
vkGetImageSparseMemoryRequirements2(VkDevice device,const VkImageSparseMemoryRequirementsInfo2 * pInfo,uint32_t * pSparseMemoryRequirementCount,VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements)2714 VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
2715 {
2716 TRACE("(VkDevice device = %p, const VkImageSparseMemoryRequirementsInfo2* pInfo = %p, uint32_t* pSparseMemoryRequirementCount = %p, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements = %p)",
2717 device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
2718
2719 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pInfo->pNext);
2720 while(extInfo)
2721 {
2722 LOG_TRAP("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2723 extInfo = extInfo->pNext;
2724 }
2725
2726 auto extensionRequirements = reinterpret_cast<VkBaseInStructure const *>(pSparseMemoryRequirements->pNext);
2727 while(extensionRequirements)
2728 {
2729 LOG_TRAP("pSparseMemoryRequirements->pNext sType = %s", vk::Stringify(extensionRequirements->sType).c_str());
2730 extensionRequirements = extensionRequirements->pNext;
2731 }
2732
2733 // The 'sparseBinding' feature is not supported, so images can not be created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag.
2734 // "If the image was not created with VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT then pSparseMemoryRequirementCount will be set to zero and pSparseMemoryRequirements will not be written to."
2735 *pSparseMemoryRequirementCount = 0;
2736 }
2737
vkGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,VkPhysicalDeviceFeatures2 * pFeatures)2738 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 *pFeatures)
2739 {
2740 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceFeatures2* pFeatures = %p)", physicalDevice, pFeatures);
2741
2742 VkBaseOutStructure *extensionFeatures = reinterpret_cast<VkBaseOutStructure *>(pFeatures->pNext);
2743 while(extensionFeatures)
2744 {
2745 switch((long)(extensionFeatures->sType))
2746 {
2747 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES:
2748 {
2749 auto features = reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures *>(extensionFeatures);
2750 vk::Cast(physicalDevice)->getFeatures(features);
2751 }
2752 break;
2753 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES:
2754 {
2755 auto features = reinterpret_cast<VkPhysicalDevice16BitStorageFeatures *>(extensionFeatures);
2756 vk::Cast(physicalDevice)->getFeatures(features);
2757 }
2758 break;
2759 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES:
2760 {
2761 auto features = reinterpret_cast<VkPhysicalDeviceVariablePointerFeatures *>(extensionFeatures);
2762 vk::Cast(physicalDevice)->getFeatures(features);
2763 }
2764 break;
2765 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR:
2766 {
2767 auto features = reinterpret_cast<VkPhysicalDevice8BitStorageFeaturesKHR *>(extensionFeatures);
2768 vk::Cast(physicalDevice)->getFeatures(features);
2769 }
2770 break;
2771 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
2772 {
2773 auto features = reinterpret_cast<VkPhysicalDeviceMultiviewFeatures *>(extensionFeatures);
2774 vk::Cast(physicalDevice)->getFeatures(features);
2775 }
2776 break;
2777 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES:
2778 {
2779 auto features = reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures *>(extensionFeatures);
2780 vk::Cast(physicalDevice)->getFeatures(features);
2781 }
2782 break;
2783 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES:
2784 {
2785 auto features = reinterpret_cast<VkPhysicalDeviceShaderDrawParameterFeatures *>(extensionFeatures);
2786 vk::Cast(physicalDevice)->getFeatures(features);
2787 }
2788 break;
2789 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR:
2790 {
2791 auto features = reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *>(extensionFeatures);
2792 vk::Cast(physicalDevice)->getFeatures(features);
2793 }
2794 break;
2795 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT:
2796 {
2797 auto features = reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT *>(extensionFeatures);
2798 vk::Cast(physicalDevice)->getFeatures(features);
2799 }
2800 break;
2801 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT:
2802 {
2803 auto features = reinterpret_cast<VkPhysicalDeviceProvokingVertexFeaturesEXT *>(extensionFeatures);
2804 vk::Cast(physicalDevice)->getFeatures(features);
2805 }
2806 break;
2807 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT:
2808 ASSERT(!HasExtensionProperty(VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME, deviceExtensionProperties,
2809 sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0])));
2810 break;
2811 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT:
2812 ASSERT(!HasExtensionProperty(VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME, deviceExtensionProperties,
2813 sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0])));
2814 break;
2815 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR:
2816 ASSERT(!HasExtensionProperty(VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME, deviceExtensionProperties,
2817 sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0])));
2818 break;
2819 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR:
2820 ASSERT(!HasExtensionProperty(VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME, deviceExtensionProperties,
2821 sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0])));
2822 break;
2823 default:
2824 LOG_TRAP("pFeatures->pNext sType = %s", vk::Stringify(extensionFeatures->sType).c_str());
2825 break;
2826 }
2827
2828 extensionFeatures = extensionFeatures->pNext;
2829 }
2830
2831 vkGetPhysicalDeviceFeatures(physicalDevice, &(pFeatures->features));
2832 }
2833
vkGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,VkPhysicalDeviceProperties2 * pProperties)2834 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 *pProperties)
2835 {
2836 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceProperties2* pProperties = %p)", physicalDevice, pProperties);
2837
2838 VkBaseOutStructure *extensionProperties = reinterpret_cast<VkBaseOutStructure *>(pProperties->pNext);
2839 while(extensionProperties)
2840 {
2841 // Casting to a long since some structures, such as
2842 // VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID and
2843 // VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT
2844 // are not enumerated in the official Vulkan header
2845 switch((long)(extensionProperties->sType))
2846 {
2847 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES:
2848 {
2849 auto properties = reinterpret_cast<VkPhysicalDeviceIDProperties *>(extensionProperties);
2850 vk::Cast(physicalDevice)->getProperties(properties);
2851 }
2852 break;
2853 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES:
2854 {
2855 auto properties = reinterpret_cast<VkPhysicalDeviceMaintenance3Properties *>(extensionProperties);
2856 vk::Cast(physicalDevice)->getProperties(properties);
2857 }
2858 break;
2859 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES:
2860 {
2861 auto properties = reinterpret_cast<VkPhysicalDeviceMultiviewProperties *>(extensionProperties);
2862 vk::Cast(physicalDevice)->getProperties(properties);
2863 }
2864 break;
2865 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES:
2866 {
2867 auto properties = reinterpret_cast<VkPhysicalDevicePointClippingProperties *>(extensionProperties);
2868 vk::Cast(physicalDevice)->getProperties(properties);
2869 }
2870 break;
2871 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES:
2872 {
2873 auto properties = reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>(extensionProperties);
2874 vk::Cast(physicalDevice)->getProperties(properties);
2875 }
2876 break;
2877 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES:
2878 {
2879 auto properties = reinterpret_cast<VkPhysicalDeviceSubgroupProperties *>(extensionProperties);
2880 vk::Cast(physicalDevice)->getProperties(properties);
2881 }
2882 break;
2883 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT:
2884 // Explicitly ignored, since VK_EXT_sample_locations is not supported
2885 ASSERT(!HasExtensionProperty(VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME, deviceExtensionProperties,
2886 sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0])));
2887 break;
2888 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT:
2889 {
2890 auto properties = reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>(extensionProperties);
2891 vk::Cast(physicalDevice)->getProperties(properties);
2892 }
2893 break;
2894 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR:
2895 {
2896 auto properties = reinterpret_cast<VkPhysicalDeviceDriverPropertiesKHR *>(extensionProperties);
2897 vk::Cast(physicalDevice)->getProperties(properties);
2898 }
2899 break;
2900 #ifdef __ANDROID__
2901 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID:
2902 {
2903 auto properties = reinterpret_cast<VkPhysicalDevicePresentationPropertiesANDROID *>(extensionProperties);
2904 vk::Cast(physicalDevice)->getProperties(properties);
2905 }
2906 break;
2907 #endif
2908 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT:
2909 {
2910 auto properties = reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT *>(extensionProperties);
2911 vk::Cast(physicalDevice)->getProperties(properties);
2912 }
2913 break;
2914 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT:
2915 {
2916 auto properties = reinterpret_cast<VkPhysicalDeviceProvokingVertexPropertiesEXT *>(extensionProperties);
2917 vk::Cast(physicalDevice)->getProperties(properties);
2918 }
2919 break;
2920 default:
2921 // "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extenions]"
2922 LOG_TRAP("pProperties->pNext sType = %s", vk::Stringify(extensionProperties->sType).c_str());
2923 break;
2924 }
2925
2926 extensionProperties = extensionProperties->pNext;
2927 }
2928
2929 vkGetPhysicalDeviceProperties(physicalDevice, &(pProperties->properties));
2930 }
2931
vkGetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice,VkFormat format,VkFormatProperties2 * pFormatProperties)2932 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2 *pFormatProperties)
2933 {
2934 TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkFormatProperties2* pFormatProperties = %p)",
2935 physicalDevice, format, pFormatProperties);
2936
2937 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pFormatProperties->pNext);
2938 while(extInfo)
2939 {
2940 LOG_TRAP("pFormatProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2941 extInfo = extInfo->pNext;
2942 }
2943
2944 vkGetPhysicalDeviceFormatProperties(physicalDevice, format, &(pFormatProperties->formatProperties));
2945 }
2946
// Queries image format capabilities, honoring pNext extension structures on
// both the input chain (pImageFormatInfo) and the output chain
// (pImageFormatProperties), then delegates the base query to the core
// vkGetPhysicalDeviceImageFormatProperties entry point.
VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo, VkImageFormatProperties2 *pImageFormatProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo = %p, VkImageFormatProperties2* pImageFormatProperties = %p)",
	      physicalDevice, pImageFormatInfo, pImageFormatProperties);

	const VkBaseInStructure *extensionFormatInfo = reinterpret_cast<const VkBaseInStructure *>(pImageFormatInfo->pNext);

	// When a VkPhysicalDeviceExternalImageFormatInfo is chained on the input,
	// its handle type is remembered here so that a chained
	// VkExternalImageFormatProperties on the output can be filled in below.
	const VkExternalMemoryHandleTypeFlagBits *handleType = nullptr;
	while(extensionFormatInfo)
	{
		switch(extensionFormatInfo->sType)
		{
		case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR:
			{
				// Explicitly ignored, since VK_KHR_image_format_list is not supported
				ASSERT(!HasExtensionProperty(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME, deviceExtensionProperties,
				                             sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0])));
			}
			break;
		case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT:
			{
				// Explicitly ignored, since VK_EXT_separate_stencil_usage is not supported
				ASSERT(!HasExtensionProperty(VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME, deviceExtensionProperties,
				                             sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0])));
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO:
			{
				const VkPhysicalDeviceExternalImageFormatInfo *imageFormatInfo = reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo *>(extensionFormatInfo);
				handleType = &(imageFormatInfo->handleType);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT:
			{
				// Explicitly ignored, since VK_EXT_image_drm_format_modifier is not supported
				ASSERT(!HasExtensionProperty(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME, deviceExtensionProperties,
				                             sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0])));
			}
			break;
		default:
			// Unknown input structures are skipped after logging, per the spec's
			// requirement to ignore unrecognized pNext entries.
			LOG_TRAP("pImageFormatInfo->pNext sType = %s", vk::Stringify(extensionFormatInfo->sType).c_str());
			break;
		}

		extensionFormatInfo = extensionFormatInfo->pNext;
	}

	VkBaseOutStructure *extensionProperties = reinterpret_cast<VkBaseOutStructure *>(pImageFormatProperties->pNext);

	// Fill in any recognized output structures chained by the application.
	while(extensionProperties)
	{
		switch(extensionProperties->sType)
		{
		case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES:
			{
				auto properties = reinterpret_cast<VkExternalImageFormatProperties *>(extensionProperties);
				// handleType may be null if no VkPhysicalDeviceExternalImageFormatInfo
				// was chained on the input; getProperties handles that case.
				vk::Cast(physicalDevice)->getProperties(handleType, properties);
			}
			break;
		case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES:
			{
				auto properties = reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD:
			{
				// Explicitly ignored, since VK_AMD_texture_gather_bias_lod is not supported
				ASSERT(!HasExtensionProperty(VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME, deviceExtensionProperties,
				                             sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0])));
			}
			break;
		default:
			LOG_TRAP("pImageFormatProperties->pNext sType = %s", vk::Stringify(extensionProperties->sType).c_str());
			break;
		}

		extensionProperties = extensionProperties->pNext;
	}

	return vkGetPhysicalDeviceImageFormatProperties(physicalDevice,
	                                                pImageFormatInfo->format,
	                                                pImageFormatInfo->type,
	                                                pImageFormatInfo->tiling,
	                                                pImageFormatInfo->usage,
	                                                pImageFormatInfo->flags,
	                                                &(pImageFormatProperties->imageFormatProperties));
}
3035
vkGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,uint32_t * pQueueFamilyPropertyCount,VkQueueFamilyProperties2 * pQueueFamilyProperties)3036 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties)
3037 {
3038 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pQueueFamilyPropertyCount = %p, VkQueueFamilyProperties2* pQueueFamilyProperties = %p)",
3039 physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
3040
3041 if(pQueueFamilyProperties)
3042 {
3043 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pQueueFamilyProperties->pNext);
3044 while(extInfo)
3045 {
3046 LOG_TRAP("pQueueFamilyProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3047 extInfo = extInfo->pNext;
3048 }
3049 }
3050
3051 if(!pQueueFamilyProperties)
3052 {
3053 *pQueueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();
3054 }
3055 else
3056 {
3057 vk::Cast(physicalDevice)->getQueueFamilyProperties(*pQueueFamilyPropertyCount, pQueueFamilyProperties);
3058 }
3059 }
3060
vkGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice,VkPhysicalDeviceMemoryProperties2 * pMemoryProperties)3061 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
3062 {
3063 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceMemoryProperties2* pMemoryProperties = %p)", physicalDevice, pMemoryProperties);
3064
3065 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pMemoryProperties->pNext);
3066 while(extInfo)
3067 {
3068 LOG_TRAP("pMemoryProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3069 extInfo = extInfo->pNext;
3070 }
3071
3072 vkGetPhysicalDeviceMemoryProperties(physicalDevice, &(pMemoryProperties->memoryProperties));
3073 }
3074
vkGetPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice,const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,uint32_t * pPropertyCount,VkSparseImageFormatProperties2 * pProperties)3075 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2 *pFormatInfo, uint32_t *pPropertyCount, VkSparseImageFormatProperties2 *pProperties)
3076 {
3077 TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo = %p, uint32_t* pPropertyCount = %p, VkSparseImageFormatProperties2* pProperties = %p)",
3078 physicalDevice, pFormatInfo, pPropertyCount, pProperties);
3079
3080 if(pProperties)
3081 {
3082 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pProperties->pNext);
3083 while(extInfo)
3084 {
3085 LOG_TRAP("pProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3086 extInfo = extInfo->pNext;
3087 }
3088 }
3089
3090 // We do not support sparse images.
3091 *pPropertyCount = 0;
3092 }
3093
// Returns unused command-pool memory to the system by delegating to the
// pool's trim() implementation. flags is a reserved field and must be zero.
VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags)
{
	TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, VkCommandPoolTrimFlags flags = %d)",
	      device, static_cast<void *>(commandPool), flags);

	if(flags != 0)
	{
		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
		UNSUPPORTED("flags %d", int(flags));
	}

	vk::Cast(commandPool)->trim(flags);
}
3107
vkGetDeviceQueue2(VkDevice device,const VkDeviceQueueInfo2 * pQueueInfo,VkQueue * pQueue)3108 VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue)
3109 {
3110 TRACE("(VkDevice device = %p, const VkDeviceQueueInfo2* pQueueInfo = %p, VkQueue* pQueue = %p)",
3111 device, pQueueInfo, pQueue);
3112
3113 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pQueueInfo->pNext);
3114 while(extInfo)
3115 {
3116 LOG_TRAP("pQueueInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3117 extInfo = extInfo->pNext;
3118 }
3119
3120 if(pQueueInfo->flags != 0)
3121 {
3122 // The only flag that can be set here is VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
3123 // According to the Vulkan 1.2.132 spec, 4.3.1. Queue Family Properties:
3124 // "VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT specifies that the device queue is a
3125 // protected-capable queue. If the protected memory feature is not enabled,
3126 // the VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT bit of flags must not be set."
3127 UNSUPPORTED("VkPhysicalDeviceVulkan11Features::protectedMemory");
3128 }
3129
3130 vkGetDeviceQueue(device, pQueueInfo->queueFamilyIndex, pQueueInfo->queueIndex, pQueue);
3131 }
3132
vkCreateSamplerYcbcrConversion(VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion)3133 VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSamplerYcbcrConversion *pYcbcrConversion)
3134 {
3135 TRACE("(VkDevice device = %p, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSamplerYcbcrConversion* pYcbcrConversion = %p)",
3136 device, pCreateInfo, pAllocator, pYcbcrConversion);
3137
3138 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
3139 while(extInfo)
3140 {
3141 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3142 extInfo = extInfo->pNext;
3143 }
3144
3145 return vk::SamplerYcbcrConversion::Create(pAllocator, pCreateInfo, pYcbcrConversion);
3146 }
3147
// Destroys a sampler Y'CbCr conversion object via the generic vk::destroy
// helper, using the provided allocation callbacks.
VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkSamplerYcbcrConversion ycbcrConversion = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(ycbcrConversion), pAllocator);

	vk::destroy(ycbcrConversion, pAllocator);
}
3155
vkCreateDescriptorUpdateTemplate(VkDevice device,const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate)3156 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
3157 {
3158 TRACE("(VkDevice device = %p, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate = %p)",
3159 device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
3160
3161 if(pCreateInfo->flags != 0)
3162 {
3163 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
3164 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
3165 }
3166
3167 if(pCreateInfo->templateType != VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
3168 {
3169 UNSUPPORTED("pCreateInfo->templateType %d", int(pCreateInfo->templateType));
3170 }
3171
3172 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
3173 while(extInfo)
3174 {
3175 LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3176 extInfo = extInfo->pNext;
3177 }
3178
3179 return vk::DescriptorUpdateTemplate::Create(pAllocator, pCreateInfo, pDescriptorUpdateTemplate);
3180 }
3181
// Destroys a descriptor update template via the generic vk::destroy helper.
VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkDescriptorUpdateTemplate descriptorUpdateTemplate = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(descriptorUpdateTemplate), pAllocator);

	vk::destroy(descriptorUpdateTemplate, pAllocator);
}
3189
// Applies a descriptor update template to a descriptor set, reading the
// packed descriptor data from pData as laid out by the template.
VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void *pData)
{
	TRACE("(VkDevice device = %p, VkDescriptorSet descriptorSet = %p, VkDescriptorUpdateTemplate descriptorUpdateTemplate = %p, const void* pData = %p)",
	      device, static_cast<void *>(descriptorSet), static_cast<void *>(descriptorUpdateTemplate), pData);

	vk::Cast(descriptorUpdateTemplate)->updateDescriptorSet(vk::Cast(device), descriptorSet, pData);
}
3197
// Delegates the external-buffer capability query to the physical device.
VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo *pExternalBufferInfo, VkExternalBufferProperties *pExternalBufferProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo = %p, VkExternalBufferProperties* pExternalBufferProperties = %p)",
	      physicalDevice, pExternalBufferInfo, pExternalBufferProperties);

	vk::Cast(physicalDevice)->getProperties(pExternalBufferInfo, pExternalBufferProperties);
}
3205
// Delegates the external-fence capability query to the physical device.
VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo, VkExternalFenceProperties *pExternalFenceProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo = %p, VkExternalFenceProperties* pExternalFenceProperties = %p)",
	      physicalDevice, pExternalFenceInfo, pExternalFenceProperties);

	vk::Cast(physicalDevice)->getProperties(pExternalFenceInfo, pExternalFenceProperties);
}
3213
// Delegates the external-semaphore capability query to the physical device.
VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo, VkExternalSemaphoreProperties *pExternalSemaphoreProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo = %p, VkExternalSemaphoreProperties* pExternalSemaphoreProperties = %p)",
	      physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);

	vk::Cast(physicalDevice)->getProperties(pExternalSemaphoreInfo, pExternalSemaphoreProperties);
}
3221
// Asks the device whether the described descriptor set layout can be created.
VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo, VkDescriptorSetLayoutSupport *pSupport)
{
	TRACE("(VkDevice device = %p, const VkDescriptorSetLayoutCreateInfo* pCreateInfo = %p, VkDescriptorSetLayoutSupport* pSupport = %p)",
	      device, pCreateInfo, pSupport);

	vk::Cast(device)->getDescriptorSetLayoutSupport(pCreateInfo, pSupport);
}
3229
vkCmdSetLineStippleEXT(VkCommandBuffer commandBuffer,uint32_t lineStippleFactor,uint16_t lineStipplePattern)3230 VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern)
3231 {
3232 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t lineStippleFactor = %u, uint16_t lineStipplePattern = %u",
3233 commandBuffer, lineStippleFactor, lineStipplePattern);
3234
3235 UNSUPPORTED("VkPhysicalDeviceLineRasterizationFeaturesEXT::stippled*Lines");
3236 }
3237
3238 #ifdef VK_USE_PLATFORM_XCB_KHR
// Creates an XCB presentation surface by delegating to the WSI implementation.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkXcbSurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::XcbSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}
3246
// Presentation support is reported unconditionally for every queue family
// and XCB visual.
VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t *connection, xcb_visualid_t visual_id)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, xcb_connection_t* connection = %p, xcb_visualid_t visual_id = %d)",
	      physicalDevice, int(queueFamilyIndex), connection, int(visual_id));

	return VK_TRUE;
}
3254 #endif
3255
3256 #ifdef VK_USE_PLATFORM_XLIB_KHR
// Creates an Xlib presentation surface by delegating to the WSI implementation.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkXlibSurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::XlibSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}
3264
// Presentation support is reported unconditionally for every queue family
// and Xlib visual.
VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display *dpy, VisualID visualID)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, Display* dpy = %p, VisualID visualID = %lu)",
	      physicalDevice, int(queueFamilyIndex), dpy, visualID);

	return VK_TRUE;
}
3272 #endif
3273
3274 #ifdef VK_USE_PLATFORM_MACOS_MVK
// Creates a macOS (MoltenVK-style) presentation surface via the WSI layer.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkMacOSSurfaceCreateInfoMVK* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::MacOSSurfaceMVK::Create(pAllocator, pCreateInfo, pSurface);
}
3282 #endif
3283
3284 #ifdef VK_USE_PLATFORM_METAL_EXT
// Creates a Metal-layer presentation surface via the WSI layer.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT(VkInstance instance, const VkMetalSurfaceCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkMetalSurfaceCreateInfoEXT* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::MetalSurfaceEXT::Create(pAllocator, pCreateInfo, pSurface);
}
3292 #endif
3293
3294 #ifdef VK_USE_PLATFORM_WIN32_KHR
// Creates a Win32 presentation surface via the WSI layer.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkWin32SurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::Win32SurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}
3302
// Presentation support is reported unconditionally for every queue family.
VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d)",
	      physicalDevice, queueFamilyIndex);
	return VK_TRUE;
}
3309 #endif
3310
3311 #ifndef __ANDROID__
// Destroys a presentation surface via the generic vk::destroy helper.
VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkInstance instance = %p, VkSurfaceKHR surface = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      instance, static_cast<void *>(surface), pAllocator);

	vk::destroy(surface, pAllocator);
}
3319
// Every queue family is reported as able to present to any surface.
VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32 *pSupported)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, VkSurface surface = %p, VKBool32* pSupported = %p)",
	      physicalDevice, int(queueFamilyIndex), static_cast<void *>(surface), pSupported);

	*pSupported = VK_TRUE;
	return VK_SUCCESS;
}
3328
// Delegates the surface-capabilities query to the surface object itself.
VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR *pSurfaceCapabilities)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities = %p)",
	      physicalDevice, static_cast<void *>(surface), pSurfaceCapabilities);

	vk::Cast(surface)->getSurfaceCapabilities(pSurfaceCapabilities);
	return VK_SUCCESS;
}
3337
vkGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice,VkSurfaceKHR surface,uint32_t * pSurfaceFormatCount,VkSurfaceFormatKHR * pSurfaceFormats)3338 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pSurfaceFormatCount, VkSurfaceFormatKHR *pSurfaceFormats)
3339 {
3340 TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p. uint32_t* pSurfaceFormatCount = %p, VkSurfaceFormatKHR* pSurfaceFormats = %p)",
3341 physicalDevice, static_cast<void *>(surface), pSurfaceFormatCount, pSurfaceFormats);
3342
3343 if(!pSurfaceFormats)
3344 {
3345 *pSurfaceFormatCount = vk::Cast(surface)->getSurfaceFormatsCount();
3346 return VK_SUCCESS;
3347 }
3348
3349 return vk::Cast(surface)->getSurfaceFormats(pSurfaceFormatCount, pSurfaceFormats);
3350 }
3351
vkGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,VkSurfaceKHR surface,uint32_t * pPresentModeCount,VkPresentModeKHR * pPresentModes)3352 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pPresentModeCount, VkPresentModeKHR *pPresentModes)
3353 {
3354 TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p uint32_t* pPresentModeCount = %p, VkPresentModeKHR* pPresentModes = %p)",
3355 physicalDevice, static_cast<void *>(surface), pPresentModeCount, pPresentModes);
3356
3357 if(!pPresentModes)
3358 {
3359 *pPresentModeCount = vk::Cast(surface)->getPresentModeCount();
3360 return VK_SUCCESS;
3361 }
3362
3363 return vk::Cast(surface)->getPresentModes(pPresentModeCount, pPresentModes);
3364 }
3365
// Creates a swapchain for a surface: retires any old swapchain passed in,
// refuses surfaces that already have a live swapchain, then creates the
// swapchain object and its backing images, rolling back on image-creation
// failure before associating the new swapchain with the surface.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain)
{
	TRACE("(VkDevice device = %p, const VkSwapchainCreateInfoKHR* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSwapchainKHR* pSwapchain = %p)",
	      device, pCreateInfo, pAllocator, pSwapchain);

	// Retiring the old swapchain releases its claim on the surface, allowing
	// the new swapchain to be associated below.
	if(pCreateInfo->oldSwapchain)
	{
		vk::Cast(pCreateInfo->oldSwapchain)->retire();
	}

	if(vk::Cast(pCreateInfo->surface)->hasAssociatedSwapchain())
	{
		return VK_ERROR_NATIVE_WINDOW_IN_USE_KHR;
	}

	VkResult status = vk::SwapchainKHR::Create(pAllocator, pCreateInfo, pSwapchain);

	if(status != VK_SUCCESS)
	{
		return status;
	}

	auto swapchain = vk::Cast(*pSwapchain);
	status = swapchain->createImages(device, pCreateInfo);

	// On image-creation failure, destroy the half-constructed swapchain so no
	// partially-initialized object is returned to the application.
	if(status != VK_SUCCESS)
	{
		vk::destroy(*pSwapchain, pAllocator);
		return status;
	}

	vk::Cast(pCreateInfo->surface)->associateSwapchain(swapchain);

	return VK_SUCCESS;
}
3401
// Destroys a swapchain via the generic vk::destroy helper.
VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(swapchain), pAllocator);

	vk::destroy(swapchain, pAllocator);
}
3409
vkGetSwapchainImagesKHR(VkDevice device,VkSwapchainKHR swapchain,uint32_t * pSwapchainImageCount,VkImage * pSwapchainImages)3410 VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages)
3411 {
3412 TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, uint32_t* pSwapchainImageCount = %p, VkImage* pSwapchainImages = %p)",
3413 device, static_cast<void *>(swapchain), pSwapchainImageCount, pSwapchainImages);
3414
3415 if(!pSwapchainImages)
3416 {
3417 *pSwapchainImageCount = vk::Cast(swapchain)->getImageCount();
3418 return VK_SUCCESS;
3419 }
3420
3421 return vk::Cast(swapchain)->getImages(pSwapchainImageCount, pSwapchainImages);
3422 }
3423
vkAcquireNextImageKHR(VkDevice device,VkSwapchainKHR swapchain,uint64_t timeout,VkSemaphore semaphore,VkFence fence,uint32_t * pImageIndex)3424 VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex)
3425 {
3426 TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, uint64_t timeout = %d, VkSemaphore semaphore = %p, VkFence fence = %p, uint32_t* pImageIndex = %p)",
3427 device, static_cast<void *>(swapchain), int(timeout), static_cast<void *>(semaphore), static_cast<void *>(fence), pImageIndex);
3428
3429 return vk::Cast(swapchain)->getNextImage(timeout, vk::Cast(semaphore), vk::Cast(fence), pImageIndex);
3430 }
3431
// Queues presentation of swapchain images by delegating to the queue object.
VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo)
{
	TRACE("(VkQueue queue = %p, const VkPresentInfoKHR* pPresentInfo = %p)",
	      queue, pPresentInfo);

	return vk::Cast(queue)->present(pPresentInfo);
}
3439
vkAcquireNextImage2KHR(VkDevice device,const VkAcquireNextImageInfoKHR * pAcquireInfo,uint32_t * pImageIndex)3440 VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo, uint32_t *pImageIndex)
3441 {
3442 TRACE("(VkDevice device = %p, const VkAcquireNextImageInfoKHR *pAcquireInfo = %p, uint32_t *pImageIndex = %p",
3443 device, pAcquireInfo, pImageIndex);
3444
3445 return vk::Cast(pAcquireInfo->swapchain)->getNextImage(pAcquireInfo->timeout, vk::Cast(pAcquireInfo->semaphore), vk::Cast(pAcquireInfo->fence), pImageIndex);
3446 }
3447
vkGetDeviceGroupPresentCapabilitiesKHR(VkDevice device,VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities)3448 VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR *pDeviceGroupPresentCapabilities)
3449 {
3450 TRACE("(VkDevice device = %p, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities = %p)",
3451 device, pDeviceGroupPresentCapabilities);
3452
3453 for(int i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; i++)
3454 {
3455 // The only real physical device in the presentation group is device 0,
3456 // and it can present to itself.
3457 pDeviceGroupPresentCapabilities->presentMask[i] = (i == 0) ? 1 : 0;
3458 }
3459
3460 pDeviceGroupPresentCapabilities->modes = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;
3461
3462 return VK_SUCCESS;
3463 }
3464
// Only local device-group presentation is supported for any surface.
VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR *pModes)
{
	TRACE("(VkDevice device = %p, VkSurfaceKHR surface = %p, VkDeviceGroupPresentModeFlagsKHR *pModes = %p)",
	      device, static_cast<void *>(surface), pModes);

	*pModes = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;
	return VK_SUCCESS;
}
3473
vkGetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice,VkSurfaceKHR surface,uint32_t * pRectCount,VkRect2D * pRects)3474 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pRectCount, VkRect2D *pRects)
3475 {
3476 TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, uint32_t* pRectCount = %p, VkRect2D* pRects = %p)",
3477 physicalDevice, static_cast<void *>(surface), pRectCount, pRects);
3478
3479 return vk::Cast(surface)->getPresentRectangles(pRectCount, pRects);
3480 }
3481
3482 #endif // ! __ANDROID__
3483
3484 #ifdef __ANDROID__
3485
vkGetSwapchainGrallocUsage2ANDROID(VkDevice device,VkFormat format,VkImageUsageFlags imageUsage,VkSwapchainImageUsageFlagsANDROID swapchainUsage,uint64_t * grallocConsumerUsage,uint64_t * grallocProducerUsage)3486 VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainGrallocUsage2ANDROID(VkDevice device, VkFormat format, VkImageUsageFlags imageUsage, VkSwapchainImageUsageFlagsANDROID swapchainUsage, uint64_t *grallocConsumerUsage, uint64_t *grallocProducerUsage)
3487 {
3488 TRACE("(VkDevice device = %p, VkFormat format = %d, VkImageUsageFlags imageUsage = %d, VkSwapchainImageUsageFlagsANDROID swapchainUsage = %d, uint64_t* grallocConsumerUsage = %p, uin64_t* grallocProducerUsage = %p)",
3489 device, format, imageUsage, swapchainUsage, grallocConsumerUsage, grallocProducerUsage);
3490
3491 *grallocConsumerUsage = 0;
3492 *grallocProducerUsage = GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN;
3493
3494 return VK_SUCCESS;
3495 }
3496
vkGetSwapchainGrallocUsageANDROID(VkDevice device,VkFormat format,VkImageUsageFlags imageUsage,int * grallocUsage)3497 VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainGrallocUsageANDROID(VkDevice device, VkFormat format, VkImageUsageFlags imageUsage, int *grallocUsage)
3498 {
3499 TRACE("(VkDevice device = %p, VkFormat format = %d, VkImageUsageFlags imageUsage = %d, int* grallocUsage = %p)",
3500 device, format, imageUsage, grallocUsage);
3501
3502 *grallocUsage = GRALLOC_USAGE_SW_WRITE_OFTEN;
3503
3504 return VK_SUCCESS;
3505 }
3506
VKAPI_ATTR VkResult VKAPI_CALL vkAcquireImageANDROID(VkDevice device, VkImage image, int nativeFenceFd, VkSemaphore semaphore, VkFence fence)
{
	TRACE("(VkDevice device = %p, VkImage image = %p, int nativeFenceFd = %d, VkSemaphore semaphore = %p, VkFence fence = %p)",
	      device, static_cast<void *>(image), nativeFenceFd, static_cast<void *>(semaphore), static_cast<void *>(fence));

	// Block on the incoming native fence fd (no timeout), then close it — the
	// fd is owned by this call once passed in.
	// NOTE(review): the return value of sync_wait() is ignored, so a failed
	// wait proceeds as if the fence had signaled — confirm this is intended.
	if(nativeFenceFd >= 0)
	{
		sync_wait(nativeFenceFd, -1);
		close(nativeFenceFd);
	}

	// The CPU wait above has already finished, so the caller-provided fence
	// and semaphore can be signaled immediately.
	if(fence != VK_NULL_HANDLE)
	{
		vk::Cast(fence)->complete();
	}

	if(semaphore != VK_NULL_HANDLE)
	{
		vk::Cast(semaphore)->signal();
	}

	return VK_SUCCESS;
}
3530
VKAPI_ATTR VkResult VKAPI_CALL vkQueueSignalReleaseImageANDROID(VkQueue queue, uint32_t waitSemaphoreCount, const VkSemaphore *pWaitSemaphores, VkImage image, int *pNativeFenceFd)
{
	TRACE("(VkQueue queue = %p, uint32_t waitSemaphoreCount = %d, const VkSemaphore* pWaitSemaphores = %p, VkImage image = %p, int* pNativeFenceFd = %p)",
	      queue, waitSemaphoreCount, pWaitSemaphores, static_cast<void *>(image), pNativeFenceFd);

	// This is a hack to deal with screen tearing for now.
	// Need to correctly implement threading using VkSemaphore
	// to get rid of it. b/132458423
	vkQueueWaitIdle(queue);

	// No native fence is returned: -1 tells the consumer the image is already
	// safe to use (guaranteed by the queue-idle wait above). Note that the
	// wait semaphores are not consumed individually because of that wait.
	*pNativeFenceFd = -1;

	// Hand the image over for external (compositor) consumption.
	return vk::Cast(image)->prepareForExternalUseANDROID();
}
3545 #endif // __ANDROID__
3546 }
3547