1 // Copyright 2018 The SwiftShader Authors. All Rights Reserved.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 #include "VkBuffer.hpp"
16 #include "VkBufferView.hpp"
17 #include "VkCommandBuffer.hpp"
18 #include "VkCommandPool.hpp"
19 #include "VkConfig.hpp"
20 #include "VkDebugUtilsMessenger.hpp"
21 #include "VkDescriptorPool.hpp"
22 #include "VkDescriptorSetLayout.hpp"
23 #include "VkDescriptorUpdateTemplate.hpp"
24 #include "VkDestroy.hpp"
25 #include "VkDevice.hpp"
26 #include "VkDeviceMemory.hpp"
27 #include "VkEvent.hpp"
28 #include "VkFence.hpp"
29 #include "VkFramebuffer.hpp"
30 #include "VkGetProcAddress.hpp"
31 #include "VkImage.hpp"
32 #include "VkImageView.hpp"
33 #include "VkInstance.hpp"
34 #include "VkPhysicalDevice.hpp"
35 #include "VkPipeline.hpp"
36 #include "VkPipelineCache.hpp"
37 #include "VkPipelineLayout.hpp"
38 #include "VkQueryPool.hpp"
39 #include "VkQueue.hpp"
40 #include "VkRenderPass.hpp"
41 #include "VkSampler.hpp"
42 #include "VkSemaphore.hpp"
43 #include "VkShaderModule.hpp"
44 #include "VkStringify.hpp"
45 #include "VkStructConversion.hpp"
46 #include "VkTimelineSemaphore.hpp"
47
48 #include "Reactor/Nucleus.hpp"
49 #include "System/CPUID.hpp"
50 #include "System/Debug.hpp"
51 #include "System/SwiftConfig.hpp"
52 #include "WSI/HeadlessSurfaceKHR.hpp"
53 #include "WSI/VkSwapchainKHR.hpp"
54
55 #if defined(VK_USE_PLATFORM_METAL_EXT) || defined(VK_USE_PLATFORM_MACOS_MVK)
56 # include "WSI/MetalSurface.hpp"
57 #endif
58
59 #ifdef VK_USE_PLATFORM_XCB_KHR
60 # include "WSI/XcbSurfaceKHR.hpp"
61 #endif
62
63 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
64 # include "WSI/WaylandSurfaceKHR.hpp"
65 #endif
66
67 #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
68 # include "WSI/DirectFBSurfaceEXT.hpp"
69 #endif
70
71 #ifdef VK_USE_PLATFORM_DISPLAY_KHR
72 # include "WSI/DisplaySurfaceKHR.hpp"
73 #endif
74
75 #ifdef VK_USE_PLATFORM_WIN32_KHR
76 # include "WSI/Win32SurfaceKHR.hpp"
77 #endif
78
79 #include "marl/mutex.h"
80 #include "marl/scheduler.h"
81 #include "marl/thread.h"
82 #include "marl/tsa.h"
83
84 #ifdef __ANDROID__
85 # include <unistd.h>
86
87 # include "commit.h"
88 # include <android/log.h>
89 # include <hardware/gralloc.h>
90 # include <hardware/gralloc1.h>
91 # include <sync/sync.h>
92 # ifdef SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
93 # include "VkDeviceMemoryExternalAndroid.hpp"
94 # endif
95 #endif
96
97 #include <algorithm>
98 #include <cinttypes>
99 #include <cmath>
100 #include <cstring>
101 #include <functional>
102 #include <map>
103 #include <string>
104
105 namespace {
106
107 // Enable commit_id.py and #include commit.h for other platforms.
108 #if defined(__ANDROID__) && defined(ENABLE_BUILD_VERSION_OUTPUT)
109 void logBuildVersionInformation()
110 {
111 // TODO(b/144093703): Don't call __android_log_print() directly
112 __android_log_print(ANDROID_LOG_INFO, "SwiftShader", "SwiftShader Version: %s", SWIFTSHADER_VERSION_STRING);
113 }
114 #endif // __ANDROID__ && ENABLE_BUILD_VERSION_OUTPUT
115
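// getOrCreateScheduler() lazily creates the marl scheduler that runs SwiftShader's rendering
// and compilation tasks. A weak_ptr is kept here so that all devices share a single scheduler
// instance: it is constructed on first use and destroyed once the last device holding a
// shared_ptr to it goes away.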
116 std::shared_ptr<marl::Scheduler> getOrCreateScheduler()
117 {
118 struct Scheduler
119 {
120 marl::mutex mutex;
121 std::weak_ptr<marl::Scheduler> weakptr GUARDED_BY(mutex);
122 };
123
124 static Scheduler scheduler; // TODO(b/208256248): Avoid exit-time destructor.
125
126 marl::lock lock(scheduler.mutex);
127 auto sptr = scheduler.weakptr.lock();
128 if(!sptr)
129 {
130 const sw::Configuration &config = sw::getConfiguration();
131 marl::Scheduler::Config cfg = sw::getSchedulerConfiguration(config);
132 sptr = std::make_shared<marl::Scheduler>(cfg);
133 scheduler.weakptr = sptr;
134 }
135 return sptr;
136 }
137
138 // initializeLibrary() is called by vkCreateInstance() to perform one-off global
139 // initialization of the swiftshader driver.
140 void initializeLibrary()
141 {
142 static bool doOnce = [] {
143 #if defined(__ANDROID__) && defined(ENABLE_BUILD_VERSION_OUTPUT)
144 logBuildVersionInformation();
145 #endif // __ANDROID__ && ENABLE_BUILD_VERSION_OUTPUT
146 return true;
147 }();
148 (void)doOnce;
149 }
150
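// Validates the pNext chain of a render pass create info against the valid usage rules that
// SwiftShader relies on. T is expected to be VkRenderPassCreateInfo or VkRenderPassCreateInfo2,
// both of which expose the subpass/attachment/dependency members accessed below.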
151 template<class T>
152 void ValidateRenderPassPNextChain(VkDevice device, const T *pCreateInfo)
153 {
154 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
155
156 while(extensionCreateInfo)
157 {
158 switch(extensionCreateInfo->sType)
159 {
160 case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO:
161 {
162 const VkRenderPassInputAttachmentAspectCreateInfo *inputAttachmentAspectCreateInfo = reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo *>(extensionCreateInfo);
163
164 for(uint32_t i = 0; i < inputAttachmentAspectCreateInfo->aspectReferenceCount; i++)
165 {
166 const auto &aspectReference = inputAttachmentAspectCreateInfo->pAspectReferences[i];
167 ASSERT(aspectReference.subpass < pCreateInfo->subpassCount);
168 const auto &subpassDescription = pCreateInfo->pSubpasses[aspectReference.subpass];
169 ASSERT(aspectReference.inputAttachmentIndex < subpassDescription.inputAttachmentCount);
170 const auto &attachmentReference = subpassDescription.pInputAttachments[aspectReference.inputAttachmentIndex];
171 if(attachmentReference.attachment != VK_ATTACHMENT_UNUSED)
172 {
173 // If the pNext chain includes an instance of VkRenderPassInputAttachmentAspectCreateInfo, for any
174 // element of the pInputAttachments member of any element of pSubpasses where the attachment member
175 // is not VK_ATTACHMENT_UNUSED, the aspectMask member of the corresponding element of
176 // VkRenderPassInputAttachmentAspectCreateInfo::pAspectReferences must only include aspects that are
177 // present in images of the format specified by the element of pAttachments at attachment
178 vk::Format format(pCreateInfo->pAttachments[attachmentReference.attachment].format);
179 bool isDepth = format.isDepth();
180 bool isStencil = format.isStencil();
181 ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) || (!isDepth && !isStencil));
182 ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) || isDepth);
183 ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) || isStencil);
184 }
185 }
186 }
187 break;
188 case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO:
189 {
190 const VkRenderPassMultiviewCreateInfo *multiviewCreateInfo = reinterpret_cast<const VkRenderPassMultiviewCreateInfo *>(extensionCreateInfo);
191 ASSERT((multiviewCreateInfo->subpassCount == 0) || (multiviewCreateInfo->subpassCount == pCreateInfo->subpassCount));
192 ASSERT((multiviewCreateInfo->dependencyCount == 0) || (multiviewCreateInfo->dependencyCount == pCreateInfo->dependencyCount));
193
194 bool zeroMask = (multiviewCreateInfo->pViewMasks[0] == 0);
195 for(uint32_t i = 1; i < multiviewCreateInfo->subpassCount; i++)
196 {
197 ASSERT((multiviewCreateInfo->pViewMasks[i] == 0) == zeroMask);
198 }
199
200 if(zeroMask)
201 {
202 ASSERT(multiviewCreateInfo->correlationMaskCount == 0);
203 }
204
205 for(uint32_t i = 0; i < multiviewCreateInfo->dependencyCount; i++)
206 {
207 const auto &dependency = pCreateInfo->pDependencies[i];
208 if(multiviewCreateInfo->pViewOffsets[i] != 0)
209 {
210 ASSERT(dependency.srcSubpass != dependency.dstSubpass);
211 ASSERT(dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT);
212 }
213 if(zeroMask)
214 {
215 ASSERT(!(dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT));
216 }
217 }
218
219 // If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo,
220 // each element of its pViewMasks member must not include a bit at a position
221 // greater than the value of VkPhysicalDeviceLimits::maxFramebufferLayers.
222 // Each view mask is a 32-bit value, so if maxFramebufferLayers >= 32 it's
223 // impossible for a view mask to contain a bit at an illegal position.
224 // Note: Verify the pViewMasks values instead if we hit this assert.
225 ASSERT(vk::Cast(device)->getPhysicalDevice()->getProperties().limits.maxFramebufferLayers >= 32);
226 }
227 break;
228 case VK_STRUCTURE_TYPE_MAX_ENUM:
229 // dEQP tests that this value is ignored.
230 break;
231 default:
232 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
233 break;
234 }
235
236 extensionCreateInfo = extensionCreateInfo->pNext;
237 }
238 }
239
240 // This variable holds the ICD interface version negotiated with the loader.
241 // It defaults to 1 because if vk_icdNegotiateLoaderICDInterfaceVersion is never called it means
242 // that the loader doesn't support version 2 or higher of that interface.
243 uint32_t sICDInterfaceVersion = 1;
244 // Whether any vk_icd* entry points were used. This is used to distinguish between applications that
245 // use the Vulkan loader to load SwiftShader (in which case vk_icd functions are called), and
246 // applications that load SwiftShader and grab vkGetInstanceProcAddr directly.
247 bool sICDEntryPointsUsed = false;
248
249 } // namespace
250
251 extern "C" {
252 VK_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char *pName)
253 {
254 TRACE("(VkInstance instance = %p, const char* pName = %p)", instance, pName);
255 sICDEntryPointsUsed = true;
256
257 return vk::GetInstanceProcAddr(vk::Cast(instance), pName);
258 }
259
260 VK_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion)
261 {
262 sICDEntryPointsUsed = true;
263
264 sICDInterfaceVersion = std::min(*pSupportedVersion, 7u);
265 *pSupportedVersion = sICDInterfaceVersion;
266 return VK_SUCCESS;
267 }
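// Illustrative sketch (not part of the driver) of the handshake performed by a version-2+
// loader; 'loaderMaxVersion' is a hypothetical loader-side value:
//
//   uint32_t version = loaderMaxVersion;                  // highest version the loader supports
//   vk_icdNegotiateLoaderICDInterfaceVersion(&version);   // driver clamps it to min(version, 7)
//   // 'version' now holds the interface version both sides agree to use.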
268
269 VK_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char *pName)
270 {
271 sICDEntryPointsUsed = true;
272 return vk::GetPhysicalDeviceProcAddr(vk::Cast(instance), pName);
273 }
274
275 #if VK_USE_PLATFORM_WIN32_KHR
276
277 VKAPI_ATTR VkResult VKAPI_CALL vk_icdEnumerateAdapterPhysicalDevices(VkInstance instance, LUID adapterLUID, uint32_t *pPhysicalDeviceCount, VkPhysicalDevice *pPhysicalDevices)
278 {
279 sICDEntryPointsUsed = true;
280 if(!pPhysicalDevices)
281 {
282 *pPhysicalDeviceCount = 0;
283 }
284
285 return VK_SUCCESS;
286 }
287
288 #endif // VK_USE_PLATFORM_WIN32_KHR
289
290 #if VK_USE_PLATFORM_FUCHSIA
291
292 // This symbol must be exported by a Fuchsia Vulkan ICD. The Vulkan loader will
293 // call it, passing the address of a global function pointer that can later be
294 // used at runtime to connect to Fuchsia FIDL services, as required by certain
295 // extensions. See https://fxbug.dev/13095 for more details.
296 //
297 // NOTE: This entry point has not been upstreamed to Khronos yet, which reserves
298 // all symbols starting with vk_icd. See https://fxbug.dev/13074 which
299 // tracks upstreaming progress.
300 VK_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdInitializeConnectToServiceCallback(
301 PFN_vkConnectToService callback)
302 {
303 TRACE("(callback = %p)", callback);
304 sICDEntryPointsUsed = true;
305 vk::icdFuchsiaServiceConnectCallback = callback;
306 return VK_SUCCESS;
307 }
308
309 #endif // VK_USE_PLATFORM_FUCHSIA
310
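// Extends VkExtensionProperties with a runtime predicate so that platform-dependent
// extensions (e.g. WSI surface extensions) are only advertised when actually usable.
// The predicate defaults to "supported".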
311 struct ExtensionProperties : public VkExtensionProperties
312 {
313 std::function<bool()> isSupported = [] { return true; };
314 };
315
316 // TODO(b/208256248): Avoid exit-time destructor.
317 static const ExtensionProperties instanceExtensionProperties[] = {
318 { { VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME, VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION } },
319 { { VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION } },
320 { { VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION } },
321 { { VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION } },
322 { { VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION } },
323 { { VK_EXT_DEBUG_UTILS_EXTENSION_NAME, VK_EXT_DEBUG_UTILS_SPEC_VERSION } },
324 { { VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME, VK_EXT_HEADLESS_SURFACE_SPEC_VERSION } },
325 #ifndef __ANDROID__
326 { { VK_KHR_SURFACE_EXTENSION_NAME, VK_KHR_SURFACE_SPEC_VERSION } },
327 { { VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME, VK_EXT_SURFACE_MAINTENANCE_1_SPEC_VERSION } },
328 { { VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME, VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION } },
329 #endif
330 #ifdef VK_USE_PLATFORM_XCB_KHR
331 { { VK_KHR_XCB_SURFACE_EXTENSION_NAME, VK_KHR_XCB_SURFACE_SPEC_VERSION }, [] { return vk::XcbSurfaceKHR::isSupported(); } },
332 #endif
333 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
334 { { VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, VK_KHR_WAYLAND_SURFACE_SPEC_VERSION }, [] { return vk::WaylandSurfaceKHR::isSupported(); } },
335 #endif
336 #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
337 { { VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME, VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION } },
338 #endif
339 #ifdef VK_USE_PLATFORM_DISPLAY_KHR
340 { { VK_KHR_DISPLAY_EXTENSION_NAME, VK_KHR_DISPLAY_SPEC_VERSION } },
341 #endif
342 #ifdef VK_USE_PLATFORM_MACOS_MVK
343 { { VK_MVK_MACOS_SURFACE_EXTENSION_NAME, VK_MVK_MACOS_SURFACE_SPEC_VERSION } },
344 #endif
345 #ifdef VK_USE_PLATFORM_METAL_EXT
346 { { VK_EXT_METAL_SURFACE_EXTENSION_NAME, VK_EXT_METAL_SURFACE_SPEC_VERSION } },
347 #endif
348 #ifdef VK_USE_PLATFORM_WIN32_KHR
349 { { VK_KHR_WIN32_SURFACE_EXTENSION_NAME, VK_KHR_WIN32_SURFACE_SPEC_VERSION } },
350 #endif
351 };
352
353 // TODO(b/208256248): Avoid exit-time destructor.
354 static const ExtensionProperties deviceExtensionProperties[] = {
355 { { VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME, VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION } },
356 // Vulkan 1.1 promoted extensions
357 { { VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, VK_KHR_BIND_MEMORY_2_SPEC_VERSION } },
358 { { VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME, VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION } },
359 { { VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION } },
360 { { VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME, VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION } },
361 { { VK_KHR_DEVICE_GROUP_EXTENSION_NAME, VK_KHR_DEVICE_GROUP_SPEC_VERSION } },
362 { { VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME, VK_KHR_EXTERNAL_FENCE_SPEC_VERSION } },
363 { { VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION } },
364 { { VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION } },
365 { { VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION } },
366 { { VK_KHR_MAINTENANCE1_EXTENSION_NAME, VK_KHR_MAINTENANCE1_SPEC_VERSION } },
367 { { VK_KHR_MAINTENANCE2_EXTENSION_NAME, VK_KHR_MAINTENANCE2_SPEC_VERSION } },
368 { { VK_KHR_MAINTENANCE3_EXTENSION_NAME, VK_KHR_MAINTENANCE3_SPEC_VERSION } },
369 { { VK_KHR_MULTIVIEW_EXTENSION_NAME, VK_KHR_MULTIVIEW_SPEC_VERSION } },
370 { { VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME, VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION } },
371 { { VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION } },
372 { { VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME, VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION } },
373 // Only 1.1 core version of this is supported. The extension has additional requirements
374 //{{ VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME, VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION }},
375 { { VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME, VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION } },
376 // Only 1.1 core version of this is supported. The extension has additional requirements
377 //{{ VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME, VK_KHR_VARIABLE_POINTERS_SPEC_VERSION }},
378 { { VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION } },
379 #ifndef __ANDROID__
380 // We fully support the KHR_swapchain v70 additions, so just track the spec version.
381 { { VK_KHR_SWAPCHAIN_EXTENSION_NAME, VK_KHR_SWAPCHAIN_SPEC_VERSION } },
382 #else
383 // We only support V7 of this extension. Missing functionality: in V8,
384 // it becomes possible to pass a VkNativeBufferANDROID structure to
385 // vkBindImageMemory2. Android's swapchain implementation does this in
386 // order to support passing VkBindImageMemorySwapchainInfoKHR
387 // (from KHR_swapchain v70) to vkBindImageMemory2.
388 { { VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME, 7 } },
389 #endif
390 #if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
391 { { VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION } },
392 #endif
393 #if SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
394 { { VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION } },
395 #endif
396 #if SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
397 { { VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION } },
398 #endif
399 #if !defined(__APPLE__)
400 { { VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME, VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION } },
401 #endif
402 #if VK_USE_PLATFORM_FUCHSIA
403 { { VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME, VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION } },
404 { { VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME, VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION } },
405 #endif
406 { { VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME, VK_EXT_PROVOKING_VERTEX_SPEC_VERSION } },
407 { { VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME, VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION } },
408 #ifdef SWIFTSHADER_DEVICE_MEMORY_REPORT
409 { { VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME, VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION } },
410 #endif // SWIFTSHADER_DEVICE_MEMORY_REPORT
411 // Vulkan 1.2 promoted extensions
412 { { VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME, VK_EXT_HOST_QUERY_RESET_SPEC_VERSION } },
413 { { VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME, VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION } },
414 { { VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME, VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION } },
415 { { VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME, VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION } },
416 { { VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME, VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION } },
417 { { VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME, VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION } },
418 { { VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME, VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION } },
419 { { VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME, VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION } },
420 { { VK_KHR_SPIRV_1_4_EXTENSION_NAME, VK_KHR_SPIRV_1_4_SPEC_VERSION } },
421 { { VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME, VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION } },
422 { { VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME, VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION } },
423 // Vulkan 1.3 promoted extensions
424 { { VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME, VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION } },
425 { { VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME, VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION } },
426 { { VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME, VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION } },
427 { { VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME, VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION } },
428 { { VK_EXT_PRIVATE_DATA_EXTENSION_NAME, VK_EXT_PRIVATE_DATA_SPEC_VERSION } },
429 { { VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME, VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION } },
430 { { VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME, VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION } },
431 { { VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME, VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION } },
432 { { VK_EXT_TOOLING_INFO_EXTENSION_NAME, VK_EXT_TOOLING_INFO_SPEC_VERSION } },
433 { { VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME, VK_KHR_COPY_COMMANDS_2_SPEC_VERSION } },
434 { { VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME, VK_KHR_DYNAMIC_RENDERING_SPEC_VERSION } },
435 { { VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME, VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION } },
436 { { VK_KHR_MAINTENANCE_4_EXTENSION_NAME, VK_KHR_MAINTENANCE_4_SPEC_VERSION } },
437 { { VK_KHR_SHADER_INTEGER_DOT_PRODUCT_EXTENSION_NAME, VK_KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION } },
438 { { VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME, VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION } },
439 { { VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME, VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION } },
440 { { VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME, VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION } },
441 // Roadmap 2022 extension
442 { { VK_KHR_GLOBAL_PRIORITY_EXTENSION_NAME, VK_KHR_GLOBAL_PRIORITY_SPEC_VERSION } },
443 // Additional extension
444 { { VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME, VK_EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION } },
445 { { VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME, VK_GOOGLE_DECORATE_STRING_SPEC_VERSION } },
446 { { VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME, VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION } },
447 { { VK_GOOGLE_USER_TYPE_EXTENSION_NAME, VK_GOOGLE_USER_TYPE_SPEC_VERSION } },
448 { { VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME, VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION } },
449 { { VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME, VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION } },
450 { { VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME, VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION } },
451 { { VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME, VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION } },
452 #ifndef __ANDROID__
453 { { VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME, VK_EXT_SWAPCHAIN_MAINTENANCE_1_SPEC_VERSION } },
454 #endif
455 { { VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME, VK_EXT_GRAPHICS_PIPELINE_LIBRARY_SPEC_VERSION } },
456 { { VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME, VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION } },
457 { { VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME, VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION } },
458 { { VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME, VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION } },
459 { { VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME, VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION } },
460 // The following extension is only used to add support for Bresenham lines
461 { { VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME, VK_EXT_LINE_RASTERIZATION_SPEC_VERSION } },
462 // The following extension is used by ANGLE to emulate blitting the stencil buffer
463 { { VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION } },
464 { { VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME, VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION } },
465 // Useful for D3D emulation
466 { { VK_EXT_4444_FORMATS_EXTENSION_NAME, VK_EXT_4444_FORMATS_SPEC_VERSION } },
467 // Used by ANGLE to support GL_KHR_blend_equation_advanced
468 { { VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME, VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION } },
469 // Used by ANGLE to implement triangle/etc list restarts as possible in OpenGL
470 { { VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME, VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION } },
471 { { VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME, VK_EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION } },
472 { { VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME, VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION } },
473 { { VK_EXT_HOST_IMAGE_COPY_EXTENSION_NAME, VK_EXT_HOST_IMAGE_COPY_SPEC_VERSION } },
474 { { VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME, VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION } },
475 };
476
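// Counts the entries of the given extension table whose isSupported() predicate returns true.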
477 static uint32_t numSupportedExtensions(const ExtensionProperties *extensionProperties, uint32_t extensionPropertiesCount)
478 {
479 uint32_t count = 0;
480
481 for(uint32_t i = 0; i < extensionPropertiesCount; i++)
482 {
483 if(extensionProperties[i].isSupported())
484 {
485 count++;
486 }
487 }
488
489 return count;
490 }
491
492 static uint32_t numInstanceSupportedExtensions()
493 {
494 return numSupportedExtensions(instanceExtensionProperties, sizeof(instanceExtensionProperties) / sizeof(instanceExtensionProperties[0]));
495 }
496
497 static uint32_t numDeviceSupportedExtensions()
498 {
499 return numSupportedExtensions(deviceExtensionProperties, sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0]));
500 }
501
502 static bool hasExtension(const char *extensionName, const ExtensionProperties *extensionProperties, uint32_t extensionPropertiesCount)
503 {
504 for(uint32_t i = 0; i < extensionPropertiesCount; i++)
505 {
506 if(strcmp(extensionName, extensionProperties[i].extensionName) == 0)
507 {
508 return extensionProperties[i].isSupported();
509 }
510 }
511
512 return false;
513 }
514
515 static bool hasInstanceExtension(const char *extensionName)
516 {
517 return hasExtension(extensionName, instanceExtensionProperties, sizeof(instanceExtensionProperties) / sizeof(instanceExtensionProperties[0]));
518 }
519
520 static bool hasDeviceExtension(const char *extensionName)
521 {
522 return hasExtension(extensionName, deviceExtensionProperties, sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0]));
523 }
524
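// Copies up to 'toCopy' supported entries from the given extension table into pProperties,
// skipping entries whose isSupported() predicate returns false.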
525 static void copyExtensions(VkExtensionProperties *pProperties, uint32_t toCopy, const ExtensionProperties *extensionProperties, uint32_t extensionPropertiesCount)
526 {
527 for(uint32_t i = 0, j = 0; i < toCopy; i++, j++)
528 {
529 while((j < extensionPropertiesCount) && !extensionProperties[j].isSupported())
530 {
531 j++;
532 }
533 if(j < extensionPropertiesCount)
534 {
535 pProperties[i] = extensionProperties[j];
536 }
537 }
538 }
539
540 static void copyInstanceExtensions(VkExtensionProperties *pProperties, uint32_t toCopy)
541 {
542 copyExtensions(pProperties, toCopy, instanceExtensionProperties, sizeof(instanceExtensionProperties) / sizeof(instanceExtensionProperties[0]));
543 }
544
545 static void copyDeviceExtensions(VkExtensionProperties *pProperties, uint32_t toCopy)
546 {
547 copyExtensions(pProperties, toCopy, deviceExtensionProperties, sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0]));
548 }
549
550 VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkInstance *pInstance)
551 {
552 TRACE("(const VkInstanceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkInstance* pInstance = %p)",
553 pCreateInfo, pAllocator, pInstance);
554
555 initializeLibrary();
556
557 // ICD interface rule for version 5 of the interface:
558 // - If the loader supports version 4 or lower, the driver must fail with
559 // VK_ERROR_INCOMPATIBLE_DRIVER for all vkCreateInstance calls with apiVersion
560 // set to > Vulkan 1.0
561 // - If the loader supports version 5 or above, the loader must fail with
562 // VK_ERROR_INCOMPATIBLE_DRIVER if it can't handle the apiVersion, and drivers
563 // should fail with VK_ERROR_INCOMPATIBLE_DRIVER only if they cannot support the
564 // specified apiVersion.
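// For example (illustrative): an application requesting apiVersion = VK_API_VERSION_1_3
// through a loader that only negotiated interface version 4 or lower is rejected here,
// whereas through a version-5+ loader it proceeds as long as the requested major/minor
// version does not exceed vk::API_VERSION.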
565 if(pCreateInfo->pApplicationInfo)
566 {
567 uint32_t appApiVersion = pCreateInfo->pApplicationInfo->apiVersion;
568 if(sICDEntryPointsUsed && sICDInterfaceVersion <= 4)
569 {
570 // Any version above 1.0 is an error.
571 if(VK_API_VERSION_MAJOR(appApiVersion) != 1 || VK_API_VERSION_MINOR(appApiVersion) != 0)
572 {
573 return VK_ERROR_INCOMPATIBLE_DRIVER;
574 }
575 }
576 else
577 {
578 if(VK_API_VERSION_MAJOR(appApiVersion) > VK_API_VERSION_MAJOR(vk::API_VERSION))
579 {
580 return VK_ERROR_INCOMPATIBLE_DRIVER;
581 }
582 if((VK_API_VERSION_MAJOR(appApiVersion) == VK_API_VERSION_MAJOR(vk::API_VERSION)) &&
583 VK_API_VERSION_MINOR(appApiVersion) > VK_API_VERSION_MINOR(vk::API_VERSION))
584 {
585 return VK_ERROR_INCOMPATIBLE_DRIVER;
586 }
587 }
588 }
589
590 if(pCreateInfo->flags != 0)
591 {
592 // Vulkan 1.3: "flags is reserved for future use." "flags must be 0"
593 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
594 }
595
596 if(pCreateInfo->enabledLayerCount != 0)
597 {
598 // SwiftShader doesn't support any layers, so creating an instance with enabled layers must fail with VK_ERROR_LAYER_NOT_PRESENT
599 return VK_ERROR_LAYER_NOT_PRESENT;
600 }
601
602 for(uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; ++i)
603 {
604 if(!hasInstanceExtension(pCreateInfo->ppEnabledExtensionNames[i]))
605 {
606 return VK_ERROR_EXTENSION_NOT_PRESENT;
607 }
608 }
609
610 VkDebugUtilsMessengerEXT messenger = { VK_NULL_HANDLE };
611 if(pCreateInfo->pNext)
612 {
613 const VkBaseInStructure *createInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
614 switch(createInfo->sType)
615 {
616 case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT:
617 {
618 const VkDebugUtilsMessengerCreateInfoEXT *debugUtilsMessengerCreateInfoEXT = reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>(createInfo);
619 VkResult result = vk::DebugUtilsMessenger::Create(pAllocator, debugUtilsMessengerCreateInfoEXT, &messenger);
620 if(result != VK_SUCCESS)
621 {
622 return result;
623 }
624 }
625 break;
626 case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO:
627 // According to the Vulkan spec, section 2.7.2. Implicit Valid Usage:
628 // "The values VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO and
629 // VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO are reserved for
630 // internal use by the loader, and do not have corresponding
631 // Vulkan structures in this Specification."
632 break;
633 case VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG:
634 // TODO(b/229112690): This structure is only meant to be used by the Vulkan Loader
635 // and should not be forwarded to the driver.
636 break;
637 default:
638 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(createInfo->sType).c_str());
639 break;
640 }
641 }
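// Illustrative application-side usage (not part of the driver): chaining a
// VkDebugUtilsMessengerCreateInfoEXT so that messages emitted during instance creation
// itself are captured. 'myCallback' is a hypothetical application callback:
//
//   VkDebugUtilsMessengerCreateInfoEXT messengerInfo = { VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT };
//   messengerInfo.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
//   messengerInfo.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT;
//   messengerInfo.pfnUserCallback = myCallback;
//   VkInstanceCreateInfo instanceInfo = { VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO };
//   instanceInfo.pNext = &messengerInfo;
//   vkCreateInstance(&instanceInfo, nullptr, &instance);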
642
643 *pInstance = VK_NULL_HANDLE;
644 VkPhysicalDevice physicalDevice = VK_NULL_HANDLE;
645
646 VkResult result = vk::DispatchablePhysicalDevice::Create(pAllocator, pCreateInfo, &physicalDevice);
647 if(result != VK_SUCCESS)
648 {
649 vk::destroy(messenger, pAllocator);
650 return result;
651 }
652
653 result = vk::DispatchableInstance::Create(pAllocator, pCreateInfo, pInstance, physicalDevice, vk::Cast(messenger));
654 if(result != VK_SUCCESS)
655 {
656 vk::destroy(messenger, pAllocator);
657 vk::destroy(physicalDevice, pAllocator);
658 return result;
659 }
660
661 return result;
662 }
663
664 VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator)
665 {
666 TRACE("(VkInstance instance = %p, const VkAllocationCallbacks* pAllocator = %p)", instance, pAllocator);
667
668 vk::destroy(instance, pAllocator);
669 }
670
671 VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount, VkPhysicalDevice *pPhysicalDevices)
672 {
673 TRACE("(VkInstance instance = %p, uint32_t* pPhysicalDeviceCount = %p, VkPhysicalDevice* pPhysicalDevices = %p)",
674 instance, pPhysicalDeviceCount, pPhysicalDevices);
675
676 return vk::Cast(instance)->getPhysicalDevices(pPhysicalDeviceCount, pPhysicalDevices);
677 }
678
679 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures)
680 {
681 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceFeatures* pFeatures = %p)",
682 physicalDevice, pFeatures);
683
684 *pFeatures = vk::Cast(physicalDevice)->getFeatures();
685 }
686
687 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties *pFormatProperties)
688 {
689 TRACE("GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkFormatProperties* pFormatProperties = %p)",
690 physicalDevice, (int)format, pFormatProperties);
691
692 vk::PhysicalDevice::GetFormatProperties(format, pFormatProperties);
693 }
694
695 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties *pImageFormatProperties)
696 {
697 TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkImageType type = %d, VkImageTiling tiling = %d, VkImageUsageFlags usage = %d, VkImageCreateFlags flags = %d, VkImageFormatProperties* pImageFormatProperties = %p)",
698 physicalDevice, (int)format, (int)type, (int)tiling, usage, flags, pImageFormatProperties);
699
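// Forward to the *2 variant of this entry point by wrapping the parameters in
// VkPhysicalDeviceImageFormatInfo2 and unwrapping the resulting VkImageFormatProperties2.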
700 VkPhysicalDeviceImageFormatInfo2 info2 = {};
701 info2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
702 info2.pNext = nullptr;
703 info2.format = format;
704 info2.type = type;
705 info2.tiling = tiling;
706 info2.usage = usage;
707 info2.flags = flags;
708
709 VkImageFormatProperties2 properties2 = {};
710 properties2.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
711 properties2.pNext = nullptr;
712
713 VkResult result = vkGetPhysicalDeviceImageFormatProperties2(physicalDevice, &info2, &properties2);
714
715 *pImageFormatProperties = properties2.imageFormatProperties;
716
717 return result;
718 }
719
720 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties)
721 {
722 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceProperties* pProperties = %p)",
723 physicalDevice, pProperties);
724
725 *pProperties = vk::Cast(physicalDevice)->getProperties();
726 }
727
728 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties *pQueueFamilyProperties)
729 {
730 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pQueueFamilyPropertyCount = %p, VkQueueFamilyProperties* pQueueFamilyProperties = %p))", physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
731
732 if(!pQueueFamilyProperties)
733 {
734 *pQueueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();
735 }
736 else
737 {
738 vk::Cast(physicalDevice)->getQueueFamilyProperties(*pQueueFamilyPropertyCount, pQueueFamilyProperties);
739 }
740 }
741
742 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties *pMemoryProperties)
743 {
744 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceMemoryProperties* pMemoryProperties = %p)", physicalDevice, pMemoryProperties);
745
746 *pMemoryProperties = vk::PhysicalDevice::GetMemoryProperties();
747 }
748
749 VK_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *pName)
750 {
751 TRACE("(VkInstance instance = %p, const char* pName = %p)", instance, pName);
752
753 return vk::GetInstanceProcAddr(vk::Cast(instance), pName);
754 }
755
756 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice device, const char *pName)
757 {
758 TRACE("(VkDevice device = %p, const char* pName = %p)", device, pName);
759
760 return vk::GetDeviceProcAddr(vk::Cast(device), pName);
761 }
762
763 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDevice *pDevice)
764 {
765 TRACE("(VkPhysicalDevice physicalDevice = %p, const VkDeviceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDevice* pDevice = %p)",
766 physicalDevice, pCreateInfo, pAllocator, pDevice);
767
768 if(pCreateInfo->flags != 0)
769 {
770 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
771 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
772 }
773
774 if(pCreateInfo->enabledLayerCount != 0)
775 {
776 // "The ppEnabledLayerNames and enabledLayerCount members of VkDeviceCreateInfo are deprecated and their values must be ignored by implementations."
777 UNSUPPORTED("pCreateInfo->enabledLayerCount != 0");
778 }
779
780 for(uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; ++i)
781 {
782 if(!hasDeviceExtension(pCreateInfo->ppEnabledExtensionNames[i]))
783 {
784 return VK_ERROR_EXTENSION_NOT_PRESENT;
785 }
786 }
787
788 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
789
790 const VkPhysicalDeviceFeatures *enabledFeatures = pCreateInfo->pEnabledFeatures;
791
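// Walk the pNext chain: feature structs for always-on functionality are simply acknowledged,
// while feature structs for optional functionality are checked against the physical device's
// supported features, failing with VK_ERROR_FEATURE_NOT_PRESENT when a request can't be honored.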
792 while(extensionCreateInfo)
793 {
794 switch(extensionCreateInfo->sType)
795 {
796 case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO:
797 // According to the Vulkan spec, section 2.7.2. Implicit Valid Usage:
798 // "The values VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO and
799 // VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO are reserved for
800 // internal use by the loader, and do not have corresponding
801 // Vulkan structures in this Specification."
802 break;
803 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
804 {
805 ASSERT(!pCreateInfo->pEnabledFeatures); // "If the pNext chain includes a VkPhysicalDeviceFeatures2 structure, then pEnabledFeatures must be NULL"
806
807 const VkPhysicalDeviceFeatures2 *physicalDeviceFeatures2 = reinterpret_cast<const VkPhysicalDeviceFeatures2 *>(extensionCreateInfo);
808
809 enabledFeatures = &physicalDeviceFeatures2->features;
810 }
811 break;
812 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES:
813 {
814 const VkPhysicalDeviceSamplerYcbcrConversionFeatures *samplerYcbcrConversionFeatures = reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures *>(extensionCreateInfo);
815
816 // YCbCr conversion is supported.
817 // samplerYcbcrConversionFeatures->samplerYcbcrConversion can be VK_TRUE or VK_FALSE.
818 // No action needs to be taken on our end in either case; it's the app's responsibility that
819 // "To create a sampler Y'CbCr conversion, the samplerYcbcrConversion feature must be enabled."
820 (void)samplerYcbcrConversionFeatures->samplerYcbcrConversion;
821 }
822 break;
823 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES:
824 {
825 const VkPhysicalDevice16BitStorageFeatures *storage16BitFeatures = reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures *>(extensionCreateInfo);
826
827 if(storage16BitFeatures->storageBuffer16BitAccess != VK_FALSE ||
828 storage16BitFeatures->uniformAndStorageBuffer16BitAccess != VK_FALSE ||
829 storage16BitFeatures->storagePushConstant16 != VK_FALSE ||
830 storage16BitFeatures->storageInputOutput16 != VK_FALSE)
831 {
832 return VK_ERROR_FEATURE_NOT_PRESENT;
833 }
834 }
835 break;
836 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES:
837 {
838 const VkPhysicalDeviceVariablePointerFeatures *variablePointerFeatures = reinterpret_cast<const VkPhysicalDeviceVariablePointerFeatures *>(extensionCreateInfo);
839
840 if(variablePointerFeatures->variablePointersStorageBuffer != VK_FALSE ||
841 variablePointerFeatures->variablePointers != VK_FALSE)
842 {
843 return VK_ERROR_FEATURE_NOT_PRESENT;
844 }
845 }
846 break;
847 case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO:
848 {
849 const VkDeviceGroupDeviceCreateInfo *groupDeviceCreateInfo = reinterpret_cast<const VkDeviceGroupDeviceCreateInfo *>(extensionCreateInfo);
850
851 if((groupDeviceCreateInfo->physicalDeviceCount != 1) ||
852 (groupDeviceCreateInfo->pPhysicalDevices[0] != physicalDevice))
853 {
854 return VK_ERROR_FEATURE_NOT_PRESENT;
855 }
856 }
857 break;
858 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
859 {
860 const VkPhysicalDeviceMultiviewFeatures *multiviewFeatures = reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures *>(extensionCreateInfo);
861
862 if(multiviewFeatures->multiviewGeometryShader ||
863 multiviewFeatures->multiviewTessellationShader)
864 {
865 return VK_ERROR_FEATURE_NOT_PRESENT;
866 }
867 }
868 break;
869 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES:
870 {
871 const VkPhysicalDeviceShaderDrawParametersFeatures *shaderDrawParametersFeatures = reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures *>(extensionCreateInfo);
872
873 if(shaderDrawParametersFeatures->shaderDrawParameters)
874 {
875 return VK_ERROR_FEATURE_NOT_PRESENT;
876 }
877 }
878 break;
879 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES:
880 {
881 const VkPhysicalDeviceDynamicRenderingFeatures *dynamicRenderingFeatures = reinterpret_cast<const VkPhysicalDeviceDynamicRenderingFeatures *>(extensionCreateInfo);
882
883 // Dynamic rendering is supported
884 (void)(dynamicRenderingFeatures->dynamicRendering);
885 }
886 break;
887 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES:
888 {
889 const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *separateDepthStencilLayoutsFeatures = reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *>(extensionCreateInfo);
890
891 // Separate depth and stencil layouts is already supported
892 (void)(separateDepthStencilLayoutsFeatures->separateDepthStencilLayouts);
893 }
894 break;
895 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT:
896 {
897 const auto *lineRasterizationFeatures = reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT *>(extensionCreateInfo);
898 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(lineRasterizationFeatures);
899 if(!hasFeatures)
900 {
901 return VK_ERROR_FEATURE_NOT_PRESENT;
902 }
903 }
904 break;
905 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT:
906 {
907 const VkPhysicalDeviceProvokingVertexFeaturesEXT *provokingVertexFeatures = reinterpret_cast<const VkPhysicalDeviceProvokingVertexFeaturesEXT *>(extensionCreateInfo);
908 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(provokingVertexFeatures);
909 if(!hasFeatures)
910 {
911 return VK_ERROR_FEATURE_NOT_PRESENT;
912 }
913 }
914 break;
915 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES:
916 {
917 const VkPhysicalDeviceImageRobustnessFeatures *imageRobustnessFeatures = reinterpret_cast<const VkPhysicalDeviceImageRobustnessFeatures *>(extensionCreateInfo);
918
919 // We currently always provide robust image accesses. When the feature is disabled, results are
920 // undefined (for images with Dim != Buffer), so providing robustness is also acceptable.
921 // TODO(b/159329067): Only provide robustness when requested.
922 (void)imageRobustnessFeatures->robustImageAccess;
923 }
924 break;
925 // For unsupported structures, check that we don't expose the corresponding extension string:
926 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT:
927 ASSERT(!hasDeviceExtension(VK_EXT_ROBUSTNESS_2_EXTENSION_NAME));
928 break;
929 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES:
930 {
931 const VkPhysicalDeviceImagelessFramebufferFeaturesKHR *imagelessFramebufferFeatures = reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeaturesKHR *>(extensionCreateInfo);
932 // Always provide Imageless Framebuffers
933 (void)imagelessFramebufferFeatures->imagelessFramebuffer;
934 }
935 break;
936 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES:
937 {
938 const VkPhysicalDeviceScalarBlockLayoutFeatures *scalarBlockLayoutFeatures = reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures *>(extensionCreateInfo);
939
940 // VK_EXT_scalar_block_layout is supported, allowing C-like structure layout for SPIR-V blocks.
941 (void)scalarBlockLayoutFeatures->scalarBlockLayout;
942 }
943 break;
944 #ifdef SWIFTSHADER_DEVICE_MEMORY_REPORT
945 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT:
946 {
947 const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *deviceMemoryReportFeatures = reinterpret_cast<const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *>(extensionCreateInfo);
948 (void)deviceMemoryReportFeatures->deviceMemoryReport;
949 }
950 break;
951 #endif // SWIFTSHADER_DEVICE_MEMORY_REPORT
952 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES:
953 {
954 const VkPhysicalDeviceHostQueryResetFeatures *hostQueryResetFeatures = reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures *>(extensionCreateInfo);
955
956 // VK_EXT_host_query_reset is always enabled.
957 (void)hostQueryResetFeatures->hostQueryReset;
958 }
959 break;
960 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES:
961 {
962 const VkPhysicalDevicePipelineCreationCacheControlFeatures *pipelineCreationCacheControlFeatures = reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeatures *>(extensionCreateInfo);
963
964 // VK_EXT_pipeline_creation_cache_control is always enabled.
965 (void)pipelineCreationCacheControlFeatures->pipelineCreationCacheControl;
966 }
967 break;
968 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES:
969 {
970 const auto *tsFeatures = reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures *>(extensionCreateInfo);
971
972 // VK_KHR_timeline_semaphores is always enabled
973 (void)tsFeatures->timelineSemaphore;
974 }
975 break;
976 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT:
977 {
978 const auto *customBorderColorFeatures = reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT *>(extensionCreateInfo);
979
980 // VK_EXT_custom_border_color is always enabled
981 (void)customBorderColorFeatures->customBorderColors;
982 (void)customBorderColorFeatures->customBorderColorWithoutFormat;
983 }
984 break;
985 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES:
986 {
987 const auto *vk11Features = reinterpret_cast<const VkPhysicalDeviceVulkan11Features *>(extensionCreateInfo);
988 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(vk11Features);
989 if(!hasFeatures)
990 {
991 return VK_ERROR_FEATURE_NOT_PRESENT;
992 }
993 }
994 break;
995 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES:
996 {
997 const auto *vk12Features = reinterpret_cast<const VkPhysicalDeviceVulkan12Features *>(extensionCreateInfo);
998 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(vk12Features);
999 if(!hasFeatures)
1000 {
1001 return VK_ERROR_FEATURE_NOT_PRESENT;
1002 }
1003 }
1004 break;
1005 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES:
1006 {
1007 const auto *vk13Features = reinterpret_cast<const VkPhysicalDeviceVulkan13Features *>(extensionCreateInfo);
1008 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(vk13Features);
1009 if(!hasFeatures)
1010 {
1011 return VK_ERROR_FEATURE_NOT_PRESENT;
1012 }
1013 }
1014 break;
1015 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT:
1016 {
1017 const auto *depthClipFeatures = reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT *>(extensionCreateInfo);
1018 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(depthClipFeatures);
1019 if(!hasFeatures)
1020 {
1021 return VK_ERROR_FEATURE_NOT_PRESENT;
1022 }
1023 }
1024 break;
1025 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT:
1026 {
1027 const auto *blendOpFeatures = reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>(extensionCreateInfo);
1028 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(blendOpFeatures);
1029 if(!hasFeatures)
1030 {
1031 return VK_ERROR_FEATURE_NOT_PRESENT;
1032 }
1033 }
1034 break;
1035 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT:
1036 {
1037 const auto *dynamicStateFeatures = reinterpret_cast<const VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *>(extensionCreateInfo);
1038 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(dynamicStateFeatures);
1039 if(!hasFeatures)
1040 {
1041 return VK_ERROR_FEATURE_NOT_PRESENT;
1042 }
1043 }
1044 break;
1045 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT:
1046 {
1047 const auto *dynamicStateFeatures = reinterpret_cast<const VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>(extensionCreateInfo);
1048 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(dynamicStateFeatures);
1049 if(!hasFeatures)
1050 {
1051 return VK_ERROR_FEATURE_NOT_PRESENT;
1052 }
1053 }
1054 break;
1055 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES:
1056 {
1057 const auto *privateDataFeatures = reinterpret_cast<const VkPhysicalDevicePrivateDataFeatures *>(extensionCreateInfo);
1058 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(privateDataFeatures);
1059 if(!hasFeatures)
1060 {
1061 return VK_ERROR_FEATURE_NOT_PRESENT;
1062 }
1063 }
1064 break;
1065 case VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO:
1066 {
1067 const auto *privateDataCreateInfo = reinterpret_cast<const VkDevicePrivateDataCreateInfo *>(extensionCreateInfo);
1068 (void)privateDataCreateInfo->privateDataSlotRequestCount;
1069 }
1070 break;
1071 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES:
1072 {
1073 const auto *textureCompressionASTCHDRFeatures = reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeatures *>(extensionCreateInfo);
1074 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(textureCompressionASTCHDRFeatures);
1075 if(!hasFeatures)
1076 {
1077 return VK_ERROR_FEATURE_NOT_PRESENT;
1078 }
1079 }
1080 break;
1081 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES:
1082 {
1083 const auto *shaderDemoteToHelperInvocationFeatures = reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *>(extensionCreateInfo);
1084 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(shaderDemoteToHelperInvocationFeatures);
1085 if(!hasFeatures)
1086 {
1087 return VK_ERROR_FEATURE_NOT_PRESENT;
1088 }
1089 }
1090 break;
1091 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES:
1092 {
1093 const auto *shaderTerminateInvocationFeatures = reinterpret_cast<const VkPhysicalDeviceShaderTerminateInvocationFeatures *>(extensionCreateInfo);
1094 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(shaderTerminateInvocationFeatures);
1095 if(!hasFeatures)
1096 {
1097 return VK_ERROR_FEATURE_NOT_PRESENT;
1098 }
1099 }
1100 break;
1101 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES:
1102 {
1103 const auto *subgroupSizeControlFeatures = reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeatures *>(extensionCreateInfo);
1104 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(subgroupSizeControlFeatures);
1105 if(!hasFeatures)
1106 {
1107 return VK_ERROR_FEATURE_NOT_PRESENT;
1108 }
1109 }
1110 break;
1111 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES:
1112 {
1113 const auto *uniformBlockFeatures = reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeatures *>(extensionCreateInfo);
1114 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(uniformBlockFeatures);
1115 if(!hasFeatures)
1116 {
1117 return VK_ERROR_FEATURE_NOT_PRESENT;
1118 }
1119 }
1120 break;
1121 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES:
1122 {
1123 const auto *integerDotProductFeatures = reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductFeatures *>(extensionCreateInfo);
1124 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(integerDotProductFeatures);
1125 if(!hasFeatures)
1126 {
1127 return VK_ERROR_FEATURE_NOT_PRESENT;
1128 }
1129 }
1130 break;
1131 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES:
1132 {
1133 const auto *zeroInitializeWorkgroupMemoryFeatures = reinterpret_cast<const VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *>(extensionCreateInfo);
1134 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(zeroInitializeWorkgroupMemoryFeatures);
1135 if(!hasFeatures)
1136 {
1137 return VK_ERROR_FEATURE_NOT_PRESENT;
1138 }
1139 }
1140 break;
1141 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT:
1142 {
1143 const auto *primitiveTopologyListRestartFeatures = reinterpret_cast<const VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *>(extensionCreateInfo);
1144 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(primitiveTopologyListRestartFeatures);
1145 if(!hasFeatures)
1146 {
1147 return VK_ERROR_FEATURE_NOT_PRESENT;
1148 }
1149 }
1150 break;
1151 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES:
1152 {
1153 const auto *descriptorIndexingFeatures = reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeatures *>(extensionCreateInfo);
1154 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(descriptorIndexingFeatures);
1155 if(!hasFeatures)
1156 {
1157 return VK_ERROR_FEATURE_NOT_PRESENT;
1158 }
1159 }
1160 break;
1161 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR:
1162 {
1163 const auto *globalPriorityQueryFeatures = reinterpret_cast<const VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR *>(extensionCreateInfo);
1164 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(globalPriorityQueryFeatures);
1165 if(!hasFeatures)
1166 {
1167 return VK_ERROR_FEATURE_NOT_PRESENT;
1168 }
1169 }
1170 break;
1171 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES:
1172 {
1173 const auto *protectedMemoryFeatures = reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures *>(extensionCreateInfo);
1174 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(protectedMemoryFeatures);
1175 if(!hasFeatures)
1176 {
1177 return VK_ERROR_FEATURE_NOT_PRESENT;
1178 }
1179 }
1180 break;
1181 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES:
1182 {
1183 const auto *bufferDeviceAddressFeatures = reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeatures *>(extensionCreateInfo);
1184 bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(bufferDeviceAddressFeatures);
1185 if(!hasFeatures)
1186 {
1187 return VK_ERROR_FEATURE_NOT_PRESENT;
1188 }
1189 }
1190 break;
1191 // These structs are supported, but no behavior changes based on their feature flags
1192 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES:
1193 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES:
1194 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES:
1195 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES:
1196 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES:
1197 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT:
1198 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES:
1199 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES:
1200 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES:
1201 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT:
1202 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT:
1203 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT:
1204 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT:
1205 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT:
1206 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT:
1207 break;
1208 default:
1209 		// "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extensions]"
1210 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
1211 break;
1212 }
1213
1214 extensionCreateInfo = extensionCreateInfo->pNext;
1215 }
1216
1217 ASSERT(pCreateInfo->queueCreateInfoCount > 0);
1218
1219 if(enabledFeatures)
1220 {
1221 if(!vk::Cast(physicalDevice)->hasFeatures(*enabledFeatures))
1222 {
1223 return VK_ERROR_FEATURE_NOT_PRESENT;
1224 }
1225 }
1226
1227 uint32_t queueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();
1228
1229 for(uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; i++)
1230 {
1231 const VkDeviceQueueCreateInfo &queueCreateInfo = pCreateInfo->pQueueCreateInfos[i];
1232 if(queueCreateInfo.flags != 0)
1233 {
1234 UNSUPPORTED("pCreateInfo->pQueueCreateInfos[%d]->flags 0x%08X", i, queueCreateInfo.flags);
1235 }
1236
1237 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(queueCreateInfo.pNext);
1238 while(extInfo)
1239 {
1240 switch(extInfo->sType)
1241 {
1242 case VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR:
1243 {
1244 const auto *globalPriorityCreateInfo = reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfoKHR *>(extInfo);
1245 if(!(vk::Cast(physicalDevice)->validateQueueGlobalPriority(globalPriorityCreateInfo->globalPriority)))
1246 {
1247 return VK_ERROR_INITIALIZATION_FAILED;
1248 }
1249 }
1250 break;
1251 default:
1252 UNSUPPORTED("pCreateInfo->pQueueCreateInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
1253 break;
1254 }
1255
1256 extInfo = extInfo->pNext;
1257 }
1258
1259 ASSERT(queueCreateInfo.queueFamilyIndex < queueFamilyPropertyCount);
1260 (void)queueFamilyPropertyCount; // Silence unused variable warning
1261 }
1262
1263 auto scheduler = getOrCreateScheduler();
1264 return vk::DispatchableDevice::Create(pAllocator, pCreateInfo, pDevice, vk::Cast(physicalDevice), enabledFeatures, scheduler);
1265 }
1266
1267 VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator)
1268 {
1269 TRACE("(VkDevice device = %p, const VkAllocationCallbacks* pAllocator = %p)", device, pAllocator);
1270
1271 vk::destroy(device, pAllocator);
1272 }
1273
1274 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties)
1275 {
1276 TRACE("(const char* pLayerName = %p, uint32_t* pPropertyCount = %p, VkExtensionProperties* pProperties = %p)",
1277 pLayerName, pPropertyCount, pProperties);
1278
1279 uint32_t extensionPropertiesCount = numInstanceSupportedExtensions();
1280
1281 if(!pProperties)
1282 {
1283 *pPropertyCount = extensionPropertiesCount;
1284 return VK_SUCCESS;
1285 }
1286
1287 auto toCopy = std::min(*pPropertyCount, extensionPropertiesCount);
1288 copyInstanceExtensions(pProperties, toCopy);
1289
1290 *pPropertyCount = toCopy;
1291 return (toCopy < extensionPropertiesCount) ? VK_INCOMPLETE : VK_SUCCESS;
1292 }
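// Illustrative sketch (not part of the driver): applications typically call this entry point
// with the standard two-call idiom, first querying the count and then fetching the array.
// The variable names below are hypothetical.
//
//   uint32_t count = 0;
//   vkEnumerateInstanceExtensionProperties(nullptr, &count, nullptr);       // query count only
//   std::vector<VkExtensionProperties> props(count);
//   vkEnumerateInstanceExtensionProperties(nullptr, &count, props.data());  // fill the array
//   // VK_INCOMPLETE is returned if the caller-provided count was smaller than the total.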
1293
1294 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties)
1295 {
1296 TRACE("(VkPhysicalDevice physicalDevice = %p, const char* pLayerName, uint32_t* pPropertyCount = %p, VkExtensionProperties* pProperties = %p)", physicalDevice, pPropertyCount, pProperties);
1297
1298 uint32_t extensionPropertiesCount = numDeviceSupportedExtensions();
1299
1300 if(!pProperties)
1301 {
1302 *pPropertyCount = extensionPropertiesCount;
1303 return VK_SUCCESS;
1304 }
1305
1306 auto toCopy = std::min(*pPropertyCount, extensionPropertiesCount);
1307 copyDeviceExtensions(pProperties, toCopy);
1308
1309 *pPropertyCount = toCopy;
1310 return (toCopy < extensionPropertiesCount) ? VK_INCOMPLETE : VK_SUCCESS;
1311 }
1312
1313 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pPropertyCount, VkLayerProperties *pProperties)
1314 {
1315 TRACE("(uint32_t* pPropertyCount = %p, VkLayerProperties* pProperties = %p)", pPropertyCount, pProperties);
1316
1317 if(!pProperties)
1318 {
1319 *pPropertyCount = 0;
1320 return VK_SUCCESS;
1321 }
1322
1323 return VK_SUCCESS;
1324 }
1325
1326 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkLayerProperties *pProperties)
1327 {
1328 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pPropertyCount = %p, VkLayerProperties* pProperties = %p)", physicalDevice, pPropertyCount, pProperties);
1329
1330 if(!pProperties)
1331 {
1332 *pPropertyCount = 0;
1333 return VK_SUCCESS;
1334 }
1335
1336 return VK_SUCCESS;
1337 }
1338
1339 VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue)
1340 {
1341 TRACE("(VkDevice device = %p, uint32_t queueFamilyIndex = %d, uint32_t queueIndex = %d, VkQueue* pQueue = %p)",
1342 device, queueFamilyIndex, queueIndex, pQueue);
1343
1344 *pQueue = vk::Cast(device)->getQueue(queueFamilyIndex, queueIndex);
1345 }
1346
1347 VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence)
1348 {
1349 TRACE("(VkQueue queue = %p, uint32_t submitCount = %d, const VkSubmitInfo* pSubmits = %p, VkFence fence = %p)",
1350 queue, submitCount, pSubmits, static_cast<void *>(fence));
1351
1352 return vk::Cast(queue)->submit(submitCount, vk::SubmitInfo::Allocate(submitCount, pSubmits), vk::Cast(fence));
1353 }
1354
1355 VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 *pSubmits, VkFence fence)
1356 {
1357 TRACE("(VkQueue queue = %p, uint32_t submitCount = %d, const VkSubmitInfo2* pSubmits = %p, VkFence fence = %p)",
1358 queue, submitCount, pSubmits, static_cast<void *>(fence));
1359
1360 return vk::Cast(queue)->submit(submitCount, vk::SubmitInfo::Allocate(submitCount, pSubmits), vk::Cast(fence));
1361 }
1362
1363 VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(VkQueue queue)
1364 {
1365 TRACE("(VkQueue queue = %p)", queue);
1366
1367 return vk::Cast(queue)->waitIdle();
1368 }
1369
1370 VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(VkDevice device)
1371 {
1372 TRACE("(VkDevice device = %p)", device);
1373
1374 return vk::Cast(device)->waitIdle();
1375 }
1376
1377 VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo, const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory)
1378 {
1379 TRACE("(VkDevice device = %p, const VkMemoryAllocateInfo* pAllocateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDeviceMemory* pMemory = %p)",
1380 device, pAllocateInfo, pAllocator, pMemory);
1381
1382 VkResult result = vk::DeviceMemory::Allocate(pAllocator, pAllocateInfo, pMemory, vk::Cast(device));
1383
1384 if(result != VK_SUCCESS)
1385 {
1386 vk::destroy(*pMemory, pAllocator);
1387 *pMemory = VK_NULL_HANDLE;
1388 }
1389
1390 return result;
1391 }
1392
1393 VKAPI_ATTR void VKAPI_CALL vkFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator)
1394 {
1395 TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, const VkAllocationCallbacks* pAllocator = %p)",
1396 device, static_cast<void *>(memory), pAllocator);
1397
1398 vk::destroy(memory, pAllocator);
1399 }
1400
1401 #if SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
1402 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR(VkDevice device, const VkMemoryGetFdInfoKHR *getFdInfo, int *pFd)
1403 {
1404 	TRACE("(VkDevice device = %p, const VkMemoryGetFdInfoKHR* getFdInfo = %p, int* pFd = %p)",
1405 device, getFdInfo, pFd);
1406
1407 if(getFdInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT)
1408 {
1409 UNSUPPORTED("pGetFdInfo->handleType %u", getFdInfo->handleType);
1410 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1411 }
1412 return vk::Cast(getFdInfo->memory)->exportFd(pFd);
1413 }
1414
1415 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR *pMemoryFdProperties)
1416 {
1417 TRACE("(VkDevice device = %p, VkExternalMemoryHandleTypeFlagBits handleType = %x, int fd = %d, VkMemoryFdPropertiesKHR* pMemoryFdProperties = %p)",
1418 device, handleType, fd, pMemoryFdProperties);
1419
1420 if(handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT)
1421 {
1422 UNSUPPORTED("handleType %u", handleType);
1423 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1424 }
1425
1426 if(fd < 0)
1427 {
1428 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1429 }
1430
1431 const VkPhysicalDeviceMemoryProperties &memoryProperties =
1432 vk::PhysicalDevice::GetMemoryProperties();
1433
1434 // All SwiftShader memory types support this!
1435 pMemoryFdProperties->memoryTypeBits = (1U << memoryProperties.memoryTypeCount) - 1U;
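	// For example, with memoryTypeCount == 1 this mask is 0x1; with 3 memory types it would be 0x7.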
1436
1437 return VK_SUCCESS;
1438 }
1439 #endif // SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
1440 #if VK_USE_PLATFORM_FUCHSIA
1441 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandleFUCHSIA(VkDevice device, const VkMemoryGetZirconHandleInfoFUCHSIA *pGetHandleInfo, zx_handle_t *pHandle)
1442 {
1443 	TRACE("(VkDevice device = %p, const VkMemoryGetZirconHandleInfoFUCHSIA* pGetHandleInfo = %p, zx_handle_t* pHandle = %p)",
1444 device, pGetHandleInfo, pHandle);
1445
1446 if(pGetHandleInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA)
1447 {
1448 UNSUPPORTED("pGetHandleInfo->handleType %u", pGetHandleInfo->handleType);
1449 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1450 }
1451 return vk::Cast(pGetHandleInfo->memory)->exportHandle(pHandle);
1452 }
1453
1454 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandlePropertiesFUCHSIA(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, zx_handle_t handle, VkMemoryZirconHandlePropertiesFUCHSIA *pMemoryZirconHandleProperties)
1455 {
1456 TRACE("(VkDevice device = %p, VkExternalMemoryHandleTypeFlagBits handleType = %x, zx_handle_t handle = %d, VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties = %p)",
1457 device, handleType, handle, pMemoryZirconHandleProperties);
1458
1459 if(handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA)
1460 {
1461 UNSUPPORTED("handleType %u", handleType);
1462 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1463 }
1464
1465 if(handle == ZX_HANDLE_INVALID)
1466 {
1467 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1468 }
1469
1470 const VkPhysicalDeviceMemoryProperties &memoryProperties =
1471 vk::PhysicalDevice::GetMemoryProperties();
1472
1473 // All SwiftShader memory types support this!
1474 pMemoryZirconHandleProperties->memoryTypeBits = (1U << memoryProperties.memoryTypeCount) - 1U;
1475
1476 return VK_SUCCESS;
1477 }
1478 #endif // VK_USE_PLATFORM_FUCHSIA
1479
1480 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void *pHostPointer, VkMemoryHostPointerPropertiesEXT *pMemoryHostPointerProperties)
1481 {
1482 TRACE("(VkDevice device = %p, VkExternalMemoryHandleTypeFlagBits handleType = %x, const void *pHostPointer = %p, VkMemoryHostPointerPropertiesEXT *pMemoryHostPointerProperties = %p)",
1483 device, handleType, pHostPointer, pMemoryHostPointerProperties);
1484
1485 if(handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT && handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT)
1486 {
1487 UNSUPPORTED("handleType %u", handleType);
1488 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1489 }
1490 pMemoryHostPointerProperties->memoryTypeBits = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
1491
1492 return VK_SUCCESS;
1493 }
1494
1495 #if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
1496 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo, struct AHardwareBuffer **pBuffer)
1497 {
1498 TRACE("(VkDevice device = %p, const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo = %p, struct AHardwareBuffer **pBuffer = %p)",
1499 device, pInfo, pBuffer);
1500
1501 return vk::Cast(pInfo->memory)->exportAndroidHardwareBuffer(pBuffer);
1502 }
1503
1504 VKAPI_ATTR VkResult VKAPI_CALL vkGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties)
1505 {
1506 TRACE("(VkDevice device = %p, const struct AHardwareBuffer *buffer = %p, VkAndroidHardwareBufferPropertiesANDROID *pProperties = %p)",
1507 device, buffer, pProperties);
1508
1509 return vk::DeviceMemory::GetAndroidHardwareBufferProperties(device, buffer, pProperties);
1510 }
1511 #endif // SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
1512
1513 VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void **ppData)
1514 {
1515 TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, VkDeviceSize offset = %d, VkDeviceSize size = %d, VkMemoryMapFlags flags = %d, void** ppData = %p)",
1516 device, static_cast<void *>(memory), int(offset), int(size), flags, ppData);
1517
1518 if(flags != 0)
1519 {
1520 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1521 UNSUPPORTED("flags 0x%08X", int(flags));
1522 }
1523
1524 return vk::Cast(memory)->map(offset, size, ppData);
1525 }
1526
1527 VKAPI_ATTR void VKAPI_CALL vkUnmapMemory(VkDevice device, VkDeviceMemory memory)
1528 {
1529 TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p)", device, static_cast<void *>(memory));
1530
1531 // Noop, memory will be released when the DeviceMemory object is released
1532 }
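// Illustrative sketch (not part of the driver): a typical application-side map/write/unmap
// sequence hitting the two entry points above. Names are hypothetical. Because SwiftShader
// backs device memory with host memory, the unmap is a no-op and the mapping persists until
// the VkDeviceMemory object is freed.
//
//   void *ptr = nullptr;
//   vkMapMemory(device, memory, 0, VK_WHOLE_SIZE, 0, &ptr);
//   memcpy(ptr, srcData, srcSize);
//   vkUnmapMemory(device, memory);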
1533
1534 VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
1535 {
1536 TRACE("(VkDevice device = %p, uint32_t memoryRangeCount = %d, const VkMappedMemoryRange* pMemoryRanges = %p)",
1537 device, memoryRangeCount, pMemoryRanges);
1538
1539 // Noop, host and device memory are the same to SwiftShader
1540
1541 return VK_SUCCESS;
1542 }
1543
1544 VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
1545 {
1546 TRACE("(VkDevice device = %p, uint32_t memoryRangeCount = %d, const VkMappedMemoryRange* pMemoryRanges = %p)",
1547 device, memoryRangeCount, pMemoryRanges);
1548
1549 // Noop, host and device memory are the same to SwiftShader
1550
1551 return VK_SUCCESS;
1552 }
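// Illustrative sketch (not part of the driver): portable applications still flush after CPU
// writes and invalidate before CPU reads when the memory type is not HOST_COHERENT, even
// though both calls are no-ops here. Names are hypothetical.
//
//   VkMappedMemoryRange range = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
//   range.memory = memory;
//   range.offset = 0;
//   range.size = VK_WHOLE_SIZE;
//   vkFlushMappedMemoryRanges(device, 1, &range);       // after writing through the mapping
//   vkInvalidateMappedMemoryRanges(device, 1, &range);  // before reading through the mapping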
1553
1554 VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment(VkDevice pDevice, VkDeviceMemory pMemory, VkDeviceSize *pCommittedMemoryInBytes)
1555 {
1556 TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, VkDeviceSize* pCommittedMemoryInBytes = %p)",
1557 pDevice, static_cast<void *>(pMemory), pCommittedMemoryInBytes);
1558
1559 auto *memory = vk::Cast(pMemory);
1560
1561 #if !defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)
1562 const auto &memoryProperties = vk::PhysicalDevice::GetMemoryProperties();
1563 uint32_t typeIndex = memory->getMemoryTypeIndex();
1564 ASSERT(typeIndex < memoryProperties.memoryTypeCount);
1565 ASSERT(memoryProperties.memoryTypes[typeIndex].propertyFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT);
1566 #endif
1567
1568 *pCommittedMemoryInBytes = memory->getCommittedMemoryInBytes();
1569 }
1570
1571 VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset)
1572 {
1573 TRACE("(VkDevice device = %p, VkBuffer buffer = %p, VkDeviceMemory memory = %p, VkDeviceSize memoryOffset = %d)",
1574 device, static_cast<void *>(buffer), static_cast<void *>(memory), int(memoryOffset));
1575
1576 if(!vk::Cast(buffer)->canBindToMemory(vk::Cast(memory)))
1577 {
1578 UNSUPPORTED("vkBindBufferMemory with invalid external memory");
1579 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1580 }
1581 vk::Cast(buffer)->bind(vk::Cast(memory), memoryOffset);
1582 return VK_SUCCESS;
1583 }
1584
1585 VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset)
1586 {
1587 TRACE("(VkDevice device = %p, VkImage image = %p, VkDeviceMemory memory = %p, VkDeviceSize memoryOffset = %d)",
1588 device, static_cast<void *>(image), static_cast<void *>(memory), int(memoryOffset));
1589
1590 if(!vk::Cast(image)->canBindToMemory(vk::Cast(memory)))
1591 {
1592 UNSUPPORTED("vkBindImageMemory with invalid external memory");
1593 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1594 }
1595 vk::Cast(image)->bind(vk::Cast(memory), memoryOffset);
1596 return VK_SUCCESS;
1597 }
1598
1599 VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements)
1600 {
1601 TRACE("(VkDevice device = %p, VkBuffer buffer = %p, VkMemoryRequirements* pMemoryRequirements = %p)",
1602 device, static_cast<void *>(buffer), pMemoryRequirements);
1603
1604 *pMemoryRequirements = vk::Cast(buffer)->getMemoryRequirements();
1605 }
1606
1607 VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements)
1608 {
1609 TRACE("(VkDevice device = %p, VkImage image = %p, VkMemoryRequirements* pMemoryRequirements = %p)",
1610 device, static_cast<void *>(image), pMemoryRequirements);
1611
1612 *pMemoryRequirements = vk::Cast(image)->getMemoryRequirements();
1613 }
1614
1615 VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
1616 {
1617 TRACE("(VkDevice device = %p, VkImage image = %p, uint32_t* pSparseMemoryRequirementCount = %p, VkSparseImageMemoryRequirements* pSparseMemoryRequirements = %p)",
1618 device, static_cast<void *>(image), pSparseMemoryRequirementCount, pSparseMemoryRequirements);
1619
1620 // The 'sparseBinding' feature is not supported, so images can not be created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag.
1621 // "If the image was not created with VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT then pSparseMemoryRequirementCount will be set to zero and pSparseMemoryRequirements will not be written to."
1622 *pSparseMemoryRequirementCount = 0;
1623 }
1624
1625 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t *pPropertyCount, VkSparseImageFormatProperties *pProperties)
1626 {
1627 TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkImageType type = %d, VkSampleCountFlagBits samples = %d, VkImageUsageFlags usage = %d, VkImageTiling tiling = %d, uint32_t* pPropertyCount = %p, VkSparseImageFormatProperties* pProperties = %p)",
1628 physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
1629
1630 // We do not support sparse images.
1631 *pPropertyCount = 0;
1632 }
1633
1634 VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo, VkFence fence)
1635 {
1636 TRACE("()");
1637 UNSUPPORTED("vkQueueBindSparse");
1638 return VK_SUCCESS;
1639 }
1640
1641 VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkFence *pFence)
1642 {
1643 TRACE("(VkDevice device = %p, const VkFenceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkFence* pFence = %p)",
1644 device, pCreateInfo, pAllocator, pFence);
1645
1646 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1647 while(nextInfo)
1648 {
1649 switch(nextInfo->sType)
1650 {
1651 case VK_STRUCTURE_TYPE_MAX_ENUM:
1652 // dEQP tests that this value is ignored.
1653 break;
1654 default:
1655 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
1656 break;
1657 }
1658 nextInfo = nextInfo->pNext;
1659 }
1660
1661 return vk::Fence::Create(pAllocator, pCreateInfo, pFence);
1662 }
1663
1664 VKAPI_ATTR void VKAPI_CALL vkDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator)
1665 {
1666 TRACE("(VkDevice device = %p, VkFence fence = %p, const VkAllocationCallbacks* pAllocator = %p)",
1667 device, static_cast<void *>(fence), pAllocator);
1668
1669 vk::destroy(fence, pAllocator);
1670 }
1671
1672 VKAPI_ATTR VkResult VKAPI_CALL vkResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences)
1673 {
1674 TRACE("(VkDevice device = %p, uint32_t fenceCount = %d, const VkFence* pFences = %p)",
1675 device, fenceCount, pFences);
1676
1677 for(uint32_t i = 0; i < fenceCount; i++)
1678 {
1679 vk::Cast(pFences[i])->reset();
1680 }
1681
1682 return VK_SUCCESS;
1683 }
1684
1685 VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus(VkDevice device, VkFence fence)
1686 {
1687 TRACE("(VkDevice device = %p, VkFence fence = %p)", device, static_cast<void *>(fence));
1688
1689 return vk::Cast(fence)->getStatus();
1690 }
1691
1692 VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll, uint64_t timeout)
1693 {
1694 TRACE("(VkDevice device = %p, uint32_t fenceCount = %d, const VkFence* pFences = %p, VkBool32 waitAll = %d, uint64_t timeout = %" PRIu64 ")",
1695 device, int(fenceCount), pFences, int(waitAll), timeout);
1696
1697 return vk::Cast(device)->waitForFences(fenceCount, pFences, waitAll, timeout);
1698 }
1699
1700 VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore)
1701 {
1702 TRACE("(VkDevice device = %p, const VkSemaphoreCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSemaphore* pSemaphore = %p)",
1703 device, pCreateInfo, pAllocator, pSemaphore);
1704
1705 if(pCreateInfo->flags != 0)
1706 {
1707 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1708 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
1709 }
1710
1711 VkSemaphoreType type = VK_SEMAPHORE_TYPE_BINARY;
1712 for(const auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1713 nextInfo != nullptr; nextInfo = nextInfo->pNext)
1714 {
1715 switch(nextInfo->sType)
1716 {
1717 case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO:
1718 // Let the semaphore constructor handle this
1719 break;
1720 case VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO:
1721 {
1722 const VkSemaphoreTypeCreateInfo *info = reinterpret_cast<const VkSemaphoreTypeCreateInfo *>(nextInfo);
1723 type = info->semaphoreType;
1724 }
1725 break;
1726 default:
1727 WARN("nextInfo->sType = %s", vk::Stringify(nextInfo->sType).c_str());
1728 break;
1729 }
1730 }
1731
1732 if(type == VK_SEMAPHORE_TYPE_BINARY)
1733 {
1734 return vk::BinarySemaphore::Create(pAllocator, pCreateInfo, pSemaphore, pAllocator);
1735 }
1736 else
1737 {
1738 return vk::TimelineSemaphore::Create(pAllocator, pCreateInfo, pSemaphore, pAllocator);
1739 }
1740 }
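// Illustrative sketch (not part of the driver): chaining VkSemaphoreTypeCreateInfo selects the
// timeline path above; omitting it yields a binary semaphore. Names are hypothetical.
//
//   VkSemaphoreTypeCreateInfo typeInfo = { VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO };
//   typeInfo.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE;
//   typeInfo.initialValue = 0;
//   VkSemaphoreCreateInfo createInfo = { VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO };
//   createInfo.pNext = &typeInfo;
//   VkSemaphore semaphore = VK_NULL_HANDLE;
//   vkCreateSemaphore(device, &createInfo, nullptr, &semaphore);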
1741
1742 VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator)
1743 {
1744 TRACE("(VkDevice device = %p, VkSemaphore semaphore = %p, const VkAllocationCallbacks* pAllocator = %p)",
1745 device, static_cast<void *>(semaphore), pAllocator);
1746
1747 vk::destroy(semaphore, pAllocator);
1748 }
1749
1750 #if SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
1751 VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd)
1752 {
1753 TRACE("(VkDevice device = %p, const VkSemaphoreGetFdInfoKHR* pGetFdInfo = %p, int* pFd = %p)",
1754 device, static_cast<const void *>(pGetFdInfo), static_cast<void *>(pFd));
1755
1756 if(pGetFdInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT)
1757 {
1758 UNSUPPORTED("pGetFdInfo->handleType %d", int(pGetFdInfo->handleType));
1759 }
1760
1761 auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pGetFdInfo->semaphore);
1762 ASSERT(sem != nullptr);
1763 return sem->exportFd(pFd);
1764 }
1765
1766 VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR *pImportSemaphoreInfo)
1767 {
1768 	TRACE("(VkDevice device = %p, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreInfo = %p)",
1769 device, static_cast<const void *>(pImportSemaphoreInfo));
1770
1771 if(pImportSemaphoreInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT)
1772 {
1773 UNSUPPORTED("pImportSemaphoreInfo->handleType %d", int(pImportSemaphoreInfo->handleType));
1774 }
1775 bool temporaryImport = (pImportSemaphoreInfo->flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) != 0;
1776
1777 auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pImportSemaphoreInfo->semaphore);
1778 ASSERT(sem != nullptr);
1779 return sem->importFd(pImportSemaphoreInfo->fd, temporaryImport);
1780 }
1781 #endif // SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
1782
1783 #if VK_USE_PLATFORM_FUCHSIA
1784 VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreZirconHandleFUCHSIA(
1785 VkDevice device,
1786 const VkImportSemaphoreZirconHandleInfoFUCHSIA *pImportSemaphoreZirconHandleInfo)
1787 {
1788 TRACE("(VkDevice device = %p, const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo = %p)",
1789 device, pImportSemaphoreZirconHandleInfo);
1790
1791 if(pImportSemaphoreZirconHandleInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA)
1792 {
1793 UNSUPPORTED("pImportSemaphoreZirconHandleInfo->handleType %d", int(pImportSemaphoreZirconHandleInfo->handleType));
1794 }
1795 bool temporaryImport = (pImportSemaphoreZirconHandleInfo->flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) != 0;
1796 auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pImportSemaphoreZirconHandleInfo->semaphore);
1797 ASSERT(sem != nullptr);
1798 return sem->importHandle(pImportSemaphoreZirconHandleInfo->zirconHandle, temporaryImport);
1799 }
1800
1801 VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreZirconHandleFUCHSIA(
1802 VkDevice device,
1803 const VkSemaphoreGetZirconHandleInfoFUCHSIA *pGetZirconHandleInfo,
1804 zx_handle_t *pZirconHandle)
1805 {
1806 TRACE("(VkDevice device = %p, const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo = %p, zx_handle_t* pZirconHandle = %p)",
1807 device, static_cast<const void *>(pGetZirconHandleInfo), static_cast<void *>(pZirconHandle));
1808
1809 if(pGetZirconHandleInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA)
1810 {
1811 UNSUPPORTED("pGetZirconHandleInfo->handleType %d", int(pGetZirconHandleInfo->handleType));
1812 }
1813
1814 auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pGetZirconHandleInfo->semaphore);
1815 ASSERT(sem != nullptr);
1816 return sem->exportHandle(pZirconHandle);
1817 }
1818 #endif // VK_USE_PLATFORM_FUCHSIA
1819
1820 VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue)
1821 {
1822 TRACE("(VkDevice device = %p, VkSemaphore semaphore = %p, uint64_t* pValue = %p)",
1823 device, static_cast<void *>(semaphore), pValue);
1824 *pValue = vk::DynamicCast<vk::TimelineSemaphore>(semaphore)->getCounterValue();
1825 return VK_SUCCESS;
1826 }
1827
1828 VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphore(VkDevice device, const VkSemaphoreSignalInfo *pSignalInfo)
1829 {
1830 TRACE("(VkDevice device = %p, const VkSemaphoreSignalInfo *pSignalInfo = %p)",
1831 device, pSignalInfo);
1832 vk::DynamicCast<vk::TimelineSemaphore>(pSignalInfo->semaphore)->signal(pSignalInfo->value);
1833 return VK_SUCCESS;
1834 }
1835
1836 VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout)
1837 {
1838 TRACE("(VkDevice device = %p, const VkSemaphoreWaitInfo *pWaitInfo = %p, uint64_t timeout = %" PRIu64 ")",
1839 device, pWaitInfo, timeout);
1840 return vk::Cast(device)->waitForSemaphores(pWaitInfo, timeout);
1841 }
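// Illustrative sketch (not part of the driver): waiting for a timeline value through the
// structure consumed above, continuing the hypothetical semaphore from the earlier sketch.
//
//   uint64_t waitValue = 1;
//   VkSemaphoreWaitInfo waitInfo = { VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO };
//   waitInfo.semaphoreCount = 1;
//   waitInfo.pSemaphores = &semaphore;
//   waitInfo.pValues = &waitValue;
//   vkWaitSemaphores(device, &waitInfo, UINT64_MAX);  // block until the timeline reaches 1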
1842
1843 VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkEvent *pEvent)
1844 {
1845 TRACE("(VkDevice device = %p, const VkEventCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkEvent* pEvent = %p)",
1846 device, pCreateInfo, pAllocator, pEvent);
1847
1848 // VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR is provided by VK_KHR_synchronization2
1849 if((pCreateInfo->flags != 0) && (pCreateInfo->flags != VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR))
1850 {
1851 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
1852 }
1853
1854 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1855 while(extInfo)
1856 {
1857 // Vulkan 1.2: "pNext must be NULL"
1858 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1859 extInfo = extInfo->pNext;
1860 }
1861
1862 return vk::Event::Create(pAllocator, pCreateInfo, pEvent);
1863 }
1864
1865 VKAPI_ATTR void VKAPI_CALL vkDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator)
1866 {
1867 TRACE("(VkDevice device = %p, VkEvent event = %p, const VkAllocationCallbacks* pAllocator = %p)",
1868 device, static_cast<void *>(event), pAllocator);
1869
1870 vk::destroy(event, pAllocator);
1871 }
1872
1873 VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus(VkDevice device, VkEvent event)
1874 {
1875 TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));
1876
1877 return vk::Cast(event)->getStatus();
1878 }
1879
1880 VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent(VkDevice device, VkEvent event)
1881 {
1882 TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));
1883
1884 vk::Cast(event)->signal();
1885
1886 return VK_SUCCESS;
1887 }
1888
1889 VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent(VkDevice device, VkEvent event)
1890 {
1891 TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));
1892
1893 vk::Cast(event)->reset();
1894
1895 return VK_SUCCESS;
1896 }
1897
1898 VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool)
1899 {
1900 TRACE("(VkDevice device = %p, const VkQueryPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkQueryPool* pQueryPool = %p)",
1901 device, pCreateInfo, pAllocator, pQueryPool);
1902
1903 if(pCreateInfo->flags != 0)
1904 {
1905 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1906 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
1907 }
1908
1909 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1910 while(extInfo)
1911 {
1912 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1913 extInfo = extInfo->pNext;
1914 }
1915
1916 return vk::QueryPool::Create(pAllocator, pCreateInfo, pQueryPool);
1917 }
1918
1919 VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator)
1920 {
1921 TRACE("(VkDevice device = %p, VkQueryPool queryPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
1922 device, static_cast<void *>(queryPool), pAllocator);
1923
1924 vk::destroy(queryPool, pAllocator);
1925 }
1926
1927 VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void *pData, VkDeviceSize stride, VkQueryResultFlags flags)
1928 {
1929 TRACE("(VkDevice device = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d, size_t dataSize = %d, void* pData = %p, VkDeviceSize stride = %d, VkQueryResultFlags flags = %d)",
1930 device, static_cast<void *>(queryPool), int(firstQuery), int(queryCount), int(dataSize), pData, int(stride), flags);
1931
1932 return vk::Cast(queryPool)->getResults(firstQuery, queryCount, dataSize, pData, stride, flags);
1933 }
1934
1935 VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer)
1936 {
1937 TRACE("(VkDevice device = %p, const VkBufferCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkBuffer* pBuffer = %p)",
1938 device, pCreateInfo, pAllocator, pBuffer);
1939
1940 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1941 while(nextInfo)
1942 {
1943 switch(nextInfo->sType)
1944 {
1945 case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO:
1946 // Do nothing. Should be handled by vk::Buffer::Create().
1947 break;
1948 case VK_STRUCTURE_TYPE_MAX_ENUM:
1949 // dEQP tests that this value is ignored.
1950 break;
1951 default:
1952 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
1953 break;
1954 }
1955 nextInfo = nextInfo->pNext;
1956 }
1957
1958 return vk::Buffer::Create(pAllocator, pCreateInfo, pBuffer);
1959 }
1960
1961 VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator)
1962 {
1963 TRACE("(VkDevice device = %p, VkBuffer buffer = %p, const VkAllocationCallbacks* pAllocator = %p)",
1964 device, static_cast<void *>(buffer), pAllocator);
1965
1966 vk::destroy(buffer, pAllocator);
1967 }
1968
1969 VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferDeviceAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo)
1970 {
1971 TRACE("(VkDevice device = %p, const VkBufferDeviceAddressInfo* pInfo = %p)",
1972 device, pInfo);
1973
1974 // This function must return VkBufferDeviceAddressCreateInfoEXT::deviceAddress if provided
1975 ASSERT(!vk::Cast(device)->hasExtension(VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME));
1976
1977 return vk::Cast(pInfo->buffer)->getOpaqueCaptureAddress();
1978 }
1979
1980 VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo)
1981 {
1982 TRACE("(VkDevice device = %p, const VkBufferDeviceAddressInfo* pInfo = %p)",
1983 device, pInfo);
1984
1985 return vk::Cast(pInfo->buffer)->getOpaqueCaptureAddress();
1986 }
1987
1988 VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddress(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo *pInfo)
1989 {
1990 TRACE("(VkDevice device = %p, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo = %p)",
1991 device, pInfo);
1992
1993 return vk::Cast(pInfo->memory)->getOpaqueCaptureAddress();
1994 }
1995
1996 VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBufferView *pView)
1997 {
1998 TRACE("(VkDevice device = %p, const VkBufferViewCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkBufferView* pView = %p)",
1999 device, pCreateInfo, pAllocator, pView);
2000
2001 if(pCreateInfo->flags != 0)
2002 {
2003 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2004 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
2005 }
2006
2007 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2008 while(extInfo)
2009 {
2010 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2011 extInfo = extInfo->pNext;
2012 }
2013
2014 return vk::BufferView::Create(pAllocator, pCreateInfo, pView);
2015 }
2016
2017 VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator)
2018 {
2019 TRACE("(VkDevice device = %p, VkBufferView bufferView = %p, const VkAllocationCallbacks* pAllocator = %p)",
2020 device, static_cast<void *>(bufferView), pAllocator);
2021
2022 vk::destroy(bufferView, pAllocator);
2023 }
2024
2025 VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkImage *pImage)
2026 {
2027 TRACE("(VkDevice device = %p, const VkImageCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkImage* pImage = %p)",
2028 device, pCreateInfo, pAllocator, pImage);
2029
2030 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2031
2032 #ifdef __ANDROID__
2033 vk::BackingMemory backmem;
2034 bool swapchainImage = false;
2035 #endif
2036
2037 while(extensionCreateInfo)
2038 {
2039 // Casting to an int since some structures, such as VK_STRUCTURE_TYPE_SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID and
2040 // VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID, are not enumerated in the official Vulkan headers.
2041 switch((int)(extensionCreateInfo->sType))
2042 {
2043 #ifdef __ANDROID__
2044 case VK_STRUCTURE_TYPE_SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID:
2045 {
2046 const VkSwapchainImageCreateInfoANDROID *swapImageCreateInfo = reinterpret_cast<const VkSwapchainImageCreateInfoANDROID *>(extensionCreateInfo);
2047 backmem.androidUsage = swapImageCreateInfo->usage;
2048 }
2049 break;
2050 case VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID:
2051 {
2052 const VkNativeBufferANDROID *nativeBufferInfo = reinterpret_cast<const VkNativeBufferANDROID *>(extensionCreateInfo);
2053 backmem.nativeBufferInfo = *nativeBufferInfo;
2054 backmem.nativeBufferInfo.pNext = nullptr;
2055 swapchainImage = true;
2056 }
2057 break;
2058 case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
2059 break;
2060 case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID:
2061 // Do nothing. Should be handled by vk::Image::Create()
2062 break;
2063 #endif
2064 case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
2065 // Do nothing. Should be handled by vk::Image::Create()
2066 break;
2067 case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
2068 /* Do nothing. We don't actually need the swapchain handle yet; we'll do all the work in vkBindImageMemory2. */
2069 break;
2070 case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
2071 // Do nothing. This extension tells the driver which image formats will be used
2072 		// by the application. SwiftShader is not impacted by lacking this information,
2073 // so we don't need to track the format list.
2074 break;
2075 case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
2076 {
2077 // SwiftShader does not use an image's usage info for non-debug purposes outside of
2078 // vkGetPhysicalDeviceImageFormatProperties2. This also applies to separate stencil usage.
2079 const VkImageStencilUsageCreateInfo *stencilUsageInfo = reinterpret_cast<const VkImageStencilUsageCreateInfo *>(extensionCreateInfo);
2080 (void)stencilUsageInfo->stencilUsage;
2081 }
2082 break;
2083 case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT:
2084 {
2085 // Explicitly ignored, since VK_EXT_image_drm_format_modifier is not supported
2086 ASSERT(!hasDeviceExtension(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME));
2087 }
2088 break;
2089 case VK_STRUCTURE_TYPE_MAX_ENUM:
2090 // dEQP tests that this value is ignored.
2091 break;
2092 default:
2093 		// "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extensions]"
2094 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
2095 break;
2096 }
2097
2098 extensionCreateInfo = extensionCreateInfo->pNext;
2099 }
2100
2101 VkResult result = vk::Image::Create(pAllocator, pCreateInfo, pImage, vk::Cast(device));
2102
2103 #ifdef __ANDROID__
2104 if(swapchainImage)
2105 {
2106 if(result != VK_SUCCESS)
2107 {
2108 return result;
2109 }
2110
2111 vk::Image *image = vk::Cast(*pImage);
2112 VkMemoryRequirements memRequirements = image->getMemoryRequirements();
2113
2114 VkMemoryAllocateInfo allocInfo = {};
2115 allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
2116 allocInfo.allocationSize = memRequirements.size;
2117 allocInfo.memoryTypeIndex = 0;
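		// memoryTypeIndex 0 is assumed valid here because SwiftShader advertises a single
		// host-visible, host-coherent memory type; see vk::PhysicalDevice::GetMemoryProperties().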
2118
2119 VkDeviceMemory devmem = { VK_NULL_HANDLE };
2120 result = vkAllocateMemory(device, &allocInfo, pAllocator, &devmem);
2121 if(result != VK_SUCCESS)
2122 {
2123 return result;
2124 }
2125
2126 vkBindImageMemory(device, *pImage, devmem, 0);
2127 backmem.externalMemory = true;
2128
2129 image->setBackingMemory(backmem);
2130 }
2131 #endif
2132
2133 return result;
2134 }
2135
2136 VKAPI_ATTR void VKAPI_CALL vkDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator)
2137 {
2138 TRACE("(VkDevice device = %p, VkImage image = %p, const VkAllocationCallbacks* pAllocator = %p)",
2139 device, static_cast<void *>(image), pAllocator);
2140
2141 #ifdef __ANDROID__
2142 vk::Image *img = vk::Cast(image);
2143 if(img && img->hasExternalMemory())
2144 {
2145 vk::destroy(img->getExternalMemory(), pAllocator);
2146 }
2147 #endif
2148
2149 vk::destroy(image, pAllocator);
2150 }
2151
2152 VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource, VkSubresourceLayout *pLayout)
2153 {
2154 TRACE("(VkDevice device = %p, VkImage image = %p, const VkImageSubresource* pSubresource = %p, VkSubresourceLayout* pLayout = %p)",
2155 device, static_cast<void *>(image), pSubresource, pLayout);
2156
2157 vk::Cast(image)->getSubresourceLayout(pSubresource, pLayout);
2158 }
2159
2160 VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT(VkDevice device, VkImage image, const VkImageSubresource2KHR *pSubresource, VkSubresourceLayout2KHR *pLayout)
2161 {
2162 TRACE("(VkDevice device = %p, VkImage image = %p, const VkImageSubresource2KHR* pSubresource = %p, VkSubresourceLayout2KHR* pLayout = %p)",
2163 device, static_cast<void *>(image), pSubresource, pLayout);
2164
2165 	// If tiling is OPTIMAL, this doesn't need to be done, but it's harmless since SwiftShader
2166 	// treats LINEAR and OPTIMAL tiling identically.
2167 vk::Cast(image)->getSubresourceLayout(&pSubresource->imageSubresource, &pLayout->subresourceLayout);
2168
2169 VkBaseOutStructure *extInfo = reinterpret_cast<VkBaseOutStructure *>(pLayout->pNext);
2170 while(extInfo)
2171 {
2172 switch(extInfo->sType)
2173 {
2174 case VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT:
2175 {
2176 // Since the subresource layout is filled above already, get the size out of
2177 // that.
2178 VkSubresourceHostMemcpySizeEXT *hostMemcpySize = reinterpret_cast<VkSubresourceHostMemcpySizeEXT *>(extInfo);
2179 hostMemcpySize->size = pLayout->subresourceLayout.size;
2180 break;
2181 }
2182 default:
2183 UNSUPPORTED("pLayout->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2184 break;
2185 }
2186
2187 extInfo = extInfo->pNext;
2188 }
2189 }
2190
2191 VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkImageView *pView)
2192 {
2193 TRACE("(VkDevice device = %p, const VkImageViewCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkImageView* pView = %p)",
2194 device, pCreateInfo, pAllocator, pView);
2195
2196 if(pCreateInfo->flags != 0)
2197 {
2198 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
2199 }
2200
2201 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2202 const vk::SamplerYcbcrConversion *ycbcrConversion = nullptr;
2203
2204 while(extensionCreateInfo)
2205 {
2206 switch(extensionCreateInfo->sType)
2207 {
2208 case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO:
2209 {
2210 const VkImageViewUsageCreateInfo *multiviewCreateInfo = reinterpret_cast<const VkImageViewUsageCreateInfo *>(extensionCreateInfo);
2211 ASSERT(!(~vk::Cast(pCreateInfo->image)->getUsage() & multiviewCreateInfo->usage));
2212 }
2213 break;
2214 case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
2215 {
2216 const VkSamplerYcbcrConversionInfo *samplerYcbcrConversionInfo = reinterpret_cast<const VkSamplerYcbcrConversionInfo *>(extensionCreateInfo);
2217 ycbcrConversion = vk::Cast(samplerYcbcrConversionInfo->conversion);
2218
2219 if(ycbcrConversion)
2220 {
2221 ASSERT((pCreateInfo->components.r == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.r == VK_COMPONENT_SWIZZLE_R) &&
2222 (pCreateInfo->components.g == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.g == VK_COMPONENT_SWIZZLE_G) &&
2223 (pCreateInfo->components.b == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.b == VK_COMPONENT_SWIZZLE_B) &&
2224 (pCreateInfo->components.a == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.a == VK_COMPONENT_SWIZZLE_A));
2225 }
2226 }
2227 break;
2228 case VK_STRUCTURE_TYPE_MAX_ENUM:
2229 // dEQP tests that this value is ignored.
2230 break;
2231 case VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT:
2232 // TODO(b/218318109): Part of the VK_EXT_image_view_min_lod extension, which we don't support.
2233 // Remove when https://gitlab.khronos.org/Tracker/vk-gl-cts/-/issues/3094#note_348979 has been fixed.
2234 break;
2235 default:
2236 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
2237 break;
2238 }
2239
2240 extensionCreateInfo = extensionCreateInfo->pNext;
2241 }
2242
2243 VkResult result = vk::ImageView::Create(pAllocator, pCreateInfo, pView, ycbcrConversion);
2244 if(result == VK_SUCCESS)
2245 {
2246 vk::Cast(device)->registerImageView(vk::Cast(*pView));
2247 }
2248
2249 return result;
2250 }
2251
2252 VKAPI_ATTR void VKAPI_CALL vkDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator)
2253 {
2254 TRACE("(VkDevice device = %p, VkImageView imageView = %p, const VkAllocationCallbacks* pAllocator = %p)",
2255 device, static_cast<void *>(imageView), pAllocator);
2256
2257 vk::Cast(device)->unregisterImageView(vk::Cast(imageView));
2258 vk::destroy(imageView, pAllocator);
2259 }
2260
2261 VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule)
2262 {
2263 TRACE("(VkDevice device = %p, const VkShaderModuleCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkShaderModule* pShaderModule = %p)",
2264 device, pCreateInfo, pAllocator, pShaderModule);
2265
2266 if(pCreateInfo->flags != 0)
2267 {
2268 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2269 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
2270 }
2271
2272 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2273 while(nextInfo)
2274 {
2275 switch(nextInfo->sType)
2276 {
2277 case VK_STRUCTURE_TYPE_MAX_ENUM:
2278 // dEQP tests that this value is ignored.
2279 break;
2280 default:
2281 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2282 break;
2283 }
2284 nextInfo = nextInfo->pNext;
2285 }
2286
2287 return vk::ShaderModule::Create(pAllocator, pCreateInfo, pShaderModule);
2288 }
2289
2290 VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks *pAllocator)
2291 {
2292 TRACE("(VkDevice device = %p, VkShaderModule shaderModule = %p, const VkAllocationCallbacks* pAllocator = %p)",
2293 device, static_cast<void *>(shaderModule), pAllocator);
2294
2295 vk::destroy(shaderModule, pAllocator);
2296 }
2297
2298 VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPipelineCache *pPipelineCache)
2299 {
2300 TRACE("(VkDevice device = %p, const VkPipelineCacheCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipelineCache* pPipelineCache = %p)",
2301 device, pCreateInfo, pAllocator, pPipelineCache);
2302
2303 if(pCreateInfo->flags != 0 && pCreateInfo->flags != VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT)
2304 {
2305 // Flags must be 0 or VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT.
2306 // VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT: When set, the implementation may skip any
2307 // unnecessary processing needed to support simultaneous modification from multiple threads where allowed.
2308 // TODO(b/246369329): Optimize PipelineCache objects when VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT is used.
2309 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
2310 }
2311
2312 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2313 while(extInfo)
2314 {
2315 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2316 extInfo = extInfo->pNext;
2317 }
2318
2319 return vk::PipelineCache::Create(pAllocator, pCreateInfo, pPipelineCache);
2320 }
2321
2322 VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks *pAllocator)
2323 {
2324 TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, const VkAllocationCallbacks* pAllocator = %p)",
2325 device, static_cast<void *>(pipelineCache), pAllocator);
2326
2327 vk::destroy(pipelineCache, pAllocator);
2328 }
2329
2330 VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize, void *pData)
2331 {
2332 TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, size_t* pDataSize = %p, void* pData = %p)",
2333 device, static_cast<void *>(pipelineCache), pDataSize, pData);
2334
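// getData() follows the standard Vulkan two-call idiom: when pData is null it writes the
// required size to *pDataSize, otherwise it copies up to *pDataSize bytes of cache data.
// Illustrative application-side usage (not part of this file):
//   size_t size = 0;
//   vkGetPipelineCacheData(device, cache, &size, nullptr);       // query size
//   std::vector<uint8_t> blob(size);
//   vkGetPipelineCacheData(device, cache, &size, blob.data());   // fetch data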
2335 return vk::Cast(pipelineCache)->getData(pDataSize, pData);
2336 }
2337
2338 VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache *pSrcCaches)
2339 {
2340 TRACE("(VkDevice device = %p, VkPipelineCache dstCache = %p, uint32_t srcCacheCount = %d, const VkPipelineCache* pSrcCaches = %p)",
2341 device, static_cast<void *>(dstCache), int(srcCacheCount), pSrcCaches);
2342
2343 return vk::Cast(dstCache)->merge(srcCacheCount, pSrcCaches);
2344 }
2345
2346 VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines)
2347 {
2348 TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, uint32_t createInfoCount = %d, const VkGraphicsPipelineCreateInfo* pCreateInfos = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipeline* pPipelines = %p)",
2349 device, static_cast<void *>(pipelineCache), int(createInfoCount), pCreateInfos, pAllocator, pPipelines);
2350
2351 memset(pPipelines, 0, sizeof(void *) * createInfoCount);
2352
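// Creation is a two-phase process: GraphicsPipeline::Create() constructs the pipeline object,
// and compileShaders() then builds its shaders against the optional pipeline cache. If either
// phase fails, the partially created pipeline is destroyed and its output slot is left as
// VK_NULL_HANDLE, as required by the spec (see the comment below).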
2353 VkResult errorResult = VK_SUCCESS;
2354 for(uint32_t i = 0; i < createInfoCount; i++)
2355 {
2356 VkResult result = vk::GraphicsPipeline::Create(pAllocator, &pCreateInfos[i], &pPipelines[i], vk::Cast(device));
2357
2358 if(result == VK_SUCCESS)
2359 {
2360 result = static_cast<vk::GraphicsPipeline *>(vk::Cast(pPipelines[i]))->compileShaders(pAllocator, &pCreateInfos[i], vk::Cast(pipelineCache));
2361 if(result != VK_SUCCESS)
2362 {
2363 vk::destroy(pPipelines[i], pAllocator);
2364 }
2365 }
2366
2367 if(result != VK_SUCCESS)
2368 {
2369 // According to the Vulkan spec, section 9.4. Multiple Pipeline Creation
2370 // "When an application attempts to create many pipelines in a single command,
2371 // it is possible that some subset may fail creation. In that case, the
2372 // corresponding entries in the pPipelines output array will be filled with
2373 // VK_NULL_HANDLE values. If any pipeline fails creation (for example, due to
2374 // out of memory errors), the vkCreate*Pipelines commands will return an
2375 // error code. The implementation will attempt to create all pipelines, and
2376 // only return VK_NULL_HANDLE values for those that actually failed."
2377 pPipelines[i] = VK_NULL_HANDLE;
2378 errorResult = result;
2379
2380 // VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT specifies that control
2381 // will be returned to the application on failure of the corresponding pipeline
2382 // rather than continuing to create additional pipelines.
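// Illustrative application-side usage of the early-return behavior (assumes the
// VK_EXT_pipeline_creation_cache_control feature is enabled; not part of this file):
//   VkGraphicsPipelineCreateInfo info = /* ... */;
//   info.flags |= VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT;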
2383 if(pCreateInfos[i].flags & VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT)
2384 {
2385 return errorResult;
2386 }
2387 }
2388 }
2389
2390 return errorResult;
2391 }
2392
2393 VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines)
2394 {
2395 TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, uint32_t createInfoCount = %d, const VkComputePipelineCreateInfo* pCreateInfos = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipeline* pPipelines = %p)",
2396 device, static_cast<void *>(pipelineCache), int(createInfoCount), pCreateInfos, pAllocator, pPipelines);
2397
2398 memset(pPipelines, 0, sizeof(void *) * createInfoCount);
2399
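// Compute pipelines follow the same two-phase Create()/compileShaders() sequence and
// spec-mandated error handling as vkCreateGraphicsPipelines above.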
2400 VkResult errorResult = VK_SUCCESS;
2401 for(uint32_t i = 0; i < createInfoCount; i++)
2402 {
2403 VkResult result = vk::ComputePipeline::Create(pAllocator, &pCreateInfos[i], &pPipelines[i], vk::Cast(device));
2404
2405 if(result == VK_SUCCESS)
2406 {
2407 result = static_cast<vk::ComputePipeline *>(vk::Cast(pPipelines[i]))->compileShaders(pAllocator, &pCreateInfos[i], vk::Cast(pipelineCache));
2408 if(result != VK_SUCCESS)
2409 {
2410 vk::destroy(pPipelines[i], pAllocator);
2411 }
2412 }
2413
2414 if(result != VK_SUCCESS)
2415 {
2416 // According to the Vulkan spec, section 9.4. Multiple Pipeline Creation
2417 // "When an application attempts to create many pipelines in a single command,
2418 // it is possible that some subset may fail creation. In that case, the
2419 // corresponding entries in the pPipelines output array will be filled with
2420 // VK_NULL_HANDLE values. If any pipeline fails creation (for example, due to
2421 // out of memory errors), the vkCreate*Pipelines commands will return an
2422 // error code. The implementation will attempt to create all pipelines, and
2423 // only return VK_NULL_HANDLE values for those that actually failed."
2424 pPipelines[i] = VK_NULL_HANDLE;
2425 errorResult = result;
2426
2427 // VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT specifies that control
2428 // will be returned to the application on failure of the corresponding pipeline
2429 // rather than continuing to create additional pipelines.
2430 if(pCreateInfos[i].flags & VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT)
2431 {
2432 return errorResult;
2433 }
2434 }
2435 }
2436
2437 return errorResult;
2438 }
2439
2440 VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator)
2441 {
2442 TRACE("(VkDevice device = %p, VkPipeline pipeline = %p, const VkAllocationCallbacks* pAllocator = %p)",
2443 device, static_cast<void *>(pipeline), pAllocator);
2444
2445 vk::destroy(pipeline, pAllocator);
2446 }
2447
2448 VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout)
2449 {
2450 TRACE("(VkDevice device = %p, const VkPipelineLayoutCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipelineLayout* pPipelineLayout = %p)",
2451 device, pCreateInfo, pAllocator, pPipelineLayout);
2452
2453 if(pCreateInfo->flags != 0 && pCreateInfo->flags != VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT)
2454 {
2455 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
2456 }
2457
2458 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2459 while(nextInfo)
2460 {
2461 switch(nextInfo->sType)
2462 {
2463 case VK_STRUCTURE_TYPE_MAX_ENUM:
2464 // dEQP tests that this value is ignored.
2465 break;
2466 default:
2467 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2468 break;
2469 }
2470 nextInfo = nextInfo->pNext;
2471 }
2472
2473 return vk::PipelineLayout::Create(pAllocator, pCreateInfo, pPipelineLayout);
2474 }
2475
2476 VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks *pAllocator)
2477 {
2478 TRACE("(VkDevice device = %p, VkPipelineLayout pipelineLayout = %p, const VkAllocationCallbacks* pAllocator = %p)",
2479 device, static_cast<void *>(pipelineLayout), pAllocator);
2480
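// The spec allows an application to destroy a VkPipelineLayout while pipelines created from
// it are still in use, so the layout is reference counted and released here rather than
// destroyed outright.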
2481 vk::release(pipelineLayout, pAllocator);
2482 }
2483
2484 VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSampler *pSampler)
2485 {
2486 TRACE("(VkDevice device = %p, const VkSamplerCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSampler* pSampler = %p)",
2487 device, pCreateInfo, pAllocator, pSampler);
2488
2489 if(pCreateInfo->flags != 0)
2490 {
2491 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
2492 }
2493
2494 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2495 const vk::SamplerYcbcrConversion *ycbcrConversion = nullptr;
2496 VkClearColorValue borderColor = {};
2497
2498 while(extensionCreateInfo)
2499 {
2500 switch(static_cast<long>(extensionCreateInfo->sType))
2501 {
2502 case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
2503 {
2504 const VkSamplerYcbcrConversionInfo *samplerYcbcrConversionInfo =
2505 reinterpret_cast<const VkSamplerYcbcrConversionInfo *>(extensionCreateInfo);
2506 ycbcrConversion = vk::Cast(samplerYcbcrConversionInfo->conversion);
2507 }
2508 break;
2509 case VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT:
2510 {
2511 const VkSamplerCustomBorderColorCreateInfoEXT *borderColorInfo =
2512 reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT *>(extensionCreateInfo);
2513
2514 borderColor = borderColorInfo->customBorderColor;
2515 }
2516 break;
2517 default:
2518 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
2519 break;
2520 }
2521
2522 extensionCreateInfo = extensionCreateInfo->pNext;
2523 }
2524
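// Sampler state is deduplicated: indexSampler() returns a device-wide ID for this combination
// of sampler parameters (taking a reference on it), and that ID is stored in the Sampler
// object. If creation fails below, the reference taken here is dropped again.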
2525 vk::SamplerState samplerState(pCreateInfo, ycbcrConversion, borderColor);
2526 uint32_t samplerID = vk::Cast(device)->indexSampler(samplerState);
2527
2528 VkResult result = vk::Sampler::Create(pAllocator, pCreateInfo, pSampler, samplerState, samplerID);
2529
2530 if(*pSampler == VK_NULL_HANDLE)
2531 {
2532 ASSERT(result != VK_SUCCESS);
2533 vk::Cast(device)->removeSampler(samplerState);
2534 }
2535
2536 return result;
2537 }
2538
2539 VKAPI_ATTR void VKAPI_CALL vkDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator)
2540 {
2541 TRACE("(VkDevice device = %p, VkSampler sampler = %p, const VkAllocationCallbacks* pAllocator = %p)",
2542 device, static_cast<void *>(sampler), pAllocator);
2543
2544 if(sampler != VK_NULL_HANDLE)
2545 {
2546 vk::Cast(device)->removeSampler(*vk::Cast(sampler));
2547
2548 vk::destroy(sampler, pAllocator);
2549 }
2550 }
2551
2552 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorSetLayout *pSetLayout)
2553 {
2554 TRACE("(VkDevice device = %p, const VkDescriptorSetLayoutCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorSetLayout* pSetLayout = %p)",
2555 device, pCreateInfo, pAllocator, pSetLayout);
2556
2557 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2558
2559 while(extensionCreateInfo)
2560 {
2561 switch(extensionCreateInfo->sType)
2562 {
2563 case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT:
2564 ASSERT(!vk::Cast(device)->hasExtension(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME));
2565 break;
2566 default:
2567 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
2568 break;
2569 }
2570
2571 extensionCreateInfo = extensionCreateInfo->pNext;
2572 }
2573
2574 return vk::DescriptorSetLayout::Create(pAllocator, pCreateInfo, pSetLayout);
2575 }
2576
2577 VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks *pAllocator)
2578 {
2579 TRACE("(VkDevice device = %p, VkDescriptorSetLayout descriptorSetLayout = %p, const VkAllocationCallbacks* pAllocator = %p)",
2580 device, static_cast<void *>(descriptorSetLayout), pAllocator);
2581
2582 vk::destroy(descriptorSetLayout, pAllocator);
2583 }
2584
2585 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool)
2586 {
2587 TRACE("(VkDevice device = %p, const VkDescriptorPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorPool* pDescriptorPool = %p)",
2588 device, pCreateInfo, pAllocator, pDescriptorPool);
2589
2590 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2591 while(extInfo)
2592 {
2593 switch(extInfo->sType)
2594 {
2595 case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO:
2596 break;
2597 default:
2598 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2599 break;
2600 }
2601 extInfo = extInfo->pNext;
2602 }
2603
2604 return vk::DescriptorPool::Create(pAllocator, pCreateInfo, pDescriptorPool);
2605 }
2606
2607 VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks *pAllocator)
2608 {
2609 TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
2610 device, static_cast<void *>(descriptorPool), pAllocator);
2611
2612 vk::destroy(descriptorPool, pAllocator);
2613 }
2614
2615 VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags)
2616 {
2617 TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, VkDescriptorPoolResetFlags flags = 0x%08X)",
2618 device, static_cast<void *>(descriptorPool), int(flags));
2619
2620 if(flags != 0)
2621 {
2622 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2623 UNSUPPORTED("flags 0x%08X", int(flags));
2624 }
2625
2626 return vk::Cast(descriptorPool)->reset();
2627 }
2628
2629 VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo, VkDescriptorSet *pDescriptorSets)
2630 {
2631 TRACE("(VkDevice device = %p, const VkDescriptorSetAllocateInfo* pAllocateInfo = %p, VkDescriptorSet* pDescriptorSets = %p)",
2632 device, pAllocateInfo, pDescriptorSets);
2633
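// VkDescriptorSetVariableDescriptorCountAllocateInfo lets the application choose, at
// allocation time, the descriptor count of a binding created with
// VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT; it is forwarded to the pool below.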
2634 const VkDescriptorSetVariableDescriptorCountAllocateInfo *variableDescriptorCountAllocateInfo = nullptr;
2635
2636 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pAllocateInfo->pNext);
2637 while(extInfo)
2638 {
2639 switch(extInfo->sType)
2640 {
2641 case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO:
2642 variableDescriptorCountAllocateInfo = reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfo *>(extInfo);
2643 break;
2644 default:
2645 UNSUPPORTED("pAllocateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2646 break;
2647 }
2648 extInfo = extInfo->pNext;
2649 }
2650
2651 return vk::Cast(pAllocateInfo->descriptorPool)->allocateSets(pAllocateInfo->descriptorSetCount, pAllocateInfo->pSetLayouts, pDescriptorSets, variableDescriptorCountAllocateInfo);
2652 }
2653
2654 VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets)
2655 {
2656 TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, uint32_t descriptorSetCount = %d, const VkDescriptorSet* pDescriptorSets = %p)",
2657 device, static_cast<void *>(descriptorPool), descriptorSetCount, pDescriptorSets);
2658
2659 vk::Cast(descriptorPool)->freeSets(descriptorSetCount, pDescriptorSets);
2660
2661 return VK_SUCCESS;
2662 }
2663
2664 VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet *pDescriptorCopies)
2665 {
2666 TRACE("(VkDevice device = %p, uint32_t descriptorWriteCount = %d, const VkWriteDescriptorSet* pDescriptorWrites = %p, uint32_t descriptorCopyCount = %d, const VkCopyDescriptorSet* pDescriptorCopies = %p)",
2667 device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
2668
2669 vk::Cast(device)->updateDescriptorSets(descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
2670 }
2671
2672 VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer)
2673 {
2674 TRACE("(VkDevice device = %p, const VkFramebufferCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkFramebuffer* pFramebuffer = %p)",
2675 device, pCreateInfo, pAllocator, pFramebuffer);
2676
2677 return vk::Framebuffer::Create(pAllocator, pCreateInfo, pFramebuffer);
2678 }
2679
2680 VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator)
2681 {
2682 TRACE("(VkDevice device = %p, VkFramebuffer framebuffer = %p, const VkAllocationCallbacks* pAllocator = %p)",
2683 device, static_cast<void *>(framebuffer), pAllocator);
2684
2685 vk::destroy(framebuffer, pAllocator);
2686 }
2687
2688 VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass)
2689 {
2690 TRACE("(VkDevice device = %p, const VkRenderPassCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkRenderPass* pRenderPass = %p)",
2691 device, pCreateInfo, pAllocator, pRenderPass);
2692
2693 if(pCreateInfo->flags != 0)
2694 {
2695 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2696 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
2697 }
2698
2699 ValidateRenderPassPNextChain(device, pCreateInfo);
2700
2701 return vk::RenderPass::Create(pAllocator, pCreateInfo, pRenderPass);
2702 }
2703
2704 VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass)
2705 {
2706 TRACE("(VkDevice device = %p, const VkRenderPassCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkRenderPass* pRenderPass = %p)",
2707 device, pCreateInfo, pAllocator, pRenderPass);
2708
2709 if(pCreateInfo->flags != 0)
2710 {
2711 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2712 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
2713 }
2714
2715 ValidateRenderPassPNextChain(device, pCreateInfo);
2716
2717 return vk::RenderPass::Create(pAllocator, pCreateInfo, pRenderPass);
2718 }
2719
2720 VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator)
2721 {
2722 TRACE("(VkDevice device = %p, VkRenderPass renderPass = %p, const VkAllocationCallbacks* pAllocator = %p)",
2723 device, static_cast<void *>(renderPass), pAllocator);
2724
2725 vk::destroy(renderPass, pAllocator);
2726 }
2727
2728 VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity)
2729 {
2730 TRACE("(VkDevice device = %p, VkRenderPass renderPass = %p, VkExtent2D* pGranularity = %p)",
2731 device, static_cast<void *>(renderPass), pGranularity);
2732
2733 vk::Cast(renderPass)->getRenderAreaGranularity(pGranularity);
2734 }
2735
2736 VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool)
2737 {
2738 TRACE("(VkDevice device = %p, const VkCommandPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkCommandPool* pCommandPool = %p)",
2739 device, pCreateInfo, pAllocator, pCommandPool);
2740
2741 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2742 while(nextInfo)
2743 {
2744 switch(nextInfo->sType)
2745 {
2746 case VK_STRUCTURE_TYPE_MAX_ENUM:
2747 // dEQP tests that this value is ignored.
2748 break;
2749 default:
2750 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2751 break;
2752 }
2753 nextInfo = nextInfo->pNext;
2754 }
2755
2756 return vk::CommandPool::Create(pAllocator, pCreateInfo, pCommandPool);
2757 }
2758
2759 VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator)
2760 {
2761 TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
2762 device, static_cast<void *>(commandPool), pAllocator);
2763
2764 vk::destroy(commandPool, pAllocator);
2765 }
2766
2767 VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags)
2768 {
2769 TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, VkCommandPoolResetFlags flags = %d)",
2770 device, static_cast<void *>(commandPool), int(flags));
2771
2772 return vk::Cast(commandPool)->reset(flags);
2773 }
2774
2775 VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo, VkCommandBuffer *pCommandBuffers)
2776 {
2777 TRACE("(VkDevice device = %p, const VkCommandBufferAllocateInfo* pAllocateInfo = %p, VkCommandBuffer* pCommandBuffers = %p)",
2778 device, pAllocateInfo, pCommandBuffers);
2779
2780 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pAllocateInfo->pNext);
2781 while(nextInfo)
2782 {
2783 switch(nextInfo->sType)
2784 {
2785 case VK_STRUCTURE_TYPE_MAX_ENUM:
2786 // dEQP tests that this value is ignored.
2787 break;
2788 default:
2789 UNSUPPORTED("pAllocateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2790 break;
2791 }
2792 nextInfo = nextInfo->pNext;
2793 }
2794
2795 return vk::Cast(pAllocateInfo->commandPool)->allocateCommandBuffers(vk::Cast(device), pAllocateInfo->level, pAllocateInfo->commandBufferCount, pCommandBuffers);
2796 }
2797
2798 VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers)
2799 {
2800 TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, uint32_t commandBufferCount = %d, const VkCommandBuffer* pCommandBuffers = %p)",
2801 device, static_cast<void *>(commandPool), int(commandBufferCount), pCommandBuffers);
2802
2803 vk::Cast(commandPool)->freeCommandBuffers(commandBufferCount, pCommandBuffers);
2804 }
2805
2806 VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo)
2807 {
2808 TRACE("(VkCommandBuffer commandBuffer = %p, const VkCommandBufferBeginInfo* pBeginInfo = %p)",
2809 commandBuffer, pBeginInfo);
2810
2811 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pBeginInfo->pNext);
2812 while(nextInfo)
2813 {
2814 switch(nextInfo->sType)
2815 {
2816 case VK_STRUCTURE_TYPE_MAX_ENUM:
2817 // dEQP tests that this value is ignored.
2818 break;
2819 default:
2820 UNSUPPORTED("pBeginInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2821 break;
2822 }
2823 nextInfo = nextInfo->pNext;
2824 }
2825
2826 return vk::Cast(commandBuffer)->begin(pBeginInfo->flags, pBeginInfo->pInheritanceInfo);
2827 }
2828
2829 VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer(VkCommandBuffer commandBuffer)
2830 {
2831 TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);
2832
2833 return vk::Cast(commandBuffer)->end();
2834 }
2835
2836 VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags)
2837 {
2838 TRACE("(VkCommandBuffer commandBuffer = %p, VkCommandBufferResetFlags flags = %d)", commandBuffer, int(flags));
2839
2840 return vk::Cast(commandBuffer)->reset(flags);
2841 }
2842
2843 VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline)
2844 {
2845 TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineBindPoint pipelineBindPoint = %d, VkPipeline pipeline = %p)",
2846 commandBuffer, int(pipelineBindPoint), static_cast<void *>(pipeline));
2847
2848 vk::Cast(commandBuffer)->bindPipeline(pipelineBindPoint, vk::Cast(pipeline));
2849 }
2850
2851 VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport *pViewports)
2852 {
2853 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstViewport = %d, uint32_t viewportCount = %d, const VkViewport* pViewports = %p)",
2854 commandBuffer, int(firstViewport), int(viewportCount), pViewports);
2855
2856 vk::Cast(commandBuffer)->setViewport(firstViewport, viewportCount, pViewports);
2857 }
2858
2859 VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D *pScissors)
2860 {
2861 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstScissor = %d, uint32_t scissorCount = %d, const VkRect2D* pScissors = %p)",
2862 commandBuffer, int(firstScissor), int(scissorCount), pScissors);
2863
2864 vk::Cast(commandBuffer)->setScissor(firstScissor, scissorCount, pScissors);
2865 }
2866
2867 VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
2868 {
2869 TRACE("(VkCommandBuffer commandBuffer = %p, float lineWidth = %f)", commandBuffer, lineWidth);
2870
2871 vk::Cast(commandBuffer)->setLineWidth(lineWidth);
2872 }
2873
2874 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor)
2875 {
2876 TRACE("(VkCommandBuffer commandBuffer = %p, float depthBiasConstantFactor = %f, float depthBiasClamp = %f, float depthBiasSlopeFactor = %f)",
2877 commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
2878
2879 vk::Cast(commandBuffer)->setDepthBias(depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
2880 }
2881
2882 VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4])
2883 {
2884 TRACE("(VkCommandBuffer commandBuffer = %p, const float blendConstants[4] = {%f, %f, %f, %f})",
2885 commandBuffer, blendConstants[0], blendConstants[1], blendConstants[2], blendConstants[3]);
2886
2887 vk::Cast(commandBuffer)->setBlendConstants(blendConstants);
2888 }
2889
2890 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds)
2891 {
2892 TRACE("(VkCommandBuffer commandBuffer = %p, float minDepthBounds = %f, float maxDepthBounds = %f)",
2893 commandBuffer, minDepthBounds, maxDepthBounds);
2894
2895 vk::Cast(commandBuffer)->setDepthBounds(minDepthBounds, maxDepthBounds);
2896 }
2897
2898 VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask)
2899 {
2900 TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t compareMask = %d)",
2901 commandBuffer, int(faceMask), int(compareMask));
2902
2903 vk::Cast(commandBuffer)->setStencilCompareMask(faceMask, compareMask);
2904 }
2905
2906 VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask)
2907 {
2908 TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t writeMask = %d)",
2909 commandBuffer, int(faceMask), int(writeMask));
2910
2911 vk::Cast(commandBuffer)->setStencilWriteMask(faceMask, writeMask);
2912 }
2913
2914 VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference)
2915 {
2916 TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t reference = %d)",
2917 commandBuffer, int(faceMask), int(reference));
2918
2919 vk::Cast(commandBuffer)->setStencilReference(faceMask, reference);
2920 }
2921
2922 VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t *pDynamicOffsets)
2923 {
2924 TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineBindPoint pipelineBindPoint = %d, VkPipelineLayout layout = %p, uint32_t firstSet = %d, uint32_t descriptorSetCount = %d, const VkDescriptorSet* pDescriptorSets = %p, uint32_t dynamicOffsetCount = %d, const uint32_t* pDynamicOffsets = %p)",
2925 commandBuffer, int(pipelineBindPoint), static_cast<void *>(layout), int(firstSet), int(descriptorSetCount), pDescriptorSets, int(dynamicOffsetCount), pDynamicOffsets);
2926
2927 vk::Cast(commandBuffer)->bindDescriptorSets(pipelineBindPoint, vk::Cast(layout), firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
2928 }
2929
2930 VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType)
2931 {
2932 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, VkIndexType indexType = %d)",
2933 commandBuffer, static_cast<void *>(buffer), int(offset), int(indexType));
2934
2935 vk::Cast(commandBuffer)->bindIndexBuffer(vk::Cast(buffer), offset, indexType);
2936 }
2937
2938 VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer *pBuffers, const VkDeviceSize *pOffsets)
2939 {
2940 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstBinding = %d, uint32_t bindingCount = %d, const VkBuffer* pBuffers = %p, const VkDeviceSize* pOffsets = %p)",
2941 commandBuffer, int(firstBinding), int(bindingCount), pBuffers, pOffsets);
2942
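// The original entry point has no per-binding sizes or strides, so null is forwarded for
// both; vertex strides then come from the bound pipeline's vertex input state.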
2943 vk::Cast(commandBuffer)->bindVertexBuffers(firstBinding, bindingCount, pBuffers, pOffsets, nullptr, nullptr);
2944 }
2945
2946 VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer *pBuffers, const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes, const VkDeviceSize *pStrides)
2947 {
2948 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstBinding = %d, uint32_t bindingCount = %d, const VkBuffer* pBuffers = %p, const VkDeviceSize* pOffsets = %p, const VkDeviceSize *pSizes = %p, const VkDeviceSize *pStrides = %p)",
2949 commandBuffer, int(firstBinding), int(bindingCount), pBuffers, pOffsets, pSizes, pStrides);
2950
2951 vk::Cast(commandBuffer)->bindVertexBuffers(firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides);
2952 }
2953
2954 VKAPI_ATTR void VKAPI_CALL vkCmdSetCullMode(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode)
2955 {
2956 TRACE("(VkCommandBuffer commandBuffer = %p, VkCullModeFlags cullMode = %d)",
2957 commandBuffer, int(cullMode));
2958
2959 vk::Cast(commandBuffer)->setCullMode(cullMode);
2960 }
2961
2962 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnable(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable)
2963 {
2964 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 depthBoundsTestEnable = %d)",
2965 commandBuffer, int(depthBoundsTestEnable));
2966
2967 vk::Cast(commandBuffer)->setDepthBoundsTestEnable(depthBoundsTestEnable);
2968 }
2969
2970 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOp(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp)
2971 {
2972 TRACE("(VkCommandBuffer commandBuffer = %p, VkCompareOp depthCompareOp = %d)",
2973 commandBuffer, int(depthCompareOp));
2974
2975 vk::Cast(commandBuffer)->setDepthCompareOp(depthCompareOp);
2976 }
2977
2978 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnable(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable)
2979 {
2980 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 depthTestEnable = %d)",
2981 commandBuffer, int(depthTestEnable));
2982
2983 vk::Cast(commandBuffer)->setDepthTestEnable(depthTestEnable);
2984 }
2985
2986 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnable(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable)
2987 {
2988 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 depthWriteEnable = %d)",
2989 commandBuffer, int(depthWriteEnable));
2990
2991 vk::Cast(commandBuffer)->setDepthWriteEnable(depthWriteEnable);
2992 }
2993
2994 VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFace(VkCommandBuffer commandBuffer, VkFrontFace frontFace)
2995 {
2996 TRACE("(VkCommandBuffer commandBuffer = %p, VkFrontFace frontFace = %d)",
2997 commandBuffer, int(frontFace));
2998
2999 vk::Cast(commandBuffer)->setFrontFace(frontFace);
3000 }
3001
3002 VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopology(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology)
3003 {
3004 TRACE("(VkCommandBuffer commandBuffer = %p, VkPrimitiveTopology primitiveTopology = %d)",
3005 commandBuffer, int(primitiveTopology));
3006
3007 vk::Cast(commandBuffer)->setPrimitiveTopology(primitiveTopology);
3008 }
3009
3010 VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCount(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D *pScissors)
3011 {
3012 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t scissorCount = %d, const VkRect2D *pScissors = %p)",
3013 commandBuffer, scissorCount, pScissors);
3014
3015 vk::Cast(commandBuffer)->setScissorWithCount(scissorCount, pScissors);
3016 }
3017
3018 VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOp(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp)
3019 {
3020 TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, VkStencilOp failOp = %d, VkStencilOp passOp = %d, VkStencilOp depthFailOp = %d, VkCompareOp compareOp = %d)",
3021 commandBuffer, int(faceMask), int(failOp), int(passOp), int(depthFailOp), int(compareOp));
3022
3023 vk::Cast(commandBuffer)->setStencilOp(faceMask, failOp, passOp, depthFailOp, compareOp);
3024 }
3025
3026 VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilTestEnable(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable)
3027 {
3028 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 stencilTestEnable = %d)",
3029 commandBuffer, int(stencilTestEnable));
3030
3031 vk::Cast(commandBuffer)->setStencilTestEnable(stencilTestEnable);
3032 }
3033
3034 VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCount(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport *pViewports)
3035 {
3036 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t viewportCount = %d, const VkViewport *pViewports = %p)",
3037 commandBuffer, viewportCount, pViewports);
3038
3039 vk::Cast(commandBuffer)->setViewportWithCount(viewportCount, pViewports);
3040 }
3041
3042 VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnable(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable)
3043 {
3044 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 rasterizerDiscardEnable = %d)",
3045 commandBuffer, rasterizerDiscardEnable);
3046
3047 vk::Cast(commandBuffer)->setRasterizerDiscardEnable(rasterizerDiscardEnable);
3048 }
3049
3050 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnable(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable)
3051 {
3052 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 depthBiasEnable = %d)",
3053 commandBuffer, depthBiasEnable);
3054
3055 vk::Cast(commandBuffer)->setDepthBiasEnable(depthBiasEnable);
3056 }
3057
3058 VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnable(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable)
3059 {
3060 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 primitiveRestartEnable = %d)",
3061 commandBuffer, primitiveRestartEnable);
3062
3063 vk::Cast(commandBuffer)->setPrimitiveRestartEnable(primitiveRestartEnable);
3064 }
3065
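// VK_EXT_vertex_input_dynamic_state: replaces the pipeline's static vertex input state with
// bindings and attributes supplied at command-buffer recording time.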
3066 VKAPI_ATTR void VKAPI_CALL vkCmdSetVertexInputEXT(VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount,
3067 const VkVertexInputBindingDescription2EXT *pVertexBindingDescriptions,
3068 uint32_t vertexAttributeDescriptionCount,
3069 const VkVertexInputAttributeDescription2EXT *pVertexAttributeDescriptions)
3070 {
3071 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t vertexBindingDescriptionCount = %d, const VkVertexInputBindingDescription2EXT *pVertexBindingDescriptions = %p, uint32_t vertexAttributeDescriptionCount = %d, const VkVertexInputAttributeDescription2EXT *pVertexAttributeDescriptions = %p)",
3072 commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions);
3073
3074 vk::Cast(commandBuffer)->setVertexInput(vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions);
3075 }
3076
3077 VKAPI_ATTR void VKAPI_CALL vkCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance)
3078 {
3079 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t vertexCount = %d, uint32_t instanceCount = %d, uint32_t firstVertex = %d, uint32_t firstInstance = %d)",
3080 commandBuffer, int(vertexCount), int(instanceCount), int(firstVertex), int(firstInstance));
3081
3082 vk::Cast(commandBuffer)->draw(vertexCount, instanceCount, firstVertex, firstInstance);
3083 }
3084
3085 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance)
3086 {
3087 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t indexCount = %d, uint32_t instanceCount = %d, uint32_t firstIndex = %d, int32_t vertexOffset = %d, uint32_t firstInstance = %d)",
3088 commandBuffer, int(indexCount), int(instanceCount), int(firstIndex), int(vertexOffset), int(firstInstance));
3089
3090 vk::Cast(commandBuffer)->drawIndexed(indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
3091 }
3092
3093 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride)
3094 {
3095 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, uint32_t drawCount = %d, uint32_t stride = %d)",
3096 commandBuffer, static_cast<void *>(buffer), int(offset), int(drawCount), int(stride));
3097
3098 vk::Cast(commandBuffer)->drawIndirect(vk::Cast(buffer), offset, drawCount, stride);
3099 }
3100
3101 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride)
3102 {
3103 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, uint32_t drawCount = %d, uint32_t stride = %d)",
3104 commandBuffer, static_cast<void *>(buffer), int(offset), int(drawCount), int(stride));
3105
3106 vk::Cast(commandBuffer)->drawIndexedIndirect(vk::Cast(buffer), offset, drawCount, stride);
3107 }
3108
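// vkCmdDrawIndirectCount and vkCmdDrawIndexedIndirectCount are core in Vulkan 1.2 but are
// only valid to call when the optional drawIndirectCount feature is enabled, which
// SwiftShader does not advertise; hence the UNSUPPORTED reports below.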
3109 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride)
3110 {
3111 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, VkBuffer countBuffer = %p, VkDeviceSize countBufferOffset = %d, uint32_t maxDrawCount = %d, uint32_t stride = %d)",
3112 commandBuffer, static_cast<void *>(buffer), int(offset), static_cast<void *>(countBuffer), int(countBufferOffset), int(maxDrawCount), int(stride));
3113 UNSUPPORTED("VK_KHR_draw_indirect_count");
3114 }
3115
3116 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride)
3117 {
3118 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, VkBuffer countBuffer = %p, VkDeviceSize countBufferOffset = %d, uint32_t maxDrawCount = %d, uint32_t stride = %d)",
3119 commandBuffer, static_cast<void *>(buffer), int(offset), static_cast<void *>(countBuffer), int(countBufferOffset), int(maxDrawCount), int(stride));
3120 UNSUPPORTED("VK_KHR_draw_indirect_count");
3121 }
3122
3123 VKAPI_ATTR void VKAPI_CALL vkCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ)
3124 {
3125 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t groupCountX = %d, uint32_t groupCountY = %d, uint32_t groupCountZ = %d)",
3126 commandBuffer, int(groupCountX), int(groupCountY), int(groupCountZ));
3127
3128 vk::Cast(commandBuffer)->dispatch(groupCountX, groupCountY, groupCountZ);
3129 }
3130
3131 VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset)
3132 {
3133 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d)",
3134 commandBuffer, static_cast<void *>(buffer), int(offset));
3135
3136 vk::Cast(commandBuffer)->dispatchIndirect(vk::Cast(buffer), offset);
3137 }
3138
3139 VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy *pRegions)
3140 {
3141 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer srcBuffer = %p, VkBuffer dstBuffer = %p, uint32_t regionCount = %d, const VkBufferCopy* pRegions = %p)",
3142 commandBuffer, static_cast<void *>(srcBuffer), static_cast<void *>(dstBuffer), int(regionCount), pRegions);
3143
3144 vk::Cast(commandBuffer)->copyBuffer(vk::CopyBufferInfo(srcBuffer, dstBuffer, regionCount, pRegions));
3145 }
3146
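// The *2 copy/blit entry points (VK_KHR_copy_commands2, core in Vulkan 1.3) take their
// parameters bundled in a single extensible struct and forward to the same CommandBuffer
// implementation as the original commands.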
3147 VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 *pCopyBufferInfo)
3148 {
3149 TRACE("(VkCommandBuffer commandBuffer = %p, const VkCopyBufferInfo2* pCopyBufferInfo = %p)",
3150 commandBuffer, pCopyBufferInfo);
3151
3152 vk::Cast(commandBuffer)->copyBuffer(*pCopyBufferInfo);
3153 }
3154
3155 VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions)
3156 {
3157 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageCopy* pRegions = %p)",
3158 commandBuffer, static_cast<void *>(srcImage), srcImageLayout, static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions);
3159
3160 vk::Cast(commandBuffer)->copyImage(vk::CopyImageInfo(srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions));
3161 }
3162
3163 VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2(VkCommandBuffer commandBuffer, const VkCopyImageInfo2 *pCopyImageInfo)
3164 {
3165 TRACE("(VkCommandBuffer commandBuffer = %p, const VkCopyImageInfo2* pCopyImageInfo = %p)",
3166 commandBuffer, pCopyImageInfo);
3167
3168 vk::Cast(commandBuffer)->copyImage(*pCopyImageInfo);
3169 }
3170
3171 VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter)
3172 {
3173 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageBlit* pRegions = %p, VkFilter filter = %d)",
3174 commandBuffer, static_cast<void *>(srcImage), srcImageLayout, static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions, filter);
3175
3176 vk::Cast(commandBuffer)->blitImage(vk::BlitImageInfo(srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter));
3177 }
3178
3179 VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2(VkCommandBuffer commandBuffer, const VkBlitImageInfo2 *pBlitImageInfo)
3180 {
3181 TRACE("(VkCommandBuffer commandBuffer = %p, const VkBlitImageInfo2* pBlitImageInfo = %p)",
3182 commandBuffer, pBlitImageInfo);
3183
3184 vk::Cast(commandBuffer)->blitImage(*pBlitImageInfo);
3185 }
3186
3187 VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions)
3188 {
3189 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer srcBuffer = %p, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkBufferImageCopy* pRegions = %p)",
3190 commandBuffer, static_cast<void *>(srcBuffer), static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions);
3191
3192 vk::Cast(commandBuffer)->copyBufferToImage(vk::CopyBufferToImageInfo(srcBuffer, dstImage, dstImageLayout, regionCount, pRegions));
3193 }
3194
3195 VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 *pCopyBufferToImageInfo)
3196 {
3197 TRACE("(VkCommandBuffer commandBuffer = %p, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo = %p)",
3198 commandBuffer, pCopyBufferToImageInfo);
3199
3200 vk::Cast(commandBuffer)->copyBufferToImage(*pCopyBufferToImageInfo);
3201 }
3202
3203 VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions)
3204 {
3205 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkBuffer dstBuffer = %p, uint32_t regionCount = %d, const VkBufferImageCopy* pRegions = %p)",
3206 commandBuffer, static_cast<void *>(srcImage), int(srcImageLayout), static_cast<void *>(dstBuffer), int(regionCount), pRegions);
3207
3208 vk::Cast(commandBuffer)->copyImageToBuffer(vk::CopyImageToBufferInfo(srcImage, srcImageLayout, dstBuffer, regionCount, pRegions));
3209 }
3210
3211 VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 *pCopyImageToBufferInfo)
3212 {
3213 TRACE("(VkCommandBuffer commandBuffer = %p, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo = %p)",
3214 commandBuffer, pCopyImageToBufferInfo);
3215
3216 vk::Cast(commandBuffer)->copyImageToBuffer(*pCopyImageToBufferInfo);
3217 }
3218
3219 VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData)
3220 {
3221 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize dataSize = %d, const void* pData = %p)",
3222 commandBuffer, static_cast<void *>(dstBuffer), int(dstOffset), int(dataSize), pData);
3223
3224 vk::Cast(commandBuffer)->updateBuffer(vk::Cast(dstBuffer), dstOffset, dataSize, pData);
3225 }
3226
3227 VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data)
3228 {
3229 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize size = %d, uint32_t data = %d)",
3230 commandBuffer, static_cast<void *>(dstBuffer), int(dstOffset), int(size), data);
3231
3232 vk::Cast(commandBuffer)->fillBuffer(vk::Cast(dstBuffer), dstOffset, size, data);
3233 }
3234
3235 VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue *pColor, uint32_t rangeCount, const VkImageSubresourceRange *pRanges)
3236 {
3237 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage image = %p, VkImageLayout imageLayout = %d, const VkClearColorValue* pColor = %p, uint32_t rangeCount = %d, const VkImageSubresourceRange* pRanges = %p)",
3238 commandBuffer, static_cast<void *>(image), int(imageLayout), pColor, int(rangeCount), pRanges);
3239
3240 vk::Cast(commandBuffer)->clearColorImage(vk::Cast(image), imageLayout, pColor, rangeCount, pRanges);
3241 }
3242
3243 VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange *pRanges)
3244 {
3245 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage image = %p, VkImageLayout imageLayout = %d, const VkClearDepthStencilValue* pDepthStencil = %p, uint32_t rangeCount = %d, const VkImageSubresourceRange* pRanges = %p)",
3246 commandBuffer, static_cast<void *>(image), int(imageLayout), pDepthStencil, int(rangeCount), pRanges);
3247
3248 vk::Cast(commandBuffer)->clearDepthStencilImage(vk::Cast(image), imageLayout, pDepthStencil, rangeCount, pRanges);
3249 }
3250
3251 VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment *pAttachments, uint32_t rectCount, const VkClearRect *pRects)
3252 {
3253 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t attachmentCount = %d, const VkClearAttachment* pAttachments = %p, uint32_t rectCount = %d, const VkClearRect* pRects = %p)",
3254 commandBuffer, int(attachmentCount), pAttachments, int(rectCount), pRects);
3255
3256 vk::Cast(commandBuffer)->clearAttachments(attachmentCount, pAttachments, rectCount, pRects);
3257 }
3258
3259 VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve *pRegions)
3260 {
3261 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageResolve* pRegions = %p)",
3262 commandBuffer, static_cast<void *>(srcImage), int(srcImageLayout), static_cast<void *>(dstImage), int(dstImageLayout), regionCount, pRegions);
3263
3264 vk::Cast(commandBuffer)->resolveImage(vk::ResolveImageInfo(srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions));
3265 }
3266
3267 VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2(VkCommandBuffer commandBuffer, const VkResolveImageInfo2 *pResolveImageInfo)
3268 {
3269 TRACE("(VkCommandBuffer commandBuffer = %p, const VkResolveImageInfo2* pResolveImageInfo = %p)",
3270 commandBuffer, pResolveImageInfo);
3271
3272 vk::Cast(commandBuffer)->resolveImage(*pResolveImageInfo);
3273 }
3274
3275 VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
3276 {
3277 TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, VkPipelineStageFlags stageMask = %d)",
3278 commandBuffer, static_cast<void *>(event), int(stageMask));
3279
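// Note: this Vulkan 1.0 entry point only receives a stage mask, so it is wrapped here in a
// synchronization2-style vk::DependencyInfo with no memory/buffer/image barriers before being
// handed to the command buffer, mirroring what vkCmdSetEvent2 receives directly.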
3280 vk::Cast(commandBuffer)->setEvent(vk::Cast(event), vk::DependencyInfo(stageMask, stageMask, VkDependencyFlags(0), 0, nullptr, 0, nullptr, 0, nullptr));
3281 }
3282
3283 VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo *pDependencyInfo)
3284 {
3285 TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, const VkDependencyInfo* pDependencyInfo = %p)",
3286 commandBuffer, static_cast<void *>(event), pDependencyInfo);
3287
3288 vk::Cast(commandBuffer)->setEvent(vk::Cast(event), *pDependencyInfo);
3289 }
3290
3291 VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
3292 {
3293 TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, VkPipelineStageFlags stageMask = %d)",
3294 commandBuffer, static_cast<void *>(event), int(stageMask));
3295
3296 vk::Cast(commandBuffer)->resetEvent(vk::Cast(event), stageMask);
3297 }
3298
3299 VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask)
3300 {
3301 TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, VkPipelineStageFlags2 stageMask = %d)",
3302 commandBuffer, static_cast<void *>(event), int(stageMask));
3303
3304 vk::Cast(commandBuffer)->resetEvent(vk::Cast(event), stageMask);
3305 }
3306
3307 VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers)
3308 {
3309 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t eventCount = %d, const VkEvent* pEvents = %p, VkPipelineStageFlags srcStageMask = 0x%08X, VkPipelineStageFlags dstStageMask = 0x%08X, uint32_t memoryBarrierCount = %d, const VkMemoryBarrier* pMemoryBarriers = %p, uint32_t bufferMemoryBarrierCount = %d, const VkBufferMemoryBarrier* pBufferMemoryBarriers = %p, uint32_t imageMemoryBarrierCount = %d, const VkImageMemoryBarrier* pImageMemoryBarriers = %p)",
3310 commandBuffer, int(eventCount), pEvents, int(srcStageMask), int(dstStageMask), int(memoryBarrierCount), pMemoryBarriers, int(bufferMemoryBarrierCount), pBufferMemoryBarriers, int(imageMemoryBarrierCount), pImageMemoryBarriers);
3311
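// As with vkCmdSetEvent, the legacy barrier parameters are folded into a single vk::DependencyInfo
// so the command buffer only has to handle the synchronization2-style representation.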
3312 vk::Cast(commandBuffer)->waitEvents(eventCount, pEvents, vk::DependencyInfo(srcStageMask, dstStageMask, VkDependencyFlags(0), memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers));
3313 }
3314
3315 VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents, const VkDependencyInfo *pDependencyInfos)
3316 {
3317 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t eventCount = %d, const VkEvent* pEvents = %p, const VkDependencyInfo* pDependencyInfos = %p)",
3318 commandBuffer, int(eventCount), pEvents, pDependencyInfos);
3319
3320 vk::Cast(commandBuffer)->waitEvents(eventCount, pEvents, *pDependencyInfos);
3321 }
3322
3323 VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers)
3324 {
3325 TRACE(
3326 "(VkCommandBuffer commandBuffer = %p, VkPipelineStageFlags srcStageMask = 0x%08X, VkPipelineStageFlags dstStageMask = 0x%08X, VkDependencyFlags dependencyFlags = %d, uint32_t memoryBarrierCount = %d, onst VkMemoryBarrier* pMemoryBarriers = %p,"
3327 " uint32_t bufferMemoryBarrierCount = %d, const VkBufferMemoryBarrier* pBufferMemoryBarriers = %p, uint32_t imageMemoryBarrierCount = %d, const VkImageMemoryBarrier* pImageMemoryBarriers = %p)",
3328 commandBuffer, int(srcStageMask), int(dstStageMask), dependencyFlags, int(memoryBarrierCount), pMemoryBarriers, int(bufferMemoryBarrierCount), pBufferMemoryBarriers, int(imageMemoryBarrierCount), pImageMemoryBarriers);
3329
3330 vk::Cast(commandBuffer)->pipelineBarrier(vk::DependencyInfo(srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers));
3331 }
3332
3333 VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2(VkCommandBuffer commandBuffer, const VkDependencyInfo *pDependencyInfo)
3334 {
3335 TRACE("(VkCommandBuffer commandBuffer = %p, const VkDependencyInfo* pDependencyInfo = %p)",
3336 commandBuffer, pDependencyInfo);
3337
3338 vk::Cast(commandBuffer)->pipelineBarrier(*pDependencyInfo);
3339 }
3340
3341 VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags)
3342 {
3343 TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t query = %d, VkQueryControlFlags flags = %d)",
3344 commandBuffer, static_cast<void *>(queryPool), query, int(flags));
3345
3346 vk::Cast(commandBuffer)->beginQuery(vk::Cast(queryPool), query, flags);
3347 }
3348
3349 VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query)
3350 {
3351 TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t query = %d)",
3352 commandBuffer, static_cast<void *>(queryPool), int(query));
3353
3354 vk::Cast(commandBuffer)->endQuery(vk::Cast(queryPool), query);
3355 }
3356
3357 VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount)
3358 {
3359 TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d)",
3360 commandBuffer, static_cast<void *>(queryPool), int(firstQuery), int(queryCount));
3361
3362 vk::Cast(commandBuffer)->resetQueryPool(vk::Cast(queryPool), firstQuery, queryCount);
3363 }
3364
3365 VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query)
3366 {
3367 TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineStageFlagBits pipelineStage = %d, VkQueryPool queryPool = %p, uint32_t query = %d)",
3368 commandBuffer, int(pipelineStage), static_cast<void *>(queryPool), int(query));
3369
3370 vk::Cast(commandBuffer)->writeTimestamp(pipelineStage, vk::Cast(queryPool), query);
3371 }
3372
3373 VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query)
3374 {
3375 TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineStageFlags2 stage = %d, VkQueryPool queryPool = %p, uint32_t query = %d)",
3376 commandBuffer, int(stage), static_cast<void *>(queryPool), int(query));
3377
3378 vk::Cast(commandBuffer)->writeTimestamp(stage, vk::Cast(queryPool), query);
3379 }
3380
3381 VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags)
3382 {
3383 TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize stride = %d, VkQueryResultFlags flags = %d)",
3384 commandBuffer, static_cast<void *>(queryPool), int(firstQuery), int(queryCount), static_cast<void *>(dstBuffer), int(dstOffset), int(stride), int(flags));
3385
3386 vk::Cast(commandBuffer)->copyQueryPoolResults(vk::Cast(queryPool), firstQuery, queryCount, vk::Cast(dstBuffer), dstOffset, stride, flags);
3387 }
3388
3389 VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void *pValues)
3390 {
3391 TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineLayout layout = %p, VkShaderStageFlags stageFlags = %d, uint32_t offset = %d, uint32_t size = %d, const void* pValues = %p)",
3392 commandBuffer, static_cast<void *>(layout), stageFlags, offset, size, pValues);
3393
3394 vk::Cast(commandBuffer)->pushConstants(vk::Cast(layout), stageFlags, offset, size, pValues);
3395 }
3396
3397 VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, VkSubpassContents contents)
3398 {
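// The Vulkan 1.0 entry point synthesizes a VkSubpassBeginInfo and defers to vkCmdBeginRenderPass2.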
3399 VkSubpassBeginInfo subpassBeginInfo = { VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, nullptr, contents };
3400 vkCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, &subpassBeginInfo);
3401 }
3402
3403 VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, const VkSubpassBeginInfoKHR *pSubpassBeginInfo)
3404 {
3405 TRACE("(VkCommandBuffer commandBuffer = %p, const VkRenderPassBeginInfo* pRenderPassBegin = %p, const VkSubpassBeginInfoKHR* pSubpassBeginInfo = %p)",
3406 commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
3407
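// Walk the pRenderPassBegin->pNext chain. The only extension structure acted upon here is
// VkRenderPassAttachmentBeginInfo (imageless framebuffers); the other recognized structures
// are intentionally ignored, as explained in the switch below.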
3408 const VkBaseInStructure *renderPassBeginInfo = reinterpret_cast<const VkBaseInStructure *>(pRenderPassBegin->pNext);
3409 const VkRenderPassAttachmentBeginInfo *attachmentBeginInfo = nullptr;
3410 while(renderPassBeginInfo)
3411 {
3412 switch(renderPassBeginInfo->sType)
3413 {
3414 case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO:
3415 // This extension controls which render area is used on which physical device,
3416 // in order to distribute rendering between multiple physical devices.
3417 // SwiftShader only has a single physical device, so this extension does nothing in this case.
3418 break;
3419 case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO:
3420 attachmentBeginInfo = reinterpret_cast<const VkRenderPassAttachmentBeginInfo *>(renderPassBeginInfo);
3421 break;
3422 case VK_STRUCTURE_TYPE_MAX_ENUM:
3423 // dEQP tests that this value is ignored.
3424 break;
3425 default:
3426 UNSUPPORTED("pRenderPassBegin->pNext sType = %s", vk::Stringify(renderPassBeginInfo->sType).c_str());
3427 break;
3428 }
3429
3430 renderPassBeginInfo = renderPassBeginInfo->pNext;
3431 }
3432
3433 vk::Cast(commandBuffer)->beginRenderPass(vk::Cast(pRenderPassBegin->renderPass), vk::Cast(pRenderPassBegin->framebuffer), pRenderPassBegin->renderArea, pRenderPassBegin->clearValueCount, pRenderPassBegin->pClearValues, pSubpassBeginInfo->contents, attachmentBeginInfo);
3434 }
3435
3436 VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents)
3437 {
3438 TRACE("(VkCommandBuffer commandBuffer = %p, VkSubpassContents contents = %d)",
3439 commandBuffer, contents);
3440
3441 vk::Cast(commandBuffer)->nextSubpass(contents);
3442 }
3443
3444 VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo, const VkSubpassEndInfoKHR *pSubpassEndInfo)
3445 {
3446 TRACE("(VkCommandBuffer commandBuffer = %p, const VkSubpassBeginInfoKHR* pSubpassBeginInfo = %p, const VkSubpassEndInfoKHR* pSubpassEndInfo = %p)",
3447 commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
3448
3449 vk::Cast(commandBuffer)->nextSubpass(pSubpassBeginInfo->contents);
3450 }
3451
vkCmdEndRenderPass(VkCommandBuffer commandBuffer)3452 VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass(VkCommandBuffer commandBuffer)
3453 {
3454 TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);
3455
3456 vk::Cast(commandBuffer)->endRenderPass();
3457 }
3458
3459 VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo)
3460 {
3461 TRACE("(VkCommandBuffer commandBuffer = %p, const VkSubpassEndInfoKHR* pSubpassEndInfo = %p)", commandBuffer, pSubpassEndInfo);
3462
3463 vk::Cast(commandBuffer)->endRenderPass();
3464 }
3465
3466 VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers)
3467 {
3468 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t commandBufferCount = %d, const VkCommandBuffer* pCommandBuffers = %p)",
3469 commandBuffer, commandBufferCount, pCommandBuffers);
3470
3471 vk::Cast(commandBuffer)->executeCommands(commandBufferCount, pCommandBuffers);
3472 }
3473
3474 VKAPI_ATTR void VKAPI_CALL vkCmdBeginRendering(VkCommandBuffer commandBuffer, const VkRenderingInfo *pRenderingInfo)
3475 {
3476 TRACE("(VkCommandBuffer commandBuffer = %p, const VkRenderingInfo* pRenderingInfo = %p)",
3477 commandBuffer, pRenderingInfo);
3478
3479 vk::Cast(commandBuffer)->beginRendering(pRenderingInfo);
3480 }
3481
vkCmdEndRendering(VkCommandBuffer commandBuffer)3482 VKAPI_ATTR void VKAPI_CALL vkCmdEndRendering(VkCommandBuffer commandBuffer)
3483 {
3484 TRACE("(VkCommandBuffer commandBuffer = %p)",
3485 commandBuffer);
3486
3487 vk::Cast(commandBuffer)->endRendering();
3488 }
3489
3490 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion(uint32_t *pApiVersion)
3491 {
3492 TRACE("(uint32_t* pApiVersion = %p)", pApiVersion);
3493 *pApiVersion = vk::API_VERSION;
3494 return VK_SUCCESS;
3495 }
3496
3497 VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos)
3498 {
3499 TRACE("(VkDevice device = %p, uint32_t bindInfoCount = %d, const VkBindBufferMemoryInfo* pBindInfos = %p)",
3500 device, bindInfoCount, pBindInfos);
3501
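// Validate every bind info first, then perform the binds in a second pass, so that no buffer
// is bound if any of the requested bindings is invalid.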
3502 for(uint32_t i = 0; i < bindInfoCount; i++)
3503 {
3504 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pBindInfos[i].pNext);
3505 while(extInfo)
3506 {
3507 UNSUPPORTED("pBindInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
3508 extInfo = extInfo->pNext;
3509 }
3510
3511 if(!vk::Cast(pBindInfos[i].buffer)->canBindToMemory(vk::Cast(pBindInfos[i].memory)))
3512 {
3513 UNSUPPORTED("vkBindBufferMemory2 with invalid external memory");
3514 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
3515 }
3516 }
3517
3518 for(uint32_t i = 0; i < bindInfoCount; i++)
3519 {
3520 vk::Cast(pBindInfos[i].buffer)->bind(vk::Cast(pBindInfos[i].memory), pBindInfos[i].memoryOffset);
3521 }
3522
3523 return VK_SUCCESS;
3524 }
3525
3526 VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo *pBindInfos)
3527 {
3528 TRACE("(VkDevice device = %p, uint32_t bindInfoCount = %d, const VkBindImageMemoryInfo* pBindInfos = %p)",
3529 device, bindInfoCount, pBindInfos);
3530
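// Same two-pass approach as vkBindBufferMemory2: validate all bind infos before binding.
// The second pass also handles VkBindImageMemorySwapchainInfoKHR by substituting the
// swapchain image's backing memory for the provided memory handle.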
3531 for(uint32_t i = 0; i < bindInfoCount; i++)
3532 {
3533 if(!vk::Cast(pBindInfos[i].image)->canBindToMemory(vk::Cast(pBindInfos[i].memory)))
3534 {
3535 UNSUPPORTED("vkBindImageMemory2 with invalid external memory");
3536 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3537 }
3538 }
3539
3540 for(uint32_t i = 0; i < bindInfoCount; i++)
3541 {
3542 vk::DeviceMemory *memory = vk::Cast(pBindInfos[i].memory);
3543 VkDeviceSize offset = pBindInfos[i].memoryOffset;
3544
3545 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pBindInfos[i].pNext);
3546 while(extInfo)
3547 {
3548 switch(extInfo->sType)
3549 {
3550 case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO:
3551 /* Do nothing */
3552 break;
3553
3554 #ifndef __ANDROID__
3555 case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR:
3556 {
3557 const auto *swapchainInfo = reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR *>(extInfo);
3558 memory = vk::Cast(swapchainInfo->swapchain)->getImage(swapchainInfo->imageIndex).getImageMemory();
3559 offset = 0;
3560 }
3561 break;
3562 #endif
3563
3564 default:
3565 UNSUPPORTED("pBindInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
3566 break;
3567 }
3568 extInfo = extInfo->pNext;
3569 }
3570
3571 vk::Cast(pBindInfos[i].image)->bind(memory, offset);
3572 }
3573
3574 return VK_SUCCESS;
3575 }
3576
3577 VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeatures(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
3578 {
3579 TRACE("(VkDevice device = %p, uint32_t heapIndex = %d, uint32_t localDeviceIndex = %d, uint32_t remoteDeviceIndex = %d, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures = %p)",
3580 device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
3581
3582 ASSERT(localDeviceIndex != remoteDeviceIndex); // "localDeviceIndex must not equal remoteDeviceIndex"
3583 UNSUPPORTED("remoteDeviceIndex: %d", int(remoteDeviceIndex)); // Only one physical device is supported, and since the device indexes can't be equal, this should never be called.
3584 }
3585
3586 VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask)
3587 {
3588 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t deviceMask = %d", commandBuffer, deviceMask);
3589
3590 vk::Cast(commandBuffer)->setDeviceMask(deviceMask);
3591 }
3592
3593 VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ)
3594 {
3595 TRACE("(VkCommandBuffer commandBuffer = %p, baseGroupX = %u, baseGroupY = %u, baseGroupZ = %u, groupCountX = %u, groupCountY = %u, groupCountZ = %u)",
3596 commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
3597
3598 vk::Cast(commandBuffer)->dispatchBase(baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
3599 }
3600
3601 VKAPI_ATTR void VKAPI_CALL vkResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount)
3602 {
3603 TRACE("(VkDevice device = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d)",
3604 device, static_cast<void *>(queryPool), firstQuery, queryCount);
3605 vk::Cast(queryPool)->reset(firstQuery, queryCount);
3606 }
3607
3608 VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
3609 {
3610 TRACE("(VkInstance instance = %p, uint32_t* pPhysicalDeviceGroupCount = %p, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties = %p)",
3611 instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
3612
3613 return vk::Cast(instance)->getPhysicalDeviceGroups(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
3614 }
3615
3616 VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
3617 {
3618 TRACE("(VkDevice device = %p, const VkImageMemoryRequirementsInfo2* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
3619 device, pInfo, pMemoryRequirements);
3620
3621 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pInfo->pNext);
3622 while(extInfo)
3623 {
3624 UNSUPPORTED("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3625 extInfo = extInfo->pNext;
3626 }
3627
3628 vk::Cast(pInfo->image)->getMemoryRequirements(pMemoryRequirements);
3629 }
3630
3631 VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2 *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
3632 {
3633 TRACE("(VkDevice device = %p, const VkBufferMemoryRequirementsInfo2* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
3634 device, pInfo, pMemoryRequirements);
3635
3636 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pInfo->pNext);
3637 while(extInfo)
3638 {
3639 UNSUPPORTED("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3640 extInfo = extInfo->pNext;
3641 }
3642
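// Fill any recognized structures chained to pMemoryRequirements->pNext (currently only
// VkMemoryDedicatedRequirements), then delegate the core requirements to
// vkGetBufferMemoryRequirements below.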
3643 VkBaseOutStructure *extensionRequirements = reinterpret_cast<VkBaseOutStructure *>(pMemoryRequirements->pNext);
3644 while(extensionRequirements)
3645 {
3646 switch(extensionRequirements->sType)
3647 {
3648 case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
3649 {
3650 auto *requirements = reinterpret_cast<VkMemoryDedicatedRequirements *>(extensionRequirements);
3651 vk::Cast(device)->getRequirements(requirements);
3652 }
3653 break;
3654 default:
3655 UNSUPPORTED("pMemoryRequirements->pNext sType = %s", vk::Stringify(extensionRequirements->sType).c_str());
3656 break;
3657 }
3658
3659 extensionRequirements = extensionRequirements->pNext;
3660 }
3661
3662 vkGetBufferMemoryRequirements(device, pInfo->buffer, &(pMemoryRequirements->memoryRequirements));
3663 }
3664
3665 VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
3666 {
3667 TRACE("(VkDevice device = %p, const VkImageSparseMemoryRequirementsInfo2* pInfo = %p, uint32_t* pSparseMemoryRequirementCount = %p, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements = %p)",
3668 device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
3669
3670 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pInfo->pNext);
3671 while(extInfo)
3672 {
3673 UNSUPPORTED("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3674 extInfo = extInfo->pNext;
3675 }
3676
3677 if(pSparseMemoryRequirements) // Valid to be NULL
3678 {
3679 const auto *extensionRequirements = reinterpret_cast<const VkBaseInStructure *>(pSparseMemoryRequirements->pNext);
3680 while(extensionRequirements)
3681 {
3682 UNSUPPORTED("pSparseMemoryRequirements->pNext sType = %s", vk::Stringify(extensionRequirements->sType).c_str());
3683 extensionRequirements = extensionRequirements->pNext;
3684 }
3685 }
3686
3687 // The 'sparseBinding' feature is not supported, so images can not be created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag.
3688 // "If the image was not created with VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT then pSparseMemoryRequirementCount will be set to zero and pSparseMemoryRequirements will not be written to."
3689 *pSparseMemoryRequirementCount = 0;
3690 }
3691
3692 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 *pFeatures)
3693 {
3694 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceFeatures2* pFeatures = %p)", physicalDevice, pFeatures);
3695
3696 vk::Cast(physicalDevice)->getFeatures2(pFeatures);
3697 }
3698
3699 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 *pProperties)
3700 {
3701 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceProperties2* pProperties = %p)", physicalDevice, pProperties);
3702
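// Each recognized output structure in the pProperties->pNext chain is filled in by the
// physical device; the core VkPhysicalDeviceProperties are written last, via
// vkGetPhysicalDeviceProperties at the end of this function.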
3703 VkBaseOutStructure *extensionProperties = reinterpret_cast<VkBaseOutStructure *>(pProperties->pNext);
3704 while(extensionProperties)
3705 {
3706 // Casting to an int since some structures, such as VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID,
3707 // are not enumerated in the official Vulkan headers.
3708 switch((int)(extensionProperties->sType))
3709 {
3710 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES:
3711 {
3712 auto *properties = reinterpret_cast<VkPhysicalDeviceIDProperties *>(extensionProperties);
3713 vk::Cast(physicalDevice)->getProperties(properties);
3714 }
3715 break;
3716 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES:
3717 {
3718 auto *properties = reinterpret_cast<VkPhysicalDeviceMaintenance3Properties *>(extensionProperties);
3719 vk::Cast(physicalDevice)->getProperties(properties);
3720 }
3721 break;
3722 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES:
3723 {
3724 auto *properties = reinterpret_cast<VkPhysicalDeviceMaintenance4Properties *>(extensionProperties);
3725 vk::Cast(physicalDevice)->getProperties(properties);
3726 }
3727 break;
3728 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES:
3729 {
3730 auto *properties = reinterpret_cast<VkPhysicalDeviceMultiviewProperties *>(extensionProperties);
3731 vk::Cast(physicalDevice)->getProperties(properties);
3732 }
3733 break;
3734 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES:
3735 {
3736 auto *properties = reinterpret_cast<VkPhysicalDevicePointClippingProperties *>(extensionProperties);
3737 vk::Cast(physicalDevice)->getProperties(properties);
3738 }
3739 break;
3740 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES:
3741 {
3742 auto *properties = reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>(extensionProperties);
3743 vk::Cast(physicalDevice)->getProperties(properties);
3744 }
3745 break;
3746 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES:
3747 {
3748 auto *properties = reinterpret_cast<VkPhysicalDeviceSubgroupProperties *>(extensionProperties);
3749 vk::Cast(physicalDevice)->getProperties(properties);
3750 }
3751 break;
3752 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT:
3753 {
3754 auto *properties = reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>(extensionProperties);
3755 vk::Cast(physicalDevice)->getProperties(properties);
3756 }
3757 break;
3758 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES:
3759 {
3760 auto *properties = reinterpret_cast<VkPhysicalDeviceDriverProperties *>(extensionProperties);
3761 vk::Cast(physicalDevice)->getProperties(properties);
3762 }
3763 break;
3764 #ifdef __ANDROID__
3765 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID:
3766 {
3767 auto *properties = reinterpret_cast<VkPhysicalDevicePresentationPropertiesANDROID *>(extensionProperties);
3768 vk::Cast(physicalDevice)->getProperties(properties);
3769 }
3770 break;
3771 #endif
3772 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT:
3773 {
3774 auto *properties = reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT *>(extensionProperties);
3775 vk::Cast(physicalDevice)->getProperties(properties);
3776 }
3777 break;
3778 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT:
3779 {
3780 auto *properties = reinterpret_cast<VkPhysicalDeviceProvokingVertexPropertiesEXT *>(extensionProperties);
3781 vk::Cast(physicalDevice)->getProperties(properties);
3782 }
3783 break;
3784 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES:
3785 {
3786 auto *properties = reinterpret_cast<VkPhysicalDeviceFloatControlsProperties *>(extensionProperties);
3787 vk::Cast(physicalDevice)->getProperties(properties);
3788 }
3789 break;
3790 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES:
3791 {
3792 auto *properties = reinterpret_cast<VkPhysicalDeviceVulkan11Properties *>(extensionProperties);
3793 vk::Cast(physicalDevice)->getProperties(properties);
3794 }
3795 break;
3796 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES:
3797 {
3798 auto *properties = reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties *>(extensionProperties);
3799 vk::Cast(physicalDevice)->getProperties(properties);
3800 }
3801 break;
3802 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES:
3803 {
3804 auto *properties = reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties *>(extensionProperties);
3805 vk::Cast(physicalDevice)->getProperties(properties);
3806 }
3807 break;
3808 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES:
3809 {
3810 auto *properties = reinterpret_cast<VkPhysicalDeviceVulkan12Properties *>(extensionProperties);
3811 vk::Cast(physicalDevice)->getProperties(properties);
3812 }
3813 break;
3814 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES:
3815 {
3816 auto *properties = reinterpret_cast<VkPhysicalDeviceVulkan13Properties *>(extensionProperties);
3817 vk::Cast(physicalDevice)->getProperties(properties);
3818 }
3819 break;
3820 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES:
3821 {
3822 auto *properties = reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties *>(extensionProperties);
3823 vk::Cast(physicalDevice)->getProperties(properties);
3824 }
3825 break;
3826 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES:
3827 {
3828 auto *properties = reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties *>(extensionProperties);
3829 vk::Cast(physicalDevice)->getProperties(properties);
3830 }
3831 break;
3832 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT:
3833 {
3834 auto *properties = reinterpret_cast<VkPhysicalDeviceCustomBorderColorPropertiesEXT *>(extensionProperties);
3835 vk::Cast(physicalDevice)->getProperties(properties);
3836 }
3837 break;
3838 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT:
3839 {
3840 auto *properties = reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>(extensionProperties);
3841 vk::Cast(physicalDevice)->getProperties(properties);
3842 }
3843 break;
3844 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES:
3845 {
3846 auto *properties = reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlProperties *>(extensionProperties);
3847 vk::Cast(physicalDevice)->getProperties(properties);
3848 }
3849 break;
3850 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES:
3851 {
3852 auto *properties = reinterpret_cast<VkPhysicalDeviceInlineUniformBlockProperties *>(extensionProperties);
3853 vk::Cast(physicalDevice)->getProperties(properties);
3854 }
3855 break;
3856 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES:
3857 {
3858 auto *properties = reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentProperties *>(extensionProperties);
3859 vk::Cast(physicalDevice)->getProperties(properties);
3860 }
3861 break;
3862 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES:
3863 {
3864 auto *properties = reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductProperties *>(extensionProperties);
3865 vk::Cast(physicalDevice)->getProperties(properties);
3866 }
3867 break;
3868 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT:
3869 {
3870 auto *properties = reinterpret_cast<VkPhysicalDevicePipelineRobustnessPropertiesEXT *>(extensionProperties);
3871 vk::Cast(physicalDevice)->getProperties(properties);
3872 }
3873 break;
3874 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT:
3875 {
3876 auto *properties = reinterpret_cast<VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT *>(extensionProperties);
3877 vk::Cast(physicalDevice)->getProperties(properties);
3878 }
3879 break;
3880 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT:
3881 {
3882 auto *properties = reinterpret_cast<VkPhysicalDeviceHostImageCopyPropertiesEXT *>(extensionProperties);
3883 vk::Cast(physicalDevice)->getProperties(properties);
3884 }
3885 break;
3886 default:
3887 // "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extenions]"
3888 UNSUPPORTED("pProperties->pNext sType = %s", vk::Stringify(extensionProperties->sType).c_str());
3889 break;
3890 }
3891
3892 extensionProperties = extensionProperties->pNext;
3893 }
3894
3895 vkGetPhysicalDeviceProperties(physicalDevice, &(pProperties->properties));
3896 }
3897
3898 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2 *pFormatProperties)
3899 {
3900 TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkFormatProperties2* pFormatProperties = %p)",
3901 physicalDevice, format, pFormatProperties);
3902
3903 VkBaseOutStructure *extensionProperties = reinterpret_cast<VkBaseOutStructure *>(pFormatProperties->pNext);
3904 while(extensionProperties)
3905 {
3906 switch(extensionProperties->sType)
3907 {
3908 case VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3:
3909 {
3910 auto *properties3 = reinterpret_cast<VkFormatProperties3 *>(extensionProperties);
3911 vk::Cast(physicalDevice)->GetFormatProperties(format, properties3);
3912 }
3913 break;
3914 default:
3915 // "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extenions]"
3916 UNSUPPORTED("pFormatProperties->pNext sType = %s", vk::Stringify(extensionProperties->sType).c_str());
3917 break;
3918 }
3919
3920 extensionProperties = extensionProperties->pNext;
3921 }
3922
3923 vkGetPhysicalDeviceFormatProperties(physicalDevice, format, &(pFormatProperties->formatProperties));
3924 }
3925
3926 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo, VkImageFormatProperties2 *pImageFormatProperties)
3927 {
3928 TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo = %p, VkImageFormatProperties2* pImageFormatProperties = %p)",
3929 physicalDevice, pImageFormatInfo, pImageFormatProperties);
3930
3931 // "If the combination of parameters to vkGetPhysicalDeviceImageFormatProperties is not supported by the implementation
3932 // for use in vkCreateImage, then all members of VkImageFormatProperties will be filled with zero."
3933 memset(&pImageFormatProperties->imageFormatProperties, 0, sizeof(VkImageFormatProperties));
3934
3935 const VkBaseInStructure *extensionFormatInfo = reinterpret_cast<const VkBaseInStructure *>(pImageFormatInfo->pNext);
3936
3937 const VkExternalMemoryHandleTypeFlagBits *handleType = nullptr;
3938 VkImageUsageFlags stencilUsage = 0;
3939 while(extensionFormatInfo)
3940 {
3941 switch(extensionFormatInfo->sType)
3942 {
3943 case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
3944 {
3945 // Per the Vulkan spec on VkImageFormatListCreateInfo:
3946 // "If the pNext chain of VkImageCreateInfo includes a
3947 // VkImageFormatListCreateInfo structure, then that
3948 // structure contains a list of all formats that can be
3949 // used when creating views of this image"
3950 // This limitation does not affect SwiftShader's behavior and
3951 // the Vulkan Validation Layers can detect Views created with a
3952 // format which is not included in that list.
3953 }
3954 break;
3955 case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
3956 {
3957 const VkImageStencilUsageCreateInfo *stencilUsageInfo = reinterpret_cast<const VkImageStencilUsageCreateInfo *>(extensionFormatInfo);
3958 stencilUsage = stencilUsageInfo->stencilUsage;
3959 }
3960 break;
3961 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO:
3962 {
3963 const VkPhysicalDeviceExternalImageFormatInfo *imageFormatInfo = reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo *>(extensionFormatInfo);
3964 handleType = &(imageFormatInfo->handleType);
3965 }
3966 break;
3967 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT:
3968 {
3969 // Explicitly ignored, since VK_EXT_image_drm_format_modifier is not supported
3970 ASSERT(!hasDeviceExtension(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME));
3971 }
3972 break;
3973 default:
3974 UNSUPPORTED("pImageFormatInfo->pNext sType = %s", vk::Stringify(extensionFormatInfo->sType).c_str());
3975 break;
3976 }
3977
3978 extensionFormatInfo = extensionFormatInfo->pNext;
3979 }
3980
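// With the input chain parsed, fill the recognized output structures chained to
// pImageFormatProperties->pNext, then check format support and query the core
// VkImageFormatProperties below.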
3981 VkBaseOutStructure *extensionProperties = reinterpret_cast<VkBaseOutStructure *>(pImageFormatProperties->pNext);
3982
3983 #ifdef __ANDROID__
3984 bool hasAHBUsage = false;
3985 #endif
3986
3987 while(extensionProperties)
3988 {
3989 switch(extensionProperties->sType)
3990 {
3991 case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES:
3992 {
3993 auto *properties = reinterpret_cast<VkExternalImageFormatProperties *>(extensionProperties);
3994 vk::Cast(physicalDevice)->getProperties(handleType, properties);
3995 }
3996 break;
3997 case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES:
3998 {
3999 auto *properties = reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties *>(extensionProperties);
4000 vk::Cast(physicalDevice)->getProperties(properties);
4001 }
4002 break;
4003 case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD:
4004 {
4005 // Explicitly ignored, since VK_AMD_texture_gather_bias_lod is not supported
4006 ASSERT(!hasDeviceExtension(VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME));
4007 }
4008 break;
4009 case VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT:
4010 {
4011 auto *properties = reinterpret_cast<VkHostImageCopyDevicePerformanceQueryEXT *>(extensionProperties);
4012 // Host image copy is equally performant on the host with SwiftShader; it's the same code running on the main thread.
4013 properties->optimalDeviceAccess = VK_TRUE;
4014 properties->identicalMemoryLayout = VK_TRUE;
4015 }
4016 break;
4017 #ifdef __ANDROID__
4018 case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
4019 {
4020 auto *properties = reinterpret_cast<VkAndroidHardwareBufferUsageANDROID *>(extensionProperties);
4021 vk::Cast(physicalDevice)->getProperties(pImageFormatInfo, properties);
4022 hasAHBUsage = true;
4023 }
4024 break;
4025 #endif
4026 default:
4027 UNSUPPORTED("pImageFormatProperties->pNext sType = %s", vk::Stringify(extensionProperties->sType).c_str());
4028 break;
4029 }
4030
4031 extensionProperties = extensionProperties->pNext;
4032 }
4033
4034 vk::Format format = pImageFormatInfo->format;
4035 VkImageType type = pImageFormatInfo->type;
4036 VkImageTiling tiling = pImageFormatInfo->tiling;
4037 VkImageUsageFlags usage = pImageFormatInfo->usage;
4038 VkImageCreateFlags flags = pImageFormatInfo->flags;
4039
4040 if(!vk::Cast(physicalDevice)->isFormatSupported(format, type, tiling, usage, stencilUsage, flags))
4041 {
4042 return VK_ERROR_FORMAT_NOT_SUPPORTED;
4043 }
4044
4045 vk::Cast(physicalDevice)->getImageFormatProperties(format, type, tiling, usage, flags, &pImageFormatProperties->imageFormatProperties);
4046
4047 #ifdef __ANDROID__
4048 if(hasAHBUsage)
4049 {
4050 // AHardwareBuffer_lock may only be called with a single layer.
4051 pImageFormatProperties->imageFormatProperties.maxArrayLayers = 1;
4052 pImageFormatProperties->imageFormatProperties.maxMipLevels = 1;
4053 }
4054 #endif
4055
4056 return VK_SUCCESS;
4057 }
4058
4059 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties)
4060 {
4061 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pQueueFamilyPropertyCount = %p, VkQueueFamilyProperties2* pQueueFamilyProperties = %p)",
4062 physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
4063
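// Standard Vulkan two-call enumeration: a null pQueueFamilyProperties means the caller is
// querying the count; otherwise the properties array is filled.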
4064 if(!pQueueFamilyProperties)
4065 {
4066 *pQueueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();
4067 }
4068 else
4069 {
4070 vk::Cast(physicalDevice)->getQueueFamilyProperties(*pQueueFamilyPropertyCount, pQueueFamilyProperties);
4071 }
4072 }
4073
4074 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
4075 {
4076 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceMemoryProperties2* pMemoryProperties = %p)", physicalDevice, pMemoryProperties);
4077
4078 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pMemoryProperties->pNext);
4079 while(extInfo)
4080 {
4081 UNSUPPORTED("pMemoryProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
4082 extInfo = extInfo->pNext;
4083 }
4084
4085 vkGetPhysicalDeviceMemoryProperties(physicalDevice, &(pMemoryProperties->memoryProperties));
4086 }
4087
4088 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2 *pFormatInfo, uint32_t *pPropertyCount, VkSparseImageFormatProperties2 *pProperties)
4089 {
4090 TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo = %p, uint32_t* pPropertyCount = %p, VkSparseImageFormatProperties2* pProperties = %p)",
4091 physicalDevice, pFormatInfo, pPropertyCount, pProperties);
4092
4093 if(pProperties)
4094 {
4095 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pProperties->pNext);
4096 while(extInfo)
4097 {
4098 UNSUPPORTED("pProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
4099 extInfo = extInfo->pNext;
4100 }
4101 }
4102
4103 // We do not support sparse images.
4104 *pPropertyCount = 0;
4105 }
4106
4107 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolProperties(VkPhysicalDevice physicalDevice, uint32_t *pToolCount, VkPhysicalDeviceToolProperties *pToolProperties)
4108 {
4109 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pToolCount = %p, VkPhysicalDeviceToolProperties* pToolProperties = %p)",
4110 physicalDevice, pToolCount, pToolProperties);
4111
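// SwiftShader reports no tool properties: the count query writes zero, and the enumeration
// path returns success without writing any VkPhysicalDeviceToolProperties entries.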
4112 if(!pToolProperties)
4113 {
4114 *pToolCount = 0;
4115 return VK_SUCCESS;
4116 }
4117
4118 return VK_SUCCESS;
4119 }
4120
4121 VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags)
4122 {
4123 TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, VkCommandPoolTrimFlags flags = %d)",
4124 device, static_cast<void *>(commandPool), flags);
4125
4126 if(flags != 0)
4127 {
4128 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
4129 UNSUPPORTED("flags 0x%08X", int(flags));
4130 }
4131
4132 vk::Cast(commandPool)->trim(flags);
4133 }
4134
4135 VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue)
4136 {
4137 TRACE("(VkDevice device = %p, const VkDeviceQueueInfo2* pQueueInfo = %p, VkQueue* pQueue = %p)",
4138 device, pQueueInfo, pQueue);
4139
4140 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pQueueInfo->pNext);
4141 while(extInfo)
4142 {
4143 UNSUPPORTED("pQueueInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
4144 extInfo = extInfo->pNext;
4145 }
4146
4147 if(pQueueInfo->flags != 0)
4148 {
4149 // The only flag that can be set here is VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
4150 // According to the Vulkan 1.2.132 spec, 4.3.1. Queue Family Properties:
4151 // "VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT specifies that the device queue is a
4152 // protected-capable queue. If the protected memory feature is not enabled,
4153 // the VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT bit of flags must not be set."
4154 UNSUPPORTED("VkPhysicalDeviceVulkan11Features::protectedMemory");
4155 }
4156
4157 vkGetDeviceQueue(device, pQueueInfo->queueFamilyIndex, pQueueInfo->queueIndex, pQueue);
4158 }
4159
4160 VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSamplerYcbcrConversion *pYcbcrConversion)
4161 {
4162 TRACE("(VkDevice device = %p, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSamplerYcbcrConversion* pYcbcrConversion = %p)",
4163 device, pCreateInfo, pAllocator, pYcbcrConversion);
4164
4165 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
4166 while(extInfo)
4167 {
4168 switch(extInfo->sType)
4169 {
4170 #ifdef __ANDROID__
4171 case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID:
4172 break;
4173 #endif
4174 default:
4175 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
4176 break;
4177 }
4178 extInfo = extInfo->pNext;
4179 }
4180
4181 return vk::SamplerYcbcrConversion::Create(pAllocator, pCreateInfo, pYcbcrConversion);
4182 }
4183
4184 VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks *pAllocator)
4185 {
4186 TRACE("(VkDevice device = %p, VkSamplerYcbcrConversion ycbcrConversion = %p, const VkAllocationCallbacks* pAllocator = %p)",
4187 device, static_cast<void *>(ycbcrConversion), pAllocator);
4188
4189 vk::destroy(ycbcrConversion, pAllocator);
4190 }
4191
4192 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
4193 {
4194 TRACE("(VkDevice device = %p, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate = %p)",
4195 device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
4196
4197 if(pCreateInfo->flags != 0)
4198 {
4199 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
4200 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
4201 }
4202
4203 if(pCreateInfo->templateType != VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
4204 {
4205 UNSUPPORTED("pCreateInfo->templateType %d", int(pCreateInfo->templateType));
4206 }
4207
4208 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
4209 while(extInfo)
4210 {
4211 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
4212 extInfo = extInfo->pNext;
4213 }
4214
4215 return vk::DescriptorUpdateTemplate::Create(pAllocator, pCreateInfo, pDescriptorUpdateTemplate);
4216 }
4217
4218 VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks *pAllocator)
4219 {
4220 TRACE("(VkDevice device = %p, VkDescriptorUpdateTemplate descriptorUpdateTemplate = %p, const VkAllocationCallbacks* pAllocator = %p)",
4221 device, static_cast<void *>(descriptorUpdateTemplate), pAllocator);
4222
4223 vk::destroy(descriptorUpdateTemplate, pAllocator);
4224 }
4225
4226 VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void *pData)
4227 {
4228 TRACE("(VkDevice device = %p, VkDescriptorSet descriptorSet = %p, VkDescriptorUpdateTemplate descriptorUpdateTemplate = %p, const void* pData = %p)",
4229 device, static_cast<void *>(descriptorSet), static_cast<void *>(descriptorUpdateTemplate), pData);
4230
4231 vk::Cast(descriptorUpdateTemplate)->updateDescriptorSet(vk::Cast(device), descriptorSet, pData);
4232 }
4233
4234 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo *pExternalBufferInfo, VkExternalBufferProperties *pExternalBufferProperties)
4235 {
4236 TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo = %p, VkExternalBufferProperties* pExternalBufferProperties = %p)",
4237 physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
4238
4239 vk::Cast(physicalDevice)->getProperties(pExternalBufferInfo, pExternalBufferProperties);
4240 }
4241
4242 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo, VkExternalFenceProperties *pExternalFenceProperties)
4243 {
4244 TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo = %p, VkExternalFenceProperties* pExternalFenceProperties = %p)",
4245 physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
4246
4247 vk::Cast(physicalDevice)->getProperties(pExternalFenceInfo, pExternalFenceProperties);
4248 }
4249
4250 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo, VkExternalSemaphoreProperties *pExternalSemaphoreProperties)
4251 {
4252 TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo = %p, VkExternalSemaphoreProperties* pExternalSemaphoreProperties = %p)",
4253 physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
4254
4255 vk::Cast(physicalDevice)->getProperties(pExternalSemaphoreInfo, pExternalSemaphoreProperties);
4256 }
4257
4258 VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo, VkDescriptorSetLayoutSupport *pSupport)
4259 {
4260 TRACE("(VkDevice device = %p, const VkDescriptorSetLayoutCreateInfo* pCreateInfo = %p, VkDescriptorSetLayoutSupport* pSupport = %p)",
4261 device, pCreateInfo, pSupport);
4262
4263 VkBaseOutStructure *layoutSupport = reinterpret_cast<VkBaseOutStructure *>(pSupport->pNext);
4264 while(layoutSupport)
4265 {
4266 switch(layoutSupport->sType)
4267 {
4268 case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT:
4269 break;
4270 default:
4271 UNSUPPORTED("pSupport->pNext sType = %s", vk::Stringify(layoutSupport->sType).c_str());
4272 break;
4273 }
4274
4275 layoutSupport = layoutSupport->pNext;
4276 }
4277
4278 vk::Cast(device)->getDescriptorSetLayoutSupport(pCreateInfo, pSupport);
4279 }
4280
4281 VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlot(VkDevice device, const VkPrivateDataSlotCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPrivateDataSlot *pPrivateDataSlot)
4282 {
4283 TRACE("(VkDevice device = %p, const VkPrivateDataSlotCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkPrivateDataSlot* pPrivateDataSlot = %p)",
4284 device, pCreateInfo, pAllocator, pPrivateDataSlot);
4285
4286 return vk::PrivateData::Create(pAllocator, pCreateInfo, pPrivateDataSlot);
4287 }
4288
4289 VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlot(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks *pAllocator)
4290 {
4291 TRACE("(VkDevice device = %p, VkPrivateDataSlot privateDataSlot = %p, const VkAllocationCallbacks* pAllocator = %p)",
4292 device, static_cast<void *>(privateDataSlot), pAllocator);
4293
4294 vk::Cast(device)->removePrivateDataSlot(vk::Cast(privateDataSlot));
4295 vk::destroy(privateDataSlot, pAllocator);
4296 }
4297
4298 VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateData(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data)
4299 {
4300 TRACE("(VkDevice device = %p, VkObjectType objectType = %d, uint64_t objectHandle = %" PRIu64 ", VkPrivateDataSlot privateDataSlot = %p, uint64_t data = %" PRIu64 ")",
4301 device, objectType, objectHandle, static_cast<void *>(privateDataSlot), data);
4302
4303 return vk::Cast(device)->setPrivateData(objectType, objectHandle, vk::Cast(privateDataSlot), data);
4304 }
4305
4306 VKAPI_ATTR void VKAPI_CALL vkGetPrivateData(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t *pData)
4307 {
4308 	TRACE("(VkDevice device = %p, VkObjectType objectType = %d, uint64_t objectHandle = %" PRIu64 ", VkPrivateDataSlot privateDataSlot = %p, uint64_t* pData = %p)",
4309 device, objectType, objectHandle, static_cast<void *>(privateDataSlot), pData);
4310
4311 vk::Cast(device)->getPrivateData(objectType, objectHandle, vk::Cast(privateDataSlot), pData);
4312 }
4313
4314 VKAPI_ATTR void VKAPI_CALL vkGetDeviceBufferMemoryRequirements(VkDevice device, const VkDeviceBufferMemoryRequirements *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
4315 {
4316 TRACE("(VkDevice device = %p, const VkDeviceBufferMemoryRequirements* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
4317 device, pInfo, pMemoryRequirements);
4318
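	// Buffer memory requirements depend only on the requested size and usage, so they can be
	// computed directly without instantiating a temporary buffer object.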
4319 pMemoryRequirements->memoryRequirements =
4320 vk::Buffer::GetMemoryRequirements(pInfo->pCreateInfo->size, pInfo->pCreateInfo->usage);
4321 }
4322
4323 VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageMemoryRequirements(VkDevice device, const VkDeviceImageMemoryRequirements *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
4324 {
4325 TRACE("(VkDevice device = %p, const VkDeviceImageMemoryRequirements* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
4326 device, pInfo, pMemoryRequirements);
4327
4328 const auto *extInfo = reinterpret_cast<const VkBaseInStructure *>(pInfo->pNext);
4329 while(extInfo)
4330 {
4331 UNSUPPORTED("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
4332 extInfo = extInfo->pNext;
4333 }
4334
4335 // Create a temporary image object to obtain the memory requirements.
4336 // TODO(b/221299948): Reduce overhead by using a lightweight local proxy.
4337 pMemoryRequirements->memoryRequirements = {};
4338 const VkAllocationCallbacks *pAllocator = nullptr;
4339 VkImage image = { VK_NULL_HANDLE };
4340 VkResult result = vk::Image::Create(pAllocator, pInfo->pCreateInfo, &image, vk::Cast(device));
4341 if(result == VK_SUCCESS)
4342 {
4343 vk::Cast(image)->getMemoryRequirements(pMemoryRequirements);
4344 }
4345 vk::destroy(image, pAllocator);
4346 }
4347
4348 VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirements(VkDevice device, const VkDeviceImageMemoryRequirements *pInfo, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
4349 {
4350 TRACE("(VkDevice device = %p, const VkDeviceImageMemoryRequirements* pInfo = %p, uint32_t* pSparseMemoryRequirementCount = %p, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements = %p)",
4351 device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
4352
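	// Sparse binding is not supported, so no sparse memory requirements are reported.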
4353 *pSparseMemoryRequirementCount = 0;
4354 }
4355
4356 VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern)
4357 {
4358 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t lineStippleFactor = %u, uint16_t lineStipplePattern = %u)",
4359 commandBuffer, lineStippleFactor, lineStipplePattern);
4360
4361 static constexpr uint16_t solidLine = 0xFFFFu;
4362 if(lineStipplePattern != solidLine)
4363 {
4364 // VkPhysicalDeviceLineRasterizationFeaturesEXT::stippled*Lines are all set to VK_FALSE and,
4365 // according to the Vulkan spec for VkPipelineRasterizationLineStateCreateInfoEXT:
4366 // "If stippledLineEnable is VK_FALSE, the values of lineStippleFactor and lineStipplePattern are ignored."
4367 		WARN("vkCmdSetLineStippleEXT: line stipple pattern ignored: 0x%04X", lineStipplePattern);
4368 }
4369 }
4370
4371 VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT *pLabelInfo)
4372 {
4373 TRACE("(VkCommandBuffer commandBuffer = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
4374 commandBuffer, pLabelInfo);
4375
4376 vk::Cast(commandBuffer)->beginDebugUtilsLabel(pLabelInfo);
4377 }
4378
4379 VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer)
4380 {
4381 TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);
4382
4383 vk::Cast(commandBuffer)->endDebugUtilsLabel();
4384 }
4385
4386 VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT *pLabelInfo)
4387 {
4388 TRACE("(VkCommandBuffer commandBuffer = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
4389 commandBuffer, pLabelInfo);
4390
4391 vk::Cast(commandBuffer)->insertDebugUtilsLabel(pLabelInfo);
4392 }
4393
4394 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDebugUtilsMessengerEXT *pMessenger)
4395 {
4396 TRACE("(VkInstance instance = %p, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDebugUtilsMessengerEXT* pMessenger = %p)",
4397 instance, pCreateInfo, pAllocator, pMessenger);
4398
4399 if(pCreateInfo->flags != 0)
4400 {
4401 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
4402 UNSUPPORTED("pCreateInfo->flags 0x%08X", int(pCreateInfo->flags));
4403 }
4404
4405 return vk::DebugUtilsMessenger::Create(pAllocator, pCreateInfo, pMessenger);
4406 }
4407
4408 VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks *pAllocator)
4409 {
4410 TRACE("(VkInstance instance = %p, VkDebugUtilsMessengerEXT messenger = %p, const VkAllocationCallbacks* pAllocator = %p)",
4411 instance, static_cast<void *>(messenger), pAllocator);
4412
4413 vk::destroy(messenger, pAllocator);
4414 }
4415
4416 VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT *pLabelInfo)
4417 {
4418 TRACE("(VkQueue queue = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
4419 queue, pLabelInfo);
4420
4421 vk::Cast(queue)->beginDebugUtilsLabel(pLabelInfo);
4422 }
4423
4424 VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT(VkQueue queue)
4425 {
4426 TRACE("(VkQueue queue = %p)", queue);
4427
4428 vk::Cast(queue)->endDebugUtilsLabel();
4429 }
4430
4431 VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT *pLabelInfo)
4432 {
4433 TRACE("(VkQueue queue = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
4434 queue, pLabelInfo);
4435
4436 vk::Cast(queue)->insertDebugUtilsLabel(pLabelInfo);
4437 }
4438
4439 VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT *pNameInfo)
4440 {
4441 TRACE("(VkDevice device = %p, const VkDebugUtilsObjectNameInfoEXT* pNameInfo = %p)",
4442 device, pNameInfo);
4443
4444 return vk::Cast(device)->setDebugUtilsObjectName(pNameInfo);
4445 }
4446
4447 VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT *pTagInfo)
4448 {
4449 TRACE("(VkDevice device = %p, const VkDebugUtilsObjectTagInfoEXT* pTagInfo = %p)",
4450 device, pTagInfo);
4451
4452 return vk::Cast(device)->setDebugUtilsObjectTag(pTagInfo);
4453 }
4454
4455 VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData)
4456 {
4457 TRACE("(VkInstance instance = %p, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity = %d, VkDebugUtilsMessageTypeFlagsEXT messageTypes = %d, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData = %p)",
4458 instance, messageSeverity, messageTypes, pCallbackData);
4459
4460 vk::Cast(instance)->submitDebugUtilsMessage(messageSeverity, messageTypes, pCallbackData);
4461 }
4462
4463 VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToImageEXT(VkDevice device, const VkCopyMemoryToImageInfoEXT *pCopyMemoryToImageInfo)
4464 {
4465 TRACE("(VkDevice device = %p, const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo = %p)",
4466 device, pCopyMemoryToImageInfo);
4467
4468 constexpr auto allRecognizedFlagBits = VK_HOST_IMAGE_COPY_MEMCPY_EXT;
4469 ASSERT(!(pCopyMemoryToImageInfo->flags & ~allRecognizedFlagBits));
4470
4471 vk::Image *dstImage = vk::Cast(pCopyMemoryToImageInfo->dstImage);
4472 for(uint32_t i = 0; i < pCopyMemoryToImageInfo->regionCount; i++)
4473 {
4474 dstImage->copyFromMemory(pCopyMemoryToImageInfo->pRegions[i]);
4475 }
4476
4477 return VK_SUCCESS;
4478 }
4479
4480 VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToMemoryEXT(VkDevice device, const VkCopyImageToMemoryInfoEXT *pCopyImageToMemoryInfo)
4481 {
4482 TRACE("(VkDevice device = %p, const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo = %p)",
4483 device, pCopyImageToMemoryInfo);
4484
4485 constexpr auto allRecognizedFlagBits = VK_HOST_IMAGE_COPY_MEMCPY_EXT;
4486 ASSERT(!(pCopyImageToMemoryInfo->flags & ~allRecognizedFlagBits));
4487
4488 vk::Image *srcImage = vk::Cast(pCopyImageToMemoryInfo->srcImage);
4489 for(uint32_t i = 0; i < pCopyImageToMemoryInfo->regionCount; i++)
4490 {
4491 srcImage->copyToMemory(pCopyImageToMemoryInfo->pRegions[i]);
4492 }
4493
4494 return VK_SUCCESS;
4495 }
4496
4497 VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToImageEXT(VkDevice device, const VkCopyImageToImageInfoEXT *pCopyImageToImageInfo)
4498 {
4499 TRACE("(VkDevice device = %p, const VkCopyImageToImageInfoEXT* pCopyImageToImageInfo = %p)",
4500 device, pCopyImageToImageInfo);
4501
4502 constexpr auto allRecognizedFlagBits = VK_HOST_IMAGE_COPY_MEMCPY_EXT;
4503 ASSERT(!(pCopyImageToImageInfo->flags & ~allRecognizedFlagBits));
4504
4505 vk::Image *srcImage = vk::Cast(pCopyImageToImageInfo->srcImage);
4506 vk::Image *dstImage = vk::Cast(pCopyImageToImageInfo->dstImage);
4507 for(uint32_t i = 0; i < pCopyImageToImageInfo->regionCount; i++)
4508 {
4509 srcImage->copyTo(dstImage, pCopyImageToImageInfo->pRegions[i]);
4510 }
4511
4512 return VK_SUCCESS;
4513 }
4514
4515 VKAPI_ATTR VkResult VKAPI_CALL vkTransitionImageLayoutEXT(VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfoEXT *pTransitions)
4516 {
4517 TRACE("(VkDevice device = %p, uint32_t transitionCount = %u, const VkHostImageLayoutTransitionInfoEXT* pTransitions = %p)",
4518 device, transitionCount, pTransitions);
4519
4520 // This function is a no-op; there are no image layouts in SwiftShader.
4521 return VK_SUCCESS;
4522 }
4523
4524 #ifdef VK_USE_PLATFORM_XCB_KHR
4525 VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
4526 {
4527 TRACE("(VkInstance instance = %p, VkXcbSurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
4528 instance, pCreateInfo, pAllocator, pSurface);
4529
4530 // VUID-VkXcbSurfaceCreateInfoKHR-connection-01310 : connection must point to a valid X11 xcb_connection_t
4531 ASSERT(pCreateInfo->connection);
4532
4533 return vk::XcbSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
4534 }
4535
4536 VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t *connection, xcb_visualid_t visual_id)
4537 {
4538 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, xcb_connection_t* connection = %p, xcb_visualid_t visual_id = %d)",
4539 physicalDevice, int(queueFamilyIndex), connection, int(visual_id));
4540
4541 return VK_TRUE;
4542 }
4543 #endif
4544
4545 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
4546 VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
4547 {
4548 TRACE("(VkInstance instance = %p, VkWaylandSurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
4549 instance, pCreateInfo, pAllocator, pSurface);
4550
4551 return vk::WaylandSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
4552 }
4553
4554 VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display *display)
4555 {
4556 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, struct wl_display* display = %p)",
4557 physicalDevice, int(queueFamilyIndex), display);
4558
4559 return VK_TRUE;
4560 }
4561 #endif
4562
4563 #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
4564 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDirectFBSurfaceEXT(VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
4565 {
4566 TRACE("(VkInstance instance = %p, VkDirectFBSurfaceCreateInfoEXT* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
4567 instance, pCreateInfo, pAllocator, pSurface);
4568
4569 return vk::DirectFBSurfaceEXT::Create(pAllocator, pCreateInfo, pSurface);
4570 }
4571
4572 VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceDirectFBPresentationSupportEXT(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB *dfb)
4573 {
4574 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, IDirectFB* dfb = %p)",
4575 physicalDevice, int(queueFamilyIndex), dfb);
4576
4577 return VK_TRUE;
4578 }
4579 #endif
4580
4581 #ifdef VK_USE_PLATFORM_DISPLAY_KHR
4582 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDisplayModeKHR *pMode)
4583 {
4584 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkDisplayKHR display = %p, VkDisplayModeCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkDisplayModeKHR* pMode = %p)",
4585 physicalDevice, static_cast<void *>(display), pCreateInfo, pAllocator, pMode);
4586
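	// The display mode is fixed (see vkGetDisplayModePropertiesKHR), so there is nothing to create here.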
4587 return VK_SUCCESS;
4588 }
4589
4590 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
4591 {
4592 TRACE("(VkInstance instance = %p, VkDisplaySurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
4593 instance, pCreateInfo, pAllocator, pSurface);
4594
4595 return vk::DisplaySurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
4596 }
4597
4598 VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount, VkDisplayModePropertiesKHR *pProperties)
4599 {
4600 TRACE("(VkPhysicalDevice physicalDevice = %p, VkDisplayKHR display = %p, uint32_t* pPropertyCount = %p, VkDisplayModePropertiesKHR* pProperties = %p)",
4601 physicalDevice, static_cast<void *>(display), pPropertyCount, pProperties);
4602
4603 return vk::DisplaySurfaceKHR::GetDisplayModeProperties(pPropertyCount, pProperties);
4604 }
4605
4606 VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR *pCapabilities)
4607 {
4608 TRACE("(VkPhysicalDevice physicalDevice = %p, VkDisplayModeKHR mode = %p, uint32_t planeIndex = %d, VkDisplayPlaneCapabilitiesKHR* pCapabilities = %p)",
4609 physicalDevice, static_cast<void *>(mode), planeIndex, pCapabilities);
4610
4611 return vk::DisplaySurfaceKHR::GetDisplayPlaneCapabilities(pCapabilities);
4612 }
4613
4614 VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t *pDisplayCount, VkDisplayKHR *pDisplays)
4615 {
4616 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t planeIndex = %d, uint32_t* pDisplayCount = %p, VkDisplayKHR* pDisplays = %p)",
4617 physicalDevice, planeIndex, pDisplayCount, pDisplays);
4618
4619 return vk::DisplaySurfaceKHR::GetDisplayPlaneSupportedDisplays(pDisplayCount, pDisplays);
4620 }
4621
4622 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayPlanePropertiesKHR *pProperties)
4623 {
4624 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pPropertyCount = %p, VkDisplayPlanePropertiesKHR* pProperties = %p)",
4625 physicalDevice, pPropertyCount, pProperties);
4626
4627 return vk::DisplaySurfaceKHR::GetPhysicalDeviceDisplayPlaneProperties(pPropertyCount, pProperties);
4628 }
4629
4630 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayPropertiesKHR *pProperties)
4631 {
4632 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pPropertyCount = %p, VkDisplayPropertiesKHR* pProperties = %p)",
4633 physicalDevice, pPropertyCount, pProperties);
4634
4635 return vk::DisplaySurfaceKHR::GetPhysicalDeviceDisplayProperties(pPropertyCount, pProperties);
4636 }
4637 #endif
4638
4639 #ifdef VK_USE_PLATFORM_MACOS_MVK
4640 VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
4641 {
4642 TRACE("(VkInstance instance = %p, VkMacOSSurfaceCreateInfoMVK* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
4643 instance, pCreateInfo, pAllocator, pSurface);
4644
4645 return vk::MacOSSurfaceMVK::Create(pAllocator, pCreateInfo, pSurface);
4646 }
4647 #endif
4648
4649 #ifdef VK_USE_PLATFORM_METAL_EXT
4650 VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT(VkInstance instance, const VkMetalSurfaceCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
4651 {
4652 TRACE("(VkInstance instance = %p, VkMetalSurfaceCreateInfoEXT* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
4653 instance, pCreateInfo, pAllocator, pSurface);
4654
4655 return vk::MetalSurfaceEXT::Create(pAllocator, pCreateInfo, pSurface);
4656 }
4657 #endif
4658
4659 #ifdef VK_USE_PLATFORM_WIN32_KHR
4660 VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
4661 {
4662 TRACE("(VkInstance instance = %p, VkWin32SurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
4663 instance, pCreateInfo, pAllocator, pSurface);
4664
4665 return vk::Win32SurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
4666 }
4667
4668 VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex)
4669 {
4670 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d)",
4671 physicalDevice, queueFamilyIndex);
4672 return VK_TRUE;
4673 }
4674 #endif
4675
4676 VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
4677 {
4678 TRACE("(VkInstance instance = %p, VkHeadlessSurfaceCreateInfoEXT* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
4679 instance, pCreateInfo, pAllocator, pSurface);
4680
4681 return vk::HeadlessSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
4682 }
4683
4684 #ifndef __ANDROID__
4685 VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks *pAllocator)
4686 {
4687 TRACE("(VkInstance instance = %p, VkSurfaceKHR surface = %p, const VkAllocationCallbacks* pAllocator = %p)",
4688 instance, static_cast<void *>(surface), pAllocator);
4689
4690 vk::destroy(surface, pAllocator);
4691 }
4692
4693 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32 *pSupported)
4694 {
4695 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, VkSurface surface = %p, VkBool32* pSupported = %p)",
4696 physicalDevice, int(queueFamilyIndex), static_cast<void *>(surface), pSupported);
4697
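	// Presentation is supported by every queue family for every surface exposed by SwiftShader.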
4698 *pSupported = VK_TRUE;
4699 return VK_SUCCESS;
4700 }
4701
4702 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR *pSurfaceCapabilities)
4703 {
4704 TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities = %p)",
4705 physicalDevice, static_cast<void *>(surface), pSurfaceCapabilities);
4706
4707 return vk::Cast(surface)->getSurfaceCapabilities(nullptr, pSurfaceCapabilities, nullptr);
4708 }
4709
4710 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, VkSurfaceCapabilities2KHR *pSurfaceCapabilities)
4711 {
4712 TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo = %p, VkSurfaceCapabilities2KHR *pSurfaceCapabilities = %p)",
4713 physicalDevice, pSurfaceInfo, pSurfaceCapabilities);
4714
4715 return vk::Cast(pSurfaceInfo->surface)->getSurfaceCapabilities(pSurfaceInfo->pNext, &pSurfaceCapabilities->surfaceCapabilities, pSurfaceCapabilities->pNext);
4716 }
4717
4718 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pSurfaceFormatCount, VkSurfaceFormatKHR *pSurfaceFormats)
4719 {
4720 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, uint32_t* pSurfaceFormatCount = %p, VkSurfaceFormatKHR* pSurfaceFormats = %p)",
4721 physicalDevice, static_cast<void *>(surface), pSurfaceFormatCount, pSurfaceFormats);
4722
4723 if(!pSurfaceFormats)
4724 {
4725 *pSurfaceFormatCount = vk::Cast(surface)->getSurfaceFormatsCount(nullptr);
4726 return VK_SUCCESS;
4727 }
4728
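	// The surface reports VkSurfaceFormat2KHR entries; query them into a temporary array and
	// copy out only the VkSurfaceFormatKHR portion expected by this entry point.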
4729 std::vector<VkSurfaceFormat2KHR> formats(*pSurfaceFormatCount);
4730
4731 VkResult result = vk::Cast(surface)->getSurfaceFormats(nullptr, pSurfaceFormatCount, formats.data());
4732
4733 if(result == VK_SUCCESS || result == VK_INCOMPLETE)
4734 {
4735 // The value returned in pSurfaceFormatCount is either capped at the original value,
4736 // or is smaller because there aren't that many formats.
4737 ASSERT(*pSurfaceFormatCount <= formats.size());
4738
4739 for(size_t i = 0; i < *pSurfaceFormatCount; ++i)
4740 {
4741 pSurfaceFormats[i] = formats[i].surfaceFormat;
4742 }
4743 }
4744
4745 return result;
4746 }
4747
4748 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, uint32_t *pSurfaceFormatCount, VkSurfaceFormat2KHR *pSurfaceFormats)
4749 {
4750 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo = %p, uint32_t* pSurfaceFormatCount = %p, VkSurfaceFormat2KHR* pSurfaceFormats = %p)",
4751 physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats);
4752
4753 if(!pSurfaceFormats)
4754 {
4755 *pSurfaceFormatCount = vk::Cast(pSurfaceInfo->surface)->getSurfaceFormatsCount(pSurfaceInfo->pNext);
4756 return VK_SUCCESS;
4757 }
4758
4759 return vk::Cast(pSurfaceInfo->surface)->getSurfaceFormats(pSurfaceInfo->pNext, pSurfaceFormatCount, pSurfaceFormats);
4760 }
4761
4762 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pPresentModeCount, VkPresentModeKHR *pPresentModes)
4763 {
4764 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, uint32_t* pPresentModeCount = %p, VkPresentModeKHR* pPresentModes = %p)",
4765 physicalDevice, static_cast<void *>(surface), pPresentModeCount, pPresentModes);
4766
4767 if(!pPresentModes)
4768 {
4769 *pPresentModeCount = vk::Cast(surface)->getPresentModeCount();
4770 return VK_SUCCESS;
4771 }
4772
4773 return vk::Cast(surface)->getPresentModes(pPresentModeCount, pPresentModes);
4774 }
4775
4776 VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain)
4777 {
4778 TRACE("(VkDevice device = %p, const VkSwapchainCreateInfoKHR* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSwapchainKHR* pSwapchain = %p)",
4779 device, pCreateInfo, pAllocator, pSwapchain);
4780
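	// Per the swapchain creation rules, oldSwapchain is retired even if creation of the new swapchain fails.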
4781 if(pCreateInfo->oldSwapchain)
4782 {
4783 vk::Cast(pCreateInfo->oldSwapchain)->retire();
4784 }
4785
4786 if(vk::Cast(pCreateInfo->surface)->hasAssociatedSwapchain())
4787 {
4788 return VK_ERROR_NATIVE_WINDOW_IN_USE_KHR;
4789 }
4790
4791 VkResult status = vk::SwapchainKHR::Create(pAllocator, pCreateInfo, pSwapchain);
4792
4793 if(status != VK_SUCCESS)
4794 {
4795 return status;
4796 }
4797
4798 auto *swapchain = vk::Cast(*pSwapchain);
4799 status = swapchain->createImages(device, pCreateInfo);
4800
4801 if(status != VK_SUCCESS)
4802 {
4803 vk::destroy(*pSwapchain, pAllocator);
4804 return status;
4805 }
4806
4807 vk::Cast(pCreateInfo->surface)->associateSwapchain(swapchain);
4808
4809 return VK_SUCCESS;
4810 }
4811
4812 VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator)
4813 {
4814 TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, const VkAllocationCallbacks* pAllocator = %p)",
4815 device, static_cast<void *>(swapchain), pAllocator);
4816
4817 vk::destroy(swapchain, pAllocator);
4818 }
4819
4820 VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages)
4821 {
4822 TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, uint32_t* pSwapchainImageCount = %p, VkImage* pSwapchainImages = %p)",
4823 device, static_cast<void *>(swapchain), pSwapchainImageCount, pSwapchainImages);
4824
4825 if(!pSwapchainImages)
4826 {
4827 *pSwapchainImageCount = vk::Cast(swapchain)->getImageCount();
4828 return VK_SUCCESS;
4829 }
4830
4831 return vk::Cast(swapchain)->getImages(pSwapchainImageCount, pSwapchainImages);
4832 }
4833
4834 VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex)
4835 {
4836 TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, uint64_t timeout = %" PRIu64 ", VkSemaphore semaphore = %p, VkFence fence = %p, uint32_t* pImageIndex = %p)",
4837 device, static_cast<void *>(swapchain), timeout, static_cast<void *>(semaphore), static_cast<void *>(fence), pImageIndex);
4838
4839 return vk::Cast(swapchain)->getNextImage(timeout, vk::DynamicCast<vk::BinarySemaphore>(semaphore), vk::Cast(fence), pImageIndex);
4840 }
4841
4842 VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo)
4843 {
4844 TRACE("(VkQueue queue = %p, const VkPresentInfoKHR* pPresentInfo = %p)",
4845 queue, pPresentInfo);
4846
4847 return vk::Cast(queue)->present(pPresentInfo);
4848 }
4849
4850 VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo, uint32_t *pImageIndex)
4851 {
4852 	TRACE("(VkDevice device = %p, const VkAcquireNextImageInfoKHR *pAcquireInfo = %p, uint32_t *pImageIndex = %p)",
4853 device, pAcquireInfo, pImageIndex);
4854
4855 return vk::Cast(pAcquireInfo->swapchain)->getNextImage(pAcquireInfo->timeout, vk::DynamicCast<vk::BinarySemaphore>(pAcquireInfo->semaphore), vk::Cast(pAcquireInfo->fence), pImageIndex);
4856 }
4857
4858 VKAPI_ATTR VkResult VKAPI_CALL vkReleaseSwapchainImagesEXT(VkDevice device, const VkReleaseSwapchainImagesInfoEXT *pReleaseInfo)
4859 {
4860 	TRACE("(VkDevice device = %p, const VkReleaseSwapchainImagesInfoEXT *pReleaseInfo = %p)",
4861 device, pReleaseInfo);
4862
4863 return vk::Cast(pReleaseInfo->swapchain)->releaseImages(pReleaseInfo->imageIndexCount, pReleaseInfo->pImageIndices);
4864 }
4865
4866 VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR *pDeviceGroupPresentCapabilities)
4867 {
4868 TRACE("(VkDevice device = %p, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities = %p)",
4869 device, pDeviceGroupPresentCapabilities);
4870
4871 for(unsigned int i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; i++)
4872 {
4873 // The only real physical device in the presentation group is device 0,
4874 // and it can present to itself.
4875 pDeviceGroupPresentCapabilities->presentMask[i] = (i == 0) ? 1 : 0;
4876 }
4877
4878 pDeviceGroupPresentCapabilities->modes = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;
4879
4880 return VK_SUCCESS;
4881 }
4882
4883 VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR *pModes)
4884 {
4885 TRACE("(VkDevice device = %p, VkSurfaceKHR surface = %p, VkDeviceGroupPresentModeFlagsKHR *pModes = %p)",
4886 device, static_cast<void *>(surface), pModes);
4887
4888 *pModes = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;
4889 return VK_SUCCESS;
4890 }
4891
4892 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pRectCount, VkRect2D *pRects)
4893 {
4894 TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, uint32_t* pRectCount = %p, VkRect2D* pRects = %p)",
4895 physicalDevice, static_cast<void *>(surface), pRectCount, pRects);
4896
4897 return vk::Cast(surface)->getPresentRectangles(pRectCount, pRects);
4898 }
4899
4900 #endif // ! __ANDROID__
4901
4902 #ifdef __ANDROID__
4903
4904 VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainGrallocUsage2ANDROID(VkDevice device, VkFormat format, VkImageUsageFlags imageUsage, VkSwapchainImageUsageFlagsANDROID swapchainUsage, uint64_t *grallocConsumerUsage, uint64_t *grallocProducerUsage)
4905 {
4906 	TRACE("(VkDevice device = %p, VkFormat format = %d, VkImageUsageFlags imageUsage = %d, VkSwapchainImageUsageFlagsANDROID swapchainUsage = %d, uint64_t* grallocConsumerUsage = %p, uint64_t* grallocProducerUsage = %p)",
4907 device, format, imageUsage, swapchainUsage, grallocConsumerUsage, grallocProducerUsage);
4908
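	// SwiftShader renders on the CPU, so the producer side only needs CPU write access to the
	// gralloc buffer; no additional consumer usage bits are required by the driver.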
4909 *grallocConsumerUsage = 0;
4910 *grallocProducerUsage = GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN;
4911
4912 return VK_SUCCESS;
4913 }
4914
4915 VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainGrallocUsageANDROID(VkDevice device, VkFormat format, VkImageUsageFlags imageUsage, int *grallocUsage)
4916 {
4917 TRACE("(VkDevice device = %p, VkFormat format = %d, VkImageUsageFlags imageUsage = %d, int* grallocUsage = %p)",
4918 device, format, imageUsage, grallocUsage);
4919
4920 *grallocUsage = GRALLOC_USAGE_SW_WRITE_OFTEN;
4921
4922 return VK_SUCCESS;
4923 }
4924
4925 VKAPI_ATTR VkResult VKAPI_CALL vkAcquireImageANDROID(VkDevice device, VkImage image, int nativeFenceFd, VkSemaphore semaphore, VkFence fence)
4926 {
4927 TRACE("(VkDevice device = %p, VkImage image = %p, int nativeFenceFd = %d, VkSemaphore semaphore = %p, VkFence fence = %p)",
4928 device, static_cast<void *>(image), nativeFenceFd, static_cast<void *>(semaphore), static_cast<void *>(fence));
4929
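	// The driver takes ownership of the native fence: wait for it to signal, then close the file descriptor.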
4930 if(nativeFenceFd >= 0)
4931 {
4932 sync_wait(nativeFenceFd, -1);
4933 close(nativeFenceFd);
4934 }
4935
4936 if(fence != VK_NULL_HANDLE)
4937 {
4938 vk::Cast(fence)->complete();
4939 }
4940
4941 if(semaphore != VK_NULL_HANDLE)
4942 {
4943 vk::DynamicCast<vk::BinarySemaphore>(semaphore)->signal();
4944 }
4945
4946 return VK_SUCCESS;
4947 }
4948
4949 VKAPI_ATTR VkResult VKAPI_CALL vkQueueSignalReleaseImageANDROID(VkQueue queue, uint32_t waitSemaphoreCount, const VkSemaphore *pWaitSemaphores, VkImage image, int *pNativeFenceFd)
4950 {
4951 TRACE("(VkQueue queue = %p, uint32_t waitSemaphoreCount = %d, const VkSemaphore* pWaitSemaphores = %p, VkImage image = %p, int* pNativeFenceFd = %p)",
4952 queue, waitSemaphoreCount, pWaitSemaphores, static_cast<void *>(image), pNativeFenceFd);
4953
4954 // This is a hack to deal with screen tearing for now.
4955 // Need to correctly implement threading using VkSemaphore
4956 // to get rid of it. b/132458423
4957 vkQueueWaitIdle(queue);
4958
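	// Returning -1 indicates there is no fence for the caller to wait on; the image is ready
	// for external use once the queue is idle.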
4959 *pNativeFenceFd = -1;
4960
4961 return vk::Cast(image)->prepareForExternalUseANDROID();
4962 }
4963 #endif // __ANDROID__
4964 }
4965