1 // Copyright 2018 The SwiftShader Authors. All Rights Reserved.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 //    http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14 
15 #include "VkBuffer.hpp"
16 #include "VkBufferView.hpp"
17 #include "VkCommandBuffer.hpp"
18 #include "VkCommandPool.hpp"
19 #include "VkConfig.hpp"
20 #include "VkDebugUtilsMessenger.hpp"
21 #include "VkDescriptorPool.hpp"
22 #include "VkDescriptorSetLayout.hpp"
23 #include "VkDescriptorUpdateTemplate.hpp"
24 #include "VkDestroy.hpp"
25 #include "VkDevice.hpp"
26 #include "VkDeviceMemory.hpp"
27 #include "VkEvent.hpp"
28 #include "VkFence.hpp"
29 #include "VkFramebuffer.hpp"
30 #include "VkGetProcAddress.hpp"
31 #include "VkImage.hpp"
32 #include "VkImageView.hpp"
33 #include "VkInstance.hpp"
34 #include "VkPhysicalDevice.hpp"
35 #include "VkPipeline.hpp"
36 #include "VkPipelineCache.hpp"
37 #include "VkPipelineLayout.hpp"
38 #include "VkQueryPool.hpp"
39 #include "VkQueue.hpp"
40 #include "VkRenderPass.hpp"
41 #include "VkSampler.hpp"
42 #include "VkSemaphore.hpp"
43 #include "VkShaderModule.hpp"
44 #include "VkStringify.hpp"
45 #include "VkTimelineSemaphore.hpp"
46 
47 #include "System/Debug.hpp"
48 
49 #if defined(VK_USE_PLATFORM_METAL_EXT) || defined(VK_USE_PLATFORM_MACOS_MVK)
50 #	include "WSI/MetalSurface.hpp"
51 #endif
52 
53 #ifdef VK_USE_PLATFORM_XCB_KHR
54 #	include "WSI/XcbSurfaceKHR.hpp"
55 #endif
56 
57 #ifdef VK_USE_PLATFORM_XLIB_KHR
58 #	include "WSI/XlibSurfaceKHR.hpp"
59 #endif
60 
61 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
62 #	include "WSI/WaylandSurfaceKHR.hpp"
63 #endif
64 
65 #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
66 #	include "WSI/DirectFBSurfaceEXT.hpp"
67 #endif
68 
69 #ifdef VK_USE_PLATFORM_DISPLAY_KHR
70 #	include "WSI/DisplaySurfaceKHR.hpp"
71 #endif
72 
73 #ifdef VK_USE_PLATFORM_WIN32_KHR
74 #	include "WSI/Win32SurfaceKHR.hpp"
75 #endif
76 
77 #include "WSI/HeadlessSurfaceKHR.hpp"
78 
79 #ifdef __ANDROID__
80 #	include "commit.h"
81 #	include "System/GrallocAndroid.hpp"
82 #	include <android/log.h>
83 #	include <hardware/gralloc1.h>
84 #	include <sync/sync.h>
85 #	ifdef SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
86 #		include "VkDeviceMemoryExternalAndroid.hpp"
87 #	endif
88 #endif
89 
90 #include "WSI/VkSwapchainKHR.hpp"
91 
92 #include "Reactor/Nucleus.hpp"
93 
94 #include "marl/mutex.h"
95 #include "marl/scheduler.h"
96 #include "marl/thread.h"
97 #include "marl/tsa.h"
98 
99 #include "System/CPUID.hpp"
100 
101 #include <algorithm>
102 #include <cinttypes>
103 #include <cstring>
104 #include <map>
105 #include <string>
106 
107 namespace {
108 
109 // Enable commit_id.py and #include commit.h for other platforms.
110 #if defined(__ANDROID__) && defined(ENABLE_BUILD_VERSION_OUTPUT)
111 void logBuildVersionInformation()
112 {
113 	// TODO(b/144093703): Don't call __android_log_print() directly
114 	__android_log_print(ANDROID_LOG_INFO, "SwiftShader", "SwiftShader Version: %s", SWIFTSHADER_VERSION_STRING);
115 }
116 #endif  // __ANDROID__ && ENABLE_BUILD_VERSION_OUTPUT
117 
118 // setReactorDefaultConfig() sets the default configuration for Vulkan's use of
119 // Reactor.
120 void setReactorDefaultConfig()
121 {
122 	auto cfg = rr::Config::Edit()
123 	               .set(rr::Optimization::Level::Default)
124 	               .clearOptimizationPasses()
125 	               .add(rr::Optimization::Pass::ScalarReplAggregates)
126 	               .add(rr::Optimization::Pass::SCCP)
127 	               .add(rr::Optimization::Pass::CFGSimplification)
128 	               .add(rr::Optimization::Pass::EarlyCSEPass)
129 	               .add(rr::Optimization::Pass::CFGSimplification)
130 	               .add(rr::Optimization::Pass::InstructionCombining);
131 
132 	rr::Nucleus::adjustDefaultConfig(cfg);
133 }
134 
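// setCPUDefaults() enables the SSE instruction-set extensions (SSE through SSE4.1)
// that SwiftShader's CPU backend can make use of.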
135 void setCPUDefaults()
136 {
137 	sw::CPUID::setEnableSSE4_1(true);
138 	sw::CPUID::setEnableSSSE3(true);
139 	sw::CPUID::setEnableSSE3(true);
140 	sw::CPUID::setEnableSSE2(true);
141 	sw::CPUID::setEnableSSE(true);
142 }
143 
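// getOrCreateScheduler() lazily creates the marl scheduler shared by all devices
// created by this driver. Only a weak_ptr is retained, so the scheduler is destroyed
// once the last device releases it and re-created on the next request.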
144 std::shared_ptr<marl::Scheduler> getOrCreateScheduler()
145 {
146 	struct Scheduler
147 	{
148 		marl::mutex mutex;
149 		std::weak_ptr<marl::Scheduler> weakptr GUARDED_BY(mutex);
150 	};
151 
152 	static Scheduler scheduler;
153 
154 	marl::lock lock(scheduler.mutex);
155 	auto sptr = scheduler.weakptr.lock();
156 	if(!sptr)
157 	{
158 		marl::Scheduler::Config cfg;
159 		cfg.setWorkerThreadCount(std::min<size_t>(marl::Thread::numLogicalCPUs(), 16));
160 		cfg.setWorkerThreadInitializer([](int) {
161 			sw::CPUID::setFlushToZero(true);
162 			sw::CPUID::setDenormalsAreZero(true);
163 		});
164 		sptr = std::make_shared<marl::Scheduler>(cfg);
165 		scheduler.weakptr = sptr;
166 	}
167 	return sptr;
168 }
169 
170 // initializeLibrary() is called by vkCreateInstance() to perform one-off global
171 // initialization of the swiftshader driver.
172 void initializeLibrary()
173 {
174 	static bool doOnce = [] {
175 #if defined(__ANDROID__) && defined(ENABLE_BUILD_VERSION_OUTPUT)
176 		logBuildVersionInformation();
177 #endif  // __ANDROID__ && ENABLE_BUILD_VERSION_OUTPUT
178 		setReactorDefaultConfig();
179 		setCPUDefaults();
180 		return true;
181 	}();
182 	(void)doOnce;
183 }
184 
185 template<class T>
186 void ValidateRenderPassPNextChain(VkDevice device, const T *pCreateInfo)
187 {
188 	const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
189 
190 	while(extensionCreateInfo)
191 	{
192 		switch(extensionCreateInfo->sType)
193 		{
194 			case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO:
195 			{
196 				const VkRenderPassInputAttachmentAspectCreateInfo *inputAttachmentAspectCreateInfo = reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo *>(extensionCreateInfo);
197 
198 				for(uint32_t i = 0; i < inputAttachmentAspectCreateInfo->aspectReferenceCount; i++)
199 				{
200 					const auto &aspectReference = inputAttachmentAspectCreateInfo->pAspectReferences[i];
201 					ASSERT(aspectReference.subpass < pCreateInfo->subpassCount);
202 					const auto &subpassDescription = pCreateInfo->pSubpasses[aspectReference.subpass];
203 					ASSERT(aspectReference.inputAttachmentIndex < subpassDescription.inputAttachmentCount);
204 					const auto &attachmentReference = subpassDescription.pInputAttachments[aspectReference.inputAttachmentIndex];
205 					if(attachmentReference.attachment != VK_ATTACHMENT_UNUSED)
206 					{
207 						// If the pNext chain includes an instance of VkRenderPassInputAttachmentAspectCreateInfo, for any
208 						// element of the pInputAttachments member of any element of pSubpasses where the attachment member
209 						// is not VK_ATTACHMENT_UNUSED, the aspectMask member of the corresponding element of
210 						// VkRenderPassInputAttachmentAspectCreateInfo::pAspectReferences must only include aspects that are
211 						// present in images of the format specified by the element of pAttachments at attachment
212 						vk::Format format(pCreateInfo->pAttachments[attachmentReference.attachment].format);
213 						bool isDepth = format.isDepth();
214 						bool isStencil = format.isStencil();
215 						ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) || (!isDepth && !isStencil));
216 						ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) || isDepth);
217 						ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) || isStencil);
218 					}
219 				}
220 			}
221 			break;
222 			case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO:
223 			{
224 				const VkRenderPassMultiviewCreateInfo *multiviewCreateInfo = reinterpret_cast<const VkRenderPassMultiviewCreateInfo *>(extensionCreateInfo);
225 				ASSERT((multiviewCreateInfo->subpassCount == 0) || (multiviewCreateInfo->subpassCount == pCreateInfo->subpassCount));
226 				ASSERT((multiviewCreateInfo->dependencyCount == 0) || (multiviewCreateInfo->dependencyCount == pCreateInfo->dependencyCount));
227 
228 				bool zeroMask = (multiviewCreateInfo->pViewMasks[0] == 0);
229 				for(uint32_t i = 1; i < multiviewCreateInfo->subpassCount; i++)
230 				{
231 					ASSERT((multiviewCreateInfo->pViewMasks[i] == 0) == zeroMask);
232 				}
233 
234 				if(zeroMask)
235 				{
236 					ASSERT(multiviewCreateInfo->correlationMaskCount == 0);
237 				}
238 
239 				for(uint32_t i = 0; i < multiviewCreateInfo->dependencyCount; i++)
240 				{
241 					const auto &dependency = pCreateInfo->pDependencies[i];
242 					if(multiviewCreateInfo->pViewOffsets[i] != 0)
243 					{
244 						ASSERT(dependency.srcSubpass != dependency.dstSubpass);
245 						ASSERT(dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT);
246 					}
247 					if(zeroMask)
248 					{
249 						ASSERT(!(dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT));
250 					}
251 				}
252 
253 				// If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo,
254 				// each element of its pViewMask member must not include a bit at a position
255 				// greater than the value of VkPhysicalDeviceLimits::maxFramebufferLayers
256 				// pViewMask is a 32 bit value. If maxFramebufferLayers > 32, it's impossible
257 				// for pViewMask to contain a bit at an illegal position
258 				// Note: Verify pViewMask values instead if we hit this assert
259 				ASSERT(vk::Cast(device)->getPhysicalDevice()->getProperties().limits.maxFramebufferLayers >= 32);
260 			}
261 			break;
262 			default:
263 				LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
264 				break;
265 		}
266 
267 		extensionCreateInfo = extensionCreateInfo->pNext;
268 	}
269 }
270 
271 }  // namespace
272 
273 extern "C" {
274 VK_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char *pName)
275 {
276 	TRACE("(VkInstance instance = %p, const char* pName = %p)", instance, pName);
277 
278 	return vk::GetInstanceProcAddr(vk::Cast(instance), pName);
279 }
280 
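// Called by the Vulkan loader to negotiate the loader/ICD interface version.
// SwiftShader reports version 3 of the interface.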
281 VK_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion)
282 {
283 	*pSupportedVersion = 3;
284 	return VK_SUCCESS;
285 }
286 
287 #if VK_USE_PLATFORM_FUCHSIA
288 
289 // This symbol must be exported by a Fuchsia Vulkan ICD. The Vulkan loader will
290 // call it, passing the address of a global function pointer that can later be
291 // used at runtime to connect to Fuchsia FIDL services, as required by certain
292 // extensions. See https://fxbug.dev/13095 for more details.
293 //
294 // NOTE: This entry point has not been upstreamed to Khronos yet, which reserves
295 //       all symbols starting with vk_icd. See https://fxbug.dev/13074 which
296 //       tracks upstreaming progress.
297 VK_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdInitializeConnectToServiceCallback(
298     PFN_vkConnectToService callback)
299 {
300 	TRACE("(callback = %p)", callback);
301 	vk::icdFuchsiaServiceConnectCallback = callback;
302 	return VK_SUCCESS;
303 }
304 
305 #endif  // VK_USE_PLATFORM_FUCHSIA
306 
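// ExtensionProperties augments VkExtensionProperties with a flag indicating whether
// the extension is actually usable at runtime (e.g. whether libxcb could be loaded).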
307 struct ExtensionProperties : public VkExtensionProperties
308 {
309 	bool isSupported = true;
310 };
311 
312 static const ExtensionProperties instanceExtensionProperties[] = {
313 	{ { VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME, VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION } },
314 	{ { VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION } },
315 	{ { VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION } },
316 	{ { VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION } },
317 	{ { VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION } },
318 	{ { VK_EXT_DEBUG_UTILS_EXTENSION_NAME, VK_EXT_DEBUG_UTILS_SPEC_VERSION } },
319 	{ { VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME, VK_EXT_HEADLESS_SURFACE_SPEC_VERSION } },
320 #ifndef __ANDROID__
321 	{ { VK_KHR_SURFACE_EXTENSION_NAME, VK_KHR_SURFACE_SPEC_VERSION } },
322 #endif
323 #ifdef VK_USE_PLATFORM_XCB_KHR
324 	{ { VK_KHR_XCB_SURFACE_EXTENSION_NAME, VK_KHR_XCB_SURFACE_SPEC_VERSION }, vk::XcbSurfaceKHR::hasLibXCB() },
325 #endif
326 #ifdef VK_USE_PLATFORM_XLIB_KHR
327 	{ { VK_KHR_XLIB_SURFACE_EXTENSION_NAME, VK_KHR_XLIB_SURFACE_SPEC_VERSION }, static_cast<bool>(libX11) },
328 #endif
329 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
330 	{ { VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, VK_KHR_WAYLAND_SURFACE_SPEC_VERSION } },
331 #endif
332 #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
333 	{ { VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME, VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION } },
334 #endif
335 #ifdef VK_USE_PLATFORM_DISPLAY_KHR
336 	{ { VK_KHR_DISPLAY_EXTENSION_NAME, VK_KHR_DISPLAY_SPEC_VERSION } },
337 #endif
338 #ifdef VK_USE_PLATFORM_MACOS_MVK
339 	{ { VK_MVK_MACOS_SURFACE_EXTENSION_NAME, VK_MVK_MACOS_SURFACE_SPEC_VERSION } },
340 #endif
341 #ifdef VK_USE_PLATFORM_METAL_EXT
342 	{ { VK_EXT_METAL_SURFACE_EXTENSION_NAME, VK_EXT_METAL_SURFACE_SPEC_VERSION } },
343 #endif
344 #ifdef VK_USE_PLATFORM_WIN32_KHR
345 	{ { VK_KHR_WIN32_SURFACE_EXTENSION_NAME, VK_KHR_WIN32_SURFACE_SPEC_VERSION } },
346 #endif
347 };
348 
349 static const ExtensionProperties deviceExtensionProperties[] = {
350 	{ { VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME, VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION } },
351 	// Vulkan 1.1 promoted extensions
352 	{ { VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, VK_KHR_BIND_MEMORY_2_SPEC_VERSION } },
353 	{ { VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME, VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION } },
354 	{ { VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION } },
355 	{ { VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME, VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION } },
356 	{ { VK_KHR_DEVICE_GROUP_EXTENSION_NAME, VK_KHR_DEVICE_GROUP_SPEC_VERSION } },
357 	{ { VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME, VK_KHR_EXTERNAL_FENCE_SPEC_VERSION } },
358 	{ { VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION } },
359 	{ { VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION } },
360 	{ { VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION } },
361 	{ { VK_KHR_MAINTENANCE1_EXTENSION_NAME, VK_KHR_MAINTENANCE1_SPEC_VERSION } },
362 	{ { VK_KHR_MAINTENANCE2_EXTENSION_NAME, VK_KHR_MAINTENANCE2_SPEC_VERSION } },
363 	{ { VK_KHR_MAINTENANCE3_EXTENSION_NAME, VK_KHR_MAINTENANCE3_SPEC_VERSION } },
364 	{ { VK_KHR_MULTIVIEW_EXTENSION_NAME, VK_KHR_MULTIVIEW_SPEC_VERSION } },
365 	{ { VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME, VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION } },
366 	{ { VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION } },
367 	{ { VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME, VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION } },
368 	// Only 1.1 core version of this is supported. The extension has additional requirements
369 	//{{ VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME, VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION }},
370 	{ { VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME, VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION } },
371 	// Only 1.1 core version of this is supported. The extension has additional requirements
372 	//{{ VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME, VK_KHR_VARIABLE_POINTERS_SPEC_VERSION }},
373 	{ { VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION } },
374 	// The following extension is only used to add support for Bresenham lines
375 	{ { VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME, VK_EXT_LINE_RASTERIZATION_SPEC_VERSION } },
376 	// The following extension is used by ANGLE to emulate blitting the stencil buffer
377 	{ { VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION } },
378 	{ { VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME, VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION } },
379 #ifndef __ANDROID__
380 	// We fully support the KHR_swapchain v70 additions, so just track the spec version.
381 	{ { VK_KHR_SWAPCHAIN_EXTENSION_NAME, VK_KHR_SWAPCHAIN_SPEC_VERSION } },
382 #else
383 	// We only support V7 of this extension. Missing functionality: in V8,
384 	// it becomes possible to pass a VkNativeBufferANDROID structure to
385 	// vkBindImageMemory2. Android's swapchain implementation does this in
386 	// order to support passing VkBindImageMemorySwapchainInfoKHR
387 	// (from KHR_swapchain v70) to vkBindImageMemory2.
388 	{ { VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME, 7 } },
389 #endif
390 #if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
391 	{ { VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION } },
392 #endif
393 #if SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
394 	{ { VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION } },
395 #endif
396 #if SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
397 	{ { VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION } },
398 #endif
399 
400 	{ { VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME, VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION } },
401 
402 #if VK_USE_PLATFORM_FUCHSIA
403 	{ { VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME, VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION } },
404 	{ { VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME, VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION } },
405 #endif
406 	{ { VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME, VK_EXT_PROVOKING_VERTEX_SPEC_VERSION } },
407 #if !defined(__ANDROID__)
408 	{ { VK_GOOGLE_SAMPLER_FILTERING_PRECISION_EXTENSION_NAME, VK_GOOGLE_SAMPLER_FILTERING_PRECISION_SPEC_VERSION } },
409 #endif
410 	{ { VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME, VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION } },
411 #ifdef SWIFTSHADER_DEVICE_MEMORY_REPORT
412 	{ { VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME, VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION } },
413 #endif  // SWIFTSHADER_DEVICE_MEMORY_REPORT
414 	// Vulkan 1.2 promoted extensions
415 	{ { VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME, VK_EXT_HOST_QUERY_RESET_SPEC_VERSION } },
416 	{ { VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME, VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION } },
417 	{ { VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME, VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION } },
418 	{ { VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME, VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION } },
419 	{ { VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME, VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION } },
420 	{ { VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME, VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION } },
421 	{ { VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME, VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION } },
422 	{ { VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME, VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION } },
423 	{ { VK_KHR_SPIRV_1_4_EXTENSION_NAME, VK_KHR_SPIRV_1_4_SPEC_VERSION } },
424 	{ { VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME, VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION } },
425 	{ { VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME, VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION } },
426 };
427 
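// Counts the entries of an extension table that are marked as supported.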
428 static uint32_t numSupportedExtensions(const ExtensionProperties *extensionProperties, uint32_t extensionPropertiesCount)
429 {
430 	uint32_t count = 0;
431 
432 	for(uint32_t i = 0; i < extensionPropertiesCount; i++)
433 	{
434 		if(extensionProperties[i].isSupported)
435 		{
436 			count++;
437 		}
438 	}
439 
440 	return count;
441 }
442 
443 static uint32_t numInstanceSupportedExtensions()
444 {
445 	return numSupportedExtensions(instanceExtensionProperties, sizeof(instanceExtensionProperties) / sizeof(instanceExtensionProperties[0]));
446 }
447 
448 static uint32_t numDeviceSupportedExtensions()
449 {
450 	return numSupportedExtensions(deviceExtensionProperties, sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0]));
451 }
452 
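// Returns whether the named extension is present in the given table and supported.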
453 static bool hasExtension(const char *extensionName, const ExtensionProperties *extensionProperties, uint32_t extensionPropertiesCount)
454 {
455 	for(uint32_t i = 0; i < extensionPropertiesCount; i++)
456 	{
457 		if(strcmp(extensionName, extensionProperties[i].extensionName) == 0)
458 		{
459 			return extensionProperties[i].isSupported;
460 		}
461 	}
462 
463 	return false;
464 }
465 
466 static bool hasInstanceExtension(const char *extensionName)
467 {
468 	return hasExtension(extensionName, instanceExtensionProperties, sizeof(instanceExtensionProperties) / sizeof(instanceExtensionProperties[0]));
469 }
470 
471 static bool hasDeviceExtension(const char *extensionName)
472 {
473 	return hasExtension(extensionName, deviceExtensionProperties, sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0]));
474 }
475 
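// Copies up to 'toCopy' supported entries from the extension table into pProperties,
// skipping entries whose isSupported flag is false.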
476 static void copyExtensions(VkExtensionProperties *pProperties, uint32_t toCopy, const ExtensionProperties *extensionProperties, uint32_t extensionPropertiesCount)
477 {
478 	for(uint32_t i = 0, j = 0; i < toCopy; i++, j++)
479 	{
480 		while((j < extensionPropertiesCount) && !extensionProperties[j].isSupported)
481 		{
482 			j++;
483 		}
484 		if(j < extensionPropertiesCount)
485 		{
486 			pProperties[i] = extensionProperties[j];
487 		}
488 	}
489 }
490 
491 static void copyInstanceExtensions(VkExtensionProperties *pProperties, uint32_t toCopy)
492 {
493 	copyExtensions(pProperties, toCopy, instanceExtensionProperties, sizeof(instanceExtensionProperties) / sizeof(instanceExtensionProperties[0]));
494 }
495 
496 static void copyDeviceExtensions(VkExtensionProperties *pProperties, uint32_t toCopy)
497 {
498 	copyExtensions(pProperties, toCopy, deviceExtensionProperties, sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0]));
499 }
500 
501 VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkInstance *pInstance)
502 {
503 	TRACE("(const VkInstanceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkInstance* pInstance = %p)",
504 	      pCreateInfo, pAllocator, pInstance);
505 
506 	initializeLibrary();
507 
508 	if(pCreateInfo->flags != 0)
509 	{
510 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
511 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
512 	}
513 
514 	if(pCreateInfo->enabledLayerCount != 0)
515 	{
516 		UNIMPLEMENTED("b/148240133: pCreateInfo->enabledLayerCount != 0");  // FIXME(b/148240133)
517 	}
518 
519 	for(uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; ++i)
520 	{
521 		if(!hasInstanceExtension(pCreateInfo->ppEnabledExtensionNames[i]))
522 		{
523 			return VK_ERROR_EXTENSION_NOT_PRESENT;
524 		}
525 	}
526 
527 	VkDebugUtilsMessengerEXT messenger = { VK_NULL_HANDLE };
528 	if(pCreateInfo->pNext)
529 	{
530 		const VkBaseInStructure *createInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
531 		switch(createInfo->sType)
532 		{
533 			case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT:
534 			{
535 				const VkDebugUtilsMessengerCreateInfoEXT *debugUtilsMessengerCreateInfoEXT = reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>(createInfo);
536 				VkResult result = vk::DebugUtilsMessenger::Create(pAllocator, debugUtilsMessengerCreateInfoEXT, &messenger);
537 				if(result != VK_SUCCESS)
538 				{
539 					return result;
540 				}
541 			}
542 			break;
543 			case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO:
544 				// According to the Vulkan spec, section 2.7.2. Implicit Valid Usage:
545 				// "The values VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO and
546 				//  VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO are reserved for
547 				//  internal use by the loader, and do not have corresponding
548 				//  Vulkan structures in this Specification."
549 				break;
550 			default:
551 				LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(createInfo->sType).c_str());
552 				break;
553 		}
554 	}
555 
556 	*pInstance = VK_NULL_HANDLE;
557 	VkPhysicalDevice physicalDevice = VK_NULL_HANDLE;
558 
559 	VkResult result = vk::DispatchablePhysicalDevice::Create(pAllocator, pCreateInfo, &physicalDevice);
560 	if(result != VK_SUCCESS)
561 	{
562 		vk::destroy(messenger, pAllocator);
563 		return result;
564 	}
565 
566 	result = vk::DispatchableInstance::Create(pAllocator, pCreateInfo, pInstance, physicalDevice, vk::Cast(messenger));
567 	if(result != VK_SUCCESS)
568 	{
569 		vk::destroy(messenger, pAllocator);
570 		vk::destroy(physicalDevice, pAllocator);
571 		return result;
572 	}
573 
574 	return result;
575 }
576 
577 VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator)
578 {
579 	TRACE("(VkInstance instance = %p, const VkAllocationCallbacks* pAllocator = %p)", instance, pAllocator);
580 
581 	vk::destroy(instance, pAllocator);
582 }
583 
584 VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount, VkPhysicalDevice *pPhysicalDevices)
585 {
586 	TRACE("(VkInstance instance = %p, uint32_t* pPhysicalDeviceCount = %p, VkPhysicalDevice* pPhysicalDevices = %p)",
587 	      instance, pPhysicalDeviceCount, pPhysicalDevices);
588 
589 	return vk::Cast(instance)->getPhysicalDevices(pPhysicalDeviceCount, pPhysicalDevices);
590 }
591 
592 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures)
593 {
594 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceFeatures* pFeatures = %p)",
595 	      physicalDevice, pFeatures);
596 
597 	*pFeatures = vk::Cast(physicalDevice)->getFeatures();
598 }
599 
600 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties *pFormatProperties)
601 {
602 	TRACE("GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkFormatProperties* pFormatProperties = %p)",
603 	      physicalDevice, (int)format, pFormatProperties);
604 
605 	vk::PhysicalDevice::GetFormatProperties(format, pFormatProperties);
606 }
607 
608 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties *pImageFormatProperties)
609 {
610 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkImageType type = %d, VkImageTiling tiling = %d, VkImageUsageFlags usage = %d, VkImageCreateFlags flags = %d, VkImageFormatProperties* pImageFormatProperties = %p)",
611 	      physicalDevice, (int)format, (int)type, (int)tiling, usage, flags, pImageFormatProperties);
612 
613 	VkPhysicalDeviceImageFormatInfo2 info2 = {};
614 	info2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
615 	info2.pNext = nullptr;
616 	info2.format = format;
617 	info2.type = type;
618 	info2.tiling = tiling;
619 	info2.usage = usage;
620 	info2.flags = flags;
621 
622 	VkImageFormatProperties2 properties2 = {};
623 	properties2.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
624 	properties2.pNext = nullptr;
625 
626 	VkResult result = vkGetPhysicalDeviceImageFormatProperties2(physicalDevice, &info2, &properties2);
627 
628 	*pImageFormatProperties = properties2.imageFormatProperties;
629 
630 	return result;
631 }
632 
633 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties)
634 {
635 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceProperties* pProperties = %p)",
636 	      physicalDevice, pProperties);
637 
638 	*pProperties = vk::Cast(physicalDevice)->getProperties();
639 }
640 
641 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties *pQueueFamilyProperties)
642 {
643 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pQueueFamilyPropertyCount = %p, VkQueueFamilyProperties* pQueueFamilyProperties = %p)", physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
644 
645 	if(!pQueueFamilyProperties)
646 	{
647 		*pQueueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();
648 	}
649 	else
650 	{
651 		vk::Cast(physicalDevice)->getQueueFamilyProperties(*pQueueFamilyPropertyCount, pQueueFamilyProperties);
652 	}
653 }
654 
655 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties *pMemoryProperties)
656 {
657 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceMemoryProperties* pMemoryProperties = %p)", physicalDevice, pMemoryProperties);
658 
659 	*pMemoryProperties = vk::PhysicalDevice::GetMemoryProperties();
660 }
661 
662 VK_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *pName)
663 {
664 	TRACE("(VkInstance instance = %p, const char* pName = %p)", instance, pName);
665 
666 	return vk::GetInstanceProcAddr(vk::Cast(instance), pName);
667 }
668 
669 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice device, const char *pName)
670 {
671 	TRACE("(VkDevice device = %p, const char* pName = %p)", device, pName);
672 
673 	return vk::GetDeviceProcAddr(vk::Cast(device), pName);
674 }
675 
676 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDevice *pDevice)
677 {
678 	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkDeviceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDevice* pDevice = %p)",
679 	      physicalDevice, pCreateInfo, pAllocator, pDevice);
680 
681 	if(pCreateInfo->flags != 0)
682 	{
683 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
684 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
685 	}
686 
687 	if(pCreateInfo->enabledLayerCount != 0)
688 	{
689 		// "The ppEnabledLayerNames and enabledLayerCount members of VkDeviceCreateInfo are deprecated and their values must be ignored by implementations."
690 		UNSUPPORTED("pCreateInfo->enabledLayerCount != 0");
691 	}
692 
693 	for(uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; ++i)
694 	{
695 		if(!hasDeviceExtension(pCreateInfo->ppEnabledExtensionNames[i]))
696 		{
697 			return VK_ERROR_EXTENSION_NOT_PRESENT;
698 		}
699 	}
700 
701 	const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
702 
703 	const VkPhysicalDeviceFeatures *enabledFeatures = pCreateInfo->pEnabledFeatures;
704 
705 	while(extensionCreateInfo)
706 	{
707 		// Casting to a long since some structures, such as
708 		// VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT
709 		// are not enumerated in the official Vulkan header
710 		switch((long)(extensionCreateInfo->sType))
711 		{
712 			case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO:
713 				// According to the Vulkan spec, section 2.7.2. Implicit Valid Usage:
714 				// "The values VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO and
715 				//  VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO are reserved for
716 				//  internal use by the loader, and do not have corresponding
717 				//  Vulkan structures in this Specification."
718 				break;
719 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
720 			{
721 				ASSERT(!pCreateInfo->pEnabledFeatures);  // "If the pNext chain includes a VkPhysicalDeviceFeatures2 structure, then pEnabledFeatures must be NULL"
722 
723 				const VkPhysicalDeviceFeatures2 *physicalDeviceFeatures2 = reinterpret_cast<const VkPhysicalDeviceFeatures2 *>(extensionCreateInfo);
724 
725 				enabledFeatures = &physicalDeviceFeatures2->features;
726 			}
727 			break;
728 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES:
729 			{
730 				const VkPhysicalDeviceSamplerYcbcrConversionFeatures *samplerYcbcrConversionFeatures = reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures *>(extensionCreateInfo);
731 
732 				// YCbCr conversion is supported.
733 				// samplerYcbcrConversionFeatures->samplerYcbcrConversion can be VK_TRUE or VK_FALSE.
734 				// No action needs to be taken on our end in either case; it's the app's responsibility that
735 				// "To create a sampler Y'CbCr conversion, the samplerYcbcrConversion feature must be enabled."
736 				(void)samplerYcbcrConversionFeatures->samplerYcbcrConversion;
737 			}
738 			break;
739 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES:
740 			{
741 				const VkPhysicalDevice16BitStorageFeatures *storage16BitFeatures = reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures *>(extensionCreateInfo);
742 
743 				if(storage16BitFeatures->storageBuffer16BitAccess != VK_FALSE ||
744 				   storage16BitFeatures->uniformAndStorageBuffer16BitAccess != VK_FALSE ||
745 				   storage16BitFeatures->storagePushConstant16 != VK_FALSE ||
746 				   storage16BitFeatures->storageInputOutput16 != VK_FALSE)
747 				{
748 					return VK_ERROR_FEATURE_NOT_PRESENT;
749 				}
750 			}
751 			break;
752 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES:
753 			{
754 				const VkPhysicalDeviceVariablePointerFeatures *variablePointerFeatures = reinterpret_cast<const VkPhysicalDeviceVariablePointerFeatures *>(extensionCreateInfo);
755 
756 				if(variablePointerFeatures->variablePointersStorageBuffer != VK_FALSE ||
757 				   variablePointerFeatures->variablePointers != VK_FALSE)
758 				{
759 					return VK_ERROR_FEATURE_NOT_PRESENT;
760 				}
761 			}
762 			break;
763 			case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO:
764 			{
765 				const VkDeviceGroupDeviceCreateInfo *groupDeviceCreateInfo = reinterpret_cast<const VkDeviceGroupDeviceCreateInfo *>(extensionCreateInfo);
766 
767 				if((groupDeviceCreateInfo->physicalDeviceCount != 1) ||
768 				   (groupDeviceCreateInfo->pPhysicalDevices[0] != physicalDevice))
769 				{
770 					return VK_ERROR_FEATURE_NOT_PRESENT;
771 				}
772 			}
773 			break;
774 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
775 			{
776 				const VkPhysicalDeviceMultiviewFeatures *multiviewFeatures = reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures *>(extensionCreateInfo);
777 
778 				if(multiviewFeatures->multiviewGeometryShader ||
779 				   multiviewFeatures->multiviewTessellationShader)
780 				{
781 					return VK_ERROR_FEATURE_NOT_PRESENT;
782 				}
783 			}
784 			break;
785 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES:
786 			{
787 				const VkPhysicalDeviceShaderDrawParametersFeatures *shaderDrawParametersFeatures = reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures *>(extensionCreateInfo);
788 
789 				if(shaderDrawParametersFeatures->shaderDrawParameters)
790 				{
791 					return VK_ERROR_FEATURE_NOT_PRESENT;
792 				}
793 			}
794 			break;
795 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR:
796 			{
797 				const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *shaderDrawParametersFeatures = reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *>(extensionCreateInfo);
798 
799 				// Separate depth and stencil layouts is already supported
800 				(void)(shaderDrawParametersFeatures->separateDepthStencilLayouts);
801 			}
802 			break;
803 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT:
804 			{
805 				const VkPhysicalDeviceLineRasterizationFeaturesEXT *lineRasterizationFeatures = reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT *>(extensionCreateInfo);
806 				if((lineRasterizationFeatures->smoothLines != VK_FALSE) ||
807 				   (lineRasterizationFeatures->stippledBresenhamLines != VK_FALSE) ||
808 				   (lineRasterizationFeatures->stippledRectangularLines != VK_FALSE) ||
809 				   (lineRasterizationFeatures->stippledSmoothLines != VK_FALSE))
810 				{
811 					return VK_ERROR_FEATURE_NOT_PRESENT;
812 				}
813 			}
814 			break;
815 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT:
816 			{
817 				const VkPhysicalDeviceProvokingVertexFeaturesEXT *provokingVertexFeatures = reinterpret_cast<const VkPhysicalDeviceProvokingVertexFeaturesEXT *>(extensionCreateInfo);
818 
819 				// Provoking vertex is supported.
820 				// provokingVertexFeatures->provokingVertexLast can be VK_TRUE or VK_FALSE.
821 				// No action needs to be taken on our end in either case; it's the app's responsibility to check
822 				// that the provokingVertexLast feature is enabled before using the provoking vertex convention.
823 				(void)provokingVertexFeatures->provokingVertexLast;
824 			}
825 			break;
826 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT:
827 			{
828 				const VkPhysicalDeviceImageRobustnessFeaturesEXT *imageRobustnessFeatures = reinterpret_cast<const VkPhysicalDeviceImageRobustnessFeaturesEXT *>(extensionCreateInfo);
829 
830 				// We currently always provide robust image accesses. When the feature is disabled, results are
831 				// undefined (for images with Dim != Buffer), so providing robustness is also acceptable.
832 				// TODO(b/159329067): Only provide robustness when requested.
833 				(void)imageRobustnessFeatures->robustImageAccess;
834 			}
835 			break;
836 			// For unsupported structures, check that we don't expose the corresponding extension string:
837 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT:
838 				ASSERT(!hasDeviceExtension(VK_EXT_ROBUSTNESS_2_EXTENSION_NAME));
839 				break;
840 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR:
841 			{
842 				const VkPhysicalDeviceImagelessFramebufferFeaturesKHR *imagelessFramebufferFeatures = reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeaturesKHR *>(extensionCreateInfo);
843 				// Always provide Imageless Framebuffers
844 				(void)imagelessFramebufferFeatures->imagelessFramebuffer;
845 			}
846 			break;
847 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES:
848 			{
849 				const VkPhysicalDeviceScalarBlockLayoutFeatures *scalarBlockLayoutFeatures = reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures *>(extensionCreateInfo);
850 
851 				// VK_EXT_scalar_block_layout is supported, allowing C-like structure layout for SPIR-V blocks.
852 				(void)scalarBlockLayoutFeatures->scalarBlockLayout;
853 			}
854 			break;
855 #ifdef SWIFTSHADER_DEVICE_MEMORY_REPORT
856 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT:
857 			{
858 				const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *deviceMemoryReportFeatures = reinterpret_cast<const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *>(extensionCreateInfo);
859 				(void)deviceMemoryReportFeatures->deviceMemoryReport;
860 			}
861 			break;
862 #endif  // SWIFTSHADER_DEVICE_MEMORY_REPORT
863 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES:
864 			{
865 				const VkPhysicalDeviceHostQueryResetFeatures *hostQueryResetFeatures = reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures *>(extensionCreateInfo);
866 
867 				// VK_EXT_host_query_reset is always enabled.
868 				(void)hostQueryResetFeatures->hostQueryReset;
869 				break;
870 			}
871 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES:
872 			{
873 				const auto *tsFeatures = reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures *>(extensionCreateInfo);
874 
875 				// VK_KHR_timeline_semaphores is always enabled
876 				(void)tsFeatures->timelineSemaphore;
877 				break;
878 			}
879 			default:
880 				// "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extensions]"
881 				LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
882 				break;
883 		}
884 
885 		extensionCreateInfo = extensionCreateInfo->pNext;
886 	}
887 
888 	ASSERT(pCreateInfo->queueCreateInfoCount > 0);
889 
890 	if(enabledFeatures)
891 	{
892 		if(!vk::Cast(physicalDevice)->hasFeatures(*enabledFeatures))
893 		{
894 			return VK_ERROR_FEATURE_NOT_PRESENT;
895 		}
896 	}
897 
898 	uint32_t queueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();
899 
900 	for(uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; i++)
901 	{
902 		const VkDeviceQueueCreateInfo &queueCreateInfo = pCreateInfo->pQueueCreateInfos[i];
903 		if(queueCreateInfo.flags != 0)
904 		{
905 			UNSUPPORTED("pCreateInfo->pQueueCreateInfos[%d]->flags %d", i, queueCreateInfo.flags);
906 		}
907 
908 		auto extInfo = reinterpret_cast<VkBaseInStructure const *>(queueCreateInfo.pNext);
909 		while(extInfo)
910 		{
911 			LOG_TRAP("pCreateInfo->pQueueCreateInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
912 			extInfo = extInfo->pNext;
913 		}
914 
915 		ASSERT(queueCreateInfo.queueFamilyIndex < queueFamilyPropertyCount);
916 		(void)queueFamilyPropertyCount;  // Silence unused variable warning
917 	}
918 
919 	auto scheduler = getOrCreateScheduler();
920 	return vk::DispatchableDevice::Create(pAllocator, pCreateInfo, pDevice, vk::Cast(physicalDevice), enabledFeatures, scheduler);
921 }
922 
923 VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator)
924 {
925 	TRACE("(VkDevice device = %p, const VkAllocationCallbacks* pAllocator = %p)", device, pAllocator);
926 
927 	vk::destroy(device, pAllocator);
928 }
929 
930 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties)
931 {
932 	TRACE("(const char* pLayerName = %p, uint32_t* pPropertyCount = %p, VkExtensionProperties* pProperties = %p)",
933 	      pLayerName, pPropertyCount, pProperties);
934 
935 	uint32_t extensionPropertiesCount = numInstanceSupportedExtensions();
936 
937 	if(!pProperties)
938 	{
939 		*pPropertyCount = extensionPropertiesCount;
940 		return VK_SUCCESS;
941 	}
942 
943 	auto toCopy = std::min(*pPropertyCount, extensionPropertiesCount);
944 	copyInstanceExtensions(pProperties, toCopy);
945 
946 	*pPropertyCount = toCopy;
947 	return (toCopy < extensionPropertiesCount) ? VK_INCOMPLETE : VK_SUCCESS;
948 }
949 
950 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties)
951 {
952 	TRACE("(VkPhysicalDevice physicalDevice = %p, const char* pLayerName = %p, uint32_t* pPropertyCount = %p, VkExtensionProperties* pProperties = %p)", physicalDevice, pLayerName, pPropertyCount, pProperties);
953 
954 	uint32_t extensionPropertiesCount = numDeviceSupportedExtensions();
955 
956 	if(!pProperties)
957 	{
958 		*pPropertyCount = extensionPropertiesCount;
959 		return VK_SUCCESS;
960 	}
961 
962 	auto toCopy = std::min(*pPropertyCount, extensionPropertiesCount);
963 	copyDeviceExtensions(pProperties, toCopy);
964 
965 	*pPropertyCount = toCopy;
966 	return (toCopy < extensionPropertiesCount) ? VK_INCOMPLETE : VK_SUCCESS;
967 }
968 
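// SwiftShader exposes no instance or device layers, so the layer enumeration
// entry points below always report zero properties.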
969 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pPropertyCount, VkLayerProperties *pProperties)
970 {
971 	TRACE("(uint32_t* pPropertyCount = %p, VkLayerProperties* pProperties = %p)", pPropertyCount, pProperties);
972 
973 	if(!pProperties)
974 	{
975 		*pPropertyCount = 0;
976 		return VK_SUCCESS;
977 	}
978 
979 	return VK_SUCCESS;
980 }
981 
982 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkLayerProperties *pProperties)
983 {
984 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pPropertyCount = %p, VkLayerProperties* pProperties = %p)", physicalDevice, pPropertyCount, pProperties);
985 
986 	if(!pProperties)
987 	{
988 		*pPropertyCount = 0;
989 		return VK_SUCCESS;
990 	}
991 
992 	return VK_SUCCESS;
993 }
994 
995 VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue)
996 {
997 	TRACE("(VkDevice device = %p, uint32_t queueFamilyIndex = %d, uint32_t queueIndex = %d, VkQueue* pQueue = %p)",
998 	      device, queueFamilyIndex, queueIndex, pQueue);
999 
1000 	*pQueue = vk::Cast(device)->getQueue(queueFamilyIndex, queueIndex);
1001 }
1002 
1003 VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence)
1004 {
1005 	TRACE("(VkQueue queue = %p, uint32_t submitCount = %d, const VkSubmitInfo* pSubmits = %p, VkFence fence = %p)",
1006 	      queue, submitCount, pSubmits, static_cast<void *>(fence));
1007 
1008 	return vk::Cast(queue)->submit(submitCount, pSubmits, vk::Cast(fence));
1009 }
1010 
1011 VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(VkQueue queue)
1012 {
1013 	TRACE("(VkQueue queue = %p)", queue);
1014 
1015 	return vk::Cast(queue)->waitIdle();
1016 }
1017 
1018 VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(VkDevice device)
1019 {
1020 	TRACE("(VkDevice device = %p)", device);
1021 
1022 	return vk::Cast(device)->waitIdle();
1023 }
1024 
1025 VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo, const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory)
1026 {
1027 	TRACE("(VkDevice device = %p, const VkMemoryAllocateInfo* pAllocateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDeviceMemory* pMemory = %p)",
1028 	      device, pAllocateInfo, pAllocator, pMemory);
1029 
1030 	const VkBaseInStructure *allocationInfo = reinterpret_cast<const VkBaseInStructure *>(pAllocateInfo->pNext);
1031 	while(allocationInfo)
1032 	{
1033 		switch(allocationInfo->sType)
1034 		{
1035 			case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO:
1036 				// This can safely be ignored, as the Vulkan spec mentions:
1037 				// "If the pNext chain includes a VkMemoryDedicatedAllocateInfo structure, then that structure
1038 				//  includes a handle of the sole buffer or image resource that the memory *can* be bound to."
1039 				break;
1040 			case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO:
1041 				// This extension controls on which physical devices the memory gets allocated.
1042 				// SwiftShader only has a single physical device, so this extension does nothing in this case.
1043 				break;
1044 #if SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
1045 			case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR:
1046 			{
1047 				auto *importInfo = reinterpret_cast<const VkImportMemoryFdInfoKHR *>(allocationInfo);
1048 				if(importInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT)
1049 				{
1050 					UNSUPPORTED("importInfo->handleType %u", importInfo->handleType);
1051 					return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1052 				}
1053 				break;
1054 			}
1055 #endif  // SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
1056 			case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO:
1057 			{
1058 				auto *exportInfo = reinterpret_cast<const VkExportMemoryAllocateInfo *>(allocationInfo);
1059 				switch(exportInfo->handleTypes)
1060 				{
1061 #if SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
1062 					case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT:
1063 						break;
1064 #endif
1065 #if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
1066 					case VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID:
1067 						break;
1068 #endif
1069 #if VK_USE_PLATFORM_FUCHSIA
1070 					case VK_EXTERNAL_MEMORY_HANDLE_TYPE_TEMP_ZIRCON_VMO_BIT_FUCHSIA:
1071 						break;
1072 #endif
1073 					default:
1074 						UNSUPPORTED("exportInfo->handleTypes %u", exportInfo->handleTypes);
1075 						return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1076 				}
1077 				break;
1078 			}
1079 #if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
1080 			case VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID:
1081 				// Ignore
1082 				break;
1083 #endif  // SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
1084 			case VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT:
1085 			{
1086 				auto *importInfo = reinterpret_cast<const VkImportMemoryHostPointerInfoEXT *>(allocationInfo);
1087 				if(importInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT && importInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT)
1088 				{
1089 					UNSUPPORTED("importInfo->handleType %u", importInfo->handleType);
1090 					return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1091 				}
1092 				break;
1093 			}
1094 #if VK_USE_PLATFORM_FUCHSIA
1095 			case VK_STRUCTURE_TYPE_TEMP_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA:
1096 			{
1097 				auto *importInfo = reinterpret_cast<const VkImportMemoryZirconHandleInfoFUCHSIA *>(allocationInfo);
1098 				if(importInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_TEMP_ZIRCON_VMO_BIT_FUCHSIA)
1099 				{
1100 					UNSUPPORTED("importInfo->handleType %u", importInfo->handleType);
1101 					return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1102 				}
1103 				break;
1104 			}
1105 #endif  // VK_USE_PLATFORM_FUCHSIA
1106 			default:
1107 				LOG_TRAP("pAllocateInfo->pNext sType = %s", vk::Stringify(allocationInfo->sType).c_str());
1108 				break;
1109 		}
1110 
1111 		allocationInfo = allocationInfo->pNext;
1112 	}
1113 
1114 	VkResult result = vk::DeviceMemory::Create(pAllocator, pAllocateInfo, pMemory, vk::Cast(device));
1115 	if(result != VK_SUCCESS)
1116 	{
1117 		return result;
1118 	}
1119 
1120 	// Make sure the memory allocation is done now so that OOM errors can be checked now
1121 	result = vk::Cast(*pMemory)->allocate();
1122 	if(result != VK_SUCCESS)
1123 	{
1124 		vk::destroy(*pMemory, pAllocator);
1125 		*pMemory = VK_NULL_HANDLE;
1126 	}
1127 
1128 	return result;
1129 }
1130 
1131 VKAPI_ATTR void VKAPI_CALL vkFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator)
1132 {
1133 	TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, const VkAllocationCallbacks* pAllocator = %p)",
1134 	      device, static_cast<void *>(memory), pAllocator);
1135 
1136 	vk::destroy(memory, pAllocator);
1137 }
1138 
1139 #if SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
1140 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR(VkDevice device, const VkMemoryGetFdInfoKHR *getFdInfo, int *pFd)
1141 {
1142 	TRACE("(VkDevice device = %p, const VkMemoryGetFdInfoKHR* getFdInfo = %p, int* pFd = %p)",
1143 	      device, getFdInfo, pFd);
1144 
1145 	if(getFdInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT)
1146 	{
1147 		UNSUPPORTED("pGetFdInfo->handleType %u", getFdInfo->handleType);
1148 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1149 	}
1150 	return vk::Cast(getFdInfo->memory)->exportFd(pFd);
1151 }
1152 
1153 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR *pMemoryFdProperties)
1154 {
1155 	TRACE("(VkDevice device = %p, VkExternalMemoryHandleTypeFlagBits handleType = %x, int fd = %d, VkMemoryFdPropertiesKHR* pMemoryFdProperties = %p)",
1156 	      device, handleType, fd, pMemoryFdProperties);
1157 
1158 	if(handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT)
1159 	{
1160 		UNSUPPORTED("handleType %u", handleType);
1161 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1162 	}
1163 
1164 	if(fd < 0)
1165 	{
1166 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1167 	}
1168 
1169 	const VkPhysicalDeviceMemoryProperties &memoryProperties =
1170 	    vk::PhysicalDevice::GetMemoryProperties();
1171 
1172 	// All SwiftShader memory types support this!
1173 	pMemoryFdProperties->memoryTypeBits = (1U << memoryProperties.memoryTypeCount) - 1U;
1174 
1175 	return VK_SUCCESS;
1176 }
1177 #endif  // SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
1178 #if VK_USE_PLATFORM_FUCHSIA
1179 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandleFUCHSIA(VkDevice device, const VkMemoryGetZirconHandleInfoFUCHSIA *pGetHandleInfo, zx_handle_t *pHandle)
1180 {
1181 	TRACE("(VkDevice device = %p, const VkMemoryGetZirconHandleInfoFUCHSIA* pGetHandleInfo = %p, zx_handle_t* pHandle = %p)",
1182 	      device, pGetHandleInfo, pHandle);
1183 
1184 	if(pGetHandleInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_TEMP_ZIRCON_VMO_BIT_FUCHSIA)
1185 	{
1186 		UNSUPPORTED("pGetHandleInfo->handleType %u", pGetHandleInfo->handleType);
1187 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1188 	}
1189 	return vk::Cast(pGetHandleInfo->memory)->exportHandle(pHandle);
1190 }
1191 
1192 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandlePropertiesFUCHSIA(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, zx_handle_t handle, VkMemoryZirconHandlePropertiesFUCHSIA *pMemoryZirconHandleProperties)
1193 {
1194 	TRACE("(VkDevice device = %p, VkExternalMemoryHandleTypeFlagBits handleType = %x, zx_handle_t handle = %d, VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties = %p)",
1195 	      device, handleType, handle, pMemoryZirconHandleProperties);
1196 
1197 	if(handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_TEMP_ZIRCON_VMO_BIT_FUCHSIA)
1198 	{
1199 		UNSUPPORTED("handleType %u", handleType);
1200 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1201 	}
1202 
1203 	if(handle == ZX_HANDLE_INVALID)
1204 	{
1205 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1206 	}
1207 
1208 	const VkPhysicalDeviceMemoryProperties &memoryProperties =
1209 	    vk::PhysicalDevice::GetMemoryProperties();
1210 
1211 	// All SwiftShader memory types support this!
1212 	pMemoryZirconHandleProperties->memoryTypeBits = (1U << memoryProperties.memoryTypeCount) - 1U;
1213 
1214 	return VK_SUCCESS;
1215 }
1216 #endif  // VK_USE_PLATFORM_FUCHSIA
1217 
1218 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void *pHostPointer, VkMemoryHostPointerPropertiesEXT *pMemoryHostPointerProperties)
1219 {
1220 	TRACE("(VkDevice device = %p, VkExternalMemoryHandleTypeFlagBits handleType = %x, const void *pHostPointer = %p, VkMemoryHostPointerPropertiesEXT *pMemoryHostPointerProperties = %p)",
1221 	      device, handleType, pHostPointer, pMemoryHostPointerProperties);
1222 
1223 	if(handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT && handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT)
1224 	{
1225 		UNSUPPORTED("handleType %u", handleType);
1226 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1227 	}
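	// Note: VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT has the value 0x1, so the assignment below
	// effectively reports memory type index 0 as compatible with the imported host pointer.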
1228 	pMemoryHostPointerProperties->memoryTypeBits = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
1229 
1230 	return VK_SUCCESS;
1231 }
1232 
1233 #if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
1234 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo, struct AHardwareBuffer **pBuffer)
1235 {
1236 	TRACE("(VkDevice device = %p, const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo = %p, struct AHardwareBuffer **pBuffer = %p)",
1237 	      device, pInfo, pBuffer);
1238 
1239 	return vk::Cast(pInfo->memory)->exportAndroidHardwareBuffer(pBuffer);
1240 }
1241 
1242 VKAPI_ATTR VkResult VKAPI_CALL vkGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties)
1243 {
1244 	TRACE("(VkDevice device = %p, const struct AHardwareBuffer *buffer = %p, VkAndroidHardwareBufferPropertiesANDROID *pProperties = %p)",
1245 	      device, buffer, pProperties);
1246 
1247 	return vk::DeviceMemory::GetAndroidHardwareBufferProperties(device, buffer, pProperties);
1248 }
1249 #endif  // SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
1250 
1251 VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void **ppData)
1252 {
1253 	TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, VkDeviceSize offset = %d, VkDeviceSize size = %d, VkMemoryMapFlags flags = %d, void** ppData = %p)",
1254 	      device, static_cast<void *>(memory), int(offset), int(size), flags, ppData);
1255 
1256 	if(flags != 0)
1257 	{
1258 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1259 		UNSUPPORTED("flags %d", int(flags));
1260 	}
1261 
1262 	return vk::Cast(memory)->map(offset, size, ppData);
1263 }
1264 
1265 VKAPI_ATTR void VKAPI_CALL vkUnmapMemory(VkDevice device, VkDeviceMemory memory)
1266 {
1267 	TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p)", device, static_cast<void *>(memory));
1268 
1269 	// Noop, memory will be released when the DeviceMemory object is released
1270 }
1271 
1272 VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
1273 {
1274 	TRACE("(VkDevice device = %p, uint32_t memoryRangeCount = %d, const VkMappedMemoryRange* pMemoryRanges = %p)",
1275 	      device, memoryRangeCount, pMemoryRanges);
1276 
1277 	// Noop, host and device memory are the same to SwiftShader
1278 
1279 	return VK_SUCCESS;
1280 }
1281 
1282 VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
1283 {
1284 	TRACE("(VkDevice device = %p, uint32_t memoryRangeCount = %d, const VkMappedMemoryRange* pMemoryRanges = %p)",
1285 	      device, memoryRangeCount, pMemoryRanges);
1286 
1287 	// Noop, host and device memory are the same to SwiftShader
1288 
1289 	return VK_SUCCESS;
1290 }
1291 
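// Illustrative sketch, compiled out: flushing and invalidating mapped ranges are no-ops here
// because host and device memory are the same, but a portable application still performs them
// for non-coherent memory types. The helper name below is hypothetical.
#if 0
static void exampleWriteMapped(VkDevice device, VkDeviceMemory memory, const void *src, VkDeviceSize size)
{
	void *data = nullptr;
	if(vkMapMemory(device, memory, 0, size, 0, &data) != VK_SUCCESS)
	{
		return;
	}

	memcpy(data, src, static_cast<size_t>(size));

	VkMappedMemoryRange range = {};
	range.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
	range.memory = memory;
	range.offset = 0;
	range.size = VK_WHOLE_SIZE;
	vkFlushMappedMemoryRanges(device, 1, &range);  // No-op in SwiftShader, but required by the spec

	vkUnmapMemory(device, memory);
}
#endif
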
1292 VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment(VkDevice pDevice, VkDeviceMemory pMemory, VkDeviceSize *pCommittedMemoryInBytes)
1293 {
1294 	TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, VkDeviceSize* pCommittedMemoryInBytes = %p)",
1295 	      pDevice, static_cast<void *>(pMemory), pCommittedMemoryInBytes);
1296 
1297 	auto memory = vk::Cast(pMemory);
1298 
1299 #if !defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)
1300 	const auto &memoryProperties = vk::PhysicalDevice::GetMemoryProperties();
1301 	uint32_t typeIndex = memory->getMemoryTypeIndex();
1302 	ASSERT(typeIndex < memoryProperties.memoryTypeCount);
1303 	ASSERT(memoryProperties.memoryTypes[typeIndex].propertyFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT);
1304 #endif
1305 
1306 	*pCommittedMemoryInBytes = memory->getCommittedMemoryInBytes();
1307 }
1308 
1309 VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset)
1310 {
1311 	TRACE("(VkDevice device = %p, VkBuffer buffer = %p, VkDeviceMemory memory = %p, VkDeviceSize memoryOffset = %d)",
1312 	      device, static_cast<void *>(buffer), static_cast<void *>(memory), int(memoryOffset));
1313 
1314 	if(!vk::Cast(buffer)->canBindToMemory(vk::Cast(memory)))
1315 	{
1316 		UNSUPPORTED("vkBindBufferMemory with invalid external memory");
1317 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1318 	}
1319 	vk::Cast(buffer)->bind(vk::Cast(memory), memoryOffset);
1320 	return VK_SUCCESS;
1321 }
1322 
1323 VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset)
1324 {
1325 	TRACE("(VkDevice device = %p, VkImage image = %p, VkDeviceMemory memory = %p, VkDeviceSize memoryOffset = %d)",
1326 	      device, static_cast<void *>(image), static_cast<void *>(memory), int(memoryOffset));
1327 
1328 	if(!vk::Cast(image)->canBindToMemory(vk::Cast(memory)))
1329 	{
1330 		UNSUPPORTED("vkBindImageMemory with invalid external memory");
1331 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1332 	}
1333 	vk::Cast(image)->bind(vk::Cast(memory), memoryOffset);
1334 	return VK_SUCCESS;
1335 }
1336 
1337 VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements)
1338 {
1339 	TRACE("(VkDevice device = %p, VkBuffer buffer = %p, VkMemoryRequirements* pMemoryRequirements = %p)",
1340 	      device, static_cast<void *>(buffer), pMemoryRequirements);
1341 
1342 	*pMemoryRequirements = vk::Cast(buffer)->getMemoryRequirements();
1343 }
1344 
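// Illustrative sketch, compiled out: the create -> query requirements -> allocate -> bind
// sequence served by the entry points above. exampleCreateBoundBuffer() and findMemoryType()
// are hypothetical helpers; findMemoryType() would pick a type index from memoryTypeBits.
#if 0
static VkBuffer exampleCreateBoundBuffer(VkDevice device, VkDeviceSize size, VkDeviceMemory *outMemory)
{
	VkBufferCreateInfo bufferInfo = {};
	bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
	bufferInfo.size = size;
	bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
	bufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

	VkBuffer buffer = VK_NULL_HANDLE;
	vkCreateBuffer(device, &bufferInfo, nullptr, &buffer);

	VkMemoryRequirements requirements = {};
	vkGetBufferMemoryRequirements(device, buffer, &requirements);

	VkMemoryAllocateInfo allocInfo = {};
	allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
	allocInfo.allocationSize = requirements.size;
	allocInfo.memoryTypeIndex = findMemoryType(requirements.memoryTypeBits);  // hypothetical helper

	vkAllocateMemory(device, &allocInfo, nullptr, outMemory);
	vkBindBufferMemory(device, buffer, *outMemory, 0);

	return buffer;
}
#endif
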
1345 VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements)
1346 {
1347 	TRACE("(VkDevice device = %p, VkImage image = %p, VkMemoryRequirements* pMemoryRequirements = %p)",
1348 	      device, static_cast<void *>(image), pMemoryRequirements);
1349 
1350 	*pMemoryRequirements = vk::Cast(image)->getMemoryRequirements();
1351 }
1352 
1353 VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
1354 {
1355 	TRACE("(VkDevice device = %p, VkImage image = %p, uint32_t* pSparseMemoryRequirementCount = %p, VkSparseImageMemoryRequirements* pSparseMemoryRequirements = %p)",
1356 	      device, static_cast<void *>(image), pSparseMemoryRequirementCount, pSparseMemoryRequirements);
1357 
1358 	// The 'sparseBinding' feature is not supported, so images cannot be created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag.
1359 	// "If the image was not created with VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT then pSparseMemoryRequirementCount will be set to zero and pSparseMemoryRequirements will not be written to."
1360 	*pSparseMemoryRequirementCount = 0;
1361 }
1362 
1363 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t *pPropertyCount, VkSparseImageFormatProperties *pProperties)
1364 {
1365 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkImageType type = %d, VkSampleCountFlagBits samples = %d, VkImageUsageFlags usage = %d, VkImageTiling tiling = %d, uint32_t* pPropertyCount = %p, VkSparseImageFormatProperties* pProperties = %p)",
1366 	      physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
1367 
1368 	// We do not support sparse images.
1369 	*pPropertyCount = 0;
1370 }
1371 
1372 VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo, VkFence fence)
1373 {
1374 	TRACE("()");
1375 	UNSUPPORTED("vkQueueBindSparse");
1376 	return VK_SUCCESS;
1377 }
1378 
1379 VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkFence *pFence)
1380 {
1381 	TRACE("(VkDevice device = %p, const VkFenceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkFence* pFence = %p)",
1382 	      device, pCreateInfo, pAllocator, pFence);
1383 
1384 	auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1385 	while(nextInfo)
1386 	{
1387 		LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
1388 		nextInfo = nextInfo->pNext;
1389 	}
1390 
1391 	return vk::Fence::Create(pAllocator, pCreateInfo, pFence);
1392 }
1393 
1394 VKAPI_ATTR void VKAPI_CALL vkDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator)
1395 {
1396 	TRACE("(VkDevice device = %p, VkFence fence = %p, const VkAllocationCallbacks* pAllocator = %p)",
1397 	      device, static_cast<void *>(fence), pAllocator);
1398 
1399 	vk::destroy(fence, pAllocator);
1400 }
1401 
1402 VKAPI_ATTR VkResult VKAPI_CALL vkResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences)
1403 {
1404 	TRACE("(VkDevice device = %p, uint32_t fenceCount = %d, const VkFence* pFences = %p)",
1405 	      device, fenceCount, pFences);
1406 
1407 	for(uint32_t i = 0; i < fenceCount; i++)
1408 	{
1409 		vk::Cast(pFences[i])->reset();
1410 	}
1411 
1412 	return VK_SUCCESS;
1413 }
1414 
1415 VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus(VkDevice device, VkFence fence)
1416 {
1417 	TRACE("(VkDevice device = %p, VkFence fence = %p)", device, static_cast<void *>(fence));
1418 
1419 	return vk::Cast(fence)->getStatus();
1420 }
1421 
1422 VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll, uint64_t timeout)
1423 {
1424 	TRACE("(VkDevice device = %p, uint32_t fenceCount = %d, const VkFence* pFences = %p, VkBool32 waitAll = %d, uint64_t timeout = %" PRIu64 ")",
1425 	      device, int(fenceCount), pFences, int(waitAll), timeout);
1426 
1427 	return vk::Cast(device)->waitForFences(fenceCount, pFences, waitAll, timeout);
1428 }
1429 
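// Illustrative sketch, compiled out: a typical fence lifecycle against the entry points above;
// create unsignaled, attach to a queue submission, wait, then reset before reuse. The helper
// name is hypothetical.
#if 0
static void exampleSubmitAndWait(VkDevice device, VkQueue queue, const VkSubmitInfo *submit)
{
	VkFenceCreateInfo fenceInfo = {};
	fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;

	VkFence fence = VK_NULL_HANDLE;
	vkCreateFence(device, &fenceInfo, nullptr, &fence);

	vkQueueSubmit(queue, 1, submit, fence);
	vkWaitForFences(device, 1, &fence, VK_TRUE, UINT64_MAX);
	vkResetFences(device, 1, &fence);

	vkDestroyFence(device, fence, nullptr);
}
#endif
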
1430 VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore)
1431 {
1432 	TRACE("(VkDevice device = %p, const VkSemaphoreCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSemaphore* pSemaphore = %p)",
1433 	      device, pCreateInfo, pAllocator, pSemaphore);
1434 
1435 	if(pCreateInfo->flags != 0)
1436 	{
1437 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1438 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1439 	}
1440 
1441 	VkSemaphoreType type = VK_SEMAPHORE_TYPE_BINARY;
1442 	for(const auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1443 	    nextInfo != nullptr; nextInfo = nextInfo->pNext)
1444 	{
1445 		switch(nextInfo->sType)
1446 		{
1447 			case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO:
1448 				// Let the semaphore constructor handle this
1449 				break;
1450 			case VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO:
1451 			{
1452 				const VkSemaphoreTypeCreateInfo *info = reinterpret_cast<const VkSemaphoreTypeCreateInfo *>(nextInfo);
1453 				type = info->semaphoreType;
1454 			}
1455 			break;
1456 			default:
1457 				WARN("nextInfo->sType = %s", vk::Stringify(nextInfo->sType).c_str());
1458 				break;
1459 		}
1460 	}
1461 
1462 	if(type == VK_SEMAPHORE_TYPE_BINARY)
1463 	{
1464 		return vk::BinarySemaphore::Create(pAllocator, pCreateInfo, pSemaphore, pAllocator);
1465 	}
1466 	else
1467 	{
1468 		return vk::TimelineSemaphore::Create(pAllocator, pCreateInfo, pSemaphore, pAllocator);
1469 	}
1470 }
1471 
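// Illustrative sketch, compiled out: a timeline semaphore is requested by chaining
// VkSemaphoreTypeCreateInfo, which is exactly what the switch above looks for; without it,
// vkCreateSemaphore yields a binary semaphore. The helper name is hypothetical.
#if 0
static VkSemaphore exampleCreateTimelineSemaphore(VkDevice device, uint64_t initialValue)
{
	VkSemaphoreTypeCreateInfo typeInfo = {};
	typeInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO;
	typeInfo.semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE;
	typeInfo.initialValue = initialValue;

	VkSemaphoreCreateInfo createInfo = {};
	createInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
	createInfo.pNext = &typeInfo;

	VkSemaphore semaphore = VK_NULL_HANDLE;
	vkCreateSemaphore(device, &createInfo, nullptr, &semaphore);
	return semaphore;
}
#endif
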
1472 VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator)
1473 {
1474 	TRACE("(VkDevice device = %p, VkSemaphore semaphore = %p, const VkAllocationCallbacks* pAllocator = %p)",
1475 	      device, static_cast<void *>(semaphore), pAllocator);
1476 
1477 	vk::destroy(semaphore, pAllocator);
1478 }
1479 
1480 #if SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
1481 VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd)
1482 {
1483 	TRACE("(VkDevice device = %p, const VkSemaphoreGetFdInfoKHR* pGetFdInfo = %p, int* pFd = %p)",
1484 	      device, static_cast<const void *>(pGetFdInfo), static_cast<void *>(pFd));
1485 
1486 	if(pGetFdInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT)
1487 	{
1488 		UNSUPPORTED("pGetFdInfo->handleType %d", int(pGetFdInfo->handleType));
1489 	}
1490 
1491 	auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pGetFdInfo->semaphore);
1492 	ASSERT(sem != nullptr);
1493 	return sem->exportFd(pFd);
1494 }
1495 
1496 VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR *pImportSemaphoreInfo)
1497 {
1498 	TRACE("(VkDevice device = %p, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreInfo = %p)",
1499 	      device, static_cast<const void *>(pImportSemaphoreInfo));
1500 
1501 	if(pImportSemaphoreInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT)
1502 	{
1503 		UNSUPPORTED("pImportSemaphoreInfo->handleType %d", int(pImportSemaphoreInfo->handleType));
1504 	}
1505 	bool temporaryImport = (pImportSemaphoreInfo->flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) != 0;
1506 
1507 	auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pImportSemaphoreInfo->semaphore);
1508 	ASSERT(sem != nullptr);
1509 	return sem->importFd(pImportSemaphoreInfo->fd, temporaryImport);
1510 }
1511 #endif  // SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
1512 
1513 #if VK_USE_PLATFORM_FUCHSIA
1514 VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreZirconHandleFUCHSIA(
1515     VkDevice device,
1516     const VkImportSemaphoreZirconHandleInfoFUCHSIA *pImportSemaphoreZirconHandleInfo)
1517 {
1518 	TRACE("(VkDevice device = %p, const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo = %p)",
1519 	      device, pImportSemaphoreZirconHandleInfo);
1520 
1521 	if(pImportSemaphoreZirconHandleInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_TEMP_ZIRCON_EVENT_BIT_FUCHSIA)
1522 	{
1523 		UNSUPPORTED("pImportSemaphoreZirconHandleInfo->handleType %d", int(pImportSemaphoreZirconHandleInfo->handleType));
1524 	}
1525 	bool temporaryImport = (pImportSemaphoreZirconHandleInfo->flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) != 0;
1526 	auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pImportSemaphoreZirconHandleInfo->semaphore);
1527 	ASSERT(sem != nullptr);
1528 	return sem->importHandle(pImportSemaphoreZirconHandleInfo->handle, temporaryImport);
1529 }
1530 
1531 VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreZirconHandleFUCHSIA(
1532     VkDevice device,
1533     const VkSemaphoreGetZirconHandleInfoFUCHSIA *pGetZirconHandleInfo,
1534     zx_handle_t *pZirconHandle)
1535 {
1536 	TRACE("(VkDevice device = %p, const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo = %p, zx_handle_t* pZirconHandle = %p)",
1537 	      device, static_cast<const void *>(pGetZirconHandleInfo), static_cast<void *>(pZirconHandle));
1538 
1539 	if(pGetZirconHandleInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_TEMP_ZIRCON_EVENT_BIT_FUCHSIA)
1540 	{
1541 		UNSUPPORTED("pGetZirconHandleInfo->handleType %d", int(pGetZirconHandleInfo->handleType));
1542 	}
1543 
1544 	auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pGetZirconHandleInfo->semaphore);
1545 	ASSERT(sem != nullptr);
1546 	return sem->exportHandle(pZirconHandle);
1547 }
1548 #endif  // VK_USE_PLATFORM_FUCHSIA
1549 
1550 VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue)
1551 {
1552 	TRACE("(VkDevice device = %p, VkSemaphore semaphore = %p, uint64_t* pValue = %p)",
1553 	      device, static_cast<void *>(semaphore), pValue);
1554 	*pValue = vk::DynamicCast<vk::TimelineSemaphore>(semaphore)->getCounterValue();
1555 	return VK_SUCCESS;
1556 }
1557 
1558 VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphore(VkDevice device, const VkSemaphoreSignalInfo *pSignalInfo)
1559 {
1560 	TRACE("(VkDevice device = %p, const VkSemaphoreSignalInfo *pSignalInfo = %p)",
1561 	      device, pSignalInfo);
1562 	vk::DynamicCast<vk::TimelineSemaphore>(pSignalInfo->semaphore)->signal(pSignalInfo->value);
1563 	return VK_SUCCESS;
1564 }
1565 
1566 VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout)
1567 {
1568 	TRACE("(VkDevice device = %p, const VkSemaphoreWaitInfo *pWaitInfo = %p, uint64_t timeout = %" PRIu64 ")",
1569 	      device, pWaitInfo, timeout);
1570 	return vk::Cast(device)->waitForSemaphores(pWaitInfo, timeout);
1571 }
1572 
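// Illustrative sketch, compiled out: host-side timeline semaphore usage against the entry
// points above; one thread signals a value, another waits for it to be reached. The helper
// name is hypothetical.
#if 0
static void exampleHostSignalAndWait(VkDevice device, VkSemaphore timeline, uint64_t value)
{
	VkSemaphoreSignalInfo signalInfo = {};
	signalInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO;
	signalInfo.semaphore = timeline;
	signalInfo.value = value;
	vkSignalSemaphore(device, &signalInfo);

	VkSemaphoreWaitInfo waitInfo = {};
	waitInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO;
	waitInfo.semaphoreCount = 1;
	waitInfo.pSemaphores = &timeline;
	waitInfo.pValues = &value;
	vkWaitSemaphores(device, &waitInfo, UINT64_MAX);
}
#endif
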
1573 VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkEvent *pEvent)
1574 {
1575 	TRACE("(VkDevice device = %p, const VkEventCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkEvent* pEvent = %p)",
1576 	      device, pCreateInfo, pAllocator, pEvent);
1577 
1578 	if(pCreateInfo->flags != 0)
1579 	{
1580 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1581 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1582 	}
1583 
1584 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
1585 	while(extInfo)
1586 	{
1587 		// Vulkan 1.2: "pNext must be NULL"
1588 		LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1589 		extInfo = extInfo->pNext;
1590 	}
1591 
1592 	return vk::Event::Create(pAllocator, pCreateInfo, pEvent);
1593 }
1594 
1595 VKAPI_ATTR void VKAPI_CALL vkDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator)
1596 {
1597 	TRACE("(VkDevice device = %p, VkEvent event = %p, const VkAllocationCallbacks* pAllocator = %p)",
1598 	      device, static_cast<void *>(event), pAllocator);
1599 
1600 	vk::destroy(event, pAllocator);
1601 }
1602 
1603 VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus(VkDevice device, VkEvent event)
1604 {
1605 	TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));
1606 
1607 	return vk::Cast(event)->getStatus();
1608 }
1609 
1610 VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent(VkDevice device, VkEvent event)
1611 {
1612 	TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));
1613 
1614 	vk::Cast(event)->signal();
1615 
1616 	return VK_SUCCESS;
1617 }
1618 
1619 VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent(VkDevice device, VkEvent event)
1620 {
1621 	TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));
1622 
1623 	vk::Cast(event)->reset();
1624 
1625 	return VK_SUCCESS;
1626 }
1627 
1628 VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool)
1629 {
1630 	TRACE("(VkDevice device = %p, const VkQueryPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkQueryPool* pQueryPool = %p)",
1631 	      device, pCreateInfo, pAllocator, pQueryPool);
1632 
1633 	if(pCreateInfo->flags != 0)
1634 	{
1635 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1636 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1637 	}
1638 
1639 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
1640 	while(extInfo)
1641 	{
1642 		LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1643 		extInfo = extInfo->pNext;
1644 	}
1645 
1646 	return vk::QueryPool::Create(pAllocator, pCreateInfo, pQueryPool);
1647 }
1648 
1649 VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator)
1650 {
1651 	TRACE("(VkDevice device = %p, VkQueryPool queryPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
1652 	      device, static_cast<void *>(queryPool), pAllocator);
1653 
1654 	vk::destroy(queryPool, pAllocator);
1655 }
1656 
1657 VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void *pData, VkDeviceSize stride, VkQueryResultFlags flags)
1658 {
1659 	TRACE("(VkDevice device = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d, size_t dataSize = %d, void* pData = %p, VkDeviceSize stride = %d, VkQueryResultFlags flags = %d)",
1660 	      device, static_cast<void *>(queryPool), int(firstQuery), int(queryCount), int(dataSize), pData, int(stride), flags);
1661 
1662 	return vk::Cast(queryPool)->getResults(firstQuery, queryCount, dataSize, pData, stride, flags);
1663 }
1664 
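// Illustrative sketch, compiled out: reading back one 64-bit occlusion query result through the
// entry point above; VK_QUERY_RESULT_WAIT_BIT makes the call block until the result is
// available. The helper name is hypothetical and assumes query 0 of the pool has been used.
#if 0
static uint64_t exampleReadOcclusionQuery(VkDevice device, VkQueryPool pool)
{
	uint64_t result = 0;
	vkGetQueryPoolResults(device, pool, 0, 1, sizeof(result), &result, sizeof(result),
	                      VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT);
	return result;
}
#endif
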
1665 VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer)
1666 {
1667 	TRACE("(VkDevice device = %p, const VkBufferCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkBuffer* pBuffer = %p)",
1668 	      device, pCreateInfo, pAllocator, pBuffer);
1669 
1670 	auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1671 	while(nextInfo)
1672 	{
1673 		switch(nextInfo->sType)
1674 		{
1675 			case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO:
1676 				// Do nothing. Should be handled by vk::Buffer::Create().
1677 				break;
1678 			default:
1679 				LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
1680 				break;
1681 		}
1682 		nextInfo = nextInfo->pNext;
1683 	}
1684 
1685 	return vk::Buffer::Create(pAllocator, pCreateInfo, pBuffer);
1686 }
1687 
1688 VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator)
1689 {
1690 	TRACE("(VkDevice device = %p, VkBuffer buffer = %p, const VkAllocationCallbacks* pAllocator = %p)",
1691 	      device, static_cast<void *>(buffer), pAllocator);
1692 
1693 	vk::destroy(buffer, pAllocator);
1694 }
1695 
1696 VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferDeviceAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo)
1697 {
1698 	TRACE("(VkDevice device = %p, const VkBufferDeviceAddressInfo* pInfo = %p)",
1699 	      device, pInfo);
1700 	UNSUPPORTED("VK_KHR_buffer_device_address");
1701 	return 0;
1702 }
1703 
1704 VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo)
1705 {
1706 	TRACE("(VkDevice device = %p, const VkBufferDeviceAddressInfo* pInfo = %p)",
1707 	      device, pInfo);
1708 	UNSUPPORTED("VK_KHR_buffer_device_address");
1709 	return 0;
1710 }
1711 
1712 VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddress(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo *pInfo)
1713 {
1714 	TRACE("(VkDevice device = %p, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo = %p)",
1715 	      device, pInfo);
1716 	UNSUPPORTED("VK_KHR_buffer_device_address");
1717 	return 0;
1718 }
1719 
1720 VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBufferView *pView)
1721 {
1722 	TRACE("(VkDevice device = %p, const VkBufferViewCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkBufferView* pView = %p)",
1723 	      device, pCreateInfo, pAllocator, pView);
1724 
1725 	if(pCreateInfo->flags != 0)
1726 	{
1727 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1728 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1729 	}
1730 
1731 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
1732 	while(extInfo)
1733 	{
1734 		LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1735 		extInfo = extInfo->pNext;
1736 	}
1737 
1738 	return vk::BufferView::Create(pAllocator, pCreateInfo, pView);
1739 }
1740 
1741 VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator)
1742 {
1743 	TRACE("(VkDevice device = %p, VkBufferView bufferView = %p, const VkAllocationCallbacks* pAllocator = %p)",
1744 	      device, static_cast<void *>(bufferView), pAllocator);
1745 
1746 	vk::destroy(bufferView, pAllocator);
1747 }
1748 
1749 VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkImage *pImage)
1750 {
1751 	TRACE("(VkDevice device = %p, const VkImageCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkImage* pImage = %p)",
1752 	      device, pCreateInfo, pAllocator, pImage);
1753 
1754 	const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1755 
1756 #ifdef __ANDROID__
1757 	vk::BackingMemory backmem;
1758 	bool swapchainImage = false;
1759 #endif
1760 
1761 	while(extensionCreateInfo)
1762 	{
1763 		switch((long)(extensionCreateInfo->sType))
1764 		{
1765 #ifdef __ANDROID__
1766 			case VK_STRUCTURE_TYPE_SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID:
1767 			{
1768 				const VkSwapchainImageCreateInfoANDROID *swapImageCreateInfo = reinterpret_cast<const VkSwapchainImageCreateInfoANDROID *>(extensionCreateInfo);
1769 				backmem.androidUsage = swapImageCreateInfo->usage;
1770 			}
1771 			break;
1772 			case VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID:
1773 			{
1774 				const VkNativeBufferANDROID *nativeBufferInfo = reinterpret_cast<const VkNativeBufferANDROID *>(extensionCreateInfo);
1775 				backmem.nativeHandle = nativeBufferInfo->handle;
1776 				backmem.stride = nativeBufferInfo->stride;
1777 				swapchainImage = true;
1778 			}
1779 			break;
1780 			case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
1781 				break;
1782 #endif
1783 			case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
1784 				// Do nothing. Should be handled by vk::Image::Create()
1785 				break;
1786 			case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
1787 				/* Do nothing. We don't actually need the swapchain handle yet; we'll do all the work in vkBindImageMemory2. */
1788 				break;
1789 			case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
1790 				// Do nothing. This extension tells the driver which image formats will be used
1791 				// by the application. SwiftShader is not affected by the absence of this information,
1792 				// so we don't need to track the format list.
1793 				break;
1794 			case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
1795 			{
1796 				// SwiftShader does not use an image's usage info for non-debug purposes outside of
1797 				// vkGetPhysicalDeviceImageFormatProperties2. This also applies to separate stencil usage.
1798 				const VkImageStencilUsageCreateInfo *stencilUsageInfo = reinterpret_cast<const VkImageStencilUsageCreateInfo *>(extensionCreateInfo);
1799 				(void)stencilUsageInfo->stencilUsage;
1800 			}
1801 			break;
1802 			default:
1803 				// "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extensions]"
1804 				LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
1805 				break;
1806 		}
1807 
1808 		extensionCreateInfo = extensionCreateInfo->pNext;
1809 	}
1810 
1811 	VkResult result = vk::Image::Create(pAllocator, pCreateInfo, pImage, vk::Cast(device));
1812 
1813 #ifdef __ANDROID__
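	// For Android swapchain images, the backing gralloc buffer is supplied through
	// VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID above, so device memory is allocated and
	// bound to the image immediately here instead of by the application.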
1814 	if(swapchainImage)
1815 	{
1816 		if(result != VK_SUCCESS)
1817 		{
1818 			return result;
1819 		}
1820 
1821 		vk::Image *image = vk::Cast(*pImage);
1822 		VkMemoryRequirements memRequirements = image->getMemoryRequirements();
1823 
1824 		VkMemoryAllocateInfo allocInfo = {};
1825 		allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1826 		allocInfo.allocationSize = memRequirements.size;
1827 		allocInfo.memoryTypeIndex = 0;
1828 
1829 		VkDeviceMemory devmem = { VK_NULL_HANDLE };
1830 		result = vkAllocateMemory(device, &allocInfo, pAllocator, &devmem);
1831 		if(result != VK_SUCCESS)
1832 		{
1833 			return result;
1834 		}
1835 
1836 		vkBindImageMemory(device, *pImage, devmem, 0);
1837 		backmem.externalMemory = true;
1838 
1839 		image->setBackingMemory(backmem);
1840 	}
1841 #endif
1842 
1843 	return result;
1844 }
1845 
1846 VKAPI_ATTR void VKAPI_CALL vkDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator)
1847 {
1848 	TRACE("(VkDevice device = %p, VkImage image = %p, const VkAllocationCallbacks* pAllocator = %p)",
1849 	      device, static_cast<void *>(image), pAllocator);
1850 
1851 #ifdef __ANDROID__
1852 	vk::Image *img = vk::Cast(image);
1853 	if(img && img->hasExternalMemory())
1854 	{
1855 		vk::destroy(img->getExternalMemory(), pAllocator);
1856 	}
1857 #endif
1858 
1859 	vk::destroy(image, pAllocator);
1860 }
1861 
1862 VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource, VkSubresourceLayout *pLayout)
1863 {
1864 	TRACE("(VkDevice device = %p, VkImage image = %p, const VkImageSubresource* pSubresource = %p, VkSubresourceLayout* pLayout = %p)",
1865 	      device, static_cast<void *>(image), pSubresource, pLayout);
1866 
1867 	vk::Cast(image)->getSubresourceLayout(pSubresource, pLayout);
1868 }
1869 
1870 VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkImageView *pView)
1871 {
1872 	TRACE("(VkDevice device = %p, const VkImageViewCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkImageView* pView = %p)",
1873 	      device, pCreateInfo, pAllocator, pView);
1874 
1875 	if(pCreateInfo->flags != 0)
1876 	{
1877 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1878 	}
1879 
1880 	const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1881 	const vk::SamplerYcbcrConversion *ycbcrConversion = nullptr;
1882 
1883 	while(extensionCreateInfo)
1884 	{
1885 		switch(extensionCreateInfo->sType)
1886 		{
1887 			case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR:
1888 			{
1889 				const VkImageViewUsageCreateInfo *usageCreateInfo = reinterpret_cast<const VkImageViewUsageCreateInfo *>(extensionCreateInfo);
1890 				ASSERT(!(~vk::Cast(pCreateInfo->image)->getUsage() & usageCreateInfo->usage));
1891 			}
1892 			break;
1893 			case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
1894 			{
1895 				const VkSamplerYcbcrConversionInfo *samplerYcbcrConversionInfo = reinterpret_cast<const VkSamplerYcbcrConversionInfo *>(extensionCreateInfo);
1896 				ycbcrConversion = vk::Cast(samplerYcbcrConversionInfo->conversion);
1897 
1898 				if(ycbcrConversion)
1899 				{
1900 					ASSERT((pCreateInfo->components.r == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.r == VK_COMPONENT_SWIZZLE_R) &&
1901 					       (pCreateInfo->components.g == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.g == VK_COMPONENT_SWIZZLE_G) &&
1902 					       (pCreateInfo->components.b == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.b == VK_COMPONENT_SWIZZLE_B) &&
1903 					       (pCreateInfo->components.a == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.a == VK_COMPONENT_SWIZZLE_A));
1904 				}
1905 			}
1906 			break;
1907 			default:
1908 				LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
1909 				break;
1910 		}
1911 
1912 		extensionCreateInfo = extensionCreateInfo->pNext;
1913 	}
1914 
1915 	VkResult result = vk::ImageView::Create(pAllocator, pCreateInfo, pView, ycbcrConversion);
1916 	if(result == VK_SUCCESS)
1917 	{
1918 		vk::Cast(device)->registerImageView(vk::Cast(*pView));
1919 	}
1920 
1921 	return result;
1922 }
1923 
1924 VKAPI_ATTR void VKAPI_CALL vkDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator)
1925 {
1926 	TRACE("(VkDevice device = %p, VkImageView imageView = %p, const VkAllocationCallbacks* pAllocator = %p)",
1927 	      device, static_cast<void *>(imageView), pAllocator);
1928 
1929 	vk::Cast(device)->unregisterImageView(vk::Cast(imageView));
1930 	vk::destroy(imageView, pAllocator);
1931 }
1932 
1933 VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule)
1934 {
1935 	TRACE("(VkDevice device = %p, const VkShaderModuleCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkShaderModule* pShaderModule = %p)",
1936 	      device, pCreateInfo, pAllocator, pShaderModule);
1937 
1938 	if(pCreateInfo->flags != 0)
1939 	{
1940 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1941 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1942 	}
1943 
1944 	auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1945 	while(nextInfo)
1946 	{
1947 		LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
1948 		nextInfo = nextInfo->pNext;
1949 	}
1950 
1951 	return vk::ShaderModule::Create(pAllocator, pCreateInfo, pShaderModule);
1952 }
1953 
1954 VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks *pAllocator)
1955 {
1956 	TRACE("(VkDevice device = %p, VkShaderModule shaderModule = %p, const VkAllocationCallbacks* pAllocator = %p)",
1957 	      device, static_cast<void *>(shaderModule), pAllocator);
1958 
1959 	vk::destroy(shaderModule, pAllocator);
1960 }
1961 
1962 VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPipelineCache *pPipelineCache)
1963 {
1964 	TRACE("(VkDevice device = %p, const VkPipelineCacheCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipelineCache* pPipelineCache = %p)",
1965 	      device, pCreateInfo, pAllocator, pPipelineCache);
1966 
1967 	if(pCreateInfo->flags != 0)
1968 	{
1969 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1970 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1971 	}
1972 
1973 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
1974 	while(extInfo)
1975 	{
1976 		LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1977 		extInfo = extInfo->pNext;
1978 	}
1979 
1980 	return vk::PipelineCache::Create(pAllocator, pCreateInfo, pPipelineCache);
1981 }
1982 
1983 VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks *pAllocator)
1984 {
1985 	TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, const VkAllocationCallbacks* pAllocator = %p)",
1986 	      device, static_cast<void *>(pipelineCache), pAllocator);
1987 
1988 	vk::destroy(pipelineCache, pAllocator);
1989 }
1990 
1991 VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize, void *pData)
1992 {
1993 	TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, size_t* pDataSize = %p, void* pData = %p)",
1994 	      device, static_cast<void *>(pipelineCache), pDataSize, pData);
1995 
1996 	return vk::Cast(pipelineCache)->getData(pDataSize, pData);
1997 }
1998 
1999 VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache *pSrcCaches)
2000 {
2001 	TRACE("(VkDevice device = %p, VkPipelineCache dstCache = %p, uint32_t srcCacheCount = %d, const VkPipelineCache* pSrcCaches = %p)",
2002 	      device, static_cast<void *>(dstCache), int(srcCacheCount), pSrcCaches);
2003 
2004 	return vk::Cast(dstCache)->merge(srcCacheCount, pSrcCaches);
2005 }
2006 
2007 VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines)
2008 {
2009 	TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, uint32_t createInfoCount = %d, const VkGraphicsPipelineCreateInfo* pCreateInfos = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipeline* pPipelines = %p)",
2010 	      device, static_cast<void *>(pipelineCache), int(createInfoCount), pCreateInfos, pAllocator, pPipelines);
2011 
2012 	VkResult errorResult = VK_SUCCESS;
2013 	for(uint32_t i = 0; i < createInfoCount; i++)
2014 	{
2015 		VkResult result = vk::GraphicsPipeline::Create(pAllocator, &pCreateInfos[i], &pPipelines[i], vk::Cast(device));
2016 
2017 		if(result == VK_SUCCESS)
2018 		{
2019 			static_cast<vk::GraphicsPipeline *>(vk::Cast(pPipelines[i]))->compileShaders(pAllocator, &pCreateInfos[i], vk::Cast(pipelineCache));
2020 		}
2021 		else
2022 		{
2023 			// According to the Vulkan spec, section 9.4. Multiple Pipeline Creation
2024 			// "When an application attempts to create many pipelines in a single command,
2025 			//  it is possible that some subset may fail creation. In that case, the
2026 			//  corresponding entries in the pPipelines output array will be filled with
2027 			//  VK_NULL_HANDLE values. If any pipeline fails creation (for example, due to
2028 			//  out of memory errors), the vkCreate*Pipelines commands will return an
2029 			//  error code. The implementation will attempt to create all pipelines, and
2030 			//  only return VK_NULL_HANDLE values for those that actually failed."
2031 			pPipelines[i] = VK_NULL_HANDLE;
2032 			errorResult = result;
2033 		}
2034 	}
2035 
2036 	return errorResult;
2037 }
2038 
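// Illustrative sketch, compiled out: since a batched pipeline creation can partially fail
// (see the spec excerpt above), callers should check every returned handle rather than only
// the aggregate VkResult. The helper name is hypothetical.
#if 0
static void exampleCheckPipelineBatch(uint32_t count, const VkPipeline *pipelines)
{
	for(uint32_t i = 0; i < count; i++)
	{
		if(pipelines[i] == VK_NULL_HANDLE)
		{
			// This entry failed to be created; the other entries remain valid and usable.
		}
	}
}
#endif
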
2039 VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines)
2040 {
2041 	TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, uint32_t createInfoCount = %d, const VkComputePipelineCreateInfo* pCreateInfos = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipeline* pPipelines = %p)",
2042 	      device, static_cast<void *>(pipelineCache), int(createInfoCount), pCreateInfos, pAllocator, pPipelines);
2043 
2044 	VkResult errorResult = VK_SUCCESS;
2045 	for(uint32_t i = 0; i < createInfoCount; i++)
2046 	{
2047 		VkResult result = vk::ComputePipeline::Create(pAllocator, &pCreateInfos[i], &pPipelines[i], vk::Cast(device));
2048 
2049 		if(result == VK_SUCCESS)
2050 		{
2051 			static_cast<vk::ComputePipeline *>(vk::Cast(pPipelines[i]))->compileShaders(pAllocator, &pCreateInfos[i], vk::Cast(pipelineCache));
2052 		}
2053 		else
2054 		{
2055 			// According to the Vulkan spec, section 9.4. Multiple Pipeline Creation
2056 			// "When an application attempts to create many pipelines in a single command,
2057 			//  it is possible that some subset may fail creation. In that case, the
2058 			//  corresponding entries in the pPipelines output array will be filled with
2059 			//  VK_NULL_HANDLE values. If any pipeline fails creation (for example, due to
2060 			//  out of memory errors), the vkCreate*Pipelines commands will return an
2061 			//  error code. The implementation will attempt to create all pipelines, and
2062 			//  only return VK_NULL_HANDLE values for those that actually failed."
2063 			pPipelines[i] = VK_NULL_HANDLE;
2064 			errorResult = result;
2065 		}
2066 	}
2067 
2068 	return errorResult;
2069 }
2070 
2071 VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator)
2072 {
2073 	TRACE("(VkDevice device = %p, VkPipeline pipeline = %p, const VkAllocationCallbacks* pAllocator = %p)",
2074 	      device, static_cast<void *>(pipeline), pAllocator);
2075 
2076 	vk::destroy(pipeline, pAllocator);
2077 }
2078 
2079 VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout)
2080 {
2081 	TRACE("(VkDevice device = %p, const VkPipelineLayoutCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipelineLayout* pPipelineLayout = %p)",
2082 	      device, pCreateInfo, pAllocator, pPipelineLayout);
2083 
2084 	if(pCreateInfo->flags != 0)
2085 	{
2086 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2087 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
2088 	}
2089 
2090 	auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2091 	while(nextInfo)
2092 	{
2093 		LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2094 		nextInfo = nextInfo->pNext;
2095 	}
2096 
2097 	return vk::PipelineLayout::Create(pAllocator, pCreateInfo, pPipelineLayout);
2098 }
2099 
2100 VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks *pAllocator)
2101 {
2102 	TRACE("(VkDevice device = %p, VkPipelineLayout pipelineLayout = %p, const VkAllocationCallbacks* pAllocator = %p)",
2103 	      device, static_cast<void *>(pipelineLayout), pAllocator);
2104 
2105 	vk::release(pipelineLayout, pAllocator);
2106 }
2107 
2108 VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSampler *pSampler)
2109 {
2110 	TRACE("(VkDevice device = %p, const VkSamplerCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSampler* pSampler = %p)",
2111 	      device, pCreateInfo, pAllocator, pSampler);
2112 
2113 	if(pCreateInfo->flags != 0)
2114 	{
2115 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
2116 	}
2117 
2118 	const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2119 	const vk::SamplerYcbcrConversion *ycbcrConversion = nullptr;
2120 	VkSamplerFilteringPrecisionModeGOOGLE filteringPrecision = VK_SAMPLER_FILTERING_PRECISION_MODE_LOW_GOOGLE;
2121 
2122 	while(extensionCreateInfo)
2123 	{
2124 		switch(static_cast<long>(extensionCreateInfo->sType))
2125 		{
2126 			case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
2127 			{
2128 				const VkSamplerYcbcrConversionInfo *samplerYcbcrConversionInfo = reinterpret_cast<const VkSamplerYcbcrConversionInfo *>(extensionCreateInfo);
2129 				ycbcrConversion = vk::Cast(samplerYcbcrConversionInfo->conversion);
2130 			}
2131 			break;
2132 #if !defined(__ANDROID__)
2133 			case VK_STRUCTURE_TYPE_SAMPLER_FILTERING_PRECISION_GOOGLE:
2134 			{
2135 				const VkSamplerFilteringPrecisionGOOGLE *filteringInfo =
2136 				    reinterpret_cast<const VkSamplerFilteringPrecisionGOOGLE *>(extensionCreateInfo);
2137 				filteringPrecision = filteringInfo->samplerFilteringPrecisionMode;
2138 			}
2139 			break;
2140 #endif
2141 			default:
2142 				LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
2143 				break;
2144 		}
2145 
2146 		extensionCreateInfo = extensionCreateInfo->pNext;
2147 	}
2148 
2149 	vk::SamplerState samplerState(pCreateInfo, ycbcrConversion, filteringPrecision);
2150 	uint32_t samplerID = vk::Cast(device)->indexSampler(samplerState);
2151 
2152 	VkResult result = vk::Sampler::Create(pAllocator, pCreateInfo, pSampler, samplerState, samplerID);
2153 
2154 	if(*pSampler == VK_NULL_HANDLE)
2155 	{
2156 		ASSERT(result != VK_SUCCESS);
2157 		vk::Cast(device)->removeSampler(samplerState);
2158 	}
2159 
2160 	return result;
2161 }
2162 
2163 VKAPI_ATTR void VKAPI_CALL vkDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator)
2164 {
2165 	TRACE("(VkDevice device = %p, VkSampler sampler = %p, const VkAllocationCallbacks* pAllocator = %p)",
2166 	      device, static_cast<void *>(sampler), pAllocator);
2167 
2168 	if(sampler != VK_NULL_HANDLE)
2169 	{
2170 		vk::Cast(device)->removeSampler(*vk::Cast(sampler));
2171 
2172 		vk::destroy(sampler, pAllocator);
2173 	}
2174 }
2175 
2176 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorSetLayout *pSetLayout)
2177 {
2178 	TRACE("(VkDevice device = %p, const VkDescriptorSetLayoutCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorSetLayout* pSetLayout = %p)",
2179 	      device, pCreateInfo, pAllocator, pSetLayout);
2180 
2181 	const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2182 
2183 	while(extensionCreateInfo)
2184 	{
2185 		switch(extensionCreateInfo->sType)
2186 		{
2187 			case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT:
2188 				ASSERT(!vk::Cast(device)->hasExtension(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME));
2189 				break;
2190 			default:
2191 				LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
2192 				break;
2193 		}
2194 
2195 		extensionCreateInfo = extensionCreateInfo->pNext;
2196 	}
2197 
2198 	return vk::DescriptorSetLayout::Create(pAllocator, pCreateInfo, pSetLayout);
2199 }
2200 
2201 VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks *pAllocator)
2202 {
2203 	TRACE("(VkDevice device = %p, VkDescriptorSetLayout descriptorSetLayout = %p, const VkAllocationCallbacks* pAllocator = %p)",
2204 	      device, static_cast<void *>(descriptorSetLayout), pAllocator);
2205 
2206 	vk::destroy(descriptorSetLayout, pAllocator);
2207 }
2208 
2209 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool)
2210 {
2211 	TRACE("(VkDevice device = %p, const VkDescriptorPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorPool* pDescriptorPool = %p)",
2212 	      device, pCreateInfo, pAllocator, pDescriptorPool);
2213 
2214 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
2215 	while(extInfo)
2216 	{
2217 		LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2218 		extInfo = extInfo->pNext;
2219 	}
2220 
2221 	return vk::DescriptorPool::Create(pAllocator, pCreateInfo, pDescriptorPool);
2222 }
2223 
2224 VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks *pAllocator)
2225 {
2226 	TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
2227 	      device, static_cast<void *>(descriptorPool), pAllocator);
2228 
2229 	vk::destroy(descriptorPool, pAllocator);
2230 }
2231 
2232 VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags)
2233 {
2234 	TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, VkDescriptorPoolResetFlags flags = 0x%x)",
2235 	      device, static_cast<void *>(descriptorPool), int(flags));
2236 
2237 	if(flags != 0)
2238 	{
2239 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2240 		UNSUPPORTED("flags %d", int(flags));
2241 	}
2242 
2243 	return vk::Cast(descriptorPool)->reset();
2244 }
2245 
2246 VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo, VkDescriptorSet *pDescriptorSets)
2247 {
2248 	TRACE("(VkDevice device = %p, const VkDescriptorSetAllocateInfo* pAllocateInfo = %p, VkDescriptorSet* pDescriptorSets = %p)",
2249 	      device, pAllocateInfo, pDescriptorSets);
2250 
2251 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pAllocateInfo->pNext);
2252 	while(extInfo)
2253 	{
2254 		LOG_TRAP("pAllocateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2255 		extInfo = extInfo->pNext;
2256 	}
2257 
2258 	return vk::Cast(pAllocateInfo->descriptorPool)->allocateSets(pAllocateInfo->descriptorSetCount, pAllocateInfo->pSetLayouts, pDescriptorSets);
2259 }
2260 
2261 VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets)
2262 {
2263 	TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, uint32_t descriptorSetCount = %d, const VkDescriptorSet* pDescriptorSets = %p)",
2264 	      device, static_cast<void *>(descriptorPool), descriptorSetCount, pDescriptorSets);
2265 
2266 	vk::Cast(descriptorPool)->freeSets(descriptorSetCount, pDescriptorSets);
2267 
2268 	return VK_SUCCESS;
2269 }
2270 
2271 VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet *pDescriptorCopies)
2272 {
2273 	TRACE("(VkDevice device = %p, uint32_t descriptorWriteCount = %d, const VkWriteDescriptorSet* pDescriptorWrites = %p, uint32_t descriptorCopyCount = %d, const VkCopyDescriptorSet* pDescriptorCopies = %p)",
2274 	      device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
2275 
2276 	vk::Cast(device)->updateDescriptorSets(descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
2277 }
2278 
2279 VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer)
2280 {
2281 	TRACE("(VkDevice device = %p, const VkFramebufferCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkFramebuffer* pFramebuffer = %p)",
2282 	      device, pCreateInfo, pAllocator, pFramebuffer);
2283 
2284 	return vk::Framebuffer::Create(pAllocator, pCreateInfo, pFramebuffer);
2285 }
2286 
2287 VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator)
2288 {
2289 	TRACE("(VkDevice device = %p, VkFramebuffer framebuffer = %p, const VkAllocationCallbacks* pAllocator = %p)",
2290 	      device, static_cast<void *>(framebuffer), pAllocator);
2291 
2292 	vk::destroy(framebuffer, pAllocator);
2293 }
2294 
2295 VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass)
2296 {
2297 	TRACE("(VkDevice device = %p, const VkRenderPassCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkRenderPass* pRenderPass = %p)",
2298 	      device, pCreateInfo, pAllocator, pRenderPass);
2299 
2300 	if(pCreateInfo->flags != 0)
2301 	{
2302 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2303 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
2304 	}
2305 
2306 	ValidateRenderPassPNextChain(device, pCreateInfo);
2307 
2308 	return vk::RenderPass::Create(pAllocator, pCreateInfo, pRenderPass);
2309 }
2310 
2311 VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass)
2312 {
2313 	TRACE("(VkDevice device = %p, const VkRenderPassCreateInfo2KHR* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkRenderPass* pRenderPass = %p)",
2314 	      device, pCreateInfo, pAllocator, pRenderPass);
2315 
2316 	if(pCreateInfo->flags != 0)
2317 	{
2318 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2319 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
2320 	}
2321 
2322 	ValidateRenderPassPNextChain(device, pCreateInfo);
2323 
2324 	return vk::RenderPass::Create(pAllocator, pCreateInfo, pRenderPass);
2325 }
2326 
2327 VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator)
2328 {
2329 	TRACE("(VkDevice device = %p, VkRenderPass renderPass = %p, const VkAllocationCallbacks* pAllocator = %p)",
2330 	      device, static_cast<void *>(renderPass), pAllocator);
2331 
2332 	vk::destroy(renderPass, pAllocator);
2333 }
2334 
2335 VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity)
2336 {
2337 	TRACE("(VkDevice device = %p, VkRenderPass renderPass = %p, VkExtent2D* pGranularity = %p)",
2338 	      device, static_cast<void *>(renderPass), pGranularity);
2339 
2340 	vk::Cast(renderPass)->getRenderAreaGranularity(pGranularity);
2341 }
2342 
2343 VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool)
2344 {
2345 	TRACE("(VkDevice device = %p, const VkCommandPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkCommandPool* pCommandPool = %p)",
2346 	      device, pCreateInfo, pAllocator, pCommandPool);
2347 
2348 	auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2349 	while(nextInfo)
2350 	{
2351 		LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2352 		nextInfo = nextInfo->pNext;
2353 	}
2354 
2355 	return vk::CommandPool::Create(pAllocator, pCreateInfo, pCommandPool);
2356 }
2357 
2358 VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator)
2359 {
2360 	TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
2361 	      device, static_cast<void *>(commandPool), pAllocator);
2362 
2363 	vk::destroy(commandPool, pAllocator);
2364 }
2365 
2366 VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags)
2367 {
2368 	TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, VkCommandPoolResetFlags flags = %d)",
2369 	      device, static_cast<void *>(commandPool), int(flags));
2370 
2371 	return vk::Cast(commandPool)->reset(flags);
2372 }
2373 
2374 VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo, VkCommandBuffer *pCommandBuffers)
2375 {
2376 	TRACE("(VkDevice device = %p, const VkCommandBufferAllocateInfo* pAllocateInfo = %p, VkCommandBuffer* pCommandBuffers = %p)",
2377 	      device, pAllocateInfo, pCommandBuffers);
2378 
2379 	auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pAllocateInfo->pNext);
2380 	while(nextInfo)
2381 	{
2382 		LOG_TRAP("pAllocateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2383 		nextInfo = nextInfo->pNext;
2384 	}
2385 
2386 	return vk::Cast(pAllocateInfo->commandPool)->allocateCommandBuffers(vk::Cast(device), pAllocateInfo->level, pAllocateInfo->commandBufferCount, pCommandBuffers);
2387 }
2388 
2389 VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers)
2390 {
2391 	TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, uint32_t commandBufferCount = %d, const VkCommandBuffer* pCommandBuffers = %p)",
2392 	      device, static_cast<void *>(commandPool), int(commandBufferCount), pCommandBuffers);
2393 
2394 	vk::Cast(commandPool)->freeCommandBuffers(commandBufferCount, pCommandBuffers);
2395 }
2396 
2397 VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo)
2398 {
2399 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkCommandBufferBeginInfo* pBeginInfo = %p)",
2400 	      commandBuffer, pBeginInfo);
2401 
2402 	auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pBeginInfo->pNext);
2403 	while(nextInfo)
2404 	{
2405 		LOG_TRAP("pBeginInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2406 		nextInfo = nextInfo->pNext;
2407 	}
2408 
2409 	return vk::Cast(commandBuffer)->begin(pBeginInfo->flags, pBeginInfo->pInheritanceInfo);
2410 }
2411 
2412 VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer(VkCommandBuffer commandBuffer)
2413 {
2414 	TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);
2415 
2416 	return vk::Cast(commandBuffer)->end();
2417 }
2418 
2419 VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags)
2420 {
2421 	TRACE("(VkCommandBuffer commandBuffer = %p, VkCommandBufferResetFlags flags = %d)", commandBuffer, int(flags));
2422 
2423 	return vk::Cast(commandBuffer)->reset(flags);
2424 }
2425 
2426 VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline)
2427 {
2428 	TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineBindPoint pipelineBindPoint = %d, VkPipeline pipeline = %p)",
2429 	      commandBuffer, int(pipelineBindPoint), static_cast<void *>(pipeline));
2430 
2431 	vk::Cast(commandBuffer)->bindPipeline(pipelineBindPoint, vk::Cast(pipeline));
2432 }
2433 
2434 VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport *pViewports)
2435 {
2436 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstViewport = %d, uint32_t viewportCount = %d, const VkViewport* pViewports = %p)",
2437 	      commandBuffer, int(firstViewport), int(viewportCount), pViewports);
2438 
2439 	vk::Cast(commandBuffer)->setViewport(firstViewport, viewportCount, pViewports);
2440 }
2441 
2442 VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D *pScissors)
2443 {
2444 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstScissor = %d, uint32_t scissorCount = %d, const VkRect2D* pScissors = %p)",
2445 	      commandBuffer, int(firstScissor), int(scissorCount), pScissors);
2446 
2447 	vk::Cast(commandBuffer)->setScissor(firstScissor, scissorCount, pScissors);
2448 }
2449 
2450 VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
2451 {
2452 	TRACE("(VkCommandBuffer commandBuffer = %p, float lineWidth = %f)", commandBuffer, lineWidth);
2453 
2454 	vk::Cast(commandBuffer)->setLineWidth(lineWidth);
2455 }
2456 
2457 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor)
2458 {
2459 	TRACE("(VkCommandBuffer commandBuffer = %p, float depthBiasConstantFactor = %f, float depthBiasClamp = %f, float depthBiasSlopeFactor = %f)",
2460 	      commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
2461 
2462 	vk::Cast(commandBuffer)->setDepthBias(depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
2463 }
2464 
2465 VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4])
2466 {
2467 	TRACE("(VkCommandBuffer commandBuffer = %p, const float blendConstants[4] = {%f, %f, %f, %f})",
2468 	      commandBuffer, blendConstants[0], blendConstants[1], blendConstants[2], blendConstants[3]);
2469 
2470 	vk::Cast(commandBuffer)->setBlendConstants(blendConstants);
2471 }
2472 
2473 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds)
2474 {
2475 	TRACE("(VkCommandBuffer commandBuffer = %p, float minDepthBounds = %f, float maxDepthBounds = %f)",
2476 	      commandBuffer, minDepthBounds, maxDepthBounds);
2477 
2478 	vk::Cast(commandBuffer)->setDepthBounds(minDepthBounds, maxDepthBounds);
2479 }
2480 
2481 VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask)
2482 {
2483 	TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t compareMask = %d)",
2484 	      commandBuffer, int(faceMask), int(compareMask));
2485 
2486 	vk::Cast(commandBuffer)->setStencilCompareMask(faceMask, compareMask);
2487 }
2488 
2489 VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask)
2490 {
2491 	TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t writeMask = %d)",
2492 	      commandBuffer, int(faceMask), int(writeMask));
2493 
2494 	vk::Cast(commandBuffer)->setStencilWriteMask(faceMask, writeMask);
2495 }
2496 
2497 VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference)
2498 {
2499 	TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t reference = %d)",
2500 	      commandBuffer, int(faceMask), int(reference));
2501 
2502 	vk::Cast(commandBuffer)->setStencilReference(faceMask, reference);
2503 }
2504 
2505 VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t *pDynamicOffsets)
2506 {
2507 	TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineBindPoint pipelineBindPoint = %d, VkPipelineLayout layout = %p, uint32_t firstSet = %d, uint32_t descriptorSetCount = %d, const VkDescriptorSet* pDescriptorSets = %p, uint32_t dynamicOffsetCount = %d, const uint32_t* pDynamicOffsets = %p)",
2508 	      commandBuffer, int(pipelineBindPoint), static_cast<void *>(layout), int(firstSet), int(descriptorSetCount), pDescriptorSets, int(dynamicOffsetCount), pDynamicOffsets);
2509 
2510 	vk::Cast(commandBuffer)->bindDescriptorSets(pipelineBindPoint, vk::Cast(layout), firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
2511 }
2512 
2513 VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType)
2514 {
2515 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, VkIndexType indexType = %d)",
2516 	      commandBuffer, static_cast<void *>(buffer), int(offset), int(indexType));
2517 
2518 	vk::Cast(commandBuffer)->bindIndexBuffer(vk::Cast(buffer), offset, indexType);
2519 }
2520 
2521 VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer *pBuffers, const VkDeviceSize *pOffsets)
2522 {
2523 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstBinding = %d, uint32_t bindingCount = %d, const VkBuffer* pBuffers = %p, const VkDeviceSize* pOffsets = %p)",
2524 	      commandBuffer, int(firstBinding), int(bindingCount), pBuffers, pOffsets);
2525 
2526 	vk::Cast(commandBuffer)->bindVertexBuffers(firstBinding, bindingCount, pBuffers, pOffsets);
2527 }
2528 
2529 VKAPI_ATTR void VKAPI_CALL vkCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance)
2530 {
2531 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t vertexCount = %d, uint32_t instanceCount = %d, uint32_t firstVertex = %d, uint32_t firstInstance = %d)",
2532 	      commandBuffer, int(vertexCount), int(instanceCount), int(firstVertex), int(firstInstance));
2533 
2534 	vk::Cast(commandBuffer)->draw(vertexCount, instanceCount, firstVertex, firstInstance);
2535 }
2536 
2537 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance)
2538 {
2539 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t indexCount = %d, uint32_t instanceCount = %d, uint32_t firstIndex = %d, int32_t vertexOffset = %d, uint32_t firstInstance = %d)",
2540 	      commandBuffer, int(indexCount), int(instanceCount), int(firstIndex), int(vertexOffset), int(firstInstance));
2541 
2542 	vk::Cast(commandBuffer)->drawIndexed(indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
2543 }
2544 
2545 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride)
2546 {
2547 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, uint32_t drawCount = %d, uint32_t stride = %d)",
2548 	      commandBuffer, static_cast<void *>(buffer), int(offset), int(drawCount), int(stride));
2549 
2550 	vk::Cast(commandBuffer)->drawIndirect(vk::Cast(buffer), offset, drawCount, stride);
2551 }
2552 
2553 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride)
2554 {
2555 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, uint32_t drawCount = %d, uint32_t stride = %d)",
2556 	      commandBuffer, static_cast<void *>(buffer), int(offset), int(drawCount), int(stride));
2557 
2558 	vk::Cast(commandBuffer)->drawIndexedIndirect(vk::Cast(buffer), offset, drawCount, stride);
2559 }
2560 
2561 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride)
2562 {
2563 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, VkBuffer countBuffer = %p, VkDeviceSize countBufferOffset = %d, uint32_t maxDrawCount = %d, uint32_t stride = %d)",
2564 	      commandBuffer, static_cast<void *>(buffer), int(offset), static_cast<void *>(countBuffer), int(countBufferOffset), int(maxDrawCount), int(stride));
2565 	UNSUPPORTED("VK_KHR_draw_indirect_count");
2566 }
2567 
2568 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride)
2569 {
2570 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, VkBuffer countBuffer = %p, VkDeviceSize countBufferOffset = %d, uint32_t maxDrawCount = %d, uint32_t stride = %d)",
2571 	      commandBuffer, static_cast<void *>(buffer), int(offset), static_cast<void *>(countBuffer), int(countBufferOffset), int(maxDrawCount), int(stride));
2572 	UNSUPPORTED("VK_KHR_draw_indirect_count");
2573 }
2574 
2575 VKAPI_ATTR void VKAPI_CALL vkCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ)
2576 {
2577 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t groupCountX = %d, uint32_t groupCountY = %d, uint32_t groupCountZ = %d)",
2578 	      commandBuffer, int(groupCountX), int(groupCountY), int(groupCountZ));
2579 
2580 	vk::Cast(commandBuffer)->dispatch(groupCountX, groupCountY, groupCountZ);
2581 }
2582 
2583 VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset)
2584 {
2585 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d)",
2586 	      commandBuffer, static_cast<void *>(buffer), int(offset));
2587 
2588 	vk::Cast(commandBuffer)->dispatchIndirect(vk::Cast(buffer), offset);
2589 }
2590 
2591 VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy *pRegions)
2592 {
2593 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer srcBuffer = %p, VkBuffer dstBuffer = %p, uint32_t regionCount = %d, const VkBufferCopy* pRegions = %p)",
2594 	      commandBuffer, static_cast<void *>(srcBuffer), static_cast<void *>(dstBuffer), int(regionCount), pRegions);
2595 
2596 	vk::Cast(commandBuffer)->copyBuffer(vk::Cast(srcBuffer), vk::Cast(dstBuffer), regionCount, pRegions);
2597 }
2598 
2599 VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions)
2600 {
2601 	TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageCopy* pRegions = %p)",
2602 	      commandBuffer, static_cast<void *>(srcImage), srcImageLayout, static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions);
2603 
2604 	vk::Cast(commandBuffer)->copyImage(vk::Cast(srcImage), srcImageLayout, vk::Cast(dstImage), dstImageLayout, regionCount, pRegions);
2605 }
2606 
2607 VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter)
2608 {
2609 	TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageBlit* pRegions = %p, VkFilter filter = %d)",
2610 	      commandBuffer, static_cast<void *>(srcImage), srcImageLayout, static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions, filter);
2611 
2612 	vk::Cast(commandBuffer)->blitImage(vk::Cast(srcImage), srcImageLayout, vk::Cast(dstImage), dstImageLayout, regionCount, pRegions, filter);
2613 }
2614 
2615 VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions)
2616 {
2617 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer srcBuffer = %p, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkBufferImageCopy* pRegions = %p)",
2618 	      commandBuffer, static_cast<void *>(srcBuffer), static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions);
2619 
2620 	vk::Cast(commandBuffer)->copyBufferToImage(vk::Cast(srcBuffer), vk::Cast(dstImage), dstImageLayout, regionCount, pRegions);
2621 }
2622 
2623 VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions)
2624 {
2625 	TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkBuffer dstBuffer = %p, uint32_t regionCount = %d, const VkBufferImageCopy* pRegions = %p)",
2626 	      commandBuffer, static_cast<void *>(srcImage), int(srcImageLayout), static_cast<void *>(dstBuffer), int(regionCount), pRegions);
2627 
2628 	vk::Cast(commandBuffer)->copyImageToBuffer(vk::Cast(srcImage), srcImageLayout, vk::Cast(dstBuffer), regionCount, pRegions);
2629 }
2630 
2631 VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData)
2632 {
2633 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize dataSize = %d, const void* pData = %p)",
2634 	      commandBuffer, static_cast<void *>(dstBuffer), int(dstOffset), int(dataSize), pData);
2635 
2636 	vk::Cast(commandBuffer)->updateBuffer(vk::Cast(dstBuffer), dstOffset, dataSize, pData);
2637 }
2638 
2639 VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data)
2640 {
2641 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize size = %d, uint32_t data = %d)",
2642 	      commandBuffer, static_cast<void *>(dstBuffer), int(dstOffset), int(size), data);
2643 
2644 	vk::Cast(commandBuffer)->fillBuffer(vk::Cast(dstBuffer), dstOffset, size, data);
2645 }
2646 
2647 VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue *pColor, uint32_t rangeCount, const VkImageSubresourceRange *pRanges)
2648 {
2649 	TRACE("(VkCommandBuffer commandBuffer = %p, VkImage image = %p, VkImageLayout imageLayout = %d, const VkClearColorValue* pColor = %p, uint32_t rangeCount = %d, const VkImageSubresourceRange* pRanges = %p)",
2650 	      commandBuffer, static_cast<void *>(image), int(imageLayout), pColor, int(rangeCount), pRanges);
2651 
2652 	vk::Cast(commandBuffer)->clearColorImage(vk::Cast(image), imageLayout, pColor, rangeCount, pRanges);
2653 }
2654 
2655 VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange *pRanges)
2656 {
2657 	TRACE("(VkCommandBuffer commandBuffer = %p, VkImage image = %p, VkImageLayout imageLayout = %d, const VkClearDepthStencilValue* pDepthStencil = %p, uint32_t rangeCount = %d, const VkImageSubresourceRange* pRanges = %p)",
2658 	      commandBuffer, static_cast<void *>(image), int(imageLayout), pDepthStencil, int(rangeCount), pRanges);
2659 
2660 	vk::Cast(commandBuffer)->clearDepthStencilImage(vk::Cast(image), imageLayout, pDepthStencil, rangeCount, pRanges);
2661 }
2662 
2663 VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment *pAttachments, uint32_t rectCount, const VkClearRect *pRects)
2664 {
2665 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t attachmentCount = %d, const VkClearAttachment* pAttachments = %p, uint32_t rectCount = %d, const VkClearRect* pRects = %p)",
2666 	      commandBuffer, int(attachmentCount), pAttachments, int(rectCount), pRects);
2667 
2668 	vk::Cast(commandBuffer)->clearAttachments(attachmentCount, pAttachments, rectCount, pRects);
2669 }
2670 
2671 VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve *pRegions)
2672 {
2673 	TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageResolve* pRegions = %p)",
2674 	      commandBuffer, static_cast<void *>(srcImage), int(srcImageLayout), static_cast<void *>(dstImage), int(dstImageLayout), regionCount, pRegions);
2675 
2676 	vk::Cast(commandBuffer)->resolveImage(vk::Cast(srcImage), srcImageLayout, vk::Cast(dstImage), dstImageLayout, regionCount, pRegions);
2677 }
2678 
2679 VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
2680 {
2681 	TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, VkPipelineStageFlags stageMask = %d)",
2682 	      commandBuffer, static_cast<void *>(event), int(stageMask));
2683 
2684 	vk::Cast(commandBuffer)->setEvent(vk::Cast(event), stageMask);
2685 }
2686 
2687 VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
2688 {
2689 	TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, VkPipelineStageFlags stageMask = %d)",
2690 	      commandBuffer, static_cast<void *>(event), int(stageMask));
2691 
2692 	vk::Cast(commandBuffer)->resetEvent(vk::Cast(event), stageMask);
2693 }
2694 
2695 VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers)
2696 {
2697 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t eventCount = %d, const VkEvent* pEvents = %p, VkPipelineStageFlags srcStageMask = 0x%x, VkPipelineStageFlags dstStageMask = 0x%x, uint32_t memoryBarrierCount = %d, const VkMemoryBarrier* pMemoryBarriers = %p, uint32_t bufferMemoryBarrierCount = %d, const VkBufferMemoryBarrier* pBufferMemoryBarriers = %p, uint32_t imageMemoryBarrierCount = %d, const VkImageMemoryBarrier* pImageMemoryBarriers = %p)",
2698 	      commandBuffer, int(eventCount), pEvents, int(srcStageMask), int(dstStageMask), int(memoryBarrierCount), pMemoryBarriers, int(bufferMemoryBarrierCount), pBufferMemoryBarriers, int(imageMemoryBarrierCount), pImageMemoryBarriers);
2699 
2700 	vk::Cast(commandBuffer)->waitEvents(eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2701 }
2702 
2703 VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers)
2704 {
2705 	TRACE(
2706 	    "(VkCommandBuffer commandBuffer = %p, VkPipelineStageFlags srcStageMask = 0x%x, VkPipelineStageFlags dstStageMask = 0x%x, VkDependencyFlags dependencyFlags = %d, uint32_t memoryBarrierCount = %d, const VkMemoryBarrier* pMemoryBarriers = %p,"
2707 	    " uint32_t bufferMemoryBarrierCount = %d, const VkBufferMemoryBarrier* pBufferMemoryBarriers = %p, uint32_t imageMemoryBarrierCount = %d, const VkImageMemoryBarrier* pImageMemoryBarriers = %p)",
2708 	    commandBuffer, int(srcStageMask), int(dstStageMask), dependencyFlags, int(memoryBarrierCount), pMemoryBarriers, int(bufferMemoryBarrierCount), pBufferMemoryBarriers, int(imageMemoryBarrierCount), pImageMemoryBarriers);
2709 
2710 	vk::Cast(commandBuffer)->pipelineBarrier(srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2711 }
2712 
2713 VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags)
2714 {
2715 	TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t query = %d, VkQueryControlFlags flags = %d)",
2716 	      commandBuffer, static_cast<void *>(queryPool), query, int(flags));
2717 
2718 	vk::Cast(commandBuffer)->beginQuery(vk::Cast(queryPool), query, flags);
2719 }
2720 
2721 VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query)
2722 {
2723 	TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t query = %d)",
2724 	      commandBuffer, static_cast<void *>(queryPool), int(query));
2725 
2726 	vk::Cast(commandBuffer)->endQuery(vk::Cast(queryPool), query);
2727 }
2728 
2729 VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount)
2730 {
2731 	TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d)",
2732 	      commandBuffer, static_cast<void *>(queryPool), int(firstQuery), int(queryCount));
2733 
2734 	vk::Cast(commandBuffer)->resetQueryPool(vk::Cast(queryPool), firstQuery, queryCount);
2735 }
2736 
2737 VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query)
2738 {
2739 	TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineStageFlagBits pipelineStage = %d, VkQueryPool queryPool = %p, uint32_t query = %d)",
2740 	      commandBuffer, int(pipelineStage), static_cast<void *>(queryPool), int(query));
2741 
2742 	vk::Cast(commandBuffer)->writeTimestamp(pipelineStage, vk::Cast(queryPool), query);
2743 }
2744 
2745 VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags)
2746 {
2747 	TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize stride = %d, VkQueryResultFlags flags = %d)",
2748 	      commandBuffer, static_cast<void *>(queryPool), int(firstQuery), int(queryCount), static_cast<void *>(dstBuffer), int(dstOffset), int(stride), int(flags));
2749 
2750 	vk::Cast(commandBuffer)->copyQueryPoolResults(vk::Cast(queryPool), firstQuery, queryCount, vk::Cast(dstBuffer), dstOffset, stride, flags);
2751 }
2752 
2753 VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void *pValues)
2754 {
2755 	TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineLayout layout = %p, VkShaderStageFlags stageFlags = %d, uint32_t offset = %d, uint32_t size = %d, const void* pValues = %p)",
2756 	      commandBuffer, static_cast<void *>(layout), stageFlags, offset, size, pValues);
2757 
2758 	vk::Cast(commandBuffer)->pushConstants(vk::Cast(layout), stageFlags, offset, size, pValues);
2759 }
2760 
2761 VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, VkSubpassContents contents)
2762 {
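	// The Vulkan 1.0 entry point forwards to vkCmdBeginRenderPass2 below, wrapping 'contents' in a default VkSubpassBeginInfo.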
2763 	VkSubpassBeginInfo subpassBeginInfo = { VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, nullptr, contents };
2764 	vkCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, &subpassBeginInfo);
2765 }
2766 
2767 VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, const VkSubpassBeginInfoKHR *pSubpassBeginInfo)
2768 {
2769 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkRenderPassBeginInfo* pRenderPassBegin = %p, const VkSubpassBeginInfoKHR* pSubpassBeginInfo = %p)",
2770 	      commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
2771 
2772 	const VkBaseInStructure *renderPassBeginInfo = reinterpret_cast<const VkBaseInStructure *>(pRenderPassBegin->pNext);
2773 	const VkRenderPassAttachmentBeginInfo *attachmentBeginInfo = nullptr;
2774 	while(renderPassBeginInfo)
2775 	{
2776 		switch(renderPassBeginInfo->sType)
2777 		{
2778 			case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO:
2779 				// This extension controls which render area is used on which physical device,
2780 				// in order to distribute rendering between multiple physical devices.
2781 				// SwiftShader only has a single physical device, so this extension does nothing in this case.
2782 				break;
2783 			case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO:
2784 				attachmentBeginInfo = reinterpret_cast<const VkRenderPassAttachmentBeginInfo *>(renderPassBeginInfo);
2785 				break;
2786 			default:
2787 				LOG_TRAP("pRenderPassBegin->pNext sType = %s", vk::Stringify(renderPassBeginInfo->sType).c_str());
2788 				break;
2789 		}
2790 
2791 		renderPassBeginInfo = renderPassBeginInfo->pNext;
2792 	}
2793 
2794 	vk::Cast(commandBuffer)->beginRenderPass(vk::Cast(pRenderPassBegin->renderPass), vk::Cast(pRenderPassBegin->framebuffer), pRenderPassBegin->renderArea, pRenderPassBegin->clearValueCount, pRenderPassBegin->pClearValues, pSubpassBeginInfo->contents, attachmentBeginInfo);
2795 }
2796 
2797 VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents)
2798 {
2799 	TRACE("(VkCommandBuffer commandBuffer = %p, VkSubpassContents contents = %d)",
2800 	      commandBuffer, contents);
2801 
2802 	vk::Cast(commandBuffer)->nextSubpass(contents);
2803 }
2804 
2805 VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo, const VkSubpassEndInfoKHR *pSubpassEndInfo)
2806 {
2807 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkSubpassBeginInfoKHR* pSubpassBeginInfo = %p, const VkSubpassEndInfoKHR* pSubpassEndInfo = %p)",
2808 	      commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
2809 
2810 	vk::Cast(commandBuffer)->nextSubpass(pSubpassBeginInfo->contents);
2811 }
2812 
2813 VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass(VkCommandBuffer commandBuffer)
2814 {
2815 	TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);
2816 
2817 	vk::Cast(commandBuffer)->endRenderPass();
2818 }
2819 
2820 VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo)
2821 {
2822 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkSubpassEndInfoKHR* pSubpassEndInfo = %p)", commandBuffer, pSubpassEndInfo);
2823 
2824 	vk::Cast(commandBuffer)->endRenderPass();
2825 }
2826 
2827 VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers)
2828 {
2829 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t commandBufferCount = %d, const VkCommandBuffer* pCommandBuffers = %p)",
2830 	      commandBuffer, commandBufferCount, pCommandBuffers);
2831 
2832 	vk::Cast(commandBuffer)->executeCommands(commandBufferCount, pCommandBuffers);
2833 }
2834 
2835 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion(uint32_t *pApiVersion)
2836 {
2837 	TRACE("(uint32_t* pApiVersion = %p)", pApiVersion);
2838 	*pApiVersion = vk::API_VERSION;
2839 	return VK_SUCCESS;
2840 }
2841 
2842 VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos)
2843 {
2844 	TRACE("(VkDevice device = %p, uint32_t bindInfoCount = %d, const VkBindBufferMemoryInfo* pBindInfos = %p)",
2845 	      device, bindInfoCount, pBindInfos);
2846 
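	// Validate every bind info before performing any binds, so the call either binds all buffers or none.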
2847 	for(uint32_t i = 0; i < bindInfoCount; i++)
2848 	{
2849 		auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pBindInfos[i].pNext);
2850 		while(extInfo)
2851 		{
2852 			LOG_TRAP("pBindInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
2853 			extInfo = extInfo->pNext;
2854 		}
2855 
2856 		if(!vk::Cast(pBindInfos[i].buffer)->canBindToMemory(vk::Cast(pBindInfos[i].memory)))
2857 		{
2858 			UNSUPPORTED("vkBindBufferMemory2 with invalid external memory");
2859 			return VK_ERROR_INVALID_EXTERNAL_HANDLE;
2860 		}
2861 	}
2862 
2863 	for(uint32_t i = 0; i < bindInfoCount; i++)
2864 	{
2865 		vk::Cast(pBindInfos[i].buffer)->bind(vk::Cast(pBindInfos[i].memory), pBindInfos[i].memoryOffset);
2866 	}
2867 
2868 	return VK_SUCCESS;
2869 }
2870 
2871 VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo *pBindInfos)
2872 {
2873 	TRACE("(VkDevice device = %p, uint32_t bindInfoCount = %d, const VkBindImageMemoryInfo* pBindInfos = %p)",
2874 	      device, bindInfoCount, pBindInfos);
2875 
2876 	for(uint32_t i = 0; i < bindInfoCount; i++)
2877 	{
2878 		if(!vk::Cast(pBindInfos[i].image)->canBindToMemory(vk::Cast(pBindInfos[i].memory)))
2879 		{
2880 			UNSUPPORTED("vkBindImageMemory2 with invalid external memory");
2881 			return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2882 		}
2883 	}
2884 
2885 	for(uint32_t i = 0; i < bindInfoCount; i++)
2886 	{
2887 		vk::DeviceMemory *memory = vk::Cast(pBindInfos[i].memory);
2888 		VkDeviceSize offset = pBindInfos[i].memoryOffset;
2889 
2890 		auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pBindInfos[i].pNext);
2891 		while(extInfo)
2892 		{
2893 			switch(extInfo->sType)
2894 			{
2895 				case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO:
2896 					/* Do nothing */
2897 					break;
2898 
2899 #ifndef __ANDROID__
2900 				case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR:
2901 				{
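					// Swapchain-backed images bind to the memory owned by the swapchain image at the given index, at offset 0.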
2902 					auto swapchainInfo = reinterpret_cast<VkBindImageMemorySwapchainInfoKHR const *>(extInfo);
2903 					memory = vk::Cast(swapchainInfo->swapchain)->getImage(swapchainInfo->imageIndex).getImageMemory();
2904 					offset = 0;
2905 				}
2906 				break;
2907 #endif
2908 
2909 				default:
2910 					LOG_TRAP("pBindInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
2911 					break;
2912 			}
2913 			extInfo = extInfo->pNext;
2914 		}
2915 
2916 		vk::Cast(pBindInfos[i].image)->bind(memory, offset);
2917 	}
2918 
2919 	return VK_SUCCESS;
2920 }
2921 
2922 VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeatures(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
2923 {
2924 	TRACE("(VkDevice device = %p, uint32_t heapIndex = %d, uint32_t localDeviceIndex = %d, uint32_t remoteDeviceIndex = %d, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures = %p)",
2925 	      device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
2926 
2927 	ASSERT(localDeviceIndex != remoteDeviceIndex);                 // "localDeviceIndex must not equal remoteDeviceIndex"
2928 	UNSUPPORTED("remoteDeviceIndex: %d", int(remoteDeviceIndex));  // Only one physical device is supported, and since the device indexes can't be equal, this should never be called.
2929 }
2930 
2931 VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask)
2932 {
2933 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t deviceMask = %d)", commandBuffer, deviceMask);
2934 
2935 	vk::Cast(commandBuffer)->setDeviceMask(deviceMask);
2936 }
2937 
2938 VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ)
2939 {
2940 	TRACE("(VkCommandBuffer commandBuffer = %p, baseGroupX = %u, baseGroupY = %u, baseGroupZ = %u, groupCountX = %u, groupCountY = %u, groupCountZ = %u)",
2941 	      commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
2942 
2943 	vk::Cast(commandBuffer)->dispatchBase(baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
2944 }
2945 
2946 VKAPI_ATTR void VKAPI_CALL vkResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount)
2947 {
2948 	TRACE("(VkDevice device = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d)",
2949 	      device, static_cast<void *>(queryPool), firstQuery, queryCount);
2950 	vk::Cast(queryPool)->reset(firstQuery, queryCount);
2951 }
2952 
2953 VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
2954 {
2955 	TRACE("(VkInstance instance = %p, uint32_t* pPhysicalDeviceGroupCount = %p, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties = %p)",
2956 	      instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
2957 
2958 	return vk::Cast(instance)->getPhysicalDeviceGroups(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
2959 }
2960 
2961 VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
2962 {
2963 	TRACE("(VkDevice device = %p, const VkImageMemoryRequirementsInfo2* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
2964 	      device, pInfo, pMemoryRequirements);
2965 
2966 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pInfo->pNext);
2967 	while(extInfo)
2968 	{
2969 		LOG_TRAP("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2970 		extInfo = extInfo->pNext;
2971 	}
2972 
2973 	VkBaseOutStructure *extensionRequirements = reinterpret_cast<VkBaseOutStructure *>(pMemoryRequirements->pNext);
2974 	while(extensionRequirements)
2975 	{
2976 		switch(extensionRequirements->sType)
2977 		{
2978 			case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
2979 			{
2980 				auto requirements = reinterpret_cast<VkMemoryDedicatedRequirements *>(extensionRequirements);
2981 				vk::Cast(device)->getRequirements(requirements);
2982 #if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
2983 				if(vk::Cast(pInfo->image)->getSupportedExternalMemoryHandleTypes() == VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
2984 				{
2985 					requirements->prefersDedicatedAllocation = VK_TRUE;
2986 					requirements->requiresDedicatedAllocation = VK_TRUE;
2987 				}
2988 #endif
2989 			}
2990 			break;
2991 			default:
2992 				LOG_TRAP("pMemoryRequirements->pNext sType = %s", vk::Stringify(extensionRequirements->sType).c_str());
2993 				break;
2994 		}
2995 
2996 		extensionRequirements = extensionRequirements->pNext;
2997 	}
2998 
2999 	vkGetImageMemoryRequirements(device, pInfo->image, &(pMemoryRequirements->memoryRequirements));
3000 }
3001 
3002 VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2 *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
3003 {
3004 	TRACE("(VkDevice device = %p, const VkBufferMemoryRequirementsInfo2* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
3005 	      device, pInfo, pMemoryRequirements);
3006 
3007 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pInfo->pNext);
3008 	while(extInfo)
3009 	{
3010 		LOG_TRAP("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3011 		extInfo = extInfo->pNext;
3012 	}
3013 
3014 	VkBaseOutStructure *extensionRequirements = reinterpret_cast<VkBaseOutStructure *>(pMemoryRequirements->pNext);
3015 	while(extensionRequirements)
3016 	{
3017 		switch(extensionRequirements->sType)
3018 		{
3019 			case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
3020 			{
3021 				auto requirements = reinterpret_cast<VkMemoryDedicatedRequirements *>(extensionRequirements);
3022 				vk::Cast(device)->getRequirements(requirements);
3023 			}
3024 			break;
3025 			default:
3026 				LOG_TRAP("pMemoryRequirements->pNext sType = %s", vk::Stringify(extensionRequirements->sType).c_str());
3027 				break;
3028 		}
3029 
3030 		extensionRequirements = extensionRequirements->pNext;
3031 	}
3032 
3033 	vkGetBufferMemoryRequirements(device, pInfo->buffer, &(pMemoryRequirements->memoryRequirements));
3034 }
3035 
3036 VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
3037 {
3038 	TRACE("(VkDevice device = %p, const VkImageSparseMemoryRequirementsInfo2* pInfo = %p, uint32_t* pSparseMemoryRequirementCount = %p, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements = %p)",
3039 	      device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
3040 
3041 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pInfo->pNext);
3042 	while(extInfo)
3043 	{
3044 		LOG_TRAP("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3045 		extInfo = extInfo->pNext;
3046 	}
3047 
3048 	auto extensionRequirements = reinterpret_cast<VkBaseInStructure const *>(pSparseMemoryRequirements->pNext);
3049 	while(extensionRequirements)
3050 	{
3051 		LOG_TRAP("pSparseMemoryRequirements->pNext sType = %s", vk::Stringify(extensionRequirements->sType).c_str());
3052 		extensionRequirements = extensionRequirements->pNext;
3053 	}
3054 
3055 	// The 'sparseBinding' feature is not supported, so images cannot be created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag.
3056 	// "If the image was not created with VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT then pSparseMemoryRequirementCount will be set to zero and pSparseMemoryRequirements will not be written to."
3057 	*pSparseMemoryRequirementCount = 0;
3058 }
3059 
3060 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 *pFeatures)
3061 {
3062 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceFeatures2* pFeatures = %p)", physicalDevice, pFeatures);
3063 
3064 	vk::Cast(physicalDevice)->getFeatures2(pFeatures);
3065 }
3066 
3067 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 *pProperties)
3068 {
3069 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceProperties2* pProperties = %p)", physicalDevice, pProperties);
3070 
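	// Fill in each recognized extension structure in the caller's pNext chain; structures with
	// unrecognized sType values are logged and skipped, as the specification requires.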
3071 	VkBaseOutStructure *extensionProperties = reinterpret_cast<VkBaseOutStructure *>(pProperties->pNext);
3072 	while(extensionProperties)
3073 	{
3074 		// Casting to a long since some structures, such as
3075 		// VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID and
3076 		// VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT
3077 		// are not enumerated in the official Vulkan header
3078 		switch((long)(extensionProperties->sType))
3079 		{
3080 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES:
3081 			{
3082 				auto properties = reinterpret_cast<VkPhysicalDeviceIDProperties *>(extensionProperties);
3083 				vk::Cast(physicalDevice)->getProperties(properties);
3084 			}
3085 			break;
3086 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES:
3087 			{
3088 				auto properties = reinterpret_cast<VkPhysicalDeviceMaintenance3Properties *>(extensionProperties);
3089 				vk::Cast(physicalDevice)->getProperties(properties);
3090 			}
3091 			break;
3092 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES:
3093 			{
3094 				auto properties = reinterpret_cast<VkPhysicalDeviceMultiviewProperties *>(extensionProperties);
3095 				vk::Cast(physicalDevice)->getProperties(properties);
3096 			}
3097 			break;
3098 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES:
3099 			{
3100 				auto properties = reinterpret_cast<VkPhysicalDevicePointClippingProperties *>(extensionProperties);
3101 				vk::Cast(physicalDevice)->getProperties(properties);
3102 			}
3103 			break;
3104 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES:
3105 			{
3106 				auto properties = reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>(extensionProperties);
3107 				vk::Cast(physicalDevice)->getProperties(properties);
3108 			}
3109 			break;
3110 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES:
3111 			{
3112 				auto properties = reinterpret_cast<VkPhysicalDeviceSubgroupProperties *>(extensionProperties);
3113 				vk::Cast(physicalDevice)->getProperties(properties);
3114 			}
3115 			break;
3116 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT:
3117 				// Explicitly ignored, since VK_EXT_sample_locations is not supported
3118 				ASSERT(!hasDeviceExtension(VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME));
3119 				break;
3120 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT:
3121 			{
3122 				auto properties = reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>(extensionProperties);
3123 				vk::Cast(physicalDevice)->getProperties(properties);
3124 			}
3125 			break;
3126 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR:
3127 			{
3128 				auto properties = reinterpret_cast<VkPhysicalDeviceDriverPropertiesKHR *>(extensionProperties);
3129 				vk::Cast(physicalDevice)->getProperties(properties);
3130 			}
3131 			break;
3132 #ifdef __ANDROID__
3133 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID:
3134 			{
3135 				auto properties = reinterpret_cast<VkPhysicalDevicePresentationPropertiesANDROID *>(extensionProperties);
3136 				vk::Cast(physicalDevice)->getProperties(properties);
3137 			}
3138 			break;
3139 #endif
3140 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT:
3141 			{
3142 				auto properties = reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT *>(extensionProperties);
3143 				vk::Cast(physicalDevice)->getProperties(properties);
3144 			}
3145 			break;
3146 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT:
3147 			{
3148 				auto properties = reinterpret_cast<VkPhysicalDeviceProvokingVertexPropertiesEXT *>(extensionProperties);
3149 				vk::Cast(physicalDevice)->getProperties(properties);
3150 			}
3151 			break;
3152 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR:
3153 			{
3154 				auto properties = reinterpret_cast<VkPhysicalDeviceFloatControlsProperties *>(extensionProperties);
3155 				vk::Cast(physicalDevice)->getProperties(properties);
3156 			}
3157 			break;
3158 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES:
3159 			{
3160 				auto properties = reinterpret_cast<VkPhysicalDeviceVulkan11Properties *>(extensionProperties);
3161 				vk::Cast(physicalDevice)->getProperties(properties);
3162 			}
3163 			break;
3164 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES:
3165 			{
3166 				auto properties = reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties *>(extensionProperties);
3167 				vk::Cast(physicalDevice)->getProperties(properties);
3168 			}
3169 			break;
3170 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES:
3171 			{
3172 				auto properties = reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties *>(extensionProperties);
3173 				vk::Cast(physicalDevice)->getProperties(properties);
3174 			}
3175 			break;
3176 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES:
3177 			{
3178 				auto properties = reinterpret_cast<VkPhysicalDeviceVulkan12Properties *>(extensionProperties);
3179 				vk::Cast(physicalDevice)->getProperties(properties);
3180 			}
3181 			break;
3182 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES:
3183 			{
3184 				auto properties = reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties *>(extensionProperties);
3185 				vk::Cast(physicalDevice)->getProperties(properties);
3186 			}
3187 			break;
3188 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES:
3189 			{
3190 				auto properties = reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties *>(extensionProperties);
3191 				vk::Cast(physicalDevice)->getProperties(properties);
3192 			}
3193 			break;
3194 			default:
3195 				// "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extenions]"
3196 				LOG_TRAP("pProperties->pNext sType = %s", vk::Stringify(extensionProperties->sType).c_str());
3197 				break;
3198 		}
3199 
3200 		extensionProperties = extensionProperties->pNext;
3201 	}
3202 
3203 	vkGetPhysicalDeviceProperties(physicalDevice, &(pProperties->properties));
3204 }
3205 
3206 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2 *pFormatProperties)
3207 {
3208 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkFormatProperties2* pFormatProperties = %p)",
3209 	      physicalDevice, format, pFormatProperties);
3210 
3211 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pFormatProperties->pNext);
3212 	while(extInfo)
3213 	{
3214 		LOG_TRAP("pFormatProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3215 		extInfo = extInfo->pNext;
3216 	}
3217 
3218 	vkGetPhysicalDeviceFormatProperties(physicalDevice, format, &(pFormatProperties->formatProperties));
3219 }
3220 
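// Returns true only if every requested image usage bit is backed by the corresponding format
// feature bit; used below to validate both the main usage and any separate stencil usage.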
3221 static bool checkFormatUsage(VkImageUsageFlags usage, VkFormatFeatureFlags features)
3222 {
3223 	// Check for usage conflict with features
3224 	if((usage & VK_IMAGE_USAGE_SAMPLED_BIT) && !(features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT))
3225 	{
3226 		return false;
3227 	}
3228 
3229 	if((usage & VK_IMAGE_USAGE_STORAGE_BIT) && !(features & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT))
3230 	{
3231 		return false;
3232 	}
3233 
3234 	if((usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
3235 	{
3236 		return false;
3237 	}
3238 
3239 	if((usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))
3240 	{
3241 		return false;
3242 	}
3243 
3244 	if((usage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT) && !(features & (VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT | VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)))
3245 	{
3246 		return false;
3247 	}
3248 
3249 	if((usage & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT))
3250 	{
3251 		return false;
3252 	}
3253 
3254 	if((usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_DST_BIT))
3255 	{
3256 		return false;
3257 	}
3258 
3259 	return true;
3260 }
3261 
3262 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo, VkImageFormatProperties2 *pImageFormatProperties)
3263 {
3264 	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo = %p, VkImageFormatProperties2* pImageFormatProperties = %p)",
3265 	      physicalDevice, pImageFormatInfo, pImageFormatProperties);
3266 
3267 	// "If the combination of parameters to vkGetPhysicalDeviceImageFormatProperties is not supported by the implementation
3268 	//  for use in vkCreateImage, then all members of VkImageFormatProperties will be filled with zero."
3269 	memset(&pImageFormatProperties->imageFormatProperties, 0, sizeof(VkImageFormatProperties));
3270 
3271 	const VkBaseInStructure *extensionFormatInfo = reinterpret_cast<const VkBaseInStructure *>(pImageFormatInfo->pNext);
3272 
3273 	const VkExternalMemoryHandleTypeFlagBits *handleType = nullptr;
3274 	VkImageUsageFlags stencilUsage = 0;
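	// Scan the input pNext chain for the external memory handle type and any separate stencil
	// usage before validating format support below.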
3275 	while(extensionFormatInfo)
3276 	{
3277 		switch(extensionFormatInfo->sType)
3278 		{
3279 			case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR:
3280 			{
3281 				// Explicitly ignored, since VK_KHR_image_format_list is not supported
3282 				ASSERT(!hasDeviceExtension(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME));
3283 			}
3284 			break;
3285 			case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
3286 			{
3287 				const VkImageStencilUsageCreateInfo *stencilUsageInfo = reinterpret_cast<const VkImageStencilUsageCreateInfo *>(extensionFormatInfo);
3288 				stencilUsage = stencilUsageInfo->stencilUsage;
3289 			}
3290 			break;
3291 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO:
3292 			{
3293 				const VkPhysicalDeviceExternalImageFormatInfo *imageFormatInfo = reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo *>(extensionFormatInfo);
3294 				handleType = &(imageFormatInfo->handleType);
3295 			}
3296 			break;
3297 			case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT:
3298 			{
3299 				// Explicitly ignored, since VK_EXT_image_drm_format_modifier is not supported
3300 				ASSERT(!hasDeviceExtension(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME));
3301 			}
3302 			break;
3303 			default:
3304 				LOG_TRAP("pImageFormatInfo->pNext sType = %s", vk::Stringify(extensionFormatInfo->sType).c_str());
3305 				break;
3306 		}
3307 
3308 		extensionFormatInfo = extensionFormatInfo->pNext;
3309 	}
3310 
3311 	VkBaseOutStructure *extensionProperties = reinterpret_cast<VkBaseOutStructure *>(pImageFormatProperties->pNext);
3312 
3313 #ifdef __ANDROID__
3314 	bool hasAHBUsage = false;
3315 #endif
3316 
3317 	while(extensionProperties)
3318 	{
3319 		switch(extensionProperties->sType)
3320 		{
3321 			case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES:
3322 			{
3323 				auto properties = reinterpret_cast<VkExternalImageFormatProperties *>(extensionProperties);
3324 				vk::Cast(physicalDevice)->getProperties(handleType, properties);
3325 			}
3326 			break;
3327 			case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES:
3328 			{
3329 				auto properties = reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties *>(extensionProperties);
3330 				vk::Cast(physicalDevice)->getProperties(properties);
3331 			}
3332 			break;
3333 			case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD:
3334 			{
3335 				// Explicitly ignored, since VK_AMD_texture_gather_bias_lod is not supported
3336 				ASSERT(!hasDeviceExtension(VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME));
3337 			}
3338 			break;
3339 #ifdef __ANDROID__
3340 			case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
3341 			{
3342 				auto properties = reinterpret_cast<VkAndroidHardwareBufferUsageANDROID *>(extensionProperties);
3343 				vk::Cast(physicalDevice)->getProperties(pImageFormatInfo, properties);
3344 				hasAHBUsage = true;
3345 			}
3346 			break;
3347 #endif
3348 			default:
3349 				LOG_TRAP("pImageFormatProperties->pNext sType = %s", vk::Stringify(extensionProperties->sType).c_str());
3350 				break;
3351 		}
3352 
3353 		extensionProperties = extensionProperties->pNext;
3354 	}
3355 
3356 	VkFormat format = pImageFormatInfo->format;
3357 	VkImageType type = pImageFormatInfo->type;
3358 	VkImageTiling tiling = pImageFormatInfo->tiling;
3359 	VkImageUsageFlags usage = pImageFormatInfo->usage;
3360 	VkImageCreateFlags flags = pImageFormatInfo->flags;
3361 
3362 	VkFormatProperties properties;
3363 	vk::PhysicalDevice::GetFormatProperties(format, &properties);
3364 
3365 	VkFormatFeatureFlags features;
3366 	switch(tiling)
3367 	{
3368 		case VK_IMAGE_TILING_LINEAR:
3369 			features = properties.linearTilingFeatures;
3370 			break;
3371 
3372 		case VK_IMAGE_TILING_OPTIMAL:
3373 			features = properties.optimalTilingFeatures;
3374 			break;
3375 
3376 		default:
3377 			UNSUPPORTED("VkImageTiling %d", int(tiling));
3378 			features = 0;
3379 	}
3380 
3381 	if(features == 0)
3382 	{
3383 		return VK_ERROR_FORMAT_NOT_SUPPORTED;
3384 	}
3385 
3386 	// Reject any usage or separate stencil usage that is not compatible with the specified format.
3387 	if(!checkFormatUsage(usage, features))
3388 	{
3389 		return VK_ERROR_FORMAT_NOT_SUPPORTED;
3390 	}
3391 	// If stencilUsage is 0 then no separate usage was provided and it takes on the same value as usage,
3392 	// which has already been checked. So only check non-zero stencilUsage.
3393 	if(stencilUsage != 0 && !checkFormatUsage(stencilUsage, features))
3394 	{
3395 		return VK_ERROR_FORMAT_NOT_SUPPORTED;
3396 	}
3397 
3398 	auto allRecognizedUsageBits = VK_IMAGE_USAGE_SAMPLED_BIT |
3399 	                              VK_IMAGE_USAGE_STORAGE_BIT |
3400 	                              VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
3401 	                              VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
3402 	                              VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT |
3403 	                              VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
3404 	                              VK_IMAGE_USAGE_TRANSFER_DST_BIT |
3405 	                              VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
3406 	ASSERT(!(usage & ~(allRecognizedUsageBits)));
3407 
3408 	if(usage & VK_IMAGE_USAGE_SAMPLED_BIT)
3409 	{
3410 		if(tiling == VK_IMAGE_TILING_LINEAR)
3411 		{
3412 			// TODO(b/171299814): Compressed formats and cube maps are not supported for sampling using VK_IMAGE_TILING_LINEAR; otherwise, sampling
3413 			// in linear tiling is always supported as long as it can be sampled when using VK_IMAGE_TILING_OPTIMAL.
3414 			if(!(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) ||
3415 			   vk::Format(format).isCompressed() ||
3416 			   (flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT))
3417 			{
3418 				return VK_ERROR_FORMAT_NOT_SUPPORTED;
3419 			}
3420 		}
3421 		else if(!(features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT))
3422 		{
3423 			return VK_ERROR_FORMAT_NOT_SUPPORTED;
3424 		}
3425 	}
3426 
3427 	// "Images created with tiling equal to VK_IMAGE_TILING_LINEAR have further restrictions on their limits and capabilities
3428 	//  compared to images created with tiling equal to VK_IMAGE_TILING_OPTIMAL."
3429 	if(tiling == VK_IMAGE_TILING_LINEAR)
3430 	{
3431 		if(type != VK_IMAGE_TYPE_2D)
3432 		{
3433 			return VK_ERROR_FORMAT_NOT_SUPPORTED;
3434 		}
3435 
3436 		if(vk::Format(format).isDepth() || vk::Format(format).isStencil())
3437 		{
3438 			return VK_ERROR_FORMAT_NOT_SUPPORTED;
3439 		}
3440 	}
3441 
3442 	// "Images created with a format from one of those listed in Formats requiring sampler Y'CBCR conversion for VK_IMAGE_ASPECT_COLOR_BIT image views
3443 	//  have further restrictions on their limits and capabilities compared to images created with other formats."
3444 	if(vk::Format(format).isYcbcrFormat())
3445 	{
3446 		if(type != VK_IMAGE_TYPE_2D)
3447 		{
3448 			return VK_ERROR_FORMAT_NOT_SUPPORTED;
3449 		}
3450 	}
3451 
3452 	vk::Cast(physicalDevice)->getImageFormatProperties(format, type, tiling, usage, flags, &pImageFormatProperties->imageFormatProperties);
3453 
3454 #ifdef __ANDROID__
3455 	if(hasAHBUsage)
3456 	{
3457 		// AHardwareBuffer_lock may only be called with a single layer.
3458 		pImageFormatProperties->imageFormatProperties.maxArrayLayers = 1;
3459 		pImageFormatProperties->imageFormatProperties.maxMipLevels = 1;
3460 	}
3461 #endif
3462 
3463 	return VK_SUCCESS;
3464 }
3465 
3466 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties)
3467 {
3468 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pQueueFamilyPropertyCount = %p, VkQueueFamilyProperties2* pQueueFamilyProperties = %p)",
3469 	      physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
3470 
3471 	if(pQueueFamilyProperties)
3472 	{
3473 		auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pQueueFamilyProperties->pNext);
3474 		while(extInfo)
3475 		{
3476 			LOG_TRAP("pQueueFamilyProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3477 			extInfo = extInfo->pNext;
3478 		}
3479 	}
3480 
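	// Standard two-call idiom: with a null pQueueFamilyProperties only the count is returned;
	// otherwise up to *pQueueFamilyPropertyCount entries are written.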
3481 	if(!pQueueFamilyProperties)
3482 	{
3483 		*pQueueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();
3484 	}
3485 	else
3486 	{
3487 		vk::Cast(physicalDevice)->getQueueFamilyProperties(*pQueueFamilyPropertyCount, pQueueFamilyProperties);
3488 	}
3489 }
3490 
3491 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
3492 {
3493 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceMemoryProperties2* pMemoryProperties = %p)", physicalDevice, pMemoryProperties);
3494 
3495 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pMemoryProperties->pNext);
3496 	while(extInfo)
3497 	{
3498 		LOG_TRAP("pMemoryProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3499 		extInfo = extInfo->pNext;
3500 	}
3501 
3502 	vkGetPhysicalDeviceMemoryProperties(physicalDevice, &(pMemoryProperties->memoryProperties));
3503 }
3504 
3505 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2 *pFormatInfo, uint32_t *pPropertyCount, VkSparseImageFormatProperties2 *pProperties)
3506 {
3507 	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo = %p, uint32_t* pPropertyCount = %p, VkSparseImageFormatProperties2* pProperties = %p)",
3508 	      physicalDevice, pFormatInfo, pPropertyCount, pProperties);
3509 
3510 	if(pProperties)
3511 	{
3512 		auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pProperties->pNext);
3513 		while(extInfo)
3514 		{
3515 			LOG_TRAP("pProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3516 			extInfo = extInfo->pNext;
3517 		}
3518 	}
3519 
3520 	// We do not support sparse images.
3521 	*pPropertyCount = 0;
3522 }
3523 
3524 VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags)
3525 {
3526 	TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, VkCommandPoolTrimFlags flags = %d)",
3527 	      device, static_cast<void *>(commandPool), flags);
3528 
3529 	if(flags != 0)
3530 	{
3531 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
3532 		UNSUPPORTED("flags %d", int(flags));
3533 	}
3534 
3535 	vk::Cast(commandPool)->trim(flags);
3536 }
3537 
3538 VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue)
3539 {
3540 	TRACE("(VkDevice device = %p, const VkDeviceQueueInfo2* pQueueInfo = %p, VkQueue* pQueue = %p)",
3541 	      device, pQueueInfo, pQueue);
3542 
3543 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pQueueInfo->pNext);
3544 	while(extInfo)
3545 	{
3546 		LOG_TRAP("pQueueInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3547 		extInfo = extInfo->pNext;
3548 	}
3549 
3550 	if(pQueueInfo->flags != 0)
3551 	{
3552 		// The only flag that can be set here is VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
3553 		// According to the Vulkan 1.2.132 spec, 4.3.1. Queue Family Properties:
3554 		// "VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT specifies that the device queue is a
3555 		//  protected-capable queue. If the protected memory feature is not enabled,
3556 		//  the VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT bit of flags must not be set."
3557 		UNSUPPORTED("VkPhysicalDeviceVulkan11Features::protectedMemory");
3558 	}
3559 
3560 	vkGetDeviceQueue(device, pQueueInfo->queueFamilyIndex, pQueueInfo->queueIndex, pQueue);
3561 }
3562 
3563 VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSamplerYcbcrConversion *pYcbcrConversion)
3564 {
3565 	TRACE("(VkDevice device = %p, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSamplerYcbcrConversion* pYcbcrConversion = %p)",
3566 	      device, pCreateInfo, pAllocator, pYcbcrConversion);
3567 
3568 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
3569 	while(extInfo)
3570 	{
3571 		LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3572 		extInfo = extInfo->pNext;
3573 	}
3574 
3575 	return vk::SamplerYcbcrConversion::Create(pAllocator, pCreateInfo, pYcbcrConversion);
3576 }
3577 
3578 VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks *pAllocator)
3579 {
3580 	TRACE("(VkDevice device = %p, VkSamplerYcbcrConversion ycbcrConversion = %p, const VkAllocationCallbacks* pAllocator = %p)",
3581 	      device, static_cast<void *>(ycbcrConversion), pAllocator);
3582 
3583 	vk::destroy(ycbcrConversion, pAllocator);
3584 }
3585 
3586 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
3587 {
3588 	TRACE("(VkDevice device = %p, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate = %p)",
3589 	      device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
3590 
3591 	if(pCreateInfo->flags != 0)
3592 	{
3593 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
3594 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
3595 	}
3596 
3597 	if(pCreateInfo->templateType != VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
3598 	{
3599 		UNSUPPORTED("pCreateInfo->templateType %d", int(pCreateInfo->templateType));
3600 	}
3601 
3602 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
3603 	while(extInfo)
3604 	{
3605 		LOG_TRAP("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3606 		extInfo = extInfo->pNext;
3607 	}
3608 
3609 	return vk::DescriptorUpdateTemplate::Create(pAllocator, pCreateInfo, pDescriptorUpdateTemplate);
3610 }
3611 
3612 VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks *pAllocator)
3613 {
3614 	TRACE("(VkDevice device = %p, VkDescriptorUpdateTemplate descriptorUpdateTemplate = %p, const VkAllocationCallbacks* pAllocator = %p)",
3615 	      device, static_cast<void *>(descriptorUpdateTemplate), pAllocator);
3616 
3617 	vk::destroy(descriptorUpdateTemplate, pAllocator);
3618 }
3619 
3620 VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void *pData)
3621 {
3622 	TRACE("(VkDevice device = %p, VkDescriptorSet descriptorSet = %p, VkDescriptorUpdateTemplate descriptorUpdateTemplate = %p, const void* pData = %p)",
3623 	      device, static_cast<void *>(descriptorSet), static_cast<void *>(descriptorUpdateTemplate), pData);
3624 
3625 	vk::Cast(descriptorUpdateTemplate)->updateDescriptorSet(vk::Cast(device), descriptorSet, pData);
3626 }
3627 
3628 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo *pExternalBufferInfo, VkExternalBufferProperties *pExternalBufferProperties)
3629 {
3630 	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo = %p, VkExternalBufferProperties* pExternalBufferProperties = %p)",
3631 	      physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
3632 
3633 	vk::Cast(physicalDevice)->getProperties(pExternalBufferInfo, pExternalBufferProperties);
3634 }
3635 
3636 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo, VkExternalFenceProperties *pExternalFenceProperties)
3637 {
3638 	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo = %p, VkExternalFenceProperties* pExternalFenceProperties = %p)",
3639 	      physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
3640 
3641 	vk::Cast(physicalDevice)->getProperties(pExternalFenceInfo, pExternalFenceProperties);
3642 }
3643 
3644 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo, VkExternalSemaphoreProperties *pExternalSemaphoreProperties)
3645 {
3646 	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo = %p, VkExternalSemaphoreProperties* pExternalSemaphoreProperties = %p)",
3647 	      physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
3648 
3649 	vk::Cast(physicalDevice)->getProperties(pExternalSemaphoreInfo, pExternalSemaphoreProperties);
3650 }
3651 
3652 VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo, VkDescriptorSetLayoutSupport *pSupport)
3653 {
3654 	TRACE("(VkDevice device = %p, const VkDescriptorSetLayoutCreateInfo* pCreateInfo = %p, VkDescriptorSetLayoutSupport* pSupport = %p)",
3655 	      device, pCreateInfo, pSupport);
3656 
3657 	vk::Cast(device)->getDescriptorSetLayoutSupport(pCreateInfo, pSupport);
3658 }
3659 
3660 VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern)
3661 {
3662 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t lineStippleFactor = %u, uint16_t lineStipplePattern = %u)",
3663 	      commandBuffer, lineStippleFactor, lineStipplePattern);
3664 
3665 	UNSUPPORTED("VkPhysicalDeviceLineRasterizationFeaturesEXT::stippled*Lines");
3666 }
3667 
3668 VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT *pLabelInfo)
3669 {
3670 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
3671 	      commandBuffer, pLabelInfo);
3672 
3673 	vk::Cast(commandBuffer)->beginDebugUtilsLabel(pLabelInfo);
3674 }
3675 
3676 VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer)
3677 {
3678 	TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);
3679 
3680 	vk::Cast(commandBuffer)->endDebugUtilsLabel();
3681 }
3682 
3683 VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT *pLabelInfo)
3684 {
3685 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
3686 	      commandBuffer, pLabelInfo);
3687 
3688 	vk::Cast(commandBuffer)->insertDebugUtilsLabel(pLabelInfo);
3689 }
3690 
3691 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDebugUtilsMessengerEXT *pMessenger)
3692 {
3693 	TRACE("(VkInstance instance = %p, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDebugUtilsMessengerEXT* pMessenger = %p)",
3694 	      instance, pCreateInfo, pAllocator, pMessenger);
3695 
3696 	if(pCreateInfo->flags != 0)
3697 	{
3698 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
3699 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
3700 	}
3701 
3702 	return vk::DebugUtilsMessenger::Create(pAllocator, pCreateInfo, pMessenger);
3703 }
3704 
3705 VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks *pAllocator)
3706 {
3707 	TRACE("(VkInstance instance = %p, VkDebugUtilsMessengerEXT messenger = %p, const VkAllocationCallbacks* pAllocator = %p)",
3708 	      instance, static_cast<void *>(messenger), pAllocator);
3709 
3710 	vk::destroy(messenger, pAllocator);
3711 }
3712 
3713 VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT *pLabelInfo)
3714 {
3715 	TRACE("(VkQueue queue = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
3716 	      queue, pLabelInfo);
3717 
3718 	vk::Cast(queue)->beginDebugUtilsLabel(pLabelInfo);
3719 }
3720 
3721 VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT(VkQueue queue)
3722 {
3723 	TRACE("(VkQueue queue = %p)", queue);
3724 
3725 	vk::Cast(queue)->endDebugUtilsLabel();
3726 }
3727 
3728 VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT *pLabelInfo)
3729 {
3730 	TRACE("(VkQueue queue = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
3731 	      queue, pLabelInfo);
3732 
3733 	vk::Cast(queue)->insertDebugUtilsLabel(pLabelInfo);
3734 }
3735 
3736 VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT *pNameInfo)
3737 {
3738 	TRACE("(VkDevice device = %p, const VkDebugUtilsObjectNameInfoEXT* pNameInfo = %p)",
3739 	      device, pNameInfo);
3740 
3741 	return vk::Cast(device)->setDebugUtilsObjectName(pNameInfo);
3742 }
3743 
3744 VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT *pTagInfo)
3745 {
3746 	TRACE("(VkDevice device = %p, const VkDebugUtilsObjectTagInfoEXT* pTagInfo = %p)",
3747 	      device, pTagInfo);
3748 
3749 	return vk::Cast(device)->setDebugUtilsObjectTag(pTagInfo);
3750 }
3751 
3752 VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData)
3753 {
3754 	TRACE("(VkInstance instance = %p, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity = %d, VkDebugUtilsMessageTypeFlagsEXT messageTypes = %d, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData = %p)",
3755 	      instance, messageSeverity, messageTypes, pCallbackData);
3756 
3757 	vk::Cast(instance)->submitDebugUtilsMessage(messageSeverity, messageTypes, pCallbackData);
3758 }
3759 
3760 #ifdef VK_USE_PLATFORM_XCB_KHR
3761 VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
3762 {
3763 	TRACE("(VkInstance instance = %p, VkXcbSurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
3764 	      instance, pCreateInfo, pAllocator, pSurface);
3765 
3766 	// VUID-VkXcbSurfaceCreateInfoKHR-connection-01310 : connection must point to a valid X11 xcb_connection_t
3767 	ASSERT(pCreateInfo->connection);
3768 
3769 	return vk::XcbSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
3770 }
3771 
3772 VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t *connection, xcb_visualid_t visual_id)
3773 {
3774 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, xcb_connection_t* connection = %p, xcb_visualid_t visual_id = %d)",
3775 	      physicalDevice, int(queueFamilyIndex), connection, int(visual_id));
3776 
3777 	return VK_TRUE;
3778 }
3779 #endif
3780 
3781 #ifdef VK_USE_PLATFORM_XLIB_KHR
3782 VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
3783 {
3784 	TRACE("(VkInstance instance = %p, VkXlibSurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
3785 	      instance, pCreateInfo, pAllocator, pSurface);
3786 
3787 	// VUID-VkXlibSurfaceCreateInfoKHR-dpy-01313: dpy must point to a valid Xlib Display
3788 	ASSERT(pCreateInfo->dpy);
3789 
3790 	return vk::XlibSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
3791 }
3792 
3793 VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display *dpy, VisualID visualID)
3794 {
3795 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, Display* dpy = %p, VisualID visualID = %lu)",
3796 	      physicalDevice, int(queueFamilyIndex), dpy, visualID);
3797 
3798 	return VK_TRUE;
3799 }
3800 #endif
3801 
3802 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
3803 VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
3804 {
3805 	TRACE("(VkInstance instance = %p, VkWaylandSurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
3806 	      instance, pCreateInfo, pAllocator, pSurface);
3807 
3808 	return vk::WaylandSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
3809 }
3810 
3811 VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display *display)
3812 {
3813 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, struct wl_display* display = %p)",
3814 	      physicalDevice, int(queueFamilyIndex), display);
3815 
3816 	return VK_TRUE;
3817 }
3818 #endif
3819 
3820 #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
3821 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDirectFBSurfaceEXT(VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
3822 {
3823 	TRACE("(VkInstance instance = %p, VkDirectFBSurfaceCreateInfoEXT* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
3824 	      instance, pCreateInfo, pAllocator, pSurface);
3825 
3826 	return vk::DirectFBSurfaceEXT::Create(pAllocator, pCreateInfo, pSurface);
3827 }
3828 
3829 VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceDirectFBPresentationSupportEXT(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB *dfb)
3830 {
3831 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, IDirectFB* dfb = %p)",
3832 	      physicalDevice, int(queueFamilyIndex), dfb);
3833 
3834 	return VK_TRUE;
3835 }
3836 #endif
3837 
3838 #ifdef VK_USE_PLATFORM_DISPLAY_KHR
3839 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDisplayModeKHR *pMode)
3840 {
3841 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkDisplayKHR display = %p, VkDisplayModeCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkDisplayModeKHR* pMode = %p)",
3842 	      physicalDevice, static_cast<void *>(display), pCreateInfo, pAllocator, pMode);
3843 
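	// Note: no display mode object is created here; the call simply reports success.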
3844 	return VK_SUCCESS;
3845 }
3846 
3847 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
3848 {
3849 	TRACE("(VkInstance instance = %p, VkDisplaySurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
3850 	      instance, pCreateInfo, pAllocator, pSurface);
3851 
3852 	return vk::DisplaySurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
3853 }
3854 
3855 VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount, VkDisplayModePropertiesKHR *pProperties)
3856 {
3857 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkDisplayKHR display = %p, uint32_t* pPropertyCount = %p, VkDisplayModePropertiesKHR* pProperties = %p)",
3858 	      physicalDevice, static_cast<void *>(display), pPropertyCount, pProperties);
3859 
3860 	return vk::DisplaySurfaceKHR::GetDisplayModeProperties(pPropertyCount, pProperties);
3861 }
3862 
3863 VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR *pCapabilities)
3864 {
3865 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkDisplayModeKHR mode = %p, uint32_t planeIndex = %d, VkDisplayPlaneCapabilitiesKHR* pCapabilities = %p)",
3866 	      physicalDevice, static_cast<void *>(mode), planeIndex, pCapabilities);
3867 
3868 	return vk::DisplaySurfaceKHR::GetDisplayPlaneCapabilities(pCapabilities);
3869 }
3870 
3871 VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t *pDisplayCount, VkDisplayKHR *pDisplays)
3872 {
3873 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t planeIndex = %d, uint32_t* pDisplayCount = %p, VkDisplayKHR* pDisplays = %p)",
3874 	      physicalDevice, planeIndex, pDisplayCount, pDisplays);
3875 
3876 	return vk::DisplaySurfaceKHR::GetDisplayPlaneSupportedDisplays(pDisplayCount, pDisplays);
3877 }
3878 
3879 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayPlanePropertiesKHR *pProperties)
3880 {
3881 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pPropertyCount = %p, VkDisplayPlanePropertiesKHR* pProperties = %p)",
3882 	      physicalDevice, pPropertyCount, pProperties);
3883 
3884 	return vk::DisplaySurfaceKHR::GetPhysicalDeviceDisplayPlaneProperties(pPropertyCount, pProperties);
3885 }
3886 
3887 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayPropertiesKHR *pProperties)
3888 {
3889 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pPropertyCount = %p, VkDisplayPropertiesKHR* pProperties = %p)",
3890 	      physicalDevice, pPropertyCount, pProperties);
3891 
3892 	return vk::DisplaySurfaceKHR::GetPhysicalDeviceDisplayProperties(pPropertyCount, pProperties);
3893 }
3894 #endif
3895 
3896 #ifdef VK_USE_PLATFORM_MACOS_MVK
3897 VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
3898 {
3899 	TRACE("(VkInstance instance = %p, VkMacOSSurfaceCreateInfoMVK* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
3900 	      instance, pCreateInfo, pAllocator, pSurface);
3901 
3902 	return vk::MacOSSurfaceMVK::Create(pAllocator, pCreateInfo, pSurface);
3903 }
3904 #endif
3905 
3906 #ifdef VK_USE_PLATFORM_METAL_EXT
3907 VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT(VkInstance instance, const VkMetalSurfaceCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
3908 {
3909 	TRACE("(VkInstance instance = %p, VkMetalSurfaceCreateInfoEXT* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
3910 	      instance, pCreateInfo, pAllocator, pSurface);
3911 
3912 	return vk::MetalSurfaceEXT::Create(pAllocator, pCreateInfo, pSurface);
3913 }
3914 #endif
3915 
3916 #ifdef VK_USE_PLATFORM_WIN32_KHR
3917 VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
3918 {
3919 	TRACE("(VkInstance instance = %p, VkWin32SurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
3920 	      instance, pCreateInfo, pAllocator, pSurface);
3921 
3922 	return vk::Win32SurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
3923 }
3924 
3925 VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex)
3926 {
3927 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d)",
3928 	      physicalDevice, queueFamilyIndex);
3929 	return VK_TRUE;
3930 }
3931 #endif
3932 
3933 VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
3934 {
3935 	TRACE("(VkInstance instance = %p, VkHeadlessSurfaceCreateInfoEXT* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
3936 	      instance, pCreateInfo, pAllocator, pSurface);
3937 
3938 	return vk::HeadlessSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
3939 }
3940 
3941 #ifndef __ANDROID__
3942 VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks *pAllocator)
3943 {
3944 	TRACE("(VkInstance instance = %p, VkSurfaceKHR surface = %p, const VkAllocationCallbacks* pAllocator = %p)",
3945 	      instance, static_cast<void *>(surface), pAllocator);
3946 
3947 	vk::destroy(surface, pAllocator);
3948 }
3949 
3950 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32 *pSupported)
3951 {
3952 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, VkSurfaceKHR surface = %p, VkBool32* pSupported = %p)",
3953 	      physicalDevice, int(queueFamilyIndex), static_cast<void *>(surface), pSupported);
3954 
3955 	*pSupported = VK_TRUE;
3956 	return VK_SUCCESS;
3957 }
3958 
3959 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR *pSurfaceCapabilities)
3960 {
3961 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities = %p)",
3962 	      physicalDevice, static_cast<void *>(surface), pSurfaceCapabilities);
3963 
3964 	return vk::Cast(surface)->getSurfaceCapabilities(pSurfaceCapabilities);
3965 }
3966 
3967 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pSurfaceFormatCount, VkSurfaceFormatKHR *pSurfaceFormats)
3968 {
3969 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, uint32_t* pSurfaceFormatCount = %p, VkSurfaceFormatKHR* pSurfaceFormats = %p)",
3970 	      physicalDevice, static_cast<void *>(surface), pSurfaceFormatCount, pSurfaceFormats);
3971 
3972 	if(!pSurfaceFormats)
3973 	{
3974 		*pSurfaceFormatCount = vk::Cast(surface)->getSurfaceFormatsCount();
3975 		return VK_SUCCESS;
3976 	}
3977 
3978 	return vk::Cast(surface)->getSurfaceFormats(pSurfaceFormatCount, pSurfaceFormats);
3979 }
3980 
3981 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pPresentModeCount, VkPresentModeKHR *pPresentModes)
3982 {
3983 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, uint32_t* pPresentModeCount = %p, VkPresentModeKHR* pPresentModes = %p)",
3984 	      physicalDevice, static_cast<void *>(surface), pPresentModeCount, pPresentModes);
3985 
3986 	if(!pPresentModes)
3987 	{
3988 		*pPresentModeCount = vk::Cast(surface)->getPresentModeCount();
3989 		return VK_SUCCESS;
3990 	}
3991 
3992 	return vk::Cast(surface)->getPresentModes(pPresentModeCount, pPresentModes);
3993 }
3994 
vkCreateSwapchainKHR(VkDevice device,const VkSwapchainCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSwapchainKHR * pSwapchain)3995 VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain)
3996 {
3997 	TRACE("(VkDevice device = %p, const VkSwapchainCreateInfoKHR* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSwapchainKHR* pSwapchain = %p)",
3998 	      device, pCreateInfo, pAllocator, pSwapchain);
3999 
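	// If the application supplies an old swapchain, retire it so that its
	// surface can be re-associated with the new swapchain being created.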
	if(pCreateInfo->oldSwapchain)
	{
		vk::Cast(pCreateInfo->oldSwapchain)->retire();
	}

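	// A surface can only back one non-retired swapchain at a time.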
	if(vk::Cast(pCreateInfo->surface)->hasAssociatedSwapchain())
	{
		return VK_ERROR_NATIVE_WINDOW_IN_USE_KHR;
	}

	VkResult status = vk::SwapchainKHR::Create(pAllocator, pCreateInfo, pSwapchain);

	if(status != VK_SUCCESS)
	{
		return status;
	}

	auto swapchain = vk::Cast(*pSwapchain);
	status = swapchain->createImages(device, pCreateInfo);

	if(status != VK_SUCCESS)
	{
		vk::destroy(*pSwapchain, pAllocator);
		return status;
	}

	vk::Cast(pCreateInfo->surface)->associateSwapchain(swapchain);

	return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(swapchain), pAllocator);

	vk::destroy(swapchain, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages)
{
	TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, uint32_t* pSwapchainImageCount = %p, VkImage* pSwapchainImages = %p)",
	      device, static_cast<void *>(swapchain), pSwapchainImageCount, pSwapchainImages);

	if(!pSwapchainImages)
	{
		*pSwapchainImageCount = vk::Cast(swapchain)->getImageCount();
		return VK_SUCCESS;
	}

	return vk::Cast(swapchain)->getImages(pSwapchainImageCount, pSwapchainImages);
}

VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex)
{
	TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, uint64_t timeout = %" PRIu64 ", VkSemaphore semaphore = %p, VkFence fence = %p, uint32_t* pImageIndex = %p)",
	      device, static_cast<void *>(swapchain), timeout, static_cast<void *>(semaphore), static_cast<void *>(fence), pImageIndex);

	return vk::Cast(swapchain)->getNextImage(timeout, vk::DynamicCast<vk::BinarySemaphore>(semaphore), vk::Cast(fence), pImageIndex);
}

VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo)
{
	TRACE("(VkQueue queue = %p, const VkPresentInfoKHR* pPresentInfo = %p)",
	      queue, pPresentInfo);

	return vk::Cast(queue)->present(pPresentInfo);
}
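
// Illustrative per-frame acquire/present loop from the application side (a
// sketch, not part of this ICD; "acquireSemaphore" and "renderSemaphore" are
// hypothetical binary semaphores created by the application, and the submitted
// work is assumed to wait on the former and signal the latter):
//
//   uint32_t imageIndex = 0;
//   vkAcquireNextImageKHR(device, swapchain, UINT64_MAX, acquireSemaphore, VK_NULL_HANDLE, &imageIndex);
//
//   // ... record and submit rendering to the acquired image ...
//
//   VkPresentInfoKHR presentInfo = { VK_STRUCTURE_TYPE_PRESENT_INFO_KHR };
//   presentInfo.waitSemaphoreCount = 1;
//   presentInfo.pWaitSemaphores = &renderSemaphore;
//   presentInfo.swapchainCount = 1;
//   presentInfo.pSwapchains = &swapchain;
//   presentInfo.pImageIndices = &imageIndex;
//   vkQueuePresentKHR(queue, &presentInfo);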

VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo, uint32_t *pImageIndex)
{
	TRACE("(VkDevice device = %p, const VkAcquireNextImageInfoKHR *pAcquireInfo = %p, uint32_t *pImageIndex = %p)",
	      device, pAcquireInfo, pImageIndex);

	return vk::Cast(pAcquireInfo->swapchain)->getNextImage(pAcquireInfo->timeout, vk::DynamicCast<vk::BinarySemaphore>(pAcquireInfo->semaphore), vk::Cast(pAcquireInfo->fence), pImageIndex);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR *pDeviceGroupPresentCapabilities)
{
	TRACE("(VkDevice device = %p, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities = %p)",
	      device, pDeviceGroupPresentCapabilities);

	for(int i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; i++)
	{
		// The only real physical device in the presentation group is device 0,
		// and it can present to itself.
		pDeviceGroupPresentCapabilities->presentMask[i] = (i == 0) ? 1 : 0;
	}

	pDeviceGroupPresentCapabilities->modes = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;

	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR *pModes)
{
	TRACE("(VkDevice device = %p, VkSurfaceKHR surface = %p, VkDeviceGroupPresentModeFlagsKHR *pModes = %p)",
	      device, static_cast<void *>(surface), pModes);

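	// The device group contains a single (software) device, so only local
	// presentation is supported.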
	*pModes = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;
	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pRectCount, VkRect2D *pRects)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, uint32_t* pRectCount = %p, VkRect2D* pRects = %p)",
	      physicalDevice, static_cast<void *>(surface), pRectCount, pRects);

	return vk::Cast(surface)->getPresentRectangles(pRectCount, pRects);
}

#endif  // ! __ANDROID__

#ifdef __ANDROID__

VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainGrallocUsage2ANDROID(VkDevice device, VkFormat format, VkImageUsageFlags imageUsage, VkSwapchainImageUsageFlagsANDROID swapchainUsage, uint64_t *grallocConsumerUsage, uint64_t *grallocProducerUsage)
{
	TRACE("(VkDevice device = %p, VkFormat format = %d, VkImageUsageFlags imageUsage = %d, VkSwapchainImageUsageFlagsANDROID swapchainUsage = %d, uint64_t* grallocConsumerUsage = %p, uint64_t* grallocProducerUsage = %p)",
	      device, format, imageUsage, swapchainUsage, grallocConsumerUsage, grallocProducerUsage);

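	// SwiftShader writes swapchain images with the CPU, so the producer side
	// only needs frequent CPU write access and no extra consumer usage is
	// requested. The same reasoning applies to the gralloc0 variant below.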
	*grallocConsumerUsage = 0;
	*grallocProducerUsage = GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN;

	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainGrallocUsageANDROID(VkDevice device, VkFormat format, VkImageUsageFlags imageUsage, int *grallocUsage)
{
	TRACE("(VkDevice device = %p, VkFormat format = %d, VkImageUsageFlags imageUsage = %d, int* grallocUsage = %p)",
	      device, format, imageUsage, grallocUsage);

	*grallocUsage = GRALLOC_USAGE_SW_WRITE_OFTEN;

	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkAcquireImageANDROID(VkDevice device, VkImage image, int nativeFenceFd, VkSemaphore semaphore, VkFence fence)
{
	TRACE("(VkDevice device = %p, VkImage image = %p, int nativeFenceFd = %d, VkSemaphore semaphore = %p, VkFence fence = %p)",
	      device, static_cast<void *>(image), nativeFenceFd, static_cast<void *>(semaphore), static_cast<void *>(fence));

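	// Block on the incoming native fence and take ownership of the fd by
	// closing it; once it has signaled, the image is ready for the CPU
	// renderer, so the provided fence and semaphore can be signaled right away.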
	if(nativeFenceFd >= 0)
	{
		sync_wait(nativeFenceFd, -1);
		close(nativeFenceFd);
	}

	if(fence != VK_NULL_HANDLE)
	{
		vk::Cast(fence)->complete();
	}

	if(semaphore != VK_NULL_HANDLE)
	{
		vk::DynamicCast<vk::BinarySemaphore>(semaphore)->signal();
	}

	return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkQueueSignalReleaseImageANDROID(VkQueue queue, uint32_t waitSemaphoreCount, const VkSemaphore *pWaitSemaphores, VkImage image, int *pNativeFenceFd)
{
	TRACE("(VkQueue queue = %p, uint32_t waitSemaphoreCount = %d, const VkSemaphore* pWaitSemaphores = %p, VkImage image = %p, int* pNativeFenceFd = %p)",
	      queue, waitSemaphoreCount, pWaitSemaphores, static_cast<void *>(image), pNativeFenceFd);

	// This is a hack to deal with screen tearing for now.
	// Need to correctly implement threading using VkSemaphore
	// to get rid of it. b/132458423
	vkQueueWaitIdle(queue);

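	// No native fence is handed back (-1); the wait-idle above already
	// guarantees that all rendering to the image has completed.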
	*pNativeFenceFd = -1;

	return vk::Cast(image)->prepareForExternalUseANDROID();
}
#endif  // __ANDROID__
}