• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2018 The SwiftShader Authors. All Rights Reserved.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 //    http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14 
15 #include "VkBuffer.hpp"
16 #include "VkBufferView.hpp"
17 #include "VkCommandBuffer.hpp"
18 #include "VkCommandPool.hpp"
19 #include "VkConfig.hpp"
20 #include "VkDebugUtilsMessenger.hpp"
21 #include "VkDescriptorPool.hpp"
22 #include "VkDescriptorSetLayout.hpp"
23 #include "VkDescriptorUpdateTemplate.hpp"
24 #include "VkDestroy.hpp"
25 #include "VkDevice.hpp"
26 #include "VkDeviceMemory.hpp"
27 #include "VkEvent.hpp"
28 #include "VkFence.hpp"
29 #include "VkFramebuffer.hpp"
30 #include "VkGetProcAddress.hpp"
31 #include "VkImage.hpp"
32 #include "VkImageView.hpp"
33 #include "VkInstance.hpp"
34 #include "VkPhysicalDevice.hpp"
35 #include "VkPipeline.hpp"
36 #include "VkPipelineCache.hpp"
37 #include "VkPipelineLayout.hpp"
38 #include "VkQueryPool.hpp"
39 #include "VkQueue.hpp"
40 #include "VkRenderPass.hpp"
41 #include "VkSampler.hpp"
42 #include "VkSemaphore.hpp"
43 #include "VkShaderModule.hpp"
44 #include "VkStringify.hpp"
45 #include "VkStructConversion.hpp"
46 #include "VkTimelineSemaphore.hpp"
47 
48 #include "Reactor/Nucleus.hpp"
49 #include "System/CPUID.hpp"
50 #include "System/Debug.hpp"
51 #include "WSI/HeadlessSurfaceKHR.hpp"
52 #include "WSI/VkSwapchainKHR.hpp"
53 
54 #if defined(VK_USE_PLATFORM_METAL_EXT) || defined(VK_USE_PLATFORM_MACOS_MVK)
55 #	include "WSI/MetalSurface.hpp"
56 #endif
57 
58 #ifdef VK_USE_PLATFORM_XCB_KHR
59 #	include "WSI/XcbSurfaceKHR.hpp"
60 #endif
61 
62 #ifdef VK_USE_PLATFORM_XLIB_KHR
63 #	include "WSI/XlibSurfaceKHR.hpp"
64 #endif
65 
66 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
67 #	include "WSI/WaylandSurfaceKHR.hpp"
68 #endif
69 
70 #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
71 #	include "WSI/DirectFBSurfaceEXT.hpp"
72 #endif
73 
74 #ifdef VK_USE_PLATFORM_DISPLAY_KHR
75 #	include "WSI/DisplaySurfaceKHR.hpp"
76 #endif
77 
78 #ifdef VK_USE_PLATFORM_WIN32_KHR
79 #	include "WSI/Win32SurfaceKHR.hpp"
80 #endif
81 
82 #include "marl/mutex.h"
83 #include "marl/scheduler.h"
84 #include "marl/thread.h"
85 #include "marl/tsa.h"
86 
87 #ifdef __ANDROID__
88 #	include "commit.h"
89 #	include "System/GrallocAndroid.hpp"
90 #	include <android/log.h>
91 #	include <hardware/gralloc1.h>
92 #	include <sync/sync.h>
93 #	ifdef SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
94 #		include "VkDeviceMemoryExternalAndroid.hpp"
95 #	endif
96 #endif
97 
98 #include <algorithm>
99 #include <cinttypes>
100 #include <cstring>
101 #include <functional>
102 #include <map>
103 #include <string>
104 
105 namespace {
106 
107 // Enable commit_id.py and #include commit.h for other platforms.
108 #if defined(__ANDROID__) && defined(ENABLE_BUILD_VERSION_OUTPUT)
// Logs the SwiftShader version string to the Android system log.
// Compiled only when both __ANDROID__ and ENABLE_BUILD_VERSION_OUTPUT are
// defined; called once from initializeLibrary().
void logBuildVersionInformation()
{
	// TODO(b/144093703): Don't call __android_log_print() directly
	__android_log_print(ANDROID_LOG_INFO, "SwiftShader", "SwiftShader Version: %s", SWIFTSHADER_VERSION_STRING);
}
114 #endif  // __ANDROID__ && ENABLE_BUILD_VERSION_OUTPUT
115 
116 // setReactorDefaultConfig() sets the default configuration for Vulkan's use of
117 // Reactor.
void setReactorDefaultConfig()
{
	// Start from the default optimization level, drop any pre-registered
	// passes, and install this explicit pipeline. Note the pass order (and
	// the repeated CFGSimplification) is deliberate — do not reorder.
	auto cfg = rr::Config::Edit()
	               .set(rr::Optimization::Level::Default)
	               .clearOptimizationPasses()
	               .add(rr::Optimization::Pass::ScalarReplAggregates)
	               .add(rr::Optimization::Pass::SCCP)
	               .add(rr::Optimization::Pass::CFGSimplification)
	               .add(rr::Optimization::Pass::EarlyCSEPass)
	               .add(rr::Optimization::Pass::CFGSimplification)
	               .add(rr::Optimization::Pass::InstructionCombining);

	rr::Nucleus::adjustDefaultConfig(cfg);
}
132 
// getOrCreateScheduler() returns the marl scheduler shared by all callers.
// The scheduler is created lazily on first use and kept alive only through a
// weak_ptr: once every returned shared_ptr has been released, the scheduler
// is destroyed and a subsequent call builds a fresh one.
std::shared_ptr<marl::Scheduler> getOrCreateScheduler()
{
	struct Scheduler
	{
		marl::mutex mutex;
		// Non-owning reference to the live scheduler (if any); guarded by mutex.
		std::weak_ptr<marl::Scheduler> weakptr GUARDED_BY(mutex);
	};

	static Scheduler scheduler;  // TODO(b/208256248): Avoid exit-time destructor.

	marl::lock lock(scheduler.mutex);
	auto sptr = scheduler.weakptr.lock();
	if(!sptr)
	{
		marl::Scheduler::Config cfg;
		// Cap the worker pool at 16 threads even on machines with more logical CPUs.
		cfg.setWorkerThreadCount(std::min<size_t>(marl::Thread::numLogicalCPUs(), 16));
		cfg.setWorkerThreadInitializer([](int) {
			// Set the FPU state expected on every worker thread.
			sw::CPUID::setFlushToZero(true);
			sw::CPUID::setDenormalsAreZero(true);
		});
		sptr = std::make_shared<marl::Scheduler>(cfg);
		scheduler.weakptr = sptr;
	}
	return sptr;
}
158 
159 // initializeLibrary() is called by vkCreateInstance() to perform one-off global
160 // initialization of the swiftshader driver.
initializeLibrary()161 void initializeLibrary()
162 {
163 	static bool doOnce = [] {
164 #if defined(__ANDROID__) && defined(ENABLE_BUILD_VERSION_OUTPUT)
165 		logBuildVersionInformation();
166 #endif  // __ANDROID__ && ENABLE_BUILD_VERSION_OUTPUT
167 		setReactorDefaultConfig();
168 		return true;
169 	}();
170 	(void)doOnce;
171 }
172 
// ValidateRenderPassPNextChain() walks the pNext chain of a render pass
// create-info structure (templated so it works for both the Vulkan 1.0 and
// the *2 create-info variants, which share the member names used here) and
// debug-asserts the valid-usage rules for the extension structures this
// driver recognizes. Unknown sTypes are reported via UNSUPPORTED.
template<class T>
void ValidateRenderPassPNextChain(VkDevice device, const T *pCreateInfo)
{
	const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);

	while(extensionCreateInfo)
	{
		switch(extensionCreateInfo->sType)
		{
		case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO:
			{
				const VkRenderPassInputAttachmentAspectCreateInfo *inputAttachmentAspectCreateInfo = reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo *>(extensionCreateInfo);

				// Check each aspect reference against the subpass/attachment it points at.
				for(uint32_t i = 0; i < inputAttachmentAspectCreateInfo->aspectReferenceCount; i++)
				{
					const auto &aspectReference = inputAttachmentAspectCreateInfo->pAspectReferences[i];
					ASSERT(aspectReference.subpass < pCreateInfo->subpassCount);
					const auto &subpassDescription = pCreateInfo->pSubpasses[aspectReference.subpass];
					ASSERT(aspectReference.inputAttachmentIndex < subpassDescription.inputAttachmentCount);
					const auto &attachmentReference = subpassDescription.pInputAttachments[aspectReference.inputAttachmentIndex];
					if(attachmentReference.attachment != VK_ATTACHMENT_UNUSED)
					{
						// If the pNext chain includes an instance of VkRenderPassInputAttachmentAspectCreateInfo, for any
						// element of the pInputAttachments member of any element of pSubpasses where the attachment member
						// is not VK_ATTACHMENT_UNUSED, the aspectMask member of the corresponding element of
						// VkRenderPassInputAttachmentAspectCreateInfo::pAspectReferences must only include aspects that are
						// present in images of the format specified by the element of pAttachments at attachment
						vk::Format format(pCreateInfo->pAttachments[attachmentReference.attachment].format);
						bool isDepth = format.isDepth();
						bool isStencil = format.isStencil();
						ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) || (!isDepth && !isStencil));
						ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) || isDepth);
						ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) || isStencil);
					}
				}
			}
			break;
		case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO:
			{
				const VkRenderPassMultiviewCreateInfo *multiviewCreateInfo = reinterpret_cast<const VkRenderPassMultiviewCreateInfo *>(extensionCreateInfo);
				// Counts must either be zero or match the render pass counts.
				ASSERT((multiviewCreateInfo->subpassCount == 0) || (multiviewCreateInfo->subpassCount == pCreateInfo->subpassCount));
				ASSERT((multiviewCreateInfo->dependencyCount == 0) || (multiviewCreateInfo->dependencyCount == pCreateInfo->dependencyCount));

				// View masks must be all-zero or all-nonzero across subpasses.
				bool zeroMask = (multiviewCreateInfo->pViewMasks[0] == 0);
				for(uint32_t i = 1; i < multiviewCreateInfo->subpassCount; i++)
				{
					ASSERT((multiviewCreateInfo->pViewMasks[i] == 0) == zeroMask);
				}

				if(zeroMask)
				{
					ASSERT(multiviewCreateInfo->correlationMaskCount == 0);
				}

				for(uint32_t i = 0; i < multiviewCreateInfo->dependencyCount; i++)
				{
					const auto &dependency = pCreateInfo->pDependencies[i];
					if(multiviewCreateInfo->pViewOffsets[i] != 0)
					{
						// Nonzero view offsets require a view-local dependency between distinct subpasses.
						ASSERT(dependency.srcSubpass != dependency.dstSubpass);
						ASSERT(dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT);
					}
					if(zeroMask)
					{
						ASSERT(!(dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT));
					}
				}

				// If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo,
				// each element of its pViewMask member must not include a bit at a position
				// greater than the value of VkPhysicalDeviceLimits::maxFramebufferLayers
				// pViewMask is a 32 bit value. If maxFramebufferLayers > 32, it's impossible
				// for pViewMask to contain a bit at an illegal position
				// Note: Verify pViewMask values instead if we hit this assert
				ASSERT(vk::Cast(device)->getPhysicalDevice()->getProperties().limits.maxFramebufferLayers >= 32);
			}
			break;
		case VK_STRUCTURE_TYPE_MAX_ENUM:
			// dEQP tests that this value is ignored.
			break;
		default:
			UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
			break;
		}

		extensionCreateInfo = extensionCreateInfo->pNext;
	}
}
261 
262 }  // namespace
263 
264 extern "C" {
// vk_icdGetInstanceProcAddr() is the entry point the Vulkan loader uses to
// resolve this ICD's instance-level functions. It forwards to the same
// lookup table as vkGetInstanceProcAddr.
VK_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char *pName)
{
	TRACE("(VkInstance instance = %p, const char* pName = %p)", instance, pName);

	return vk::GetInstanceProcAddr(vk::Cast(instance), pName);
}
271 
// vk_icdNegotiateLoaderICDInterfaceVersion() implements the loader-ICD
// interface version handshake: this driver reports version 3.
// NOTE(review): the loader passes its own supported version in
// *pSupportedVersion and this implementation overwrites it unconditionally
// instead of taking the minimum — confirm against the Vulkan loader ICD
// interface documentation.
VK_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion)
{
	*pSupportedVersion = 3;
	return VK_SUCCESS;
}
277 
278 #if VK_USE_PLATFORM_FUCHSIA
279 
280 // This symbol must be exported by a Fuchsia Vulkan ICD. The Vulkan loader will
281 // call it, passing the address of a global function pointer that can later be
282 // used at runtime to connect to Fuchsia FIDL services, as required by certain
283 // extensions. See https://fxbug.dev/13095 for more details.
284 //
285 // NOTE: This entry point has not been upstreamed to Khronos yet, which reserves
286 //       all symbols starting with vk_icd. See https://fxbug.dev/13074 which
287 //       tracks upstreaming progress.
// Stores the loader-provided FIDL service-connection callback in a driver
// global for later use by Fuchsia-specific extensions.
VK_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdInitializeConnectToServiceCallback(
    PFN_vkConnectToService callback)
{
	TRACE("(callback = %p)", callback);
	vk::icdFuchsiaServiceConnectCallback = callback;
	return VK_SUCCESS;
}
295 
296 #endif  // VK_USE_PLATFORM_FUCHSIA
297 
// ExtensionProperties augments VkExtensionProperties with a runtime support
// predicate. By default an extension is unconditionally supported; table
// entries (e.g. the XCB/Xlib surface extensions below) override isSupported
// to probe availability at runtime.
struct ExtensionProperties : public VkExtensionProperties
{
	std::function<bool()> isSupported = [] { return true; };
};
302 
// instanceExtensionProperties lists every instance-level extension this
// driver can expose. Entries with a custom isSupported predicate are only
// reported/enabled when the predicate returns true (see
// numSupportedExtensions / hasExtension below).
// TODO(b/208256248): Avoid exit-time destructor.
static const ExtensionProperties instanceExtensionProperties[] = {
	{ { VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME, VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION } },
	{ { VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION } },
	{ { VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION } },
	{ { VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION } },
	{ { VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION } },
	{ { VK_EXT_DEBUG_UTILS_EXTENSION_NAME, VK_EXT_DEBUG_UTILS_SPEC_VERSION } },
	{ { VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME, VK_EXT_HEADLESS_SURFACE_SPEC_VERSION } },
#ifndef __ANDROID__
	{ { VK_KHR_SURFACE_EXTENSION_NAME, VK_KHR_SURFACE_SPEC_VERSION } },
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
	{ { VK_KHR_XCB_SURFACE_EXTENSION_NAME, VK_KHR_XCB_SURFACE_SPEC_VERSION }, [] { return vk::XcbSurfaceKHR::isSupported(); } },
#endif
#ifdef VK_USE_PLATFORM_XLIB_KHR
	{ { VK_KHR_XLIB_SURFACE_EXTENSION_NAME, VK_KHR_XLIB_SURFACE_SPEC_VERSION }, [] { return vk::XlibSurfaceKHR::isSupported(); } },
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
	{ { VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, VK_KHR_WAYLAND_SURFACE_SPEC_VERSION } },
#endif
#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
	{ { VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME, VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION } },
#endif
#ifdef VK_USE_PLATFORM_DISPLAY_KHR
	{ { VK_KHR_DISPLAY_EXTENSION_NAME, VK_KHR_DISPLAY_SPEC_VERSION } },
#endif
#ifdef VK_USE_PLATFORM_MACOS_MVK
	{ { VK_MVK_MACOS_SURFACE_EXTENSION_NAME, VK_MVK_MACOS_SURFACE_SPEC_VERSION } },
#endif
#ifdef VK_USE_PLATFORM_METAL_EXT
	{ { VK_EXT_METAL_SURFACE_EXTENSION_NAME, VK_EXT_METAL_SURFACE_SPEC_VERSION } },
#endif
#ifdef VK_USE_PLATFORM_WIN32_KHR
	{ { VK_KHR_WIN32_SURFACE_EXTENSION_NAME, VK_KHR_WIN32_SURFACE_SPEC_VERSION } },
#endif
};
340 
// deviceExtensionProperties lists every device-level extension this driver
// can expose, grouped by the Vulkan core version that promoted them.
// TODO(b/208256248): Avoid exit-time destructor.
static const ExtensionProperties deviceExtensionProperties[] = {
	{ { VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME, VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION } },
	// Vulkan 1.1 promoted extensions
	{ { VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, VK_KHR_BIND_MEMORY_2_SPEC_VERSION } },
	{ { VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME, VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION } },
	{ { VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION } },
	{ { VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME, VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION } },
	{ { VK_KHR_DEVICE_GROUP_EXTENSION_NAME, VK_KHR_DEVICE_GROUP_SPEC_VERSION } },
	{ { VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME, VK_KHR_EXTERNAL_FENCE_SPEC_VERSION } },
	{ { VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION } },
	{ { VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION } },
	{ { VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION } },
	{ { VK_KHR_MAINTENANCE1_EXTENSION_NAME, VK_KHR_MAINTENANCE1_SPEC_VERSION } },
	{ { VK_KHR_MAINTENANCE2_EXTENSION_NAME, VK_KHR_MAINTENANCE2_SPEC_VERSION } },
	{ { VK_KHR_MAINTENANCE3_EXTENSION_NAME, VK_KHR_MAINTENANCE3_SPEC_VERSION } },
	{ { VK_KHR_MULTIVIEW_EXTENSION_NAME, VK_KHR_MULTIVIEW_SPEC_VERSION } },
	{ { VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME, VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION } },
	{ { VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION } },
	{ { VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME, VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION } },
	{ { VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME, VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION } },
	{ { VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME, VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION } },
	{ { VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME, VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION } },
	// Only 1.1 core version of this is supported. The extension has additional requirements
	//{{ VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME, VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION }},
	{ { VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME, VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION } },
	// Only 1.1 core version of this is supported. The extension has additional requirements
	//{{ VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME, VK_KHR_VARIABLE_POINTERS_SPEC_VERSION }},
	{ { VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION } },
	// The following extension is only used to add support for Bresenham lines
	{ { VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME, VK_EXT_LINE_RASTERIZATION_SPEC_VERSION } },
	// The following extension is used by ANGLE to emulate blitting the stencil buffer
	{ { VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION } },
	{ { VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME, VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION } },
	// Useful for D3D emulation
	{ { VK_EXT_4444_FORMATS_EXTENSION_NAME, VK_EXT_4444_FORMATS_SPEC_VERSION } },
	// Used by ANGLE to support GL_KHR_blend_equation_advanced
	{ { VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME, VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION } },
#ifndef __ANDROID__
	// We fully support the KHR_swapchain v70 additions, so just track the spec version.
	{ { VK_KHR_SWAPCHAIN_EXTENSION_NAME, VK_KHR_SWAPCHAIN_SPEC_VERSION } },
#else
	// We only support V7 of this extension. Missing functionality: in V8,
	// it becomes possible to pass a VkNativeBufferANDROID structure to
	// vkBindImageMemory2. Android's swapchain implementation does this in
	// order to support passing VkBindImageMemorySwapchainInfoKHR
	// (from KHR_swapchain v70) to vkBindImageMemory2.
	{ { VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME, 7 } },
#endif
#if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
	{ { VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION } },
#endif
#if SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
	{ { VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION } },
#endif
#if SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
	{ { VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION } },
#endif

	{ { VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME, VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION } },

#if VK_USE_PLATFORM_FUCHSIA
	{ { VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME, VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION } },
	{ { VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME, VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION } },
#endif
	{ { VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME, VK_EXT_PROVOKING_VERTEX_SPEC_VERSION } },
#if !defined(__ANDROID__)
	{ { VK_GOOGLE_SAMPLER_FILTERING_PRECISION_EXTENSION_NAME, VK_GOOGLE_SAMPLER_FILTERING_PRECISION_SPEC_VERSION } },
#endif
	{ { VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME, VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION } },
#ifdef SWIFTSHADER_DEVICE_MEMORY_REPORT
	{ { VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME, VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION } },
#endif  // SWIFTSHADER_DEVICE_MEMORY_REPORT
	// Vulkan 1.2 promoted extensions
	{ { VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME, VK_EXT_HOST_QUERY_RESET_SPEC_VERSION } },
	{ { VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME, VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION } },
	{ { VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME, VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION } },
	{ { VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME, VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION } },
	{ { VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME, VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION } },
	{ { VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME, VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION } },
	{ { VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME, VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION } },
	{ { VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME, VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION } },
	{ { VK_KHR_SPIRV_1_4_EXTENSION_NAME, VK_KHR_SPIRV_1_4_SPEC_VERSION } },
	{ { VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME, VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION } },
	{ { VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME, VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION } },
	// Other extensions
	{ { VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME, VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION } },
	{ { VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME, VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION } },
	{ { VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME, VK_KHR_COPY_COMMANDS_2_SPEC_VERSION } },
	{ { VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME, VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION } },
};
432 
numSupportedExtensions(const ExtensionProperties * extensionProperties,uint32_t extensionPropertiesCount)433 static uint32_t numSupportedExtensions(const ExtensionProperties *extensionProperties, uint32_t extensionPropertiesCount)
434 {
435 	uint32_t count = 0;
436 
437 	for(uint32_t i = 0; i < extensionPropertiesCount; i++)
438 	{
439 		if(extensionProperties[i].isSupported())
440 		{
441 			count++;
442 		}
443 	}
444 
445 	return count;
446 }
447 
numInstanceSupportedExtensions()448 static uint32_t numInstanceSupportedExtensions()
449 {
450 	return numSupportedExtensions(instanceExtensionProperties, sizeof(instanceExtensionProperties) / sizeof(instanceExtensionProperties[0]));
451 }
452 
numDeviceSupportedExtensions()453 static uint32_t numDeviceSupportedExtensions()
454 {
455 	return numSupportedExtensions(deviceExtensionProperties, sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0]));
456 }
457 
hasExtension(const char * extensionName,const ExtensionProperties * extensionProperties,uint32_t extensionPropertiesCount)458 static bool hasExtension(const char *extensionName, const ExtensionProperties *extensionProperties, uint32_t extensionPropertiesCount)
459 {
460 	for(uint32_t i = 0; i < extensionPropertiesCount; i++)
461 	{
462 		if(strcmp(extensionName, extensionProperties[i].extensionName) == 0)
463 		{
464 			return extensionProperties[i].isSupported();
465 		}
466 	}
467 
468 	return false;
469 }
470 
hasInstanceExtension(const char * extensionName)471 static bool hasInstanceExtension(const char *extensionName)
472 {
473 	return hasExtension(extensionName, instanceExtensionProperties, sizeof(instanceExtensionProperties) / sizeof(instanceExtensionProperties[0]));
474 }
475 
hasDeviceExtension(const char * extensionName)476 static bool hasDeviceExtension(const char *extensionName)
477 {
478 	return hasExtension(extensionName, deviceExtensionProperties, sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0]));
479 }
480 
copyExtensions(VkExtensionProperties * pProperties,uint32_t toCopy,const ExtensionProperties * extensionProperties,uint32_t extensionPropertiesCount)481 static void copyExtensions(VkExtensionProperties *pProperties, uint32_t toCopy, const ExtensionProperties *extensionProperties, uint32_t extensionPropertiesCount)
482 {
483 	for(uint32_t i = 0, j = 0; i < toCopy; i++, j++)
484 	{
485 		while((j < extensionPropertiesCount) && !extensionProperties[j].isSupported())
486 		{
487 			j++;
488 		}
489 		if(j < extensionPropertiesCount)
490 		{
491 			pProperties[i] = extensionProperties[j];
492 		}
493 	}
494 }
495 
copyInstanceExtensions(VkExtensionProperties * pProperties,uint32_t toCopy)496 static void copyInstanceExtensions(VkExtensionProperties *pProperties, uint32_t toCopy)
497 {
498 	copyExtensions(pProperties, toCopy, instanceExtensionProperties, sizeof(instanceExtensionProperties) / sizeof(instanceExtensionProperties[0]));
499 }
500 
copyDeviceExtensions(VkExtensionProperties * pProperties,uint32_t toCopy)501 static void copyDeviceExtensions(VkExtensionProperties *pProperties, uint32_t toCopy)
502 {
503 	copyExtensions(pProperties, toCopy, deviceExtensionProperties, sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0]));
504 }
505 
vkCreateInstance(const VkInstanceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkInstance * pInstance)506 VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkInstance *pInstance)
507 {
508 	TRACE("(const VkInstanceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkInstance* pInstance = %p)",
509 	      pCreateInfo, pAllocator, pInstance);
510 
511 	initializeLibrary();
512 
513 	if(pCreateInfo->flags != 0)
514 	{
515 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
516 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
517 	}
518 
519 	if(pCreateInfo->enabledLayerCount != 0)
520 	{
521 		UNIMPLEMENTED("b/148240133: pCreateInfo->enabledLayerCount != 0");  // FIXME(b/148240133)
522 	}
523 
524 	for(uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; ++i)
525 	{
526 		if(!hasInstanceExtension(pCreateInfo->ppEnabledExtensionNames[i]))
527 		{
528 			return VK_ERROR_EXTENSION_NOT_PRESENT;
529 		}
530 	}
531 
532 	VkDebugUtilsMessengerEXT messenger = { VK_NULL_HANDLE };
533 	if(pCreateInfo->pNext)
534 	{
535 		const VkBaseInStructure *createInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
536 		switch(createInfo->sType)
537 		{
538 		case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT:
539 			{
540 				const VkDebugUtilsMessengerCreateInfoEXT *debugUtilsMessengerCreateInfoEXT = reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>(createInfo);
541 				VkResult result = vk::DebugUtilsMessenger::Create(pAllocator, debugUtilsMessengerCreateInfoEXT, &messenger);
542 				if(result != VK_SUCCESS)
543 				{
544 					return result;
545 				}
546 			}
547 			break;
548 		case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO:
549 			// According to the Vulkan spec, section 2.7.2. Implicit Valid Usage:
550 			// "The values VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO and
551 			//  VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO are reserved for
552 			//  internal use by the loader, and do not have corresponding
553 			//  Vulkan structures in this Specification."
554 			break;
555 		default:
556 			UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(createInfo->sType).c_str());
557 			break;
558 		}
559 	}
560 
561 	*pInstance = VK_NULL_HANDLE;
562 	VkPhysicalDevice physicalDevice = VK_NULL_HANDLE;
563 
564 	VkResult result = vk::DispatchablePhysicalDevice::Create(pAllocator, pCreateInfo, &physicalDevice);
565 	if(result != VK_SUCCESS)
566 	{
567 		vk::destroy(messenger, pAllocator);
568 		return result;
569 	}
570 
571 	result = vk::DispatchableInstance::Create(pAllocator, pCreateInfo, pInstance, physicalDevice, vk::Cast(messenger));
572 	if(result != VK_SUCCESS)
573 	{
574 		vk::destroy(messenger, pAllocator);
575 		vk::destroy(physicalDevice, pAllocator);
576 		return result;
577 	}
578 
579 	return result;
580 }
581 
// vkDestroyInstance() releases the instance object through the driver's
// destroy helper, using pAllocator for deallocation.
VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkInstance instance = %p, const VkAllocationCallbacks* pAllocator = %p)", instance, pAllocator);

	vk::destroy(instance, pAllocator);
}
588 
// vkEnumeratePhysicalDevices() forwards the standard count/fill enumeration
// protocol to the instance object.
VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount, VkPhysicalDevice *pPhysicalDevices)
{
	TRACE("(VkInstance instance = %p, uint32_t* pPhysicalDeviceCount = %p, VkPhysicalDevice* pPhysicalDevices = %p)",
	      instance, pPhysicalDeviceCount, pPhysicalDevices);

	return vk::Cast(instance)->getPhysicalDevices(pPhysicalDeviceCount, pPhysicalDevices);
}
596 
// vkGetPhysicalDeviceFeatures() copies the physical device's feature set into
// *pFeatures.
VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceFeatures* pFeatures = %p)",
	      physicalDevice, pFeatures);

	*pFeatures = vk::Cast(physicalDevice)->getFeatures();
}
604 
// vkGetPhysicalDeviceFormatProperties() fills *pFormatProperties for the
// given format. Note the lookup is a static PhysicalDevice method — the
// result does not depend on the physicalDevice handle.
VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties *pFormatProperties)
{
	TRACE("GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkFormatProperties* pFormatProperties = %p)",
	      physicalDevice, (int)format, pFormatProperties);

	vk::PhysicalDevice::GetFormatProperties(format, pFormatProperties);
}
612 
// vkGetPhysicalDeviceImageFormatProperties() is implemented by wrapping the
// arguments into the *2 structures and delegating to
// vkGetPhysicalDeviceImageFormatProperties2.
VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties *pImageFormatProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkImageType type = %d, VkImageTiling tiling = %d, VkImageUsageFlags usage = %d, VkImageCreateFlags flags = %d, VkImageFormatProperties* pImageFormatProperties = %p)",
	      physicalDevice, (int)format, (int)type, (int)tiling, usage, flags, pImageFormatProperties);

	VkPhysicalDeviceImageFormatInfo2 info2 = {};
	info2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
	info2.pNext = nullptr;
	info2.format = format;
	info2.type = type;
	info2.tiling = tiling;
	info2.usage = usage;
	info2.flags = flags;

	VkImageFormatProperties2 properties2 = {};
	properties2.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
	properties2.pNext = nullptr;

	VkResult result = vkGetPhysicalDeviceImageFormatProperties2(physicalDevice, &info2, &properties2);

	// The properties are copied out unconditionally; on failure the caller
	// receives the zero-initialized structure alongside the error result.
	*pImageFormatProperties = properties2.imageFormatProperties;

	return result;
}
637 
// vkGetPhysicalDeviceProperties() copies the physical device's core
// properties (limits, IDs, etc.) into *pProperties.
VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceProperties* pProperties = %p)",
	      physicalDevice, pProperties);

	*pProperties = vk::Cast(physicalDevice)->getProperties();
}
645 
vkGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,uint32_t * pQueueFamilyPropertyCount,VkQueueFamilyProperties * pQueueFamilyProperties)646 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties *pQueueFamilyProperties)
647 {
648 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pQueueFamilyPropertyCount = %p, VkQueueFamilyProperties* pQueueFamilyProperties = %p))", physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
649 
650 	if(!pQueueFamilyProperties)
651 	{
652 		*pQueueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();
653 	}
654 	else
655 	{
656 		vk::Cast(physicalDevice)->getQueueFamilyProperties(*pQueueFamilyPropertyCount, pQueueFamilyProperties);
657 	}
658 }
659 
vkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,VkPhysicalDeviceMemoryProperties * pMemoryProperties)660 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties *pMemoryProperties)
661 {
662 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceMemoryProperties* pMemoryProperties = %p)", physicalDevice, pMemoryProperties);
663 
664 	*pMemoryProperties = vk::PhysicalDevice::GetMemoryProperties();
665 }
666 
vkGetInstanceProcAddr(VkInstance instance,const char * pName)667 VK_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *pName)
668 {
669 	TRACE("(VkInstance instance = %p, const char* pName = %p)", instance, pName);
670 
671 	return vk::GetInstanceProcAddr(vk::Cast(instance), pName);
672 }
673 
vkGetDeviceProcAddr(VkDevice device,const char * pName)674 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice device, const char *pName)
675 {
676 	TRACE("(VkDevice device = %p, const char* pName = %p)", device, pName);
677 
678 	return vk::GetDeviceProcAddr(vk::Cast(device), pName);
679 }
680 
// Creates a logical device from the given physical device.
//
// Before delegating to vk::DispatchableDevice::Create(), this entry point:
//  - rejects reserved/deprecated fields (flags, enabled layers),
//  - verifies every requested device extension via hasDeviceExtension(),
//  - walks the pNext chain of feature structures, returning
//    VK_ERROR_FEATURE_NOT_PRESENT for any feature SwiftShader cannot honor,
//  - validates each VkDeviceQueueCreateInfo against the physical device's
//    queue family count.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDevice *pDevice)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkDeviceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDevice* pDevice = %p)",
	      physicalDevice, pCreateInfo, pAllocator, pDevice);

	if(pCreateInfo->flags != 0)
	{
		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
	}

	if(pCreateInfo->enabledLayerCount != 0)
	{
		// "The ppEnabledLayerNames and enabledLayerCount members of VkDeviceCreateInfo are deprecated and their values must be ignored by implementations."
		UNSUPPORTED("pCreateInfo->enabledLayerCount != 0");
	}

	// Every requested device extension must be one we advertise.
	for(uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; ++i)
	{
		if(!hasDeviceExtension(pCreateInfo->ppEnabledExtensionNames[i]))
		{
			return VK_ERROR_EXTENSION_NOT_PRESENT;
		}
	}

	const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);

	// May be replaced below if the pNext chain carries a
	// VkPhysicalDeviceFeatures2 structure (which then must be used instead
	// of pEnabledFeatures).
	const VkPhysicalDeviceFeatures *enabledFeatures = pCreateInfo->pEnabledFeatures;

	// Walk the pNext chain, validating each recognized extension structure.
	while(extensionCreateInfo)
	{
		// Casting to a long since some structures, such as
		// VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT
		// are not enumerated in the official Vulkan header
		switch((long)(extensionCreateInfo->sType))
		{
		case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO:
			// According to the Vulkan spec, section 2.7.2. Implicit Valid Usage:
			// "The values VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO and
			//  VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO are reserved for
			//  internal use by the loader, and do not have corresponding
			//  Vulkan structures in this Specification."
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
			{
				ASSERT(!pCreateInfo->pEnabledFeatures);  // "If the pNext chain includes a VkPhysicalDeviceFeatures2 structure, then pEnabledFeatures must be NULL"

				const VkPhysicalDeviceFeatures2 *physicalDeviceFeatures2 = reinterpret_cast<const VkPhysicalDeviceFeatures2 *>(extensionCreateInfo);

				enabledFeatures = &physicalDeviceFeatures2->features;
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES:
			{
				const VkPhysicalDeviceSamplerYcbcrConversionFeatures *samplerYcbcrConversionFeatures = reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures *>(extensionCreateInfo);

				// YCbCr conversion is supported.
				// samplerYcbcrConversionFeatures->samplerYcbcrConversion can be VK_TRUE or VK_FALSE.
				// No action needs to be taken on our end in either case; it's the apps responsibility that
				// "To create a sampler Y'CbCr conversion, the samplerYcbcrConversion feature must be enabled."
				(void)samplerYcbcrConversionFeatures->samplerYcbcrConversion;
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES:
			{
				const VkPhysicalDevice16BitStorageFeatures *storage16BitFeatures = reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures *>(extensionCreateInfo);

				// 16-bit storage is not supported; reject a device that requests
				// any of its feature bits.
				if(storage16BitFeatures->storageBuffer16BitAccess != VK_FALSE ||
				   storage16BitFeatures->uniformAndStorageBuffer16BitAccess != VK_FALSE ||
				   storage16BitFeatures->storagePushConstant16 != VK_FALSE ||
				   storage16BitFeatures->storageInputOutput16 != VK_FALSE)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES:
			{
				const VkPhysicalDeviceVariablePointerFeatures *variablePointerFeatures = reinterpret_cast<const VkPhysicalDeviceVariablePointerFeatures *>(extensionCreateInfo);

				// Variable pointers are not supported.
				if(variablePointerFeatures->variablePointersStorageBuffer != VK_FALSE ||
				   variablePointerFeatures->variablePointers != VK_FALSE)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO:
			{
				const VkDeviceGroupDeviceCreateInfo *groupDeviceCreateInfo = reinterpret_cast<const VkDeviceGroupDeviceCreateInfo *>(extensionCreateInfo);

				// Only single-device groups consisting of this physical device
				// are accepted.
				if((groupDeviceCreateInfo->physicalDeviceCount != 1) ||
				   (groupDeviceCreateInfo->pPhysicalDevices[0] != physicalDevice))
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
			{
				const VkPhysicalDeviceMultiviewFeatures *multiviewFeatures = reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures *>(extensionCreateInfo);

				// Multiview is supported, but not in combination with geometry
				// or tessellation shaders.
				if(multiviewFeatures->multiviewGeometryShader ||
				   multiviewFeatures->multiviewTessellationShader)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES:
			{
				const VkPhysicalDeviceShaderDrawParametersFeatures *shaderDrawParametersFeatures = reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures *>(extensionCreateInfo);

				// Shader draw parameters are not supported.
				if(shaderDrawParametersFeatures->shaderDrawParameters)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR:
			{
				const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *shaderDrawParametersFeatures = reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *>(extensionCreateInfo);

				// Separate depth and stencil layouts is already supported
				(void)(shaderDrawParametersFeatures->separateDepthStencilLayouts);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT:
			{
				const auto *lineRasterizationFeatures = reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(lineRasterizationFeatures);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT:
			{
				const VkPhysicalDeviceProvokingVertexFeaturesEXT *provokingVertexFeatures = reinterpret_cast<const VkPhysicalDeviceProvokingVertexFeaturesEXT *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(provokingVertexFeatures);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT:
			{
				const VkPhysicalDeviceImageRobustnessFeaturesEXT *imageRobustnessFeatures = reinterpret_cast<const VkPhysicalDeviceImageRobustnessFeaturesEXT *>(extensionCreateInfo);

				// We currently always provide robust image accesses. When the feature is disabled, results are
				// undefined (for images with Dim != Buffer), so providing robustness is also acceptable.
				// TODO(b/159329067): Only provide robustness when requested.
				(void)imageRobustnessFeatures->robustImageAccess;
			}
			break;
		// For unsupported structures, check that we don't expose the corresponding extension string:
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT:
			ASSERT(!hasDeviceExtension(VK_EXT_ROBUSTNESS_2_EXTENSION_NAME));
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR:
			{
				const VkPhysicalDeviceImagelessFramebufferFeaturesKHR *imagelessFramebufferFeatures = reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeaturesKHR *>(extensionCreateInfo);
				// Always provide Imageless Framebuffers
				(void)imagelessFramebufferFeatures->imagelessFramebuffer;
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES:
			{
				const VkPhysicalDeviceScalarBlockLayoutFeatures *scalarBlockLayoutFeatures = reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures *>(extensionCreateInfo);

				// VK_EXT_scalar_block_layout is supported, allowing C-like structure layout for SPIR-V blocks.
				(void)scalarBlockLayoutFeatures->scalarBlockLayout;
			}
			break;
#ifdef SWIFTSHADER_DEVICE_MEMORY_REPORT
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT:
			{
				const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *deviceMemoryReportFeatures = reinterpret_cast<const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *>(extensionCreateInfo);
				(void)deviceMemoryReportFeatures->deviceMemoryReport;
			}
			break;
#endif  // SWIFTSHADER_DEVICE_MEMORY_REPORT
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES:
			{
				const VkPhysicalDeviceHostQueryResetFeatures *hostQueryResetFeatures = reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures *>(extensionCreateInfo);

				// VK_EXT_host_query_reset is always enabled.
				(void)hostQueryResetFeatures->hostQueryReset;
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT:
			{
				const VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT *pipelineCreationCacheControlFeatures = reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT *>(extensionCreateInfo);

				// VK_EXT_pipeline_creation_cache_control is always enabled.
				(void)pipelineCreationCacheControlFeatures->pipelineCreationCacheControl;
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES:
			{
				const auto *tsFeatures = reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures *>(extensionCreateInfo);

				// VK_KHR_timeline_semaphores is always enabled
				(void)tsFeatures->timelineSemaphore;
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT:
			{
				const auto *customBorderColorFeatures = reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT *>(extensionCreateInfo);

				// VK_EXT_custom_border_color is always enabled
				(void)customBorderColorFeatures->customBorderColors;
				(void)customBorderColorFeatures->customBorderColorWithoutFormat;
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES:
			{
				const auto *vk11Features = reinterpret_cast<const VkPhysicalDeviceVulkan11Features *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(vk11Features);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES:
			{
				const auto *vk12Features = reinterpret_cast<const VkPhysicalDeviceVulkan12Features *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(vk12Features);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT:
			{
				const auto *depthClipFeatures = reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(depthClipFeatures);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT:
			{
				const auto *blendOpFeatures = reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(blendOpFeatures);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		// These structs are supported, but no behavior changes based on their feature bools
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES:
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES:
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT:
			break;
		default:
			// "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extenions]"
			UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
			break;
		}

		extensionCreateInfo = extensionCreateInfo->pNext;
	}

	ASSERT(pCreateInfo->queueCreateInfoCount > 0);

	// Reject feature sets the physical device cannot provide.
	if(enabledFeatures)
	{
		if(!vk::Cast(physicalDevice)->hasFeatures(*enabledFeatures))
		{
			return VK_ERROR_FEATURE_NOT_PRESENT;
		}
	}

	uint32_t queueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();

	// Validate each requested queue: reserved flags, unknown pNext
	// structures, and queue family indices out of range are all rejected.
	for(uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; i++)
	{
		const VkDeviceQueueCreateInfo &queueCreateInfo = pCreateInfo->pQueueCreateInfos[i];
		if(queueCreateInfo.flags != 0)
		{
			UNSUPPORTED("pCreateInfo->pQueueCreateInfos[%d]->flags %d", i, queueCreateInfo.flags);
		}

		auto extInfo = reinterpret_cast<VkBaseInStructure const *>(queueCreateInfo.pNext);
		while(extInfo)
		{
			UNSUPPORTED("pCreateInfo->pQueueCreateInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
			extInfo = extInfo->pNext;
		}

		ASSERT(queueCreateInfo.queueFamilyIndex < queueFamilyPropertyCount);
		(void)queueFamilyPropertyCount;  // Silence unused variable warning
	}

	auto scheduler = getOrCreateScheduler();
	return vk::DispatchableDevice::Create(pAllocator, pCreateInfo, pDevice, vk::Cast(physicalDevice), enabledFeatures, scheduler);
}
986 
vkDestroyDevice(VkDevice device,const VkAllocationCallbacks * pAllocator)987 VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator)
988 {
989 	TRACE("(VkDevice device = %p, const VkAllocationCallbacks* pAllocator = %p)", device, pAllocator);
990 
991 	vk::destroy(device, pAllocator);
992 }
993 
vkEnumerateInstanceExtensionProperties(const char * pLayerName,uint32_t * pPropertyCount,VkExtensionProperties * pProperties)994 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties)
995 {
996 	TRACE("(const char* pLayerName = %p, uint32_t* pPropertyCount = %p, VkExtensionProperties* pProperties = %p)",
997 	      pLayerName, pPropertyCount, pProperties);
998 
999 	uint32_t extensionPropertiesCount = numInstanceSupportedExtensions();
1000 
1001 	if(!pProperties)
1002 	{
1003 		*pPropertyCount = extensionPropertiesCount;
1004 		return VK_SUCCESS;
1005 	}
1006 
1007 	auto toCopy = std::min(*pPropertyCount, extensionPropertiesCount);
1008 	copyInstanceExtensions(pProperties, toCopy);
1009 
1010 	*pPropertyCount = toCopy;
1011 	return (toCopy < extensionPropertiesCount) ? VK_INCOMPLETE : VK_SUCCESS;
1012 }
1013 
vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,const char * pLayerName,uint32_t * pPropertyCount,VkExtensionProperties * pProperties)1014 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties)
1015 {
1016 	TRACE("(VkPhysicalDevice physicalDevice = %p, const char* pLayerName, uint32_t* pPropertyCount = %p, VkExtensionProperties* pProperties = %p)", physicalDevice, pPropertyCount, pProperties);
1017 
1018 	uint32_t extensionPropertiesCount = numDeviceSupportedExtensions();
1019 
1020 	if(!pProperties)
1021 	{
1022 		*pPropertyCount = extensionPropertiesCount;
1023 		return VK_SUCCESS;
1024 	}
1025 
1026 	auto toCopy = std::min(*pPropertyCount, extensionPropertiesCount);
1027 	copyDeviceExtensions(pProperties, toCopy);
1028 
1029 	*pPropertyCount = toCopy;
1030 	return (toCopy < extensionPropertiesCount) ? VK_INCOMPLETE : VK_SUCCESS;
1031 }
1032 
vkEnumerateInstanceLayerProperties(uint32_t * pPropertyCount,VkLayerProperties * pProperties)1033 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pPropertyCount, VkLayerProperties *pProperties)
1034 {
1035 	TRACE("(uint32_t* pPropertyCount = %p, VkLayerProperties* pProperties = %p)", pPropertyCount, pProperties);
1036 
1037 	if(!pProperties)
1038 	{
1039 		*pPropertyCount = 0;
1040 		return VK_SUCCESS;
1041 	}
1042 
1043 	return VK_SUCCESS;
1044 }
1045 
vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,uint32_t * pPropertyCount,VkLayerProperties * pProperties)1046 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkLayerProperties *pProperties)
1047 {
1048 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pPropertyCount = %p, VkLayerProperties* pProperties = %p)", physicalDevice, pPropertyCount, pProperties);
1049 
1050 	if(!pProperties)
1051 	{
1052 		*pPropertyCount = 0;
1053 		return VK_SUCCESS;
1054 	}
1055 
1056 	return VK_SUCCESS;
1057 }
1058 
vkGetDeviceQueue(VkDevice device,uint32_t queueFamilyIndex,uint32_t queueIndex,VkQueue * pQueue)1059 VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue)
1060 {
1061 	TRACE("(VkDevice device = %p, uint32_t queueFamilyIndex = %d, uint32_t queueIndex = %d, VkQueue* pQueue = %p)",
1062 	      device, queueFamilyIndex, queueIndex, pQueue);
1063 
1064 	*pQueue = vk::Cast(device)->getQueue(queueFamilyIndex, queueIndex);
1065 }
1066 
vkQueueSubmit(VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmits,VkFence fence)1067 VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence)
1068 {
1069 	TRACE("(VkQueue queue = %p, uint32_t submitCount = %d, const VkSubmitInfo* pSubmits = %p, VkFence fence = %p)",
1070 	      queue, submitCount, pSubmits, static_cast<void *>(fence));
1071 
1072 	return vk::Cast(queue)->submit(submitCount, pSubmits, vk::Cast(fence));
1073 }
1074 
vkQueueWaitIdle(VkQueue queue)1075 VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(VkQueue queue)
1076 {
1077 	TRACE("(VkQueue queue = %p)", queue);
1078 
1079 	return vk::Cast(queue)->waitIdle();
1080 }
1081 
vkDeviceWaitIdle(VkDevice device)1082 VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(VkDevice device)
1083 {
1084 	TRACE("(VkDevice device = %p)", device);
1085 
1086 	return vk::Cast(device)->waitIdle();
1087 }
1088 
vkAllocateMemory(VkDevice device,const VkMemoryAllocateInfo * pAllocateInfo,const VkAllocationCallbacks * pAllocator,VkDeviceMemory * pMemory)1089 VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo, const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory)
1090 {
1091 	TRACE("(VkDevice device = %p, const VkMemoryAllocateInfo* pAllocateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDeviceMemory* pMemory = %p)",
1092 	      device, pAllocateInfo, pAllocator, pMemory);
1093 
1094 	VkResult result = vk::DeviceMemory::Allocate(pAllocator, pAllocateInfo, pMemory, vk::Cast(device));
1095 
1096 	if(result != VK_SUCCESS)
1097 	{
1098 		vk::destroy(*pMemory, pAllocator);
1099 		*pMemory = VK_NULL_HANDLE;
1100 	}
1101 
1102 	return result;
1103 }
1104 
vkFreeMemory(VkDevice device,VkDeviceMemory memory,const VkAllocationCallbacks * pAllocator)1105 VKAPI_ATTR void VKAPI_CALL vkFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator)
1106 {
1107 	TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, const VkAllocationCallbacks* pAllocator = %p)",
1108 	      device, static_cast<void *>(memory), pAllocator);
1109 
1110 	vk::destroy(memory, pAllocator);
1111 }
1112 
1113 #if SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
vkGetMemoryFdKHR(VkDevice device,const VkMemoryGetFdInfoKHR * getFdInfo,int * pFd)1114 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR(VkDevice device, const VkMemoryGetFdInfoKHR *getFdInfo, int *pFd)
1115 {
1116 	TRACE("(VkDevice device = %p, const VkMemoryGetFdInfoKHR* getFdInfo = %p, int* pFd = %p",
1117 	      device, getFdInfo, pFd);
1118 
1119 	if(getFdInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT)
1120 	{
1121 		UNSUPPORTED("pGetFdInfo->handleType %u", getFdInfo->handleType);
1122 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1123 	}
1124 	return vk::Cast(getFdInfo->memory)->exportFd(pFd);
1125 }
1126 
vkGetMemoryFdPropertiesKHR(VkDevice device,VkExternalMemoryHandleTypeFlagBits handleType,int fd,VkMemoryFdPropertiesKHR * pMemoryFdProperties)1127 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR *pMemoryFdProperties)
1128 {
1129 	TRACE("(VkDevice device = %p, VkExternalMemoryHandleTypeFlagBits handleType = %x, int fd = %d, VkMemoryFdPropertiesKHR* pMemoryFdProperties = %p)",
1130 	      device, handleType, fd, pMemoryFdProperties);
1131 
1132 	if(handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT)
1133 	{
1134 		UNSUPPORTED("handleType %u", handleType);
1135 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1136 	}
1137 
1138 	if(fd < 0)
1139 	{
1140 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1141 	}
1142 
1143 	const VkPhysicalDeviceMemoryProperties &memoryProperties =
1144 	    vk::PhysicalDevice::GetMemoryProperties();
1145 
1146 	// All SwiftShader memory types support this!
1147 	pMemoryFdProperties->memoryTypeBits = (1U << memoryProperties.memoryTypeCount) - 1U;
1148 
1149 	return VK_SUCCESS;
1150 }
1151 #endif  // SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
1152 #if VK_USE_PLATFORM_FUCHSIA
vkGetMemoryZirconHandleFUCHSIA(VkDevice device,const VkMemoryGetZirconHandleInfoFUCHSIA * pGetHandleInfo,zx_handle_t * pHandle)1153 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandleFUCHSIA(VkDevice device, const VkMemoryGetZirconHandleInfoFUCHSIA *pGetHandleInfo, zx_handle_t *pHandle)
1154 {
1155 	TRACE("(VkDevice device = %p, const VkMemoryGetZirconHandleInfoFUCHSIA* pGetHandleInfo = %p, zx_handle_t* pHandle = %p",
1156 	      device, pGetHandleInfo, pHandle);
1157 
1158 	if(pGetHandleInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA)
1159 	{
1160 		UNSUPPORTED("pGetHandleInfo->handleType %u", pGetHandleInfo->handleType);
1161 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1162 	}
1163 	return vk::Cast(pGetHandleInfo->memory)->exportHandle(pHandle);
1164 }
1165 
vkGetMemoryZirconHandlePropertiesFUCHSIA(VkDevice device,VkExternalMemoryHandleTypeFlagBits handleType,zx_handle_t handle,VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties)1166 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandlePropertiesFUCHSIA(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, zx_handle_t handle, VkMemoryZirconHandlePropertiesFUCHSIA *pMemoryZirconHandleProperties)
1167 {
1168 	TRACE("(VkDevice device = %p, VkExternalMemoryHandleTypeFlagBits handleType = %x, zx_handle_t handle = %d, VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties = %p)",
1169 	      device, handleType, handle, pMemoryZirconHandleProperties);
1170 
1171 	if(handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA)
1172 	{
1173 		UNSUPPORTED("handleType %u", handleType);
1174 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1175 	}
1176 
1177 	if(handle == ZX_HANDLE_INVALID)
1178 	{
1179 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1180 	}
1181 
1182 	const VkPhysicalDeviceMemoryProperties &memoryProperties =
1183 	    vk::PhysicalDevice::GetMemoryProperties();
1184 
1185 	// All SwiftShader memory types support this!
1186 	pMemoryZirconHandleProperties->memoryTypeBits = (1U << memoryProperties.memoryTypeCount) - 1U;
1187 
1188 	return VK_SUCCESS;
1189 }
1190 #endif  // VK_USE_PLATFORM_FUCHSIA
1191 
vkGetMemoryHostPointerPropertiesEXT(VkDevice device,VkExternalMemoryHandleTypeFlagBits handleType,const void * pHostPointer,VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties)1192 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void *pHostPointer, VkMemoryHostPointerPropertiesEXT *pMemoryHostPointerProperties)
1193 {
1194 	TRACE("(VkDevice device = %p, VkExternalMemoryHandleTypeFlagBits handleType = %x, const void *pHostPointer = %p, VkMemoryHostPointerPropertiesEXT *pMemoryHostPointerProperties = %p)",
1195 	      device, handleType, pHostPointer, pMemoryHostPointerProperties);
1196 
1197 	if(handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT && handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT)
1198 	{
1199 		UNSUPPORTED("handleType %u", handleType);
1200 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1201 	}
1202 	pMemoryHostPointerProperties->memoryTypeBits = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
1203 
1204 	return VK_SUCCESS;
1205 }
1206 
1207 #if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
vkGetMemoryAndroidHardwareBufferANDROID(VkDevice device,const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo,struct AHardwareBuffer ** pBuffer)1208 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo, struct AHardwareBuffer **pBuffer)
1209 {
1210 	TRACE("(VkDevice device = %p, const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo = %p, struct AHardwareBuffer **pBuffer = %p)",
1211 	      device, pInfo, pBuffer);
1212 
1213 	return vk::Cast(pInfo->memory)->exportAndroidHardwareBuffer(pBuffer);
1214 }
1215 
vkGetAndroidHardwareBufferPropertiesANDROID(VkDevice device,const struct AHardwareBuffer * buffer,VkAndroidHardwareBufferPropertiesANDROID * pProperties)1216 VKAPI_ATTR VkResult VKAPI_CALL vkGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties)
1217 {
1218 	TRACE("(VkDevice device = %p, const struct AHardwareBuffer *buffer = %p, VkAndroidHardwareBufferPropertiesANDROID *pProperties = %p)",
1219 	      device, buffer, pProperties);
1220 
1221 	return vk::DeviceMemory::GetAndroidHardwareBufferProperties(device, buffer, pProperties);
1222 }
1223 #endif  // SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
1224 
vkMapMemory(VkDevice device,VkDeviceMemory memory,VkDeviceSize offset,VkDeviceSize size,VkMemoryMapFlags flags,void ** ppData)1225 VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void **ppData)
1226 {
1227 	TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, VkDeviceSize offset = %d, VkDeviceSize size = %d, VkMemoryMapFlags flags = %d, void** ppData = %p)",
1228 	      device, static_cast<void *>(memory), int(offset), int(size), flags, ppData);
1229 
1230 	if(flags != 0)
1231 	{
1232 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1233 		UNSUPPORTED("flags %d", int(flags));
1234 	}
1235 
1236 	return vk::Cast(memory)->map(offset, size, ppData);
1237 }
1238 
vkUnmapMemory(VkDevice device,VkDeviceMemory memory)1239 VKAPI_ATTR void VKAPI_CALL vkUnmapMemory(VkDevice device, VkDeviceMemory memory)
1240 {
1241 	TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p)", device, static_cast<void *>(memory));
1242 
1243 	// Noop, memory will be released when the DeviceMemory object is released
1244 }
1245 
vkFlushMappedMemoryRanges(VkDevice device,uint32_t memoryRangeCount,const VkMappedMemoryRange * pMemoryRanges)1246 VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
1247 {
1248 	TRACE("(VkDevice device = %p, uint32_t memoryRangeCount = %d, const VkMappedMemoryRange* pMemoryRanges = %p)",
1249 	      device, memoryRangeCount, pMemoryRanges);
1250 
1251 	// Noop, host and device memory are the same to SwiftShader
1252 
1253 	return VK_SUCCESS;
1254 }
1255 
vkInvalidateMappedMemoryRanges(VkDevice device,uint32_t memoryRangeCount,const VkMappedMemoryRange * pMemoryRanges)1256 VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
1257 {
1258 	TRACE("(VkDevice device = %p, uint32_t memoryRangeCount = %d, const VkMappedMemoryRange* pMemoryRanges = %p)",
1259 	      device, memoryRangeCount, pMemoryRanges);
1260 
1261 	// Noop, host and device memory are the same to SwiftShader
1262 
1263 	return VK_SUCCESS;
1264 }
1265 
// Returns the number of bytes currently committed for a memory object.
// Debug builds additionally assert that the allocation came from a memory
// type advertising VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT, which the
// Vulkan spec requires for this entry point.
VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment(VkDevice pDevice, VkDeviceMemory pMemory, VkDeviceSize *pCommittedMemoryInBytes)
{
	TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, VkDeviceSize* pCommittedMemoryInBytes = %p)",
	      pDevice, static_cast<void *>(pMemory), pCommittedMemoryInBytes);

	auto memory = vk::Cast(pMemory);

#if !defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)
	// Debug-only validation of the memory type's property flags.
	const auto &memoryProperties = vk::PhysicalDevice::GetMemoryProperties();
	uint32_t typeIndex = memory->getMemoryTypeIndex();
	ASSERT(typeIndex < memoryProperties.memoryTypeCount);
	ASSERT(memoryProperties.memoryTypes[typeIndex].propertyFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT);
#endif

	*pCommittedMemoryInBytes = memory->getCommittedMemoryInBytes();
}
1282 
vkBindBufferMemory(VkDevice device,VkBuffer buffer,VkDeviceMemory memory,VkDeviceSize memoryOffset)1283 VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset)
1284 {
1285 	TRACE("(VkDevice device = %p, VkBuffer buffer = %p, VkDeviceMemory memory = %p, VkDeviceSize memoryOffset = %d)",
1286 	      device, static_cast<void *>(buffer), static_cast<void *>(memory), int(memoryOffset));
1287 
1288 	if(!vk::Cast(buffer)->canBindToMemory(vk::Cast(memory)))
1289 	{
1290 		UNSUPPORTED("vkBindBufferMemory with invalid external memory");
1291 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1292 	}
1293 	vk::Cast(buffer)->bind(vk::Cast(memory), memoryOffset);
1294 	return VK_SUCCESS;
1295 }
1296 
vkBindImageMemory(VkDevice device,VkImage image,VkDeviceMemory memory,VkDeviceSize memoryOffset)1297 VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset)
1298 {
1299 	TRACE("(VkDevice device = %p, VkImage image = %p, VkDeviceMemory memory = %p, VkDeviceSize memoryOffset = %d)",
1300 	      device, static_cast<void *>(image), static_cast<void *>(memory), int(memoryOffset));
1301 
1302 	if(!vk::Cast(image)->canBindToMemory(vk::Cast(memory)))
1303 	{
1304 		UNSUPPORTED("vkBindImageMemory with invalid external memory");
1305 		return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1306 	}
1307 	vk::Cast(image)->bind(vk::Cast(memory), memoryOffset);
1308 	return VK_SUCCESS;
1309 }
1310 
vkGetBufferMemoryRequirements(VkDevice device,VkBuffer buffer,VkMemoryRequirements * pMemoryRequirements)1311 VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements)
1312 {
1313 	TRACE("(VkDevice device = %p, VkBuffer buffer = %p, VkMemoryRequirements* pMemoryRequirements = %p)",
1314 	      device, static_cast<void *>(buffer), pMemoryRequirements);
1315 
1316 	*pMemoryRequirements = vk::Cast(buffer)->getMemoryRequirements();
1317 }
1318 
vkGetImageMemoryRequirements(VkDevice device,VkImage image,VkMemoryRequirements * pMemoryRequirements)1319 VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements)
1320 {
1321 	TRACE("(VkDevice device = %p, VkImage image = %p, VkMemoryRequirements* pMemoryRequirements = %p)",
1322 	      device, static_cast<void *>(image), pMemoryRequirements);
1323 
1324 	*pMemoryRequirements = vk::Cast(image)->getMemoryRequirements();
1325 }
1326 
vkGetImageSparseMemoryRequirements(VkDevice device,VkImage image,uint32_t * pSparseMemoryRequirementCount,VkSparseImageMemoryRequirements * pSparseMemoryRequirements)1327 VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
1328 {
1329 	TRACE("(VkDevice device = %p, VkImage image = %p, uint32_t* pSparseMemoryRequirementCount = %p, VkSparseImageMemoryRequirements* pSparseMemoryRequirements = %p)",
1330 	      device, static_cast<void *>(image), pSparseMemoryRequirementCount, pSparseMemoryRequirements);
1331 
1332 	// The 'sparseBinding' feature is not supported, so images can not be created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag.
1333 	// "If the image was not created with VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT then pSparseMemoryRequirementCount will be set to zero and pSparseMemoryRequirements will not be written to."
1334 	*pSparseMemoryRequirementCount = 0;
1335 }
1336 
vkGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice,VkFormat format,VkImageType type,VkSampleCountFlagBits samples,VkImageUsageFlags usage,VkImageTiling tiling,uint32_t * pPropertyCount,VkSparseImageFormatProperties * pProperties)1337 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t *pPropertyCount, VkSparseImageFormatProperties *pProperties)
1338 {
1339 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkImageType type = %d, VkSampleCountFlagBits samples = %d, VkImageUsageFlags usage = %d, VkImageTiling tiling = %d, uint32_t* pPropertyCount = %p, VkSparseImageFormatProperties* pProperties = %p)",
1340 	      physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
1341 
1342 	// We do not support sparse images.
1343 	*pPropertyCount = 0;
1344 }
1345 
// Sparse resource binding is not supported by SwiftShader ('sparseBinding'
// feature is not advertised), so a well-behaved application never calls this.
// UNSUPPORTED() asserts in debug builds; release builds fall through and
// return VK_SUCCESS.
VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo, VkFence fence)
{
	TRACE("()");
	UNSUPPORTED("vkQueueBindSparse");
	return VK_SUCCESS;
}
1352 
vkCreateFence(VkDevice device,const VkFenceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFence * pFence)1353 VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkFence *pFence)
1354 {
1355 	TRACE("(VkDevice device = %p, const VkFenceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkFence* pFence = %p)",
1356 	      device, pCreateInfo, pAllocator, pFence);
1357 
1358 	auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1359 	while(nextInfo)
1360 	{
1361 		switch(nextInfo->sType)
1362 		{
1363 		case VK_STRUCTURE_TYPE_MAX_ENUM:
1364 			// dEQP tests that this value is ignored.
1365 			break;
1366 		default:
1367 			UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
1368 			break;
1369 		}
1370 		nextInfo = nextInfo->pNext;
1371 	}
1372 
1373 	return vk::Fence::Create(pAllocator, pCreateInfo, pFence);
1374 }
1375 
vkDestroyFence(VkDevice device,VkFence fence,const VkAllocationCallbacks * pAllocator)1376 VKAPI_ATTR void VKAPI_CALL vkDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator)
1377 {
1378 	TRACE("(VkDevice device = %p, VkFence fence = %p, const VkAllocationCallbacks* pAllocator = %p)",
1379 	      device, static_cast<void *>(fence), pAllocator);
1380 
1381 	vk::destroy(fence, pAllocator);
1382 }
1383 
vkResetFences(VkDevice device,uint32_t fenceCount,const VkFence * pFences)1384 VKAPI_ATTR VkResult VKAPI_CALL vkResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences)
1385 {
1386 	TRACE("(VkDevice device = %p, uint32_t fenceCount = %d, const VkFence* pFences = %p)",
1387 	      device, fenceCount, pFences);
1388 
1389 	for(uint32_t i = 0; i < fenceCount; i++)
1390 	{
1391 		vk::Cast(pFences[i])->reset();
1392 	}
1393 
1394 	return VK_SUCCESS;
1395 }
1396 
vkGetFenceStatus(VkDevice device,VkFence fence)1397 VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus(VkDevice device, VkFence fence)
1398 {
1399 	TRACE("(VkDevice device = %p, VkFence fence = %p)", device, static_cast<void *>(fence));
1400 
1401 	return vk::Cast(fence)->getStatus();
1402 }
1403 
vkWaitForFences(VkDevice device,uint32_t fenceCount,const VkFence * pFences,VkBool32 waitAll,uint64_t timeout)1404 VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll, uint64_t timeout)
1405 {
1406 	TRACE("(VkDevice device = %p, uint32_t fenceCount = %d, const VkFence* pFences = %p, VkBool32 waitAll = %d, uint64_t timeout = %" PRIu64 ")",
1407 	      device, int(fenceCount), pFences, int(waitAll), timeout);
1408 
1409 	return vk::Cast(device)->waitForFences(fenceCount, pFences, waitAll, timeout);
1410 }
1411 
// Creates either a binary or a timeline semaphore, depending on the optional
// VkSemaphoreTypeCreateInfo in the pNext chain (binary when absent).
VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore)
{
	TRACE("(VkDevice device = %p, const VkSemaphoreCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSemaphore* pSemaphore = %p)",
	      device, pCreateInfo, pAllocator, pSemaphore);

	if(pCreateInfo->flags != 0)
	{
		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
	}

	VkSemaphoreType type = VK_SEMAPHORE_TYPE_BINARY;
	for(const auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
	    nextInfo != nullptr; nextInfo = nextInfo->pNext)
	{
		switch(nextInfo->sType)
		{
		case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO:
			// Let the semaphore constructor handle this
			break;
		case VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO:
			{
				// Selects binary vs. timeline semantics for the new semaphore.
				const VkSemaphoreTypeCreateInfo *info = reinterpret_cast<const VkSemaphoreTypeCreateInfo *>(nextInfo);
				type = info->semaphoreType;
			}
			break;
		default:
			// Unknown extensions only warn (not UNSUPPORTED) here.
			WARN("nextInfo->sType = %s", vk::Stringify(nextInfo->sType).c_str());
			break;
		}
	}

	// NOTE(review): pAllocator is intentionally passed twice — presumably once
	// for the handle allocation and once for the object to retain for its own
	// internal allocations; confirm against Semaphore::Create's signature.
	if(type == VK_SEMAPHORE_TYPE_BINARY)
	{
		return vk::BinarySemaphore::Create(pAllocator, pCreateInfo, pSemaphore, pAllocator);
	}
	else
	{
		return vk::TimelineSemaphore::Create(pAllocator, pCreateInfo, pSemaphore, pAllocator);
	}
}
1453 
vkDestroySemaphore(VkDevice device,VkSemaphore semaphore,const VkAllocationCallbacks * pAllocator)1454 VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator)
1455 {
1456 	TRACE("(VkDevice device = %p, VkSemaphore semaphore = %p, const VkAllocationCallbacks* pAllocator = %p)",
1457 	      device, static_cast<void *>(semaphore), pAllocator);
1458 
1459 	vk::destroy(semaphore, pAllocator);
1460 }
1461 
1462 #if SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
vkGetSemaphoreFdKHR(VkDevice device,const VkSemaphoreGetFdInfoKHR * pGetFdInfo,int * pFd)1463 VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd)
1464 {
1465 	TRACE("(VkDevice device = %p, const VkSemaphoreGetFdInfoKHR* pGetFdInfo = %p, int* pFd = %p)",
1466 	      device, static_cast<const void *>(pGetFdInfo), static_cast<void *>(pFd));
1467 
1468 	if(pGetFdInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT)
1469 	{
1470 		UNSUPPORTED("pGetFdInfo->handleType %d", int(pGetFdInfo->handleType));
1471 	}
1472 
1473 	auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pGetFdInfo->semaphore);
1474 	ASSERT(sem != nullptr);
1475 	return sem->exportFd(pFd);
1476 }
1477 
vkImportSemaphoreFdKHR(VkDevice device,const VkImportSemaphoreFdInfoKHR * pImportSemaphoreInfo)1478 VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR *pImportSemaphoreInfo)
1479 {
1480 	TRACE("(VkDevice device = %p, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreInfo = %p",
1481 	      device, static_cast<const void *>(pImportSemaphoreInfo));
1482 
1483 	if(pImportSemaphoreInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT)
1484 	{
1485 		UNSUPPORTED("pImportSemaphoreInfo->handleType %d", int(pImportSemaphoreInfo->handleType));
1486 	}
1487 	bool temporaryImport = (pImportSemaphoreInfo->flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) != 0;
1488 
1489 	auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pImportSemaphoreInfo->semaphore);
1490 	ASSERT(sem != nullptr);
1491 	return sem->importFd(pImportSemaphoreInfo->fd, temporaryImport);
1492 }
1493 #endif  // SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
1494 
1495 #if VK_USE_PLATFORM_FUCHSIA
// Imports a Zircon event handle as the payload of a binary semaphore
// (Fuchsia-only external semaphore path).
VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreZirconHandleFUCHSIA(
    VkDevice device,
    const VkImportSemaphoreZirconHandleInfoFUCHSIA *pImportSemaphoreZirconHandleInfo)
{
	TRACE("(VkDevice device = %p, const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo = %p)",
	      device, pImportSemaphoreZirconHandleInfo);

	// Only the Zircon event handle type is supported.
	if(pImportSemaphoreZirconHandleInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA)
	{
		UNSUPPORTED("pImportSemaphoreZirconHandleInfo->handleType %d", int(pImportSemaphoreZirconHandleInfo->handleType));
	}
	// A temporary import reverts to the previous payload after the next wait.
	bool temporaryImport = (pImportSemaphoreZirconHandleInfo->flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) != 0;
	auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pImportSemaphoreZirconHandleInfo->semaphore);
	ASSERT(sem != nullptr);
	return sem->importHandle(pImportSemaphoreZirconHandleInfo->zirconHandle, temporaryImport);
}
1512 
// Exports a binary semaphore's payload as a Zircon event handle
// (Fuchsia-only external semaphore path).
VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreZirconHandleFUCHSIA(
    VkDevice device,
    const VkSemaphoreGetZirconHandleInfoFUCHSIA *pGetZirconHandleInfo,
    zx_handle_t *pZirconHandle)
{
	TRACE("(VkDevice device = %p, const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo = %p, zx_handle_t* pZirconHandle = %p)",
	      device, static_cast<const void *>(pGetZirconHandleInfo), static_cast<void *>(pZirconHandle));

	// Only the Zircon event handle type is supported.
	if(pGetZirconHandleInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA)
	{
		UNSUPPORTED("pGetZirconHandleInfo->handleType %d", int(pGetZirconHandleInfo->handleType));
	}

	auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pGetZirconHandleInfo->semaphore);
	ASSERT(sem != nullptr);
	return sem->exportHandle(pZirconHandle);
}
1530 #endif  // VK_USE_PLATFORM_FUCHSIA
1531 
vkGetSemaphoreCounterValue(VkDevice device,VkSemaphore semaphore,uint64_t * pValue)1532 VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue)
1533 {
1534 	TRACE("(VkDevice device = %p, VkSemaphore semaphore = %p, uint64_t* pValue = %p)",
1535 	      device, static_cast<void *>(semaphore), pValue);
1536 	*pValue = vk::DynamicCast<vk::TimelineSemaphore>(semaphore)->getCounterValue();
1537 	return VK_SUCCESS;
1538 }
1539 
vkSignalSemaphore(VkDevice device,const VkSemaphoreSignalInfo * pSignalInfo)1540 VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphore(VkDevice device, const VkSemaphoreSignalInfo *pSignalInfo)
1541 {
1542 	TRACE("(VkDevice device = %p, const VkSemaphoreSignalInfo *pSignalInfo = %p)",
1543 	      device, pSignalInfo);
1544 	vk::DynamicCast<vk::TimelineSemaphore>(pSignalInfo->semaphore)->signal(pSignalInfo->value);
1545 	return VK_SUCCESS;
1546 }
1547 
vkWaitSemaphores(VkDevice device,const VkSemaphoreWaitInfo * pWaitInfo,uint64_t timeout)1548 VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout)
1549 {
1550 	TRACE("(VkDevice device = %p, const VkSemaphoreWaitInfo *pWaitInfo = %p, uint64_t timeout = %" PRIu64 ")",
1551 	      device, pWaitInfo, timeout);
1552 	return vk::Cast(device)->waitForSemaphores(pWaitInfo, timeout);
1553 }
1554 
vkCreateEvent(VkDevice device,const VkEventCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkEvent * pEvent)1555 VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkEvent *pEvent)
1556 {
1557 	TRACE("(VkDevice device = %p, const VkEventCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkEvent* pEvent = %p)",
1558 	      device, pCreateInfo, pAllocator, pEvent);
1559 
1560 	if(pCreateInfo->flags != 0)
1561 	{
1562 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1563 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1564 	}
1565 
1566 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
1567 	while(extInfo)
1568 	{
1569 		// Vulkan 1.2: "pNext must be NULL"
1570 		UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1571 		extInfo = extInfo->pNext;
1572 	}
1573 
1574 	return vk::Event::Create(pAllocator, pCreateInfo, pEvent);
1575 }
1576 
vkDestroyEvent(VkDevice device,VkEvent event,const VkAllocationCallbacks * pAllocator)1577 VKAPI_ATTR void VKAPI_CALL vkDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator)
1578 {
1579 	TRACE("(VkDevice device = %p, VkEvent event = %p, const VkAllocationCallbacks* pAllocator = %p)",
1580 	      device, static_cast<void *>(event), pAllocator);
1581 
1582 	vk::destroy(event, pAllocator);
1583 }
1584 
vkGetEventStatus(VkDevice device,VkEvent event)1585 VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus(VkDevice device, VkEvent event)
1586 {
1587 	TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));
1588 
1589 	return vk::Cast(event)->getStatus();
1590 }
1591 
vkSetEvent(VkDevice device,VkEvent event)1592 VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent(VkDevice device, VkEvent event)
1593 {
1594 	TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));
1595 
1596 	vk::Cast(event)->signal();
1597 
1598 	return VK_SUCCESS;
1599 }
1600 
vkResetEvent(VkDevice device,VkEvent event)1601 VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent(VkDevice device, VkEvent event)
1602 {
1603 	TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));
1604 
1605 	vk::Cast(event)->reset();
1606 
1607 	return VK_SUCCESS;
1608 }
1609 
vkCreateQueryPool(VkDevice device,const VkQueryPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkQueryPool * pQueryPool)1610 VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool)
1611 {
1612 	TRACE("(VkDevice device = %p, const VkQueryPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkQueryPool* pQueryPool = %p)",
1613 	      device, pCreateInfo, pAllocator, pQueryPool);
1614 
1615 	if(pCreateInfo->flags != 0)
1616 	{
1617 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1618 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1619 	}
1620 
1621 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
1622 	while(extInfo)
1623 	{
1624 		UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1625 		extInfo = extInfo->pNext;
1626 	}
1627 
1628 	return vk::QueryPool::Create(pAllocator, pCreateInfo, pQueryPool);
1629 }
1630 
vkDestroyQueryPool(VkDevice device,VkQueryPool queryPool,const VkAllocationCallbacks * pAllocator)1631 VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator)
1632 {
1633 	TRACE("(VkDevice device = %p, VkQueryPool queryPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
1634 	      device, static_cast<void *>(queryPool), pAllocator);
1635 
1636 	vk::destroy(queryPool, pAllocator);
1637 }
1638 
vkGetQueryPoolResults(VkDevice device,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,size_t dataSize,void * pData,VkDeviceSize stride,VkQueryResultFlags flags)1639 VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void *pData, VkDeviceSize stride, VkQueryResultFlags flags)
1640 {
1641 	TRACE("(VkDevice device = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d, size_t dataSize = %d, void* pData = %p, VkDeviceSize stride = %d, VkQueryResultFlags flags = %d)",
1642 	      device, static_cast<void *>(queryPool), int(firstQuery), int(queryCount), int(dataSize), pData, int(stride), flags);
1643 
1644 	return vk::Cast(queryPool)->getResults(firstQuery, queryCount, dataSize, pData, stride, flags);
1645 }
1646 
vkCreateBuffer(VkDevice device,const VkBufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBuffer * pBuffer)1647 VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer)
1648 {
1649 	TRACE("(VkDevice device = %p, const VkBufferCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkBuffer* pBuffer = %p)",
1650 	      device, pCreateInfo, pAllocator, pBuffer);
1651 
1652 	auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1653 	while(nextInfo)
1654 	{
1655 		switch(nextInfo->sType)
1656 		{
1657 		case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO:
1658 			// Do nothing. Should be handled by vk::Buffer::Create().
1659 			break;
1660 		case VK_STRUCTURE_TYPE_MAX_ENUM:
1661 			// dEQP tests that this value is ignored.
1662 			break;
1663 		default:
1664 			UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
1665 			break;
1666 		}
1667 		nextInfo = nextInfo->pNext;
1668 	}
1669 
1670 	return vk::Buffer::Create(pAllocator, pCreateInfo, pBuffer);
1671 }
1672 
vkDestroyBuffer(VkDevice device,VkBuffer buffer,const VkAllocationCallbacks * pAllocator)1673 VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator)
1674 {
1675 	TRACE("(VkDevice device = %p, VkBuffer buffer = %p, const VkAllocationCallbacks* pAllocator = %p)",
1676 	      device, static_cast<void *>(buffer), pAllocator);
1677 
1678 	vk::destroy(buffer, pAllocator);
1679 }
1680 
vkGetBufferDeviceAddress(VkDevice device,const VkBufferDeviceAddressInfo * pInfo)1681 VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferDeviceAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo)
1682 {
1683 	TRACE("(VkDevice device = %p, const VkBufferDeviceAddressInfo* pInfo = %p)",
1684 	      device, pInfo);
1685 	UNSUPPORTED("VK_KHR_buffer_device_address");
1686 	return 0;
1687 }
1688 
vkGetBufferOpaqueCaptureAddress(VkDevice device,const VkBufferDeviceAddressInfo * pInfo)1689 VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo)
1690 {
1691 	TRACE("(VkDevice device = %p, const VkBufferDeviceAddressInfo* pInfo = %p)",
1692 	      device, pInfo);
1693 	UNSUPPORTED("VK_KHR_buffer_device_address");
1694 	return 0;
1695 }
1696 
vkGetDeviceMemoryOpaqueCaptureAddress(VkDevice device,const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo)1697 VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddress(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo *pInfo)
1698 {
1699 	TRACE("(VkDevice device = %p, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo = %p)",
1700 	      device, pInfo);
1701 	UNSUPPORTED("VK_KHR_buffer_device_address");
1702 	return 0;
1703 }
1704 
vkCreateBufferView(VkDevice device,const VkBufferViewCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBufferView * pView)1705 VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBufferView *pView)
1706 {
1707 	TRACE("(VkDevice device = %p, const VkBufferViewCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkBufferView* pView = %p)",
1708 	      device, pCreateInfo, pAllocator, pView);
1709 
1710 	if(pCreateInfo->flags != 0)
1711 	{
1712 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1713 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1714 	}
1715 
1716 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
1717 	while(extInfo)
1718 	{
1719 		UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1720 		extInfo = extInfo->pNext;
1721 	}
1722 
1723 	return vk::BufferView::Create(pAllocator, pCreateInfo, pView);
1724 }
1725 
vkDestroyBufferView(VkDevice device,VkBufferView bufferView,const VkAllocationCallbacks * pAllocator)1726 VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator)
1727 {
1728 	TRACE("(VkDevice device = %p, VkBufferView bufferView = %p, const VkAllocationCallbacks* pAllocator = %p)",
1729 	      device, static_cast<void *>(bufferView), pAllocator);
1730 
1731 	vk::destroy(bufferView, pAllocator);
1732 }
1733 
// Creates a VkImage. Walks the pNext chain for recognized extension structs,
// then delegates to vk::Image::Create(). On Android, images backed by a
// native buffer (swapchain images) additionally get device memory allocated
// and bound here, and the native-buffer info recorded as backing memory.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkImage *pImage)
{
	TRACE("(VkDevice device = %p, const VkImageCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkImage* pImage = %p)",
	      device, pCreateInfo, pAllocator, pImage);

	const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);

#ifdef __ANDROID__
	vk::BackingMemory backmem;
	bool swapchainImage = false;
#endif

	while(extensionCreateInfo)
	{
		// Cast to long: presumably so the Android-specific sType values, which
		// lie outside the core VkStructureType enum, can appear as case labels
		// without narrowing/warning issues — confirm against the build flags.
		switch((long)(extensionCreateInfo->sType))
		{
#ifdef __ANDROID__
		case VK_STRUCTURE_TYPE_SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID:
			{
				// Records the Android gralloc usage bits for the backing memory.
				const VkSwapchainImageCreateInfoANDROID *swapImageCreateInfo = reinterpret_cast<const VkSwapchainImageCreateInfoANDROID *>(extensionCreateInfo);
				backmem.androidUsage = swapImageCreateInfo->usage;
			}
			break;
		case VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID:
			{
				// The presence of a native buffer marks this as a swapchain image,
				// which triggers the memory allocate/bind path below.
				const VkNativeBufferANDROID *nativeBufferInfo = reinterpret_cast<const VkNativeBufferANDROID *>(extensionCreateInfo);
				backmem.nativeHandle = nativeBufferInfo->handle;
				backmem.stride = nativeBufferInfo->stride;
				swapchainImage = true;
			}
			break;
		case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
			break;
#endif
		case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
			// Do nothing. Should be handled by vk::Image::Create()
			break;
		case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
			/* Do nothing. We don't actually need the swapchain handle yet; we'll do all the work in vkBindImageMemory2. */
			break;
		case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
			// Do nothing. This extension tells the driver which image formats will be used
			// by the application. Swiftshader is not impacted from lacking this information,
			// so we don't need to track the format list.
			break;
		case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
			{
				// SwiftShader does not use an image's usage info for non-debug purposes outside of
				// vkGetPhysicalDeviceImageFormatProperties2. This also applies to separate stencil usage.
				const VkImageStencilUsageCreateInfo *stencilUsageInfo = reinterpret_cast<const VkImageStencilUsageCreateInfo *>(extensionCreateInfo);
				(void)stencilUsageInfo->stencilUsage;
			}
			break;
		case VK_STRUCTURE_TYPE_MAX_ENUM:
			// dEQP tests that this value is ignored.
			break;
		default:
			// "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extenions]"
			UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
			break;
		}

		extensionCreateInfo = extensionCreateInfo->pNext;
	}

	VkResult result = vk::Image::Create(pAllocator, pCreateInfo, pImage, vk::Cast(device));

#ifdef __ANDROID__
	if(swapchainImage)
	{
		if(result != VK_SUCCESS)
		{
			return result;
		}

		// Swapchain images own their backing memory: allocate and bind it here
		// so the image is usable as soon as creation returns.
		vk::Image *image = vk::Cast(*pImage);
		VkMemoryRequirements memRequirements = image->getMemoryRequirements();

		VkMemoryAllocateInfo allocInfo = {};
		allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
		allocInfo.allocationSize = memRequirements.size;
		allocInfo.memoryTypeIndex = 0;

		VkDeviceMemory devmem = { VK_NULL_HANDLE };
		result = vkAllocateMemory(device, &allocInfo, pAllocator, &devmem);
		if(result != VK_SUCCESS)
		{
			// NOTE(review): the image created above is not destroyed on this
			// failure path — presumably the caller cleans it up; verify.
			return result;
		}

		vkBindImageMemory(device, *pImage, devmem, 0);
		// Mark the allocation as driver-owned so vkDestroyImage frees it.
		backmem.externalMemory = true;

		image->setBackingMemory(backmem);
	}
#endif

	return result;
}
1833 
// Destroys a VkImage. On Android, a swapchain image's driver-owned backing
// memory (allocated in vkCreateImage) is released first, then the image.
VKAPI_ATTR void VKAPI_CALL vkDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkImage image = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(image), pAllocator);

#ifdef __ANDROID__
	// Free the memory the driver allocated on the image's behalf before
	// destroying the image itself (order matters: the image still references it).
	vk::Image *img = vk::Cast(image);
	if(img && img->hasExternalMemory())
	{
		vk::destroy(img->getExternalMemory(), pAllocator);
	}
#endif

	vk::destroy(image, pAllocator);
}
1849 
vkGetImageSubresourceLayout(VkDevice device,VkImage image,const VkImageSubresource * pSubresource,VkSubresourceLayout * pLayout)1850 VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource, VkSubresourceLayout *pLayout)
1851 {
1852 	TRACE("(VkDevice device = %p, VkImage image = %p, const VkImageSubresource* pSubresource = %p, VkSubresourceLayout* pLayout = %p)",
1853 	      device, static_cast<void *>(image), pSubresource, pLayout);
1854 
1855 	vk::Cast(image)->getSubresourceLayout(pSubresource, pLayout);
1856 }
1857 
// Creates a VkImageView, honoring the usage-restriction and sampler Y'CbCr
// conversion pNext structures, and registers the new view with the device.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkImageView *pView)
{
	TRACE("(VkDevice device = %p, const VkImageViewCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkImageView* pView = %p)",
	      device, pCreateInfo, pAllocator, pView);

	if(pCreateInfo->flags != 0)
	{
		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
	}

	const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
	const vk::SamplerYcbcrConversion *ycbcrConversion = nullptr;

	while(extensionCreateInfo)
	{
		switch(extensionCreateInfo->sType)
		{
		case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO:
			{
				// Debug-check that the restricted view usage is a subset of the
				// image's usage; the value is otherwise unused here.
				const VkImageViewUsageCreateInfo *multiviewCreateInfo = reinterpret_cast<const VkImageViewUsageCreateInfo *>(extensionCreateInfo);
				ASSERT(!(~vk::Cast(pCreateInfo->image)->getUsage() & multiviewCreateInfo->usage));
			}
			break;
		case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
			{
				// Capture the conversion object; it is forwarded to ImageView::Create.
				const VkSamplerYcbcrConversionInfo *samplerYcbcrConversionInfo = reinterpret_cast<const VkSamplerYcbcrConversionInfo *>(extensionCreateInfo);
				ycbcrConversion = vk::Cast(samplerYcbcrConversionInfo->conversion);

				if(ycbcrConversion)
				{
					// Views with a Y'CbCr conversion must use identity-equivalent
					// component swizzles.
					ASSERT((pCreateInfo->components.r == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.r == VK_COMPONENT_SWIZZLE_R) &&
					       (pCreateInfo->components.g == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.g == VK_COMPONENT_SWIZZLE_G) &&
					       (pCreateInfo->components.b == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.b == VK_COMPONENT_SWIZZLE_B) &&
					       (pCreateInfo->components.a == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.a == VK_COMPONENT_SWIZZLE_A));
				}
			}
			break;
		case VK_STRUCTURE_TYPE_MAX_ENUM:
			// dEQP tests that this value is ignored.
			break;
		default:
			UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
			break;
		}

		extensionCreateInfo = extensionCreateInfo->pNext;
	}

	VkResult result = vk::ImageView::Create(pAllocator, pCreateInfo, pView, ycbcrConversion);
	if(result == VK_SUCCESS)
	{
		// The device tracks live views; vkDestroyImageView unregisters them.
		vk::Cast(device)->registerImageView(vk::Cast(*pView));
	}

	return result;
}
1914 
// Destroys an image view: unregisters it from the device's tracking list
// first, then frees the object through the (possibly null) allocator.
VKAPI_ATTR void VKAPI_CALL vkDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkImageView imageView = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(imageView), pAllocator);

	// Unregister before destruction so the device no longer references the view.
	vk::Cast(device)->unregisterImageView(vk::Cast(imageView));
	vk::destroy(imageView, pAllocator);
}
1923 
vkCreateShaderModule(VkDevice device,const VkShaderModuleCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkShaderModule * pShaderModule)1924 VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule)
1925 {
1926 	TRACE("(VkDevice device = %p, const VkShaderModuleCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkShaderModule* pShaderModule = %p)",
1927 	      device, pCreateInfo, pAllocator, pShaderModule);
1928 
1929 	if(pCreateInfo->flags != 0)
1930 	{
1931 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1932 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1933 	}
1934 
1935 	auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1936 	while(nextInfo)
1937 	{
1938 		switch(nextInfo->sType)
1939 		{
1940 		case VK_STRUCTURE_TYPE_MAX_ENUM:
1941 			// dEQP tests that this value is ignored.
1942 			break;
1943 		default:
1944 			UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
1945 			break;
1946 		}
1947 		nextInfo = nextInfo->pNext;
1948 	}
1949 
1950 	return vk::ShaderModule::Create(pAllocator, pCreateInfo, pShaderModule);
1951 }
1952 
// Destroys a shader module through the (possibly null) allocator.
VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkShaderModule shaderModule = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(shaderModule), pAllocator);

	vk::destroy(shaderModule, pAllocator);
}
1960 
vkCreatePipelineCache(VkDevice device,const VkPipelineCacheCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkPipelineCache * pPipelineCache)1961 VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPipelineCache *pPipelineCache)
1962 {
1963 	TRACE("(VkDevice device = %p, const VkPipelineCacheCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipelineCache* pPipelineCache = %p)",
1964 	      device, pCreateInfo, pAllocator, pPipelineCache);
1965 
1966 	if(pCreateInfo->flags != 0)
1967 	{
1968 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1969 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1970 	}
1971 
1972 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
1973 	while(extInfo)
1974 	{
1975 		UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1976 		extInfo = extInfo->pNext;
1977 	}
1978 
1979 	return vk::PipelineCache::Create(pAllocator, pCreateInfo, pPipelineCache);
1980 }
1981 
// Destroys a pipeline cache through the (possibly null) allocator.
VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(pipelineCache), pAllocator);

	vk::destroy(pipelineCache, pAllocator);
}
1989 
// Retrieves serialized pipeline cache data; size-query vs. data-copy behavior
// is delegated to PipelineCache::getData.
VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize, void *pData)
{
	TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, size_t* pDataSize = %p, void* pData = %p)",
	      device, static_cast<void *>(pipelineCache), pDataSize, pData);

	return vk::Cast(pipelineCache)->getData(pDataSize, pData);
}
1997 
// Merges the contents of srcCacheCount source caches into dstCache.
VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache *pSrcCaches)
{
	TRACE("(VkDevice device = %p, VkPipelineCache dstCache = %p, uint32_t srcCacheCount = %d, const VkPipelineCache* pSrcCaches = %p)",
	      device, static_cast<void *>(dstCache), int(srcCacheCount), pSrcCaches);

	return vk::Cast(dstCache)->merge(srcCacheCount, pSrcCaches);
}
2005 
vkCreateGraphicsPipelines(VkDevice device,VkPipelineCache pipelineCache,uint32_t createInfoCount,const VkGraphicsPipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines)2006 VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines)
2007 {
2008 	TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, uint32_t createInfoCount = %d, const VkGraphicsPipelineCreateInfo* pCreateInfos = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipeline* pPipelines = %p)",
2009 	      device, static_cast<void *>(pipelineCache), int(createInfoCount), pCreateInfos, pAllocator, pPipelines);
2010 
2011 	memset(pPipelines, 0, sizeof(void *) * createInfoCount);
2012 
2013 	VkResult errorResult = VK_SUCCESS;
2014 	for(uint32_t i = 0; i < createInfoCount; i++)
2015 	{
2016 		VkResult result = vk::GraphicsPipeline::Create(pAllocator, &pCreateInfos[i], &pPipelines[i], vk::Cast(device));
2017 
2018 		if(result == VK_SUCCESS)
2019 		{
2020 			result = static_cast<vk::GraphicsPipeline *>(vk::Cast(pPipelines[i]))->compileShaders(pAllocator, &pCreateInfos[i], vk::Cast(pipelineCache));
2021 			if(result != VK_SUCCESS)
2022 			{
2023 				vk::destroy(pPipelines[i], pAllocator);
2024 			}
2025 		}
2026 
2027 		if(result != VK_SUCCESS)
2028 		{
2029 			// According to the Vulkan spec, section 9.4. Multiple Pipeline Creation
2030 			// "When an application attempts to create many pipelines in a single command,
2031 			//  it is possible that some subset may fail creation. In that case, the
2032 			//  corresponding entries in the pPipelines output array will be filled with
2033 			//  VK_NULL_HANDLE values. If any pipeline fails creation (for example, due to
2034 			//  out of memory errors), the vkCreate*Pipelines commands will return an
2035 			//  error code. The implementation will attempt to create all pipelines, and
2036 			//  only return VK_NULL_HANDLE values for those that actually failed."
2037 			pPipelines[i] = VK_NULL_HANDLE;
2038 			errorResult = result;
2039 
2040 			// VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT specifies that control
2041 			// will be returned to the application on failure of the corresponding pipeline
2042 			// rather than continuing to create additional pipelines.
2043 			if(pCreateInfos[i].flags & VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT)
2044 			{
2045 				return errorResult;
2046 			}
2047 		}
2048 	}
2049 
2050 	return errorResult;
2051 }
2052 
vkCreateComputePipelines(VkDevice device,VkPipelineCache pipelineCache,uint32_t createInfoCount,const VkComputePipelineCreateInfo * pCreateInfos,const VkAllocationCallbacks * pAllocator,VkPipeline * pPipelines)2053 VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines)
2054 {
2055 	TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, uint32_t createInfoCount = %d, const VkComputePipelineCreateInfo* pCreateInfos = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipeline* pPipelines = %p)",
2056 	      device, static_cast<void *>(pipelineCache), int(createInfoCount), pCreateInfos, pAllocator, pPipelines);
2057 
2058 	memset(pPipelines, 0, sizeof(void *) * createInfoCount);
2059 
2060 	VkResult errorResult = VK_SUCCESS;
2061 	for(uint32_t i = 0; i < createInfoCount; i++)
2062 	{
2063 		VkResult result = vk::ComputePipeline::Create(pAllocator, &pCreateInfos[i], &pPipelines[i], vk::Cast(device));
2064 
2065 		if(result == VK_SUCCESS)
2066 		{
2067 			result = static_cast<vk::ComputePipeline *>(vk::Cast(pPipelines[i]))->compileShaders(pAllocator, &pCreateInfos[i], vk::Cast(pipelineCache));
2068 			if(result != VK_SUCCESS)
2069 			{
2070 				vk::destroy(pPipelines[i], pAllocator);
2071 			}
2072 		}
2073 
2074 		if(result != VK_SUCCESS)
2075 		{
2076 			// According to the Vulkan spec, section 9.4. Multiple Pipeline Creation
2077 			// "When an application attempts to create many pipelines in a single command,
2078 			//  it is possible that some subset may fail creation. In that case, the
2079 			//  corresponding entries in the pPipelines output array will be filled with
2080 			//  VK_NULL_HANDLE values. If any pipeline fails creation (for example, due to
2081 			//  out of memory errors), the vkCreate*Pipelines commands will return an
2082 			//  error code. The implementation will attempt to create all pipelines, and
2083 			//  only return VK_NULL_HANDLE values for those that actually failed."
2084 			pPipelines[i] = VK_NULL_HANDLE;
2085 			errorResult = result;
2086 
2087 			// VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT specifies that control
2088 			// will be returned to the application on failure of the corresponding pipeline
2089 			// rather than continuing to create additional pipelines.
2090 			if(pCreateInfos[i].flags & VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT)
2091 			{
2092 				return errorResult;
2093 			}
2094 		}
2095 	}
2096 
2097 	return errorResult;
2098 }
2099 
// Destroys a pipeline through the (possibly null) allocator.
VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkPipeline pipeline = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(pipeline), pAllocator);

	vk::destroy(pipeline, pAllocator);
}
2107 
vkCreatePipelineLayout(VkDevice device,const VkPipelineLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkPipelineLayout * pPipelineLayout)2108 VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout)
2109 {
2110 	TRACE("(VkDevice device = %p, const VkPipelineLayoutCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipelineLayout* pPipelineLayout = %p)",
2111 	      device, pCreateInfo, pAllocator, pPipelineLayout);
2112 
2113 	if(pCreateInfo->flags != 0)
2114 	{
2115 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2116 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
2117 	}
2118 
2119 	auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2120 	while(nextInfo)
2121 	{
2122 		switch(nextInfo->sType)
2123 		{
2124 		case VK_STRUCTURE_TYPE_MAX_ENUM:
2125 			// dEQP tests that this value is ignored.
2126 			break;
2127 		default:
2128 			UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2129 			break;
2130 		}
2131 		nextInfo = nextInfo->pNext;
2132 	}
2133 
2134 	return vk::PipelineLayout::Create(pAllocator, pCreateInfo, pPipelineLayout);
2135 }
2136 
// Destroys a pipeline layout. Uses vk::release (not vk::destroy), so the
// layout appears to be reference-counted and is only freed when no pipeline
// still holds it.
VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkPipelineLayout pipelineLayout = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(pipelineLayout), pAllocator);

	vk::release(pipelineLayout, pAllocator);
}
2144 
// Creates a sampler. Parses the pNext chain for Y'CbCr conversion info, the
// GOOGLE filtering-precision hint, and a custom border color, then builds a
// SamplerState and registers it in the device's sampler index before creating
// the handle.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSampler *pSampler)
{
	TRACE("(VkDevice device = %p, const VkSamplerCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSampler* pSampler = %p)",
	      device, pCreateInfo, pAllocator, pSampler);

	if(pCreateInfo->flags != 0)
	{
		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
	}

	const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
	const vk::SamplerYcbcrConversion *ycbcrConversion = nullptr;
	VkSamplerFilteringPrecisionModeGOOGLE filteringPrecision = VK_SAMPLER_FILTERING_PRECISION_MODE_LOW_GOOGLE;
	VkClearColorValue borderColor = {};

	while(extensionCreateInfo)
	{
		// NOTE(review): the cast to long presumably allows the switch to include the
		// non-core GOOGLE sType value without enum-range warnings — confirm.
		switch(static_cast<long>(extensionCreateInfo->sType))
		{
		case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
			{
				const VkSamplerYcbcrConversionInfo *samplerYcbcrConversionInfo =
				    reinterpret_cast<const VkSamplerYcbcrConversionInfo *>(extensionCreateInfo);
				ycbcrConversion = vk::Cast(samplerYcbcrConversionInfo->conversion);
			}
			break;
#if !defined(__ANDROID__)
		case VK_STRUCTURE_TYPE_SAMPLER_FILTERING_PRECISION_GOOGLE:
			{
				const VkSamplerFilteringPrecisionGOOGLE *filteringInfo =
				    reinterpret_cast<const VkSamplerFilteringPrecisionGOOGLE *>(extensionCreateInfo);
				filteringPrecision = filteringInfo->samplerFilteringPrecisionMode;
			}
			break;
#endif
		case VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT:
			{
				const VkSamplerCustomBorderColorCreateInfoEXT *borderColorInfo =
				    reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT *>(extensionCreateInfo);

				borderColor = borderColorInfo->customBorderColor;
			}
			break;
		default:
			UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
			break;
		}

		extensionCreateInfo = extensionCreateInfo->pNext;
	}

	// Index the sampler state before creating the handle; the returned ID
	// identifies this state in the device's sampler cache.
	vk::SamplerState samplerState(pCreateInfo, ycbcrConversion, filteringPrecision, borderColor);
	uint32_t samplerID = vk::Cast(device)->indexSampler(samplerState);

	VkResult result = vk::Sampler::Create(pAllocator, pCreateInfo, pSampler, samplerState, samplerID);

	if(*pSampler == VK_NULL_HANDLE)
	{
		// Creation failed: undo the indexSampler() reference taken above.
		ASSERT(result != VK_SUCCESS);
		vk::Cast(device)->removeSampler(samplerState);
	}

	return result;
}
2209 
// Destroys a sampler: drops its entry from the device's sampler cache, then
// frees the object. Destroying VK_NULL_HANDLE is a no-op, as the spec allows.
VKAPI_ATTR void VKAPI_CALL vkDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkSampler sampler = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(sampler), pAllocator);

	if(sampler != VK_NULL_HANDLE)
	{
		// Balance the indexSampler() performed at creation time.
		vk::Cast(device)->removeSampler(*vk::Cast(sampler));

		vk::destroy(sampler, pAllocator);
	}
}
2222 
// Creates a descriptor set layout. The binding-flags extension structure is
// tolerated on the pNext chain but otherwise ignored.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorSetLayout *pSetLayout)
{
	TRACE("(VkDevice device = %p, const VkDescriptorSetLayoutCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorSetLayout* pSetLayout = %p)",
	      device, pCreateInfo, pAllocator, pSetLayout);

	const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);

	while(extensionCreateInfo)
	{
		switch(extensionCreateInfo->sType)
		{
		case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT:
			// NOTE(review): asserts the descriptor-indexing extension was NOT enabled,
			// i.e. the struct is only silently ignored when the app didn't actually
			// request the extension — confirm this is the intended polarity.
			ASSERT(!vk::Cast(device)->hasExtension(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME));
			break;
		default:
			UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
			break;
		}

		extensionCreateInfo = extensionCreateInfo->pNext;
	}

	return vk::DescriptorSetLayout::Create(pAllocator, pCreateInfo, pSetLayout);
}
2247 
// Destroys a descriptor set layout through the (possibly null) allocator.
VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkDescriptorSetLayout descriptorSetLayout = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(descriptorSetLayout), pAllocator);

	vk::destroy(descriptorSetLayout, pAllocator);
}
2255 
vkCreateDescriptorPool(VkDevice device,const VkDescriptorPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorPool * pDescriptorPool)2256 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool)
2257 {
2258 	TRACE("(VkDevice device = %p, const VkDescriptorPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorPool* pDescriptorPool = %p)",
2259 	      device, pCreateInfo, pAllocator, pDescriptorPool);
2260 
2261 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
2262 	while(extInfo)
2263 	{
2264 		UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2265 		extInfo = extInfo->pNext;
2266 	}
2267 
2268 	return vk::DescriptorPool::Create(pAllocator, pCreateInfo, pDescriptorPool);
2269 }
2270 
// Destroys a descriptor pool (and implicitly the sets allocated from it)
// through the (possibly null) allocator.
VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(descriptorPool), pAllocator);

	vk::destroy(descriptorPool, pAllocator);
}
2278 
// Resets a descriptor pool, returning all its descriptor sets to the pool.
// flags must be 0 per the spec.
VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags)
{
	TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, VkDescriptorPoolResetFlags flags = 0x%x)",
	      device, static_cast<void *>(descriptorPool), int(flags));

	if(flags != 0)
	{
		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
		UNSUPPORTED("flags %d", int(flags));
	}

	return vk::Cast(descriptorPool)->reset();
}
2292 
vkAllocateDescriptorSets(VkDevice device,const VkDescriptorSetAllocateInfo * pAllocateInfo,VkDescriptorSet * pDescriptorSets)2293 VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo, VkDescriptorSet *pDescriptorSets)
2294 {
2295 	TRACE("(VkDevice device = %p, const VkDescriptorSetAllocateInfo* pAllocateInfo = %p, VkDescriptorSet* pDescriptorSets = %p)",
2296 	      device, pAllocateInfo, pDescriptorSets);
2297 
2298 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pAllocateInfo->pNext);
2299 	while(extInfo)
2300 	{
2301 		UNSUPPORTED("pAllocateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2302 		extInfo = extInfo->pNext;
2303 	}
2304 
2305 	return vk::Cast(pAllocateInfo->descriptorPool)->allocateSets(pAllocateInfo->descriptorSetCount, pAllocateInfo->pSetLayouts, pDescriptorSets);
2306 }
2307 
// Returns descriptor sets to their pool. Always reports VK_SUCCESS, as the
// spec permits for this command.
VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets)
{
	TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, uint32_t descriptorSetCount = %d, const VkDescriptorSet* pDescriptorSets = %p)",
	      device, static_cast<void *>(descriptorPool), descriptorSetCount, pDescriptorSets);

	vk::Cast(descriptorPool)->freeSets(descriptorSetCount, pDescriptorSets);

	return VK_SUCCESS;
}
2317 
// Applies a batch of descriptor writes and copies; the actual update logic is
// delegated to the device object.
VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet *pDescriptorCopies)
{
	TRACE("(VkDevice device = %p, uint32_t descriptorWriteCount = %d, const VkWriteDescriptorSet* pDescriptorWrites = %p, uint32_t descriptorCopyCount = %d, const VkCopyDescriptorSet* pDescriptorCopies = %p)",
	      device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);

	vk::Cast(device)->updateDescriptorSets(descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
}
2325 
// Creates a framebuffer object from the given create info.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer)
{
	TRACE("(VkDevice device = %p, const VkFramebufferCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkFramebuffer* pFramebuffer = %p)",
	      device, pCreateInfo, pAllocator, pFramebuffer);

	return vk::Framebuffer::Create(pAllocator, pCreateInfo, pFramebuffer);
}
2333 
// Destroys a framebuffer through the (possibly null) allocator.
VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkFramebuffer framebuffer = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(framebuffer), pAllocator);

	vk::destroy(framebuffer, pAllocator);
}
2341 
// Creates a render pass from a Vulkan 1.0-style create info. pNext validation
// is shared with vkCreateRenderPass2 via ValidateRenderPassPNextChain.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass)
{
	TRACE("(VkDevice device = %p, const VkRenderPassCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkRenderPass* pRenderPass = %p)",
	      device, pCreateInfo, pAllocator, pRenderPass);

	if(pCreateInfo->flags != 0)
	{
		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
	}

	ValidateRenderPassPNextChain(device, pCreateInfo);

	return vk::RenderPass::Create(pAllocator, pCreateInfo, pRenderPass);
}
2357 
vkCreateRenderPass2(VkDevice device,const VkRenderPassCreateInfo2KHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass)2358 VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass)
2359 {
2360 	TRACE("(VkDevice device = %p, const VkRenderPassCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkRenderPass* pRenderPass = %p)",
2361 	      device, pCreateInfo, pAllocator, pRenderPass);
2362 
2363 	if(pCreateInfo->flags != 0)
2364 	{
2365 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2366 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
2367 	}
2368 
2369 	ValidateRenderPassPNextChain(device, pCreateInfo);
2370 
2371 	return vk::RenderPass::Create(pAllocator, pCreateInfo, pRenderPass);
2372 }
2373 
// Destroys a render pass through the (possibly null) allocator.
VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkRenderPass renderPass = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(renderPass), pAllocator);

	vk::destroy(renderPass, pAllocator);
}
2381 
// Reports the optimal render-area alignment for the given render pass.
VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity)
{
	TRACE("(VkDevice device = %p, VkRenderPass renderPass = %p, VkExtent2D* pGranularity = %p)",
	      device, static_cast<void *>(renderPass), pGranularity);

	vk::Cast(renderPass)->getRenderAreaGranularity(pGranularity);
}
2389 
vkCreateCommandPool(VkDevice device,const VkCommandPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkCommandPool * pCommandPool)2390 VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool)
2391 {
2392 	TRACE("(VkDevice device = %p, const VkCommandPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkCommandPool* pCommandPool = %p)",
2393 	      device, pCreateInfo, pAllocator, pCommandPool);
2394 
2395 	auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2396 	while(nextInfo)
2397 	{
2398 		switch(nextInfo->sType)
2399 		{
2400 		case VK_STRUCTURE_TYPE_MAX_ENUM:
2401 			// dEQP tests that this value is ignored.
2402 			break;
2403 		default:
2404 			UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2405 			break;
2406 		}
2407 		nextInfo = nextInfo->pNext;
2408 	}
2409 
2410 	return vk::CommandPool::Create(pAllocator, pCreateInfo, pCommandPool);
2411 }
2412 
// Destroys a command pool (and implicitly its command buffers) through the
// (possibly null) allocator.
VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(commandPool), pAllocator);

	vk::destroy(commandPool, pAllocator);
}
2420 
// Resets all command buffers in the pool; flags are forwarded to the pool.
VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags)
{
	TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, VkCommandPoolResetFlags flags = %d)",
	      device, static_cast<void *>(commandPool), int(flags));

	return vk::Cast(commandPool)->reset(flags);
}
2428 
vkAllocateCommandBuffers(VkDevice device,const VkCommandBufferAllocateInfo * pAllocateInfo,VkCommandBuffer * pCommandBuffers)2429 VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo, VkCommandBuffer *pCommandBuffers)
2430 {
2431 	TRACE("(VkDevice device = %p, const VkCommandBufferAllocateInfo* pAllocateInfo = %p, VkCommandBuffer* pCommandBuffers = %p)",
2432 	      device, pAllocateInfo, pCommandBuffers);
2433 
2434 	auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pAllocateInfo->pNext);
2435 	while(nextInfo)
2436 	{
2437 		switch(nextInfo->sType)
2438 		{
2439 		case VK_STRUCTURE_TYPE_MAX_ENUM:
2440 			// dEQP tests that this value is ignored.
2441 			break;
2442 		default:
2443 			UNSUPPORTED("pAllocateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2444 			break;
2445 		}
2446 		nextInfo = nextInfo->pNext;
2447 	}
2448 
2449 	return vk::Cast(pAllocateInfo->commandPool)->allocateCommandBuffers(vk::Cast(device), pAllocateInfo->level, pAllocateInfo->commandBufferCount, pCommandBuffers);
2450 }
2451 
// Returns command buffers to the pool they were allocated from.
VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers)
{
	TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, uint32_t commandBufferCount = %d, const VkCommandBuffer* pCommandBuffers = %p)",
	      device, static_cast<void *>(commandPool), int(commandBufferCount), pCommandBuffers);

	vk::Cast(commandPool)->freeCommandBuffers(commandBufferCount, pCommandBuffers);
}
2459 
vkBeginCommandBuffer(VkCommandBuffer commandBuffer,const VkCommandBufferBeginInfo * pBeginInfo)2460 VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo)
2461 {
2462 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkCommandBufferBeginInfo* pBeginInfo = %p)",
2463 	      commandBuffer, pBeginInfo);
2464 
2465 	auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pBeginInfo->pNext);
2466 	while(nextInfo)
2467 	{
2468 		switch(nextInfo->sType)
2469 		{
2470 		case VK_STRUCTURE_TYPE_MAX_ENUM:
2471 			// dEQP tests that this value is ignored.
2472 			break;
2473 		default:
2474 			UNSUPPORTED("pBeginInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2475 			break;
2476 		}
2477 		nextInfo = nextInfo->pNext;
2478 	}
2479 
2480 	return vk::Cast(commandBuffer)->begin(pBeginInfo->flags, pBeginInfo->pInheritanceInfo);
2481 }
2482 
vkEndCommandBuffer(VkCommandBuffer commandBuffer)2483 VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer(VkCommandBuffer commandBuffer)
2484 {
2485 	TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);
2486 
2487 	return vk::Cast(commandBuffer)->end();
2488 }
2489 
vkResetCommandBuffer(VkCommandBuffer commandBuffer,VkCommandBufferResetFlags flags)2490 VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags)
2491 {
2492 	TRACE("(VkCommandBuffer commandBuffer = %p, VkCommandBufferResetFlags flags = %d)", commandBuffer, int(flags));
2493 
2494 	return vk::Cast(commandBuffer)->reset(flags);
2495 }
2496 
vkCmdBindPipeline(VkCommandBuffer commandBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipeline pipeline)2497 VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline)
2498 {
2499 	TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineBindPoint pipelineBindPoint = %d, VkPipeline pipeline = %p)",
2500 	      commandBuffer, int(pipelineBindPoint), static_cast<void *>(pipeline));
2501 
2502 	vk::Cast(commandBuffer)->bindPipeline(pipelineBindPoint, vk::Cast(pipeline));
2503 }
2504 
vkCmdSetViewport(VkCommandBuffer commandBuffer,uint32_t firstViewport,uint32_t viewportCount,const VkViewport * pViewports)2505 VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport *pViewports)
2506 {
2507 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstViewport = %d, uint32_t viewportCount = %d, const VkViewport* pViewports = %p)",
2508 	      commandBuffer, int(firstViewport), int(viewportCount), pViewports);
2509 
2510 	vk::Cast(commandBuffer)->setViewport(firstViewport, viewportCount, pViewports);
2511 }
2512 
vkCmdSetScissor(VkCommandBuffer commandBuffer,uint32_t firstScissor,uint32_t scissorCount,const VkRect2D * pScissors)2513 VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D *pScissors)
2514 {
2515 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstScissor = %d, uint32_t scissorCount = %d, const VkRect2D* pScissors = %p)",
2516 	      commandBuffer, int(firstScissor), int(scissorCount), pScissors);
2517 
2518 	vk::Cast(commandBuffer)->setScissor(firstScissor, scissorCount, pScissors);
2519 }
2520 
vkCmdSetLineWidth(VkCommandBuffer commandBuffer,float lineWidth)2521 VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
2522 {
2523 	TRACE("(VkCommandBuffer commandBuffer = %p, float lineWidth = %f)", commandBuffer, lineWidth);
2524 
2525 	vk::Cast(commandBuffer)->setLineWidth(lineWidth);
2526 }
2527 
vkCmdSetDepthBias(VkCommandBuffer commandBuffer,float depthBiasConstantFactor,float depthBiasClamp,float depthBiasSlopeFactor)2528 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor)
2529 {
2530 	TRACE("(VkCommandBuffer commandBuffer = %p, float depthBiasConstantFactor = %f, float depthBiasClamp = %f, float depthBiasSlopeFactor = %f)",
2531 	      commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
2532 
2533 	vk::Cast(commandBuffer)->setDepthBias(depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
2534 }
2535 
vkCmdSetBlendConstants(VkCommandBuffer commandBuffer,const float blendConstants[4])2536 VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4])
2537 {
2538 	TRACE("(VkCommandBuffer commandBuffer = %p, const float blendConstants[4] = {%f, %f, %f, %f})",
2539 	      commandBuffer, blendConstants[0], blendConstants[1], blendConstants[2], blendConstants[3]);
2540 
2541 	vk::Cast(commandBuffer)->setBlendConstants(blendConstants);
2542 }
2543 
vkCmdSetDepthBounds(VkCommandBuffer commandBuffer,float minDepthBounds,float maxDepthBounds)2544 VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds)
2545 {
2546 	TRACE("(VkCommandBuffer commandBuffer = %p, float minDepthBounds = %f, float maxDepthBounds = %f)",
2547 	      commandBuffer, minDepthBounds, maxDepthBounds);
2548 
2549 	vk::Cast(commandBuffer)->setDepthBounds(minDepthBounds, maxDepthBounds);
2550 }
2551 
vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer,VkStencilFaceFlags faceMask,uint32_t compareMask)2552 VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask)
2553 {
2554 	TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t compareMask = %d)",
2555 	      commandBuffer, int(faceMask), int(compareMask));
2556 
2557 	vk::Cast(commandBuffer)->setStencilCompareMask(faceMask, compareMask);
2558 }
2559 
vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer,VkStencilFaceFlags faceMask,uint32_t writeMask)2560 VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask)
2561 {
2562 	TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t writeMask = %d)",
2563 	      commandBuffer, int(faceMask), int(writeMask));
2564 
2565 	vk::Cast(commandBuffer)->setStencilWriteMask(faceMask, writeMask);
2566 }
2567 
vkCmdSetStencilReference(VkCommandBuffer commandBuffer,VkStencilFaceFlags faceMask,uint32_t reference)2568 VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference)
2569 {
2570 	TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t reference = %d)",
2571 	      commandBuffer, int(faceMask), int(reference));
2572 
2573 	vk::Cast(commandBuffer)->setStencilReference(faceMask, reference);
2574 }
2575 
vkCmdBindDescriptorSets(VkCommandBuffer commandBuffer,VkPipelineBindPoint pipelineBindPoint,VkPipelineLayout layout,uint32_t firstSet,uint32_t descriptorSetCount,const VkDescriptorSet * pDescriptorSets,uint32_t dynamicOffsetCount,const uint32_t * pDynamicOffsets)2576 VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t *pDynamicOffsets)
2577 {
2578 	TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineBindPoint pipelineBindPoint = %d, VkPipelineLayout layout = %p, uint32_t firstSet = %d, uint32_t descriptorSetCount = %d, const VkDescriptorSet* pDescriptorSets = %p, uint32_t dynamicOffsetCount = %d, const uint32_t* pDynamicOffsets = %p)",
2579 	      commandBuffer, int(pipelineBindPoint), static_cast<void *>(layout), int(firstSet), int(descriptorSetCount), pDescriptorSets, int(dynamicOffsetCount), pDynamicOffsets);
2580 
2581 	vk::Cast(commandBuffer)->bindDescriptorSets(pipelineBindPoint, vk::Cast(layout), firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
2582 }
2583 
vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer,VkBuffer buffer,VkDeviceSize offset,VkIndexType indexType)2584 VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType)
2585 {
2586 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, VkIndexType indexType = %d)",
2587 	      commandBuffer, static_cast<void *>(buffer), int(offset), int(indexType));
2588 
2589 	vk::Cast(commandBuffer)->bindIndexBuffer(vk::Cast(buffer), offset, indexType);
2590 }
2591 
vkCmdBindVertexBuffers(VkCommandBuffer commandBuffer,uint32_t firstBinding,uint32_t bindingCount,const VkBuffer * pBuffers,const VkDeviceSize * pOffsets)2592 VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer *pBuffers, const VkDeviceSize *pOffsets)
2593 {
2594 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstBinding = %d, uint32_t bindingCount = %d, const VkBuffer* pBuffers = %p, const VkDeviceSize* pOffsets = %p)",
2595 	      commandBuffer, int(firstBinding), int(bindingCount), pBuffers, pOffsets);
2596 
2597 	vk::Cast(commandBuffer)->bindVertexBuffers(firstBinding, bindingCount, pBuffers, pOffsets);
2598 }
2599 
vkCmdDraw(VkCommandBuffer commandBuffer,uint32_t vertexCount,uint32_t instanceCount,uint32_t firstVertex,uint32_t firstInstance)2600 VKAPI_ATTR void VKAPI_CALL vkCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance)
2601 {
2602 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t vertexCount = %d, uint32_t instanceCount = %d, uint32_t firstVertex = %d, uint32_t firstInstance = %d)",
2603 	      commandBuffer, int(vertexCount), int(instanceCount), int(firstVertex), int(firstInstance));
2604 
2605 	vk::Cast(commandBuffer)->draw(vertexCount, instanceCount, firstVertex, firstInstance);
2606 }
2607 
vkCmdDrawIndexed(VkCommandBuffer commandBuffer,uint32_t indexCount,uint32_t instanceCount,uint32_t firstIndex,int32_t vertexOffset,uint32_t firstInstance)2608 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance)
2609 {
2610 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t indexCount = %d, uint32_t instanceCount = %d, uint32_t firstIndex = %d, int32_t vertexOffset = %d, uint32_t firstInstance = %d)",
2611 	      commandBuffer, int(indexCount), int(instanceCount), int(firstIndex), int(vertexOffset), int(firstInstance));
2612 
2613 	vk::Cast(commandBuffer)->drawIndexed(indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
2614 }
2615 
vkCmdDrawIndirect(VkCommandBuffer commandBuffer,VkBuffer buffer,VkDeviceSize offset,uint32_t drawCount,uint32_t stride)2616 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride)
2617 {
2618 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, uint32_t drawCount = %d, uint32_t stride = %d)",
2619 	      commandBuffer, static_cast<void *>(buffer), int(offset), int(drawCount), int(stride));
2620 
2621 	vk::Cast(commandBuffer)->drawIndirect(vk::Cast(buffer), offset, drawCount, stride);
2622 }
2623 
vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer,VkBuffer buffer,VkDeviceSize offset,uint32_t drawCount,uint32_t stride)2624 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride)
2625 {
2626 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, uint32_t drawCount = %d, uint32_t stride = %d)",
2627 	      commandBuffer, static_cast<void *>(buffer), int(offset), int(drawCount), int(stride));
2628 
2629 	vk::Cast(commandBuffer)->drawIndexedIndirect(vk::Cast(buffer), offset, drawCount, stride);
2630 }
2631 
vkCmdDrawIndirectCount(VkCommandBuffer commandBuffer,VkBuffer buffer,VkDeviceSize offset,VkBuffer countBuffer,VkDeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride)2632 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride)
2633 {
2634 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, VkBuffer countBuffer = %p, VkDeviceSize countBufferOffset = %d, uint32_t maxDrawCount = %d, uint32_t stride = %d",
2635 	      commandBuffer, static_cast<void *>(buffer), int(offset), static_cast<void *>(countBuffer), int(countBufferOffset), int(maxDrawCount), int(stride));
2636 	UNSUPPORTED("VK_KHR_draw_indirect_count");
2637 }
2638 
vkCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer,VkBuffer buffer,VkDeviceSize offset,VkBuffer countBuffer,VkDeviceSize countBufferOffset,uint32_t maxDrawCount,uint32_t stride)2639 VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride)
2640 {
2641 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, VkBuffer countBuffer = %p, VkDeviceSize countBufferOffset = %d, uint32_t maxDrawCount = %d, uint32_t stride = %d",
2642 	      commandBuffer, static_cast<void *>(buffer), int(offset), static_cast<void *>(countBuffer), int(countBufferOffset), int(maxDrawCount), int(stride));
2643 	UNSUPPORTED("VK_KHR_draw_indirect_count");
2644 }
2645 
vkCmdDispatch(VkCommandBuffer commandBuffer,uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ)2646 VKAPI_ATTR void VKAPI_CALL vkCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ)
2647 {
2648 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t groupCountX = %d, uint32_t groupCountY = %d, uint32_t groupCountZ = %d)",
2649 	      commandBuffer, int(groupCountX), int(groupCountY), int(groupCountZ));
2650 
2651 	vk::Cast(commandBuffer)->dispatch(groupCountX, groupCountY, groupCountZ);
2652 }
2653 
vkCmdDispatchIndirect(VkCommandBuffer commandBuffer,VkBuffer buffer,VkDeviceSize offset)2654 VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset)
2655 {
2656 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d)",
2657 	      commandBuffer, static_cast<void *>(buffer), int(offset));
2658 
2659 	vk::Cast(commandBuffer)->dispatchIndirect(vk::Cast(buffer), offset);
2660 }
2661 
vkCmdCopyBuffer(VkCommandBuffer commandBuffer,VkBuffer srcBuffer,VkBuffer dstBuffer,uint32_t regionCount,const VkBufferCopy * pRegions)2662 VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy *pRegions)
2663 {
2664 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer srcBuffer = %p, VkBuffer dstBuffer = %p, uint32_t regionCount = %d, const VkBufferCopy* pRegions = %p)",
2665 	      commandBuffer, static_cast<void *>(srcBuffer), static_cast<void *>(dstBuffer), int(regionCount), pRegions);
2666 
2667 	vk::Cast(commandBuffer)->copyBuffer(vk::CopyBufferInfo(srcBuffer, dstBuffer, regionCount, pRegions));
2668 }
2669 
vkCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer,const VkCopyBufferInfo2KHR * pCopyBufferInfo)2670 VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR *pCopyBufferInfo)
2671 {
2672 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkCopyBufferInfo2KHR* pCopyBufferInfo = %p)",
2673 	      commandBuffer, pCopyBufferInfo);
2674 
2675 	vk::Cast(commandBuffer)->copyBuffer(*pCopyBufferInfo);
2676 }
2677 
vkCmdCopyImage(VkCommandBuffer commandBuffer,VkImage srcImage,VkImageLayout srcImageLayout,VkImage dstImage,VkImageLayout dstImageLayout,uint32_t regionCount,const VkImageCopy * pRegions)2678 VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions)
2679 {
2680 	TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageCopy* pRegions = %p)",
2681 	      commandBuffer, static_cast<void *>(srcImage), srcImageLayout, static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions);
2682 
2683 	vk::Cast(commandBuffer)->copyImage(vk::CopyImageInfo(srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions));
2684 }
2685 
vkCmdCopyImage2KHR(VkCommandBuffer commandBuffer,const VkCopyImageInfo2KHR * pCopyImageInfo)2686 VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2KHR(VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR *pCopyImageInfo)
2687 {
2688 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkCopyImageInfo2KHR* pCopyImageInfo = %p)",
2689 	      commandBuffer, pCopyImageInfo);
2690 
2691 	vk::Cast(commandBuffer)->copyImage(*pCopyImageInfo);
2692 }
2693 
vkCmdBlitImage(VkCommandBuffer commandBuffer,VkImage srcImage,VkImageLayout srcImageLayout,VkImage dstImage,VkImageLayout dstImageLayout,uint32_t regionCount,const VkImageBlit * pRegions,VkFilter filter)2694 VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter)
2695 {
2696 	TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageBlit* pRegions = %p, VkFilter filter = %d)",
2697 	      commandBuffer, static_cast<void *>(srcImage), srcImageLayout, static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions, filter);
2698 
2699 	vk::Cast(commandBuffer)->blitImage(vk::BlitImageInfo(srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter));
2700 }
2701 
vkCmdBlitImage2KHR(VkCommandBuffer commandBuffer,const VkBlitImageInfo2KHR * pBlitImageInfo)2702 VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2KHR(VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR *pBlitImageInfo)
2703 {
2704 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkBlitImageInfo2KHR* pBlitImageInfo = %p)",
2705 	      commandBuffer, pBlitImageInfo);
2706 
2707 	vk::Cast(commandBuffer)->blitImage(*pBlitImageInfo);
2708 }
2709 
vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer,VkBuffer srcBuffer,VkImage dstImage,VkImageLayout dstImageLayout,uint32_t regionCount,const VkBufferImageCopy * pRegions)2710 VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions)
2711 {
2712 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer srcBuffer = %p, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkBufferImageCopy* pRegions = %p)",
2713 	      commandBuffer, static_cast<void *>(srcBuffer), static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions);
2714 
2715 	vk::Cast(commandBuffer)->copyBufferToImage(vk::CopyBufferToImageInfo(srcBuffer, dstImage, dstImageLayout, regionCount, pRegions));
2716 }
2717 
vkCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer,const VkCopyBufferToImageInfo2KHR * pCopyBufferToImageInfo)2718 VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2KHR(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2KHR *pCopyBufferToImageInfo)
2719 {
2720 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo = %p)",
2721 	      commandBuffer, pCopyBufferToImageInfo);
2722 
2723 	vk::Cast(commandBuffer)->copyBufferToImage(*pCopyBufferToImageInfo);
2724 }
2725 
vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer,VkImage srcImage,VkImageLayout srcImageLayout,VkBuffer dstBuffer,uint32_t regionCount,const VkBufferImageCopy * pRegions)2726 VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions)
2727 {
2728 	TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkBuffer dstBuffer = %p, uint32_t regionCount = %d, const VkBufferImageCopy* pRegions = %p)",
2729 	      commandBuffer, static_cast<void *>(srcImage), int(srcImageLayout), static_cast<void *>(dstBuffer), int(regionCount), pRegions);
2730 
2731 	vk::Cast(commandBuffer)->copyImageToBuffer(vk::CopyImageToBufferInfo(srcImage, srcImageLayout, dstBuffer, regionCount, pRegions));
2732 }
2733 
vkCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer,const VkCopyImageToBufferInfo2KHR * pCopyImageToBufferInfo)2734 VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2KHR(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2KHR *pCopyImageToBufferInfo)
2735 {
2736 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo = %p)",
2737 	      commandBuffer, pCopyImageToBufferInfo);
2738 
2739 	vk::Cast(commandBuffer)->copyImageToBuffer(*pCopyImageToBufferInfo);
2740 }
2741 
vkCmdUpdateBuffer(VkCommandBuffer commandBuffer,VkBuffer dstBuffer,VkDeviceSize dstOffset,VkDeviceSize dataSize,const void * pData)2742 VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData)
2743 {
2744 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize dataSize = %d, const void* pData = %p)",
2745 	      commandBuffer, static_cast<void *>(dstBuffer), int(dstOffset), int(dataSize), pData);
2746 
2747 	vk::Cast(commandBuffer)->updateBuffer(vk::Cast(dstBuffer), dstOffset, dataSize, pData);
2748 }
2749 
vkCmdFillBuffer(VkCommandBuffer commandBuffer,VkBuffer dstBuffer,VkDeviceSize dstOffset,VkDeviceSize size,uint32_t data)2750 VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data)
2751 {
2752 	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize size = %d, uint32_t data = %d)",
2753 	      commandBuffer, static_cast<void *>(dstBuffer), int(dstOffset), int(size), data);
2754 
2755 	vk::Cast(commandBuffer)->fillBuffer(vk::Cast(dstBuffer), dstOffset, size, data);
2756 }
2757 
vkCmdClearColorImage(VkCommandBuffer commandBuffer,VkImage image,VkImageLayout imageLayout,const VkClearColorValue * pColor,uint32_t rangeCount,const VkImageSubresourceRange * pRanges)2758 VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue *pColor, uint32_t rangeCount, const VkImageSubresourceRange *pRanges)
2759 {
2760 	TRACE("(VkCommandBuffer commandBuffer = %p, VkImage image = %p, VkImageLayout imageLayout = %d, const VkClearColorValue* pColor = %p, uint32_t rangeCount = %d, const VkImageSubresourceRange* pRanges = %p)",
2761 	      commandBuffer, static_cast<void *>(image), int(imageLayout), pColor, int(rangeCount), pRanges);
2762 
2763 	vk::Cast(commandBuffer)->clearColorImage(vk::Cast(image), imageLayout, pColor, rangeCount, pRanges);
2764 }
2765 
vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer,VkImage image,VkImageLayout imageLayout,const VkClearDepthStencilValue * pDepthStencil,uint32_t rangeCount,const VkImageSubresourceRange * pRanges)2766 VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange *pRanges)
2767 {
2768 	TRACE("(VkCommandBuffer commandBuffer = %p, VkImage image = %p, VkImageLayout imageLayout = %d, const VkClearDepthStencilValue* pDepthStencil = %p, uint32_t rangeCount = %d, const VkImageSubresourceRange* pRanges = %p)",
2769 	      commandBuffer, static_cast<void *>(image), int(imageLayout), pDepthStencil, int(rangeCount), pRanges);
2770 
2771 	vk::Cast(commandBuffer)->clearDepthStencilImage(vk::Cast(image), imageLayout, pDepthStencil, rangeCount, pRanges);
2772 }
2773 
vkCmdClearAttachments(VkCommandBuffer commandBuffer,uint32_t attachmentCount,const VkClearAttachment * pAttachments,uint32_t rectCount,const VkClearRect * pRects)2774 VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment *pAttachments, uint32_t rectCount, const VkClearRect *pRects)
2775 {
2776 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t attachmentCount = %d, const VkClearAttachment* pAttachments = %p, uint32_t rectCount = %d, const VkClearRect* pRects = %p)",
2777 	      commandBuffer, int(attachmentCount), pAttachments, int(rectCount), pRects);
2778 
2779 	vk::Cast(commandBuffer)->clearAttachments(attachmentCount, pAttachments, rectCount, pRects);
2780 }
2781 
vkCmdResolveImage(VkCommandBuffer commandBuffer,VkImage srcImage,VkImageLayout srcImageLayout,VkImage dstImage,VkImageLayout dstImageLayout,uint32_t regionCount,const VkImageResolve * pRegions)2782 VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve *pRegions)
2783 {
2784 	TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageResolve* pRegions = %p)",
2785 	      commandBuffer, static_cast<void *>(srcImage), int(srcImageLayout), static_cast<void *>(dstImage), int(dstImageLayout), regionCount, pRegions);
2786 
2787 	vk::Cast(commandBuffer)->resolveImage(vk::ResolveImageInfo(srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions));
2788 }
2789 
vkCmdResolveImage2KHR(VkCommandBuffer commandBuffer,const VkResolveImageInfo2KHR * pResolveImageInfo)2790 VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2KHR(VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR *pResolveImageInfo)
2791 {
2792 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkResolveImageInfo2KHR* pResolveImageInfo = %p)",
2793 	      commandBuffer, pResolveImageInfo);
2794 
2795 	vk::Cast(commandBuffer)->resolveImage(*pResolveImageInfo);
2796 }
2797 
vkCmdSetEvent(VkCommandBuffer commandBuffer,VkEvent event,VkPipelineStageFlags stageMask)2798 VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
2799 {
2800 	TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, VkPipelineStageFlags stageMask = %d)",
2801 	      commandBuffer, static_cast<void *>(event), int(stageMask));
2802 
2803 	vk::Cast(commandBuffer)->setEvent(vk::Cast(event), stageMask);
2804 }
2805 
vkCmdResetEvent(VkCommandBuffer commandBuffer,VkEvent event,VkPipelineStageFlags stageMask)2806 VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
2807 {
2808 	TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, VkPipelineStageFlags stageMask = %d)",
2809 	      commandBuffer, static_cast<void *>(event), int(stageMask));
2810 
2811 	vk::Cast(commandBuffer)->resetEvent(vk::Cast(event), stageMask);
2812 }
2813 
vkCmdWaitEvents(VkCommandBuffer commandBuffer,uint32_t eventCount,const VkEvent * pEvents,VkPipelineStageFlags srcStageMask,VkPipelineStageFlags dstStageMask,uint32_t memoryBarrierCount,const VkMemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VkBufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrier * pImageMemoryBarriers)2814 VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers)
2815 {
2816 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t eventCount = %d, const VkEvent* pEvents = %p, VkPipelineStageFlags srcStageMask = 0x%x, VkPipelineStageFlags dstStageMask = 0x%x, uint32_t memoryBarrierCount = %d, const VkMemoryBarrier* pMemoryBarriers = %p, uint32_t bufferMemoryBarrierCount = %d, const VkBufferMemoryBarrier* pBufferMemoryBarriers = %p, uint32_t imageMemoryBarrierCount = %d, const VkImageMemoryBarrier* pImageMemoryBarriers = %p)",
2817 	      commandBuffer, int(eventCount), pEvents, int(srcStageMask), int(dstStageMask), int(memoryBarrierCount), pMemoryBarriers, int(bufferMemoryBarrierCount), pBufferMemoryBarriers, int(imageMemoryBarrierCount), pImageMemoryBarriers);
2818 
2819 	vk::Cast(commandBuffer)->waitEvents(eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2820 }
2821 
vkCmdPipelineBarrier(VkCommandBuffer commandBuffer,VkPipelineStageFlags srcStageMask,VkPipelineStageFlags dstStageMask,VkDependencyFlags dependencyFlags,uint32_t memoryBarrierCount,const VkMemoryBarrier * pMemoryBarriers,uint32_t bufferMemoryBarrierCount,const VkBufferMemoryBarrier * pBufferMemoryBarriers,uint32_t imageMemoryBarrierCount,const VkImageMemoryBarrier * pImageMemoryBarriers)2822 VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers)
2823 {
2824 	TRACE(
2825 	    "(VkCommandBuffer commandBuffer = %p, VkPipelineStageFlags srcStageMask = 0x%x, VkPipelineStageFlags dstStageMask = 0x%x, VkDependencyFlags dependencyFlags = %d, uint32_t memoryBarrierCount = %d, onst VkMemoryBarrier* pMemoryBarriers = %p,"
2826 	    " uint32_t bufferMemoryBarrierCount = %d, const VkBufferMemoryBarrier* pBufferMemoryBarriers = %p, uint32_t imageMemoryBarrierCount = %d, const VkImageMemoryBarrier* pImageMemoryBarriers = %p)",
2827 	    commandBuffer, int(srcStageMask), int(dstStageMask), dependencyFlags, int(memoryBarrierCount), pMemoryBarriers, int(bufferMemoryBarrierCount), pBufferMemoryBarriers, int(imageMemoryBarrierCount), pImageMemoryBarriers);
2828 
2829 	vk::Cast(commandBuffer)->pipelineBarrier(srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
2830 }
2831 
vkCmdBeginQuery(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t query,VkQueryControlFlags flags)2832 VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags)
2833 {
2834 	TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t query = %d, VkQueryControlFlags flags = %d)",
2835 	      commandBuffer, static_cast<void *>(queryPool), query, int(flags));
2836 
2837 	vk::Cast(commandBuffer)->beginQuery(vk::Cast(queryPool), query, flags);
2838 }
2839 
vkCmdEndQuery(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t query)2840 VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query)
2841 {
2842 	TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t query = %d)",
2843 	      commandBuffer, static_cast<void *>(queryPool), int(query));
2844 
2845 	vk::Cast(commandBuffer)->endQuery(vk::Cast(queryPool), query);
2846 }
2847 
vkCmdResetQueryPool(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount)2848 VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount)
2849 {
2850 	TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d)",
2851 	      commandBuffer, static_cast<void *>(queryPool), int(firstQuery), int(queryCount));
2852 
2853 	vk::Cast(commandBuffer)->resetQueryPool(vk::Cast(queryPool), firstQuery, queryCount);
2854 }
2855 
vkCmdWriteTimestamp(VkCommandBuffer commandBuffer,VkPipelineStageFlagBits pipelineStage,VkQueryPool queryPool,uint32_t query)2856 VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query)
2857 {
2858 	TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineStageFlagBits pipelineStage = %d, VkQueryPool queryPool = %p, uint32_t query = %d)",
2859 	      commandBuffer, int(pipelineStage), static_cast<void *>(queryPool), int(query));
2860 
2861 	vk::Cast(commandBuffer)->writeTimestamp(pipelineStage, vk::Cast(queryPool), query);
2862 }
2863 
vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount,VkBuffer dstBuffer,VkDeviceSize dstOffset,VkDeviceSize stride,VkQueryResultFlags flags)2864 VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags)
2865 {
2866 	TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize stride = %d, VkQueryResultFlags flags = %d)",
2867 	      commandBuffer, static_cast<void *>(queryPool), int(firstQuery), int(queryCount), static_cast<void *>(dstBuffer), int(dstOffset), int(stride), int(flags));
2868 
2869 	vk::Cast(commandBuffer)->copyQueryPoolResults(vk::Cast(queryPool), firstQuery, queryCount, vk::Cast(dstBuffer), dstOffset, stride, flags);
2870 }
2871 
vkCmdPushConstants(VkCommandBuffer commandBuffer,VkPipelineLayout layout,VkShaderStageFlags stageFlags,uint32_t offset,uint32_t size,const void * pValues)2872 VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void *pValues)
2873 {
2874 	TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineLayout layout = %p, VkShaderStageFlags stageFlags = %d, uint32_t offset = %d, uint32_t size = %d, const void* pValues = %p)",
2875 	      commandBuffer, static_cast<void *>(layout), stageFlags, offset, size, pValues);
2876 
2877 	vk::Cast(commandBuffer)->pushConstants(vk::Cast(layout), stageFlags, offset, size, pValues);
2878 }
2879 
vkCmdBeginRenderPass(VkCommandBuffer commandBuffer,const VkRenderPassBeginInfo * pRenderPassBegin,VkSubpassContents contents)2880 VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, VkSubpassContents contents)
2881 {
2882 	VkSubpassBeginInfo subpassBeginInfo = { VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, nullptr, contents };
2883 	vkCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, &subpassBeginInfo);
2884 }
2885 
// Records a render pass begin command. Walks the pNext chain of pRenderPassBegin
// for the extension structures this implementation recognizes, then forwards the
// render pass, framebuffer, render area and clear values to the command buffer.
VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, const VkSubpassBeginInfoKHR *pSubpassBeginInfo)
{
	TRACE("(VkCommandBuffer commandBuffer = %p, const VkRenderPassBeginInfo* pRenderPassBegin = %p, const VkSubpassBeginInfoKHR* pSubpassBeginInfo = %p)",
	      commandBuffer, pRenderPassBegin, pSubpassBeginInfo);

	const VkBaseInStructure *renderPassBeginInfo = reinterpret_cast<const VkBaseInStructure *>(pRenderPassBegin->pNext);
	// Attachment info for imageless framebuffers; stays null if the caller did not chain one.
	const VkRenderPassAttachmentBeginInfo *attachmentBeginInfo = nullptr;
	while(renderPassBeginInfo)
	{
		switch(renderPassBeginInfo->sType)
		{
		case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO:
			// This extension controls which render area is used on which physical device,
			// in order to distribute rendering between multiple physical devices.
			// SwiftShader only has a single physical device, so this extension does nothing in this case.
			break;
		case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO:
			attachmentBeginInfo = reinterpret_cast<const VkRenderPassAttachmentBeginInfo *>(renderPassBeginInfo);
			break;
		case VK_STRUCTURE_TYPE_MAX_ENUM:
			// dEQP tests that this value is ignored.
			break;
		default:
			UNSUPPORTED("pRenderPassBegin->pNext sType = %s", vk::Stringify(renderPassBeginInfo->sType).c_str());
			break;
		}

		renderPassBeginInfo = renderPassBeginInfo->pNext;
	}

	vk::Cast(commandBuffer)->beginRenderPass(vk::Cast(pRenderPassBegin->renderPass), vk::Cast(pRenderPassBegin->framebuffer), pRenderPassBegin->renderArea, pRenderPassBegin->clearValueCount, pRenderPassBegin->pClearValues, pSubpassBeginInfo->contents, attachmentBeginInfo);
}
2918 
vkCmdNextSubpass(VkCommandBuffer commandBuffer,VkSubpassContents contents)2919 VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents)
2920 {
2921 	TRACE("(VkCommandBuffer commandBuffer = %p, VkSubpassContents contents = %d)",
2922 	      commandBuffer, contents);
2923 
2924 	vk::Cast(commandBuffer)->nextSubpass(contents);
2925 }
2926 
vkCmdNextSubpass2(VkCommandBuffer commandBuffer,const VkSubpassBeginInfoKHR * pSubpassBeginInfo,const VkSubpassEndInfoKHR * pSubpassEndInfo)2927 VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo, const VkSubpassEndInfoKHR *pSubpassEndInfo)
2928 {
2929 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkSubpassBeginInfoKHR* pSubpassBeginInfo = %p, const VkSubpassEndInfoKHR* pSubpassEndInfo = %p)",
2930 	      commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
2931 
2932 	vk::Cast(commandBuffer)->nextSubpass(pSubpassBeginInfo->contents);
2933 }
2934 
vkCmdEndRenderPass(VkCommandBuffer commandBuffer)2935 VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass(VkCommandBuffer commandBuffer)
2936 {
2937 	TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);
2938 
2939 	vk::Cast(commandBuffer)->endRenderPass();
2940 }
2941 
vkCmdEndRenderPass2(VkCommandBuffer commandBuffer,const VkSubpassEndInfoKHR * pSubpassEndInfo)2942 VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo)
2943 {
2944 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkSubpassEndInfoKHR* pSubpassEndInfo = %p)", commandBuffer, pSubpassEndInfo);
2945 
2946 	vk::Cast(commandBuffer)->endRenderPass();
2947 }
2948 
vkCmdExecuteCommands(VkCommandBuffer commandBuffer,uint32_t commandBufferCount,const VkCommandBuffer * pCommandBuffers)2949 VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers)
2950 {
2951 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t commandBufferCount = %d, const VkCommandBuffer* pCommandBuffers = %p)",
2952 	      commandBuffer, commandBufferCount, pCommandBuffers);
2953 
2954 	vk::Cast(commandBuffer)->executeCommands(commandBufferCount, pCommandBuffers);
2955 }
2956 
vkEnumerateInstanceVersion(uint32_t * pApiVersion)2957 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion(uint32_t *pApiVersion)
2958 {
2959 	TRACE("(uint32_t* pApiVersion = %p)", pApiVersion);
2960 	*pApiVersion = vk::API_VERSION;
2961 	return VK_SUCCESS;
2962 }
2963 
// Binds device memory to one or more buffers.
// Validates every bind info first, then performs all bindings, so that no
// binding takes effect if any buffer/memory pair is incompatible.
VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos)
{
	TRACE("(VkDevice device = %p, uint32_t bindInfoCount = %d, const VkBindBufferMemoryInfo* pBindInfos = %p)",
	      device, bindInfoCount, pBindInfos);

	// First pass: validation only. No extension structures are supported on the
	// pNext chain of VkBindBufferMemoryInfo.
	for(uint32_t i = 0; i < bindInfoCount; i++)
	{
		auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pBindInfos[i].pNext);
		while(extInfo)
		{
			UNSUPPORTED("pBindInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
			extInfo = extInfo->pNext;
		}

		if(!vk::Cast(pBindInfos[i].buffer)->canBindToMemory(vk::Cast(pBindInfos[i].memory)))
		{
			UNSUPPORTED("vkBindBufferMemory2 with invalid external memory");
			return VK_ERROR_INVALID_EXTERNAL_HANDLE;
		}
	}

	// Second pass: perform the actual bindings.
	for(uint32_t i = 0; i < bindInfoCount; i++)
	{
		vk::Cast(pBindInfos[i].buffer)->bind(vk::Cast(pBindInfos[i].memory), pBindInfos[i].memoryOffset);
	}

	return VK_SUCCESS;
}
2992 
// Binds device memory to one or more images.
// Validates all bind infos first, then performs the bindings; a swapchain bind
// info (non-Android) redirects the binding to the swapchain image's own memory.
VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo *pBindInfos)
{
	TRACE("(VkDevice device = %p, uint32_t bindInfoCount = %d, const VkBindImageMemoryInfo* pBindInfos = %p)",
	      device, bindInfoCount, pBindInfos);

	// First pass: validation only, so no binding takes effect on failure.
	for(uint32_t i = 0; i < bindInfoCount; i++)
	{
		if(!vk::Cast(pBindInfos[i].image)->canBindToMemory(vk::Cast(pBindInfos[i].memory)))
		{
			UNSUPPORTED("vkBindImageMemory2 with invalid external memory");
			return VK_ERROR_OUT_OF_DEVICE_MEMORY;
		}
	}

	// Second pass: perform the actual bindings.
	for(uint32_t i = 0; i < bindInfoCount; i++)
	{
		vk::DeviceMemory *memory = vk::Cast(pBindInfos[i].memory);
		VkDeviceSize offset = pBindInfos[i].memoryOffset;

		auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pBindInfos[i].pNext);
		while(extInfo)
		{
			switch(extInfo->sType)
			{
			case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO:
				/* Do nothing */
				break;

#ifndef __ANDROID__
			case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR:
				{
					// Bind to the memory backing the indicated swapchain image instead
					// of the memory/offset supplied in the bind info.
					auto swapchainInfo = reinterpret_cast<VkBindImageMemorySwapchainInfoKHR const *>(extInfo);
					memory = vk::Cast(swapchainInfo->swapchain)->getImage(swapchainInfo->imageIndex).getImageMemory();
					offset = 0;
				}
				break;
#endif

			default:
				UNSUPPORTED("pBindInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
				break;
			}
			extInfo = extInfo->pNext;
		}

		vk::Cast(pBindInfos[i].image)->bind(memory, offset);
	}

	return VK_SUCCESS;
}
3043 
// Queries peer memory features between two physical devices in a device group.
// SwiftShader exposes exactly one physical device, and the spec forbids
// localDeviceIndex == remoteDeviceIndex, so a valid call can never reach this.
VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeatures(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
{
	TRACE("(VkDevice device = %p, uint32_t heapIndex = %d, uint32_t localDeviceIndex = %d, uint32_t remoteDeviceIndex = %d, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures = %p)",
	      device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);

	ASSERT(localDeviceIndex != remoteDeviceIndex);                 // "localDeviceIndex must not equal remoteDeviceIndex"
	UNSUPPORTED("remoteDeviceIndex: %d", int(remoteDeviceIndex));  // Only one physical device is supported, and since the device indexes can't be equal, this should never be called.
}
3052 
vkCmdSetDeviceMask(VkCommandBuffer commandBuffer,uint32_t deviceMask)3053 VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask)
3054 {
3055 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t deviceMask = %d", commandBuffer, deviceMask);
3056 
3057 	vk::Cast(commandBuffer)->setDeviceMask(deviceMask);
3058 }
3059 
vkCmdDispatchBase(VkCommandBuffer commandBuffer,uint32_t baseGroupX,uint32_t baseGroupY,uint32_t baseGroupZ,uint32_t groupCountX,uint32_t groupCountY,uint32_t groupCountZ)3060 VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ)
3061 {
3062 	TRACE("(VkCommandBuffer commandBuffer = %p, baseGroupX = %u, baseGroupY = %u, baseGroupZ = %u, groupCountX = %u, groupCountY = %u, groupCountZ = %u)",
3063 	      commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
3064 
3065 	vk::Cast(commandBuffer)->dispatchBase(baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
3066 }
3067 
vkResetQueryPool(VkDevice device,VkQueryPool queryPool,uint32_t firstQuery,uint32_t queryCount)3068 VKAPI_ATTR void VKAPI_CALL vkResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount)
3069 {
3070 	TRACE("(VkDevice device = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d)",
3071 	      device, static_cast<void *>(queryPool), firstQuery, queryCount);
3072 	vk::Cast(queryPool)->reset(firstQuery, queryCount);
3073 }
3074 
vkEnumeratePhysicalDeviceGroups(VkInstance instance,uint32_t * pPhysicalDeviceGroupCount,VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties)3075 VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
3076 {
3077 	TRACE("(VkInstance instance = %p, uint32_t* pPhysicalDeviceGroupCount = %p, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties = %p)",
3078 	      instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
3079 
3080 	return vk::Cast(instance)->getPhysicalDeviceGroups(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
3081 }
3082 
// Extended query of an image's memory requirements. Rejects unrecognized input
// extension structures, fills any chained VkMemoryDedicatedRequirements, then
// delegates the core query to vkGetImageMemoryRequirements.
VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
{
	TRACE("(VkDevice device = %p, const VkImageMemoryRequirementsInfo2* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
	      device, pInfo, pMemoryRequirements);

	// No extension structures are supported on the input pNext chain.
	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pInfo->pNext);
	while(extInfo)
	{
		UNSUPPORTED("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
		extInfo = extInfo->pNext;
	}

	// Fill in any recognized output extension structures.
	VkBaseOutStructure *extensionRequirements = reinterpret_cast<VkBaseOutStructure *>(pMemoryRequirements->pNext);
	while(extensionRequirements)
	{
		switch(extensionRequirements->sType)
		{
		case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
			{
				auto requirements = reinterpret_cast<VkMemoryDedicatedRequirements *>(extensionRequirements);
				vk::Cast(device)->getRequirements(requirements);
#if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
				// AHardwareBuffer-backed images must use a dedicated allocation.
				if(vk::Cast(pInfo->image)->getSupportedExternalMemoryHandleTypes() == VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
				{
					requirements->prefersDedicatedAllocation = VK_TRUE;
					requirements->requiresDedicatedAllocation = VK_TRUE;
				}
#endif
			}
			break;
		default:
			UNSUPPORTED("pMemoryRequirements->pNext sType = %s", vk::Stringify(extensionRequirements->sType).c_str());
			break;
		}

		extensionRequirements = extensionRequirements->pNext;
	}

	vkGetImageMemoryRequirements(device, pInfo->image, &(pMemoryRequirements->memoryRequirements));
}
3123 
vkGetBufferMemoryRequirements2(VkDevice device,const VkBufferMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)3124 VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2 *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
3125 {
3126 	TRACE("(VkDevice device = %p, const VkBufferMemoryRequirementsInfo2* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
3127 	      device, pInfo, pMemoryRequirements);
3128 
3129 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pInfo->pNext);
3130 	while(extInfo)
3131 	{
3132 		UNSUPPORTED("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3133 		extInfo = extInfo->pNext;
3134 	}
3135 
3136 	VkBaseOutStructure *extensionRequirements = reinterpret_cast<VkBaseOutStructure *>(pMemoryRequirements->pNext);
3137 	while(extensionRequirements)
3138 	{
3139 		switch(extensionRequirements->sType)
3140 		{
3141 		case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
3142 			{
3143 				auto requirements = reinterpret_cast<VkMemoryDedicatedRequirements *>(extensionRequirements);
3144 				vk::Cast(device)->getRequirements(requirements);
3145 			}
3146 			break;
3147 		default:
3148 			UNSUPPORTED("pMemoryRequirements->pNext sType = %s", vk::Stringify(extensionRequirements->sType).c_str());
3149 			break;
3150 		}
3151 
3152 		extensionRequirements = extensionRequirements->pNext;
3153 	}
3154 
3155 	vkGetBufferMemoryRequirements(device, pInfo->buffer, &(pMemoryRequirements->memoryRequirements));
3156 }
3157 
vkGetImageSparseMemoryRequirements2(VkDevice device,const VkImageSparseMemoryRequirementsInfo2 * pInfo,uint32_t * pSparseMemoryRequirementCount,VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements)3158 VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
3159 {
3160 	TRACE("(VkDevice device = %p, const VkImageSparseMemoryRequirementsInfo2* pInfo = %p, uint32_t* pSparseMemoryRequirementCount = %p, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements = %p)",
3161 	      device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
3162 
3163 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pInfo->pNext);
3164 	while(extInfo)
3165 	{
3166 		UNSUPPORTED("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3167 		extInfo = extInfo->pNext;
3168 	}
3169 
3170 	auto extensionRequirements = reinterpret_cast<VkBaseInStructure const *>(pSparseMemoryRequirements->pNext);
3171 	while(extensionRequirements)
3172 	{
3173 		UNSUPPORTED("pSparseMemoryRequirements->pNext sType = %s", vk::Stringify(extensionRequirements->sType).c_str());
3174 		extensionRequirements = extensionRequirements->pNext;
3175 	}
3176 
3177 	// The 'sparseBinding' feature is not supported, so images can not be created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag.
3178 	// "If the image was not created with VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT then pSparseMemoryRequirementCount will be set to zero and pSparseMemoryRequirements will not be written to."
3179 	*pSparseMemoryRequirementCount = 0;
3180 }
3181 
vkGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,VkPhysicalDeviceFeatures2 * pFeatures)3182 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 *pFeatures)
3183 {
3184 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceFeatures2* pFeatures = %p)", physicalDevice, pFeatures);
3185 
3186 	vk::Cast(physicalDevice)->getFeatures2(pFeatures);
3187 }
3188 
// Extended query of the physical device's properties. Walks pProperties->pNext,
// dispatching each recognized extension structure to the physical device object,
// then fills the core VkPhysicalDeviceProperties via vkGetPhysicalDeviceProperties.
VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 *pProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceProperties2* pProperties = %p)", physicalDevice, pProperties);

	VkBaseOutStructure *extensionProperties = reinterpret_cast<VkBaseOutStructure *>(pProperties->pNext);
	while(extensionProperties)
	{
		// Casting to a long since some structures, such as
		// VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID and
		// VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT
		// are not enumerated in the official Vulkan header
		switch((long)(extensionProperties->sType))
		{
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceIDProperties *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceMaintenance3Properties *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceMultiviewProperties *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES:
			{
				auto properties = reinterpret_cast<VkPhysicalDevicePointClippingProperties *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceSubgroupProperties *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT:
			// Explicitly ignored, since VK_EXT_sample_locations is not supported
			ASSERT(!hasDeviceExtension(VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME));
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceDriverProperties *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
#ifdef __ANDROID__
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID:
			{
				auto properties = reinterpret_cast<VkPhysicalDevicePresentationPropertiesANDROID *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
#endif
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceProvokingVertexPropertiesEXT *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceFloatControlsProperties *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceVulkan11Properties *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceVulkan12Properties *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceCustomBorderColorPropertiesEXT *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT:
			{
				auto properties = reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR:
			break;
		default:
			// "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extenions]"
			UNSUPPORTED("pProperties->pNext sType = %s", vk::Stringify(extensionProperties->sType).c_str());
			break;
		}

		extensionProperties = extensionProperties->pNext;
	}

	vkGetPhysicalDeviceProperties(physicalDevice, &(pProperties->properties));
}
3341 
vkGetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice,VkFormat format,VkFormatProperties2 * pFormatProperties)3342 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2 *pFormatProperties)
3343 {
3344 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkFormatProperties2* pFormatProperties = %p)",
3345 	      physicalDevice, format, pFormatProperties);
3346 
3347 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pFormatProperties->pNext);
3348 	while(extInfo)
3349 	{
3350 		UNSUPPORTED("pFormatProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3351 		extInfo = extInfo->pNext;
3352 	}
3353 
3354 	vkGetPhysicalDeviceFormatProperties(physicalDevice, format, &(pFormatProperties->formatProperties));
3355 }
3356 
checkFormatUsage(VkImageUsageFlags usage,VkFormatFeatureFlags features)3357 static bool checkFormatUsage(VkImageUsageFlags usage, VkFormatFeatureFlags features)
3358 {
3359 	// Check for usage conflict with features
3360 	if((usage & VK_IMAGE_USAGE_SAMPLED_BIT) && !(features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT))
3361 	{
3362 		return false;
3363 	}
3364 
3365 	if((usage & VK_IMAGE_USAGE_STORAGE_BIT) && !(features & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT))
3366 	{
3367 		return false;
3368 	}
3369 
3370 	if((usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
3371 	{
3372 		return false;
3373 	}
3374 
3375 	if((usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))
3376 	{
3377 		return false;
3378 	}
3379 
3380 	if((usage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT) && !(features & (VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT | VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)))
3381 	{
3382 		return false;
3383 	}
3384 
3385 	if((usage & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT))
3386 	{
3387 		return false;
3388 	}
3389 
3390 	if((usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_DST_BIT))
3391 	{
3392 		return false;
3393 	}
3394 
3395 	return true;
3396 }
3397 
// Reports whether an image with the given format/type/tiling/usage/flags combination
// can be created, and if so, its capability limits. On unsupported combinations the
// base output structure is zeroed and VK_ERROR_FORMAT_NOT_SUPPORTED is returned.
VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo, VkImageFormatProperties2 *pImageFormatProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo = %p, VkImageFormatProperties2* pImageFormatProperties = %p)",
	      physicalDevice, pImageFormatInfo, pImageFormatProperties);

	// "If the combination of parameters to vkGetPhysicalDeviceImageFormatProperties is not supported by the implementation
	//  for use in vkCreateImage, then all members of VkImageFormatProperties will be filled with zero."
	memset(&pImageFormatProperties->imageFormatProperties, 0, sizeof(VkImageFormatProperties));

	// First pass: walk the input pNext chain to pick up extension structures that
	// affect the result (external memory handle type, separate stencil usage).
	const VkBaseInStructure *extensionFormatInfo = reinterpret_cast<const VkBaseInStructure *>(pImageFormatInfo->pNext);

	const VkExternalMemoryHandleTypeFlagBits *handleType = nullptr;
	VkImageUsageFlags stencilUsage = 0;
	while(extensionFormatInfo)
	{
		switch(extensionFormatInfo->sType)
		{
		case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
			{
				// Per the Vulkan spec on VkImageFormatListcreateInfo:
				//     "If the pNext chain of VkImageCreateInfo includes a
				//      VkImageFormatListCreateInfo structure, then that
				//      structure contains a list of all formats that can be
				//      used when creating views of this image"
				// This limitation does not affect SwiftShader's behavior and
				// the Vulkan Validation Layers can detect Views created with a
				// format which is not included in that list.
			}
			break;
		case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
			{
				// Separate usage flags for the stencil aspect; validated further below.
				const VkImageStencilUsageCreateInfo *stencilUsageInfo = reinterpret_cast<const VkImageStencilUsageCreateInfo *>(extensionFormatInfo);
				stencilUsage = stencilUsageInfo->stencilUsage;
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO:
			{
				// Remembered by pointer; consumed when filling a chained
				// VkExternalImageFormatProperties in the output loop below.
				const VkPhysicalDeviceExternalImageFormatInfo *imageFormatInfo = reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo *>(extensionFormatInfo);
				handleType = &(imageFormatInfo->handleType);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT:
			{
				// Explicitly ignored, since VK_EXT_image_drm_format_modifier is not supported
				ASSERT(!hasDeviceExtension(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME));
			}
			break;
		default:
			UNSUPPORTED("pImageFormatInfo->pNext sType = %s", vk::Stringify(extensionFormatInfo->sType).c_str());
			break;
		}

		extensionFormatInfo = extensionFormatInfo->pNext;
	}

	// Second pass: fill in any recognized extension structures chained to the output.
	VkBaseOutStructure *extensionProperties = reinterpret_cast<VkBaseOutStructure *>(pImageFormatProperties->pNext);

#ifdef __ANDROID__
	bool hasAHBUsage = false;
#endif

	while(extensionProperties)
	{
		switch(extensionProperties->sType)
		{
		case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES:
			{
				// handleType may still be nullptr here if no
				// VkPhysicalDeviceExternalImageFormatInfo was chained to the input.
				auto properties = reinterpret_cast<VkExternalImageFormatProperties *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(handleType, properties);
			}
			break;
		case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES:
			{
				auto properties = reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(properties);
			}
			break;
		case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD:
			{
				// Explicitly ignored, since VK_AMD_texture_gather_bias_lod is not supported
				ASSERT(!hasDeviceExtension(VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME));
			}
			break;
#ifdef __ANDROID__
		case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
			{
				auto properties = reinterpret_cast<VkAndroidHardwareBufferUsageANDROID *>(extensionProperties);
				vk::Cast(physicalDevice)->getProperties(pImageFormatInfo, properties);
				hasAHBUsage = true;
			}
			break;
#endif
		default:
			UNSUPPORTED("pImageFormatProperties->pNext sType = %s", vk::Stringify(extensionProperties->sType).c_str());
			break;
		}

		extensionProperties = extensionProperties->pNext;
	}

	VkFormat format = pImageFormatInfo->format;
	VkImageType type = pImageFormatInfo->type;
	VkImageTiling tiling = pImageFormatInfo->tiling;
	VkImageUsageFlags usage = pImageFormatInfo->usage;
	VkImageCreateFlags flags = pImageFormatInfo->flags;

	VkFormatProperties properties;
	vk::PhysicalDevice::GetFormatProperties(format, &properties);

	// Select the feature set matching the requested tiling mode.
	VkFormatFeatureFlags features;
	switch(tiling)
	{
	case VK_IMAGE_TILING_LINEAR:
		features = properties.linearTilingFeatures;
		break;

	case VK_IMAGE_TILING_OPTIMAL:
		features = properties.optimalTilingFeatures;
		break;

	default:
		UNSUPPORTED("VkImageTiling %d", int(tiling));
		features = 0;
	}

	// A format with no features at all is unsupported for this tiling.
	if(features == 0)
	{
		return VK_ERROR_FORMAT_NOT_SUPPORTED;
	}

	// Reject any usage or separate stencil usage that is not compatible with the specified format.
	if(!checkFormatUsage(usage, features))
	{
		return VK_ERROR_FORMAT_NOT_SUPPORTED;
	}
	// If stencilUsage is 0 then no separate usage was provided and it takes on the same value as usage,
	// which has already been checked. So only check non-zero stencilUsage.
	if(stencilUsage != 0 && !checkFormatUsage(stencilUsage, features))
	{
		return VK_ERROR_FORMAT_NOT_SUPPORTED;
	}

	// Any usage bit outside this set indicates a caller bug (or a newer usage
	// flag this implementation does not yet know about).
	auto allRecognizedUsageBits = VK_IMAGE_USAGE_SAMPLED_BIT |
	                              VK_IMAGE_USAGE_STORAGE_BIT |
	                              VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
	                              VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
	                              VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT |
	                              VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
	                              VK_IMAGE_USAGE_TRANSFER_DST_BIT |
	                              VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
	ASSERT(!(usage & ~(allRecognizedUsageBits)));

	if(usage & VK_IMAGE_USAGE_SAMPLED_BIT)
	{
		if(tiling == VK_IMAGE_TILING_LINEAR)
		{
			// TODO(b/171299814): Compressed formats and cube maps are not supported for sampling using VK_IMAGE_TILING_LINEAR; otherwise, sampling
			// in linear tiling is always supported as long as it can be sampled when using VK_IMAGE_TILING_OPTIMAL.
			if(!(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) ||
			   vk::Format(format).isCompressed() ||
			   (flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT))
			{
				return VK_ERROR_FORMAT_NOT_SUPPORTED;
			}
		}
		else if(!(features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT))
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}
	}

	// "Images created with tiling equal to VK_IMAGE_TILING_LINEAR have further restrictions on their limits and capabilities
	//  compared to images created with tiling equal to VK_IMAGE_TILING_OPTIMAL."
	if(tiling == VK_IMAGE_TILING_LINEAR)
	{
		if(type != VK_IMAGE_TYPE_2D)
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}

		if(vk::Format(format).isDepth() || vk::Format(format).isStencil())
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}
	}

	// "Images created with a format from one of those listed in Formats requiring sampler Y'CBCR conversion for VK_IMAGE_ASPECT_COLOR_BIT image views
	//  have further restrictions on their limits and capabilities compared to images created with other formats."
	if(vk::Format(format).isYcbcrFormat())
	{
		if(type != VK_IMAGE_TYPE_2D)
		{
			return VK_ERROR_FORMAT_NOT_SUPPORTED;
		}
	}

	// All checks passed; query the actual limits for this configuration.
	vk::Cast(physicalDevice)->getImageFormatProperties(format, type, tiling, usage, flags, &pImageFormatProperties->imageFormatProperties);

#ifdef __ANDROID__
	if(hasAHBUsage)
	{
		// AHardwareBuffer_lock may only be called with a single layer.
		pImageFormatProperties->imageFormatProperties.maxArrayLayers = 1;
		pImageFormatProperties->imageFormatProperties.maxMipLevels = 1;
	}
#endif

	return VK_SUCCESS;
}
3607 
vkGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,uint32_t * pQueueFamilyPropertyCount,VkQueueFamilyProperties2 * pQueueFamilyProperties)3608 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties)
3609 {
3610 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pQueueFamilyPropertyCount = %p, VkQueueFamilyProperties2* pQueueFamilyProperties = %p)",
3611 	      physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
3612 
3613 	if(pQueueFamilyProperties)
3614 	{
3615 		auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pQueueFamilyProperties->pNext);
3616 		while(extInfo)
3617 		{
3618 			UNSUPPORTED("pQueueFamilyProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3619 			extInfo = extInfo->pNext;
3620 		}
3621 	}
3622 
3623 	if(!pQueueFamilyProperties)
3624 	{
3625 		*pQueueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();
3626 	}
3627 	else
3628 	{
3629 		vk::Cast(physicalDevice)->getQueueFamilyProperties(*pQueueFamilyPropertyCount, pQueueFamilyProperties);
3630 	}
3631 }
3632 
vkGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice,VkPhysicalDeviceMemoryProperties2 * pMemoryProperties)3633 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
3634 {
3635 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceMemoryProperties2* pMemoryProperties = %p)", physicalDevice, pMemoryProperties);
3636 
3637 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pMemoryProperties->pNext);
3638 	while(extInfo)
3639 	{
3640 		UNSUPPORTED("pMemoryProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3641 		extInfo = extInfo->pNext;
3642 	}
3643 
3644 	vkGetPhysicalDeviceMemoryProperties(physicalDevice, &(pMemoryProperties->memoryProperties));
3645 }
3646 
vkGetPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice,const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,uint32_t * pPropertyCount,VkSparseImageFormatProperties2 * pProperties)3647 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2 *pFormatInfo, uint32_t *pPropertyCount, VkSparseImageFormatProperties2 *pProperties)
3648 {
3649 	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo = %p, uint32_t* pPropertyCount = %p, VkSparseImageFormatProperties2* pProperties = %p)",
3650 	      physicalDevice, pFormatInfo, pPropertyCount, pProperties);
3651 
3652 	if(pProperties)
3653 	{
3654 		auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pProperties->pNext);
3655 		while(extInfo)
3656 		{
3657 			UNSUPPORTED("pProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3658 			extInfo = extInfo->pNext;
3659 		}
3660 	}
3661 
3662 	// We do not support sparse images.
3663 	*pPropertyCount = 0;
3664 }
3665 
vkTrimCommandPool(VkDevice device,VkCommandPool commandPool,VkCommandPoolTrimFlags flags)3666 VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags)
3667 {
3668 	TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, VkCommandPoolTrimFlags flags = %d)",
3669 	      device, static_cast<void *>(commandPool), flags);
3670 
3671 	if(flags != 0)
3672 	{
3673 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
3674 		UNSUPPORTED("flags %d", int(flags));
3675 	}
3676 
3677 	vk::Cast(commandPool)->trim(flags);
3678 }
3679 
vkGetDeviceQueue2(VkDevice device,const VkDeviceQueueInfo2 * pQueueInfo,VkQueue * pQueue)3680 VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue)
3681 {
3682 	TRACE("(VkDevice device = %p, const VkDeviceQueueInfo2* pQueueInfo = %p, VkQueue* pQueue = %p)",
3683 	      device, pQueueInfo, pQueue);
3684 
3685 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pQueueInfo->pNext);
3686 	while(extInfo)
3687 	{
3688 		UNSUPPORTED("pQueueInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3689 		extInfo = extInfo->pNext;
3690 	}
3691 
3692 	if(pQueueInfo->flags != 0)
3693 	{
3694 		// The only flag that can be set here is VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
3695 		// According to the Vulkan 1.2.132 spec, 4.3.1. Queue Family Properties:
3696 		// "VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT specifies that the device queue is a
3697 		//  protected-capable queue. If the protected memory feature is not enabled,
3698 		//  the VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT bit of flags must not be set."
3699 		UNSUPPORTED("VkPhysicalDeviceVulkan11Features::protectedMemory");
3700 	}
3701 
3702 	vkGetDeviceQueue(device, pQueueInfo->queueFamilyIndex, pQueueInfo->queueIndex, pQueue);
3703 }
3704 
vkCreateSamplerYcbcrConversion(VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion)3705 VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSamplerYcbcrConversion *pYcbcrConversion)
3706 {
3707 	TRACE("(VkDevice device = %p, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSamplerYcbcrConversion* pYcbcrConversion = %p)",
3708 	      device, pCreateInfo, pAllocator, pYcbcrConversion);
3709 
3710 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
3711 	while(extInfo)
3712 	{
3713 		UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3714 		extInfo = extInfo->pNext;
3715 	}
3716 
3717 	return vk::SamplerYcbcrConversion::Create(pAllocator, pCreateInfo, pYcbcrConversion);
3718 }
3719 
// Destroys a sampler Y'CbCr conversion object; a null handle is a no-op per the
// vk::destroy contract used throughout this file.
VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkSamplerYcbcrConversion ycbcrConversion = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(ycbcrConversion), pAllocator);

	vk::destroy(ycbcrConversion, pAllocator);
}
3727 
vkCreateDescriptorUpdateTemplate(VkDevice device,const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate)3728 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
3729 {
3730 	TRACE("(VkDevice device = %p, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate = %p)",
3731 	      device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
3732 
3733 	if(pCreateInfo->flags != 0)
3734 	{
3735 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
3736 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
3737 	}
3738 
3739 	if(pCreateInfo->templateType != VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
3740 	{
3741 		UNSUPPORTED("pCreateInfo->templateType %d", int(pCreateInfo->templateType));
3742 	}
3743 
3744 	auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
3745 	while(extInfo)
3746 	{
3747 		UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3748 		extInfo = extInfo->pNext;
3749 	}
3750 
3751 	return vk::DescriptorUpdateTemplate::Create(pAllocator, pCreateInfo, pDescriptorUpdateTemplate);
3752 }
3753 
// Destroys a descriptor update template created by vkCreateDescriptorUpdateTemplate.
VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkDescriptorUpdateTemplate descriptorUpdateTemplate = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(descriptorUpdateTemplate), pAllocator);

	vk::destroy(descriptorUpdateTemplate, pAllocator);
}
3761 
vkUpdateDescriptorSetWithTemplate(VkDevice device,VkDescriptorSet descriptorSet,VkDescriptorUpdateTemplate descriptorUpdateTemplate,const void * pData)3762 VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void *pData)
3763 {
3764 	TRACE("(VkDevice device = %p, VkDescriptorSet descriptorSet = %p, VkDescriptorUpdateTemplate descriptorUpdateTemplate = %p, const void* pData = %p)",
3765 	      device, static_cast<void *>(descriptorSet), static_cast<void *>(descriptorUpdateTemplate), pData);
3766 
3767 	vk::Cast(descriptorUpdateTemplate)->updateDescriptorSet(vk::Cast(device), descriptorSet, pData);
3768 }
3769 
vkGetPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice,const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo,VkExternalBufferProperties * pExternalBufferProperties)3770 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo *pExternalBufferInfo, VkExternalBufferProperties *pExternalBufferProperties)
3771 {
3772 	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo = %p, VkExternalBufferProperties* pExternalBufferProperties = %p)",
3773 	      physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
3774 
3775 	vk::Cast(physicalDevice)->getProperties(pExternalBufferInfo, pExternalBufferProperties);
3776 }
3777 
vkGetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice,const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo,VkExternalFenceProperties * pExternalFenceProperties)3778 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo, VkExternalFenceProperties *pExternalFenceProperties)
3779 {
3780 	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo = %p, VkExternalFenceProperties* pExternalFenceProperties = %p)",
3781 	      physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
3782 
3783 	vk::Cast(physicalDevice)->getProperties(pExternalFenceInfo, pExternalFenceProperties);
3784 }
3785 
vkGetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice,const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,VkExternalSemaphoreProperties * pExternalSemaphoreProperties)3786 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo, VkExternalSemaphoreProperties *pExternalSemaphoreProperties)
3787 {
3788 	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo = %p, VkExternalSemaphoreProperties* pExternalSemaphoreProperties = %p)",
3789 	      physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
3790 
3791 	vk::Cast(physicalDevice)->getProperties(pExternalSemaphoreInfo, pExternalSemaphoreProperties);
3792 }
3793 
vkGetDescriptorSetLayoutSupport(VkDevice device,const VkDescriptorSetLayoutCreateInfo * pCreateInfo,VkDescriptorSetLayoutSupport * pSupport)3794 VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo, VkDescriptorSetLayoutSupport *pSupport)
3795 {
3796 	TRACE("(VkDevice device = %p, const VkDescriptorSetLayoutCreateInfo* pCreateInfo = %p, VkDescriptorSetLayoutSupport* pSupport = %p)",
3797 	      device, pCreateInfo, pSupport);
3798 
3799 	vk::Cast(device)->getDescriptorSetLayoutSupport(pCreateInfo, pSupport);
3800 }
3801 
vkCmdSetLineStippleEXT(VkCommandBuffer commandBuffer,uint32_t lineStippleFactor,uint16_t lineStipplePattern)3802 VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern)
3803 {
3804 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t lineStippleFactor = %u, uint16_t lineStipplePattern = %u)",
3805 	      commandBuffer, lineStippleFactor, lineStipplePattern);
3806 
3807 	static constexpr uint16_t solidLine = 0xFFFFu;
3808 	if(lineStipplePattern != solidLine)
3809 	{
3810 		// VkPhysicalDeviceLineRasterizationFeaturesEXT::stippled*Lines are all set to VK_FALSE and,
3811 		// according to the Vulkan spec for VkPipelineRasterizationLineStateCreateInfoEXT:
3812 		// "If stippledLineEnable is VK_FALSE, the values of lineStippleFactor and lineStipplePattern are ignored."
3813 		WARN("vkCmdSetLineStippleEXT: line stipple pattern ignored : 0x%04X", lineStipplePattern);
3814 	}
3815 }
3816 
vkCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,const VkDebugUtilsLabelEXT * pLabelInfo)3817 VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT *pLabelInfo)
3818 {
3819 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
3820 	      commandBuffer, pLabelInfo);
3821 
3822 	vk::Cast(commandBuffer)->beginDebugUtilsLabel(pLabelInfo);
3823 }
3824 
vkCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer)3825 VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer)
3826 {
3827 	TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);
3828 
3829 	vk::Cast(commandBuffer)->endDebugUtilsLabel();
3830 }
3831 
vkCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer,const VkDebugUtilsLabelEXT * pLabelInfo)3832 VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT *pLabelInfo)
3833 {
3834 	TRACE("(VkCommandBuffer commandBuffer = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
3835 	      commandBuffer, pLabelInfo);
3836 
3837 	vk::Cast(commandBuffer)->insertDebugUtilsLabel(pLabelInfo);
3838 }
3839 
vkCreateDebugUtilsMessengerEXT(VkInstance instance,const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDebugUtilsMessengerEXT * pMessenger)3840 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDebugUtilsMessengerEXT *pMessenger)
3841 {
3842 	TRACE("(VkInstance instance = %p, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDebugUtilsMessengerEXT* pMessenger = %p)",
3843 	      instance, pCreateInfo, pAllocator, pMessenger);
3844 
3845 	if(pCreateInfo->flags != 0)
3846 	{
3847 		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
3848 		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
3849 	}
3850 
3851 	return vk::DebugUtilsMessenger::Create(pAllocator, pCreateInfo, pMessenger);
3852 }
3853 
// Destroys a debug utils messenger created by vkCreateDebugUtilsMessengerEXT.
VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkInstance instance = %p, VkDebugUtilsMessengerEXT messenger = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      instance, static_cast<void *>(messenger), pAllocator);

	vk::destroy(messenger, pAllocator);
}
3861 
vkQueueBeginDebugUtilsLabelEXT(VkQueue queue,const VkDebugUtilsLabelEXT * pLabelInfo)3862 VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT *pLabelInfo)
3863 {
3864 	TRACE("(VkQueue queue = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
3865 	      queue, pLabelInfo);
3866 
3867 	vk::Cast(queue)->beginDebugUtilsLabel(pLabelInfo);
3868 }
3869 
vkQueueEndDebugUtilsLabelEXT(VkQueue queue)3870 VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT(VkQueue queue)
3871 {
3872 	TRACE("(VkQueue queue = %p)", queue);
3873 
3874 	vk::Cast(queue)->endDebugUtilsLabel();
3875 }
3876 
vkQueueInsertDebugUtilsLabelEXT(VkQueue queue,const VkDebugUtilsLabelEXT * pLabelInfo)3877 VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT *pLabelInfo)
3878 {
3879 	TRACE("(VkQueue queue = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
3880 	      queue, pLabelInfo);
3881 
3882 	vk::Cast(queue)->insertDebugUtilsLabel(pLabelInfo);
3883 }
3884 
vkSetDebugUtilsObjectNameEXT(VkDevice device,const VkDebugUtilsObjectNameInfoEXT * pNameInfo)3885 VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT *pNameInfo)
3886 {
3887 	TRACE("(VkDevice device = %p, const VkDebugUtilsObjectNameInfoEXT* pNameInfo = %p)",
3888 	      device, pNameInfo);
3889 
3890 	return vk::Cast(device)->setDebugUtilsObjectName(pNameInfo);
3891 }
3892 
vkSetDebugUtilsObjectTagEXT(VkDevice device,const VkDebugUtilsObjectTagInfoEXT * pTagInfo)3893 VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT *pTagInfo)
3894 {
3895 	TRACE("(VkDevice device = %p, const VkDebugUtilsObjectTagInfoEXT* pTagInfo = %p)",
3896 	      device, pTagInfo);
3897 
3898 	return vk::Cast(device)->setDebugUtilsObjectTag(pTagInfo);
3899 }
3900 
vkSubmitDebugUtilsMessageEXT(VkInstance instance,VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,VkDebugUtilsMessageTypeFlagsEXT messageTypes,const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData)3901 VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData)
3902 {
3903 	TRACE("(VkInstance instance = %p, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity = %d, VkDebugUtilsMessageTypeFlagsEXT messageTypes = %d, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData = %p)",
3904 	      instance, messageSeverity, messageTypes, pCallbackData);
3905 
3906 	vk::Cast(instance)->submitDebugUtilsMessage(messageSeverity, messageTypes, pCallbackData);
3907 }
3908 
3909 #ifdef VK_USE_PLATFORM_XCB_KHR
// Creates a presentable surface for an XCB window.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkXcbSurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	// VUID-VkXcbSurfaceCreateInfoKHR-connection-01310 : connection must point to a valid X11 xcb_connection_t
	ASSERT(pCreateInfo->connection);

	return vk::XcbSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}
3920 
// As a software renderer, presentation to any XCB connection/visual is supported,
// so this unconditionally reports VK_TRUE.
VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t *connection, xcb_visualid_t visual_id)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, xcb_connection_t* connection = %p, xcb_visualid_t visual_id = %d)",
	      physicalDevice, int(queueFamilyIndex), connection, int(visual_id));

	return VK_TRUE;
}
3928 #endif
3929 
3930 #ifdef VK_USE_PLATFORM_XLIB_KHR
// Creates a presentable surface for an Xlib window.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR(VkInstance instance, const VkXlibSurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkXlibSurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	// VUID-VkXlibSurfaceCreateInfoKHR-dpy-01313: dpy must point to a valid Xlib Display
	ASSERT(pCreateInfo->dpy);

	return vk::XlibSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}
3941 
// As a software renderer, presentation to any Xlib display/visual is supported,
// so this unconditionally reports VK_TRUE.
VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display *dpy, VisualID visualID)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, Display* dpy = %p, VisualID visualID = %lu)",
	      physicalDevice, int(queueFamilyIndex), dpy, visualID);

	return VK_TRUE;
}
3949 #endif
3950 
3951 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
// Creates a presentable surface for a Wayland surface.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkWaylandSurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::WaylandSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}
3959 
// As a software renderer, presentation to any Wayland display is supported,
// so this unconditionally reports VK_TRUE.
VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display *display)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, struct wl_display* display = %p)",
	      physicalDevice, int(queueFamilyIndex), display);

	return VK_TRUE;
}
3967 #endif
3968 
3969 #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
// Creates a presentable surface for a DirectFB surface.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDirectFBSurfaceEXT(VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkDirectFBSurfaceCreateInfoEXT* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::DirectFBSurfaceEXT::Create(pAllocator, pCreateInfo, pSurface);
}
3977 
// As a software renderer, presentation to any DirectFB instance is supported,
// so this unconditionally reports VK_TRUE.
VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceDirectFBPresentationSupportEXT(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB *dfb)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, IDirectFB* dfb = %p)",
	      physicalDevice, int(queueFamilyIndex), dfb);

	return VK_TRUE;
}
3985 #endif
3986 
3987 #ifdef VK_USE_PLATFORM_DISPLAY_KHR
// Creates a display mode for the VK_KHR_display path.
// NOTE(review): this returns VK_SUCCESS without writing *pMode — presumably a
// deliberate stub since SwiftShader's display support is minimal; confirm that
// callers of this path never dereference the returned mode handle.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDisplayModeKHR *pMode)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkDisplayKHR display = %p, VkDisplayModeCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkDisplayModeKHR* pModei = %p)",
	      physicalDevice, static_cast<void *>(display), pCreateInfo, pAllocator, pMode);

	return VK_SUCCESS;
}
3995 
// Creates a presentation surface targeting a display plane (VK_KHR_display).
// Allocation and construction are delegated to vk::DisplaySurfaceKHR::Create().
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkDisplaySurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::DisplaySurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}
4003 
// Enumerates the display modes of a display. The `display` handle is logged
// but otherwise unused; the query is answered by the static
// vk::DisplaySurfaceKHR helper.
VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount, VkDisplayModePropertiesKHR *pProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkDisplayKHR display = %p, uint32_t* pPropertyCount = %p, VkDisplayModePropertiesKHR* pProperties = %p)",
	      physicalDevice, static_cast<void *>(display), pPropertyCount, pProperties);

	return vk::DisplaySurfaceKHR::GetDisplayModeProperties(pPropertyCount, pProperties);
}
4011 
// Queries the capabilities of a display plane for a given mode. The `mode`
// and `planeIndex` parameters are logged but not forwarded; the answer comes
// from the static vk::DisplaySurfaceKHR helper.
VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR *pCapabilities)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkDisplayModeKHR mode = %p, uint32_t planeIndex = %d, VkDisplayPlaneCapabilitiesKHR* pCapabilities = %p)",
	      physicalDevice, static_cast<void *>(mode), planeIndex, pCapabilities);

	return vk::DisplaySurfaceKHR::GetDisplayPlaneCapabilities(pCapabilities);
}
4019 
// Enumerates the displays a given plane can present to. The `planeIndex` is
// logged but not forwarded to the static vk::DisplaySurfaceKHR helper.
VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t *pDisplayCount, VkDisplayKHR *pDisplays)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t planeIndex = %d, uint32_t* pDisplayCount = %p, VkDisplayKHR* pDisplays = %p)",
	      physicalDevice, planeIndex, pDisplayCount, pDisplays);

	return vk::DisplaySurfaceKHR::GetDisplayPlaneSupportedDisplays(pDisplayCount, pDisplays);
}
4027 
// Enumerates the display planes of the physical device, delegating to the
// static vk::DisplaySurfaceKHR helper.
VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayPlanePropertiesKHR *pProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pPropertyCount = %p, VkDisplayPlanePropertiesKHR* pProperties = %p)",
	      physicalDevice, pPropertyCount, pProperties);

	return vk::DisplaySurfaceKHR::GetPhysicalDeviceDisplayPlaneProperties(pPropertyCount, pProperties);
}
4035 
// Enumerates the displays attached to the physical device, delegating to the
// static vk::DisplaySurfaceKHR helper.
VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayPropertiesKHR *pProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pPropertyCount = %p, VkDisplayPropertiesKHR* pProperties = %p)",
	      physicalDevice, pPropertyCount, pProperties);

	return vk::DisplaySurfaceKHR::GetPhysicalDeviceDisplayProperties(pPropertyCount, pProperties);
}
4043 #endif
4044 
4045 #ifdef VK_USE_PLATFORM_MACOS_MVK
// Creates a macOS (MoltenVK-style) presentation surface (VK_MVK_macos_surface).
// Allocation and construction are delegated to vk::MacOSSurfaceMVK::Create().
VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkMacOSSurfaceCreateInfoMVK* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::MacOSSurfaceMVK::Create(pAllocator, pCreateInfo, pSurface);
}
4053 #endif
4054 
4055 #ifdef VK_USE_PLATFORM_METAL_EXT
// Creates a Metal-layer presentation surface (VK_EXT_metal_surface).
// Allocation and construction are delegated to vk::MetalSurfaceEXT::Create().
VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT(VkInstance instance, const VkMetalSurfaceCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkMetalSurfaceCreateInfoEXT* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::MetalSurfaceEXT::Create(pAllocator, pCreateInfo, pSurface);
}
4063 #endif
4064 
4065 #ifdef VK_USE_PLATFORM_WIN32_KHR
// Creates a Win32 (HWND-backed) presentation surface (VK_KHR_win32_surface).
// Allocation and construction are delegated to vk::Win32SurfaceKHR::Create().
VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkWin32SurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::Win32SurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}
4073 
vkGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice,uint32_t queueFamilyIndex)4074 VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex)
4075 {
4076 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d)",
4077 	      physicalDevice, queueFamilyIndex);
4078 	return VK_TRUE;
4079 }
4080 #endif
4081 
// Creates a headless (windowless) presentation surface (VK_EXT_headless_surface).
// Allocation and construction are delegated to vk::HeadlessSurfaceKHR::Create().
VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
	TRACE("(VkInstance instance = %p, VkHeadlessSurfaceCreateInfoEXT* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurface* pSurface = %p)",
	      instance, pCreateInfo, pAllocator, pSurface);

	return vk::HeadlessSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}
4089 
4090 #ifndef __ANDROID__
// Destroys a presentation surface of any platform type.
// vk::destroy dispatches to the surface's destructor and releases its storage
// through the given allocation callbacks.
VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkInstance instance = %p, VkSurfaceKHR surface = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      instance, static_cast<void *>(surface), pAllocator);

	vk::destroy(surface, pAllocator);
}
4098 
vkGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,uint32_t queueFamilyIndex,VkSurfaceKHR surface,VkBool32 * pSupported)4099 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32 *pSupported)
4100 {
4101 	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, VkSurface surface = %p, VKBool32* pSupported = %p)",
4102 	      physicalDevice, int(queueFamilyIndex), static_cast<void *>(surface), pSupported);
4103 
4104 	*pSupported = VK_TRUE;
4105 	return VK_SUCCESS;
4106 }
4107 
// Queries the capabilities of the surface (image counts, extents, transforms),
// delegating to the surface object itself.
VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR *pSurfaceCapabilities)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities = %p)",
	      physicalDevice, static_cast<void *>(surface), pSurfaceCapabilities);

	return vk::Cast(surface)->getSurfaceCapabilities(pSurfaceCapabilities);
}
4115 
vkGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice,VkSurfaceKHR surface,uint32_t * pSurfaceFormatCount,VkSurfaceFormatKHR * pSurfaceFormats)4116 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pSurfaceFormatCount, VkSurfaceFormatKHR *pSurfaceFormats)
4117 {
4118 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p. uint32_t* pSurfaceFormatCount = %p, VkSurfaceFormatKHR* pSurfaceFormats = %p)",
4119 	      physicalDevice, static_cast<void *>(surface), pSurfaceFormatCount, pSurfaceFormats);
4120 
4121 	if(!pSurfaceFormats)
4122 	{
4123 		*pSurfaceFormatCount = vk::Cast(surface)->getSurfaceFormatsCount();
4124 		return VK_SUCCESS;
4125 	}
4126 
4127 	return vk::Cast(surface)->getSurfaceFormats(pSurfaceFormatCount, pSurfaceFormats);
4128 }
4129 
vkGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice,VkSurfaceKHR surface,uint32_t * pPresentModeCount,VkPresentModeKHR * pPresentModes)4130 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pPresentModeCount, VkPresentModeKHR *pPresentModes)
4131 {
4132 	TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p uint32_t* pPresentModeCount = %p, VkPresentModeKHR* pPresentModes = %p)",
4133 	      physicalDevice, static_cast<void *>(surface), pPresentModeCount, pPresentModes);
4134 
4135 	if(!pPresentModes)
4136 	{
4137 		*pPresentModeCount = vk::Cast(surface)->getPresentModeCount();
4138 		return VK_SUCCESS;
4139 	}
4140 
4141 	return vk::Cast(surface)->getPresentModes(pPresentModeCount, pPresentModes);
4142 }
4143 
// Creates a swapchain for a surface.
// Sequence matters here:
//   1. Retire the old swapchain (if any) so its surface association is released.
//   2. Reject the request if the surface still has a live swapchain attached.
//   3. Create the swapchain object, then its images; on image-creation failure
//      the half-built swapchain is destroyed before returning the error.
//   4. Only a fully constructed swapchain is associated with the surface.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain)
{
	TRACE("(VkDevice device = %p, const VkSwapchainCreateInfoKHR* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSwapchainKHR* pSwapchain = %p)",
	      device, pCreateInfo, pAllocator, pSwapchain);

	if(pCreateInfo->oldSwapchain)
	{
		vk::Cast(pCreateInfo->oldSwapchain)->retire();
	}

	// The surface can only back one non-retired swapchain at a time.
	if(vk::Cast(pCreateInfo->surface)->hasAssociatedSwapchain())
	{
		return VK_ERROR_NATIVE_WINDOW_IN_USE_KHR;
	}

	VkResult status = vk::SwapchainKHR::Create(pAllocator, pCreateInfo, pSwapchain);

	if(status != VK_SUCCESS)
	{
		return status;
	}

	auto swapchain = vk::Cast(*pSwapchain);
	status = swapchain->createImages(device, pCreateInfo);

	if(status != VK_SUCCESS)
	{
		// Roll back the partially constructed swapchain before reporting failure.
		vk::destroy(*pSwapchain, pAllocator);
		return status;
	}

	vk::Cast(pCreateInfo->surface)->associateSwapchain(swapchain);

	return VK_SUCCESS;
}
4179 
// Destroys a swapchain. vk::destroy dispatches to the swapchain's destructor
// and releases its storage through the given allocation callbacks.
VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, const VkAllocationCallbacks* pAllocator = %p)",
	      device, static_cast<void *>(swapchain), pAllocator);

	vk::destroy(swapchain, pAllocator);
}
4187 
vkGetSwapchainImagesKHR(VkDevice device,VkSwapchainKHR swapchain,uint32_t * pSwapchainImageCount,VkImage * pSwapchainImages)4188 VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages)
4189 {
4190 	TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, uint32_t* pSwapchainImageCount = %p, VkImage* pSwapchainImages = %p)",
4191 	      device, static_cast<void *>(swapchain), pSwapchainImageCount, pSwapchainImages);
4192 
4193 	if(!pSwapchainImages)
4194 	{
4195 		*pSwapchainImageCount = vk::Cast(swapchain)->getImageCount();
4196 		return VK_SUCCESS;
4197 	}
4198 
4199 	return vk::Cast(swapchain)->getImages(pSwapchainImageCount, pSwapchainImages);
4200 }
4201 
// Acquires the next presentable image from the swapchain. The semaphore is
// down-cast to a binary semaphore (timeline semaphores are invalid here per
// the Vulkan spec) and both sync objects are forwarded to the swapchain.
VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex)
{
	TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, uint64_t timeout = %" PRIu64 ", VkSemaphore semaphore = %p, VkFence fence = %p, uint32_t* pImageIndex = %p)",
	      device, static_cast<void *>(swapchain), timeout, static_cast<void *>(semaphore), static_cast<void *>(fence), pImageIndex);

	return vk::Cast(swapchain)->getNextImage(timeout, vk::DynamicCast<vk::BinarySemaphore>(semaphore), vk::Cast(fence), pImageIndex);
}
4209 
// Presents the images named in pPresentInfo, delegating to the queue object.
VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo)
{
	TRACE("(VkQueue queue = %p, const VkPresentInfoKHR* pPresentInfo = %p)",
	      queue, pPresentInfo);

	return vk::Cast(queue)->present(pPresentInfo);
}
4217 
vkAcquireNextImage2KHR(VkDevice device,const VkAcquireNextImageInfoKHR * pAcquireInfo,uint32_t * pImageIndex)4218 VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo, uint32_t *pImageIndex)
4219 {
4220 	TRACE("(VkDevice device = %p, const VkAcquireNextImageInfoKHR *pAcquireInfo = %p, uint32_t *pImageIndex = %p",
4221 	      device, pAcquireInfo, pImageIndex);
4222 
4223 	return vk::Cast(pAcquireInfo->swapchain)->getNextImage(pAcquireInfo->timeout, vk::DynamicCast<vk::BinarySemaphore>(pAcquireInfo->semaphore), vk::Cast(pAcquireInfo->fence), pImageIndex);
4224 }
4225 
vkGetDeviceGroupPresentCapabilitiesKHR(VkDevice device,VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities)4226 VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR *pDeviceGroupPresentCapabilities)
4227 {
4228 	TRACE("(VkDevice device = %p, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities = %p)",
4229 	      device, pDeviceGroupPresentCapabilities);
4230 
4231 	for(unsigned int i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; i++)
4232 	{
4233 		// The only real physical device in the presentation group is device 0,
4234 		// and it can present to itself.
4235 		pDeviceGroupPresentCapabilities->presentMask[i] = (i == 0) ? 1 : 0;
4236 	}
4237 
4238 	pDeviceGroupPresentCapabilities->modes = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;
4239 
4240 	return VK_SUCCESS;
4241 }
4242 
// Reports the device-group present modes usable with the surface. Only local
// presentation is supported, for any surface.
VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR *pModes)
{
	TRACE("(VkDevice device = %p, VkSurfaceKHR surface = %p, VkDeviceGroupPresentModeFlagsKHR *pModes = %p)",
	      device, static_cast<void *>(surface), pModes);

	*pModes = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;
	return VK_SUCCESS;
}
4251 
// Enumerates the rectangles of the surface that the physical device can
// present to, delegating to the surface object.
VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pRectCount, VkRect2D *pRects)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, uint32_t* pRectCount = %p, VkRect2D* pRects = %p)",
	      physicalDevice, static_cast<void *>(surface), pRectCount, pRects);

	return vk::Cast(surface)->getPresentRectangles(pRectCount, pRects);
}
4259 
4260 #endif  // ! __ANDROID__
4261 
4262 #ifdef __ANDROID__
4263 
vkGetSwapchainGrallocUsage2ANDROID(VkDevice device,VkFormat format,VkImageUsageFlags imageUsage,VkSwapchainImageUsageFlagsANDROID swapchainUsage,uint64_t * grallocConsumerUsage,uint64_t * grallocProducerUsage)4264 VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainGrallocUsage2ANDROID(VkDevice device, VkFormat format, VkImageUsageFlags imageUsage, VkSwapchainImageUsageFlagsANDROID swapchainUsage, uint64_t *grallocConsumerUsage, uint64_t *grallocProducerUsage)
4265 {
4266 	TRACE("(VkDevice device = %p, VkFormat format = %d, VkImageUsageFlags imageUsage = %d, VkSwapchainImageUsageFlagsANDROID swapchainUsage = %d, uint64_t* grallocConsumerUsage = %p, uin64_t* grallocProducerUsage = %p)",
4267 	      device, format, imageUsage, swapchainUsage, grallocConsumerUsage, grallocProducerUsage);
4268 
4269 	*grallocConsumerUsage = 0;
4270 	*grallocProducerUsage = GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN;
4271 
4272 	return VK_SUCCESS;
4273 }
4274 
// Legacy (gralloc0) variant of the usage query above: requests frequent CPU
// writes for all formats and image usages.
VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainGrallocUsageANDROID(VkDevice device, VkFormat format, VkImageUsageFlags imageUsage, int *grallocUsage)
{
	TRACE("(VkDevice device = %p, VkFormat format = %d, VkImageUsageFlags imageUsage = %d, int* grallocUsage = %p)",
	      device, format, imageUsage, grallocUsage);

	*grallocUsage = GRALLOC_USAGE_SW_WRITE_OFTEN;

	return VK_SUCCESS;
}
4284 
// Acquires ownership of a swapchain image from the Android compositor.
// Synchronization is resolved eagerly: the native fence FD (if any) is waited
// on and closed here, so the fence and semaphore can be signaled immediately.
VKAPI_ATTR VkResult VKAPI_CALL vkAcquireImageANDROID(VkDevice device, VkImage image, int nativeFenceFd, VkSemaphore semaphore, VkFence fence)
{
	TRACE("(VkDevice device = %p, VkImage image = %p, int nativeFenceFd = %d, VkSemaphore semaphore = %p, VkFence fence = %p)",
	      device, static_cast<void *>(image), nativeFenceFd, static_cast<void *>(semaphore), static_cast<void *>(fence));

	// Block until the producer's fence signals (-1 timeout = wait forever),
	// then close the FD — this function takes ownership of it.
	if(nativeFenceFd >= 0)
	{
		sync_wait(nativeFenceFd, -1);
		close(nativeFenceFd);
	}

	// The wait above already happened, so both sync objects can signal now.
	if(fence != VK_NULL_HANDLE)
	{
		vk::Cast(fence)->complete();
	}

	if(semaphore != VK_NULL_HANDLE)
	{
		vk::DynamicCast<vk::BinarySemaphore>(semaphore)->signal();
	}

	return VK_SUCCESS;
}
4308 
// Hands a rendered image back to the Android compositor. No native fence is
// produced (*pNativeFenceFd = -1): the queue is drained synchronously first,
// so the image is already idle when it is released.
VKAPI_ATTR VkResult VKAPI_CALL vkQueueSignalReleaseImageANDROID(VkQueue queue, uint32_t waitSemaphoreCount, const VkSemaphore *pWaitSemaphores, VkImage image, int *pNativeFenceFd)
{
	TRACE("(VkQueue queue = %p, uint32_t waitSemaphoreCount = %d, const VkSemaphore* pWaitSemaphores = %p, VkImage image = %p, int* pNativeFenceFd = %p)",
	      queue, waitSemaphoreCount, pWaitSemaphores, static_cast<void *>(image), pNativeFenceFd);

	// This is a hack to deal with screen tearing for now.
	// Need to correctly implement threading using VkSemaphore
	// to get rid of it. b/132458423
	vkQueueWaitIdle(queue);

	// -1 = no fence: the wait above already guarantees completion.
	*pNativeFenceFd = -1;

	return vk::Cast(image)->prepareForExternalUseANDROID();
}
4323 #endif  // __ANDROID__
4324 }
4325