// Copyright 2018 The SwiftShader Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "VkBuffer.hpp"
#include "VkBufferView.hpp"
#include "VkCommandBuffer.hpp"
#include "VkCommandPool.hpp"
#include "VkConfig.hpp"
#include "VkDebugUtilsMessenger.hpp"
#include "VkDescriptorPool.hpp"
#include "VkDescriptorSetLayout.hpp"
#include "VkDescriptorUpdateTemplate.hpp"
#include "VkDestroy.hpp"
#include "VkDevice.hpp"
#include "VkDeviceMemory.hpp"
#include "VkEvent.hpp"
#include "VkFence.hpp"
#include "VkFramebuffer.hpp"
#include "VkGetProcAddress.hpp"
#include "VkImage.hpp"
#include "VkImageView.hpp"
#include "VkInstance.hpp"
#include "VkPhysicalDevice.hpp"
#include "VkPipeline.hpp"
#include "VkPipelineCache.hpp"
#include "VkPipelineLayout.hpp"
#include "VkQueryPool.hpp"
#include "VkQueue.hpp"
#include "VkRenderPass.hpp"
#include "VkSampler.hpp"
#include "VkSemaphore.hpp"
#include "VkShaderModule.hpp"
#include "VkStringify.hpp"
#include "VkStructConversion.hpp"
#include "VkTimelineSemaphore.hpp"

#include "Reactor/Nucleus.hpp"
#include "System/CPUID.hpp"
#include "System/Debug.hpp"
#include "System/SwiftConfig.hpp"
#include "WSI/HeadlessSurfaceKHR.hpp"
#include "WSI/VkSwapchainKHR.hpp"

#if defined(VK_USE_PLATFORM_METAL_EXT) || defined(VK_USE_PLATFORM_MACOS_MVK)
#	include "WSI/MetalSurface.hpp"
#endif

#ifdef VK_USE_PLATFORM_XCB_KHR
#	include "WSI/XcbSurfaceKHR.hpp"
#endif

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
#	include "WSI/WaylandSurfaceKHR.hpp"
#endif

#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
#	include "WSI/DirectFBSurfaceEXT.hpp"
#endif

#ifdef VK_USE_PLATFORM_DISPLAY_KHR
#	include "WSI/DisplaySurfaceKHR.hpp"
#endif

#ifdef VK_USE_PLATFORM_WIN32_KHR
#	include "WSI/Win32SurfaceKHR.hpp"
#endif

#include "marl/mutex.h"
#include "marl/scheduler.h"
#include "marl/thread.h"
#include "marl/tsa.h"

#ifdef __ANDROID__
#	include "commit.h"
#	include "System/GrallocAndroid.hpp"
#	include <android/log.h>
#	include <hardware/gralloc1.h>
#	include <sync/sync.h>
#	ifdef SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
#		include "VkDeviceMemoryExternalAndroid.hpp"
#	endif
#endif

#include <algorithm>
#include <cinttypes>
#include <cstring>
#include <functional>
#include <map>
#include <string>

namespace {

// Enable commit_id.py and #include commit.h for other platforms.
#if defined(__ANDROID__) && defined(ENABLE_BUILD_VERSION_OUTPUT)
void logBuildVersionInformation()
{
	// TODO(b/144093703): Don't call __android_log_print() directly
	__android_log_print(ANDROID_LOG_INFO, "SwiftShader", "SwiftShader Version: %s", SWIFTSHADER_VERSION_STRING);
}
#endif  // __ANDROID__ && ENABLE_BUILD_VERSION_OUTPUT

// setReactorDefaultConfig() sets the default configuration for Vulkan's use of
// Reactor.
void setReactorDefaultConfig()
{
	auto swConfig = sw::getConfiguration();
	auto cfg = rr::Config::Edit()
		.set(rr::Optimization::Level::Default)
		.clearOptimizationPasses()
		.add(rr::Optimization::Pass::ScalarReplAggregates)
		.add(rr::Optimization::Pass::SCCP)
		.add(rr::Optimization::Pass::CFGSimplification)
		.add(rr::Optimization::Pass::EarlyCSEPass)
		.add(rr::Optimization::Pass::CFGSimplification)
		.add(rr::Optimization::Pass::InstructionCombining)
		.setDebugConfig(sw::getReactorDebugConfig(swConfig));

	rr::Nucleus::adjustDefaultConfig(cfg);
}

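// getOrCreateScheduler() returns the marl scheduler shared by the devices created
// by this driver. It is created on first use and only held through a weak pointer,
// so it is destroyed when the last device referencing it goes away and recreated
// if another device is created afterwards.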
std::shared_ptr<marl::Scheduler> getOrCreateScheduler()
{
	struct Scheduler
	{
		marl::mutex mutex;
		std::weak_ptr<marl::Scheduler> weakptr GUARDED_BY(mutex);
	};

	static Scheduler scheduler;  // TODO(b/208256248): Avoid exit-time destructor.

	marl::lock lock(scheduler.mutex);
	auto sptr = scheduler.weakptr.lock();
	if(!sptr)
	{
		const sw::Configuration &config = sw::getConfiguration();
		marl::Scheduler::Config cfg = sw::getSchedulerConfiguration(config);
		sptr = std::make_shared<marl::Scheduler>(cfg);
		scheduler.weakptr = sptr;
	}
	return sptr;
}

// initializeLibrary() is called by vkCreateInstance() to perform one-off global
// initialization of the SwiftShader driver.
void initializeLibrary()
{
	static bool doOnce = [] {
#if defined(__ANDROID__) && defined(ENABLE_BUILD_VERSION_OUTPUT)
		logBuildVersionInformation();
#endif  // __ANDROID__ && ENABLE_BUILD_VERSION_OUTPUT
		setReactorDefaultConfig();
		return true;
	}();
	(void)doOnce;
}

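// ValidateRenderPassPNextChain() asserts that the structures chained to a render
// pass create info satisfy the valid usage rules this implementation relies on.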
template<class T>
void ValidateRenderPassPNextChain(VkDevice device, const T *pCreateInfo)
{
	const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);

	while(extensionCreateInfo)
	{
		switch(extensionCreateInfo->sType)
		{
		case VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO:
			{
				const VkRenderPassInputAttachmentAspectCreateInfo *inputAttachmentAspectCreateInfo = reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo *>(extensionCreateInfo);

				for(uint32_t i = 0; i < inputAttachmentAspectCreateInfo->aspectReferenceCount; i++)
				{
					const auto &aspectReference = inputAttachmentAspectCreateInfo->pAspectReferences[i];
					ASSERT(aspectReference.subpass < pCreateInfo->subpassCount);
					const auto &subpassDescription = pCreateInfo->pSubpasses[aspectReference.subpass];
					ASSERT(aspectReference.inputAttachmentIndex < subpassDescription.inputAttachmentCount);
					const auto &attachmentReference = subpassDescription.pInputAttachments[aspectReference.inputAttachmentIndex];
					if(attachmentReference.attachment != VK_ATTACHMENT_UNUSED)
					{
						// If the pNext chain includes an instance of VkRenderPassInputAttachmentAspectCreateInfo, for any
						// element of the pInputAttachments member of any element of pSubpasses where the attachment member
						// is not VK_ATTACHMENT_UNUSED, the aspectMask member of the corresponding element of
						// VkRenderPassInputAttachmentAspectCreateInfo::pAspectReferences must only include aspects that are
						// present in images of the format specified by the element of pAttachments at attachment.
						vk::Format format(pCreateInfo->pAttachments[attachmentReference.attachment].format);
						bool isDepth = format.isDepth();
						bool isStencil = format.isStencil();
						ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_COLOR_BIT) || (!isDepth && !isStencil));
						ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_DEPTH_BIT) || isDepth);
						ASSERT(!(aspectReference.aspectMask & VK_IMAGE_ASPECT_STENCIL_BIT) || isStencil);
					}
				}
			}
			break;
		case VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO:
			{
				const VkRenderPassMultiviewCreateInfo *multiviewCreateInfo = reinterpret_cast<const VkRenderPassMultiviewCreateInfo *>(extensionCreateInfo);
				ASSERT((multiviewCreateInfo->subpassCount == 0) || (multiviewCreateInfo->subpassCount == pCreateInfo->subpassCount));
				ASSERT((multiviewCreateInfo->dependencyCount == 0) || (multiviewCreateInfo->dependencyCount == pCreateInfo->dependencyCount));

				bool zeroMask = (multiviewCreateInfo->pViewMasks[0] == 0);
				for(uint32_t i = 1; i < multiviewCreateInfo->subpassCount; i++)
				{
					ASSERT((multiviewCreateInfo->pViewMasks[i] == 0) == zeroMask);
				}

				if(zeroMask)
				{
					ASSERT(multiviewCreateInfo->correlationMaskCount == 0);
				}

				for(uint32_t i = 0; i < multiviewCreateInfo->dependencyCount; i++)
				{
					const auto &dependency = pCreateInfo->pDependencies[i];
					if(multiviewCreateInfo->pViewOffsets[i] != 0)
					{
						ASSERT(dependency.srcSubpass != dependency.dstSubpass);
						ASSERT(dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT);
					}
					if(zeroMask)
					{
						ASSERT(!(dependency.dependencyFlags & VK_DEPENDENCY_VIEW_LOCAL_BIT));
					}
				}

				// If the pNext chain includes an instance of VkRenderPassMultiviewCreateInfo,
				// each element of its pViewMasks member must not include a bit at a position
				// greater than the value of VkPhysicalDeviceLimits::maxFramebufferLayers.
				// pViewMask is a 32-bit value. If maxFramebufferLayers >= 32, it's impossible
				// for pViewMask to contain a bit at an illegal position.
				// Note: Verify pViewMask values instead if we hit this assert.
				ASSERT(vk::Cast(device)->getPhysicalDevice()->getProperties().limits.maxFramebufferLayers >= 32);
			}
			break;
		case VK_STRUCTURE_TYPE_MAX_ENUM:
			// dEQP tests that this value is ignored.
			break;
		default:
			UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
			break;
		}

		extensionCreateInfo = extensionCreateInfo->pNext;
	}
}

}  // namespace

extern "C" {
VK_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char *pName)
{
	TRACE("(VkInstance instance = %p, const char* pName = %p)", instance, pName);

	return vk::GetInstanceProcAddr(vk::Cast(instance), pName);
}

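// Called by the Vulkan loader to negotiate the loader/ICD interface version.
// SwiftShader reports support for version 3 of that interface.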
VK_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion)
{
	*pSupportedVersion = 3;
	return VK_SUCCESS;
}

#if VK_USE_PLATFORM_FUCHSIA

// This symbol must be exported by a Fuchsia Vulkan ICD. The Vulkan loader will
// call it, passing the address of a global function pointer that can later be
// used at runtime to connect to Fuchsia FIDL services, as required by certain
// extensions. See https://fxbug.dev/13095 for more details.
//
// NOTE: This entry point has not been upstreamed to Khronos yet, which reserves
//       all symbols starting with vk_icd. See https://fxbug.dev/13074 which
//       tracks upstreaming progress.
VK_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vk_icdInitializeConnectToServiceCallback(
    PFN_vkConnectToService callback)
{
	TRACE("(callback = %p)", callback);
	vk::icdFuchsiaServiceConnectCallback = callback;
	return VK_SUCCESS;
}

#endif  // VK_USE_PLATFORM_FUCHSIA

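// ExtensionProperties augments VkExtensionProperties with a runtime support check,
// so that platform-dependent extensions (e.g. the XCB surface extension) are only
// advertised when they can actually be used.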
struct ExtensionProperties : public VkExtensionProperties
{
	std::function<bool()> isSupported = [] { return true; };
};

// TODO(b/208256248): Avoid exit-time destructor.
static const ExtensionProperties instanceExtensionProperties[] = {
	{ { VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME, VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION } },
	{ { VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION } },
	{ { VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION } },
	{ { VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION } },
	{ { VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION } },
	{ { VK_EXT_DEBUG_UTILS_EXTENSION_NAME, VK_EXT_DEBUG_UTILS_SPEC_VERSION } },
	{ { VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME, VK_EXT_HEADLESS_SURFACE_SPEC_VERSION } },
#ifndef __ANDROID__
	{ { VK_KHR_SURFACE_EXTENSION_NAME, VK_KHR_SURFACE_SPEC_VERSION } },
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
	{ { VK_KHR_XCB_SURFACE_EXTENSION_NAME, VK_KHR_XCB_SURFACE_SPEC_VERSION }, [] { return vk::XcbSurfaceKHR::isSupported(); } },
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
	{ { VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, VK_KHR_WAYLAND_SURFACE_SPEC_VERSION } },
#endif
#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
	{ { VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME, VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION } },
#endif
#ifdef VK_USE_PLATFORM_DISPLAY_KHR
	{ { VK_KHR_DISPLAY_EXTENSION_NAME, VK_KHR_DISPLAY_SPEC_VERSION } },
#endif
#ifdef VK_USE_PLATFORM_MACOS_MVK
	{ { VK_MVK_MACOS_SURFACE_EXTENSION_NAME, VK_MVK_MACOS_SURFACE_SPEC_VERSION } },
#endif
#ifdef VK_USE_PLATFORM_METAL_EXT
	{ { VK_EXT_METAL_SURFACE_EXTENSION_NAME, VK_EXT_METAL_SURFACE_SPEC_VERSION } },
#endif
#ifdef VK_USE_PLATFORM_WIN32_KHR
	{ { VK_KHR_WIN32_SURFACE_EXTENSION_NAME, VK_KHR_WIN32_SURFACE_SPEC_VERSION } },
#endif
};

// TODO(b/208256248): Avoid exit-time destructor.
static const ExtensionProperties deviceExtensionProperties[] = {
	{ { VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME, VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION } },
	// Vulkan 1.1 promoted extensions
	{ { VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, VK_KHR_BIND_MEMORY_2_SPEC_VERSION } },
	{ { VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME, VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION } },
	{ { VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION } },
	{ { VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME, VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION } },
	{ { VK_KHR_DEVICE_GROUP_EXTENSION_NAME, VK_KHR_DEVICE_GROUP_SPEC_VERSION } },
	{ { VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME, VK_KHR_EXTERNAL_FENCE_SPEC_VERSION } },
	{ { VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION } },
	{ { VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION } },
	{ { VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION } },
	{ { VK_KHR_MAINTENANCE1_EXTENSION_NAME, VK_KHR_MAINTENANCE1_SPEC_VERSION } },
	{ { VK_KHR_MAINTENANCE2_EXTENSION_NAME, VK_KHR_MAINTENANCE2_SPEC_VERSION } },
	{ { VK_KHR_MAINTENANCE3_EXTENSION_NAME, VK_KHR_MAINTENANCE3_SPEC_VERSION } },
	{ { VK_KHR_MULTIVIEW_EXTENSION_NAME, VK_KHR_MULTIVIEW_SPEC_VERSION } },
	{ { VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME, VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION } },
	{ { VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION } },
	{ { VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME, VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION } },
	{ { VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME, VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION } },
	{ { VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME, VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION } },
	{ { VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME, VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION } },
	// Only the 1.1 core version of this is supported. The extension has additional requirements.
	//{{ VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME, VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION }},
	{ { VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME, VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION } },
	// Only the 1.1 core version of this is supported. The extension has additional requirements.
	//{{ VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME, VK_KHR_VARIABLE_POINTERS_SPEC_VERSION }},
	{ { VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION } },
	// The following extension is only used to add support for Bresenham lines.
	{ { VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME, VK_EXT_LINE_RASTERIZATION_SPEC_VERSION } },
	// The following extension is used by ANGLE to emulate blitting the stencil buffer.
	{ { VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION } },
	{ { VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME, VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION } },
	// Useful for D3D emulation.
	{ { VK_EXT_4444_FORMATS_EXTENSION_NAME, VK_EXT_4444_FORMATS_SPEC_VERSION } },
	// Used by ANGLE to support GL_KHR_blend_equation_advanced.
	{ { VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME, VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION } },
#ifndef __ANDROID__
	// We fully support the KHR_swapchain v70 additions, so just track the spec version.
	{ { VK_KHR_SWAPCHAIN_EXTENSION_NAME, VK_KHR_SWAPCHAIN_SPEC_VERSION } },
#else
	// We only support V7 of this extension. Missing functionality: in V8,
	// it becomes possible to pass a VkNativeBufferANDROID structure to
	// vkBindImageMemory2. Android's swapchain implementation does this in
	// order to support passing VkBindImageMemorySwapchainInfoKHR
	// (from KHR_swapchain v70) to vkBindImageMemory2.
	{ { VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME, 7 } },
#endif
#if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
	{ { VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME, VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION } },
#endif
#if SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
	{ { VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION } },
#endif
#if SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
	{ { VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME, VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION } },
#endif

	{ { VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME, VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION } },

#if VK_USE_PLATFORM_FUCHSIA
	{ { VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME, VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION } },
	{ { VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME, VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION } },
#endif
	{ { VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME, VK_EXT_PROVOKING_VERTEX_SPEC_VERSION } },
#if !defined(__ANDROID__)
	{ { VK_GOOGLE_SAMPLER_FILTERING_PRECISION_EXTENSION_NAME, VK_GOOGLE_SAMPLER_FILTERING_PRECISION_SPEC_VERSION } },
#endif
	{ { VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME, VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION } },
#ifdef SWIFTSHADER_DEVICE_MEMORY_REPORT
	{ { VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME, VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION } },
#endif  // SWIFTSHADER_DEVICE_MEMORY_REPORT
	// Vulkan 1.2 promoted extensions
	{ { VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME, VK_EXT_HOST_QUERY_RESET_SPEC_VERSION } },
	{ { VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME, VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION } },
	{ { VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME, VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION } },
	{ { VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME, VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION } },
	{ { VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME, VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION } },
	{ { VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME, VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION } },
	{ { VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME, VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION } },
	{ { VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME, VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION } },
	{ { VK_KHR_SPIRV_1_4_EXTENSION_NAME, VK_KHR_SPIRV_1_4_SPEC_VERSION } },
	{ { VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME, VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION } },
	{ { VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME, VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION } },
	// Vulkan 1.3 promoted extensions
	{ { VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME, VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION } },
	{ { VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME, VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION } },
	{ { VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME, VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION } },
	{ { VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME, VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION } },
	{ { VK_EXT_PRIVATE_DATA_EXTENSION_NAME, VK_EXT_PRIVATE_DATA_SPEC_VERSION } },
	{ { VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME, VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION } },
	{ { VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME, VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION } },
	{ { VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME, VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION } },
	{ { VK_EXT_TOOLING_INFO_EXTENSION_NAME, VK_EXT_TOOLING_INFO_SPEC_VERSION } },
	{ { VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME, VK_KHR_COPY_COMMANDS_2_SPEC_VERSION } },
	{ { VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME, VK_KHR_DYNAMIC_RENDERING_SPEC_VERSION } },
	{ { VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME, VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION } },
	{ { VK_KHR_MAINTENANCE_4_EXTENSION_NAME, VK_KHR_MAINTENANCE_4_SPEC_VERSION } },
	{ { VK_KHR_SHADER_INTEGER_DOT_PRODUCT_EXTENSION_NAME, VK_KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION } },
	{ { VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME, VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION } },
	{ { VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME, VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION } },
	{ { VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME, VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION } },
	// Additional extensions
	{ { VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME, VK_GOOGLE_DECORATE_STRING_SPEC_VERSION } },
	{ { VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME, VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION } },
	{ { VK_GOOGLE_USER_TYPE_EXTENSION_NAME, VK_GOOGLE_USER_TYPE_SPEC_VERSION } },
	{ { VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME, VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION } },
	{ { VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME, VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION } },
	{ { VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME, VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION } },
};

static uint32_t numSupportedExtensions(const ExtensionProperties *extensionProperties, uint32_t extensionPropertiesCount)
{
	uint32_t count = 0;

	for(uint32_t i = 0; i < extensionPropertiesCount; i++)
	{
		if(extensionProperties[i].isSupported())
		{
			count++;
		}
	}

	return count;
}

static uint32_t numInstanceSupportedExtensions()
{
	return numSupportedExtensions(instanceExtensionProperties, sizeof(instanceExtensionProperties) / sizeof(instanceExtensionProperties[0]));
}

static uint32_t numDeviceSupportedExtensions()
{
	return numSupportedExtensions(deviceExtensionProperties, sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0]));
}

static bool hasExtension(const char *extensionName, const ExtensionProperties *extensionProperties, uint32_t extensionPropertiesCount)
{
	for(uint32_t i = 0; i < extensionPropertiesCount; i++)
	{
		if(strcmp(extensionName, extensionProperties[i].extensionName) == 0)
		{
			return extensionProperties[i].isSupported();
		}
	}

	return false;
}

static bool hasInstanceExtension(const char *extensionName)
{
	return hasExtension(extensionName, instanceExtensionProperties, sizeof(instanceExtensionProperties) / sizeof(instanceExtensionProperties[0]));
}

static bool hasDeviceExtension(const char *extensionName)
{
	return hasExtension(extensionName, deviceExtensionProperties, sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0]));
}

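// copyExtensions() writes the first `toCopy` supported entries of
// extensionProperties into pProperties, skipping entries whose isSupported()
// check fails.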
static void copyExtensions(VkExtensionProperties *pProperties, uint32_t toCopy, const ExtensionProperties *extensionProperties, uint32_t extensionPropertiesCount)
{
	for(uint32_t i = 0, j = 0; i < toCopy; i++, j++)
	{
		while((j < extensionPropertiesCount) && !extensionProperties[j].isSupported())
		{
			j++;
		}
		if(j < extensionPropertiesCount)
		{
			pProperties[i] = extensionProperties[j];
		}
	}
}

static void copyInstanceExtensions(VkExtensionProperties *pProperties, uint32_t toCopy)
{
	copyExtensions(pProperties, toCopy, instanceExtensionProperties, sizeof(instanceExtensionProperties) / sizeof(instanceExtensionProperties[0]));
}

static void copyDeviceExtensions(VkExtensionProperties *pProperties, uint32_t toCopy)
{
	copyExtensions(pProperties, toCopy, deviceExtensionProperties, sizeof(deviceExtensionProperties) / sizeof(deviceExtensionProperties[0]));
}

VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkInstance *pInstance)
{
	TRACE("(const VkInstanceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkInstance* pInstance = %p)",
		pCreateInfo, pAllocator, pInstance);

	initializeLibrary();

	if(pCreateInfo->flags != 0)
	{
		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
	}

	if(pCreateInfo->enabledLayerCount != 0)
	{
		UNIMPLEMENTED("b/148240133: pCreateInfo->enabledLayerCount != 0");  // FIXME(b/148240133)
	}

	for(uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; ++i)
	{
		if(!hasInstanceExtension(pCreateInfo->ppEnabledExtensionNames[i]))
		{
			return VK_ERROR_EXTENSION_NOT_PRESENT;
		}
	}

	VkDebugUtilsMessengerEXT messenger = { VK_NULL_HANDLE };
	if(pCreateInfo->pNext)
	{
		const VkBaseInStructure *createInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
		switch(createInfo->sType)
		{
		case VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT:
			{
				const VkDebugUtilsMessengerCreateInfoEXT *debugUtilsMessengerCreateInfoEXT = reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>(createInfo);
				VkResult result = vk::DebugUtilsMessenger::Create(pAllocator, debugUtilsMessengerCreateInfoEXT, &messenger);
				if(result != VK_SUCCESS)
				{
					return result;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO:
			// According to the Vulkan spec, section 2.7.2. Implicit Valid Usage:
			// "The values VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO and
			// VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO are reserved for
			// internal use by the loader, and do not have corresponding
			// Vulkan structures in this Specification."
			break;
		default:
			UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(createInfo->sType).c_str());
			break;
		}
	}

	*pInstance = VK_NULL_HANDLE;
	VkPhysicalDevice physicalDevice = VK_NULL_HANDLE;

	VkResult result = vk::DispatchablePhysicalDevice::Create(pAllocator, pCreateInfo, &physicalDevice);
	if(result != VK_SUCCESS)
	{
		vk::destroy(messenger, pAllocator);
		return result;
	}

	result = vk::DispatchableInstance::Create(pAllocator, pCreateInfo, pInstance, physicalDevice, vk::Cast(messenger));
	if(result != VK_SUCCESS)
	{
		vk::destroy(messenger, pAllocator);
		vk::destroy(physicalDevice, pAllocator);
		return result;
	}

	return result;
}

VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkInstance instance = %p, const VkAllocationCallbacks* pAllocator = %p)", instance, pAllocator);

	vk::destroy(instance, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount, VkPhysicalDevice *pPhysicalDevices)
{
	TRACE("(VkInstance instance = %p, uint32_t* pPhysicalDeviceCount = %p, VkPhysicalDevice* pPhysicalDevices = %p)",
		instance, pPhysicalDeviceCount, pPhysicalDevices);

	return vk::Cast(instance)->getPhysicalDevices(pPhysicalDeviceCount, pPhysicalDevices);
}

VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceFeatures* pFeatures = %p)",
		physicalDevice, pFeatures);

	*pFeatures = vk::Cast(physicalDevice)->getFeatures();
}

VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties *pFormatProperties)
{
	TRACE("GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkFormatProperties* pFormatProperties = %p)",
		physicalDevice, (int)format, pFormatProperties);

	vk::PhysicalDevice::GetFormatProperties(format, pFormatProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties *pImageFormatProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkImageType type = %d, VkImageTiling tiling = %d, VkImageUsageFlags usage = %d, VkImageCreateFlags flags = %d, VkImageFormatProperties* pImageFormatProperties = %p)",
		physicalDevice, (int)format, (int)type, (int)tiling, usage, flags, pImageFormatProperties);

	VkPhysicalDeviceImageFormatInfo2 info2 = {};
	info2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
	info2.pNext = nullptr;
	info2.format = format;
	info2.type = type;
	info2.tiling = tiling;
	info2.usage = usage;
	info2.flags = flags;

	VkImageFormatProperties2 properties2 = {};
	properties2.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
	properties2.pNext = nullptr;

	VkResult result = vkGetPhysicalDeviceImageFormatProperties2(physicalDevice, &info2, &properties2);

	*pImageFormatProperties = properties2.imageFormatProperties;

	return result;
}

VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceProperties* pProperties = %p)",
		physicalDevice, pProperties);

	*pProperties = vk::Cast(physicalDevice)->getProperties();
}

VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties *pQueueFamilyProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pQueueFamilyPropertyCount = %p, VkQueueFamilyProperties* pQueueFamilyProperties = %p)", physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);

	if(!pQueueFamilyProperties)
	{
		*pQueueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();
	}
	else
	{
		vk::Cast(physicalDevice)->getQueueFamilyProperties(*pQueueFamilyPropertyCount, pQueueFamilyProperties);
	}
}

VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties *pMemoryProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceMemoryProperties* pMemoryProperties = %p)", physicalDevice, pMemoryProperties);

	*pMemoryProperties = vk::PhysicalDevice::GetMemoryProperties();
}

VK_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char *pName)
{
	TRACE("(VkInstance instance = %p, const char* pName = %p)", instance, pName);

	return vk::GetInstanceProcAddr(vk::Cast(instance), pName);
}

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice device, const char *pName)
{
	TRACE("(VkDevice device = %p, const char* pName = %p)", device, pName);

	return vk::GetDeviceProcAddr(vk::Cast(device), pName);
}

VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDevice *pDevice)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, const VkDeviceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDevice* pDevice = %p)",
		physicalDevice, pCreateInfo, pAllocator, pDevice);

	if(pCreateInfo->flags != 0)
	{
		// Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
		UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
	}

	if(pCreateInfo->enabledLayerCount != 0)
	{
		// "The ppEnabledLayerNames and enabledLayerCount members of VkDeviceCreateInfo are deprecated and their values must be ignored by implementations."
		UNSUPPORTED("pCreateInfo->enabledLayerCount != 0");
	}

	for(uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; ++i)
	{
		if(!hasDeviceExtension(pCreateInfo->ppEnabledExtensionNames[i]))
		{
			return VK_ERROR_EXTENSION_NOT_PRESENT;
		}
	}

	const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);

	const VkPhysicalDeviceFeatures *enabledFeatures = pCreateInfo->pEnabledFeatures;

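	// Walk the pNext chain, validating each recognized structure. Any feature
	// request this implementation cannot honor returns VK_ERROR_FEATURE_NOT_PRESENT.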
	while(extensionCreateInfo)
	{
		switch(extensionCreateInfo->sType)
		{
		case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO:
			// According to the Vulkan spec, section 2.7.2. Implicit Valid Usage:
			// "The values VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO and
			// VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO are reserved for
			// internal use by the loader, and do not have corresponding
			// Vulkan structures in this Specification."
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
			{
				ASSERT(!pCreateInfo->pEnabledFeatures);  // "If the pNext chain includes a VkPhysicalDeviceFeatures2 structure, then pEnabledFeatures must be NULL"

				const VkPhysicalDeviceFeatures2 *physicalDeviceFeatures2 = reinterpret_cast<const VkPhysicalDeviceFeatures2 *>(extensionCreateInfo);

				enabledFeatures = &physicalDeviceFeatures2->features;
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES:
			{
				const VkPhysicalDeviceSamplerYcbcrConversionFeatures *samplerYcbcrConversionFeatures = reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures *>(extensionCreateInfo);

				// YCbCr conversion is supported.
				// samplerYcbcrConversionFeatures->samplerYcbcrConversion can be VK_TRUE or VK_FALSE.
				// No action needs to be taken on our end in either case; it's the app's responsibility that
				// "To create a sampler Y'CbCr conversion, the samplerYcbcrConversion feature must be enabled."
				(void)samplerYcbcrConversionFeatures->samplerYcbcrConversion;
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES:
			{
				const VkPhysicalDevice16BitStorageFeatures *storage16BitFeatures = reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures *>(extensionCreateInfo);

				if(storage16BitFeatures->storageBuffer16BitAccess != VK_FALSE ||
				   storage16BitFeatures->uniformAndStorageBuffer16BitAccess != VK_FALSE ||
				   storage16BitFeatures->storagePushConstant16 != VK_FALSE ||
				   storage16BitFeatures->storageInputOutput16 != VK_FALSE)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES:
			{
				const VkPhysicalDeviceVariablePointerFeatures *variablePointerFeatures = reinterpret_cast<const VkPhysicalDeviceVariablePointerFeatures *>(extensionCreateInfo);

				if(variablePointerFeatures->variablePointersStorageBuffer != VK_FALSE ||
				   variablePointerFeatures->variablePointers != VK_FALSE)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO:
			{
				const VkDeviceGroupDeviceCreateInfo *groupDeviceCreateInfo = reinterpret_cast<const VkDeviceGroupDeviceCreateInfo *>(extensionCreateInfo);

				if((groupDeviceCreateInfo->physicalDeviceCount != 1) ||
				   (groupDeviceCreateInfo->pPhysicalDevices[0] != physicalDevice))
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
			{
				const VkPhysicalDeviceMultiviewFeatures *multiviewFeatures = reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures *>(extensionCreateInfo);

				if(multiviewFeatures->multiviewGeometryShader ||
				   multiviewFeatures->multiviewTessellationShader)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES:
			{
				const VkPhysicalDeviceShaderDrawParametersFeatures *shaderDrawParametersFeatures = reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures *>(extensionCreateInfo);

				if(shaderDrawParametersFeatures->shaderDrawParameters)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES:
			{
				const VkPhysicalDeviceDynamicRenderingFeatures *dynamicRenderingFeatures = reinterpret_cast<const VkPhysicalDeviceDynamicRenderingFeatures *>(extensionCreateInfo);

				// Dynamic rendering is supported
				(void)(dynamicRenderingFeatures->dynamicRendering);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR:
			{
				const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *shaderDrawParametersFeatures = reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR *>(extensionCreateInfo);

				// Separate depth and stencil layouts is already supported
				(void)(shaderDrawParametersFeatures->separateDepthStencilLayouts);
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT:
			{
				const auto *lineRasterizationFeatures = reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(lineRasterizationFeatures);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT:
			{
				const VkPhysicalDeviceProvokingVertexFeaturesEXT *provokingVertexFeatures = reinterpret_cast<const VkPhysicalDeviceProvokingVertexFeaturesEXT *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(provokingVertexFeatures);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES:
			{
				const VkPhysicalDeviceImageRobustnessFeatures *imageRobustnessFeatures = reinterpret_cast<const VkPhysicalDeviceImageRobustnessFeatures *>(extensionCreateInfo);

				// We currently always provide robust image accesses. When the feature is disabled, results are
				// undefined (for images with Dim != Buffer), so providing robustness is also acceptable.
				// TODO(b/159329067): Only provide robustness when requested.
				(void)imageRobustnessFeatures->robustImageAccess;
			}
			break;
		// For unsupported structures, check that we don't expose the corresponding extension string:
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT:
			ASSERT(!hasDeviceExtension(VK_EXT_ROBUSTNESS_2_EXTENSION_NAME));
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR:
			{
				const VkPhysicalDeviceImagelessFramebufferFeaturesKHR *imagelessFramebufferFeatures = reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeaturesKHR *>(extensionCreateInfo);
				// Always provide Imageless Framebuffers
				(void)imagelessFramebufferFeatures->imagelessFramebuffer;
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES:
			{
				const VkPhysicalDeviceScalarBlockLayoutFeatures *scalarBlockLayoutFeatures = reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures *>(extensionCreateInfo);

				// VK_EXT_scalar_block_layout is supported, allowing C-like structure layout for SPIR-V blocks.
				(void)scalarBlockLayoutFeatures->scalarBlockLayout;
			}
			break;
#ifdef SWIFTSHADER_DEVICE_MEMORY_REPORT
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT:
			{
				const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *deviceMemoryReportFeatures = reinterpret_cast<const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *>(extensionCreateInfo);
				(void)deviceMemoryReportFeatures->deviceMemoryReport;
			}
			break;
#endif  // SWIFTSHADER_DEVICE_MEMORY_REPORT
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES:
			{
				const VkPhysicalDeviceHostQueryResetFeatures *hostQueryResetFeatures = reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures *>(extensionCreateInfo);

				// VK_EXT_host_query_reset is always enabled.
				(void)hostQueryResetFeatures->hostQueryReset;
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES:
			{
				const VkPhysicalDevicePipelineCreationCacheControlFeatures *pipelineCreationCacheControlFeatures = reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeatures *>(extensionCreateInfo);

				// VK_EXT_pipeline_creation_cache_control is always enabled.
				(void)pipelineCreationCacheControlFeatures->pipelineCreationCacheControl;
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES:
			{
				const auto *tsFeatures = reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures *>(extensionCreateInfo);

				// VK_KHR_timeline_semaphores is always enabled
				(void)tsFeatures->timelineSemaphore;
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT:
			{
				const auto *customBorderColorFeatures = reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT *>(extensionCreateInfo);

				// VK_EXT_custom_border_color is always enabled
				(void)customBorderColorFeatures->customBorderColors;
				(void)customBorderColorFeatures->customBorderColorWithoutFormat;
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES:
			{
				const auto *vk11Features = reinterpret_cast<const VkPhysicalDeviceVulkan11Features *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(vk11Features);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES:
			{
				const auto *vk12Features = reinterpret_cast<const VkPhysicalDeviceVulkan12Features *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(vk12Features);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES:
			{
				const auto *vk13Features = reinterpret_cast<const VkPhysicalDeviceVulkan13Features *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(vk13Features);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT:
			{
				const auto *depthClipFeatures = reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(depthClipFeatures);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT:
			{
				const auto *blendOpFeatures = reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(blendOpFeatures);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT:
			{
				const auto *dynamicStateFeatures = reinterpret_cast<const VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(dynamicStateFeatures);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES:
			{
				const auto *privateDataFeatures = reinterpret_cast<const VkPhysicalDevicePrivateDataFeatures *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(privateDataFeatures);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO:
			{
				const auto *privateDataCreateInfo = reinterpret_cast<const VkDevicePrivateDataCreateInfo *>(extensionCreateInfo);
				(void)privateDataCreateInfo->privateDataSlotRequestCount;
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES:
			{
				const auto *textureCompressionASTCHDRFeatures = reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeatures *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(textureCompressionASTCHDRFeatures);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES:
			{
				const auto *shaderDemoteToHelperInvocationFeatures = reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(shaderDemoteToHelperInvocationFeatures);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES:
			{
				const auto *shaderTerminateInvocationFeatures = reinterpret_cast<const VkPhysicalDeviceShaderTerminateInvocationFeatures *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(shaderTerminateInvocationFeatures);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES:
			{
				const auto *subgroupSizeControlFeatures = reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeatures *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(subgroupSizeControlFeatures);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES:
			{
				const auto *uniformBlockFeatures = reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeatures *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(uniformBlockFeatures);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES:
			{
				const auto *integerDotProductFeatures = reinterpret_cast<const VkPhysicalDeviceShaderIntegerDotProductFeatures *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(integerDotProductFeatures);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES:
			{
				const auto *zeroInitializeWorkgroupMemoryFeatures = reinterpret_cast<const VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures *>(extensionCreateInfo);
				bool hasFeatures = vk::Cast(physicalDevice)->hasExtendedFeatures(zeroInitializeWorkgroupMemoryFeatures);
				if(!hasFeatures)
				{
					return VK_ERROR_FEATURE_NOT_PRESENT;
				}
			}
			break;
		// These structs are supported, but no behavior changes based on their feature bools
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES:
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES:
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT:
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES:
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES:
			break;
		default:
			// "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extensions]"
			UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
			break;
		}

		extensionCreateInfo = extensionCreateInfo->pNext;
	}

	ASSERT(pCreateInfo->queueCreateInfoCount > 0);

	if(enabledFeatures)
	{
		if(!vk::Cast(physicalDevice)->hasFeatures(*enabledFeatures))
		{
			return VK_ERROR_FEATURE_NOT_PRESENT;
		}
	}

	uint32_t queueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();

	for(uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; i++)
	{
		const VkDeviceQueueCreateInfo &queueCreateInfo = pCreateInfo->pQueueCreateInfos[i];
		if(queueCreateInfo.flags != 0)
		{
			UNSUPPORTED("pCreateInfo->pQueueCreateInfos[%d]->flags %d", i, queueCreateInfo.flags);
		}

		auto extInfo = reinterpret_cast<VkBaseInStructure const *>(queueCreateInfo.pNext);
		while(extInfo)
		{
			UNSUPPORTED("pCreateInfo->pQueueCreateInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
			extInfo = extInfo->pNext;
		}

		ASSERT(queueCreateInfo.queueFamilyIndex < queueFamilyPropertyCount);
		(void)queueFamilyPropertyCount;  // Silence unused variable warning
	}

	auto scheduler = getOrCreateScheduler();
	return vk::DispatchableDevice::Create(pAllocator, pCreateInfo, pDevice, vk::Cast(physicalDevice), enabledFeatures, scheduler);
}

VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator)
{
	TRACE("(VkDevice device = %p, const VkAllocationCallbacks* pAllocator = %p)", device, pAllocator);

	vk::destroy(device, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties)
{
	TRACE("(const char* pLayerName = %p, uint32_t* pPropertyCount = %p, VkExtensionProperties* pProperties = %p)",
		pLayerName, pPropertyCount, pProperties);

	uint32_t extensionPropertiesCount = numInstanceSupportedExtensions();

	if(!pProperties)
	{
		*pPropertyCount = extensionPropertiesCount;
		return VK_SUCCESS;
	}

	auto toCopy = std::min(*pPropertyCount, extensionPropertiesCount);
	copyInstanceExtensions(pProperties, toCopy);

	*pPropertyCount = toCopy;
	return (toCopy < extensionPropertiesCount) ? VK_INCOMPLETE : VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice, const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties)
{
	TRACE("(VkPhysicalDevice physicalDevice = %p, const char* pLayerName, uint32_t* pPropertyCount = %p, VkExtensionProperties* pProperties = %p)", physicalDevice, pPropertyCount, pProperties);

	uint32_t extensionPropertiesCount = numDeviceSupportedExtensions();

	if(!pProperties)
	{
		*pPropertyCount = extensionPropertiesCount;
		return VK_SUCCESS;
	}

	auto toCopy = std::min(*pPropertyCount, extensionPropertiesCount);
	copyDeviceExtensions(pProperties, toCopy);

	*pPropertyCount = toCopy;
	return (toCopy < extensionPropertiesCount) ? VK_INCOMPLETE : VK_SUCCESS;
}

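// SwiftShader exposes no layers of its own, so both layer enumeration entry
// points report zero properties.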
vkEnumerateInstanceLayerProperties(uint32_t * pPropertyCount,VkLayerProperties * pProperties)1158 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t *pPropertyCount, VkLayerProperties *pProperties)
1159 {
1160 TRACE("(uint32_t* pPropertyCount = %p, VkLayerProperties* pProperties = %p)", pPropertyCount, pProperties);
1161
1162 if(!pProperties)
1163 {
1164 *pPropertyCount = 0;
1165 return VK_SUCCESS;
1166 }
1167
1168 return VK_SUCCESS;
1169 }
1170
vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,uint32_t * pPropertyCount,VkLayerProperties * pProperties)1171 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkLayerProperties *pProperties)
1172 {
1173 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pPropertyCount = %p, VkLayerProperties* pProperties = %p)", physicalDevice, pPropertyCount, pProperties);
1174
1175 if(!pProperties)
1176 {
1177 *pPropertyCount = 0;
1178 return VK_SUCCESS;
1179 }
1180
1181 return VK_SUCCESS;
1182 }
1183
vkGetDeviceQueue(VkDevice device,uint32_t queueFamilyIndex,uint32_t queueIndex,VkQueue * pQueue)1184 VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue)
1185 {
1186 TRACE("(VkDevice device = %p, uint32_t queueFamilyIndex = %d, uint32_t queueIndex = %d, VkQueue* pQueue = %p)",
1187 device, queueFamilyIndex, queueIndex, pQueue);
1188
1189 *pQueue = vk::Cast(device)->getQueue(queueFamilyIndex, queueIndex);
1190 }
1191
vkQueueSubmit(VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmits,VkFence fence)1192 VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence)
1193 {
1194 TRACE("(VkQueue queue = %p, uint32_t submitCount = %d, const VkSubmitInfo* pSubmits = %p, VkFence fence = %p)",
1195 queue, submitCount, pSubmits, static_cast<void *>(fence));
1196
1197 return vk::Cast(queue)->submit(submitCount, vk::SubmitInfo::Allocate(submitCount, pSubmits), vk::Cast(fence));
1198 }
1199
1200 VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 *pSubmits, VkFence fence)
1201 {
1202 TRACE("(VkQueue queue = %p, uint32_t submitCount = %d, const VkSubmitInfo2* pSubmits = %p, VkFence fence = %p)",
1203 queue, submitCount, pSubmits, static_cast<void *>(fence));
1204
1205 return vk::Cast(queue)->submit(submitCount, vk::SubmitInfo::Allocate(submitCount, pSubmits), vk::Cast(fence));
1206 }
1207
1208 VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(VkQueue queue)
1209 {
1210 TRACE("(VkQueue queue = %p)", queue);
1211
1212 return vk::Cast(queue)->waitIdle();
1213 }
1214
1215 VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(VkDevice device)
1216 {
1217 TRACE("(VkDevice device = %p)", device);
1218
1219 return vk::Cast(device)->waitIdle();
1220 }
1221
1222 VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo, const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory)
1223 {
1224 TRACE("(VkDevice device = %p, const VkMemoryAllocateInfo* pAllocateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDeviceMemory* pMemory = %p)",
1225 device, pAllocateInfo, pAllocator, pMemory);
1226
1227 VkResult result = vk::DeviceMemory::Allocate(pAllocator, pAllocateInfo, pMemory, vk::Cast(device));
1228
1229 if(result != VK_SUCCESS)
1230 {
1231 vk::destroy(*pMemory, pAllocator);
1232 *pMemory = VK_NULL_HANDLE;
1233 }
1234
1235 return result;
1236 }
1237
1238 VKAPI_ATTR void VKAPI_CALL vkFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator)
1239 {
1240 TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, const VkAllocationCallbacks* pAllocator = %p)",
1241 device, static_cast<void *>(memory), pAllocator);
1242
1243 vk::destroy(memory, pAllocator);
1244 }
1245
1246 #if SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
1247 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR(VkDevice device, const VkMemoryGetFdInfoKHR *getFdInfo, int *pFd)
1248 {
1249 	TRACE("(VkDevice device = %p, const VkMemoryGetFdInfoKHR* getFdInfo = %p, int* pFd = %p)",
1250 device, getFdInfo, pFd);
1251
1252 if(getFdInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT)
1253 {
1254 		UNSUPPORTED("getFdInfo->handleType %u", getFdInfo->handleType);
1255 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1256 }
1257 return vk::Cast(getFdInfo->memory)->exportFd(pFd);
1258 }
1259
1260 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR *pMemoryFdProperties)
1261 {
1262 TRACE("(VkDevice device = %p, VkExternalMemoryHandleTypeFlagBits handleType = %x, int fd = %d, VkMemoryFdPropertiesKHR* pMemoryFdProperties = %p)",
1263 device, handleType, fd, pMemoryFdProperties);
1264
1265 if(handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT)
1266 {
1267 UNSUPPORTED("handleType %u", handleType);
1268 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1269 }
1270
1271 if(fd < 0)
1272 {
1273 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1274 }
1275
1276 const VkPhysicalDeviceMemoryProperties &memoryProperties =
1277 vk::PhysicalDevice::GetMemoryProperties();
1278
1279 // All SwiftShader memory types support this!
1280 pMemoryFdProperties->memoryTypeBits = (1U << memoryProperties.memoryTypeCount) - 1U;
1281
1282 return VK_SUCCESS;
1283 }
1284 #endif // SWIFTSHADER_EXTERNAL_MEMORY_OPAQUE_FD
1285 #if VK_USE_PLATFORM_FUCHSIA
1286 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandleFUCHSIA(VkDevice device, const VkMemoryGetZirconHandleInfoFUCHSIA *pGetHandleInfo, zx_handle_t *pHandle)
1287 {
1288 	TRACE("(VkDevice device = %p, const VkMemoryGetZirconHandleInfoFUCHSIA* pGetHandleInfo = %p, zx_handle_t* pHandle = %p)",
1289 device, pGetHandleInfo, pHandle);
1290
1291 if(pGetHandleInfo->handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA)
1292 {
1293 UNSUPPORTED("pGetHandleInfo->handleType %u", pGetHandleInfo->handleType);
1294 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1295 }
1296 return vk::Cast(pGetHandleInfo->memory)->exportHandle(pHandle);
1297 }
1298
1299 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandlePropertiesFUCHSIA(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, zx_handle_t handle, VkMemoryZirconHandlePropertiesFUCHSIA *pMemoryZirconHandleProperties)
1300 {
1301 TRACE("(VkDevice device = %p, VkExternalMemoryHandleTypeFlagBits handleType = %x, zx_handle_t handle = %d, VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties = %p)",
1302 device, handleType, handle, pMemoryZirconHandleProperties);
1303
1304 if(handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA)
1305 {
1306 UNSUPPORTED("handleType %u", handleType);
1307 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1308 }
1309
1310 if(handle == ZX_HANDLE_INVALID)
1311 {
1312 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1313 }
1314
1315 const VkPhysicalDeviceMemoryProperties &memoryProperties =
1316 vk::PhysicalDevice::GetMemoryProperties();
1317
1318 // All SwiftShader memory types support this!
1319 pMemoryZirconHandleProperties->memoryTypeBits = (1U << memoryProperties.memoryTypeCount) - 1U;
1320
1321 return VK_SUCCESS;
1322 }
1323 #endif // VK_USE_PLATFORM_FUCHSIA
1324
1325 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void *pHostPointer, VkMemoryHostPointerPropertiesEXT *pMemoryHostPointerProperties)
1326 {
1327 TRACE("(VkDevice device = %p, VkExternalMemoryHandleTypeFlagBits handleType = %x, const void *pHostPointer = %p, VkMemoryHostPointerPropertiesEXT *pMemoryHostPointerProperties = %p)",
1328 device, handleType, pHostPointer, pMemoryHostPointerProperties);
1329
1330 if(handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT && handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT)
1331 {
1332 UNSUPPORTED("handleType %u", handleType);
1333 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1334 }
1335 pMemoryHostPointerProperties->memoryTypeBits = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
1336
1337 return VK_SUCCESS;
1338 }
1339
1340 #if SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
1341 VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo, struct AHardwareBuffer **pBuffer)
1342 {
1343 TRACE("(VkDevice device = %p, const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo = %p, struct AHardwareBuffer **pBuffer = %p)",
1344 device, pInfo, pBuffer);
1345
1346 return vk::Cast(pInfo->memory)->exportAndroidHardwareBuffer(pBuffer);
1347 }
1348
1349 VKAPI_ATTR VkResult VKAPI_CALL vkGetAndroidHardwareBufferPropertiesANDROID(VkDevice device, const struct AHardwareBuffer *buffer, VkAndroidHardwareBufferPropertiesANDROID *pProperties)
1350 {
1351 TRACE("(VkDevice device = %p, const struct AHardwareBuffer *buffer = %p, VkAndroidHardwareBufferPropertiesANDROID *pProperties = %p)",
1352 device, buffer, pProperties);
1353
1354 return vk::DeviceMemory::GetAndroidHardwareBufferProperties(device, buffer, pProperties);
1355 }
1356 #endif // SWIFTSHADER_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER
1357
1358 VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void **ppData)
1359 {
1360 TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, VkDeviceSize offset = %d, VkDeviceSize size = %d, VkMemoryMapFlags flags = %d, void** ppData = %p)",
1361 device, static_cast<void *>(memory), int(offset), int(size), flags, ppData);
1362
1363 if(flags != 0)
1364 {
1365 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1366 UNSUPPORTED("flags %d", int(flags));
1367 }
1368
1369 return vk::Cast(memory)->map(offset, size, ppData);
1370 }
1371
1372 VKAPI_ATTR void VKAPI_CALL vkUnmapMemory(VkDevice device, VkDeviceMemory memory)
1373 {
1374 TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p)", device, static_cast<void *>(memory));
1375
1376 // Noop, memory will be released when the DeviceMemory object is released
1377 }
1378
1379 VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
1380 {
1381 TRACE("(VkDevice device = %p, uint32_t memoryRangeCount = %d, const VkMappedMemoryRange* pMemoryRanges = %p)",
1382 device, memoryRangeCount, pMemoryRanges);
1383
1384 // Noop, host and device memory are the same to SwiftShader
1385
1386 return VK_SUCCESS;
1387 }
1388
1389 VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges)
1390 {
1391 TRACE("(VkDevice device = %p, uint32_t memoryRangeCount = %d, const VkMappedMemoryRange* pMemoryRanges = %p)",
1392 device, memoryRangeCount, pMemoryRanges);
1393
1394 // Noop, host and device memory are the same to SwiftShader
1395
1396 return VK_SUCCESS;
1397 }
1398
1399 VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment(VkDevice pDevice, VkDeviceMemory pMemory, VkDeviceSize *pCommittedMemoryInBytes)
1400 {
1401 TRACE("(VkDevice device = %p, VkDeviceMemory memory = %p, VkDeviceSize* pCommittedMemoryInBytes = %p)",
1402 pDevice, static_cast<void *>(pMemory), pCommittedMemoryInBytes);
1403
1404 auto memory = vk::Cast(pMemory);
1405
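	// Valid usage: the memory must have been allocated from a memory type that reports VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT (checked in debug builds below).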
1406 #if !defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)
1407 const auto &memoryProperties = vk::PhysicalDevice::GetMemoryProperties();
1408 uint32_t typeIndex = memory->getMemoryTypeIndex();
1409 ASSERT(typeIndex < memoryProperties.memoryTypeCount);
1410 ASSERT(memoryProperties.memoryTypes[typeIndex].propertyFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT);
1411 #endif
1412
1413 *pCommittedMemoryInBytes = memory->getCommittedMemoryInBytes();
1414 }
1415
1416 VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset)
1417 {
1418 TRACE("(VkDevice device = %p, VkBuffer buffer = %p, VkDeviceMemory memory = %p, VkDeviceSize memoryOffset = %d)",
1419 device, static_cast<void *>(buffer), static_cast<void *>(memory), int(memoryOffset));
1420
1421 if(!vk::Cast(buffer)->canBindToMemory(vk::Cast(memory)))
1422 {
1423 UNSUPPORTED("vkBindBufferMemory with invalid external memory");
1424 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1425 }
1426 vk::Cast(buffer)->bind(vk::Cast(memory), memoryOffset);
1427 return VK_SUCCESS;
1428 }
1429
1430 VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset)
1431 {
1432 TRACE("(VkDevice device = %p, VkImage image = %p, VkDeviceMemory memory = %p, VkDeviceSize memoryOffset = %d)",
1433 device, static_cast<void *>(image), static_cast<void *>(memory), int(memoryOffset));
1434
1435 if(!vk::Cast(image)->canBindToMemory(vk::Cast(memory)))
1436 {
1437 UNSUPPORTED("vkBindImageMemory with invalid external memory");
1438 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1439 }
1440 vk::Cast(image)->bind(vk::Cast(memory), memoryOffset);
1441 return VK_SUCCESS;
1442 }
1443
1444 VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements)
1445 {
1446 TRACE("(VkDevice device = %p, VkBuffer buffer = %p, VkMemoryRequirements* pMemoryRequirements = %p)",
1447 device, static_cast<void *>(buffer), pMemoryRequirements);
1448
1449 *pMemoryRequirements = vk::Cast(buffer)->getMemoryRequirements();
1450 }
1451
1452 VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements)
1453 {
1454 TRACE("(VkDevice device = %p, VkImage image = %p, VkMemoryRequirements* pMemoryRequirements = %p)",
1455 device, static_cast<void *>(image), pMemoryRequirements);
1456
1457 *pMemoryRequirements = vk::Cast(image)->getMemoryRequirements();
1458 }
1459
1460 VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
1461 {
1462 TRACE("(VkDevice device = %p, VkImage image = %p, uint32_t* pSparseMemoryRequirementCount = %p, VkSparseImageMemoryRequirements* pSparseMemoryRequirements = %p)",
1463 device, static_cast<void *>(image), pSparseMemoryRequirementCount, pSparseMemoryRequirements);
1464
1465 	// The 'sparseBinding' feature is not supported, so images cannot be created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag.
1466 // "If the image was not created with VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT then pSparseMemoryRequirementCount will be set to zero and pSparseMemoryRequirements will not be written to."
1467 *pSparseMemoryRequirementCount = 0;
1468 }
1469
1470 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t *pPropertyCount, VkSparseImageFormatProperties *pProperties)
1471 {
1472 TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkImageType type = %d, VkSampleCountFlagBits samples = %d, VkImageUsageFlags usage = %d, VkImageTiling tiling = %d, uint32_t* pPropertyCount = %p, VkSparseImageFormatProperties* pProperties = %p)",
1473 physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
1474
1475 // We do not support sparse images.
1476 *pPropertyCount = 0;
1477 }
1478
1479 VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo, VkFence fence)
1480 {
1481 TRACE("()");
1482 UNSUPPORTED("vkQueueBindSparse");
1483 return VK_SUCCESS;
1484 }
1485
1486 VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence(VkDevice device, const VkFenceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkFence *pFence)
1487 {
1488 TRACE("(VkDevice device = %p, const VkFenceCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkFence* pFence = %p)",
1489 device, pCreateInfo, pAllocator, pFence);
1490
1491 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1492 while(nextInfo)
1493 {
1494 switch(nextInfo->sType)
1495 {
1496 case VK_STRUCTURE_TYPE_MAX_ENUM:
1497 // dEQP tests that this value is ignored.
1498 break;
1499 default:
1500 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
1501 break;
1502 }
1503 nextInfo = nextInfo->pNext;
1504 }
1505
1506 return vk::Fence::Create(pAllocator, pCreateInfo, pFence);
1507 }
1508
1509 VKAPI_ATTR void VKAPI_CALL vkDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator)
1510 {
1511 TRACE("(VkDevice device = %p, VkFence fence = %p, const VkAllocationCallbacks* pAllocator = %p)",
1512 device, static_cast<void *>(fence), pAllocator);
1513
1514 vk::destroy(fence, pAllocator);
1515 }
1516
1517 VKAPI_ATTR VkResult VKAPI_CALL vkResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences)
1518 {
1519 TRACE("(VkDevice device = %p, uint32_t fenceCount = %d, const VkFence* pFences = %p)",
1520 device, fenceCount, pFences);
1521
1522 for(uint32_t i = 0; i < fenceCount; i++)
1523 {
1524 vk::Cast(pFences[i])->reset();
1525 }
1526
1527 return VK_SUCCESS;
1528 }
1529
1530 VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus(VkDevice device, VkFence fence)
1531 {
1532 TRACE("(VkDevice device = %p, VkFence fence = %p)", device, static_cast<void *>(fence));
1533
1534 return vk::Cast(fence)->getStatus();
1535 }
1536
1537 VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll, uint64_t timeout)
1538 {
1539 TRACE("(VkDevice device = %p, uint32_t fenceCount = %d, const VkFence* pFences = %p, VkBool32 waitAll = %d, uint64_t timeout = %" PRIu64 ")",
1540 device, int(fenceCount), pFences, int(waitAll), timeout);
1541
1542 return vk::Cast(device)->waitForFences(fenceCount, pFences, waitAll, timeout);
1543 }
1544
1545 VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore)
1546 {
1547 TRACE("(VkDevice device = %p, const VkSemaphoreCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSemaphore* pSemaphore = %p)",
1548 device, pCreateInfo, pAllocator, pSemaphore);
1549
1550 if(pCreateInfo->flags != 0)
1551 {
1552 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1553 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1554 }
1555
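	// Create a binary semaphore unless a VkSemaphoreTypeCreateInfo in the pNext chain requests a timeline semaphore.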
1556 VkSemaphoreType type = VK_SEMAPHORE_TYPE_BINARY;
1557 for(const auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1558 nextInfo != nullptr; nextInfo = nextInfo->pNext)
1559 {
1560 switch(nextInfo->sType)
1561 {
1562 case VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO:
1563 // Let the semaphore constructor handle this
1564 break;
1565 case VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO:
1566 {
1567 const VkSemaphoreTypeCreateInfo *info = reinterpret_cast<const VkSemaphoreTypeCreateInfo *>(nextInfo);
1568 type = info->semaphoreType;
1569 }
1570 break;
1571 default:
1572 WARN("nextInfo->sType = %s", vk::Stringify(nextInfo->sType).c_str());
1573 break;
1574 }
1575 }
1576
1577 if(type == VK_SEMAPHORE_TYPE_BINARY)
1578 {
1579 return vk::BinarySemaphore::Create(pAllocator, pCreateInfo, pSemaphore, pAllocator);
1580 }
1581 else
1582 {
1583 return vk::TimelineSemaphore::Create(pAllocator, pCreateInfo, pSemaphore, pAllocator);
1584 }
1585 }
1586
1587 VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator)
1588 {
1589 TRACE("(VkDevice device = %p, VkSemaphore semaphore = %p, const VkAllocationCallbacks* pAllocator = %p)",
1590 device, static_cast<void *>(semaphore), pAllocator);
1591
1592 vk::destroy(semaphore, pAllocator);
1593 }
1594
1595 #if SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
1596 VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd)
1597 {
1598 TRACE("(VkDevice device = %p, const VkSemaphoreGetFdInfoKHR* pGetFdInfo = %p, int* pFd = %p)",
1599 device, static_cast<const void *>(pGetFdInfo), static_cast<void *>(pFd));
1600
1601 if(pGetFdInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT)
1602 {
1603 UNSUPPORTED("pGetFdInfo->handleType %d", int(pGetFdInfo->handleType));
1604 }
1605
1606 auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pGetFdInfo->semaphore);
1607 ASSERT(sem != nullptr);
1608 return sem->exportFd(pFd);
1609 }
1610
1611 VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR(VkDevice device, const VkImportSemaphoreFdInfoKHR *pImportSemaphoreInfo)
1612 {
1613 	TRACE("(VkDevice device = %p, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreInfo = %p)",
1614 device, static_cast<const void *>(pImportSemaphoreInfo));
1615
1616 if(pImportSemaphoreInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT)
1617 {
1618 UNSUPPORTED("pImportSemaphoreInfo->handleType %d", int(pImportSemaphoreInfo->handleType));
1619 }
1620 bool temporaryImport = (pImportSemaphoreInfo->flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) != 0;
1621
1622 auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pImportSemaphoreInfo->semaphore);
1623 ASSERT(sem != nullptr);
1624 return sem->importFd(pImportSemaphoreInfo->fd, temporaryImport);
1625 }
1626 #endif // SWIFTSHADER_EXTERNAL_SEMAPHORE_OPAQUE_FD
1627
1628 #if VK_USE_PLATFORM_FUCHSIA
1629 VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreZirconHandleFUCHSIA(
1630 VkDevice device,
1631 const VkImportSemaphoreZirconHandleInfoFUCHSIA *pImportSemaphoreZirconHandleInfo)
1632 {
1633 TRACE("(VkDevice device = %p, const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo = %p)",
1634 device, pImportSemaphoreZirconHandleInfo);
1635
1636 if(pImportSemaphoreZirconHandleInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA)
1637 {
1638 UNSUPPORTED("pImportSemaphoreZirconHandleInfo->handleType %d", int(pImportSemaphoreZirconHandleInfo->handleType));
1639 }
1640 bool temporaryImport = (pImportSemaphoreZirconHandleInfo->flags & VK_SEMAPHORE_IMPORT_TEMPORARY_BIT) != 0;
1641 auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pImportSemaphoreZirconHandleInfo->semaphore);
1642 ASSERT(sem != nullptr);
1643 return sem->importHandle(pImportSemaphoreZirconHandleInfo->zirconHandle, temporaryImport);
1644 }
1645
1646 VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreZirconHandleFUCHSIA(
1647 VkDevice device,
1648 const VkSemaphoreGetZirconHandleInfoFUCHSIA *pGetZirconHandleInfo,
1649 zx_handle_t *pZirconHandle)
1650 {
1651 TRACE("(VkDevice device = %p, const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo = %p, zx_handle_t* pZirconHandle = %p)",
1652 device, static_cast<const void *>(pGetZirconHandleInfo), static_cast<void *>(pZirconHandle));
1653
1654 if(pGetZirconHandleInfo->handleType != VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA)
1655 {
1656 UNSUPPORTED("pGetZirconHandleInfo->handleType %d", int(pGetZirconHandleInfo->handleType));
1657 }
1658
1659 auto *sem = vk::DynamicCast<vk::BinarySemaphore>(pGetZirconHandleInfo->semaphore);
1660 ASSERT(sem != nullptr);
1661 return sem->exportHandle(pZirconHandle);
1662 }
1663 #endif // VK_USE_PLATFORM_FUCHSIA
1664
1665 VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue)
1666 {
1667 TRACE("(VkDevice device = %p, VkSemaphore semaphore = %p, uint64_t* pValue = %p)",
1668 device, static_cast<void *>(semaphore), pValue);
1669 *pValue = vk::DynamicCast<vk::TimelineSemaphore>(semaphore)->getCounterValue();
1670 return VK_SUCCESS;
1671 }
1672
1673 VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphore(VkDevice device, const VkSemaphoreSignalInfo *pSignalInfo)
1674 {
1675 TRACE("(VkDevice device = %p, const VkSemaphoreSignalInfo *pSignalInfo = %p)",
1676 device, pSignalInfo);
1677 vk::DynamicCast<vk::TimelineSemaphore>(pSignalInfo->semaphore)->signal(pSignalInfo->value);
1678 return VK_SUCCESS;
1679 }
1680
1681 VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout)
1682 {
1683 TRACE("(VkDevice device = %p, const VkSemaphoreWaitInfo *pWaitInfo = %p, uint64_t timeout = %" PRIu64 ")",
1684 device, pWaitInfo, timeout);
1685 return vk::Cast(device)->waitForSemaphores(pWaitInfo, timeout);
1686 }
1687
1688 VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent(VkDevice device, const VkEventCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkEvent *pEvent)
1689 {
1690 TRACE("(VkDevice device = %p, const VkEventCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkEvent* pEvent = %p)",
1691 device, pCreateInfo, pAllocator, pEvent);
1692
1693 // VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR is provided by VK_KHR_synchronization2
1694 if((pCreateInfo->flags != 0) && (pCreateInfo->flags != VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR))
1695 {
1696 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1697 }
1698
1699 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
1700 while(extInfo)
1701 {
1702 // Vulkan 1.2: "pNext must be NULL"
1703 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1704 extInfo = extInfo->pNext;
1705 }
1706
1707 return vk::Event::Create(pAllocator, pCreateInfo, pEvent);
1708 }
1709
1710 VKAPI_ATTR void VKAPI_CALL vkDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator)
1711 {
1712 TRACE("(VkDevice device = %p, VkEvent event = %p, const VkAllocationCallbacks* pAllocator = %p)",
1713 device, static_cast<void *>(event), pAllocator);
1714
1715 vk::destroy(event, pAllocator);
1716 }
1717
1718 VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus(VkDevice device, VkEvent event)
1719 {
1720 TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));
1721
1722 return vk::Cast(event)->getStatus();
1723 }
1724
1725 VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent(VkDevice device, VkEvent event)
1726 {
1727 TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));
1728
1729 vk::Cast(event)->signal();
1730
1731 return VK_SUCCESS;
1732 }
1733
1734 VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent(VkDevice device, VkEvent event)
1735 {
1736 TRACE("(VkDevice device = %p, VkEvent event = %p)", device, static_cast<void *>(event));
1737
1738 vk::Cast(event)->reset();
1739
1740 return VK_SUCCESS;
1741 }
1742
1743 VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool)
1744 {
1745 TRACE("(VkDevice device = %p, const VkQueryPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkQueryPool* pQueryPool = %p)",
1746 device, pCreateInfo, pAllocator, pQueryPool);
1747
1748 if(pCreateInfo->flags != 0)
1749 {
1750 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1751 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1752 }
1753
1754 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
1755 while(extInfo)
1756 {
1757 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1758 extInfo = extInfo->pNext;
1759 }
1760
1761 return vk::QueryPool::Create(pAllocator, pCreateInfo, pQueryPool);
1762 }
1763
1764 VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator)
1765 {
1766 TRACE("(VkDevice device = %p, VkQueryPool queryPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
1767 device, static_cast<void *>(queryPool), pAllocator);
1768
1769 vk::destroy(queryPool, pAllocator);
1770 }
1771
1772 VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void *pData, VkDeviceSize stride, VkQueryResultFlags flags)
1773 {
1774 TRACE("(VkDevice device = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d, size_t dataSize = %d, void* pData = %p, VkDeviceSize stride = %d, VkQueryResultFlags flags = %d)",
1775 device, static_cast<void *>(queryPool), int(firstQuery), int(queryCount), int(dataSize), pData, int(stride), flags);
1776
1777 return vk::Cast(queryPool)->getResults(firstQuery, queryCount, dataSize, pData, stride, flags);
1778 }
1779
1780 VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(VkDevice device, const VkBufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer)
1781 {
1782 TRACE("(VkDevice device = %p, const VkBufferCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkBuffer* pBuffer = %p)",
1783 device, pCreateInfo, pAllocator, pBuffer);
1784
1785 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1786 while(nextInfo)
1787 {
1788 switch(nextInfo->sType)
1789 {
1790 case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO:
1791 // Do nothing. Should be handled by vk::Buffer::Create().
1792 break;
1793 case VK_STRUCTURE_TYPE_MAX_ENUM:
1794 // dEQP tests that this value is ignored.
1795 break;
1796 default:
1797 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
1798 break;
1799 }
1800 nextInfo = nextInfo->pNext;
1801 }
1802
1803 return vk::Buffer::Create(pAllocator, pCreateInfo, pBuffer);
1804 }
1805
1806 VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator)
1807 {
1808 TRACE("(VkDevice device = %p, VkBuffer buffer = %p, const VkAllocationCallbacks* pAllocator = %p)",
1809 device, static_cast<void *>(buffer), pAllocator);
1810
1811 vk::destroy(buffer, pAllocator);
1812 }
1813
1814 VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferDeviceAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo)
1815 {
1816 TRACE("(VkDevice device = %p, const VkBufferDeviceAddressInfo* pInfo = %p)",
1817 device, pInfo);
1818 UNSUPPORTED("VK_KHR_buffer_device_address");
1819 return 0;
1820 }
1821
1822 VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo)
1823 {
1824 TRACE("(VkDevice device = %p, const VkBufferDeviceAddressInfo* pInfo = %p)",
1825 device, pInfo);
1826 UNSUPPORTED("VK_KHR_buffer_device_address");
1827 return 0;
1828 }
1829
1830 VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddress(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo *pInfo)
1831 {
1832 TRACE("(VkDevice device = %p, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo = %p)",
1833 device, pInfo);
1834 UNSUPPORTED("VK_KHR_buffer_device_address");
1835 return 0;
1836 }
1837
1838 VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBufferView *pView)
1839 {
1840 TRACE("(VkDevice device = %p, const VkBufferViewCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkBufferView* pView = %p)",
1841 device, pCreateInfo, pAllocator, pView);
1842
1843 if(pCreateInfo->flags != 0)
1844 {
1845 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
1846 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
1847 }
1848
1849 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
1850 while(extInfo)
1851 {
1852 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
1853 extInfo = extInfo->pNext;
1854 }
1855
1856 return vk::BufferView::Create(pAllocator, pCreateInfo, pView);
1857 }
1858
1859 VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator)
1860 {
1861 TRACE("(VkDevice device = %p, VkBufferView bufferView = %p, const VkAllocationCallbacks* pAllocator = %p)",
1862 device, static_cast<void *>(bufferView), pAllocator);
1863
1864 vk::destroy(bufferView, pAllocator);
1865 }
1866
1867 VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(VkDevice device, const VkImageCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkImage *pImage)
1868 {
1869 TRACE("(VkDevice device = %p, const VkImageCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkImage* pImage = %p)",
1870 device, pCreateInfo, pAllocator, pImage);
1871
1872 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
1873
1874 #ifdef __ANDROID__
1875 vk::BackingMemory backmem;
1876 bool swapchainImage = false;
1877 #endif
1878
1879 while(extensionCreateInfo)
1880 {
1881 // Casting to an int since some structures, such as VK_STRUCTURE_TYPE_SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID and
1882 // VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID, are not enumerated in the official Vulkan headers.
1883 switch((int)(extensionCreateInfo->sType))
1884 {
1885 #ifdef __ANDROID__
1886 case VK_STRUCTURE_TYPE_SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID:
1887 {
1888 const VkSwapchainImageCreateInfoANDROID *swapImageCreateInfo = reinterpret_cast<const VkSwapchainImageCreateInfoANDROID *>(extensionCreateInfo);
1889 backmem.androidUsage = swapImageCreateInfo->usage;
1890 }
1891 break;
1892 case VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID:
1893 {
1894 const VkNativeBufferANDROID *nativeBufferInfo = reinterpret_cast<const VkNativeBufferANDROID *>(extensionCreateInfo);
1895 backmem.nativeHandle = nativeBufferInfo->handle;
1896 backmem.stride = nativeBufferInfo->stride;
1897 swapchainImage = true;
1898 }
1899 break;
1900 case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
1901 break;
1902 case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID:
1903 // Do nothing. Should be handled by vk::Image::Create()
1904 break;
1905 #endif
1906 case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
1907 // Do nothing. Should be handled by vk::Image::Create()
1908 break;
1909 case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
1910 /* Do nothing. We don't actually need the swapchain handle yet; we'll do all the work in vkBindImageMemory2. */
1911 break;
1912 case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
1913 // Do nothing. This extension tells the driver which image formats will be used
1914 			// by the application. SwiftShader is not impacted by the lack of this information,
1915 // so we don't need to track the format list.
1916 break;
1917 case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
1918 {
1919 // SwiftShader does not use an image's usage info for non-debug purposes outside of
1920 // vkGetPhysicalDeviceImageFormatProperties2. This also applies to separate stencil usage.
1921 const VkImageStencilUsageCreateInfo *stencilUsageInfo = reinterpret_cast<const VkImageStencilUsageCreateInfo *>(extensionCreateInfo);
1922 (void)stencilUsageInfo->stencilUsage;
1923 }
1924 break;
1925 case VK_STRUCTURE_TYPE_MAX_ENUM:
1926 // dEQP tests that this value is ignored.
1927 break;
1928 default:
1929 			// "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extensions]"
1930 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
1931 break;
1932 }
1933
1934 extensionCreateInfo = extensionCreateInfo->pNext;
1935 }
1936
1937 VkResult result = vk::Image::Create(pAllocator, pCreateInfo, pImage, vk::Cast(device));
1938
1939 #ifdef __ANDROID__
1940 if(swapchainImage)
1941 {
1942 if(result != VK_SUCCESS)
1943 {
1944 return result;
1945 }
1946
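		// Swapchain images own their backing memory: allocate and bind it here, and mark it as external so vkDestroyImage frees it.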
1947 vk::Image *image = vk::Cast(*pImage);
1948 VkMemoryRequirements memRequirements = image->getMemoryRequirements();
1949
1950 VkMemoryAllocateInfo allocInfo = {};
1951 allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1952 allocInfo.allocationSize = memRequirements.size;
1953 allocInfo.memoryTypeIndex = 0;
1954
1955 VkDeviceMemory devmem = { VK_NULL_HANDLE };
1956 result = vkAllocateMemory(device, &allocInfo, pAllocator, &devmem);
1957 if(result != VK_SUCCESS)
1958 {
1959 return result;
1960 }
1961
1962 vkBindImageMemory(device, *pImage, devmem, 0);
1963 backmem.externalMemory = true;
1964
1965 image->setBackingMemory(backmem);
1966 }
1967 #endif
1968
1969 return result;
1970 }
1971
1972 VKAPI_ATTR void VKAPI_CALL vkDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator)
1973 {
1974 TRACE("(VkDevice device = %p, VkImage image = %p, const VkAllocationCallbacks* pAllocator = %p)",
1975 device, static_cast<void *>(image), pAllocator);
1976
1977 #ifdef __ANDROID__
1978 vk::Image *img = vk::Cast(image);
1979 if(img && img->hasExternalMemory())
1980 {
1981 vk::destroy(img->getExternalMemory(), pAllocator);
1982 }
1983 #endif
1984
1985 vk::destroy(image, pAllocator);
1986 }
1987
1988 VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource *pSubresource, VkSubresourceLayout *pLayout)
1989 {
1990 TRACE("(VkDevice device = %p, VkImage image = %p, const VkImageSubresource* pSubresource = %p, VkSubresourceLayout* pLayout = %p)",
1991 device, static_cast<void *>(image), pSubresource, pLayout);
1992
1993 vk::Cast(image)->getSubresourceLayout(pSubresource, pLayout);
1994 }
1995
1996 VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(VkDevice device, const VkImageViewCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkImageView *pView)
1997 {
1998 TRACE("(VkDevice device = %p, const VkImageViewCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkImageView* pView = %p)",
1999 device, pCreateInfo, pAllocator, pView);
2000
2001 if(pCreateInfo->flags != 0)
2002 {
2003 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
2004 }
2005
2006 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2007 const vk::SamplerYcbcrConversion *ycbcrConversion = nullptr;
2008
2009 while(extensionCreateInfo)
2010 {
2011 switch(extensionCreateInfo->sType)
2012 {
2013 case VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO:
2014 {
2015 			const VkImageViewUsageCreateInfo *usageCreateInfo = reinterpret_cast<const VkImageViewUsageCreateInfo *>(extensionCreateInfo);
2016 			ASSERT(!(~vk::Cast(pCreateInfo->image)->getUsage() & usageCreateInfo->usage));
2017 }
2018 break;
2019 case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
2020 {
2021 const VkSamplerYcbcrConversionInfo *samplerYcbcrConversionInfo = reinterpret_cast<const VkSamplerYcbcrConversionInfo *>(extensionCreateInfo);
2022 ycbcrConversion = vk::Cast(samplerYcbcrConversionInfo->conversion);
2023
2024 if(ycbcrConversion)
2025 {
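				// The spec requires an identity component mapping (or the equivalent explicit swizzle) when a sampler Y'CbCr conversion is used.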
2026 ASSERT((pCreateInfo->components.r == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.r == VK_COMPONENT_SWIZZLE_R) &&
2027 (pCreateInfo->components.g == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.g == VK_COMPONENT_SWIZZLE_G) &&
2028 (pCreateInfo->components.b == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.b == VK_COMPONENT_SWIZZLE_B) &&
2029 (pCreateInfo->components.a == VK_COMPONENT_SWIZZLE_IDENTITY || pCreateInfo->components.a == VK_COMPONENT_SWIZZLE_A));
2030 }
2031 }
2032 break;
2033 case VK_STRUCTURE_TYPE_MAX_ENUM:
2034 // dEQP tests that this value is ignored.
2035 break;
2036 default:
2037 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
2038 break;
2039 }
2040
2041 extensionCreateInfo = extensionCreateInfo->pNext;
2042 }
2043
2044 VkResult result = vk::ImageView::Create(pAllocator, pCreateInfo, pView, ycbcrConversion);
2045 if(result == VK_SUCCESS)
2046 {
2047 vk::Cast(device)->registerImageView(vk::Cast(*pView));
2048 }
2049
2050 return result;
2051 }
2052
2053 VKAPI_ATTR void VKAPI_CALL vkDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator)
2054 {
2055 TRACE("(VkDevice device = %p, VkImageView imageView = %p, const VkAllocationCallbacks* pAllocator = %p)",
2056 device, static_cast<void *>(imageView), pAllocator);
2057
2058 vk::Cast(device)->unregisterImageView(vk::Cast(imageView));
2059 vk::destroy(imageView, pAllocator);
2060 }
2061
2062 VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule)
2063 {
2064 TRACE("(VkDevice device = %p, const VkShaderModuleCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkShaderModule* pShaderModule = %p)",
2065 device, pCreateInfo, pAllocator, pShaderModule);
2066
2067 if(pCreateInfo->flags != 0)
2068 {
2069 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2070 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
2071 }
2072
2073 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2074 while(nextInfo)
2075 {
2076 switch(nextInfo->sType)
2077 {
2078 case VK_STRUCTURE_TYPE_MAX_ENUM:
2079 // dEQP tests that this value is ignored.
2080 break;
2081 default:
2082 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2083 break;
2084 }
2085 nextInfo = nextInfo->pNext;
2086 }
2087
2088 return vk::ShaderModule::Create(pAllocator, pCreateInfo, pShaderModule);
2089 }
2090
2091 VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks *pAllocator)
2092 {
2093 TRACE("(VkDevice device = %p, VkShaderModule shaderModule = %p, const VkAllocationCallbacks* pAllocator = %p)",
2094 device, static_cast<void *>(shaderModule), pAllocator);
2095
2096 vk::destroy(shaderModule, pAllocator);
2097 }
2098
2099 VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPipelineCache *pPipelineCache)
2100 {
2101 TRACE("(VkDevice device = %p, const VkPipelineCacheCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipelineCache* pPipelineCache = %p)",
2102 device, pCreateInfo, pAllocator, pPipelineCache);
2103
2104 if(pCreateInfo->flags != 0)
2105 {
2106 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2107 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
2108 }
2109
2110 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
2111 while(extInfo)
2112 {
2113 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2114 extInfo = extInfo->pNext;
2115 }
2116
2117 return vk::PipelineCache::Create(pAllocator, pCreateInfo, pPipelineCache);
2118 }
2119
2120 VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks *pAllocator)
2121 {
2122 TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, const VkAllocationCallbacks* pAllocator = %p)",
2123 device, static_cast<void *>(pipelineCache), pAllocator);
2124
2125 vk::destroy(pipelineCache, pAllocator);
2126 }
2127
2128 VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize, void *pData)
2129 {
2130 TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, size_t* pDataSize = %p, void* pData = %p)",
2131 device, static_cast<void *>(pipelineCache), pDataSize, pData);
2132
2133 return vk::Cast(pipelineCache)->getData(pDataSize, pData);
2134 }
2135
2136 VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache *pSrcCaches)
2137 {
2138 TRACE("(VkDevice device = %p, VkPipelineCache dstCache = %p, uint32_t srcCacheCount = %d, const VkPipelineCache* pSrcCaches = %p)",
2139 device, static_cast<void *>(dstCache), int(srcCacheCount), pSrcCaches);
2140
2141 return vk::Cast(dstCache)->merge(srcCacheCount, pSrcCaches);
2142 }
2143
2144 VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines)
2145 {
2146 TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, uint32_t createInfoCount = %d, const VkGraphicsPipelineCreateInfo* pCreateInfos = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipeline* pPipelines = %p)",
2147 device, static_cast<void *>(pipelineCache), int(createInfoCount), pCreateInfos, pAllocator, pPipelines);
2148
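	// Pre-fill the output array with VK_NULL_HANDLE so entries for pipelines that fail creation read back as null.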
2149 memset(pPipelines, 0, sizeof(void *) * createInfoCount);
2150
2151 VkResult errorResult = VK_SUCCESS;
2152 for(uint32_t i = 0; i < createInfoCount; i++)
2153 {
2154 VkResult result = vk::GraphicsPipeline::Create(pAllocator, &pCreateInfos[i], &pPipelines[i], vk::Cast(device));
2155
2156 if(result == VK_SUCCESS)
2157 {
2158 result = static_cast<vk::GraphicsPipeline *>(vk::Cast(pPipelines[i]))->compileShaders(pAllocator, &pCreateInfos[i], vk::Cast(pipelineCache));
2159 if(result != VK_SUCCESS)
2160 {
2161 vk::destroy(pPipelines[i], pAllocator);
2162 }
2163 }
2164
2165 if(result != VK_SUCCESS)
2166 {
2167 // According to the Vulkan spec, section 9.4. Multiple Pipeline Creation
2168 // "When an application attempts to create many pipelines in a single command,
2169 // it is possible that some subset may fail creation. In that case, the
2170 // corresponding entries in the pPipelines output array will be filled with
2171 // VK_NULL_HANDLE values. If any pipeline fails creation (for example, due to
2172 // out of memory errors), the vkCreate*Pipelines commands will return an
2173 // error code. The implementation will attempt to create all pipelines, and
2174 // only return VK_NULL_HANDLE values for those that actually failed."
2175 pPipelines[i] = VK_NULL_HANDLE;
2176 errorResult = result;
2177
2178 // VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT specifies that control
2179 // will be returned to the application on failure of the corresponding pipeline
2180 // rather than continuing to create additional pipelines.
2181 if(pCreateInfos[i].flags & VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT)
2182 {
2183 return errorResult;
2184 }
2185 }
2186 }
2187
2188 return errorResult;
2189 }
2190
2191 VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines)
2192 {
2193 TRACE("(VkDevice device = %p, VkPipelineCache pipelineCache = %p, uint32_t createInfoCount = %d, const VkComputePipelineCreateInfo* pCreateInfos = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipeline* pPipelines = %p)",
2194 device, static_cast<void *>(pipelineCache), int(createInfoCount), pCreateInfos, pAllocator, pPipelines);
2195
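	// Pre-fill the output array with VK_NULL_HANDLE so entries for pipelines that fail creation read back as null.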
2196 memset(pPipelines, 0, sizeof(void *) * createInfoCount);
2197
2198 VkResult errorResult = VK_SUCCESS;
2199 for(uint32_t i = 0; i < createInfoCount; i++)
2200 {
2201 VkResult result = vk::ComputePipeline::Create(pAllocator, &pCreateInfos[i], &pPipelines[i], vk::Cast(device));
2202
2203 if(result == VK_SUCCESS)
2204 {
2205 result = static_cast<vk::ComputePipeline *>(vk::Cast(pPipelines[i]))->compileShaders(pAllocator, &pCreateInfos[i], vk::Cast(pipelineCache));
2206 if(result != VK_SUCCESS)
2207 {
2208 vk::destroy(pPipelines[i], pAllocator);
2209 }
2210 }
2211
2212 if(result != VK_SUCCESS)
2213 {
2214 // According to the Vulkan spec, section 9.4. Multiple Pipeline Creation
2215 // "When an application attempts to create many pipelines in a single command,
2216 // it is possible that some subset may fail creation. In that case, the
2217 // corresponding entries in the pPipelines output array will be filled with
2218 // VK_NULL_HANDLE values. If any pipeline fails creation (for example, due to
2219 // out of memory errors), the vkCreate*Pipelines commands will return an
2220 // error code. The implementation will attempt to create all pipelines, and
2221 // only return VK_NULL_HANDLE values for those that actually failed."
2222 pPipelines[i] = VK_NULL_HANDLE;
2223 errorResult = result;
2224
2225 // VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT specifies that control
2226 // will be returned to the application on failure of the corresponding pipeline
2227 // rather than continuing to create additional pipelines.
2228 if(pCreateInfos[i].flags & VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT)
2229 {
2230 return errorResult;
2231 }
2232 }
2233 }
2234
2235 return errorResult;
2236 }
2237
2238 VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator)
2239 {
2240 TRACE("(VkDevice device = %p, VkPipeline pipeline = %p, const VkAllocationCallbacks* pAllocator = %p)",
2241 device, static_cast<void *>(pipeline), pAllocator);
2242
2243 vk::destroy(pipeline, pAllocator);
2244 }
2245
2246 VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout)
2247 {
2248 TRACE("(VkDevice device = %p, const VkPipelineLayoutCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkPipelineLayout* pPipelineLayout = %p)",
2249 device, pCreateInfo, pAllocator, pPipelineLayout);
2250
2251 if((pCreateInfo->flags != 0) &&
2252 // FIXME(b/228307968) : VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT is used by dEQP
2253 // without checking if VK_EXT_graphics_pipeline_library is present
2254 (pCreateInfo->flags != VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT))
2255 {
2256 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2257 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
2258 }
2259
2260 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2261 while(nextInfo)
2262 {
2263 switch(nextInfo->sType)
2264 {
2265 case VK_STRUCTURE_TYPE_MAX_ENUM:
2266 // dEQP tests that this value is ignored.
2267 break;
2268 default:
2269 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2270 break;
2271 }
2272 nextInfo = nextInfo->pNext;
2273 }
2274
2275 return vk::PipelineLayout::Create(pAllocator, pCreateInfo, pPipelineLayout);
2276 }
2277
2278 VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks *pAllocator)
2279 {
2280 TRACE("(VkDevice device = %p, VkPipelineLayout pipelineLayout = %p, const VkAllocationCallbacks* pAllocator = %p)",
2281 device, static_cast<void *>(pipelineLayout), pAllocator);
2282
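	// Note: the layout is released rather than destroyed outright; pipelines created
	// against it may still hold a reference, so the object is only freed once the last
	// reference goes away.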
2283 vk::release(pipelineLayout, pAllocator);
2284 }
2285
VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler(VkDevice device, const VkSamplerCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSampler *pSampler)
2287 {
2288 TRACE("(VkDevice device = %p, const VkSamplerCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSampler* pSampler = %p)",
2289 device, pCreateInfo, pAllocator, pSampler);
2290
2291 if(pCreateInfo->flags != 0)
2292 {
2293 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
2294 }
2295
2296 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2297 const vk::SamplerYcbcrConversion *ycbcrConversion = nullptr;
2298 VkSamplerFilteringPrecisionModeGOOGLE filteringPrecision = VK_SAMPLER_FILTERING_PRECISION_MODE_LOW_GOOGLE;
2299 VkClearColorValue borderColor = {};
2300
2301 while(extensionCreateInfo)
2302 {
2303 switch(static_cast<long>(extensionCreateInfo->sType))
2304 {
2305 case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO:
2306 {
2307 const VkSamplerYcbcrConversionInfo *samplerYcbcrConversionInfo =
2308 reinterpret_cast<const VkSamplerYcbcrConversionInfo *>(extensionCreateInfo);
2309 ycbcrConversion = vk::Cast(samplerYcbcrConversionInfo->conversion);
2310 }
2311 break;
2312 #if !defined(__ANDROID__)
2313 case VK_STRUCTURE_TYPE_SAMPLER_FILTERING_PRECISION_GOOGLE:
2314 {
2315 const VkSamplerFilteringPrecisionGOOGLE *filteringInfo =
2316 reinterpret_cast<const VkSamplerFilteringPrecisionGOOGLE *>(extensionCreateInfo);
2317 filteringPrecision = filteringInfo->samplerFilteringPrecisionMode;
2318 }
2319 break;
2320 #endif
2321 case VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT:
2322 {
2323 const VkSamplerCustomBorderColorCreateInfoEXT *borderColorInfo =
2324 reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT *>(extensionCreateInfo);
2325
2326 borderColor = borderColorInfo->customBorderColor;
2327 }
2328 break;
2329 default:
2330 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
2331 break;
2332 }
2333
2334 extensionCreateInfo = extensionCreateInfo->pNext;
2335 }
2336
2337 vk::SamplerState samplerState(pCreateInfo, ycbcrConversion, filteringPrecision, borderColor);
2338 uint32_t samplerID = vk::Cast(device)->indexSampler(samplerState);
2339
2340 VkResult result = vk::Sampler::Create(pAllocator, pCreateInfo, pSampler, samplerState, samplerID);
2341
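	// If no sampler handle was returned, creation must have failed; undo the
	// indexSampler() reservation made above so the SamplerState entry is not leaked.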
2342 if(*pSampler == VK_NULL_HANDLE)
2343 {
2344 ASSERT(result != VK_SUCCESS);
2345 vk::Cast(device)->removeSampler(samplerState);
2346 }
2347
2348 return result;
2349 }
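
// For reference, a hypothetical application opts into the extension structures handled above
// by chaining them through VkSamplerCreateInfo::pNext (sketch only; 'device' and 'sampler'
// are assumed application-side variables):
//
//     VkSamplerCustomBorderColorCreateInfoEXT customBorder = {};
//     customBorder.sType = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT;
//     customBorder.customBorderColor.float32[0] = 1.0f;
//     customBorder.customBorderColor.float32[3] = 1.0f;
//     customBorder.format = VK_FORMAT_R8G8B8A8_UNORM;
//
//     VkSamplerCreateInfo samplerInfo = {};
//     samplerInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
//     samplerInfo.pNext = &customBorder;
//     samplerInfo.borderColor = VK_BORDER_COLOR_FLOAT_CUSTOM_EXT;
//     // ... remaining sampler state ...
//     vkCreateSampler(device, &samplerInfo, nullptr, &sampler);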
2350
VKAPI_ATTR void VKAPI_CALL vkDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator)
2352 {
2353 TRACE("(VkDevice device = %p, VkSampler sampler = %p, const VkAllocationCallbacks* pAllocator = %p)",
2354 device, static_cast<void *>(sampler), pAllocator);
2355
2356 if(sampler != VK_NULL_HANDLE)
2357 {
2358 vk::Cast(device)->removeSampler(*vk::Cast(sampler));
2359
2360 vk::destroy(sampler, pAllocator);
2361 }
2362 }
2363
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorSetLayout *pSetLayout)
2365 {
2366 TRACE("(VkDevice device = %p, const VkDescriptorSetLayoutCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorSetLayout* pSetLayout = %p)",
2367 device, pCreateInfo, pAllocator, pSetLayout);
2368
2369 const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2370
2371 while(extensionCreateInfo)
2372 {
2373 switch(extensionCreateInfo->sType)
2374 {
2375 case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT:
2376 ASSERT(!vk::Cast(device)->hasExtension(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME));
2377 break;
2378 default:
2379 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
2380 break;
2381 }
2382
2383 extensionCreateInfo = extensionCreateInfo->pNext;
2384 }
2385
2386 return vk::DescriptorSetLayout::Create(pAllocator, pCreateInfo, pSetLayout);
2387 }
2388
VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks *pAllocator)
2390 {
2391 TRACE("(VkDevice device = %p, VkDescriptorSetLayout descriptorSetLayout = %p, const VkAllocationCallbacks* pAllocator = %p)",
2392 device, static_cast<void *>(descriptorSetLayout), pAllocator);
2393
2394 vk::destroy(descriptorSetLayout, pAllocator);
2395 }
2396
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool)
2398 {
2399 TRACE("(VkDevice device = %p, const VkDescriptorPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorPool* pDescriptorPool = %p)",
2400 device, pCreateInfo, pAllocator, pDescriptorPool);
2401
2402 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
2403 while(extInfo)
2404 {
2405 switch(extInfo->sType)
2406 {
2407 case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO:
2408 break;
2409 default:
2410 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2411 break;
2412 }
2413 extInfo = extInfo->pNext;
2414 }
2415
2416 return vk::DescriptorPool::Create(pAllocator, pCreateInfo, pDescriptorPool);
2417 }
2418
VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks *pAllocator)
2420 {
2421 TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
2422 device, static_cast<void *>(descriptorPool), pAllocator);
2423
2424 vk::destroy(descriptorPool, pAllocator);
2425 }
2426
VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags)
2428 {
2429 TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, VkDescriptorPoolResetFlags flags = 0x%x)",
2430 device, static_cast<void *>(descriptorPool), int(flags));
2431
2432 if(flags != 0)
2433 {
2434 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2435 UNSUPPORTED("flags %d", int(flags));
2436 }
2437
2438 return vk::Cast(descriptorPool)->reset();
2439 }
2440
VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo, VkDescriptorSet *pDescriptorSets)
2442 {
2443 TRACE("(VkDevice device = %p, const VkDescriptorSetAllocateInfo* pAllocateInfo = %p, VkDescriptorSet* pDescriptorSets = %p)",
2444 device, pAllocateInfo, pDescriptorSets);
2445
2446 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pAllocateInfo->pNext);
2447 while(extInfo)
2448 {
2449 UNSUPPORTED("pAllocateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
2450 extInfo = extInfo->pNext;
2451 }
2452
2453 return vk::Cast(pAllocateInfo->descriptorPool)->allocateSets(pAllocateInfo->descriptorSetCount, pAllocateInfo->pSetLayouts, pDescriptorSets);
2454 }
2455
VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets)
2457 {
2458 TRACE("(VkDevice device = %p, VkDescriptorPool descriptorPool = %p, uint32_t descriptorSetCount = %d, const VkDescriptorSet* pDescriptorSets = %p)",
2459 device, static_cast<void *>(descriptorPool), descriptorSetCount, pDescriptorSets);
2460
2461 vk::Cast(descriptorPool)->freeSets(descriptorSetCount, pDescriptorSets);
2462
2463 return VK_SUCCESS;
2464 }
2465
VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet *pDescriptorCopies)
2467 {
2468 TRACE("(VkDevice device = %p, uint32_t descriptorWriteCount = %d, const VkWriteDescriptorSet* pDescriptorWrites = %p, uint32_t descriptorCopyCount = %d, const VkCopyDescriptorSet* pDescriptorCopies = %p)",
2469 device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
2470
2471 vk::Cast(device)->updateDescriptorSets(descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
2472 }
2473
VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer)
2475 {
2476 TRACE("(VkDevice device = %p, const VkFramebufferCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkFramebuffer* pFramebuffer = %p)",
2477 device, pCreateInfo, pAllocator, pFramebuffer);
2478
2479 return vk::Framebuffer::Create(pAllocator, pCreateInfo, pFramebuffer);
2480 }
2481
VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator)
2483 {
2484 TRACE("(VkDevice device = %p, VkFramebuffer framebuffer = %p, const VkAllocationCallbacks* pAllocator = %p)",
2485 device, static_cast<void *>(framebuffer), pAllocator);
2486
2487 vk::destroy(framebuffer, pAllocator);
2488 }
2489
VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass)
2491 {
2492 TRACE("(VkDevice device = %p, const VkRenderPassCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkRenderPass* pRenderPass = %p)",
2493 device, pCreateInfo, pAllocator, pRenderPass);
2494
2495 if(pCreateInfo->flags != 0)
2496 {
2497 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2498 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
2499 }
2500
2501 ValidateRenderPassPNextChain(device, pCreateInfo);
2502
2503 return vk::RenderPass::Create(pAllocator, pCreateInfo, pRenderPass);
2504 }
2505
VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2(VkDevice device, const VkRenderPassCreateInfo2KHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass)
2507 {
	TRACE("(VkDevice device = %p, const VkRenderPassCreateInfo2KHR* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkRenderPass* pRenderPass = %p)",
	      device, pCreateInfo, pAllocator, pRenderPass);
2510
2511 if(pCreateInfo->flags != 0)
2512 {
2513 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
2514 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
2515 }
2516
2517 ValidateRenderPassPNextChain(device, pCreateInfo);
2518
2519 return vk::RenderPass::Create(pAllocator, pCreateInfo, pRenderPass);
2520 }
2521
VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator)
2523 {
2524 TRACE("(VkDevice device = %p, VkRenderPass renderPass = %p, const VkAllocationCallbacks* pAllocator = %p)",
2525 device, static_cast<void *>(renderPass), pAllocator);
2526
2527 vk::destroy(renderPass, pAllocator);
2528 }
2529
VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity)
2531 {
2532 TRACE("(VkDevice device = %p, VkRenderPass renderPass = %p, VkExtent2D* pGranularity = %p)",
2533 device, static_cast<void *>(renderPass), pGranularity);
2534
2535 vk::Cast(renderPass)->getRenderAreaGranularity(pGranularity);
2536 }
2537
VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool)
2539 {
2540 TRACE("(VkDevice device = %p, const VkCommandPoolCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkCommandPool* pCommandPool = %p)",
2541 device, pCreateInfo, pAllocator, pCommandPool);
2542
2543 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
2544 while(nextInfo)
2545 {
2546 switch(nextInfo->sType)
2547 {
2548 case VK_STRUCTURE_TYPE_MAX_ENUM:
2549 // dEQP tests that this value is ignored.
2550 break;
2551 default:
2552 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2553 break;
2554 }
2555 nextInfo = nextInfo->pNext;
2556 }
2557
2558 return vk::CommandPool::Create(pAllocator, pCreateInfo, pCommandPool);
2559 }
2560
VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator)
2562 {
2563 TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, const VkAllocationCallbacks* pAllocator = %p)",
2564 device, static_cast<void *>(commandPool), pAllocator);
2565
2566 vk::destroy(commandPool, pAllocator);
2567 }
2568
VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags)
2570 {
2571 TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, VkCommandPoolResetFlags flags = %d)",
2572 device, static_cast<void *>(commandPool), int(flags));
2573
2574 return vk::Cast(commandPool)->reset(flags);
2575 }
2576
VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo, VkCommandBuffer *pCommandBuffers)
2578 {
2579 TRACE("(VkDevice device = %p, const VkCommandBufferAllocateInfo* pAllocateInfo = %p, VkCommandBuffer* pCommandBuffers = %p)",
2580 device, pAllocateInfo, pCommandBuffers);
2581
2582 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pAllocateInfo->pNext);
2583 while(nextInfo)
2584 {
2585 switch(nextInfo->sType)
2586 {
2587 case VK_STRUCTURE_TYPE_MAX_ENUM:
2588 // dEQP tests that this value is ignored.
2589 break;
2590 default:
2591 UNSUPPORTED("pAllocateInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2592 break;
2593 }
2594 nextInfo = nextInfo->pNext;
2595 }
2596
2597 return vk::Cast(pAllocateInfo->commandPool)->allocateCommandBuffers(vk::Cast(device), pAllocateInfo->level, pAllocateInfo->commandBufferCount, pCommandBuffers);
2598 }
2599
VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers)
2601 {
2602 TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, uint32_t commandBufferCount = %d, const VkCommandBuffer* pCommandBuffers = %p)",
2603 device, static_cast<void *>(commandPool), int(commandBufferCount), pCommandBuffers);
2604
2605 vk::Cast(commandPool)->freeCommandBuffers(commandBufferCount, pCommandBuffers);
2606 }
2607
VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo)
2609 {
2610 TRACE("(VkCommandBuffer commandBuffer = %p, const VkCommandBufferBeginInfo* pBeginInfo = %p)",
2611 commandBuffer, pBeginInfo);
2612
2613 auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pBeginInfo->pNext);
2614 while(nextInfo)
2615 {
2616 switch(nextInfo->sType)
2617 {
2618 case VK_STRUCTURE_TYPE_MAX_ENUM:
2619 // dEQP tests that this value is ignored.
2620 break;
2621 default:
2622 UNSUPPORTED("pBeginInfo->pNext sType = %s", vk::Stringify(nextInfo->sType).c_str());
2623 break;
2624 }
2625 nextInfo = nextInfo->pNext;
2626 }
2627
2628 return vk::Cast(commandBuffer)->begin(pBeginInfo->flags, pBeginInfo->pInheritanceInfo);
2629 }
2630
VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer(VkCommandBuffer commandBuffer)
2632 {
2633 TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);
2634
2635 return vk::Cast(commandBuffer)->end();
2636 }
2637
VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags)
2639 {
2640 TRACE("(VkCommandBuffer commandBuffer = %p, VkCommandBufferResetFlags flags = %d)", commandBuffer, int(flags));
2641
2642 return vk::Cast(commandBuffer)->reset(flags);
2643 }
2644
VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline)
2646 {
2647 TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineBindPoint pipelineBindPoint = %d, VkPipeline pipeline = %p)",
2648 commandBuffer, int(pipelineBindPoint), static_cast<void *>(pipeline));
2649
2650 vk::Cast(commandBuffer)->bindPipeline(pipelineBindPoint, vk::Cast(pipeline));
2651 }
2652
VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport *pViewports)
2654 {
2655 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstViewport = %d, uint32_t viewportCount = %d, const VkViewport* pViewports = %p)",
2656 commandBuffer, int(firstViewport), int(viewportCount), pViewports);
2657
2658 vk::Cast(commandBuffer)->setViewport(firstViewport, viewportCount, pViewports);
2659 }
2660
VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D *pScissors)
2662 {
2663 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstScissor = %d, uint32_t scissorCount = %d, const VkRect2D* pScissors = %p)",
2664 commandBuffer, int(firstScissor), int(scissorCount), pScissors);
2665
2666 vk::Cast(commandBuffer)->setScissor(firstScissor, scissorCount, pScissors);
2667 }
2668
VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
2670 {
2671 TRACE("(VkCommandBuffer commandBuffer = %p, float lineWidth = %f)", commandBuffer, lineWidth);
2672
2673 vk::Cast(commandBuffer)->setLineWidth(lineWidth);
2674 }
2675
VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor)
2677 {
2678 TRACE("(VkCommandBuffer commandBuffer = %p, float depthBiasConstantFactor = %f, float depthBiasClamp = %f, float depthBiasSlopeFactor = %f)",
2679 commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
2680
2681 vk::Cast(commandBuffer)->setDepthBias(depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
2682 }
2683
VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4])
2685 {
2686 TRACE("(VkCommandBuffer commandBuffer = %p, const float blendConstants[4] = {%f, %f, %f, %f})",
2687 commandBuffer, blendConstants[0], blendConstants[1], blendConstants[2], blendConstants[3]);
2688
2689 vk::Cast(commandBuffer)->setBlendConstants(blendConstants);
2690 }
2691
VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds)
2693 {
2694 TRACE("(VkCommandBuffer commandBuffer = %p, float minDepthBounds = %f, float maxDepthBounds = %f)",
2695 commandBuffer, minDepthBounds, maxDepthBounds);
2696
2697 vk::Cast(commandBuffer)->setDepthBounds(minDepthBounds, maxDepthBounds);
2698 }
2699
VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask)
2701 {
2702 TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t compareMask = %d)",
2703 commandBuffer, int(faceMask), int(compareMask));
2704
2705 vk::Cast(commandBuffer)->setStencilCompareMask(faceMask, compareMask);
2706 }
2707
VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask)
2709 {
2710 TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t writeMask = %d)",
2711 commandBuffer, int(faceMask), int(writeMask));
2712
2713 vk::Cast(commandBuffer)->setStencilWriteMask(faceMask, writeMask);
2714 }
2715
VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference)
2717 {
2718 TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, uint32_t reference = %d)",
2719 commandBuffer, int(faceMask), int(reference));
2720
2721 vk::Cast(commandBuffer)->setStencilReference(faceMask, reference);
2722 }
2723
VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t *pDynamicOffsets)
2725 {
2726 TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineBindPoint pipelineBindPoint = %d, VkPipelineLayout layout = %p, uint32_t firstSet = %d, uint32_t descriptorSetCount = %d, const VkDescriptorSet* pDescriptorSets = %p, uint32_t dynamicOffsetCount = %d, const uint32_t* pDynamicOffsets = %p)",
2727 commandBuffer, int(pipelineBindPoint), static_cast<void *>(layout), int(firstSet), int(descriptorSetCount), pDescriptorSets, int(dynamicOffsetCount), pDynamicOffsets);
2728
2729 vk::Cast(commandBuffer)->bindDescriptorSets(pipelineBindPoint, vk::Cast(layout), firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
2730 }
2731
VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType)
2733 {
2734 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, VkIndexType indexType = %d)",
2735 commandBuffer, static_cast<void *>(buffer), int(offset), int(indexType));
2736
2737 vk::Cast(commandBuffer)->bindIndexBuffer(vk::Cast(buffer), offset, indexType);
2738 }
2739
VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer *pBuffers, const VkDeviceSize *pOffsets)
2741 {
2742 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstBinding = %d, uint32_t bindingCount = %d, const VkBuffer* pBuffers = %p, const VkDeviceSize* pOffsets = %p)",
2743 commandBuffer, int(firstBinding), int(bindingCount), pBuffers, pOffsets);
2744
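	// The core entry point carries no per-binding sizes or strides, so null is forwarded
	// and the strides from the bound graphics pipeline's vertex input state apply.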
2745 vk::Cast(commandBuffer)->bindVertexBuffers(firstBinding, bindingCount, pBuffers, pOffsets, nullptr, nullptr);
2746 }
2747
VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer *pBuffers, const VkDeviceSize *pOffsets, const VkDeviceSize *pSizes, const VkDeviceSize *pStrides)
2749 {
2750 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t firstBinding = %d, uint32_t bindingCount = %d, const VkBuffer* pBuffers = %p, const VkDeviceSize* pOffsets = %p, const VkDeviceSize *pSizes = %p, const VkDeviceSize *pStrides = %p)",
2751 commandBuffer, int(firstBinding), int(bindingCount), pBuffers, pOffsets, pSizes, pStrides);
2752
2753 vk::Cast(commandBuffer)->bindVertexBuffers(firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides);
2754 }
2755
VKAPI_ATTR void VKAPI_CALL vkCmdSetCullMode(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode)
2757 {
2758 TRACE("(VkCommandBuffer commandBuffer = %p, VkCullModeFlags cullMode = %d)",
2759 commandBuffer, int(cullMode));
2760
2761 vk::Cast(commandBuffer)->setCullMode(cullMode);
2762 }
2763
VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnable(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable)
2765 {
2766 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 depthBoundsTestEnable = %d)",
2767 commandBuffer, int(depthBoundsTestEnable));
2768
2769 vk::Cast(commandBuffer)->setDepthBoundsTestEnable(depthBoundsTestEnable);
2770 }
2771
VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOp(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp)
2773 {
2774 TRACE("(VkCommandBuffer commandBuffer = %p, VkCompareOp depthCompareOp = %d)",
2775 commandBuffer, int(depthCompareOp));
2776
2777 vk::Cast(commandBuffer)->setDepthCompareOp(depthCompareOp);
2778 }
2779
VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnable(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable)
2781 {
2782 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 depthTestEnable = %d)",
2783 commandBuffer, int(depthTestEnable));
2784
2785 vk::Cast(commandBuffer)->setDepthTestEnable(depthTestEnable);
2786 }
2787
VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnable(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable)
2789 {
2790 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 depthWriteEnable = %d)",
2791 commandBuffer, int(depthWriteEnable));
2792
2793 vk::Cast(commandBuffer)->setDepthWriteEnable(depthWriteEnable);
2794 }
2795
VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFace(VkCommandBuffer commandBuffer, VkFrontFace frontFace)
2797 {
2798 TRACE("(VkCommandBuffer commandBuffer = %p, VkFrontFace frontFace = %d)",
2799 commandBuffer, int(frontFace));
2800
2801 vk::Cast(commandBuffer)->setFrontFace(frontFace);
2802 }
2803
VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopology(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology)
2805 {
2806 TRACE("(VkCommandBuffer commandBuffer = %p, VkPrimitiveTopology primitiveTopology = %d)",
2807 commandBuffer, int(primitiveTopology));
2808
2809 vk::Cast(commandBuffer)->setPrimitiveTopology(primitiveTopology);
2810 }
2811
VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCount(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D *pScissors)
2813 {
2814 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t scissorCount = %d, const VkRect2D *pScissors = %p)",
2815 commandBuffer, scissorCount, pScissors);
2816
2817 vk::Cast(commandBuffer)->setScissorWithCount(scissorCount, pScissors);
2818 }
2819
VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOp(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp)
2821 {
2822 TRACE("(VkCommandBuffer commandBuffer = %p, VkStencilFaceFlags faceMask = %d, VkStencilOp failOp = %d, VkStencilOp passOp = %d, VkStencilOp depthFailOp = %d, VkCompareOp compareOp = %d)",
2823 commandBuffer, int(faceMask), int(failOp), int(passOp), int(depthFailOp), int(compareOp));
2824
2825 vk::Cast(commandBuffer)->setStencilOp(faceMask, failOp, passOp, depthFailOp, compareOp);
2826 }
2827
VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilTestEnable(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable)
2829 {
2830 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 stencilTestEnable = %d)",
2831 commandBuffer, int(stencilTestEnable));
2832
2833 vk::Cast(commandBuffer)->setStencilTestEnable(stencilTestEnable);
2834 }
2835
VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCount(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport *pViewports)
2837 {
2838 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t viewportCount = %d, const VkViewport *pViewports = %p)",
2839 commandBuffer, viewportCount, pViewports);
2840
2841 vk::Cast(commandBuffer)->setViewportWithCount(viewportCount, pViewports);
2842 }
2843
VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnable(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable)
2845 {
2846 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 rasterizerDiscardEnable = %d)",
2847 commandBuffer, rasterizerDiscardEnable);
2848
2849 vk::Cast(commandBuffer)->setRasterizerDiscardEnable(rasterizerDiscardEnable);
2850 }
2851
VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnable(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable)
2853 {
2854 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 depthBiasEnable = %d)",
2855 commandBuffer, depthBiasEnable);
2856
2857 vk::Cast(commandBuffer)->setDepthBiasEnable(depthBiasEnable);
2858 }
2859
VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnable(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable)
2861 {
2862 TRACE("(VkCommandBuffer commandBuffer = %p, VkBool32 primitiveRestartEnable = %d)",
2863 commandBuffer, primitiveRestartEnable);
2864
2865 vk::Cast(commandBuffer)->setPrimitiveRestartEnable(primitiveRestartEnable);
2866 }
2867
VKAPI_ATTR void VKAPI_CALL vkCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance)
2869 {
2870 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t vertexCount = %d, uint32_t instanceCount = %d, uint32_t firstVertex = %d, uint32_t firstInstance = %d)",
2871 commandBuffer, int(vertexCount), int(instanceCount), int(firstVertex), int(firstInstance));
2872
2873 vk::Cast(commandBuffer)->draw(vertexCount, instanceCount, firstVertex, firstInstance);
2874 }
2875
VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance)
2877 {
2878 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t indexCount = %d, uint32_t instanceCount = %d, uint32_t firstIndex = %d, int32_t vertexOffset = %d, uint32_t firstInstance = %d)",
2879 commandBuffer, int(indexCount), int(instanceCount), int(firstIndex), int(vertexOffset), int(firstInstance));
2880
2881 vk::Cast(commandBuffer)->drawIndexed(indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
2882 }
2883
VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride)
2885 {
2886 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, uint32_t drawCount = %d, uint32_t stride = %d)",
2887 commandBuffer, static_cast<void *>(buffer), int(offset), int(drawCount), int(stride));
2888
2889 vk::Cast(commandBuffer)->drawIndirect(vk::Cast(buffer), offset, drawCount, stride);
2890 }
2891
VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride)
2893 {
2894 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, uint32_t drawCount = %d, uint32_t stride = %d)",
2895 commandBuffer, static_cast<void *>(buffer), int(offset), int(drawCount), int(stride));
2896
2897 vk::Cast(commandBuffer)->drawIndexedIndirect(vk::Cast(buffer), offset, drawCount, stride);
2898 }
2899
VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride)
2901 {
	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, VkBuffer countBuffer = %p, VkDeviceSize countBufferOffset = %d, uint32_t maxDrawCount = %d, uint32_t stride = %d)",
	      commandBuffer, static_cast<void *>(buffer), int(offset), static_cast<void *>(countBuffer), int(countBufferOffset), int(maxDrawCount), int(stride));
2904 UNSUPPORTED("VK_KHR_draw_indirect_count");
2905 }
2906
VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride)
2908 {
	TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d, VkBuffer countBuffer = %p, VkDeviceSize countBufferOffset = %d, uint32_t maxDrawCount = %d, uint32_t stride = %d)",
	      commandBuffer, static_cast<void *>(buffer), int(offset), static_cast<void *>(countBuffer), int(countBufferOffset), int(maxDrawCount), int(stride));
2911 UNSUPPORTED("VK_KHR_draw_indirect_count");
2912 }
2913
VKAPI_ATTR void VKAPI_CALL vkCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ)
2915 {
2916 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t groupCountX = %d, uint32_t groupCountY = %d, uint32_t groupCountZ = %d)",
2917 commandBuffer, int(groupCountX), int(groupCountY), int(groupCountZ));
2918
2919 vk::Cast(commandBuffer)->dispatch(groupCountX, groupCountY, groupCountZ);
2920 }
2921
VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset)
2923 {
2924 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer buffer = %p, VkDeviceSize offset = %d)",
2925 commandBuffer, static_cast<void *>(buffer), int(offset));
2926
2927 vk::Cast(commandBuffer)->dispatchIndirect(vk::Cast(buffer), offset);
2928 }
2929
VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy *pRegions)
2931 {
2932 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer srcBuffer = %p, VkBuffer dstBuffer = %p, uint32_t regionCount = %d, const VkBufferCopy* pRegions = %p)",
2933 commandBuffer, static_cast<void *>(srcBuffer), static_cast<void *>(dstBuffer), int(regionCount), pRegions);
2934
2935 vk::Cast(commandBuffer)->copyBuffer(vk::CopyBufferInfo(srcBuffer, dstBuffer, regionCount, pRegions));
2936 }
2937
VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 *pCopyBufferInfo)
2939 {
2940 TRACE("(VkCommandBuffer commandBuffer = %p, const VkCopyBufferInfo2* pCopyBufferInfo = %p)",
2941 commandBuffer, pCopyBufferInfo);
2942
2943 vk::Cast(commandBuffer)->copyBuffer(*pCopyBufferInfo);
2944 }
2945
VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions)
2947 {
2948 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageCopy* pRegions = %p)",
2949 commandBuffer, static_cast<void *>(srcImage), srcImageLayout, static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions);
2950
2951 vk::Cast(commandBuffer)->copyImage(vk::CopyImageInfo(srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions));
2952 }
2953
VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2(VkCommandBuffer commandBuffer, const VkCopyImageInfo2 *pCopyImageInfo)
2955 {
2956 TRACE("(VkCommandBuffer commandBuffer = %p, const VkCopyImageInfo2* pCopyImageInfo = %p)",
2957 commandBuffer, pCopyImageInfo);
2958
2959 vk::Cast(commandBuffer)->copyImage(*pCopyImageInfo);
2960 }
2961
VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter)
2963 {
2964 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageBlit* pRegions = %p, VkFilter filter = %d)",
2965 commandBuffer, static_cast<void *>(srcImage), srcImageLayout, static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions, filter);
2966
2967 vk::Cast(commandBuffer)->blitImage(vk::BlitImageInfo(srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter));
2968 }
2969
VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2(VkCommandBuffer commandBuffer, const VkBlitImageInfo2 *pBlitImageInfo)
2971 {
2972 TRACE("(VkCommandBuffer commandBuffer = %p, const VkBlitImageInfo2* pBlitImageInfo = %p)",
2973 commandBuffer, pBlitImageInfo);
2974
2975 vk::Cast(commandBuffer)->blitImage(*pBlitImageInfo);
2976 }
2977
VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions)
2979 {
2980 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer srcBuffer = %p, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkBufferImageCopy* pRegions = %p)",
2981 commandBuffer, static_cast<void *>(srcBuffer), static_cast<void *>(dstImage), dstImageLayout, int(regionCount), pRegions);
2982
2983 vk::Cast(commandBuffer)->copyBufferToImage(vk::CopyBufferToImageInfo(srcBuffer, dstImage, dstImageLayout, regionCount, pRegions));
2984 }
2985
VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 *pCopyBufferToImageInfo)
2987 {
2988 TRACE("(VkCommandBuffer commandBuffer = %p, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo = %p)",
2989 commandBuffer, pCopyBufferToImageInfo);
2990
2991 vk::Cast(commandBuffer)->copyBufferToImage(*pCopyBufferToImageInfo);
2992 }
2993
VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions)
2995 {
2996 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkBuffer dstBuffer = %p, uint32_t regionCount = %d, const VkBufferImageCopy* pRegions = %p)",
2997 commandBuffer, static_cast<void *>(srcImage), int(srcImageLayout), static_cast<void *>(dstBuffer), int(regionCount), pRegions);
2998
2999 vk::Cast(commandBuffer)->copyImageToBuffer(vk::CopyImageToBufferInfo(srcImage, srcImageLayout, dstBuffer, regionCount, pRegions));
3000 }
3001
VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 *pCopyImageToBufferInfo)
3003 {
3004 TRACE("(VkCommandBuffer commandBuffer = %p, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo = %p)",
3005 commandBuffer, pCopyImageToBufferInfo);
3006
3007 vk::Cast(commandBuffer)->copyImageToBuffer(*pCopyImageToBufferInfo);
3008 }
3009
VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData)
3011 {
3012 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize dataSize = %d, const void* pData = %p)",
3013 commandBuffer, static_cast<void *>(dstBuffer), int(dstOffset), int(dataSize), pData);
3014
3015 vk::Cast(commandBuffer)->updateBuffer(vk::Cast(dstBuffer), dstOffset, dataSize, pData);
3016 }
3017
VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data)
3019 {
3020 TRACE("(VkCommandBuffer commandBuffer = %p, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize size = %d, uint32_t data = %d)",
3021 commandBuffer, static_cast<void *>(dstBuffer), int(dstOffset), int(size), data);
3022
3023 vk::Cast(commandBuffer)->fillBuffer(vk::Cast(dstBuffer), dstOffset, size, data);
3024 }
3025
VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue *pColor, uint32_t rangeCount, const VkImageSubresourceRange *pRanges)
3027 {
3028 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage image = %p, VkImageLayout imageLayout = %d, const VkClearColorValue* pColor = %p, uint32_t rangeCount = %d, const VkImageSubresourceRange* pRanges = %p)",
3029 commandBuffer, static_cast<void *>(image), int(imageLayout), pColor, int(rangeCount), pRanges);
3030
3031 vk::Cast(commandBuffer)->clearColorImage(vk::Cast(image), imageLayout, pColor, rangeCount, pRanges);
3032 }
3033
VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange *pRanges)
3035 {
3036 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage image = %p, VkImageLayout imageLayout = %d, const VkClearDepthStencilValue* pDepthStencil = %p, uint32_t rangeCount = %d, const VkImageSubresourceRange* pRanges = %p)",
3037 commandBuffer, static_cast<void *>(image), int(imageLayout), pDepthStencil, int(rangeCount), pRanges);
3038
3039 vk::Cast(commandBuffer)->clearDepthStencilImage(vk::Cast(image), imageLayout, pDepthStencil, rangeCount, pRanges);
3040 }
3041
VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment *pAttachments, uint32_t rectCount, const VkClearRect *pRects)
3043 {
3044 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t attachmentCount = %d, const VkClearAttachment* pAttachments = %p, uint32_t rectCount = %d, const VkClearRect* pRects = %p)",
3045 commandBuffer, int(attachmentCount), pAttachments, int(rectCount), pRects);
3046
3047 vk::Cast(commandBuffer)->clearAttachments(attachmentCount, pAttachments, rectCount, pRects);
3048 }
3049
VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve *pRegions)
3051 {
3052 TRACE("(VkCommandBuffer commandBuffer = %p, VkImage srcImage = %p, VkImageLayout srcImageLayout = %d, VkImage dstImage = %p, VkImageLayout dstImageLayout = %d, uint32_t regionCount = %d, const VkImageResolve* pRegions = %p)",
3053 commandBuffer, static_cast<void *>(srcImage), int(srcImageLayout), static_cast<void *>(dstImage), int(dstImageLayout), regionCount, pRegions);
3054
3055 vk::Cast(commandBuffer)->resolveImage(vk::ResolveImageInfo(srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions));
3056 }
3057
VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2(VkCommandBuffer commandBuffer, const VkResolveImageInfo2 *pResolveImageInfo)
3059 {
3060 TRACE("(VkCommandBuffer commandBuffer = %p, const VkResolveImageInfo2* pResolveImageInfo = %p)",
3061 commandBuffer, pResolveImageInfo);
3062
3063 vk::Cast(commandBuffer)->resolveImage(*pResolveImageInfo);
3064 }
3065
VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
3067 {
3068 TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, VkPipelineStageFlags stageMask = %d)",
3069 commandBuffer, static_cast<void *>(event), int(stageMask));
3070
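	// Wrap the legacy stage mask in a DependencyInfo so the same path as the
	// synchronization2 variant (vkCmdSetEvent2) can be used.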
3071 vk::Cast(commandBuffer)->setEvent(vk::Cast(event), vk::DependencyInfo(stageMask, stageMask, VkDependencyFlags(0), 0, nullptr, 0, nullptr, 0, nullptr));
3072 }
3073
VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo *pDependencyInfo)
3075 {
3076 TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, const VkDependencyInfo* pDependencyInfo = %p)",
3077 commandBuffer, static_cast<void *>(event), pDependencyInfo);
3078
3079 vk::Cast(commandBuffer)->setEvent(vk::Cast(event), *pDependencyInfo);
3080 }
3081
VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
3083 {
3084 TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, VkPipelineStageFlags stageMask = %d)",
3085 commandBuffer, static_cast<void *>(event), int(stageMask));
3086
3087 vk::Cast(commandBuffer)->resetEvent(vk::Cast(event), stageMask);
3088 }
3089
VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask)
3091 {
3092 TRACE("(VkCommandBuffer commandBuffer = %p, VkEvent event = %p, VkPipelineStageFlags2 stageMask = %d)",
3093 commandBuffer, static_cast<void *>(event), int(stageMask));
3094
3095 vk::Cast(commandBuffer)->resetEvent(vk::Cast(event), stageMask);
3096 }
3097
VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers)
3099 {
3100 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t eventCount = %d, const VkEvent* pEvents = %p, VkPipelineStageFlags srcStageMask = 0x%x, VkPipelineStageFlags dstStageMask = 0x%x, uint32_t memoryBarrierCount = %d, const VkMemoryBarrier* pMemoryBarriers = %p, uint32_t bufferMemoryBarrierCount = %d, const VkBufferMemoryBarrier* pBufferMemoryBarriers = %p, uint32_t imageMemoryBarrierCount = %d, const VkImageMemoryBarrier* pImageMemoryBarriers = %p)",
3101 commandBuffer, int(eventCount), pEvents, int(srcStageMask), int(dstStageMask), int(memoryBarrierCount), pMemoryBarriers, int(bufferMemoryBarrierCount), pBufferMemoryBarriers, int(imageMemoryBarrierCount), pImageMemoryBarriers);
3102
3103 vk::Cast(commandBuffer)->waitEvents(eventCount, pEvents, vk::DependencyInfo(srcStageMask, dstStageMask, VkDependencyFlags(0), memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers));
3104 }
3105
VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents, const VkDependencyInfo *pDependencyInfos)
3107 {
3108 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t eventCount = %d, const VkEvent* pEvents = %p, const VkDependencyInfo* pDependencyInfos = %p)",
3109 commandBuffer, int(eventCount), pEvents, pDependencyInfos);
3110
3111 vk::Cast(commandBuffer)->waitEvents(eventCount, pEvents, *pDependencyInfos);
3112 }
3113
VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers)
3115 {
3116 TRACE(
3117 	    "(VkCommandBuffer commandBuffer = %p, VkPipelineStageFlags srcStageMask = 0x%x, VkPipelineStageFlags dstStageMask = 0x%x, VkDependencyFlags dependencyFlags = %d, uint32_t memoryBarrierCount = %d, const VkMemoryBarrier* pMemoryBarriers = %p,"
3118 " uint32_t bufferMemoryBarrierCount = %d, const VkBufferMemoryBarrier* pBufferMemoryBarriers = %p, uint32_t imageMemoryBarrierCount = %d, const VkImageMemoryBarrier* pImageMemoryBarriers = %p)",
3119 commandBuffer, int(srcStageMask), int(dstStageMask), dependencyFlags, int(memoryBarrierCount), pMemoryBarriers, int(bufferMemoryBarrierCount), pBufferMemoryBarriers, int(imageMemoryBarrierCount), pImageMemoryBarriers);
3120
3121 vk::Cast(commandBuffer)->pipelineBarrier(vk::DependencyInfo(srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers));
3122 }
3123
3124 VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2(VkCommandBuffer commandBuffer, const VkDependencyInfo *pDependencyInfo)
3125 {
3126 TRACE("(VkCommandBuffer commandBuffer = %p, const VkDependencyInfo* pDependencyInfo = %p)",
3127 commandBuffer, pDependencyInfo);
3128
3129 vk::Cast(commandBuffer)->pipelineBarrier(*pDependencyInfo);
3130 }
3131
3132 VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags)
3133 {
3134 TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t query = %d, VkQueryControlFlags flags = %d)",
3135 commandBuffer, static_cast<void *>(queryPool), query, int(flags));
3136
3137 vk::Cast(commandBuffer)->beginQuery(vk::Cast(queryPool), query, flags);
3138 }
3139
3140 VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query)
3141 {
3142 TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t query = %d)",
3143 commandBuffer, static_cast<void *>(queryPool), int(query));
3144
3145 vk::Cast(commandBuffer)->endQuery(vk::Cast(queryPool), query);
3146 }
3147
3148 VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount)
3149 {
3150 TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d)",
3151 commandBuffer, static_cast<void *>(queryPool), int(firstQuery), int(queryCount));
3152
3153 vk::Cast(commandBuffer)->resetQueryPool(vk::Cast(queryPool), firstQuery, queryCount);
3154 }
3155
3156 VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query)
3157 {
3158 TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineStageFlagBits pipelineStage = %d, VkQueryPool queryPool = %p, uint32_t query = %d)",
3159 commandBuffer, int(pipelineStage), static_cast<void *>(queryPool), int(query));
3160
3161 vk::Cast(commandBuffer)->writeTimestamp(pipelineStage, vk::Cast(queryPool), query);
3162 }
3163
3164 VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query)
3165 {
3166 TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineStageFlags2 stage = %d, VkQueryPool queryPool = %p, uint32_t query = %d)",
3167 commandBuffer, int(stage), static_cast<void *>(queryPool), int(query));
3168
3169 vk::Cast(commandBuffer)->writeTimestamp(stage, vk::Cast(queryPool), query);
3170 }
3171
3172 VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags)
3173 {
3174 TRACE("(VkCommandBuffer commandBuffer = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d, VkBuffer dstBuffer = %p, VkDeviceSize dstOffset = %d, VkDeviceSize stride = %d, VkQueryResultFlags flags = %d)",
3175 commandBuffer, static_cast<void *>(queryPool), int(firstQuery), int(queryCount), static_cast<void *>(dstBuffer), int(dstOffset), int(stride), int(flags));
3176
3177 vk::Cast(commandBuffer)->copyQueryPoolResults(vk::Cast(queryPool), firstQuery, queryCount, vk::Cast(dstBuffer), dstOffset, stride, flags);
3178 }
3179
3180 VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void *pValues)
3181 {
3182 TRACE("(VkCommandBuffer commandBuffer = %p, VkPipelineLayout layout = %p, VkShaderStageFlags stageFlags = %d, uint32_t offset = %d, uint32_t size = %d, const void* pValues = %p)",
3183 commandBuffer, static_cast<void *>(layout), stageFlags, offset, size, pValues);
3184
3185 vk::Cast(commandBuffer)->pushConstants(vk::Cast(layout), stageFlags, offset, size, pValues);
3186 }
3187
3188 VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, VkSubpassContents contents)
3189 {
3190 VkSubpassBeginInfo subpassBeginInfo = { VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, nullptr, contents };
3191 vkCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, &subpassBeginInfo);
3192 }
3193
3194 VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, const VkSubpassBeginInfoKHR *pSubpassBeginInfo)
3195 {
3196 TRACE("(VkCommandBuffer commandBuffer = %p, const VkRenderPassBeginInfo* pRenderPassBegin = %p, const VkSubpassBeginInfoKHR* pSubpassBeginInfo = %p)",
3197 commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
3198
3199 const VkBaseInStructure *renderPassBeginInfo = reinterpret_cast<const VkBaseInStructure *>(pRenderPassBegin->pNext);
3200 const VkRenderPassAttachmentBeginInfo *attachmentBeginInfo = nullptr;
3201 while(renderPassBeginInfo)
3202 {
3203 switch(renderPassBeginInfo->sType)
3204 {
3205 case VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO:
3206 // This extension controls which render area is used on which physical device,
3207 // in order to distribute rendering between multiple physical devices.
3208 // SwiftShader only has a single physical device, so this extension does nothing in this case.
3209 break;
3210 case VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO:
3211 attachmentBeginInfo = reinterpret_cast<const VkRenderPassAttachmentBeginInfo *>(renderPassBeginInfo);
3212 break;
3213 case VK_STRUCTURE_TYPE_MAX_ENUM:
3214 // dEQP tests that this value is ignored.
3215 break;
3216 default:
3217 UNSUPPORTED("pRenderPassBegin->pNext sType = %s", vk::Stringify(renderPassBeginInfo->sType).c_str());
3218 break;
3219 }
3220
3221 renderPassBeginInfo = renderPassBeginInfo->pNext;
3222 }
3223
3224 vk::Cast(commandBuffer)->beginRenderPass(vk::Cast(pRenderPassBegin->renderPass), vk::Cast(pRenderPassBegin->framebuffer), pRenderPassBegin->renderArea, pRenderPassBegin->clearValueCount, pRenderPassBegin->pClearValues, pSubpassBeginInfo->contents, attachmentBeginInfo);
3225 }
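// Illustrative sketch (not driver code): with VK_KHR_imageless_framebuffer, an application
// supplies the actual attachment views at begin time through the VkRenderPassAttachmentBeginInfo
// chain handled above. The handles below (attachmentView, commandBuffer) are hypothetical
// app-side objects, and the VkRenderPassBeginInfo members elided with /* ... */ are unchanged:
//
//   VkRenderPassAttachmentBeginInfo attachmentBegin = {
//       VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO, nullptr, 1, &attachmentView
//   };
//   VkRenderPassBeginInfo renderPassBegin = { VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, &attachmentBegin, /* ... */ };
//   VkSubpassBeginInfo subpassBegin = { VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, nullptr, VK_SUBPASS_CONTENTS_INLINE };
//   vkCmdBeginRenderPass2(commandBuffer, &renderPassBegin, &subpassBegin);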
3226
3227 VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents)
3228 {
3229 TRACE("(VkCommandBuffer commandBuffer = %p, VkSubpassContents contents = %d)",
3230 commandBuffer, contents);
3231
3232 vk::Cast(commandBuffer)->nextSubpass(contents);
3233 }
3234
3235 VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2(VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR *pSubpassBeginInfo, const VkSubpassEndInfoKHR *pSubpassEndInfo)
3236 {
3237 TRACE("(VkCommandBuffer commandBuffer = %p, const VkSubpassBeginInfoKHR* pSubpassBeginInfo = %p, const VkSubpassEndInfoKHR* pSubpassEndInfo = %p)",
3238 commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
3239
3240 vk::Cast(commandBuffer)->nextSubpass(pSubpassBeginInfo->contents);
3241 }
3242
3243 VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass(VkCommandBuffer commandBuffer)
3244 {
3245 TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);
3246
3247 vk::Cast(commandBuffer)->endRenderPass();
3248 }
3249
3250 VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR *pSubpassEndInfo)
3251 {
3252 TRACE("(VkCommandBuffer commandBuffer = %p, const VkSubpassEndInfoKHR* pSubpassEndInfo = %p)", commandBuffer, pSubpassEndInfo);
3253
3254 vk::Cast(commandBuffer)->endRenderPass();
3255 }
3256
3257 VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers)
3258 {
3259 TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t commandBufferCount = %d, const VkCommandBuffer* pCommandBuffers = %p)",
3260 commandBuffer, commandBufferCount, pCommandBuffers);
3261
3262 vk::Cast(commandBuffer)->executeCommands(commandBufferCount, pCommandBuffers);
3263 }
3264
3265 VKAPI_ATTR void VKAPI_CALL vkCmdBeginRendering(VkCommandBuffer commandBuffer, const VkRenderingInfo *pRenderingInfo)
3266 {
3267 TRACE("(VkCommandBuffer commandBuffer = %p, const VkRenderingInfo* pRenderingInfo = %p)",
3268 commandBuffer, pRenderingInfo);
3269
3270 vk::Cast(commandBuffer)->beginRendering(pRenderingInfo);
3271 }
3272
3273 VKAPI_ATTR void VKAPI_CALL vkCmdEndRendering(VkCommandBuffer commandBuffer)
3274 {
3275 TRACE("(VkCommandBuffer commandBuffer = %p)",
3276 commandBuffer);
3277
3278 vk::Cast(commandBuffer)->endRendering();
3279 }
3280
3281 VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion(uint32_t *pApiVersion)
3282 {
3283 TRACE("(uint32_t* pApiVersion = %p)", pApiVersion);
3284 *pApiVersion = vk::API_VERSION;
3285 return VK_SUCCESS;
3286 }
3287
3288 VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos)
3289 {
3290 TRACE("(VkDevice device = %p, uint32_t bindInfoCount = %d, const VkBindBufferMemoryInfo* pBindInfos = %p)",
3291 device, bindInfoCount, pBindInfos);
3292
3293 for(uint32_t i = 0; i < bindInfoCount; i++)
3294 {
3295 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pBindInfos[i].pNext);
3296 while(extInfo)
3297 {
3298 UNSUPPORTED("pBindInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
3299 extInfo = extInfo->pNext;
3300 }
3301
3302 if(!vk::Cast(pBindInfos[i].buffer)->canBindToMemory(vk::Cast(pBindInfos[i].memory)))
3303 {
3304 UNSUPPORTED("vkBindBufferMemory2 with invalid external memory");
3305 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
3306 }
3307 }
3308
3309 for(uint32_t i = 0; i < bindInfoCount; i++)
3310 {
3311 vk::Cast(pBindInfos[i].buffer)->bind(vk::Cast(pBindInfos[i].memory), pBindInfos[i].memoryOffset);
3312 }
3313
3314 return VK_SUCCESS;
3315 }
3316
3317 VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo *pBindInfos)
3318 {
3319 TRACE("(VkDevice device = %p, uint32_t bindInfoCount = %d, const VkBindImageMemoryInfo* pBindInfos = %p)",
3320 device, bindInfoCount, pBindInfos);
3321
3322 for(uint32_t i = 0; i < bindInfoCount; i++)
3323 {
3324 if(!vk::Cast(pBindInfos[i].image)->canBindToMemory(vk::Cast(pBindInfos[i].memory)))
3325 {
3326 UNSUPPORTED("vkBindImageMemory2 with invalid external memory");
3327 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
3328 }
3329 }
3330
3331 for(uint32_t i = 0; i < bindInfoCount; i++)
3332 {
3333 vk::DeviceMemory *memory = vk::Cast(pBindInfos[i].memory);
3334 VkDeviceSize offset = pBindInfos[i].memoryOffset;
3335
3336 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pBindInfos[i].pNext);
3337 while(extInfo)
3338 {
3339 switch(extInfo->sType)
3340 {
3341 case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO:
3342 /* Do nothing */
3343 break;
3344
3345 #ifndef __ANDROID__
3346 case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR:
3347 {
3348 auto swapchainInfo = reinterpret_cast<VkBindImageMemorySwapchainInfoKHR const *>(extInfo);
3349 memory = vk::Cast(swapchainInfo->swapchain)->getImage(swapchainInfo->imageIndex).getImageMemory();
3350 offset = 0;
3351 }
3352 break;
3353 #endif
3354
3355 default:
3356 UNSUPPORTED("pBindInfos[%d].pNext sType = %s", i, vk::Stringify(extInfo->sType).c_str());
3357 break;
3358 }
3359 extInfo = extInfo->pNext;
3360 }
3361
3362 vk::Cast(pBindInfos[i].image)->bind(memory, offset);
3363 }
3364
3365 return VK_SUCCESS;
3366 }
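// Illustrative sketch (not driver code): binding a swapchain image through vkBindImageMemory2
// chains VkBindImageMemorySwapchainInfoKHR, which the loop above resolves to the swapchain
// image's own memory at offset 0. The handles (device, image, swapchain) and imageIndex are
// assumed app-side values:
//
//   VkBindImageMemorySwapchainInfoKHR swapchainBind = {
//       VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, nullptr, swapchain, imageIndex
//   };
//   VkBindImageMemoryInfo bindInfo = {
//       VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, &swapchainBind, image, VK_NULL_HANDLE, 0
//   };
//   vkBindImageMemory2(device, 1, &bindInfo);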
3367
3368 VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeatures(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
3369 {
3370 TRACE("(VkDevice device = %p, uint32_t heapIndex = %d, uint32_t localDeviceIndex = %d, uint32_t remoteDeviceIndex = %d, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures = %p)",
3371 device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
3372
3373 ASSERT(localDeviceIndex != remoteDeviceIndex); // "localDeviceIndex must not equal remoteDeviceIndex"
3374 UNSUPPORTED("remoteDeviceIndex: %d", int(remoteDeviceIndex)); // Only one physical device is supported, and since the device indexes can't be equal, this should never be called.
3375 }
3376
3377 VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask)
3378 {
3379 	TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t deviceMask = %d)", commandBuffer, deviceMask);
3380
3381 vk::Cast(commandBuffer)->setDeviceMask(deviceMask);
3382 }
3383
3384 VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ)
3385 {
3386 TRACE("(VkCommandBuffer commandBuffer = %p, baseGroupX = %u, baseGroupY = %u, baseGroupZ = %u, groupCountX = %u, groupCountY = %u, groupCountZ = %u)",
3387 commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
3388
3389 vk::Cast(commandBuffer)->dispatchBase(baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
3390 }
3391
3392 VKAPI_ATTR void VKAPI_CALL vkResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount)
3393 {
3394 TRACE("(VkDevice device = %p, VkQueryPool queryPool = %p, uint32_t firstQuery = %d, uint32_t queryCount = %d)",
3395 device, static_cast<void *>(queryPool), firstQuery, queryCount);
3396 vk::Cast(queryPool)->reset(firstQuery, queryCount);
3397 }
3398
3399 VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
3400 {
3401 TRACE("(VkInstance instance = %p, uint32_t* pPhysicalDeviceGroupCount = %p, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties = %p)",
3402 instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
3403
3404 return vk::Cast(instance)->getPhysicalDeviceGroups(pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
3405 }
3406
3407 VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
3408 {
3409 TRACE("(VkDevice device = %p, const VkImageMemoryRequirementsInfo2* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
3410 device, pInfo, pMemoryRequirements);
3411
3412 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pInfo->pNext);
3413 while(extInfo)
3414 {
3415 UNSUPPORTED("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3416 extInfo = extInfo->pNext;
3417 }
3418
3419 vk::Cast(pInfo->image)->getMemoryRequirements(pMemoryRequirements);
3420 }
3421
3422 VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2 *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
3423 {
3424 TRACE("(VkDevice device = %p, const VkBufferMemoryRequirementsInfo2* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
3425 device, pInfo, pMemoryRequirements);
3426
3427 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pInfo->pNext);
3428 while(extInfo)
3429 {
3430 UNSUPPORTED("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3431 extInfo = extInfo->pNext;
3432 }
3433
3434 VkBaseOutStructure *extensionRequirements = reinterpret_cast<VkBaseOutStructure *>(pMemoryRequirements->pNext);
3435 while(extensionRequirements)
3436 {
3437 switch(extensionRequirements->sType)
3438 {
3439 case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
3440 {
3441 auto requirements = reinterpret_cast<VkMemoryDedicatedRequirements *>(extensionRequirements);
3442 vk::Cast(device)->getRequirements(requirements);
3443 }
3444 break;
3445 default:
3446 UNSUPPORTED("pMemoryRequirements->pNext sType = %s", vk::Stringify(extensionRequirements->sType).c_str());
3447 break;
3448 }
3449
3450 extensionRequirements = extensionRequirements->pNext;
3451 }
3452
3453 vkGetBufferMemoryRequirements(device, pInfo->buffer, &(pMemoryRequirements->memoryRequirements));
3454 }
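// Illustrative sketch (not driver code): querying dedicated-allocation preferences goes through
// the VkMemoryDedicatedRequirements chain handled above. The buffer and device handles are
// assumed app-side objects:
//
//   VkMemoryDedicatedRequirements dedicated = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS };
//   VkMemoryRequirements2 requirements = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, &dedicated };
//   VkBufferMemoryRequirementsInfo2 info = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, nullptr, buffer };
//   vkGetBufferMemoryRequirements2(device, &info, &requirements);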
3455
3456 VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2 *pInfo, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
3457 {
3458 TRACE("(VkDevice device = %p, const VkImageSparseMemoryRequirementsInfo2* pInfo = %p, uint32_t* pSparseMemoryRequirementCount = %p, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements = %p)",
3459 device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
3460
3461 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pInfo->pNext);
3462 while(extInfo)
3463 {
3464 UNSUPPORTED("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3465 extInfo = extInfo->pNext;
3466 }
3467
3468 auto extensionRequirements = reinterpret_cast<VkBaseInStructure const *>(pSparseMemoryRequirements->pNext);
3469 while(extensionRequirements)
3470 {
3471 UNSUPPORTED("pSparseMemoryRequirements->pNext sType = %s", vk::Stringify(extensionRequirements->sType).c_str());
3472 extensionRequirements = extensionRequirements->pNext;
3473 }
3474
3475 	// The 'sparseBinding' feature is not supported, so images cannot be created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag.
3476 // "If the image was not created with VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT then pSparseMemoryRequirementCount will be set to zero and pSparseMemoryRequirements will not be written to."
3477 *pSparseMemoryRequirementCount = 0;
3478 }
3479
3480 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 *pFeatures)
3481 {
3482 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceFeatures2* pFeatures = %p)", physicalDevice, pFeatures);
3483
3484 vk::Cast(physicalDevice)->getFeatures2(pFeatures);
3485 }
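// Illustrative sketch (not driver code): feature queries chain per-version or per-extension
// structures through pNext, and getFeatures2() walks that chain. physicalDevice is an assumed
// app-side handle:
//
//   VkPhysicalDeviceVulkan12Features features12 = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES };
//   VkPhysicalDeviceFeatures2 features2 = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, &features12 };
//   vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);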
3486
3487 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 *pProperties)
3488 {
3489 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceProperties2* pProperties = %p)", physicalDevice, pProperties);
3490
3491 VkBaseOutStructure *extensionProperties = reinterpret_cast<VkBaseOutStructure *>(pProperties->pNext);
3492 while(extensionProperties)
3493 {
3494 // Casting to an int since some structures, such as VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID,
3495 // are not enumerated in the official Vulkan headers.
3496 switch((int)(extensionProperties->sType))
3497 {
3498 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES:
3499 {
3500 auto properties = reinterpret_cast<VkPhysicalDeviceIDProperties *>(extensionProperties);
3501 vk::Cast(physicalDevice)->getProperties(properties);
3502 }
3503 break;
3504 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES:
3505 {
3506 auto properties = reinterpret_cast<VkPhysicalDeviceMaintenance3Properties *>(extensionProperties);
3507 vk::Cast(physicalDevice)->getProperties(properties);
3508 }
3509 break;
3510 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES:
3511 {
3512 auto properties = reinterpret_cast<VkPhysicalDeviceMaintenance4Properties *>(extensionProperties);
3513 vk::Cast(physicalDevice)->getProperties(properties);
3514 }
3515 break;
3516 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES:
3517 {
3518 auto properties = reinterpret_cast<VkPhysicalDeviceMultiviewProperties *>(extensionProperties);
3519 vk::Cast(physicalDevice)->getProperties(properties);
3520 }
3521 break;
3522 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES:
3523 {
3524 auto properties = reinterpret_cast<VkPhysicalDevicePointClippingProperties *>(extensionProperties);
3525 vk::Cast(physicalDevice)->getProperties(properties);
3526 }
3527 break;
3528 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES:
3529 {
3530 auto properties = reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>(extensionProperties);
3531 vk::Cast(physicalDevice)->getProperties(properties);
3532 }
3533 break;
3534 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES:
3535 {
3536 auto properties = reinterpret_cast<VkPhysicalDeviceSubgroupProperties *>(extensionProperties);
3537 vk::Cast(physicalDevice)->getProperties(properties);
3538 }
3539 break;
3540 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT:
3541 {
3542 auto properties = reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>(extensionProperties);
3543 vk::Cast(physicalDevice)->getProperties(properties);
3544 }
3545 break;
3546 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES:
3547 {
3548 auto properties = reinterpret_cast<VkPhysicalDeviceDriverProperties *>(extensionProperties);
3549 vk::Cast(physicalDevice)->getProperties(properties);
3550 }
3551 break;
3552 #ifdef __ANDROID__
3553 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID:
3554 {
3555 auto properties = reinterpret_cast<VkPhysicalDevicePresentationPropertiesANDROID *>(extensionProperties);
3556 vk::Cast(physicalDevice)->getProperties(properties);
3557 }
3558 break;
3559 #endif
3560 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT:
3561 {
3562 auto properties = reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT *>(extensionProperties);
3563 vk::Cast(physicalDevice)->getProperties(properties);
3564 }
3565 break;
3566 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT:
3567 {
3568 auto properties = reinterpret_cast<VkPhysicalDeviceProvokingVertexPropertiesEXT *>(extensionProperties);
3569 vk::Cast(physicalDevice)->getProperties(properties);
3570 }
3571 break;
3572 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES:
3573 {
3574 auto properties = reinterpret_cast<VkPhysicalDeviceFloatControlsProperties *>(extensionProperties);
3575 vk::Cast(physicalDevice)->getProperties(properties);
3576 }
3577 break;
3578 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES:
3579 {
3580 auto properties = reinterpret_cast<VkPhysicalDeviceVulkan11Properties *>(extensionProperties);
3581 vk::Cast(physicalDevice)->getProperties(properties);
3582 }
3583 break;
3584 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES:
3585 {
3586 auto properties = reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties *>(extensionProperties);
3587 vk::Cast(physicalDevice)->getProperties(properties);
3588 }
3589 break;
3590 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES:
3591 {
3592 auto properties = reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties *>(extensionProperties);
3593 vk::Cast(physicalDevice)->getProperties(properties);
3594 }
3595 break;
3596 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES:
3597 {
3598 auto properties = reinterpret_cast<VkPhysicalDeviceVulkan12Properties *>(extensionProperties);
3599 vk::Cast(physicalDevice)->getProperties(properties);
3600 }
3601 break;
3602 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES:
3603 {
3604 auto properties = reinterpret_cast<VkPhysicalDeviceVulkan13Properties *>(extensionProperties);
3605 vk::Cast(physicalDevice)->getProperties(properties);
3606 }
3607 break;
3608 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES:
3609 {
3610 auto properties = reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties *>(extensionProperties);
3611 vk::Cast(physicalDevice)->getProperties(properties);
3612 }
3613 break;
3614 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES:
3615 {
3616 auto properties = reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties *>(extensionProperties);
3617 vk::Cast(physicalDevice)->getProperties(properties);
3618 }
3619 break;
3620 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT:
3621 {
3622 auto properties = reinterpret_cast<VkPhysicalDeviceCustomBorderColorPropertiesEXT *>(extensionProperties);
3623 vk::Cast(physicalDevice)->getProperties(properties);
3624 }
3625 break;
3626 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT:
3627 {
3628 auto properties = reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>(extensionProperties);
3629 vk::Cast(physicalDevice)->getProperties(properties);
3630 }
3631 break;
3632 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES:
3633 {
3634 auto properties = reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlProperties *>(extensionProperties);
3635 vk::Cast(physicalDevice)->getProperties(properties);
3636 }
3637 break;
3638 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES:
3639 {
3640 auto properties = reinterpret_cast<VkPhysicalDeviceInlineUniformBlockProperties *>(extensionProperties);
3641 vk::Cast(physicalDevice)->getProperties(properties);
3642 }
3643 break;
3644 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES:
3645 {
3646 auto properties = reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentProperties *>(extensionProperties);
3647 vk::Cast(physicalDevice)->getProperties(properties);
3648 }
3649 break;
3650 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES:
3651 {
3652 auto properties = reinterpret_cast<VkPhysicalDeviceShaderIntegerDotProductProperties *>(extensionProperties);
3653 vk::Cast(physicalDevice)->getProperties(properties);
3654 }
3655 break;
3656 default:
3657 		// "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extensions]"
3658 UNSUPPORTED("pProperties->pNext sType = %s", vk::Stringify(extensionProperties->sType).c_str());
3659 break;
3660 }
3661
3662 extensionProperties = extensionProperties->pNext;
3663 }
3664
3665 vkGetPhysicalDeviceProperties(physicalDevice, &(pProperties->properties));
3666 }
3667
3668 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2 *pFormatProperties)
3669 {
3670 TRACE("(VkPhysicalDevice physicalDevice = %p, VkFormat format = %d, VkFormatProperties2* pFormatProperties = %p)",
3671 physicalDevice, format, pFormatProperties);
3672
3673 VkBaseOutStructure *extensionProperties = reinterpret_cast<VkBaseOutStructure *>(pFormatProperties->pNext);
3674 while(extensionProperties)
3675 {
3676 switch(extensionProperties->sType)
3677 {
3678 case VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3:
3679 {
3680 auto *properties3 = reinterpret_cast<VkFormatProperties3 *>(extensionProperties);
3681 vk::Cast(physicalDevice)->GetFormatProperties(format, properties3);
3682 }
3683 break;
3684 default:
3685 		// "the [driver] must skip over, without processing (other than reading the sType and pNext members) any structures in the chain with sType values not defined by [supported extensions]"
3686 UNSUPPORTED("pFormatProperties->pNext sType = %s", vk::Stringify(extensionProperties->sType).c_str());
3687 break;
3688 }
3689
3690 extensionProperties = extensionProperties->pNext;
3691 }
3692
3693 vkGetPhysicalDeviceFormatProperties(physicalDevice, format, &(pFormatProperties->formatProperties));
3694 }
3695
3696 static bool checkFormatUsage(VkImageUsageFlags usage, VkFormatFeatureFlags features)
3697 {
3698 // Check for usage conflict with features
3699 if((usage & VK_IMAGE_USAGE_SAMPLED_BIT) && !(features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT))
3700 {
3701 return false;
3702 }
3703
3704 if((usage & VK_IMAGE_USAGE_STORAGE_BIT) && !(features & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT))
3705 {
3706 return false;
3707 }
3708
3709 if((usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
3710 {
3711 return false;
3712 }
3713
3714 if((usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) && !(features & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))
3715 {
3716 return false;
3717 }
3718
3719 if((usage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT) && !(features & (VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT | VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)))
3720 {
3721 return false;
3722 }
3723
3724 if((usage & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_SRC_BIT))
3725 {
3726 return false;
3727 }
3728
3729 if((usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT) && !(features & VK_FORMAT_FEATURE_TRANSFER_DST_BIT))
3730 {
3731 return false;
3732 }
3733
3734 return true;
3735 }
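// Example of the rejection above (illustrative): requesting VK_IMAGE_USAGE_STORAGE_BIT for a
// format whose feature flags lack VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT makes checkFormatUsage()
// return false, and vkGetPhysicalDeviceImageFormatProperties2 below then reports
// VK_ERROR_FORMAT_NOT_SUPPORTED for that combination.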
3736
3737 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo, VkImageFormatProperties2 *pImageFormatProperties)
3738 {
3739 TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo = %p, VkImageFormatProperties2* pImageFormatProperties = %p)",
3740 physicalDevice, pImageFormatInfo, pImageFormatProperties);
3741
3742 // "If the combination of parameters to vkGetPhysicalDeviceImageFormatProperties is not supported by the implementation
3743 // for use in vkCreateImage, then all members of VkImageFormatProperties will be filled with zero."
3744 memset(&pImageFormatProperties->imageFormatProperties, 0, sizeof(VkImageFormatProperties));
3745
3746 const VkBaseInStructure *extensionFormatInfo = reinterpret_cast<const VkBaseInStructure *>(pImageFormatInfo->pNext);
3747
3748 const VkExternalMemoryHandleTypeFlagBits *handleType = nullptr;
3749 VkImageUsageFlags stencilUsage = 0;
3750 while(extensionFormatInfo)
3751 {
3752 switch(extensionFormatInfo->sType)
3753 {
3754 case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
3755 {
3756 			// Per the Vulkan spec on VkImageFormatListCreateInfo:
3757 // "If the pNext chain of VkImageCreateInfo includes a
3758 // VkImageFormatListCreateInfo structure, then that
3759 // structure contains a list of all formats that can be
3760 // used when creating views of this image"
3761 			// This limitation does not affect SwiftShader's behavior, and
3762 			// the Vulkan Validation Layers can detect views created with a
3763 			// format that is not included in that list.
3764 }
3765 break;
3766 case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
3767 {
3768 const VkImageStencilUsageCreateInfo *stencilUsageInfo = reinterpret_cast<const VkImageStencilUsageCreateInfo *>(extensionFormatInfo);
3769 stencilUsage = stencilUsageInfo->stencilUsage;
3770 }
3771 break;
3772 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO:
3773 {
3774 const VkPhysicalDeviceExternalImageFormatInfo *imageFormatInfo = reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo *>(extensionFormatInfo);
3775 handleType = &(imageFormatInfo->handleType);
3776 }
3777 break;
3778 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT:
3779 {
3780 // Explicitly ignored, since VK_EXT_image_drm_format_modifier is not supported
3781 ASSERT(!hasDeviceExtension(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME));
3782 }
3783 break;
3784 default:
3785 UNSUPPORTED("pImageFormatInfo->pNext sType = %s", vk::Stringify(extensionFormatInfo->sType).c_str());
3786 break;
3787 }
3788
3789 extensionFormatInfo = extensionFormatInfo->pNext;
3790 }
3791
3792 VkBaseOutStructure *extensionProperties = reinterpret_cast<VkBaseOutStructure *>(pImageFormatProperties->pNext);
3793
3794 #ifdef __ANDROID__
3795 bool hasAHBUsage = false;
3796 #endif
3797
3798 while(extensionProperties)
3799 {
3800 switch(extensionProperties->sType)
3801 {
3802 case VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES:
3803 {
3804 auto properties = reinterpret_cast<VkExternalImageFormatProperties *>(extensionProperties);
3805 vk::Cast(physicalDevice)->getProperties(handleType, properties);
3806 }
3807 break;
3808 case VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES:
3809 {
3810 auto properties = reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties *>(extensionProperties);
3811 vk::Cast(physicalDevice)->getProperties(properties);
3812 }
3813 break;
3814 case VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD:
3815 {
3816 // Explicitly ignored, since VK_AMD_texture_gather_bias_lod is not supported
3817 ASSERT(!hasDeviceExtension(VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME));
3818 }
3819 break;
3820 #ifdef __ANDROID__
3821 case VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID:
3822 {
3823 auto properties = reinterpret_cast<VkAndroidHardwareBufferUsageANDROID *>(extensionProperties);
3824 vk::Cast(physicalDevice)->getProperties(pImageFormatInfo, properties);
3825 hasAHBUsage = true;
3826 }
3827 break;
3828 #endif
3829 default:
3830 UNSUPPORTED("pImageFormatProperties->pNext sType = %s", vk::Stringify(extensionProperties->sType).c_str());
3831 break;
3832 }
3833
3834 extensionProperties = extensionProperties->pNext;
3835 }
3836
3837 vk::Format format = pImageFormatInfo->format;
3838 VkImageType type = pImageFormatInfo->type;
3839 VkImageTiling tiling = pImageFormatInfo->tiling;
3840 VkImageUsageFlags usage = pImageFormatInfo->usage;
3841 VkImageCreateFlags flags = pImageFormatInfo->flags;
3842
3843 VkFormatProperties properties = {};
3844 vk::PhysicalDevice::GetFormatProperties(format, &properties);
3845
3846 if(flags & VK_IMAGE_CREATE_EXTENDED_USAGE_BIT)
3847 {
3848 for(vk::Format f : format.getCompatibleFormats())
3849 {
3850 VkFormatProperties extendedProperties = {};
3851 vk::PhysicalDevice::GetFormatProperties(f, &extendedProperties);
3852 properties.linearTilingFeatures |= extendedProperties.linearTilingFeatures;
3853 properties.optimalTilingFeatures |= extendedProperties.optimalTilingFeatures;
3854 properties.bufferFeatures |= extendedProperties.bufferFeatures;
3855 }
3856 }
3857
3858 VkFormatFeatureFlags features;
3859 switch(tiling)
3860 {
3861 case VK_IMAGE_TILING_LINEAR:
3862 features = properties.linearTilingFeatures;
3863 break;
3864
3865 case VK_IMAGE_TILING_OPTIMAL:
3866 features = properties.optimalTilingFeatures;
3867 break;
3868
3869 default:
3870 UNSUPPORTED("VkImageTiling %d", int(tiling));
3871 features = 0;
3872 }
3873
3874 if(features == 0)
3875 {
3876 return VK_ERROR_FORMAT_NOT_SUPPORTED;
3877 }
3878
3879 // Reject any usage or separate stencil usage that is not compatible with the specified format.
3880 if(!checkFormatUsage(usage, features))
3881 {
3882 return VK_ERROR_FORMAT_NOT_SUPPORTED;
3883 }
3884 // If stencilUsage is 0 then no separate usage was provided and it takes on the same value as usage,
3885 // which has already been checked. So only check non-zero stencilUsage.
3886 if(stencilUsage != 0 && !checkFormatUsage(stencilUsage, features))
3887 {
3888 return VK_ERROR_FORMAT_NOT_SUPPORTED;
3889 }
3890
3891 auto allRecognizedUsageBits = VK_IMAGE_USAGE_SAMPLED_BIT |
3892 VK_IMAGE_USAGE_STORAGE_BIT |
3893 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
3894 VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
3895 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT |
3896 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
3897 VK_IMAGE_USAGE_TRANSFER_DST_BIT |
3898 VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
3899 ASSERT(!(usage & ~(allRecognizedUsageBits)));
3900
3901 if(usage & VK_IMAGE_USAGE_SAMPLED_BIT)
3902 {
3903 if(tiling == VK_IMAGE_TILING_LINEAR)
3904 {
3905 // TODO(b/171299814): Compressed formats and cube maps are not supported for sampling using VK_IMAGE_TILING_LINEAR; otherwise, sampling
3906 // in linear tiling is always supported as long as it can be sampled when using VK_IMAGE_TILING_OPTIMAL.
3907 if(!(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) ||
3908 vk::Format(format).isCompressed() ||
3909 (flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT))
3910 {
3911 return VK_ERROR_FORMAT_NOT_SUPPORTED;
3912 }
3913 }
3914 else if(!(features & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT))
3915 {
3916 return VK_ERROR_FORMAT_NOT_SUPPORTED;
3917 }
3918 }
3919
3920 // "Images created with tiling equal to VK_IMAGE_TILING_LINEAR have further restrictions on their limits and capabilities
3921 // compared to images created with tiling equal to VK_IMAGE_TILING_OPTIMAL."
3922 if(tiling == VK_IMAGE_TILING_LINEAR)
3923 {
3924 if(type != VK_IMAGE_TYPE_2D)
3925 {
3926 return VK_ERROR_FORMAT_NOT_SUPPORTED;
3927 }
3928
3929 if(vk::Format(format).isDepth() || vk::Format(format).isStencil())
3930 {
3931 return VK_ERROR_FORMAT_NOT_SUPPORTED;
3932 }
3933 }
3934
3935 // "Images created with a format from one of those listed in Formats requiring sampler Y'CBCR conversion for VK_IMAGE_ASPECT_COLOR_BIT image views
3936 // have further restrictions on their limits and capabilities compared to images created with other formats."
3937 if(vk::Format(format).isYcbcrFormat())
3938 {
3939 if(type != VK_IMAGE_TYPE_2D)
3940 {
3941 return VK_ERROR_FORMAT_NOT_SUPPORTED;
3942 }
3943 }
3944
3945 vk::Cast(physicalDevice)->getImageFormatProperties(format, type, tiling, usage, flags, &pImageFormatProperties->imageFormatProperties);
3946
3947 #ifdef __ANDROID__
3948 if(hasAHBUsage)
3949 {
3950 // AHardwareBuffer_lock may only be called with a single layer.
3951 pImageFormatProperties->imageFormatProperties.maxArrayLayers = 1;
3952 pImageFormatProperties->imageFormatProperties.maxMipLevels = 1;
3953 }
3954 #endif
3955
3956 return VK_SUCCESS;
3957 }
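// Illustrative sketch (not driver code): querying external-memory capabilities chains
// VkPhysicalDeviceExternalImageFormatInfo on the input and VkExternalImageFormatProperties on
// the output, matching the two pNext loops above. physicalDevice is an assumed app-side handle:
//
//   VkPhysicalDeviceExternalImageFormatInfo externalInfo = {
//       VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, nullptr,
//       VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT
//   };
//   VkPhysicalDeviceImageFormatInfo2 formatInfo = {
//       VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, &externalInfo,
//       VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
//       VK_IMAGE_USAGE_SAMPLED_BIT, 0
//   };
//   VkExternalImageFormatProperties externalProperties = { VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES };
//   VkImageFormatProperties2 properties = { VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, &externalProperties };
//   vkGetPhysicalDeviceImageFormatProperties2(physicalDevice, &formatInfo, &properties);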
3958
3959 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties2 *pQueueFamilyProperties)
3960 {
3961 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pQueueFamilyPropertyCount = %p, VkQueueFamilyProperties2* pQueueFamilyProperties = %p)",
3962 physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
3963
3964 if(pQueueFamilyProperties)
3965 {
3966 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pQueueFamilyProperties->pNext);
3967 while(extInfo)
3968 {
3969 UNSUPPORTED("pQueueFamilyProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3970 extInfo = extInfo->pNext;
3971 }
3972 }
3973
3974 if(!pQueueFamilyProperties)
3975 {
3976 *pQueueFamilyPropertyCount = vk::Cast(physicalDevice)->getQueueFamilyPropertyCount();
3977 }
3978 else
3979 {
3980 vk::Cast(physicalDevice)->getQueueFamilyProperties(*pQueueFamilyPropertyCount, pQueueFamilyProperties);
3981 }
3982 }
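// Illustrative sketch (not driver code): the usual two-call enumeration idiom, first querying
// the count and then the properties. Assumes <vector> on the application side; physicalDevice
// is an assumed handle:
//
//   uint32_t count = 0;
//   vkGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, &count, nullptr);
//   std::vector<VkQueueFamilyProperties2> families(count, VkQueueFamilyProperties2{ VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2 });
//   vkGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, &count, families.data());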
3983
3984 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
3985 {
3986 TRACE("(VkPhysicalDevice physicalDevice = %p, VkPhysicalDeviceMemoryProperties2* pMemoryProperties = %p)", physicalDevice, pMemoryProperties);
3987
3988 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pMemoryProperties->pNext);
3989 while(extInfo)
3990 {
3991 UNSUPPORTED("pMemoryProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
3992 extInfo = extInfo->pNext;
3993 }
3994
3995 vkGetPhysicalDeviceMemoryProperties(physicalDevice, &(pMemoryProperties->memoryProperties));
3996 }
3997
3998 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2 *pFormatInfo, uint32_t *pPropertyCount, VkSparseImageFormatProperties2 *pProperties)
3999 {
4000 TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo = %p, uint32_t* pPropertyCount = %p, VkSparseImageFormatProperties2* pProperties = %p)",
4001 physicalDevice, pFormatInfo, pPropertyCount, pProperties);
4002
4003 if(pProperties)
4004 {
4005 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pProperties->pNext);
4006 while(extInfo)
4007 {
4008 UNSUPPORTED("pProperties->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
4009 extInfo = extInfo->pNext;
4010 }
4011 }
4012
4013 // We do not support sparse images.
4014 *pPropertyCount = 0;
4015 }
4016
4017 VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolProperties(VkPhysicalDevice physicalDevice, uint32_t *pToolCount, VkPhysicalDeviceToolProperties *pToolProperties)
4018 {
4019 TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pToolCount = %p, VkPhysicalDeviceToolProperties* pToolProperties = %p)",
4020 physicalDevice, pToolCount, pToolProperties);
4021
4022 if(!pToolProperties)
4023 {
4024 *pToolCount = 0;
4025 return VK_SUCCESS;
4026 }
4027
4028 return VK_SUCCESS;
4029 }
4030
4031 VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags)
4032 {
4033 TRACE("(VkDevice device = %p, VkCommandPool commandPool = %p, VkCommandPoolTrimFlags flags = %d)",
4034 device, static_cast<void *>(commandPool), flags);
4035
4036 if(flags != 0)
4037 {
4038 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
4039 UNSUPPORTED("flags %d", int(flags));
4040 }
4041
4042 vk::Cast(commandPool)->trim(flags);
4043 }
4044
4045 VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue)
4046 {
4047 TRACE("(VkDevice device = %p, const VkDeviceQueueInfo2* pQueueInfo = %p, VkQueue* pQueue = %p)",
4048 device, pQueueInfo, pQueue);
4049
4050 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pQueueInfo->pNext);
4051 while(extInfo)
4052 {
4053 UNSUPPORTED("pQueueInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
4054 extInfo = extInfo->pNext;
4055 }
4056
4057 if(pQueueInfo->flags != 0)
4058 {
4059 // The only flag that can be set here is VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
4060 // According to the Vulkan 1.2.132 spec, 4.3.1. Queue Family Properties:
4061 // "VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT specifies that the device queue is a
4062 // protected-capable queue. If the protected memory feature is not enabled,
4063 // the VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT bit of flags must not be set."
4064 UNSUPPORTED("VkPhysicalDeviceVulkan11Features::protectedMemory");
4065 }
4066
4067 vkGetDeviceQueue(device, pQueueInfo->queueFamilyIndex, pQueueInfo->queueIndex, pQueue);
4068 }
4069
4070 VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSamplerYcbcrConversion *pYcbcrConversion)
4071 {
4072 TRACE("(VkDevice device = %p, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSamplerYcbcrConversion* pYcbcrConversion = %p)",
4073 device, pCreateInfo, pAllocator, pYcbcrConversion);
4074
4075 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
4076 while(extInfo)
4077 {
4078 switch(extInfo->sType)
4079 {
4080 #ifdef __ANDROID__
4081 case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID:
4082 break;
4083 #endif
4084 default:
4085 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
4086 break;
4087 }
4088 extInfo = extInfo->pNext;
4089 }
4090
4091 return vk::SamplerYcbcrConversion::Create(pAllocator, pCreateInfo, pYcbcrConversion);
4092 }
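// Illustrative sketch (not driver code): a typical Y'CbCr conversion for a two-plane 4:2:0
// format; device is an assumed app-side handle:
//
//   VkSamplerYcbcrConversionCreateInfo conversionInfo = {
//       VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, nullptr,
//       VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
//       VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
//       { VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
//         VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY },
//       VK_CHROMA_LOCATION_COSITED_EVEN, VK_CHROMA_LOCATION_COSITED_EVEN,
//       VK_FILTER_NEAREST, VK_FALSE
//   };
//   VkSamplerYcbcrConversion conversion = VK_NULL_HANDLE;
//   vkCreateSamplerYcbcrConversion(device, &conversionInfo, nullptr, &conversion);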
4093
4094 VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks *pAllocator)
4095 {
4096 TRACE("(VkDevice device = %p, VkSamplerYcbcrConversion ycbcrConversion = %p, const VkAllocationCallbacks* pAllocator = %p)",
4097 device, static_cast<void *>(ycbcrConversion), pAllocator);
4098
4099 vk::destroy(ycbcrConversion, pAllocator);
4100 }
4101
4102 VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
4103 {
4104 TRACE("(VkDevice device = %p, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate = %p)",
4105 device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
4106
4107 if(pCreateInfo->flags != 0)
4108 {
4109 // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
4110 UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
4111 }
4112
4113 if(pCreateInfo->templateType != VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
4114 {
4115 UNSUPPORTED("pCreateInfo->templateType %d", int(pCreateInfo->templateType));
4116 }
4117
4118 auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pCreateInfo->pNext);
4119 while(extInfo)
4120 {
4121 UNSUPPORTED("pCreateInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
4122 extInfo = extInfo->pNext;
4123 }
4124
4125 return vk::DescriptorUpdateTemplate::Create(pAllocator, pCreateInfo, pDescriptorUpdateTemplate);
4126 }
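// Illustrative sketch (not driver code): a descriptor-set update template with a single
// uniform-buffer entry; descriptorSetLayout and device are assumed app-side handles:
//
//   VkDescriptorUpdateTemplateEntry entry = {
//       0, 0, 1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0, sizeof(VkDescriptorBufferInfo)
//   };
//   VkDescriptorUpdateTemplateCreateInfo templateInfo = {
//       VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, nullptr, 0,
//       1, &entry, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
//       descriptorSetLayout, VK_PIPELINE_BIND_POINT_GRAPHICS, VK_NULL_HANDLE, 0
//   };
//   VkDescriptorUpdateTemplate descriptorUpdateTemplate = VK_NULL_HANDLE;
//   vkCreateDescriptorUpdateTemplate(device, &templateInfo, nullptr, &descriptorUpdateTemplate);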
4127
4128 VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks *pAllocator)
4129 {
4130 TRACE("(VkDevice device = %p, VkDescriptorUpdateTemplate descriptorUpdateTemplate = %p, const VkAllocationCallbacks* pAllocator = %p)",
4131 device, static_cast<void *>(descriptorUpdateTemplate), pAllocator);
4132
4133 vk::destroy(descriptorUpdateTemplate, pAllocator);
4134 }
4135
4136 VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void *pData)
4137 {
4138 TRACE("(VkDevice device = %p, VkDescriptorSet descriptorSet = %p, VkDescriptorUpdateTemplate descriptorUpdateTemplate = %p, const void* pData = %p)",
4139 device, static_cast<void *>(descriptorSet), static_cast<void *>(descriptorUpdateTemplate), pData);
4140
4141 vk::Cast(descriptorUpdateTemplate)->updateDescriptorSet(vk::Cast(device), descriptorSet, pData);
4142 }
4143
4144 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo *pExternalBufferInfo, VkExternalBufferProperties *pExternalBufferProperties)
4145 {
4146 TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo = %p, VkExternalBufferProperties* pExternalBufferProperties = %p)",
4147 physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
4148
4149 vk::Cast(physicalDevice)->getProperties(pExternalBufferInfo, pExternalBufferProperties);
4150 }
4151
4152 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo, VkExternalFenceProperties *pExternalFenceProperties)
4153 {
4154 TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo = %p, VkExternalFenceProperties* pExternalFenceProperties = %p)",
4155 physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
4156
4157 vk::Cast(physicalDevice)->getProperties(pExternalFenceInfo, pExternalFenceProperties);
4158 }
4159
4160 VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo, VkExternalSemaphoreProperties *pExternalSemaphoreProperties)
4161 {
4162 TRACE("(VkPhysicalDevice physicalDevice = %p, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo = %p, VkExternalSemaphoreProperties* pExternalSemaphoreProperties = %p)",
4163 physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
4164
4165 vk::Cast(physicalDevice)->getProperties(pExternalSemaphoreInfo, pExternalSemaphoreProperties);
4166 }
4167
4168 VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo, VkDescriptorSetLayoutSupport *pSupport)
4169 {
4170 TRACE("(VkDevice device = %p, const VkDescriptorSetLayoutCreateInfo* pCreateInfo = %p, VkDescriptorSetLayoutSupport* pSupport = %p)",
4171 device, pCreateInfo, pSupport);
4172
4173 vk::Cast(device)->getDescriptorSetLayoutSupport(pCreateInfo, pSupport);
4174 }
4175
4176 VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlot(VkDevice device, const VkPrivateDataSlotCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPrivateDataSlot *pPrivateDataSlot)
4177 {
4178 TRACE("(VkDevice device = %p, const VkPrivateDataSlotCreateInfo* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkPrivateDataSlot* pPrivateDataSlot = %p)",
4179 device, pCreateInfo, pAllocator, pPrivateDataSlot);
4180
4181 return vk::PrivateData::Create(pAllocator, pCreateInfo, pPrivateDataSlot);
4182 }
4183
4184 VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlot(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks *pAllocator)
4185 {
4186 TRACE("(VkDevice device = %p, VkPrivateDataSlot privateDataSlot = %p, const VkAllocationCallbacks* pAllocator = %p)",
4187 device, static_cast<void *>(privateDataSlot), pAllocator);
4188
4189 vk::Cast(device)->removePrivateDataSlot(vk::Cast(privateDataSlot));
4190 vk::destroy(privateDataSlot, pAllocator);
4191 }
4192
4193 VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateData(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data)
4194 {
4195 TRACE("(VkDevice device = %p, VkObjectType objectType = %d, uint64_t objectHandle = %" PRIu64 ", VkPrivateDataSlot privateDataSlot = %p, uint64_t data = %" PRIu64 ")",
4196 device, objectType, objectHandle, static_cast<void *>(privateDataSlot), data);
4197
4198 return vk::Cast(device)->setPrivateData(objectType, objectHandle, vk::Cast(privateDataSlot), data);
4199 }
4200
vkGetPrivateData(VkDevice device,VkObjectType objectType,uint64_t objectHandle,VkPrivateDataSlot privateDataSlot,uint64_t * pData)4201 VKAPI_ATTR void VKAPI_CALL vkGetPrivateData(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t *pData)
4202 {
4203 TRACE("(VkDevice device = %p, VkObjectType objectType = %d, uint64_t objectHandle = %" PRIu64 ", VkPrivateDataSlot privateDataSlot = %p, uint64_t data = %p)",
4204 device, objectType, objectHandle, static_cast<void *>(privateDataSlot), pData);
4205
4206 vk::Cast(device)->getPrivateData(objectType, objectHandle, vk::Cast(privateDataSlot), pData);
4207 }
4208
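// Illustrative sketch, not part of this implementation: how an application might
// exercise the private data entry points above. The `device` and `image` handles
// are assumed to already exist in the caller's code.
//
//   VkPrivateDataSlotCreateInfo slotInfo = { VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO };
//   VkPrivateDataSlot slot;
//   vkCreatePrivateDataSlot(device, &slotInfo, nullptr, &slot);
//   vkSetPrivateData(device, VK_OBJECT_TYPE_IMAGE, (uint64_t)image, slot, 42u);
//   uint64_t value = 0;
//   vkGetPrivateData(device, VK_OBJECT_TYPE_IMAGE, (uint64_t)image, slot, &value);  // value == 42
//   vkDestroyPrivateDataSlot(device, slot, nullptr);
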
VKAPI_ATTR void VKAPI_CALL vkGetDeviceBufferMemoryRequirements(VkDevice device, const VkDeviceBufferMemoryRequirements *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
{
    TRACE("(VkDevice device = %p, const VkDeviceBufferMemoryRequirements* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
          device, pInfo, pMemoryRequirements);

    pMemoryRequirements->memoryRequirements =
        vk::Buffer::GetMemoryRequirements(pInfo->pCreateInfo->size, pInfo->pCreateInfo->usage);
}

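// Illustrative sketch, not part of this implementation: querying buffer memory
// requirements without creating the buffer, using the entry point above. The
// `device` handle is assumed to exist in the caller's code.
//
//   VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
//   bufferInfo.size = 65536;
//   bufferInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
//   VkDeviceBufferMemoryRequirements info = { VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS };
//   info.pCreateInfo = &bufferInfo;
//   VkMemoryRequirements2 requirements = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 };
//   vkGetDeviceBufferMemoryRequirements(device, &info, &requirements);
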
VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageMemoryRequirements(VkDevice device, const VkDeviceImageMemoryRequirements *pInfo, VkMemoryRequirements2 *pMemoryRequirements)
{
    TRACE("(VkDevice device = %p, const VkDeviceImageMemoryRequirements* pInfo = %p, VkMemoryRequirements2* pMemoryRequirements = %p)",
          device, pInfo, pMemoryRequirements);

    auto extInfo = reinterpret_cast<VkBaseInStructure const *>(pInfo->pNext);
    while(extInfo)
    {
        UNSUPPORTED("pInfo->pNext sType = %s", vk::Stringify(extInfo->sType).c_str());
        extInfo = extInfo->pNext;
    }

    // Create a temporary image object to obtain the memory requirements.
    // TODO(b/221299948): Reduce overhead by using a lightweight local proxy.
    pMemoryRequirements->memoryRequirements = {};
    const VkAllocationCallbacks *pAllocator = nullptr;
    VkImage image = { VK_NULL_HANDLE };
    VkResult result = vk::Image::Create(pAllocator, pInfo->pCreateInfo, &image, vk::Cast(device));
    if(result == VK_SUCCESS)
    {
        vk::Cast(image)->getMemoryRequirements(pMemoryRequirements);
    }
    vk::destroy(image, pAllocator);
}

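// Illustrative sketch, not part of this implementation: the image-based
// counterpart of the query above. `device` and `imageInfo` (a populated
// VkImageCreateInfo) are assumed to exist in the caller's code.
//
//   VkDeviceImageMemoryRequirements info = { VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS };
//   info.pCreateInfo = &imageInfo;
//   VkMemoryRequirements2 requirements = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 };
//   vkGetDeviceImageMemoryRequirements(device, &info, &requirements);
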
VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirements(VkDevice device, const VkDeviceImageMemoryRequirements *pInfo, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
{
    TRACE("(VkDevice device = %p, const VkDeviceImageMemoryRequirements* pInfo = %p, uint32_t* pSparseMemoryRequirementCount = %p, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements = %p)",
          device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);

    *pSparseMemoryRequirementCount = 0;
}

VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern)
{
    TRACE("(VkCommandBuffer commandBuffer = %p, uint32_t lineStippleFactor = %u, uint16_t lineStipplePattern = %u)",
          commandBuffer, lineStippleFactor, lineStipplePattern);

    static constexpr uint16_t solidLine = 0xFFFFu;
    if(lineStipplePattern != solidLine)
    {
        // VkPhysicalDeviceLineRasterizationFeaturesEXT::stippled*Lines are all set to VK_FALSE and,
        // according to the Vulkan spec for VkPipelineRasterizationLineStateCreateInfoEXT:
        // "If stippledLineEnable is VK_FALSE, the values of lineStippleFactor and lineStipplePattern are ignored."
        WARN("vkCmdSetLineStippleEXT: line stipple pattern ignored: 0x%04X", lineStipplePattern);
    }
}

VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT *pLabelInfo)
{
    TRACE("(VkCommandBuffer commandBuffer = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
          commandBuffer, pLabelInfo);

    vk::Cast(commandBuffer)->beginDebugUtilsLabel(pLabelInfo);
}

VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT(VkCommandBuffer commandBuffer)
{
    TRACE("(VkCommandBuffer commandBuffer = %p)", commandBuffer);

    vk::Cast(commandBuffer)->endDebugUtilsLabel();
}

VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT *pLabelInfo)
{
    TRACE("(VkCommandBuffer commandBuffer = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
          commandBuffer, pLabelInfo);

    vk::Cast(commandBuffer)->insertDebugUtilsLabel(pLabelInfo);
}

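// Illustrative sketch, not part of this implementation: bracketing a group of
// commands with a debug label via the entry points above. `commandBuffer` is
// assumed to be in the recording state in the caller's code.
//
//   VkDebugUtilsLabelEXT label = { VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT };
//   label.pLabelName = "Shadow pass";
//   vkCmdBeginDebugUtilsLabelEXT(commandBuffer, &label);
//   // ... record the labelled commands ...
//   vkCmdEndDebugUtilsLabelEXT(commandBuffer);
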
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDebugUtilsMessengerEXT *pMessenger)
{
    TRACE("(VkInstance instance = %p, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkDebugUtilsMessengerEXT* pMessenger = %p)",
          instance, pCreateInfo, pAllocator, pMessenger);

    if(pCreateInfo->flags != 0)
    {
        // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
        UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
    }

    return vk::DebugUtilsMessenger::Create(pAllocator, pCreateInfo, pMessenger);
}

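// Illustrative sketch, not part of this implementation: registering a messenger
// through the entry point above. In a real application the function pointer is
// obtained via vkGetInstanceProcAddr; the `instance` handle is assumed to exist
// in the caller's code.
//
//   VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT severity,
//                                                VkDebugUtilsMessageTypeFlagsEXT types,
//                                                const VkDebugUtilsMessengerCallbackDataEXT *pData, void *pUserData)
//   {
//       // ... log pData->pMessage ...
//       return VK_FALSE;  // Never abort the call that triggered the message.
//   }
//
//   VkDebugUtilsMessengerCreateInfoEXT messengerInfo = { VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT };
//   messengerInfo.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
//   messengerInfo.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT;
//   messengerInfo.pfnUserCallback = debugCallback;
//   VkDebugUtilsMessengerEXT messenger;
//   vkCreateDebugUtilsMessengerEXT(instance, &messengerInfo, nullptr, &messenger);
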
VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks *pAllocator)
{
    TRACE("(VkInstance instance = %p, VkDebugUtilsMessengerEXT messenger = %p, const VkAllocationCallbacks* pAllocator = %p)",
          instance, static_cast<void *>(messenger), pAllocator);

    vk::destroy(messenger, pAllocator);
}

VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT *pLabelInfo)
{
    TRACE("(VkQueue queue = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
          queue, pLabelInfo);

    vk::Cast(queue)->beginDebugUtilsLabel(pLabelInfo);
}

VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT(VkQueue queue)
{
    TRACE("(VkQueue queue = %p)", queue);

    vk::Cast(queue)->endDebugUtilsLabel();
}

VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT(VkQueue queue, const VkDebugUtilsLabelEXT *pLabelInfo)
{
    TRACE("(VkQueue queue = %p, const VkDebugUtilsLabelEXT* pLabelInfo = %p)",
          queue, pLabelInfo);

    vk::Cast(queue)->insertDebugUtilsLabel(pLabelInfo);
}

VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT(VkDevice device, const VkDebugUtilsObjectNameInfoEXT *pNameInfo)
{
    TRACE("(VkDevice device = %p, const VkDebugUtilsObjectNameInfoEXT* pNameInfo = %p)",
          device, pNameInfo);

    return vk::Cast(device)->setDebugUtilsObjectName(pNameInfo);
}

VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT(VkDevice device, const VkDebugUtilsObjectTagInfoEXT *pTagInfo)
{
    TRACE("(VkDevice device = %p, const VkDebugUtilsObjectTagInfoEXT* pTagInfo = %p)",
          device, pTagInfo);

    return vk::Cast(device)->setDebugUtilsObjectTag(pTagInfo);
}

VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData)
{
    TRACE("(VkInstance instance = %p, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity = %d, VkDebugUtilsMessageTypeFlagsEXT messageTypes = %d, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData = %p)",
          instance, messageSeverity, messageTypes, pCallbackData);

    vk::Cast(instance)->submitDebugUtilsMessage(messageSeverity, messageTypes, pCallbackData);
}

#ifdef VK_USE_PLATFORM_XCB_KHR
VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR(VkInstance instance, const VkXcbSurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
    TRACE("(VkInstance instance = %p, VkXcbSurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurfaceKHR* pSurface = %p)",
          instance, pCreateInfo, pAllocator, pSurface);

    // VUID-VkXcbSurfaceCreateInfoKHR-connection-01310: connection must point to a valid X11 xcb_connection_t
    ASSERT(pCreateInfo->connection);

    return vk::XcbSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}

VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t *connection, xcb_visualid_t visual_id)
{
    TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, xcb_connection_t* connection = %p, xcb_visualid_t visual_id = %d)",
          physicalDevice, int(queueFamilyIndex), connection, int(visual_id));

    return VK_TRUE;
}
#endif

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
    TRACE("(VkInstance instance = %p, VkWaylandSurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurfaceKHR* pSurface = %p)",
          instance, pCreateInfo, pAllocator, pSurface);

    return vk::WaylandSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}

VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display *display)
{
    TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, struct wl_display* display = %p)",
          physicalDevice, int(queueFamilyIndex), display);

    return VK_TRUE;
}
#endif

#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDirectFBSurfaceEXT(VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
    TRACE("(VkInstance instance = %p, VkDirectFBSurfaceCreateInfoEXT* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurfaceKHR* pSurface = %p)",
          instance, pCreateInfo, pAllocator, pSurface);

    return vk::DirectFBSurfaceEXT::Create(pAllocator, pCreateInfo, pSurface);
}

VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceDirectFBPresentationSupportEXT(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB *dfb)
{
    TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, IDirectFB* dfb = %p)",
          physicalDevice, int(queueFamilyIndex), dfb);

    return VK_TRUE;
}
#endif

#ifdef VK_USE_PLATFORM_DISPLAY_KHR
VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDisplayModeKHR *pMode)
{
    TRACE("(VkPhysicalDevice physicalDevice = %p, VkDisplayKHR display = %p, VkDisplayModeCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkDisplayModeKHR* pMode = %p)",
          physicalDevice, static_cast<void *>(display), pCreateInfo, pAllocator, pMode);

    return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
    TRACE("(VkInstance instance = %p, VkDisplaySurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurfaceKHR* pSurface = %p)",
          instance, pCreateInfo, pAllocator, pSurface);

    return vk::DisplaySurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount, VkDisplayModePropertiesKHR *pProperties)
{
    TRACE("(VkPhysicalDevice physicalDevice = %p, VkDisplayKHR display = %p, uint32_t* pPropertyCount = %p, VkDisplayModePropertiesKHR* pProperties = %p)",
          physicalDevice, static_cast<void *>(display), pPropertyCount, pProperties);

    return vk::DisplaySurfaceKHR::GetDisplayModeProperties(pPropertyCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR *pCapabilities)
{
    TRACE("(VkPhysicalDevice physicalDevice = %p, VkDisplayModeKHR mode = %p, uint32_t planeIndex = %d, VkDisplayPlaneCapabilitiesKHR* pCapabilities = %p)",
          physicalDevice, static_cast<void *>(mode), planeIndex, pCapabilities);

    return vk::DisplaySurfaceKHR::GetDisplayPlaneCapabilities(pCapabilities);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t *pDisplayCount, VkDisplayKHR *pDisplays)
{
    TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t planeIndex = %d, uint32_t* pDisplayCount = %p, VkDisplayKHR* pDisplays = %p)",
          physicalDevice, planeIndex, pDisplayCount, pDisplays);

    return vk::DisplaySurfaceKHR::GetDisplayPlaneSupportedDisplays(pDisplayCount, pDisplays);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayPlanePropertiesKHR *pProperties)
{
    TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pPropertyCount = %p, VkDisplayPlanePropertiesKHR* pProperties = %p)",
          physicalDevice, pPropertyCount, pProperties);

    return vk::DisplaySurfaceKHR::GetPhysicalDeviceDisplayPlaneProperties(pPropertyCount, pProperties);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayPropertiesKHR *pProperties)
{
    TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t* pPropertyCount = %p, VkDisplayPropertiesKHR* pProperties = %p)",
          physicalDevice, pPropertyCount, pProperties);

    return vk::DisplaySurfaceKHR::GetPhysicalDeviceDisplayProperties(pPropertyCount, pProperties);
}
#endif

#ifdef VK_USE_PLATFORM_MACOS_MVK
VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
    TRACE("(VkInstance instance = %p, VkMacOSSurfaceCreateInfoMVK* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurfaceKHR* pSurface = %p)",
          instance, pCreateInfo, pAllocator, pSurface);

    return vk::MacOSSurfaceMVK::Create(pAllocator, pCreateInfo, pSurface);
}
#endif

#ifdef VK_USE_PLATFORM_METAL_EXT
VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT(VkInstance instance, const VkMetalSurfaceCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
    TRACE("(VkInstance instance = %p, VkMetalSurfaceCreateInfoEXT* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurfaceKHR* pSurface = %p)",
          instance, pCreateInfo, pAllocator, pSurface);

    return vk::MetalSurfaceEXT::Create(pAllocator, pCreateInfo, pSurface);
}
#endif

#ifdef VK_USE_PLATFORM_WIN32_KHR
VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR(VkInstance instance, const VkWin32SurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
    TRACE("(VkInstance instance = %p, VkWin32SurfaceCreateInfoKHR* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurfaceKHR* pSurface = %p)",
          instance, pCreateInfo, pAllocator, pSurface);

    return vk::Win32SurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}

VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex)
{
    TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d)",
          physicalDevice, queueFamilyIndex);
    return VK_TRUE;
}
#endif

VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface)
{
    TRACE("(VkInstance instance = %p, VkHeadlessSurfaceCreateInfoEXT* pCreateInfo = %p, VkAllocationCallbacks* pAllocator = %p, VkSurfaceKHR* pSurface = %p)",
          instance, pCreateInfo, pAllocator, pSurface);

    return vk::HeadlessSurfaceKHR::Create(pAllocator, pCreateInfo, pSurface);
}

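// Illustrative sketch, not part of this implementation: creating a headless
// surface through the entry point above; the `instance` handle is assumed to
// exist in the caller's code. The same create/destroy pattern applies to the
// platform-specific surface types guarded by the #ifdef blocks above.
//
//   VkHeadlessSurfaceCreateInfoEXT surfaceInfo = { VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT };
//   VkSurfaceKHR surface;
//   vkCreateHeadlessSurfaceEXT(instance, &surfaceInfo, nullptr, &surface);
//   // ... use the surface with the swapchain entry points below ...
//   vkDestroySurfaceKHR(instance, surface, nullptr);
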
#ifndef __ANDROID__
VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks *pAllocator)
{
    TRACE("(VkInstance instance = %p, VkSurfaceKHR surface = %p, const VkAllocationCallbacks* pAllocator = %p)",
          instance, static_cast<void *>(surface), pAllocator);

    vk::destroy(surface, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32 *pSupported)
{
    TRACE("(VkPhysicalDevice physicalDevice = %p, uint32_t queueFamilyIndex = %d, VkSurfaceKHR surface = %p, VkBool32* pSupported = %p)",
          physicalDevice, int(queueFamilyIndex), static_cast<void *>(surface), pSupported);

    *pSupported = VK_TRUE;
    return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR *pSurfaceCapabilities)
{
    TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities = %p)",
          physicalDevice, static_cast<void *>(surface), pSurfaceCapabilities);

    return vk::Cast(surface)->getSurfaceCapabilities(pSurfaceCapabilities);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pSurfaceFormatCount, VkSurfaceFormatKHR *pSurfaceFormats)
{
    TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, uint32_t* pSurfaceFormatCount = %p, VkSurfaceFormatKHR* pSurfaceFormats = %p)",
          physicalDevice, static_cast<void *>(surface), pSurfaceFormatCount, pSurfaceFormats);

    if(!pSurfaceFormats)
    {
        *pSurfaceFormatCount = vk::Cast(surface)->getSurfaceFormatsCount();
        return VK_SUCCESS;
    }

    return vk::Cast(surface)->getSurfaceFormats(pSurfaceFormatCount, pSurfaceFormats);
}

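// Illustrative sketch, not part of this implementation: the standard two-call
// enumeration pattern served by the query above. `physicalDevice` and `surface`
// are assumed to exist in the caller's code.
//
//   uint32_t formatCount = 0;
//   vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, &formatCount, nullptr);
//   std::vector<VkSurfaceFormatKHR> formats(formatCount);
//   vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, &formatCount, formats.data());
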
VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pPresentModeCount, VkPresentModeKHR *pPresentModes)
{
    TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, uint32_t* pPresentModeCount = %p, VkPresentModeKHR* pPresentModes = %p)",
          physicalDevice, static_cast<void *>(surface), pPresentModeCount, pPresentModes);

    if(!pPresentModes)
    {
        *pPresentModeCount = vk::Cast(surface)->getPresentModeCount();
        return VK_SUCCESS;
    }

    return vk::Cast(surface)->getPresentModes(pPresentModeCount, pPresentModes);
}

VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain)
{
    TRACE("(VkDevice device = %p, const VkSwapchainCreateInfoKHR* pCreateInfo = %p, const VkAllocationCallbacks* pAllocator = %p, VkSwapchainKHR* pSwapchain = %p)",
          device, pCreateInfo, pAllocator, pSwapchain);

    if(pCreateInfo->oldSwapchain)
    {
        vk::Cast(pCreateInfo->oldSwapchain)->retire();
    }

    if(vk::Cast(pCreateInfo->surface)->hasAssociatedSwapchain())
    {
        return VK_ERROR_NATIVE_WINDOW_IN_USE_KHR;
    }

    VkResult status = vk::SwapchainKHR::Create(pAllocator, pCreateInfo, pSwapchain);

    if(status != VK_SUCCESS)
    {
        return status;
    }

    auto swapchain = vk::Cast(*pSwapchain);
    status = swapchain->createImages(device, pCreateInfo);

    if(status != VK_SUCCESS)
    {
        vk::destroy(*pSwapchain, pAllocator);
        return status;
    }

    vk::Cast(pCreateInfo->surface)->associateSwapchain(swapchain);

    return VK_SUCCESS;
}

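// Illustrative sketch, not part of this implementation: a minimal swapchain
// request against the creation path above (several required fields such as
// preTransform and compositeAlpha are omitted for brevity). `device`, `surface`
// and `oldSwapchain` are assumed to exist in the caller's code; a non-null
// oldSwapchain is retired by the call, as implemented above.
//
//   VkSwapchainCreateInfoKHR swapchainInfo = { VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR };
//   swapchainInfo.surface = surface;
//   swapchainInfo.minImageCount = 2;
//   swapchainInfo.imageFormat = VK_FORMAT_B8G8R8A8_UNORM;
//   swapchainInfo.imageColorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
//   swapchainInfo.imageExtent = { 640, 480 };
//   swapchainInfo.imageArrayLayers = 1;
//   swapchainInfo.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
//   swapchainInfo.presentMode = VK_PRESENT_MODE_FIFO_KHR;
//   swapchainInfo.oldSwapchain = oldSwapchain;
//   VkSwapchainKHR swapchain;
//   vkCreateSwapchainKHR(device, &swapchainInfo, nullptr, &swapchain);
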
VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator)
{
    TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, const VkAllocationCallbacks* pAllocator = %p)",
          device, static_cast<void *>(swapchain), pAllocator);

    vk::destroy(swapchain, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages)
{
    TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, uint32_t* pSwapchainImageCount = %p, VkImage* pSwapchainImages = %p)",
          device, static_cast<void *>(swapchain), pSwapchainImageCount, pSwapchainImages);

    if(!pSwapchainImages)
    {
        *pSwapchainImageCount = vk::Cast(swapchain)->getImageCount();
        return VK_SUCCESS;
    }

    return vk::Cast(swapchain)->getImages(pSwapchainImageCount, pSwapchainImages);
}

VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex)
{
    TRACE("(VkDevice device = %p, VkSwapchainKHR swapchain = %p, uint64_t timeout = %" PRIu64 ", VkSemaphore semaphore = %p, VkFence fence = %p, uint32_t* pImageIndex = %p)",
          device, static_cast<void *>(swapchain), timeout, static_cast<void *>(semaphore), static_cast<void *>(fence), pImageIndex);

    return vk::Cast(swapchain)->getNextImage(timeout, vk::DynamicCast<vk::BinarySemaphore>(semaphore), vk::Cast(fence), pImageIndex);
}

VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR(VkQueue queue, const VkPresentInfoKHR *pPresentInfo)
{
    TRACE("(VkQueue queue = %p, const VkPresentInfoKHR* pPresentInfo = %p)",
          queue, pPresentInfo);

    return vk::Cast(queue)->present(pPresentInfo);
}

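// Illustrative sketch, not part of this implementation: one iteration of an
// acquire/submit/present loop built on the entry points above. `device`,
// `queue`, `swapchain`, `acquireSemaphore` and `renderSemaphore` are assumed
// to exist in the caller's code.
//
//   uint32_t imageIndex = 0;
//   vkAcquireNextImageKHR(device, swapchain, UINT64_MAX, acquireSemaphore, VK_NULL_HANDLE, &imageIndex);
//   // ... submit work that waits on acquireSemaphore and signals renderSemaphore ...
//   VkPresentInfoKHR presentInfo = { VK_STRUCTURE_TYPE_PRESENT_INFO_KHR };
//   presentInfo.waitSemaphoreCount = 1;
//   presentInfo.pWaitSemaphores = &renderSemaphore;
//   presentInfo.swapchainCount = 1;
//   presentInfo.pSwapchains = &swapchain;
//   presentInfo.pImageIndices = &imageIndex;
//   vkQueuePresentKHR(queue, &presentInfo);
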
VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR(VkDevice device, const VkAcquireNextImageInfoKHR *pAcquireInfo, uint32_t *pImageIndex)
{
    TRACE("(VkDevice device = %p, const VkAcquireNextImageInfoKHR *pAcquireInfo = %p, uint32_t *pImageIndex = %p)",
          device, pAcquireInfo, pImageIndex);

    return vk::Cast(pAcquireInfo->swapchain)->getNextImage(pAcquireInfo->timeout, vk::DynamicCast<vk::BinarySemaphore>(pAcquireInfo->semaphore), vk::Cast(pAcquireInfo->fence), pImageIndex);
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR *pDeviceGroupPresentCapabilities)
{
    TRACE("(VkDevice device = %p, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities = %p)",
          device, pDeviceGroupPresentCapabilities);

    for(unsigned int i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; i++)
    {
        // The only real physical device in the presentation group is device 0,
        // and it can present to itself.
        pDeviceGroupPresentCapabilities->presentMask[i] = (i == 0) ? 1 : 0;
    }

    pDeviceGroupPresentCapabilities->modes = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;

    return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR *pModes)
{
    TRACE("(VkDevice device = %p, VkSurfaceKHR surface = %p, VkDeviceGroupPresentModeFlagsKHR *pModes = %p)",
          device, static_cast<void *>(surface), pModes);

    *pModes = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR;
    return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pRectCount, VkRect2D *pRects)
{
    TRACE("(VkPhysicalDevice physicalDevice = %p, VkSurfaceKHR surface = %p, uint32_t* pRectCount = %p, VkRect2D* pRects = %p)",
          physicalDevice, static_cast<void *>(surface), pRectCount, pRects);

    return vk::Cast(surface)->getPresentRectangles(pRectCount, pRects);
}

#endif // ! __ANDROID__

#ifdef __ANDROID__

VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainGrallocUsage2ANDROID(VkDevice device, VkFormat format, VkImageUsageFlags imageUsage, VkSwapchainImageUsageFlagsANDROID swapchainUsage, uint64_t *grallocConsumerUsage, uint64_t *grallocProducerUsage)
{
    TRACE("(VkDevice device = %p, VkFormat format = %d, VkImageUsageFlags imageUsage = %d, VkSwapchainImageUsageFlagsANDROID swapchainUsage = %d, uint64_t* grallocConsumerUsage = %p, uint64_t* grallocProducerUsage = %p)",
          device, format, imageUsage, swapchainUsage, grallocConsumerUsage, grallocProducerUsage);

    *grallocConsumerUsage = 0;
    *grallocProducerUsage = GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN;

    return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainGrallocUsageANDROID(VkDevice device, VkFormat format, VkImageUsageFlags imageUsage, int *grallocUsage)
{
    TRACE("(VkDevice device = %p, VkFormat format = %d, VkImageUsageFlags imageUsage = %d, int* grallocUsage = %p)",
          device, format, imageUsage, grallocUsage);

    *grallocUsage = GRALLOC_USAGE_SW_WRITE_OFTEN;

    return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkAcquireImageANDROID(VkDevice device, VkImage image, int nativeFenceFd, VkSemaphore semaphore, VkFence fence)
{
    TRACE("(VkDevice device = %p, VkImage image = %p, int nativeFenceFd = %d, VkSemaphore semaphore = %p, VkFence fence = %p)",
          device, static_cast<void *>(image), nativeFenceFd, static_cast<void *>(semaphore), static_cast<void *>(fence));

    if(nativeFenceFd >= 0)
    {
        sync_wait(nativeFenceFd, -1);
        close(nativeFenceFd);
    }

    if(fence != VK_NULL_HANDLE)
    {
        vk::Cast(fence)->complete();
    }

    if(semaphore != VK_NULL_HANDLE)
    {
        vk::DynamicCast<vk::BinarySemaphore>(semaphore)->signal();
    }

    return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL vkQueueSignalReleaseImageANDROID(VkQueue queue, uint32_t waitSemaphoreCount, const VkSemaphore *pWaitSemaphores, VkImage image, int *pNativeFenceFd)
{
    TRACE("(VkQueue queue = %p, uint32_t waitSemaphoreCount = %d, const VkSemaphore* pWaitSemaphores = %p, VkImage image = %p, int* pNativeFenceFd = %p)",
          queue, waitSemaphoreCount, pWaitSemaphores, static_cast<void *>(image), pNativeFenceFd);

    // This is a hack to deal with screen tearing for now.
    // Need to correctly implement threading using VkSemaphore
    // to get rid of it. b/132458423
    vkQueueWaitIdle(queue);

    *pNativeFenceFd = -1;

    return vk::Cast(image)->prepareForExternalUseANDROID();
}
#endif // __ANDROID__
}