//
// Copyright (c) 2017-2021 Advanced Micro Devices, Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//

#ifdef _WIN32

#include "SparseBindingTest.h"
#include "Tests.h"
#include "VmaUsage.h"
#include "Common.h"
#include <atomic>
#include <Shlwapi.h>

#pragma comment(lib, "shlwapi.lib")

static const char* const SHADER_PATH1 = "./";
static const char* const SHADER_PATH2 = "../bin/";
static const wchar_t* const WINDOW_CLASS_NAME = L"VULKAN_MEMORY_ALLOCATOR_SAMPLE";
static const char* const VALIDATION_LAYER_NAME = "VK_LAYER_KHRONOS_validation";
static const char* const APP_TITLE_A =     "Vulkan Memory Allocator Sample 2.4.0";
static const wchar_t* const APP_TITLE_W = L"Vulkan Memory Allocator Sample 2.4.0";

static const bool VSYNC = true;
static const uint32_t COMMAND_BUFFER_COUNT = 2;
static void* const CUSTOM_CPU_ALLOCATION_CALLBACK_USER_DATA = (void*)(intptr_t)43564544;
static const bool USE_CUSTOM_CPU_ALLOCATION_CALLBACKS = true;

enum class ExitCode : int
{
    GPUList = 2,
    Help = 1,
    Success = 0,
    RuntimeError = -1,
    CommandLineError = -2,
};

VkPhysicalDevice g_hPhysicalDevice;
VkDevice g_hDevice;
VmaAllocator g_hAllocator;
VkInstance g_hVulkanInstance;

bool g_EnableValidationLayer = true;
bool VK_KHR_get_memory_requirements2_enabled = false;
bool VK_KHR_get_physical_device_properties2_enabled = false;
bool VK_KHR_dedicated_allocation_enabled = false;
bool VK_KHR_bind_memory2_enabled = false;
bool VK_EXT_memory_budget_enabled = false;
bool VK_AMD_device_coherent_memory_enabled = false;
bool VK_KHR_buffer_device_address_enabled = false;
bool VK_EXT_memory_priority_enabled = false;
bool VK_EXT_debug_utils_enabled = false;
bool g_SparseBindingEnabled = false;

// # Pointers to functions from extensions
PFN_vkGetBufferDeviceAddressKHR g_vkGetBufferDeviceAddressKHR;

static HINSTANCE g_hAppInstance;
static HWND g_hWnd;
static LONG g_SizeX = 1280, g_SizeY = 720;
static VkSurfaceKHR g_hSurface;
static VkQueue g_hPresentQueue;
static VkSurfaceFormatKHR g_SurfaceFormat;
static VkExtent2D g_Extent;
static VkSwapchainKHR g_hSwapchain;
static std::vector<VkImage> g_SwapchainImages;
static std::vector<VkImageView> g_SwapchainImageViews;
static std::vector<VkFramebuffer> g_Framebuffers;
static VkCommandPool g_hCommandPool;
static VkCommandBuffer g_MainCommandBuffers[COMMAND_BUFFER_COUNT];
static VkFence g_MainCommandBufferExecutedFances[COMMAND_BUFFER_COUNT];
VkFence g_ImmediateFence;
static uint32_t g_NextCommandBufferIndex;
static VkSemaphore g_hImageAvailableSemaphore;
static VkSemaphore g_hRenderFinishedSemaphore;
static uint32_t g_GraphicsQueueFamilyIndex = UINT_MAX;
static uint32_t g_PresentQueueFamilyIndex = UINT_MAX;
static uint32_t g_SparseBindingQueueFamilyIndex = UINT_MAX;
static VkDescriptorSetLayout g_hDescriptorSetLayout;
static VkDescriptorPool g_hDescriptorPool;
static VkDescriptorSet g_hDescriptorSet; // Automatically destroyed with m_DescriptorPool.
static VkSampler g_hSampler;
static VkFormat g_DepthFormat;
static VkImage g_hDepthImage;
static VmaAllocation g_hDepthImageAlloc;
static VkImageView g_hDepthImageView;

static VkSurfaceCapabilitiesKHR g_SurfaceCapabilities;
static std::vector<VkSurfaceFormatKHR> g_SurfaceFormats;
static std::vector<VkPresentModeKHR> g_PresentModes;

static const VkDebugUtilsMessageSeverityFlagsEXT DEBUG_UTILS_MESSENGER_MESSAGE_SEVERITY =
    //VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT |
    //VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT |
    VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
    VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
static const VkDebugUtilsMessageTypeFlagsEXT DEBUG_UTILS_MESSENGER_MESSAGE_TYPE =
    VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
    VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
    VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
static PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT_Func;
static PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT_Func;
static PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT_Func;

static VkQueue g_hGraphicsQueue;
VkQueue g_hSparseBindingQueue;
VkCommandBuffer g_hTemporaryCommandBuffer;

static VkPipelineLayout g_hPipelineLayout;
static VkRenderPass g_hRenderPass;
static VkPipeline g_hPipeline;

static VkBuffer g_hVertexBuffer;
static VmaAllocation g_hVertexBufferAlloc;
static VkBuffer g_hIndexBuffer;
static VmaAllocation g_hIndexBufferAlloc;
static uint32_t g_VertexCount;
static uint32_t g_IndexCount;

static VkImage g_hTextureImage;
static VmaAllocation g_hTextureImageAlloc;
static VkImageView g_hTextureImageView;

static std::atomic_uint32_t g_CpuAllocCount;

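// Custom CPU allocation callbacks exposed to Vulkan through VkAllocationCallbacks (g_Allocs).
// They forward to _aligned_malloc/_aligned_realloc/_aligned_free and keep g_CpuAllocCount
// up to date, so host-memory leaks can be detected via the TEST checks.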
static void* CustomCpuAllocation(
    void* pUserData, size_t size, size_t alignment,
    VkSystemAllocationScope allocationScope)
{
    assert(pUserData == CUSTOM_CPU_ALLOCATION_CALLBACK_USER_DATA);
    void* const result = _aligned_malloc(size, alignment);
    if(result)
    {
        ++g_CpuAllocCount;
    }
    return result;
}

static void* CustomCpuReallocation(
    void* pUserData, void* pOriginal, size_t size, size_t alignment,
    VkSystemAllocationScope allocationScope)
{
    assert(pUserData == CUSTOM_CPU_ALLOCATION_CALLBACK_USER_DATA);
    void* const result = _aligned_realloc(pOriginal, size, alignment);
    if(pOriginal && !result)
    {
        --g_CpuAllocCount;
    }
    else if(!pOriginal && result)
    {
        ++g_CpuAllocCount;
    }
    return result;
}

static void CustomCpuFree(void* pUserData, void* pMemory)
{
    assert(pUserData == CUSTOM_CPU_ALLOCATION_CALLBACK_USER_DATA);
    if(pMemory)
    {
        const uint32_t oldAllocCount = g_CpuAllocCount.fetch_sub(1);
        TEST(oldAllocCount > 0);
        _aligned_free(pMemory);
    }
}

static const VkAllocationCallbacks g_CpuAllocationCallbacks = {
    CUSTOM_CPU_ALLOCATION_CALLBACK_USER_DATA, // pUserData
    &CustomCpuAllocation, // pfnAllocation
    &CustomCpuReallocation, // pfnReallocation
    &CustomCpuFree // pfnFree
};

const VkAllocationCallbacks* g_Allocs;

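// GPU selection requested from the command line: either a zero-based adapter index
// (-i/--GPUIndex) or a case-insensitive substring of the device name (-g/--GPU).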
struct GPUSelection
{
    uint32_t Index = UINT32_MAX;
    std::wstring Substring;
};

class VulkanUsage
{
public:
    void Init();
    ~VulkanUsage();
    void PrintPhysicalDeviceList() const;
    // If failed, returns VK_NULL_HANDLE.
    VkPhysicalDevice SelectPhysicalDevice(const GPUSelection& GPUSelection) const;

private:
    VkDebugUtilsMessengerEXT m_DebugUtilsMessenger = VK_NULL_HANDLE;

    void RegisterDebugCallbacks();
    static bool IsLayerSupported(const VkLayerProperties* pProps, size_t propCount, const char* pLayerName);
};

struct CommandLineParameters
{
    bool m_Help = false;
    bool m_List = false;
    GPUSelection m_GPUSelection;

    bool Parse(int argc, wchar_t** argv)
    {
        for(int i = 1; i < argc; ++i)
        {
            if(_wcsicmp(argv[i], L"-h") == 0 || _wcsicmp(argv[i], L"--Help") == 0)
            {
                m_Help = true;
            }
            else if(_wcsicmp(argv[i], L"-l") == 0 || _wcsicmp(argv[i], L"--List") == 0)
            {
                m_List = true;
            }
            else if((_wcsicmp(argv[i], L"-g") == 0 || _wcsicmp(argv[i], L"--GPU") == 0) && i + 1 < argc)
            {
                m_GPUSelection.Substring = argv[i + 1];
                ++i;
            }
            else if((_wcsicmp(argv[i], L"-i") == 0 || _wcsicmp(argv[i], L"--GPUIndex") == 0) && i + 1 < argc)
            {
                m_GPUSelection.Index = _wtoi(argv[i + 1]);
                ++i;
            }
            else
                return false;
        }
        return true;
    }
} g_CommandLineParameters;

void SetDebugUtilsObjectName(VkObjectType type, uint64_t handle, const char* name)
{
    if(vkSetDebugUtilsObjectNameEXT_Func == nullptr)
        return;

    VkDebugUtilsObjectNameInfoEXT info = { VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT };
    info.objectType = type;
    info.objectHandle = handle;
    info.pObjectName = name;
    vkSetDebugUtilsObjectNameEXT_Func(g_hDevice, &info);
}

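// Record and submit a short-lived command buffer on the graphics queue.
// EndSingleTimeCommands() blocks with vkQueueWaitIdle until the submitted work completes.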
void BeginSingleTimeCommands()
{
    VkCommandBufferBeginInfo cmdBufBeginInfo = { VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO };
    cmdBufBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    ERR_GUARD_VULKAN( vkBeginCommandBuffer(g_hTemporaryCommandBuffer, &cmdBufBeginInfo) );
}

void EndSingleTimeCommands()
{
    ERR_GUARD_VULKAN( vkEndCommandBuffer(g_hTemporaryCommandBuffer) );

    VkSubmitInfo submitInfo = { VK_STRUCTURE_TYPE_SUBMIT_INFO };
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers = &g_hTemporaryCommandBuffer;

    ERR_GUARD_VULKAN( vkQueueSubmit(g_hGraphicsQueue, 1, &submitInfo, VK_NULL_HANDLE) );
    ERR_GUARD_VULKAN( vkQueueWaitIdle(g_hGraphicsQueue) );
}

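// Loads a binary SPIR-V shader file into `out`, trying SHADER_PATH1 first and
// falling back to SHADER_PATH2. An empty file leaves `out` cleared.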
void LoadShader(std::vector<char>& out, const char* fileName)
{
    std::ifstream file(std::string(SHADER_PATH1) + fileName, std::ios::ate | std::ios::binary);
    if(file.is_open() == false)
        file.open(std::string(SHADER_PATH2) + fileName, std::ios::ate | std::ios::binary);
    assert(file.is_open());
    size_t fileSize = (size_t)file.tellg();
    if(fileSize > 0)
    {
        out.resize(fileSize);
        file.seekg(0);
        file.read(out.data(), fileSize);
        file.close();
    }
    else
        out.clear();
}

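// VK_EXT_debug_utils messenger callback: colors console output according to message
// severity and mirrors warnings and errors to the debugger output window.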
static VkBool32 VKAPI_PTR MyDebugReportCallback(
    VkDebugUtilsMessageSeverityFlagBitsEXT           messageSeverity,
    VkDebugUtilsMessageTypeFlagsEXT                  messageTypes,
    const VkDebugUtilsMessengerCallbackDataEXT*      pCallbackData,
    void*                                            pUserData)
{
    assert(pCallbackData && pCallbackData->pMessageIdName && pCallbackData->pMessage);

    switch(messageSeverity)
    {
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT:
        SetConsoleColor(CONSOLE_COLOR::WARNING);
        break;
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:
        SetConsoleColor(CONSOLE_COLOR::ERROR_);
        break;
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:
        SetConsoleColor(CONSOLE_COLOR::NORMAL);
        break;
    default: // VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT
        SetConsoleColor(CONSOLE_COLOR::INFO);
    }

    printf("%s \xBA %s\n", pCallbackData->pMessageIdName, pCallbackData->pMessage);

    SetConsoleColor(CONSOLE_COLOR::NORMAL);

    if(messageSeverity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT ||
        messageSeverity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT)
    {
        OutputDebugStringA(pCallbackData->pMessage);
        OutputDebugStringA("\n");
    }

    return VK_FALSE;
}

static VkSurfaceFormatKHR ChooseSurfaceFormat()
{
    assert(!g_SurfaceFormats.empty());

    if((g_SurfaceFormats.size() == 1) && (g_SurfaceFormats[0].format == VK_FORMAT_UNDEFINED))
    {
        VkSurfaceFormatKHR result = { VK_FORMAT_B8G8R8A8_UNORM, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR };
        return result;
    }

    for(const auto& format : g_SurfaceFormats)
    {
        if((format.format == VK_FORMAT_B8G8R8A8_UNORM) &&
            (format.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR))
        {
            return format;
        }
    }

    return g_SurfaceFormats[0];
}

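// Prefers MAILBOX when VSYNC is enabled (IMMEDIATE otherwise) and falls back to FIFO,
// which the Vulkan specification guarantees to be available.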
VkPresentModeKHR ChooseSwapPresentMode()
{
    VkPresentModeKHR preferredMode = VSYNC ? VK_PRESENT_MODE_MAILBOX_KHR : VK_PRESENT_MODE_IMMEDIATE_KHR;

    if(std::find(g_PresentModes.begin(), g_PresentModes.end(), preferredMode) !=
        g_PresentModes.end())
    {
        return preferredMode;
    }

    return VK_PRESENT_MODE_FIFO_KHR;
}

static VkExtent2D ChooseSwapExtent()
{
    if(g_SurfaceCapabilities.currentExtent.width != UINT_MAX)
        return g_SurfaceCapabilities.currentExtent;

    VkExtent2D result = {
        std::max(g_SurfaceCapabilities.minImageExtent.width,
            std::min(g_SurfaceCapabilities.maxImageExtent.width, (uint32_t)g_SizeX)),
        std::max(g_SurfaceCapabilities.minImageExtent.height,
            std::min(g_SurfaceCapabilities.maxImageExtent.height, (uint32_t)g_SizeY)) };
    return result;
}

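// Maps the compile-time VMA_VULKAN_VERSION setting to the matching VK_API_VERSION constant.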
static constexpr uint32_t GetVulkanApiVersion()
{
#if VMA_VULKAN_VERSION == 1002000
    return VK_API_VERSION_1_2;
#elif VMA_VULKAN_VERSION == 1001000
    return VK_API_VERSION_1_1;
#elif VMA_VULKAN_VERSION == 1000000
    return VK_API_VERSION_1_0;
#else
#error Invalid VMA_VULKAN_VERSION.
    return UINT32_MAX;
#endif
}

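// Creates the VkInstance: enumerates instance layers and extensions, enables the
// validation layer and VK_EXT_debug_utils when available, and registers the debug
// messenger defined above.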
void VulkanUsage::Init()
{
    g_hAppInstance = (HINSTANCE)GetModuleHandle(NULL);

    if(USE_CUSTOM_CPU_ALLOCATION_CALLBACKS)
    {
        g_Allocs = &g_CpuAllocationCallbacks;
    }

    uint32_t instanceLayerPropCount = 0;
    ERR_GUARD_VULKAN( vkEnumerateInstanceLayerProperties(&instanceLayerPropCount, nullptr) );
    std::vector<VkLayerProperties> instanceLayerProps(instanceLayerPropCount);
    if(instanceLayerPropCount > 0)
    {
        ERR_GUARD_VULKAN( vkEnumerateInstanceLayerProperties(&instanceLayerPropCount, instanceLayerProps.data()) );
    }

    if(g_EnableValidationLayer)
    {
        if(IsLayerSupported(instanceLayerProps.data(), instanceLayerProps.size(), VALIDATION_LAYER_NAME) == false)
        {
            wprintf(L"Layer \"%hs\" not supported.", VALIDATION_LAYER_NAME);
            g_EnableValidationLayer = false;
        }
    }

    uint32_t availableInstanceExtensionCount = 0;
    ERR_GUARD_VULKAN( vkEnumerateInstanceExtensionProperties(nullptr, &availableInstanceExtensionCount, nullptr) );
    std::vector<VkExtensionProperties> availableInstanceExtensions(availableInstanceExtensionCount);
    if(availableInstanceExtensionCount > 0)
    {
        ERR_GUARD_VULKAN( vkEnumerateInstanceExtensionProperties(nullptr, &availableInstanceExtensionCount, availableInstanceExtensions.data()) );
    }

    std::vector<const char*> enabledInstanceExtensions;
    enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
    enabledInstanceExtensions.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);

    std::vector<const char*> instanceLayers;
    if(g_EnableValidationLayer)
    {
        instanceLayers.push_back(VALIDATION_LAYER_NAME);
    }

    for(const auto& extensionProperties : availableInstanceExtensions)
    {
        if(strcmp(extensionProperties.extensionName, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) == 0)
        {
            if(GetVulkanApiVersion() == VK_API_VERSION_1_0)
            {
                enabledInstanceExtensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
                VK_KHR_get_physical_device_properties2_enabled = true;
            }
        }
        else if(strcmp(extensionProperties.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0)
        {
            enabledInstanceExtensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
            VK_EXT_debug_utils_enabled = true;
        }
    }

    VkApplicationInfo appInfo = { VK_STRUCTURE_TYPE_APPLICATION_INFO };
    appInfo.pApplicationName = APP_TITLE_A;
    appInfo.applicationVersion = VK_MAKE_VERSION(1, 0, 0);
    appInfo.pEngineName = "Adam Sawicki Engine";
    appInfo.engineVersion = VK_MAKE_VERSION(1, 0, 0);
    appInfo.apiVersion = GetVulkanApiVersion();

    VkInstanceCreateInfo instInfo = { VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO };
    instInfo.pApplicationInfo = &appInfo;
    instInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
    instInfo.ppEnabledExtensionNames = enabledInstanceExtensions.data();
    instInfo.enabledLayerCount = static_cast<uint32_t>(instanceLayers.size());
    instInfo.ppEnabledLayerNames = instanceLayers.data();

    wprintf(L"Vulkan API version used: ");
    switch(appInfo.apiVersion)
    {
    case VK_API_VERSION_1_0: wprintf(L"1.0\n"); break;
    case VK_API_VERSION_1_1: wprintf(L"1.1\n"); break;
    case VK_API_VERSION_1_2: wprintf(L"1.2\n"); break;
    default: assert(0);
    }

    ERR_GUARD_VULKAN( vkCreateInstance(&instInfo, g_Allocs, &g_hVulkanInstance) );

    if(VK_EXT_debug_utils_enabled)
    {
        RegisterDebugCallbacks();
    }
}

VulkanUsage::~VulkanUsage()
{
    if(m_DebugUtilsMessenger)
    {
        vkDestroyDebugUtilsMessengerEXT_Func(g_hVulkanInstance, m_DebugUtilsMessenger, g_Allocs);
    }

    if(g_hVulkanInstance)
    {
        vkDestroyInstance(g_hVulkanInstance, g_Allocs);
        g_hVulkanInstance = VK_NULL_HANDLE;
    }
}

void VulkanUsage::PrintPhysicalDeviceList() const
{
    uint32_t deviceCount = 0;
    ERR_GUARD_VULKAN(vkEnumeratePhysicalDevices(g_hVulkanInstance, &deviceCount, nullptr));
    std::vector<VkPhysicalDevice> physicalDevices(deviceCount);
    if(deviceCount > 0)
    {
        ERR_GUARD_VULKAN(vkEnumeratePhysicalDevices(g_hVulkanInstance, &deviceCount, physicalDevices.data()));
    }

    for(size_t i = 0; i < deviceCount; ++i)
    {
        VkPhysicalDeviceProperties props = {};
        vkGetPhysicalDeviceProperties(physicalDevices[i], &props);
        wprintf(L"Physical device %zu: %hs\n", i, props.deviceName);
    }
}

VkPhysicalDevice VulkanUsage::SelectPhysicalDevice(const GPUSelection& GPUSelection) const
{
    uint32_t deviceCount = 0;
    ERR_GUARD_VULKAN(vkEnumeratePhysicalDevices(g_hVulkanInstance, &deviceCount, nullptr));
    std::vector<VkPhysicalDevice> physicalDevices(deviceCount);
    if(deviceCount > 0)
    {
        ERR_GUARD_VULKAN(vkEnumeratePhysicalDevices(g_hVulkanInstance, &deviceCount, physicalDevices.data()));
    }

    if(GPUSelection.Index != UINT32_MAX)
    {
        // Cannot specify both index and name.
        if(!GPUSelection.Substring.empty())
        {
            return VK_NULL_HANDLE;
        }

        return GPUSelection.Index < deviceCount ? physicalDevices[GPUSelection.Index] : VK_NULL_HANDLE;
    }

    if(!GPUSelection.Substring.empty())
    {
        VkPhysicalDevice result = VK_NULL_HANDLE;
        std::wstring name;
        for(uint32_t i = 0; i < deviceCount; ++i)
        {
            VkPhysicalDeviceProperties props = {};
            vkGetPhysicalDeviceProperties(physicalDevices[i], &props);
            if(ConvertCharsToUnicode(&name, props.deviceName, strlen(props.deviceName), CP_UTF8) &&
                StrStrI(name.c_str(), GPUSelection.Substring.c_str()))
            {
                // Second matching device found - error.
                if(result != VK_NULL_HANDLE)
                {
                    return VK_NULL_HANDLE;
                }
                // First matching device found.
                result = physicalDevices[i];
            }
        }
        // Found or not, return it.
        return result;
    }

    // Select first one.
    return deviceCount > 0 ? physicalDevices[0] : VK_NULL_HANDLE;
}

void VulkanUsage::RegisterDebugCallbacks()
{
    vkCreateDebugUtilsMessengerEXT_Func = (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(
        g_hVulkanInstance, "vkCreateDebugUtilsMessengerEXT");
    vkDestroyDebugUtilsMessengerEXT_Func = (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(
        g_hVulkanInstance, "vkDestroyDebugUtilsMessengerEXT");
    vkSetDebugUtilsObjectNameEXT_Func = (PFN_vkSetDebugUtilsObjectNameEXT)vkGetInstanceProcAddr(
        g_hVulkanInstance, "vkSetDebugUtilsObjectNameEXT");
    assert(vkCreateDebugUtilsMessengerEXT_Func);
    assert(vkDestroyDebugUtilsMessengerEXT_Func);
    assert(vkSetDebugUtilsObjectNameEXT_Func);

    VkDebugUtilsMessengerCreateInfoEXT messengerCreateInfo = { VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT };
    messengerCreateInfo.messageSeverity = DEBUG_UTILS_MESSENGER_MESSAGE_SEVERITY;
    messengerCreateInfo.messageType = DEBUG_UTILS_MESSENGER_MESSAGE_TYPE;
    messengerCreateInfo.pfnUserCallback = MyDebugReportCallback;
    ERR_GUARD_VULKAN( vkCreateDebugUtilsMessengerEXT_Func(g_hVulkanInstance, &messengerCreateInfo, g_Allocs, &m_DebugUtilsMessenger) );
}

bool VulkanUsage::IsLayerSupported(const VkLayerProperties* pProps, size_t propCount, const char* pLayerName)
{
    const VkLayerProperties* propsEnd = pProps + propCount;
    return std::find_if(
        pProps,
        propsEnd,
        [pLayerName](const VkLayerProperties& prop) -> bool {
        return strcmp(pLayerName, prop.layerName) == 0;
    }) != propsEnd;
}

struct Vertex
{
    float pos[3];
    float color[3];
    float texCoord[2];
};

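// Builds a colored cube as triangle strips with primitive restart (USHRT_MAX),
// uploading vertices and indices through mapped CPU_ONLY staging buffers and copying
// them into GPU_ONLY vertex/index buffers with a one-time command buffer.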
static void CreateMesh()
{
    assert(g_hAllocator);

    static Vertex vertices[] = {
        // -X
        { { -1.f, -1.f, -1.f}, {1.0f, 0.0f, 0.0f}, {0.f, 0.f} },
        { { -1.f, -1.f,  1.f}, {1.0f, 0.0f, 0.0f}, {1.f, 0.f} },
        { { -1.f,  1.f, -1.f}, {1.0f, 0.0f, 0.0f}, {0.f, 1.f} },
        { { -1.f,  1.f,  1.f}, {1.0f, 0.0f, 0.0f}, {1.f, 1.f} },
        // +X
        { { 1.f, -1.f,  1.f}, {0.0f, 1.0f, 0.0f}, {0.f, 0.f} },
        { { 1.f, -1.f, -1.f}, {0.0f, 1.0f, 0.0f}, {1.f, 0.f} },
        { { 1.f,  1.f,  1.f}, {0.0f, 1.0f, 0.0f}, {0.f, 1.f} },
        { { 1.f,  1.f, -1.f}, {0.0f, 1.0f, 0.0f}, {1.f, 1.f} },
        // -Z
        { { 1.f, -1.f, -1.f}, {0.0f, 0.0f, 1.0f}, {0.f, 0.f} },
        { {-1.f, -1.f, -1.f}, {0.0f, 0.0f, 1.0f}, {1.f, 0.f} },
        { { 1.f,  1.f, -1.f}, {0.0f, 0.0f, 1.0f}, {0.f, 1.f} },
        { {-1.f,  1.f, -1.f}, {0.0f, 0.0f, 1.0f}, {1.f, 1.f} },
        // +Z
        { {-1.f, -1.f,  1.f}, {1.0f, 1.0f, 0.0f}, {0.f, 0.f} },
        { { 1.f, -1.f,  1.f}, {1.0f, 1.0f, 0.0f}, {1.f, 0.f} },
        { {-1.f,  1.f,  1.f}, {1.0f, 1.0f, 0.0f}, {0.f, 1.f} },
        { { 1.f,  1.f,  1.f}, {1.0f, 1.0f, 0.0f}, {1.f, 1.f} },
        // -Y
        { {-1.f, -1.f, -1.f}, {0.0f, 1.0f, 1.0f}, {0.f, 0.f} },
        { { 1.f, -1.f, -1.f}, {0.0f, 1.0f, 1.0f}, {1.f, 0.f} },
        { {-1.f, -1.f,  1.f}, {0.0f, 1.0f, 1.0f}, {0.f, 1.f} },
        { { 1.f, -1.f,  1.f}, {0.0f, 1.0f, 1.0f}, {1.f, 1.f} },
        // +Y
        { { 1.f,  1.f, -1.f}, {1.0f, 0.0f, 1.0f}, {0.f, 0.f} },
        { {-1.f,  1.f, -1.f}, {1.0f, 0.0f, 1.0f}, {1.f, 0.f} },
        { { 1.f,  1.f,  1.f}, {1.0f, 0.0f, 1.0f}, {0.f, 1.f} },
        { {-1.f,  1.f,  1.f}, {1.0f, 0.0f, 1.0f}, {1.f, 1.f} },
    };
    static uint16_t indices[] = {
         0,  1,  2,  3, USHRT_MAX,
         4,  5,  6,  7, USHRT_MAX,
         8,  9, 10, 11, USHRT_MAX,
        12, 13, 14, 15, USHRT_MAX,
        16, 17, 18, 19, USHRT_MAX,
        20, 21, 22, 23, USHRT_MAX,
    };

    size_t vertexBufferSize = sizeof(Vertex) * _countof(vertices);
    size_t indexBufferSize = sizeof(uint16_t) * _countof(indices);
    g_IndexCount = (uint32_t)_countof(indices);

    // Create vertex buffer

    VkBufferCreateInfo vbInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    vbInfo.size = vertexBufferSize;
    vbInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    vbInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

    VmaAllocationCreateInfo vbAllocCreateInfo = {};
    vbAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    vbAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VkBuffer stagingVertexBuffer = VK_NULL_HANDLE;
    VmaAllocation stagingVertexBufferAlloc = VK_NULL_HANDLE;
    VmaAllocationInfo stagingVertexBufferAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &vbInfo, &vbAllocCreateInfo, &stagingVertexBuffer, &stagingVertexBufferAlloc, &stagingVertexBufferAllocInfo) );

    memcpy(stagingVertexBufferAllocInfo.pMappedData, vertices, vertexBufferSize);

    // No need to flush stagingVertexBuffer memory because CPU_ONLY memory is always HOST_COHERENT.

    vbInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
    vbAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    vbAllocCreateInfo.flags = 0;
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &vbInfo, &vbAllocCreateInfo, &g_hVertexBuffer, &g_hVertexBufferAlloc, nullptr) );

    // Create index buffer

    VkBufferCreateInfo ibInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    ibInfo.size = indexBufferSize;
    ibInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    ibInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

    VmaAllocationCreateInfo ibAllocCreateInfo = {};
    ibAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    ibAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VkBuffer stagingIndexBuffer = VK_NULL_HANDLE;
    VmaAllocation stagingIndexBufferAlloc = VK_NULL_HANDLE;
    VmaAllocationInfo stagingIndexBufferAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &ibInfo, &ibAllocCreateInfo, &stagingIndexBuffer, &stagingIndexBufferAlloc, &stagingIndexBufferAllocInfo) );

    memcpy(stagingIndexBufferAllocInfo.pMappedData, indices, indexBufferSize);

    // No need to flush stagingIndexBuffer memory because CPU_ONLY memory is always HOST_COHERENT.

    ibInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
    ibAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    ibAllocCreateInfo.flags = 0;
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &ibInfo, &ibAllocCreateInfo, &g_hIndexBuffer, &g_hIndexBufferAlloc, nullptr) );

    // Copy buffers

    BeginSingleTimeCommands();

    VkBufferCopy vbCopyRegion = {};
    vbCopyRegion.srcOffset = 0;
    vbCopyRegion.dstOffset = 0;
    vbCopyRegion.size = vbInfo.size;
    vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingVertexBuffer, g_hVertexBuffer, 1, &vbCopyRegion);

    VkBufferCopy ibCopyRegion = {};
    ibCopyRegion.srcOffset = 0;
    ibCopyRegion.dstOffset = 0;
    ibCopyRegion.size = ibInfo.size;
    vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingIndexBuffer, g_hIndexBuffer, 1, &ibCopyRegion);

    EndSingleTimeCommands();

    vmaDestroyBuffer(g_hAllocator, stagingIndexBuffer, stagingIndexBufferAlloc);
    vmaDestroyBuffer(g_hAllocator, stagingVertexBuffer, stagingVertexBufferAlloc);
}

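// Generates a procedural RGBA8 test pattern in a mapped staging buffer, copies it into
// an optimally tiled GPU_ONLY image, and transitions the image from UNDEFINED through
// TRANSFER_DST_OPTIMAL to SHADER_READ_ONLY_OPTIMAL for sampling in the fragment shader.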
static void CreateTexture(uint32_t sizeX, uint32_t sizeY)
{
    // Create staging buffer.

    const VkDeviceSize imageSize = sizeX * sizeY * 4;

    VkBufferCreateInfo stagingBufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    stagingBufInfo.size = imageSize;
    stagingBufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo stagingBufAllocCreateInfo = {};
    stagingBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    stagingBufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VkBuffer stagingBuf = VK_NULL_HANDLE;
    VmaAllocation stagingBufAlloc = VK_NULL_HANDLE;
    VmaAllocationInfo stagingBufAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &stagingBufInfo, &stagingBufAllocCreateInfo, &stagingBuf, &stagingBufAlloc, &stagingBufAllocInfo) );

    char* const pImageData = (char*)stagingBufAllocInfo.pMappedData;
    uint8_t* pRowData = (uint8_t*)pImageData;
    for(uint32_t y = 0; y < sizeY; ++y)
    {
        uint32_t* pPixelData = (uint32_t*)pRowData;
        // Inner loop runs over the row width (sizeX) to match the sizeX * 4 row stride.
        for(uint32_t x = 0; x < sizeX; ++x)
        {
            *pPixelData =
                ((x & 0x18) == 0x08 ? 0x000000FF : 0x00000000) |
                ((x & 0x18) == 0x10 ? 0x0000FFFF : 0x00000000) |
                ((y & 0x18) == 0x08 ? 0x0000FF00 : 0x00000000) |
                ((y & 0x18) == 0x10 ? 0x00FF0000 : 0x00000000);
            ++pPixelData;
        }
        pRowData += sizeX * 4;
    }

    // No need to flush stagingImage memory because CPU_ONLY memory is always HOST_COHERENT.

    // Create g_hTextureImage in GPU memory.

    VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imageInfo.imageType = VK_IMAGE_TYPE_2D;
    imageInfo.extent.width = sizeX;
    imageInfo.extent.height = sizeY;
    imageInfo.extent.depth = 1;
    imageInfo.mipLevels = 1;
    imageInfo.arrayLayers = 1;
    imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
    imageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    imageInfo.flags = 0;

    VmaAllocationCreateInfo imageAllocCreateInfo = {};
    imageAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    ERR_GUARD_VULKAN( vmaCreateImage(g_hAllocator, &imageInfo, &imageAllocCreateInfo, &g_hTextureImage, &g_hTextureImageAlloc, nullptr) );

    // Transition image layouts, copy image.

    BeginSingleTimeCommands();

    VkImageMemoryBarrier imgMemBarrier = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
    imgMemBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    imgMemBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    imgMemBarrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    imgMemBarrier.subresourceRange.baseMipLevel = 0;
    imgMemBarrier.subresourceRange.levelCount = 1;
    imgMemBarrier.subresourceRange.baseArrayLayer = 0;
    imgMemBarrier.subresourceRange.layerCount = 1;
    imgMemBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imgMemBarrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    imgMemBarrier.image = g_hTextureImage;
    imgMemBarrier.srcAccessMask = 0;
    imgMemBarrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;

    vkCmdPipelineBarrier(
        g_hTemporaryCommandBuffer,
        VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
        VK_PIPELINE_STAGE_TRANSFER_BIT,
        0,
        0, nullptr,
        0, nullptr,
        1, &imgMemBarrier);

    VkBufferImageCopy region = {};
    region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    region.imageSubresource.layerCount = 1;
    region.imageExtent.width = sizeX;
    region.imageExtent.height = sizeY;
    region.imageExtent.depth = 1;

    vkCmdCopyBufferToImage(g_hTemporaryCommandBuffer, stagingBuf, g_hTextureImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);

    imgMemBarrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    imgMemBarrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    imgMemBarrier.image = g_hTextureImage;
    imgMemBarrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    imgMemBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;

    vkCmdPipelineBarrier(
        g_hTemporaryCommandBuffer,
        VK_PIPELINE_STAGE_TRANSFER_BIT,
        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
        0,
        0, nullptr,
        0, nullptr,
        1, &imgMemBarrier);

    EndSingleTimeCommands();

    vmaDestroyBuffer(g_hAllocator, stagingBuf, stagingBufAlloc);

    // Create ImageView

    VkImageViewCreateInfo textureImageViewInfo = { VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO };
    textureImageViewInfo.image = g_hTextureImage;
    textureImageViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
    textureImageViewInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    textureImageViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    textureImageViewInfo.subresourceRange.baseMipLevel = 0;
    textureImageViewInfo.subresourceRange.levelCount = 1;
    textureImageViewInfo.subresourceRange.baseArrayLayer = 0;
    textureImageViewInfo.subresourceRange.layerCount = 1;
    ERR_GUARD_VULKAN( vkCreateImageView(g_hDevice, &textureImageViewInfo, g_Allocs, &g_hTextureImageView) );
}

struct UniformBufferObject
{
    mat4 ModelViewProj;
};

static VkFormat FindSupportedFormat(
    const std::vector<VkFormat>& candidates,
    VkImageTiling tiling,
    VkFormatFeatureFlags features)
{
    for (VkFormat format : candidates)
    {
        VkFormatProperties props;
        vkGetPhysicalDeviceFormatProperties(g_hPhysicalDevice, format, &props);

        if ((tiling == VK_IMAGE_TILING_LINEAR) &&
            ((props.linearTilingFeatures & features) == features))
        {
            return format;
        }
        else if ((tiling == VK_IMAGE_TILING_OPTIMAL) &&
            ((props.optimalTilingFeatures & features) == features))
        {
            return format;
        }
    }
    return VK_FORMAT_UNDEFINED;
}

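// Returns the first format from the preferred depth/stencil list that supports
// depth-stencil attachment usage with optimal tiling, or VK_FORMAT_UNDEFINED.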
static VkFormat FindDepthFormat()
{
    std::vector<VkFormat> formats;
    formats.push_back(VK_FORMAT_D32_SFLOAT);
    formats.push_back(VK_FORMAT_D32_SFLOAT_S8_UINT);
    formats.push_back(VK_FORMAT_D24_UNORM_S8_UINT);

    return FindSupportedFormat(
        formats,
        VK_IMAGE_TILING_OPTIMAL,
        VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT);
}

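// (Re)creates the swapchain and everything derived from its size and format:
// swapchain image views, the depth buffer, pipeline layout, render pass, graphics
// pipeline, framebuffers, and the per-frame synchronization semaphores.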
CreateSwapchain()900 static void CreateSwapchain()
901 {
902     // Query surface formats.
903 
904     ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfaceCapabilitiesKHR(g_hPhysicalDevice, g_hSurface, &g_SurfaceCapabilities) );
905 
906     uint32_t formatCount = 0;
907     ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfaceFormatsKHR(g_hPhysicalDevice, g_hSurface, &formatCount, nullptr) );
908     g_SurfaceFormats.resize(formatCount);
909     ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfaceFormatsKHR(g_hPhysicalDevice, g_hSurface, &formatCount, g_SurfaceFormats.data()) );
910 
911     uint32_t presentModeCount = 0;
912     ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfacePresentModesKHR(g_hPhysicalDevice, g_hSurface, &presentModeCount, nullptr) );
913     g_PresentModes.resize(presentModeCount);
914     ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfacePresentModesKHR(g_hPhysicalDevice, g_hSurface, &presentModeCount, g_PresentModes.data()) );
915 
916     // Create swap chain
917 
918     g_SurfaceFormat = ChooseSurfaceFormat();
919     VkPresentModeKHR presentMode = ChooseSwapPresentMode();
920     g_Extent = ChooseSwapExtent();
921 
922     uint32_t imageCount = g_SurfaceCapabilities.minImageCount + 1;
923     if((g_SurfaceCapabilities.maxImageCount > 0) &&
924         (imageCount > g_SurfaceCapabilities.maxImageCount))
925     {
926         imageCount = g_SurfaceCapabilities.maxImageCount;
927     }
928 
929     VkSwapchainCreateInfoKHR swapChainInfo = { VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR };
930     swapChainInfo.surface = g_hSurface;
931     swapChainInfo.minImageCount = imageCount;
932     swapChainInfo.imageFormat = g_SurfaceFormat.format;
933     swapChainInfo.imageColorSpace = g_SurfaceFormat.colorSpace;
934     swapChainInfo.imageExtent = g_Extent;
935     swapChainInfo.imageArrayLayers = 1;
936     swapChainInfo.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
937     swapChainInfo.preTransform = g_SurfaceCapabilities.currentTransform;
938     swapChainInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
939     swapChainInfo.presentMode = presentMode;
940     swapChainInfo.clipped = VK_TRUE;
941     swapChainInfo.oldSwapchain = g_hSwapchain;
942 
943     uint32_t queueFamilyIndices[] = { g_GraphicsQueueFamilyIndex, g_PresentQueueFamilyIndex };
944     if(g_PresentQueueFamilyIndex != g_GraphicsQueueFamilyIndex)
945     {
946         swapChainInfo.imageSharingMode = VK_SHARING_MODE_CONCURRENT;
947         swapChainInfo.queueFamilyIndexCount = 2;
948         swapChainInfo.pQueueFamilyIndices = queueFamilyIndices;
949     }
950     else
951     {
952         swapChainInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
953     }
954 
955     VkSwapchainKHR hNewSwapchain = VK_NULL_HANDLE;
956     ERR_GUARD_VULKAN( vkCreateSwapchainKHR(g_hDevice, &swapChainInfo, g_Allocs, &hNewSwapchain) );
957     if(g_hSwapchain != VK_NULL_HANDLE)
958         vkDestroySwapchainKHR(g_hDevice, g_hSwapchain, g_Allocs);
959     g_hSwapchain = hNewSwapchain;
960 
961     // Retrieve swapchain images.
962 
963     uint32_t swapchainImageCount = 0;
964     ERR_GUARD_VULKAN( vkGetSwapchainImagesKHR(g_hDevice, g_hSwapchain, &swapchainImageCount, nullptr) );
965     g_SwapchainImages.resize(swapchainImageCount);
966     ERR_GUARD_VULKAN( vkGetSwapchainImagesKHR(g_hDevice, g_hSwapchain, &swapchainImageCount, g_SwapchainImages.data()) );
967 
968     // Create swapchain image views.
969 
970     for(size_t i = g_SwapchainImageViews.size(); i--; )
971         vkDestroyImageView(g_hDevice, g_SwapchainImageViews[i], g_Allocs);
972     g_SwapchainImageViews.clear();
973 
974     VkImageViewCreateInfo swapchainImageViewInfo = { VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO };
975     g_SwapchainImageViews.resize(swapchainImageCount);
976     for(uint32_t i = 0; i < swapchainImageCount; ++i)
977     {
978         swapchainImageViewInfo.image = g_SwapchainImages[i];
979         swapchainImageViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
980         swapchainImageViewInfo.format = g_SurfaceFormat.format;
981         swapchainImageViewInfo.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
982         swapchainImageViewInfo.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
983         swapchainImageViewInfo.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
984         swapchainImageViewInfo.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
985         swapchainImageViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
986         swapchainImageViewInfo.subresourceRange.baseMipLevel = 0;
987         swapchainImageViewInfo.subresourceRange.levelCount = 1;
988         swapchainImageViewInfo.subresourceRange.baseArrayLayer = 0;
989         swapchainImageViewInfo.subresourceRange.layerCount = 1;
990         ERR_GUARD_VULKAN( vkCreateImageView(g_hDevice, &swapchainImageViewInfo, g_Allocs, &g_SwapchainImageViews[i]) );
991     }
992 
993     // Create depth buffer
994 
995     g_DepthFormat = FindDepthFormat();
996     assert(g_DepthFormat != VK_FORMAT_UNDEFINED);
997 
998     VkImageCreateInfo depthImageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
999     depthImageInfo.imageType = VK_IMAGE_TYPE_2D;
1000     depthImageInfo.extent.width = g_Extent.width;
1001     depthImageInfo.extent.height = g_Extent.height;
1002     depthImageInfo.extent.depth = 1;
1003     depthImageInfo.mipLevels = 1;
1004     depthImageInfo.arrayLayers = 1;
1005     depthImageInfo.format = g_DepthFormat;
1006     depthImageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
1007     depthImageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
1008     depthImageInfo.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
1009     depthImageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
1010     depthImageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1011     depthImageInfo.flags = 0;
1012 
1013     VmaAllocationCreateInfo depthImageAllocCreateInfo = {};
1014     depthImageAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1015 
1016     ERR_GUARD_VULKAN( vmaCreateImage(g_hAllocator, &depthImageInfo, &depthImageAllocCreateInfo, &g_hDepthImage, &g_hDepthImageAlloc, nullptr) );
1017 
1018     VkImageViewCreateInfo depthImageViewInfo = { VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO };
1019     depthImageViewInfo.image = g_hDepthImage;
1020     depthImageViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
1021     depthImageViewInfo.format = g_DepthFormat;
1022     depthImageViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
1023     depthImageViewInfo.subresourceRange.baseMipLevel = 0;
1024     depthImageViewInfo.subresourceRange.levelCount = 1;
1025     depthImageViewInfo.subresourceRange.baseArrayLayer = 0;
1026     depthImageViewInfo.subresourceRange.layerCount = 1;
1027 
1028     ERR_GUARD_VULKAN( vkCreateImageView(g_hDevice, &depthImageViewInfo, g_Allocs, &g_hDepthImageView) );
1029 
1030     // Create pipeline layout
1031     {
1032         if(g_hPipelineLayout != VK_NULL_HANDLE)
1033         {
1034             vkDestroyPipelineLayout(g_hDevice, g_hPipelineLayout, g_Allocs);
1035             g_hPipelineLayout = VK_NULL_HANDLE;
1036         }
1037 
1038         VkPushConstantRange pushConstantRanges[1];
1039         ZeroMemory(&pushConstantRanges, sizeof pushConstantRanges);
1040         pushConstantRanges[0].offset = 0;
1041         pushConstantRanges[0].size = sizeof(UniformBufferObject);
1042         pushConstantRanges[0].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
1043 
1044         VkDescriptorSetLayout descriptorSetLayouts[] = { g_hDescriptorSetLayout };
1045         VkPipelineLayoutCreateInfo pipelineLayoutInfo = { VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO };
1046         pipelineLayoutInfo.setLayoutCount = 1;
1047         pipelineLayoutInfo.pSetLayouts = descriptorSetLayouts;
1048         pipelineLayoutInfo.pushConstantRangeCount = 1;
1049         pipelineLayoutInfo.pPushConstantRanges = pushConstantRanges;
1050         ERR_GUARD_VULKAN( vkCreatePipelineLayout(g_hDevice, &pipelineLayoutInfo, g_Allocs, &g_hPipelineLayout) );
1051     }
1052 
1053     // Create render pass
1054     {
1055         if(g_hRenderPass != VK_NULL_HANDLE)
1056         {
1057             vkDestroyRenderPass(g_hDevice, g_hRenderPass, g_Allocs);
1058             g_hRenderPass = VK_NULL_HANDLE;
1059         }
1060 
1061         VkAttachmentDescription attachments[2];
1062         ZeroMemory(attachments, sizeof(attachments));
1063 
1064         attachments[0].format = g_SurfaceFormat.format;
1065         attachments[0].samples = VK_SAMPLE_COUNT_1_BIT;
1066         attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
1067         attachments[0].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
1068         attachments[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
1069         attachments[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
1070         attachments[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
1071         attachments[0].finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
1072 
1073         attachments[1].format = g_DepthFormat;
1074         attachments[1].samples = VK_SAMPLE_COUNT_1_BIT;
1075         attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
1076         attachments[1].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
1077         attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
1078         attachments[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
1079         attachments[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
1080         attachments[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
1081 
1082         VkAttachmentReference colorAttachmentRef = {};
1083         colorAttachmentRef.attachment = 0;
1084         colorAttachmentRef.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
1085 
1086         VkAttachmentReference depthStencilAttachmentRef = {};
1087         depthStencilAttachmentRef.attachment = 1;
1088         depthStencilAttachmentRef.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
1089 
1090         VkSubpassDescription subpassDesc = {};
1091         subpassDesc.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
1092         subpassDesc.colorAttachmentCount = 1;
1093         subpassDesc.pColorAttachments = &colorAttachmentRef;
1094         subpassDesc.pDepthStencilAttachment = &depthStencilAttachmentRef;
1095 
1096         VkRenderPassCreateInfo renderPassInfo = { VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO };
1097         renderPassInfo.attachmentCount = (uint32_t)_countof(attachments);
1098         renderPassInfo.pAttachments = attachments;
1099         renderPassInfo.subpassCount = 1;
1100         renderPassInfo.pSubpasses = &subpassDesc;
1101         renderPassInfo.dependencyCount = 0;
1102         ERR_GUARD_VULKAN( vkCreateRenderPass(g_hDevice, &renderPassInfo, g_Allocs, &g_hRenderPass) );
1103     }
1104 
1105     // Create pipeline
1106     {
1107         std::vector<char> vertShaderCode;
1108         LoadShader(vertShaderCode, "Shader.vert.spv");
1109         VkShaderModuleCreateInfo shaderModuleInfo = { VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO };
1110         shaderModuleInfo.codeSize = vertShaderCode.size();
1111         shaderModuleInfo.pCode = (const uint32_t*)vertShaderCode.data();
1112         VkShaderModule hVertShaderModule = VK_NULL_HANDLE;
1113         ERR_GUARD_VULKAN( vkCreateShaderModule(g_hDevice, &shaderModuleInfo, g_Allocs, &hVertShaderModule) );
1114 
1115         std::vector<char> hFragShaderCode;
1116         LoadShader(hFragShaderCode, "Shader.frag.spv");
1117         shaderModuleInfo.codeSize = hFragShaderCode.size();
1118         shaderModuleInfo.pCode = (const uint32_t*)hFragShaderCode.data();
1119         VkShaderModule fragShaderModule = VK_NULL_HANDLE;
1120         ERR_GUARD_VULKAN( vkCreateShaderModule(g_hDevice, &shaderModuleInfo, g_Allocs, &fragShaderModule) );
1121 
1122         VkPipelineShaderStageCreateInfo vertPipelineShaderStageInfo = { VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO };
1123         vertPipelineShaderStageInfo.stage = VK_SHADER_STAGE_VERTEX_BIT;
1124         vertPipelineShaderStageInfo.module = hVertShaderModule;
1125         vertPipelineShaderStageInfo.pName = "main";
1126 
1127         VkPipelineShaderStageCreateInfo fragPipelineShaderStageInfo = { VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO };
1128         fragPipelineShaderStageInfo.stage = VK_SHADER_STAGE_FRAGMENT_BIT;
1129         fragPipelineShaderStageInfo.module = fragShaderModule;
1130         fragPipelineShaderStageInfo.pName = "main";
1131 
1132         VkPipelineShaderStageCreateInfo pipelineShaderStageInfos[] = {
1133             vertPipelineShaderStageInfo,
1134             fragPipelineShaderStageInfo
1135         };
1136 
1137         VkVertexInputBindingDescription bindingDescription = {};
1138         bindingDescription.binding = 0;
1139         bindingDescription.stride = sizeof(Vertex);
1140         bindingDescription.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
1141 
1142         VkVertexInputAttributeDescription attributeDescriptions[3];
1143         ZeroMemory(attributeDescriptions, sizeof(attributeDescriptions));
1144 
1145         attributeDescriptions[0].binding = 0;
1146         attributeDescriptions[0].location = 0;
1147         attributeDescriptions[0].format = VK_FORMAT_R32G32B32_SFLOAT;
1148         attributeDescriptions[0].offset = offsetof(Vertex, pos);
1149 
1150         attributeDescriptions[1].binding = 0;
1151         attributeDescriptions[1].location = 1;
1152         attributeDescriptions[1].format = VK_FORMAT_R32G32B32_SFLOAT;
1153         attributeDescriptions[1].offset = offsetof(Vertex, color);
1154 
1155         attributeDescriptions[2].binding = 0;
1156         attributeDescriptions[2].location = 2;
1157         attributeDescriptions[2].format = VK_FORMAT_R32G32_SFLOAT;
1158         attributeDescriptions[2].offset = offsetof(Vertex, texCoord);
1159 
1160         VkPipelineVertexInputStateCreateInfo pipelineVertexInputStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO };
1161         pipelineVertexInputStateInfo.vertexBindingDescriptionCount = 1;
1162         pipelineVertexInputStateInfo.pVertexBindingDescriptions = &bindingDescription;
1163         pipelineVertexInputStateInfo.vertexAttributeDescriptionCount = _countof(attributeDescriptions);
1164         pipelineVertexInputStateInfo.pVertexAttributeDescriptions = attributeDescriptions;
1165 
1166         VkPipelineInputAssemblyStateCreateInfo pipelineInputAssemblyStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO };
1167         pipelineInputAssemblyStateInfo.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
1168         pipelineInputAssemblyStateInfo.primitiveRestartEnable = VK_TRUE;
1169 
1170         VkViewport viewport = {};
1171         viewport.x = 0.f;
1172         viewport.y = 0.f;
1173         viewport.width = (float)g_Extent.width;
1174         viewport.height = (float)g_Extent.height;
1175         viewport.minDepth = 0.f;
1176         viewport.maxDepth = 1.f;
1177 
1178         VkRect2D scissor = {};
1179         scissor.offset.x = 0;
1180         scissor.offset.y = 0;
1181         scissor.extent = g_Extent;
1182 
1183         VkPipelineViewportStateCreateInfo pipelineViewportStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO };
1184         pipelineViewportStateInfo.viewportCount = 1;
1185         pipelineViewportStateInfo.pViewports = &viewport;
1186         pipelineViewportStateInfo.scissorCount = 1;
1187         pipelineViewportStateInfo.pScissors = &scissor;
1188 
1189         VkPipelineRasterizationStateCreateInfo pipelineRasterizationStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO };
1190         pipelineRasterizationStateInfo.depthClampEnable = VK_FALSE;
1191         pipelineRasterizationStateInfo.rasterizerDiscardEnable = VK_FALSE;
1192         pipelineRasterizationStateInfo.polygonMode = VK_POLYGON_MODE_FILL;
1193         pipelineRasterizationStateInfo.lineWidth = 1.f;
1194         pipelineRasterizationStateInfo.cullMode = VK_CULL_MODE_BACK_BIT;
1195         pipelineRasterizationStateInfo.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
1196         pipelineRasterizationStateInfo.depthBiasEnable = VK_FALSE;
1197         pipelineRasterizationStateInfo.depthBiasConstantFactor = 0.f;
1198         pipelineRasterizationStateInfo.depthBiasClamp = 0.f;
1199         pipelineRasterizationStateInfo.depthBiasSlopeFactor = 0.f;
1200 
1201         VkPipelineMultisampleStateCreateInfo pipelineMultisampleStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO };
1202         pipelineMultisampleStateInfo.sampleShadingEnable = VK_FALSE;
1203         pipelineMultisampleStateInfo.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
1204         pipelineMultisampleStateInfo.minSampleShading = 1.f;
1205         pipelineMultisampleStateInfo.pSampleMask = nullptr;
1206         pipelineMultisampleStateInfo.alphaToCoverageEnable = VK_FALSE;
1207         pipelineMultisampleStateInfo.alphaToOneEnable = VK_FALSE;
1208 
1209         VkPipelineColorBlendAttachmentState pipelineColorBlendAttachmentState = {};
1210         pipelineColorBlendAttachmentState.colorWriteMask =
1211             VK_COLOR_COMPONENT_R_BIT |
1212             VK_COLOR_COMPONENT_G_BIT |
1213             VK_COLOR_COMPONENT_B_BIT |
1214             VK_COLOR_COMPONENT_A_BIT;
1215         pipelineColorBlendAttachmentState.blendEnable = VK_FALSE;
1216         pipelineColorBlendAttachmentState.srcColorBlendFactor = VK_BLEND_FACTOR_ONE; // Optional
1217         pipelineColorBlendAttachmentState.dstColorBlendFactor = VK_BLEND_FACTOR_ZERO; // Optional
1218         pipelineColorBlendAttachmentState.colorBlendOp = VK_BLEND_OP_ADD; // Optional
1219         pipelineColorBlendAttachmentState.srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE; // Optional
1220         pipelineColorBlendAttachmentState.dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO; // Optional
1221         pipelineColorBlendAttachmentState.alphaBlendOp = VK_BLEND_OP_ADD; // Optional
1222 
1223         VkPipelineColorBlendStateCreateInfo pipelineColorBlendStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO };
1224         pipelineColorBlendStateInfo.logicOpEnable = VK_FALSE;
1225         pipelineColorBlendStateInfo.logicOp = VK_LOGIC_OP_COPY;
1226         pipelineColorBlendStateInfo.attachmentCount = 1;
1227         pipelineColorBlendStateInfo.pAttachments = &pipelineColorBlendAttachmentState;
1228 
1229         VkPipelineDepthStencilStateCreateInfo depthStencilStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO };
1230         depthStencilStateInfo.depthTestEnable = VK_TRUE;
1231         depthStencilStateInfo.depthWriteEnable = VK_TRUE;
1232         depthStencilStateInfo.depthCompareOp = VK_COMPARE_OP_LESS;
1233         depthStencilStateInfo.depthBoundsTestEnable = VK_FALSE;
1234         depthStencilStateInfo.stencilTestEnable = VK_FALSE;
1235 
1236         VkGraphicsPipelineCreateInfo pipelineInfo = { VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO };
1237         pipelineInfo.stageCount = 2;
1238         pipelineInfo.pStages = pipelineShaderStageInfos;
1239         pipelineInfo.pVertexInputState = &pipelineVertexInputStateInfo;
1240         pipelineInfo.pInputAssemblyState = &pipelineInputAssemblyStateInfo;
1241         pipelineInfo.pViewportState = &pipelineViewportStateInfo;
1242         pipelineInfo.pRasterizationState = &pipelineRasterizationStateInfo;
1243         pipelineInfo.pMultisampleState = &pipelineMultisampleStateInfo;
1244         pipelineInfo.pDepthStencilState = &depthStencilStateInfo;
1245         pipelineInfo.pColorBlendState = &pipelineColorBlendStateInfo;
1246         pipelineInfo.pDynamicState = nullptr;
1247         pipelineInfo.layout = g_hPipelineLayout;
1248         pipelineInfo.renderPass = g_hRenderPass;
1249         pipelineInfo.subpass = 0;
1250         pipelineInfo.basePipelineHandle = VK_NULL_HANDLE;
1251         pipelineInfo.basePipelineIndex = -1;
1252         ERR_GUARD_VULKAN( vkCreateGraphicsPipelines(
1253             g_hDevice,
1254             VK_NULL_HANDLE,
1255             1,
1256             &pipelineInfo,
1257             g_Allocs,
1258             &g_hPipeline) );
1259 
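        // The pipeline keeps its own copy of the compiled shader code, so the shader modules
        // can be destroyed as soon as vkCreateGraphicsPipelines has returned.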
1260         vkDestroyShaderModule(g_hDevice, fragShaderModule, g_Allocs);
1261         vkDestroyShaderModule(g_hDevice, hVertShaderModule, g_Allocs);
1262     }
1263 
    // Create framebuffers
1265 
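    // Defensive cleanup: destroy any framebuffers that may remain from a previous swapchain
    // before creating new ones.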
1266     for(size_t i = g_Framebuffers.size(); i--; )
1267         vkDestroyFramebuffer(g_hDevice, g_Framebuffers[i], g_Allocs);
1268     g_Framebuffers.clear();
1269 
1270     g_Framebuffers.resize(g_SwapchainImageViews.size());
    for(size_t i = 0; i < g_SwapchainImageViews.size(); ++i)
1272     {
1273         VkImageView attachments[] = { g_SwapchainImageViews[i], g_hDepthImageView };
1274 
1275         VkFramebufferCreateInfo framebufferInfo = { VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO };
1276         framebufferInfo.renderPass = g_hRenderPass;
1277         framebufferInfo.attachmentCount = (uint32_t)_countof(attachments);
1278         framebufferInfo.pAttachments = attachments;
1279         framebufferInfo.width = g_Extent.width;
1280         framebufferInfo.height = g_Extent.height;
1281         framebufferInfo.layers = 1;
1282         ERR_GUARD_VULKAN( vkCreateFramebuffer(g_hDevice, &framebufferInfo, g_Allocs, &g_Framebuffers[i]) );
1283     }
1284 
1285     // Create semaphores
1286 
1287     if(g_hImageAvailableSemaphore != VK_NULL_HANDLE)
1288     {
1289         vkDestroySemaphore(g_hDevice, g_hImageAvailableSemaphore, g_Allocs);
1290         g_hImageAvailableSemaphore = VK_NULL_HANDLE;
1291     }
1292     if(g_hRenderFinishedSemaphore != VK_NULL_HANDLE)
1293     {
1294         vkDestroySemaphore(g_hDevice, g_hRenderFinishedSemaphore, g_Allocs);
1295         g_hRenderFinishedSemaphore = VK_NULL_HANDLE;
1296     }
1297 
1298     VkSemaphoreCreateInfo semaphoreInfo = { VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO };
1299     ERR_GUARD_VULKAN( vkCreateSemaphore(g_hDevice, &semaphoreInfo, g_Allocs, &g_hImageAvailableSemaphore) );
1300     ERR_GUARD_VULKAN( vkCreateSemaphore(g_hDevice, &semaphoreInfo, g_Allocs, &g_hRenderFinishedSemaphore) );
1301 }
1302 
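// Destroys all objects that depend on the swapchain images. The VkSwapchainKHR itself is
// destroyed only when destroyActualSwapchain is true.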
static void DestroySwapchain(bool destroyActualSwapchain)
1304 {
1305     if(g_hImageAvailableSemaphore != VK_NULL_HANDLE)
1306     {
1307         vkDestroySemaphore(g_hDevice, g_hImageAvailableSemaphore, g_Allocs);
1308         g_hImageAvailableSemaphore = VK_NULL_HANDLE;
1309     }
1310     if(g_hRenderFinishedSemaphore != VK_NULL_HANDLE)
1311     {
1312         vkDestroySemaphore(g_hDevice, g_hRenderFinishedSemaphore, g_Allocs);
1313         g_hRenderFinishedSemaphore = VK_NULL_HANDLE;
1314     }
1315 
1316     for(size_t i = g_Framebuffers.size(); i--; )
1317         vkDestroyFramebuffer(g_hDevice, g_Framebuffers[i], g_Allocs);
1318     g_Framebuffers.clear();
1319 
1320     if(g_hDepthImageView != VK_NULL_HANDLE)
1321     {
1322         vkDestroyImageView(g_hDevice, g_hDepthImageView, g_Allocs);
1323         g_hDepthImageView = VK_NULL_HANDLE;
1324     }
1325     if(g_hDepthImage != VK_NULL_HANDLE)
1326     {
1327         vmaDestroyImage(g_hAllocator, g_hDepthImage, g_hDepthImageAlloc);
1328         g_hDepthImage = VK_NULL_HANDLE;
1329     }
1330 
1331     if(g_hPipeline != VK_NULL_HANDLE)
1332     {
1333         vkDestroyPipeline(g_hDevice, g_hPipeline, g_Allocs);
1334         g_hPipeline = VK_NULL_HANDLE;
1335     }
1336 
1337     if(g_hRenderPass != VK_NULL_HANDLE)
1338     {
1339         vkDestroyRenderPass(g_hDevice, g_hRenderPass, g_Allocs);
1340         g_hRenderPass = VK_NULL_HANDLE;
1341     }
1342 
1343     if(g_hPipelineLayout != VK_NULL_HANDLE)
1344     {
1345         vkDestroyPipelineLayout(g_hDevice, g_hPipelineLayout, g_Allocs);
1346         g_hPipelineLayout = VK_NULL_HANDLE;
1347     }
1348 
1349     for(size_t i = g_SwapchainImageViews.size(); i--; )
1350         vkDestroyImageView(g_hDevice, g_SwapchainImageViews[i], g_Allocs);
1351     g_SwapchainImageViews.clear();
1352 
1353     if(destroyActualSwapchain && (g_hSwapchain != VK_NULL_HANDLE))
1354     {
1355         vkDestroySwapchainKHR(g_hDevice, g_hSwapchain, g_Allocs);
1356         g_hSwapchain = VK_NULL_HANDLE;
1357     }
1358 }
1359 
static void PrintEnabledFeatures()
1361 {
1362     wprintf(L"Enabled extensions and features:\n");
1363     wprintf(L"Validation layer: %d\n", g_EnableValidationLayer ? 1 : 0);
1364     wprintf(L"Sparse binding: %d\n", g_SparseBindingEnabled ? 1 : 0);
1365     if(GetVulkanApiVersion() == VK_API_VERSION_1_0)
1366     {
1367         wprintf(L"VK_KHR_get_memory_requirements2: %d\n", VK_KHR_get_memory_requirements2_enabled ? 1 : 0);
1368         wprintf(L"VK_KHR_get_physical_device_properties2: %d\n", VK_KHR_get_physical_device_properties2_enabled ? 1 : 0);
1369         wprintf(L"VK_KHR_dedicated_allocation: %d\n", VK_KHR_dedicated_allocation_enabled ? 1 : 0);
1370         wprintf(L"VK_KHR_bind_memory2: %d\n", VK_KHR_bind_memory2_enabled ? 1 : 0);
1371     }
1372     wprintf(L"VK_EXT_memory_budget: %d\n", VK_EXT_memory_budget_enabled ? 1 : 0);
1373     wprintf(L"VK_AMD_device_coherent_memory: %d\n", VK_AMD_device_coherent_memory_enabled ? 1 : 0);
1374     if(GetVulkanApiVersion() < VK_API_VERSION_1_2)
1375     {
1376         wprintf(L"VK_KHR_buffer_device_address: %d\n", VK_KHR_buffer_device_address_enabled ? 1 : 0);
1377     }
1378     else
1379     {
1380         wprintf(L"bufferDeviceAddress: %d\n", VK_KHR_buffer_device_address_enabled ? 1 : 0);
1381     }
    wprintf(L"VK_EXT_memory_priority: %d\n", VK_EXT_memory_priority_enabled ? 1 : 0);
1383 }
1384 
void SetAllocatorCreateInfo(VmaAllocatorCreateInfo& outInfo)
1386 {
1387     outInfo = {};
1388 
1389     outInfo.physicalDevice = g_hPhysicalDevice;
1390     outInfo.device = g_hDevice;
1391     outInfo.instance = g_hVulkanInstance;
1392     outInfo.vulkanApiVersion = GetVulkanApiVersion();
1393 
1394     if(VK_KHR_dedicated_allocation_enabled)
1395     {
1396         outInfo.flags |= VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT;
1397     }
1398     if(VK_KHR_bind_memory2_enabled)
1399     {
1400         outInfo.flags |= VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT;
1401     }
1402 #if !defined(VMA_MEMORY_BUDGET) || VMA_MEMORY_BUDGET == 1
1403     if(VK_EXT_memory_budget_enabled && (
1404         GetVulkanApiVersion() >= VK_API_VERSION_1_1 || VK_KHR_get_physical_device_properties2_enabled))
1405     {
1406         outInfo.flags |= VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT;
1407     }
1408 #endif
1409     if(VK_AMD_device_coherent_memory_enabled)
1410     {
1411         outInfo.flags |= VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT;
1412     }
1413     if(VK_KHR_buffer_device_address_enabled)
1414     {
1415         outInfo.flags |= VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT;
1416     }
1417 #if !defined(VMA_MEMORY_PRIORITY) || VMA_MEMORY_PRIORITY == 1
1418     if(VK_EXT_memory_priority_enabled)
1419     {
1420         outInfo.flags |= VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT;
1421     }
1422 #endif
1423 
1424     if(USE_CUSTOM_CPU_ALLOCATION_CALLBACKS)
1425     {
1426         outInfo.pAllocationCallbacks = &g_CpuAllocationCallbacks;
1427     }
1428 
1429     // Uncomment to enable recording to CSV file.
1430     /*
1431     static VmaRecordSettings recordSettings = {};
1432     recordSettings.pFilePath = "VulkanSample.csv";
1433     outInfo.pRecordSettings = &recordSettings;
1434     */
1435 
1436     // Uncomment to enable HeapSizeLimit.
1437     /*
1438     static std::array<VkDeviceSize, VK_MAX_MEMORY_HEAPS> heapSizeLimit;
1439     std::fill(heapSizeLimit.begin(), heapSizeLimit.end(), VK_WHOLE_SIZE);
1440     heapSizeLimit[0] = 512ull * 1024 * 1024;
1441     outInfo.pHeapSizeLimit = heapSizeLimit.data();
1442     */
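    // Uncomment to override the block size used for allocations from large heaps. This is only
    // an illustrative sketch - the 32 MiB value is an arbitrary assumption, not a recommendation;
    // leaving the field at 0 keeps the library default.
    /*
    outInfo.preferredLargeHeapBlockSize = 32ull * 1024 * 1024;
    */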
1443 }
1444 
static void PrintPhysicalDeviceProperties(const VkPhysicalDeviceProperties& properties)
1446 {
1447     wprintf(L"physicalDeviceProperties:\n");
1448     wprintf(L"    driverVersion: 0x%X\n", properties.driverVersion);
1449     wprintf(L"    vendorID: 0x%X (%s)\n", properties.vendorID, VendorIDToStr(properties.vendorID));
1450     wprintf(L"    deviceID: 0x%X\n", properties.deviceID);
1451     wprintf(L"    deviceType: %u (%s)\n", properties.deviceType, PhysicalDeviceTypeToStr(properties.deviceType));
1452     wprintf(L"    deviceName: %hs\n", properties.deviceName);
1453     wprintf(L"    limits:\n");
1454     wprintf(L"        maxMemoryAllocationCount: %u\n", properties.limits.maxMemoryAllocationCount);
1455     wprintf(L"        bufferImageGranularity: %llu B\n", properties.limits.bufferImageGranularity);
1456     wprintf(L"        nonCoherentAtomSize: %llu B\n", properties.limits.nonCoherentAtomSize);
1457 }
1458 
1459 #if VMA_VULKAN_VERSION >= 1002000
static void PrintPhysicalDeviceVulkan11Properties(const VkPhysicalDeviceVulkan11Properties& properties)
1461 {
1462     wprintf(L"physicalDeviceVulkan11Properties:\n");
1463     std::wstring sizeStr = SizeToStr(properties.maxMemoryAllocationSize);
1464     wprintf(L"    maxMemoryAllocationSize: %llu B (%s)\n", properties.maxMemoryAllocationSize, sizeStr.c_str());
1465 }
static void PrintPhysicalDeviceVulkan12Properties(const VkPhysicalDeviceVulkan12Properties& properties)
1467 {
1468     wprintf(L"physicalDeviceVulkan12Properties:\n");
1469     std::wstring str = DriverIDToStr(properties.driverID);
1470     wprintf(L"    driverID: %u (%s)\n", properties.driverID, str.c_str());
1471     wprintf(L"    driverName: %hs\n", properties.driverName);
1472     wprintf(L"    driverInfo: %hs\n", properties.driverInfo);
1473 }
#endif // #if VMA_VULKAN_VERSION >= 1002000
1475 
static void AddFlagToStr(std::wstring& inout, const wchar_t* flagStr)
1477 {
1478     if(!inout.empty())
1479         inout += L", ";
1480     inout += flagStr;
1481 }
1482 
static std::wstring HeapFlagsToStr(VkMemoryHeapFlags flags)
1484 {
1485     std::wstring result;
1486     if(flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT)
1487         AddFlagToStr(result, L"DEVICE_LOCAL");
1488     if(flags & VK_MEMORY_HEAP_MULTI_INSTANCE_BIT)
1489         AddFlagToStr(result, L"MULTI_INSTANCE");
1490     return result;
1491 }
1492 
static std::wstring PropertyFlagsToStr(VkMemoryPropertyFlags flags)
1494 {
1495     std::wstring result;
1496     if(flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT)
1497         AddFlagToStr(result, L"DEVICE_LOCAL");
1498     if(flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
1499         AddFlagToStr(result, L"HOST_VISIBLE");
1500     if(flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
1501         AddFlagToStr(result, L"HOST_COHERENT");
1502     if(flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT)
1503         AddFlagToStr(result, L"HOST_CACHED");
1504     if(flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT)
1505         AddFlagToStr(result, L"LAZILY_ALLOCATED");
1506 
1507 #if VMA_VULKAN_VERSION >= 1001000
1508     if(flags & VK_MEMORY_PROPERTY_PROTECTED_BIT)
1509         AddFlagToStr(result, L"PROTECTED");
1510 #endif
1511 
1512 #if VK_AMD_device_coherent_memory
1513     if(flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD)
1514         AddFlagToStr(result, L"DEVICE_COHERENT (AMD)");
1515     if(flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD)
1516         AddFlagToStr(result, L"DEVICE_UNCACHED (AMD)");
1517 #endif
1518 
1519     return result;
1520 }
1521 
static void PrintMemoryTypes()
1523 {
1524     wprintf(L"MEMORY HEAPS:\n");
1525     const VkPhysicalDeviceMemoryProperties* memProps = nullptr;
1526     vmaGetMemoryProperties(g_hAllocator, &memProps);
1527 
1528     wprintf(L"heapCount=%u, typeCount=%u\n", memProps->memoryHeapCount, memProps->memoryTypeCount);
1529 
1530     std::wstring sizeStr, flagsStr;
1531     for(uint32_t heapIndex = 0; heapIndex < memProps->memoryHeapCount; ++heapIndex)
1532     {
1533         const VkMemoryHeap& heap = memProps->memoryHeaps[heapIndex];
1534         sizeStr = SizeToStr(heap.size);
1535         flagsStr = HeapFlagsToStr(heap.flags);
1536         wprintf(L"Heap %u: %llu B (%s) %s\n", heapIndex, heap.size, sizeStr.c_str(), flagsStr.c_str());
1537 
1538         for(uint32_t typeIndex = 0; typeIndex < memProps->memoryTypeCount; ++typeIndex)
1539         {
1540             const VkMemoryType& type = memProps->memoryTypes[typeIndex];
1541             if(type.heapIndex == heapIndex)
1542             {
1543                 flagsStr = PropertyFlagsToStr(type.propertyFlags);
1544                 wprintf(L"    Type %u: %s\n", typeIndex, flagsStr.c_str());
1545             }
1546         }
1547     }
1548 }
1549 
1550 #if 0
1551 template<typename It, typename MapFunc>
1552 inline VkDeviceSize MapSum(It beg, It end, MapFunc mapFunc)
1553 {
1554     VkDeviceSize result = 0;
1555     for(It it = beg; it != end; ++it)
1556         result += mapFunc(*it);
1557     return result;
1558 }
1559 #endif
1560 
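// The two helpers below use a common Vulkan pattern: create a small throwaway resource,
// query its memory requirements, destroy it, and test whether memoryTypeBits intersects
// the given set of allowed memory types.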
static bool CanCreateVertexBuffer(uint32_t allowedMemoryTypeBits)
1562 {
1563     VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1564     bufCreateInfo.size = 0x10000;
1565     bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1566 
1567     VkBuffer buf = VK_NULL_HANDLE;
1568     VkResult res = vkCreateBuffer(g_hDevice, &bufCreateInfo, g_Allocs, &buf);
1569     assert(res == VK_SUCCESS);
1570 
1571     VkMemoryRequirements memReq = {};
1572     vkGetBufferMemoryRequirements(g_hDevice, buf, &memReq);
1573 
1574     vkDestroyBuffer(g_hDevice, buf, g_Allocs);
1575 
1576     return (memReq.memoryTypeBits & allowedMemoryTypeBits) != 0;
1577 }
1578 
static bool CanCreateOptimalSampledImage(uint32_t allowedMemoryTypeBits)
1580 {
1581     VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1582     imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1583     imgCreateInfo.extent.width = 256;
1584     imgCreateInfo.extent.height = 256;
1585     imgCreateInfo.extent.depth = 1;
1586     imgCreateInfo.mipLevels = 1;
1587     imgCreateInfo.arrayLayers = 1;
1588     imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
1589     imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
1590     imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1591     imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
1592     imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1593 
1594     VkImage img = VK_NULL_HANDLE;
1595     VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
1596     assert(res == VK_SUCCESS);
1597 
1598     VkMemoryRequirements memReq = {};
1599     vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
1600 
1601     vkDestroyImage(g_hDevice, img, g_Allocs);
1602 
1603     return (memReq.memoryTypeBits & allowedMemoryTypeBits) != 0;
1604 }
1605 
static void PrintMemoryConclusions()
1607 {
1608     wprintf(L"Conclusions:\n");
1609 
1610     const VkPhysicalDeviceProperties* props = nullptr;
1611     const VkPhysicalDeviceMemoryProperties* memProps = nullptr;
1612     vmaGetPhysicalDeviceProperties(g_hAllocator, &props);
1613     vmaGetMemoryProperties(g_hAllocator, &memProps);
1614 
1615     const uint32_t heapCount = memProps->memoryHeapCount;
1616 
1617     uint32_t deviceLocalHeapCount = 0;
1618     uint32_t hostVisibleHeapCount = 0;
1619     uint32_t deviceLocalAndHostVisibleHeapCount = 0;
1620     VkDeviceSize deviceLocalHeapSumSize = 0;
1621     VkDeviceSize hostVisibleHeapSumSize = 0;
1622     VkDeviceSize deviceLocalAndHostVisibleHeapSumSize = 0;
1623 
1624     for(uint32_t heapIndex = 0; heapIndex < heapCount; ++heapIndex)
1625     {
1626         const VkMemoryHeap& heap = memProps->memoryHeaps[heapIndex];
1627         const bool isDeviceLocal = (heap.flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0;
1628         bool isHostVisible = false;
1629         for(uint32_t typeIndex = 0; typeIndex < memProps->memoryTypeCount; ++typeIndex)
1630         {
1631             const VkMemoryType& type = memProps->memoryTypes[typeIndex];
1632             if(type.heapIndex == heapIndex && (type.propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT))
1633             {
1634                 isHostVisible = true;
1635                 break;
1636             }
1637         }
1638         if(isDeviceLocal)
1639         {
1640             ++deviceLocalHeapCount;
1641             deviceLocalHeapSumSize += heap.size;
1642         }
1643         if(isHostVisible)
1644         {
1645             ++hostVisibleHeapCount;
1646             hostVisibleHeapSumSize += heap.size;
1647             if(isDeviceLocal)
1648             {
1649                 ++deviceLocalAndHostVisibleHeapCount;
1650                 deviceLocalAndHostVisibleHeapSumSize += heap.size;
1651             }
1652         }
1653     }
1654 
1655     uint32_t hostVisibleNotHostCoherentTypeCount = 0;
1656     uint32_t notDeviceLocalNotHostVisibleTypeCount = 0;
1657     uint32_t amdSpecificTypeCount = 0;
1658     uint32_t lazilyAllocatedTypeCount = 0;
1659     uint32_t allTypeBits = 0;
1660     uint32_t deviceLocalTypeBits = 0;
1661     for(uint32_t typeIndex = 0; typeIndex < memProps->memoryTypeCount; ++typeIndex)
1662     {
1663         const VkMemoryType& type = memProps->memoryTypes[typeIndex];
1664         allTypeBits |= 1u << typeIndex;
1665         if(type.propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT)
1666         {
1667             deviceLocalTypeBits |= 1u << typeIndex;
1668         }
1669         if((type.propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) &&
1670             (type.propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
1671         {
1672             ++hostVisibleNotHostCoherentTypeCount;
1673         }
1674         if((type.propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == 0 &&
1675             (type.propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
1676         {
1677             ++notDeviceLocalNotHostVisibleTypeCount;
1678         }
1679         if(type.propertyFlags & (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD))
1680         {
1681             ++amdSpecificTypeCount;
1682         }
1683         if(type.propertyFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT)
1684         {
1685             ++lazilyAllocatedTypeCount;
1686         }
1687     }
1688 
1689     assert(deviceLocalHeapCount > 0);
1690     if(deviceLocalHeapCount == heapCount)
1691         wprintf(L"- All heaps are DEVICE_LOCAL.\n");
1692     else
1693         wprintf(L"- %u heaps are DEVICE_LOCAL, total %s.\n", deviceLocalHeapCount, SizeToStr(deviceLocalHeapSumSize).c_str());
1694 
1695     assert(hostVisibleHeapCount > 0);
1696     if(hostVisibleHeapCount == heapCount)
1697         wprintf(L"- All heaps are HOST_VISIBLE.\n");
1698     else
        wprintf(L"- %u heaps are HOST_VISIBLE, total %s.\n", hostVisibleHeapCount, SizeToStr(hostVisibleHeapSumSize).c_str());
1700 
1701     if(deviceLocalHeapCount < heapCount && hostVisibleHeapCount < heapCount)
1702     {
1703         if(deviceLocalAndHostVisibleHeapCount == 0)
1704             wprintf(L"- No heaps are DEVICE_LOCAL and HOST_VISIBLE.\n");
        else if(deviceLocalAndHostVisibleHeapCount == heapCount)
1706             wprintf(L"- All heaps are DEVICE_LOCAL and HOST_VISIBLE.\n");
1707         else
1708             wprintf(L"- %u heaps are DEVICE_LOCAL and HOST_VISIBLE, total %s.\n", deviceLocalAndHostVisibleHeapCount, SizeToStr(deviceLocalAndHostVisibleHeapSumSize).c_str());
1709     }
1710 
1711     if(hostVisibleNotHostCoherentTypeCount == 0)
1712         wprintf(L"- No types are HOST_VISIBLE but not HOST_COHERENT.\n");
1713     else
1714         wprintf(L"- %u types are HOST_VISIBLE but not HOST_COHERENT.\n", hostVisibleNotHostCoherentTypeCount);
1715 
1716     if(notDeviceLocalNotHostVisibleTypeCount == 0)
1717         wprintf(L"- No types are not DEVICE_LOCAL and not HOST_VISIBLE.\n");
1718     else
1719         wprintf(L"- %u types are not DEVICE_LOCAL and not HOST_VISIBLE.\n", notDeviceLocalNotHostVisibleTypeCount);
1720 
1721     if(amdSpecificTypeCount == 0)
1722         wprintf(L"- No types are AMD-specific DEVICE_COHERENT or DEVICE_UNCACHED.\n");
1723     else
1724         wprintf(L"- %u types are AMD-specific DEVICE_COHERENT or DEVICE_UNCACHED.\n", amdSpecificTypeCount);
1725 
1726     if(lazilyAllocatedTypeCount == 0)
1727         wprintf(L"- No types are LAZILY_ALLOCATED.\n");
1728     else
1729         wprintf(L"- %u types are LAZILY_ALLOCATED.\n", lazilyAllocatedTypeCount);
1730 
1731     if(props->vendorID == VENDOR_ID_AMD &&
1732         props->deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU &&
1733         deviceLocalAndHostVisibleHeapSumSize > 256llu * 1024 * 1024)
1734     {
1735         wprintf(L"- AMD Smart Access Memory (SAM) is enabled!\n");
1736     }
1737 
1738     if(deviceLocalHeapCount < heapCount)
1739     {
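        // Mask of memory types that are not DEVICE_LOCAL, used to check whether typical GPU
        // resources could still be placed in system memory.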
1740         const uint32_t nonDeviceLocalTypeBits = ~deviceLocalTypeBits & allTypeBits;
1741 
1742         if(CanCreateVertexBuffer(nonDeviceLocalTypeBits))
1743             wprintf(L"- A buffer with VERTEX_BUFFER usage can be created in some non-DEVICE_LOCAL type.\n");
1744         else
1745             wprintf(L"- A buffer with VERTEX_BUFFER usage cannot be created in some non-DEVICE_LOCAL type.\n");
1746 
1747         if(CanCreateOptimalSampledImage(nonDeviceLocalTypeBits))
1748             wprintf(L"- An image with OPTIMAL tiling and SAMPLED usage can be created in some non-DEVICE_LOCAL type.\n");
1749         else
1750             wprintf(L"- An image with OPTIMAL tiling and SAMPLED usage cannot be created in some non-DEVICE_LOCAL type.\n");
1751     }
1752 
1753     //wprintf(L"\n");
1754 }
1755 
static void InitializeApplication()
1757 {
1758     // Create VkSurfaceKHR.
1759     VkWin32SurfaceCreateInfoKHR surfaceInfo = { VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR };
1760     surfaceInfo.hinstance = g_hAppInstance;
1761     surfaceInfo.hwnd = g_hWnd;
1762     VkResult result = vkCreateWin32SurfaceKHR(g_hVulkanInstance, &surfaceInfo, g_Allocs, &g_hSurface);
1763     assert(result == VK_SUCCESS);
1764 
1765     // Query for device extensions
1766 
1767     uint32_t physicalDeviceExtensionPropertyCount = 0;
1768     ERR_GUARD_VULKAN( vkEnumerateDeviceExtensionProperties(g_hPhysicalDevice, nullptr, &physicalDeviceExtensionPropertyCount, nullptr) );
    std::vector<VkExtensionProperties> physicalDeviceExtensionProperties(physicalDeviceExtensionPropertyCount);
1770     if(physicalDeviceExtensionPropertyCount)
1771     {
1772         ERR_GUARD_VULKAN( vkEnumerateDeviceExtensionProperties(
1773             g_hPhysicalDevice,
1774             nullptr,
1775             &physicalDeviceExtensionPropertyCount,
1776             physicalDeviceExtensionProperties.data()) );
1777     }
1778 
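    // Extensions promoted to core in Vulkan 1.1 (get_memory_requirements2, dedicated_allocation,
    // bind_memory2) are only recorded as enabled when running on Vulkan 1.0, where they still
    // have to be requested explicitly.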
1779     for(uint32_t i = 0; i < physicalDeviceExtensionPropertyCount; ++i)
1780     {
1781         if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME) == 0)
1782         {
1783             if(GetVulkanApiVersion() == VK_API_VERSION_1_0)
1784             {
1785                 VK_KHR_get_memory_requirements2_enabled = true;
1786             }
1787         }
1788         else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME) == 0)
1789         {
1790             if(GetVulkanApiVersion() == VK_API_VERSION_1_0)
1791             {
1792                 VK_KHR_dedicated_allocation_enabled = true;
1793             }
1794         }
1795         else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME) == 0)
1796         {
1797             if(GetVulkanApiVersion() == VK_API_VERSION_1_0)
1798             {
1799                 VK_KHR_bind_memory2_enabled = true;
1800             }
1801         }
1802         else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_EXT_MEMORY_BUDGET_EXTENSION_NAME) == 0)
1803             VK_EXT_memory_budget_enabled = true;
1804         else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME) == 0)
1805             VK_AMD_device_coherent_memory_enabled = true;
1806         else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME) == 0)
1807         {
1808             if(GetVulkanApiVersion() < VK_API_VERSION_1_2)
1809             {
1810                 VK_KHR_buffer_device_address_enabled = true;
1811             }
1812         }
1813         else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME) == 0)
1814             VK_EXT_memory_priority_enabled = true;
1815     }
1816 
1817     if(GetVulkanApiVersion() >= VK_API_VERSION_1_2)
1818         VK_KHR_buffer_device_address_enabled = true; // Promoted to core Vulkan 1.2.
1819 
    // Query for properties and features
1821 
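    // The *2 query structures below are extended through their pNext chains (PnextChainPushFront),
    // so a single vkGetPhysicalDeviceProperties2 / vkGetPhysicalDeviceFeatures2 call fills in all
    // chained structures at once.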
1822 #if VMA_VULKAN_VERSION >= 1001000
1823     VkPhysicalDeviceProperties2 physicalDeviceProperties2 = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 };
1824 
1825 #if VMA_VULKAN_VERSION >= 1002000
    // The Vulkan spec lists VkPhysicalDeviceVulkan11Properties as "Provided by VK_VERSION_1_2". This is not a mistake - the structure was introduced together with Vulkan 1.2.
1827     VkPhysicalDeviceVulkan11Properties physicalDeviceVulkan11Properties = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES };
1828     VkPhysicalDeviceVulkan12Properties physicalDeviceVulkan12Properties = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES };
1829     PnextChainPushFront(&physicalDeviceProperties2, &physicalDeviceVulkan11Properties);
1830     PnextChainPushFront(&physicalDeviceProperties2, &physicalDeviceVulkan12Properties);
1831 #endif
1832 
1833     vkGetPhysicalDeviceProperties2(g_hPhysicalDevice, &physicalDeviceProperties2);
1834 
1835     PrintPhysicalDeviceProperties(physicalDeviceProperties2.properties);
1836 #if VMA_VULKAN_VERSION >= 1002000
1837     PrintPhysicalDeviceVulkan11Properties(physicalDeviceVulkan11Properties);
1838     PrintPhysicalDeviceVulkan12Properties(physicalDeviceVulkan12Properties);
1839 #endif
1840 
1841 #else // #if VMA_VULKAN_VERSION >= 1001000
1842     VkPhysicalDeviceProperties physicalDeviceProperties = {};
1843     vkGetPhysicalDeviceProperties(g_hPhysicalDevice, &physicalDeviceProperties);
1844     PrintPhysicalDeviceProperties(physicalDeviceProperties);
1845 
1846 #endif // #if VMA_VULKAN_VERSION >= 1001000
1847 
1848     wprintf(L"\n");
1849 
1850     VkPhysicalDeviceFeatures2 physicalDeviceFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 };
1851 
1852     VkPhysicalDeviceCoherentMemoryFeaturesAMD physicalDeviceCoherentMemoryFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD };
1853     if(VK_AMD_device_coherent_memory_enabled)
1854     {
1855         PnextChainPushFront(&physicalDeviceFeatures, &physicalDeviceCoherentMemoryFeatures);
1856     }
1857 
1858     VkPhysicalDeviceBufferDeviceAddressFeaturesKHR physicalDeviceBufferDeviceAddressFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR };
1859     if(VK_KHR_buffer_device_address_enabled)
1860     {
1861         PnextChainPushFront(&physicalDeviceFeatures, &physicalDeviceBufferDeviceAddressFeatures);
1862     }
1863 
1864     VkPhysicalDeviceMemoryPriorityFeaturesEXT physicalDeviceMemoryPriorityFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT };
1865     if(VK_EXT_memory_priority_enabled)
1866     {
1867         PnextChainPushFront(&physicalDeviceFeatures, &physicalDeviceMemoryPriorityFeatures);
1868     }
1869 
1870     vkGetPhysicalDeviceFeatures2(g_hPhysicalDevice, &physicalDeviceFeatures);
1871 
1872     g_SparseBindingEnabled = physicalDeviceFeatures.features.sparseBinding != 0;
1873 
    // If the extension is reported but the corresponding feature is not actually supported, don't use it.
1875     if(VK_AMD_device_coherent_memory_enabled && !physicalDeviceCoherentMemoryFeatures.deviceCoherentMemory)
1876         VK_AMD_device_coherent_memory_enabled = false;
1877     if(VK_KHR_buffer_device_address_enabled && !physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddress)
1878         VK_KHR_buffer_device_address_enabled = false;
1879     if(VK_EXT_memory_priority_enabled && !physicalDeviceMemoryPriorityFeatures.memoryPriority)
1880         VK_EXT_memory_priority_enabled = false;
1881 
1882     // Find queue family index
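    // Needed: a family with GRAPHICS and COMPUTE, a family able to present to the surface,
    // and, if sparse binding is enabled, a family with SPARSE_BINDING. These may or may not
    // be the same family.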
1883 
1884     uint32_t queueFamilyCount = 0;
1885     vkGetPhysicalDeviceQueueFamilyProperties(g_hPhysicalDevice, &queueFamilyCount, nullptr);
1886     assert(queueFamilyCount > 0);
1887     std::vector<VkQueueFamilyProperties> queueFamilies(queueFamilyCount);
1888     vkGetPhysicalDeviceQueueFamilyProperties(g_hPhysicalDevice, &queueFamilyCount, queueFamilies.data());
1889     for(uint32_t i = 0;
1890         (i < queueFamilyCount) &&
1891             (g_GraphicsQueueFamilyIndex == UINT_MAX ||
1892                 g_PresentQueueFamilyIndex == UINT_MAX ||
1893                 (g_SparseBindingEnabled && g_SparseBindingQueueFamilyIndex == UINT_MAX));
1894         ++i)
1895     {
1896         if(queueFamilies[i].queueCount > 0)
1897         {
1898             const uint32_t flagsForGraphicsQueue = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
            if((g_GraphicsQueueFamilyIndex == UINT_MAX) &&
1900                 ((queueFamilies[i].queueFlags & flagsForGraphicsQueue) == flagsForGraphicsQueue))
1901             {
1902                 g_GraphicsQueueFamilyIndex = i;
1903             }
1904 
1905             VkBool32 surfaceSupported = 0;
1906             VkResult res = vkGetPhysicalDeviceSurfaceSupportKHR(g_hPhysicalDevice, i, g_hSurface, &surfaceSupported);
1907             if((res >= 0) && (surfaceSupported == VK_TRUE))
1908             {
1909                 g_PresentQueueFamilyIndex = i;
1910             }
1911 
1912             if(g_SparseBindingEnabled &&
1913                 g_SparseBindingQueueFamilyIndex == UINT32_MAX &&
1914                 (queueFamilies[i].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT) != 0)
1915             {
1916                 g_SparseBindingQueueFamilyIndex = i;
1917             }
1918         }
1919     }
1920     assert(g_GraphicsQueueFamilyIndex != UINT_MAX);
1921 
1922     g_SparseBindingEnabled = g_SparseBindingEnabled && g_SparseBindingQueueFamilyIndex != UINT32_MAX;
1923 
1924     // Create logical device
1925 
1926     const float queuePriority = 1.f;
1927 
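    // Each distinct queue family gets its own VkDeviceQueueCreateInfo - Vulkan does not allow
    // duplicate queueFamilyIndex entries in this array.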
1928     VkDeviceQueueCreateInfo queueCreateInfo[3] = {};
1929     uint32_t queueCount = 1;
1930     queueCreateInfo[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
1931     queueCreateInfo[0].queueFamilyIndex = g_GraphicsQueueFamilyIndex;
1932     queueCreateInfo[0].queueCount = 1;
1933     queueCreateInfo[0].pQueuePriorities = &queuePriority;
1934 
1935     if(g_PresentQueueFamilyIndex != g_GraphicsQueueFamilyIndex)
1936     {
1937 
1938         queueCreateInfo[queueCount].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
1939         queueCreateInfo[queueCount].queueFamilyIndex = g_PresentQueueFamilyIndex;
1940         queueCreateInfo[queueCount].queueCount = 1;
1941         queueCreateInfo[queueCount].pQueuePriorities = &queuePriority;
1942         ++queueCount;
1943     }
1944 
1945     if(g_SparseBindingEnabled &&
1946         g_SparseBindingQueueFamilyIndex != g_GraphicsQueueFamilyIndex &&
1947         g_SparseBindingQueueFamilyIndex != g_PresentQueueFamilyIndex)
1948     {
1949 
1950         queueCreateInfo[queueCount].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
1951         queueCreateInfo[queueCount].queueFamilyIndex = g_SparseBindingQueueFamilyIndex;
1952         queueCreateInfo[queueCount].queueCount = 1;
1953         queueCreateInfo[queueCount].pQueuePriorities = &queuePriority;
1954         ++queueCount;
1955     }
1956 
1957     std::vector<const char*> enabledDeviceExtensions;
1958     enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
1959     if(VK_KHR_get_memory_requirements2_enabled)
1960         enabledDeviceExtensions.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
1961     if(VK_KHR_dedicated_allocation_enabled)
1962         enabledDeviceExtensions.push_back(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
1963     if(VK_KHR_bind_memory2_enabled)
1964         enabledDeviceExtensions.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
1965     if(VK_EXT_memory_budget_enabled)
1966         enabledDeviceExtensions.push_back(VK_EXT_MEMORY_BUDGET_EXTENSION_NAME);
1967     if(VK_AMD_device_coherent_memory_enabled)
1968         enabledDeviceExtensions.push_back(VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME);
1969     if(VK_KHR_buffer_device_address_enabled && GetVulkanApiVersion() < VK_API_VERSION_1_2)
1970         enabledDeviceExtensions.push_back(VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME);
1971     if(VK_EXT_memory_priority_enabled)
1972         enabledDeviceExtensions.push_back(VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME);
1973 
1974     VkPhysicalDeviceFeatures2 deviceFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 };
1975     deviceFeatures.features.samplerAnisotropy = VK_TRUE;
1976     deviceFeatures.features.sparseBinding = g_SparseBindingEnabled ? VK_TRUE : VK_FALSE;
1977 
1978     if(VK_AMD_device_coherent_memory_enabled)
1979     {
1980         physicalDeviceCoherentMemoryFeatures.deviceCoherentMemory = VK_TRUE;
1981         PnextChainPushBack(&deviceFeatures, &physicalDeviceCoherentMemoryFeatures);
1982     }
1983     if(VK_KHR_buffer_device_address_enabled)
1984     {
1985         physicalDeviceBufferDeviceAddressFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR };
1986         physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddress = VK_TRUE;
1987         PnextChainPushBack(&deviceFeatures, &physicalDeviceBufferDeviceAddressFeatures);
1988     }
1989     if(VK_EXT_memory_priority_enabled)
1990     {
1991         PnextChainPushBack(&deviceFeatures, &physicalDeviceMemoryPriorityFeatures);
1992     }
1993 
1994     VkDeviceCreateInfo deviceCreateInfo = { VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO };
1995     deviceCreateInfo.pNext = &deviceFeatures;
1996     deviceCreateInfo.enabledLayerCount = 0;
1997     deviceCreateInfo.ppEnabledLayerNames = nullptr;
1998     deviceCreateInfo.enabledExtensionCount = (uint32_t)enabledDeviceExtensions.size();
1999     deviceCreateInfo.ppEnabledExtensionNames = !enabledDeviceExtensions.empty() ? enabledDeviceExtensions.data() : nullptr;
2000     deviceCreateInfo.queueCreateInfoCount = queueCount;
2001     deviceCreateInfo.pQueueCreateInfos = queueCreateInfo;
2002 
2003     ERR_GUARD_VULKAN( vkCreateDevice(g_hPhysicalDevice, &deviceCreateInfo, g_Allocs, &g_hDevice) );
2004 
2005     // Fetch pointers to extension functions
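    // On Vulkan 1.2+ the core entry point "vkGetBufferDeviceAddress" is used; on older API
    // versions only the "vkGetBufferDeviceAddressKHR" name from the extension is available.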
2006     if(VK_KHR_buffer_device_address_enabled)
2007     {
2008         if(GetVulkanApiVersion() >= VK_API_VERSION_1_2)
2009         {
            g_vkGetBufferDeviceAddressKHR = (PFN_vkGetBufferDeviceAddressKHR)vkGetDeviceProcAddr(g_hDevice, "vkGetBufferDeviceAddress");
        }
        else
        {
            g_vkGetBufferDeviceAddressKHR = (PFN_vkGetBufferDeviceAddressKHR)vkGetDeviceProcAddr(g_hDevice, "vkGetBufferDeviceAddressKHR");
2015         }
2016         assert(g_vkGetBufferDeviceAddressKHR != nullptr);
2017     }
2018 
2019     // Create memory allocator
2020 
2021     VmaAllocatorCreateInfo allocatorInfo = {};
2022     SetAllocatorCreateInfo(allocatorInfo);
2023     ERR_GUARD_VULKAN( vmaCreateAllocator(&allocatorInfo, &g_hAllocator) );
2024 
2025     PrintMemoryTypes();
2026     wprintf(L"\n");
2027     PrintMemoryConclusions();
2028     wprintf(L"\n");
2029     PrintEnabledFeatures();
2030     wprintf(L"\n");
2031 
2032     // Retrieve queues (don't need to be destroyed).
2033 
2034     vkGetDeviceQueue(g_hDevice, g_GraphicsQueueFamilyIndex, 0, &g_hGraphicsQueue);
2035     vkGetDeviceQueue(g_hDevice, g_PresentQueueFamilyIndex, 0, &g_hPresentQueue);
2036     assert(g_hGraphicsQueue);
2037     assert(g_hPresentQueue);
2038 
2039     if(g_SparseBindingEnabled)
2040     {
2041         vkGetDeviceQueue(g_hDevice, g_SparseBindingQueueFamilyIndex, 0, &g_hSparseBindingQueue);
2042         assert(g_hSparseBindingQueue);
2043     }
2044 
2045     // Create command pool
2046 
2047     VkCommandPoolCreateInfo commandPoolInfo = { VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO };
2048     commandPoolInfo.queueFamilyIndex = g_GraphicsQueueFamilyIndex;
2049     commandPoolInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
2050     ERR_GUARD_VULKAN( vkCreateCommandPool(g_hDevice, &commandPoolInfo, g_Allocs, &g_hCommandPool) );
2051 
2052     VkCommandBufferAllocateInfo commandBufferInfo = { VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO };
2053     commandBufferInfo.commandPool = g_hCommandPool;
2054     commandBufferInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
2055     commandBufferInfo.commandBufferCount = COMMAND_BUFFER_COUNT;
2056     ERR_GUARD_VULKAN( vkAllocateCommandBuffers(g_hDevice, &commandBufferInfo, g_MainCommandBuffers) );
2057 
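    // The per-command-buffer fences start signaled so that the first wait on them in DrawFrame()
    // returns immediately instead of blocking forever.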
2058     VkFenceCreateInfo fenceInfo = { VK_STRUCTURE_TYPE_FENCE_CREATE_INFO };
2059     fenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
2060     for(size_t i = 0; i < COMMAND_BUFFER_COUNT; ++i)
2061     {
2062         ERR_GUARD_VULKAN( vkCreateFence(g_hDevice, &fenceInfo, g_Allocs, &g_MainCommandBufferExecutedFances[i]) );
2063     }
2064 
2065     ERR_GUARD_VULKAN( vkCreateFence(g_hDevice, &fenceInfo, g_Allocs, &g_ImmediateFence) );
2066 
2067     commandBufferInfo.commandBufferCount = 1;
2068     ERR_GUARD_VULKAN( vkAllocateCommandBuffers(g_hDevice, &commandBufferInfo, &g_hTemporaryCommandBuffer) );
2069 
2070     // Create texture sampler
2071 
2072     VkSamplerCreateInfo samplerInfo = { VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO };
2073     samplerInfo.magFilter = VK_FILTER_LINEAR;
2074     samplerInfo.minFilter = VK_FILTER_LINEAR;
2075     samplerInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT;
2076     samplerInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT;
2077     samplerInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT;
2078     samplerInfo.anisotropyEnable = VK_TRUE;
2079     samplerInfo.maxAnisotropy = 16;
2080     samplerInfo.borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
2081     samplerInfo.unnormalizedCoordinates = VK_FALSE;
2082     samplerInfo.compareEnable = VK_FALSE;
2083     samplerInfo.compareOp = VK_COMPARE_OP_ALWAYS;
2084     samplerInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
2085     samplerInfo.mipLodBias = 0.f;
2086     samplerInfo.minLod = 0.f;
2087     samplerInfo.maxLod = FLT_MAX;
2088     ERR_GUARD_VULKAN( vkCreateSampler(g_hDevice, &samplerInfo, g_Allocs, &g_hSampler) );
2089 
2090     CreateTexture(128, 128);
2091     CreateMesh();
2092 
2093     VkDescriptorSetLayoutBinding samplerLayoutBinding = {};
2094     samplerLayoutBinding.binding = 1;
2095     samplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
2096     samplerLayoutBinding.descriptorCount = 1;
2097     samplerLayoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
2098 
2099     VkDescriptorSetLayoutCreateInfo descriptorSetLayoutInfo = { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO };
2100     descriptorSetLayoutInfo.bindingCount = 1;
2101     descriptorSetLayoutInfo.pBindings = &samplerLayoutBinding;
2102     ERR_GUARD_VULKAN( vkCreateDescriptorSetLayout(g_hDevice, &descriptorSetLayoutInfo, g_Allocs, &g_hDescriptorSetLayout) );
2103 
2104     // Create descriptor pool
2105 
2106     VkDescriptorPoolSize descriptorPoolSizes[2];
2107     ZeroMemory(descriptorPoolSizes, sizeof(descriptorPoolSizes));
2108     descriptorPoolSizes[0].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
2109     descriptorPoolSizes[0].descriptorCount = 1;
2110     descriptorPoolSizes[1].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
2111     descriptorPoolSizes[1].descriptorCount = 1;
2112 
2113     VkDescriptorPoolCreateInfo descriptorPoolInfo = { VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO };
2114     descriptorPoolInfo.poolSizeCount = (uint32_t)_countof(descriptorPoolSizes);
2115     descriptorPoolInfo.pPoolSizes = descriptorPoolSizes;
2116     descriptorPoolInfo.maxSets = 1;
2117     ERR_GUARD_VULKAN( vkCreateDescriptorPool(g_hDevice, &descriptorPoolInfo, g_Allocs, &g_hDescriptorPool) );
2118 
    // Allocate descriptor set
2120 
2121     VkDescriptorSetLayout descriptorSetLayouts[] = { g_hDescriptorSetLayout };
2122     VkDescriptorSetAllocateInfo descriptorSetInfo = { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO };
2123     descriptorSetInfo.descriptorPool = g_hDescriptorPool;
2124     descriptorSetInfo.descriptorSetCount = 1;
2125     descriptorSetInfo.pSetLayouts = descriptorSetLayouts;
2126     ERR_GUARD_VULKAN( vkAllocateDescriptorSets(g_hDevice, &descriptorSetInfo, &g_hDescriptorSet) );
2127 
2128     VkDescriptorImageInfo descriptorImageInfo = {};
2129     descriptorImageInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
2130     descriptorImageInfo.imageView = g_hTextureImageView;
2131     descriptorImageInfo.sampler = g_hSampler;
2132 
2133     VkWriteDescriptorSet writeDescriptorSet = { VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET };
2134     writeDescriptorSet.dstSet = g_hDescriptorSet;
2135     writeDescriptorSet.dstBinding = 1;
2136     writeDescriptorSet.dstArrayElement = 0;
2137     writeDescriptorSet.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
2138     writeDescriptorSet.descriptorCount = 1;
2139     writeDescriptorSet.pImageInfo = &descriptorImageInfo;
2140 
2141     vkUpdateDescriptorSets(g_hDevice, 1, &writeDescriptorSet, 0, nullptr);
2142 
2143     CreateSwapchain();
2144 }
2145 
static void FinalizeApplication()
2147 {
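    // Destroy everything in roughly reverse creation order; vkDeviceWaitIdle ensures the GPU
    // is no longer using any of these objects.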
2148     vkDeviceWaitIdle(g_hDevice);
2149 
2150     DestroySwapchain(true);
2151 
2152     if(g_hDescriptorPool != VK_NULL_HANDLE)
2153     {
2154         vkDestroyDescriptorPool(g_hDevice, g_hDescriptorPool, g_Allocs);
2155         g_hDescriptorPool = VK_NULL_HANDLE;
2156     }
2157 
2158     if(g_hDescriptorSetLayout != VK_NULL_HANDLE)
2159     {
2160         vkDestroyDescriptorSetLayout(g_hDevice, g_hDescriptorSetLayout, g_Allocs);
2161         g_hDescriptorSetLayout = VK_NULL_HANDLE;
2162     }
2163 
2164     if(g_hTextureImageView != VK_NULL_HANDLE)
2165     {
2166         vkDestroyImageView(g_hDevice, g_hTextureImageView, g_Allocs);
2167         g_hTextureImageView = VK_NULL_HANDLE;
2168     }
2169     if(g_hTextureImage != VK_NULL_HANDLE)
2170     {
2171         vmaDestroyImage(g_hAllocator, g_hTextureImage, g_hTextureImageAlloc);
2172         g_hTextureImage = VK_NULL_HANDLE;
2173     }
2174 
2175     if(g_hIndexBuffer != VK_NULL_HANDLE)
2176     {
2177         vmaDestroyBuffer(g_hAllocator, g_hIndexBuffer, g_hIndexBufferAlloc);
2178         g_hIndexBuffer = VK_NULL_HANDLE;
2179     }
2180     if(g_hVertexBuffer != VK_NULL_HANDLE)
2181     {
2182         vmaDestroyBuffer(g_hAllocator, g_hVertexBuffer, g_hVertexBufferAlloc);
2183         g_hVertexBuffer = VK_NULL_HANDLE;
2184     }
2185 
2186     if(g_hSampler != VK_NULL_HANDLE)
2187     {
2188         vkDestroySampler(g_hDevice, g_hSampler, g_Allocs);
2189         g_hSampler = VK_NULL_HANDLE;
2190     }
2191 
2192     if(g_ImmediateFence)
2193     {
2194         vkDestroyFence(g_hDevice, g_ImmediateFence, g_Allocs);
2195         g_ImmediateFence = VK_NULL_HANDLE;
2196     }
2197 
2198     for(size_t i = COMMAND_BUFFER_COUNT; i--; )
2199     {
2200         if(g_MainCommandBufferExecutedFances[i] != VK_NULL_HANDLE)
2201         {
2202             vkDestroyFence(g_hDevice, g_MainCommandBufferExecutedFances[i], g_Allocs);
2203             g_MainCommandBufferExecutedFances[i] = VK_NULL_HANDLE;
2204         }
2205     }
2206     if(g_MainCommandBuffers[0] != VK_NULL_HANDLE)
2207     {
2208         vkFreeCommandBuffers(g_hDevice, g_hCommandPool, COMMAND_BUFFER_COUNT, g_MainCommandBuffers);
2209         ZeroMemory(g_MainCommandBuffers, sizeof(g_MainCommandBuffers));
2210     }
2211     if(g_hTemporaryCommandBuffer != VK_NULL_HANDLE)
2212     {
2213         vkFreeCommandBuffers(g_hDevice, g_hCommandPool, 1, &g_hTemporaryCommandBuffer);
2214         g_hTemporaryCommandBuffer = VK_NULL_HANDLE;
2215     }
2216 
2217     if(g_hCommandPool != VK_NULL_HANDLE)
2218     {
2219         vkDestroyCommandPool(g_hDevice, g_hCommandPool, g_Allocs);
2220         g_hCommandPool = VK_NULL_HANDLE;
2221     }
2222 
2223     if(g_hAllocator != VK_NULL_HANDLE)
2224     {
2225         vmaDestroyAllocator(g_hAllocator);
2226         g_hAllocator = nullptr;
2227     }
2228 
2229     if(g_hDevice != VK_NULL_HANDLE)
2230     {
2231         vkDestroyDevice(g_hDevice, g_Allocs);
2232         g_hDevice = nullptr;
2233     }
2234 
2235     if(g_hSurface != VK_NULL_HANDLE)
2236     {
2237         vkDestroySurfaceKHR(g_hVulkanInstance, g_hSurface, g_Allocs);
2238         g_hSurface = VK_NULL_HANDLE;
2239     }
2240 }
2241 
static void PrintAllocatorStats()
2243 {
2244 #if VMA_STATS_STRING_ENABLED
2245     char* statsString = nullptr;
2246     vmaBuildStatsString(g_hAllocator, &statsString, true);
2247     printf("%s\n", statsString);
2248     vmaFreeStatsString(g_hAllocator, statsString);
2249 #endif
2250 }
2251 
static void RecreateSwapChain()
2253 {
2254     vkDeviceWaitIdle(g_hDevice);
2255     DestroySwapchain(false);
2256     CreateSwapchain();
2257 }
2258 
static void DrawFrame()
2260 {
2261     // Begin main command buffer
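    // COMMAND_BUFFER_COUNT command buffers are used round-robin; the fence associated with each
    // one guarantees the GPU has finished with it before it is re-recorded.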
2262     size_t cmdBufIndex = (g_NextCommandBufferIndex++) % COMMAND_BUFFER_COUNT;
2263     VkCommandBuffer hCommandBuffer = g_MainCommandBuffers[cmdBufIndex];
2264     VkFence hCommandBufferExecutedFence = g_MainCommandBufferExecutedFances[cmdBufIndex];
2265 
    ERR_GUARD_VULKAN( vkWaitForFences(g_hDevice, 1, &hCommandBufferExecutedFence, VK_TRUE, UINT64_MAX) );
2268 
2269     VkCommandBufferBeginInfo commandBufferBeginInfo = { VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO };
2270     commandBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
2271     ERR_GUARD_VULKAN( vkBeginCommandBuffer(hCommandBuffer, &commandBufferBeginInfo) );
2272 
2273     // Acquire swapchain image
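    // VK_ERROR_OUT_OF_DATE_KHR means the surface has changed (e.g. the window was resized) and
    // the swapchain must be recreated before rendering can continue.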
2274     uint32_t imageIndex = 0;
2275     VkResult res = vkAcquireNextImageKHR(g_hDevice, g_hSwapchain, UINT64_MAX, g_hImageAvailableSemaphore, VK_NULL_HANDLE, &imageIndex);
2276     if(res == VK_ERROR_OUT_OF_DATE_KHR)
2277     {
2278         RecreateSwapChain();
2279         return;
2280     }
2281     else if(res < 0)
2282     {
2283         ERR_GUARD_VULKAN(res);
2284     }
2285 
2286     // Record geometry pass
2287 
2288     VkClearValue clearValues[2];
2289     ZeroMemory(clearValues, sizeof(clearValues));
2290     clearValues[0].color.float32[0] = 0.25f;
2291     clearValues[0].color.float32[1] = 0.25f;
2292     clearValues[0].color.float32[2] = 0.5f;
2293     clearValues[0].color.float32[3] = 1.0f;
2294     clearValues[1].depthStencil.depth = 1.0f;
2295 
2296     VkRenderPassBeginInfo renderPassBeginInfo = { VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO };
2297     renderPassBeginInfo.renderPass = g_hRenderPass;
2298     renderPassBeginInfo.framebuffer = g_Framebuffers[imageIndex];
2299     renderPassBeginInfo.renderArea.offset.x = 0;
2300     renderPassBeginInfo.renderArea.offset.y = 0;
2301     renderPassBeginInfo.renderArea.extent = g_Extent;
2302     renderPassBeginInfo.clearValueCount = (uint32_t)_countof(clearValues);
2303     renderPassBeginInfo.pClearValues = clearValues;
2304     vkCmdBeginRenderPass(hCommandBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
2305 
2306     vkCmdBindPipeline(
2307         hCommandBuffer,
2308         VK_PIPELINE_BIND_POINT_GRAPHICS,
2309         g_hPipeline);
2310 
2311     mat4 view = mat4::LookAt(
2312         vec3(0.f, 0.f, 0.f),
2313         vec3(0.f, -2.f, 4.f),
2314         vec3(0.f, 1.f, 0.f));
2315     mat4 proj = mat4::Perspective(
2316         1.0471975511966f, // 60 degrees
2317         (float)g_Extent.width / (float)g_Extent.height,
2318         0.1f,
2319         1000.f);
2320     mat4 viewProj = view * proj;
2321 
2322     vkCmdBindDescriptorSets(
2323         hCommandBuffer,
2324         VK_PIPELINE_BIND_POINT_GRAPHICS,
2325         g_hPipelineLayout,
2326         0,
2327         1,
2328         &g_hDescriptorSet,
2329         0,
2330         nullptr);
2331 
2332     float rotationAngle = (float)GetTickCount() * 0.001f * (float)PI * 0.2f;
2333     mat4 model = mat4::RotationY(rotationAngle);
2334 
2335     UniformBufferObject ubo = {};
2336     ubo.ModelViewProj = model * viewProj;
2337     vkCmdPushConstants(hCommandBuffer, g_hPipelineLayout, VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(UniformBufferObject), &ubo);
2338 
2339     VkBuffer vertexBuffers[] = { g_hVertexBuffer };
2340     VkDeviceSize offsets[] = { 0 };
2341     vkCmdBindVertexBuffers(hCommandBuffer, 0, 1, vertexBuffers, offsets);
2342 
2343     vkCmdBindIndexBuffer(hCommandBuffer, g_hIndexBuffer, 0, VK_INDEX_TYPE_UINT16);
2344 
2345     vkCmdDrawIndexed(hCommandBuffer, g_IndexCount, 1, 0, 0, 0);
2346 
2347     vkCmdEndRenderPass(hCommandBuffer);
2348 
2349     vkEndCommandBuffer(hCommandBuffer);
2350 
2351     // Submit command buffer
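    // The submit waits on the image-available semaphore at the color-attachment-output stage and
    // signals the render-finished semaphore, which the present below waits on.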
2352 
2353     VkSemaphore submitWaitSemaphores[] = { g_hImageAvailableSemaphore };
2354     VkPipelineStageFlags submitWaitStages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT };
2355     VkSemaphore submitSignalSemaphores[] = { g_hRenderFinishedSemaphore };
2356     VkSubmitInfo submitInfo = { VK_STRUCTURE_TYPE_SUBMIT_INFO };
2357     submitInfo.waitSemaphoreCount = 1;
2358     submitInfo.pWaitSemaphores = submitWaitSemaphores;
2359     submitInfo.pWaitDstStageMask = submitWaitStages;
2360     submitInfo.commandBufferCount = 1;
2361     submitInfo.pCommandBuffers = &hCommandBuffer;
2362     submitInfo.signalSemaphoreCount = _countof(submitSignalSemaphores);
2363     submitInfo.pSignalSemaphores = submitSignalSemaphores;
    // Reset the fence only now that a submit will signal it again; if DrawFrame returned
    // early above, the fence stays signaled and the next wait on it will not block.
    ERR_GUARD_VULKAN( vkResetFences(g_hDevice, 1, &hCommandBufferExecutedFence) );
    ERR_GUARD_VULKAN( vkQueueSubmit(g_hGraphicsQueue, 1, &submitInfo, hCommandBufferExecutedFence) );
2365 
2366     VkSemaphore presentWaitSemaphores[] = { g_hRenderFinishedSemaphore };
2367 
2368     VkSwapchainKHR swapchains[] = { g_hSwapchain };
2369     VkPresentInfoKHR presentInfo = { VK_STRUCTURE_TYPE_PRESENT_INFO_KHR };
2370     presentInfo.waitSemaphoreCount = _countof(presentWaitSemaphores);
2371     presentInfo.pWaitSemaphores = presentWaitSemaphores;
2372     presentInfo.swapchainCount = 1;
2373     presentInfo.pSwapchains = swapchains;
2374     presentInfo.pImageIndices = &imageIndex;
2375     presentInfo.pResults = nullptr;
2376     res = vkQueuePresentKHR(g_hPresentQueue, &presentInfo);
2377     if(res == VK_ERROR_OUT_OF_DATE_KHR)
2378     {
2379         RecreateSwapChain();
2380     }
2381     else
2382         ERR_GUARD_VULKAN(res);
2383 }
2384 
static void HandlePossibleSizeChange()
2386 {
2387     RECT clientRect;
2388     GetClientRect(g_hWnd, &clientRect);
2389     LONG newSizeX = clientRect.right - clientRect.left;
2390     LONG newSizeY = clientRect.bottom - clientRect.top;
2391     if((newSizeX > 0) &&
2392         (newSizeY > 0) &&
2393         ((newSizeX != g_SizeX) || (newSizeY != g_SizeY)))
2394     {
2395         g_SizeX = newSizeX;
2396         g_SizeY = newSizeY;
2397 
2398         RecreateSwapChain();
2399     }
2400 }
2401 
2402 #define CATCH_PRINT_ERROR(extraCatchCode) \
2403     catch(const std::exception& ex) \
2404     { \
2405         fwprintf(stderr, L"ERROR: %hs\n", ex.what()); \
2406         extraCatchCode \
2407     } \
2408     catch(...) \
2409     { \
2410         fwprintf(stderr, L"UNKNOWN ERROR.\n"); \
2411         extraCatchCode \
2412     }
2413 
static LRESULT WINAPI WndProc(HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam)
2415 {
2416     switch(msg)
2417     {
2418     case WM_CREATE:
2419         // This is intentionally assigned here because we are now inside CreateWindow, before it returns.
2420         g_hWnd = hWnd;
2421         try
2422         {
2423             InitializeApplication();
2424         }
2425         CATCH_PRINT_ERROR(return -1;)
2426         //PrintAllocatorStats();
2427         return 0;
2428 
2429     case WM_DESTROY:
2430         try
2431         {
2432             FinalizeApplication();
2433         }
2434         CATCH_PRINT_ERROR(;)
2435         PostQuitMessage(0);
2436         return 0;
2437 
2438     // This prevents app from freezing when left Alt is pressed
2439     // (which normally enters modal menu loop).
2440     case WM_SYSKEYDOWN:
2441     case WM_SYSKEYUP:
2442         return 0;
2443 
2444     case WM_SIZE:
2445         if((wParam == SIZE_MAXIMIZED) || (wParam == SIZE_RESTORED))
2446         {
2447             try
2448             {
2449                 HandlePossibleSizeChange();
2450             }
2451             CATCH_PRINT_ERROR(DestroyWindow(hWnd);)
2452         }
2453         return 0;
2454 
2455     case WM_EXITSIZEMOVE:
2456         try
2457         {
2458             HandlePossibleSizeChange();
2459         }
2460         CATCH_PRINT_ERROR(DestroyWindow(hWnd);)
2461         return 0;
2462 
2463     case WM_KEYDOWN:
2464         switch(wParam)
2465         {
2466         case VK_ESCAPE:
2467             PostMessage(hWnd, WM_CLOSE, 0, 0);
2468             break;
2469         case 'T':
2470             try
2471             {
2472                 Test();
2473             }
2474             CATCH_PRINT_ERROR(;)
2475             break;
2476         case 'S':
2477             try
2478             {
2479                 if(g_SparseBindingEnabled)
2480                 {
2481                     try
2482                     {
2483                         TestSparseBinding();
2484                     }
2485                     CATCH_PRINT_ERROR(;)
2486                 }
2487                 else
2488                 {
2489                     printf("Sparse binding not supported.\n");
2490                 }
2491             }
2492             catch(const std::exception& ex)
2493             {
2494                 printf("ERROR: %s\n", ex.what());
2495             }
2496             break;
2497         }
2498         return 0;
2499 
2500     default:
2501         break;
2502     }
2503 
2504     return DefWindowProc(hWnd, msg, wParam, lParam);
2505 }
2506 
static void PrintLogo()
2508 {
2509     wprintf(L"%s\n", APP_TITLE_W);
2510 }
2511 
static void PrintHelp()
2513 {
2514     wprintf(
2515         L"Command line syntax:\n"
2516         L"-h, --Help   Print this information\n"
2517         L"-l, --List   Print list of GPUs\n"
2518         L"-g S, --GPU S   Select GPU with name containing S\n"
2519         L"-i N, --GPUIndex N   Select GPU index N\n"
2520     );
2521 }
2522 
int MainWindow()
2524 {
2525     WNDCLASSEX wndClassDesc = { sizeof(WNDCLASSEX) };
2526     wndClassDesc.style = CS_VREDRAW | CS_HREDRAW | CS_DBLCLKS;
2527     wndClassDesc.hbrBackground = NULL;
2528     wndClassDesc.hCursor = LoadCursor(NULL, IDC_CROSS);
2529     wndClassDesc.hIcon = LoadIcon(NULL, IDI_APPLICATION);
2530     wndClassDesc.hInstance = g_hAppInstance;
2531     wndClassDesc.lpfnWndProc = WndProc;
2532     wndClassDesc.lpszClassName = WINDOW_CLASS_NAME;
2533 
2534     const ATOM hWndClass = RegisterClassEx(&wndClassDesc);
2535     assert(hWndClass);
2536 
2537     const DWORD style = WS_VISIBLE | WS_OVERLAPPED | WS_CAPTION | WS_SYSMENU | WS_MINIMIZEBOX | WS_MAXIMIZEBOX | WS_THICKFRAME;
2538     const DWORD exStyle = 0;
2539 
2540     RECT rect = { 0, 0, g_SizeX, g_SizeY };
2541     AdjustWindowRectEx(&rect, style, FALSE, exStyle);
2542 
    CreateWindowEx(
        exStyle, WINDOW_CLASS_NAME, APP_TITLE_W, style,
        CW_USEDEFAULT, CW_USEDEFAULT, rect.right - rect.left, rect.bottom - rect.top,
        NULL, NULL, g_hAppInstance, NULL);
2547 
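    // Real-time message loop: drain any pending window messages, otherwise render a frame.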
2548     MSG msg;
2549     for(;;)
2550     {
2551         if(PeekMessage(&msg, NULL, 0, 0, PM_REMOVE))
2552         {
2553             if(msg.message == WM_QUIT)
2554                 break;
2555             TranslateMessage(&msg);
2556             DispatchMessage(&msg);
2557         }
2558         if(g_hDevice != VK_NULL_HANDLE)
2559             DrawFrame();
2560     }
2561 
    return (int)msg.wParam;
2563 }
2564 
int Main2(int argc, wchar_t** argv)
2566 {
2567     PrintLogo();
2568 
2569     if(!g_CommandLineParameters.Parse(argc, argv))
2570     {
2571         wprintf(L"ERROR: Invalid command line syntax.\n");
2572         PrintHelp();
2573         return (int)ExitCode::CommandLineError;
2574     }
2575 
2576     if(g_CommandLineParameters.m_Help)
2577     {
2578         PrintHelp();
2579         return (int)ExitCode::Help;
2580     }
2581 
2582     VulkanUsage vulkanUsage;
2583     vulkanUsage.Init();
2584 
2585     if(g_CommandLineParameters.m_List)
2586     {
2587         vulkanUsage.PrintPhysicalDeviceList();
2588         return (int)ExitCode::GPUList;
2589     }
2590 
2591     g_hPhysicalDevice = vulkanUsage.SelectPhysicalDevice(g_CommandLineParameters.m_GPUSelection);
2592     TEST(g_hPhysicalDevice);
2593 
2594     return MainWindow();
2595 }
2596 
int wmain(int argc, wchar_t** argv)
2598 {
2599     try
2600     {
        const int result = Main2(argc, argv);
        TEST(g_CpuAllocCount.load() == 0);
        return result;
2603     }
2604     CATCH_PRINT_ERROR(return (int)ExitCode::RuntimeError;)
2605 }
2606 
2607 #else // #ifdef _WIN32
2608 
2609 #include "VmaUsage.h"
2610 
int main()
2612 {
2613 }
2614 
2615 #endif // #ifdef _WIN32
2616