1 //
2 // Copyright (c) 2017-2020 Advanced Micro Devices, Inc. All rights reserved.
3 //
4 // Permission is hereby granted, free of charge, to any person obtaining a copy
5 // of this software and associated documentation files (the "Software"), to deal
6 // in the Software without restriction, including without limitation the rights
7 // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8 // copies of the Software, and to permit persons to whom the Software is
9 // furnished to do so, subject to the following conditions:
10 //
11 // The above copyright notice and this permission notice shall be included in
12 // all copies or substantial portions of the Software.
13 //
14 // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
20 // THE SOFTWARE.
21 //
22
23 #ifdef _WIN32
24
25 #include "SparseBindingTest.h"
26 #include "Tests.h"
27 #include "VmaUsage.h"
28 #include "Common.h"
29 #include <atomic>
30
// Locations searched for compiled shader binaries: working directory first,
// then ../bin (see LoadShader below).
static const char* const SHADER_PATH1 = "./";
static const char* const SHADER_PATH2 = "../bin/";
static const wchar_t* const WINDOW_CLASS_NAME = L"VULKAN_MEMORY_ALLOCATOR_SAMPLE";
static const char* const VALIDATION_LAYER_NAME = "VK_LAYER_LUNARG_standard_validation";
static const char* const APP_TITLE_A = "Vulkan Memory Allocator Sample 2.4.0";
static const wchar_t* const APP_TITLE_W = L"Vulkan Memory Allocator Sample 2.4.0";

// When true, ChooseSwapPresentMode prefers MAILBOX; otherwise IMMEDIATE.
static const bool VSYNC = true;
// Number of main command buffers / fences cycled through via g_NextCommandBufferIndex.
static const uint32_t COMMAND_BUFFER_COUNT = 2;
// Arbitrary marker value; the custom CPU allocation callbacks assert that
// Vulkan passes it back unchanged as pUserData.
static void* const CUSTOM_CPU_ALLOCATION_CALLBACK_USER_DATA = (void*)(intptr_t)43564544;
static const bool USE_CUSTOM_CPU_ALLOCATION_CALLBACKS = true;

// Core Vulkan / VMA handles. Non-static: shared with other translation units
// (e.g. the test code included alongside this file).
VkPhysicalDevice g_hPhysicalDevice;
VkDevice g_hDevice;
VmaAllocator g_hAllocator;
VkInstance g_hVulkanInstance;
// When false, MyDebugReportCallback suppresses the validation-layer warning
// about linear/non-linear resources aliasing the same memory.
bool g_MemoryAliasingWarningEnabled = true;

bool g_EnableValidationLayer = true;
// Runtime availability flags for optional extensions; presumably filled in
// during instance/device creation (not visible in this chunk) - TODO confirm.
bool VK_KHR_get_memory_requirements2_enabled = false;
bool VK_KHR_get_physical_device_properties2_enabled = false;
bool VK_KHR_dedicated_allocation_enabled = false;
bool VK_KHR_bind_memory2_enabled = false;
bool VK_EXT_memory_budget_enabled = false;
bool VK_AMD_device_coherent_memory_enabled = false;
bool VK_EXT_buffer_device_address_enabled = false;
bool VK_KHR_buffer_device_address_enabled = false;
bool g_SparseBindingEnabled = false;
bool g_BufferDeviceAddressEnabled = false;

// # Pointers to functions from extensions
PFN_vkGetBufferDeviceAddressEXT g_vkGetBufferDeviceAddressEXT;

// Win32 window state.
static HINSTANCE g_hAppInstance;
static HWND g_hWnd;
static LONG g_SizeX = 1280, g_SizeY = 720;
// Presentation / swapchain state.
static VkSurfaceKHR g_hSurface;
static VkQueue g_hPresentQueue;
static VkSurfaceFormatKHR g_SurfaceFormat;
static VkExtent2D g_Extent;
static VkSwapchainKHR g_hSwapchain;
static std::vector<VkImage> g_SwapchainImages;
static std::vector<VkImageView> g_SwapchainImageViews;
static std::vector<VkFramebuffer> g_Framebuffers;
static VkCommandPool g_hCommandPool;
static VkCommandBuffer g_MainCommandBuffers[COMMAND_BUFFER_COUNT];
// NOTE(review): "Fances" is a long-standing typo for "Fences"; the identifier
// is kept as-is because code outside this chunk may reference it.
static VkFence g_MainCommandBufferExecutedFances[COMMAND_BUFFER_COUNT];
VkFence g_ImmediateFence;
static uint32_t g_NextCommandBufferIndex;
static VkSemaphore g_hImageAvailableSemaphore;
static VkSemaphore g_hRenderFinishedSemaphore;
// Queue family indices; UINT_MAX means "not found yet".
static uint32_t g_GraphicsQueueFamilyIndex = UINT_MAX;
static uint32_t g_PresentQueueFamilyIndex = UINT_MAX;
static uint32_t g_SparseBindingQueueFamilyIndex = UINT_MAX;
static VkDescriptorSetLayout g_hDescriptorSetLayout;
static VkDescriptorPool g_hDescriptorPool;
static VkDescriptorSet g_hDescriptorSet; // Automatically destroyed with m_DescriptorPool.
static VkSampler g_hSampler;
// Depth buffer, recreated together with the swapchain (see CreateSwapchain).
static VkFormat g_DepthFormat;
static VkImage g_hDepthImage;
static VmaAllocation g_hDepthImageAlloc;
static VkImageView g_hDepthImageView;

// Surface capabilities queried in CreateSwapchain.
static VkSurfaceCapabilitiesKHR g_SurfaceCapabilities;
static std::vector<VkSurfaceFormatKHR> g_SurfaceFormats;
static std::vector<VkPresentModeKHR> g_PresentModes;

// VK_EXT_debug_report entry points, loaded in RegisterDebugCallbacks.
static PFN_vkCreateDebugReportCallbackEXT g_pvkCreateDebugReportCallbackEXT;
static PFN_vkDebugReportMessageEXT g_pvkDebugReportMessageEXT;
static PFN_vkDestroyDebugReportCallbackEXT g_pvkDestroyDebugReportCallbackEXT;
static VkDebugReportCallbackEXT g_hCallback;

static VkQueue g_hGraphicsQueue;
VkQueue g_hSparseBindingQueue;
// Scratch command buffer used by Begin/EndSingleTimeCommands.
VkCommandBuffer g_hTemporaryCommandBuffer;

static VkPipelineLayout g_hPipelineLayout;
static VkRenderPass g_hRenderPass;
static VkPipeline g_hPipeline;

// Cube mesh created by CreateMesh.
static VkBuffer g_hVertexBuffer;
static VmaAllocation g_hVertexBufferAlloc;
static VkBuffer g_hIndexBuffer;
static VmaAllocation g_hIndexBufferAlloc;
static uint32_t g_VertexCount;
static uint32_t g_IndexCount;

// Procedural texture created by CreateTexture.
static VkImage g_hTextureImage;
static VmaAllocation g_hTextureImageAlloc;
static VkImageView g_hTextureImageView;

// Number of CPU-side allocations currently live through the custom callbacks;
// atomic because Vulkan may invoke the callbacks from multiple threads.
static std::atomic_uint32_t g_CpuAllocCount;
123
CustomCpuAllocation(void * pUserData,size_t size,size_t alignment,VkSystemAllocationScope allocationScope)124 static void* CustomCpuAllocation(
125 void* pUserData, size_t size, size_t alignment,
126 VkSystemAllocationScope allocationScope)
127 {
128 assert(pUserData == CUSTOM_CPU_ALLOCATION_CALLBACK_USER_DATA);
129 void* const result = _aligned_malloc(size, alignment);
130 if(result)
131 {
132 ++g_CpuAllocCount;
133 }
134 return result;
135 }
136
CustomCpuReallocation(void * pUserData,void * pOriginal,size_t size,size_t alignment,VkSystemAllocationScope allocationScope)137 static void* CustomCpuReallocation(
138 void* pUserData, void* pOriginal, size_t size, size_t alignment,
139 VkSystemAllocationScope allocationScope)
140 {
141 assert(pUserData == CUSTOM_CPU_ALLOCATION_CALLBACK_USER_DATA);
142 void* const result = _aligned_realloc(pOriginal, size, alignment);
143 if(pOriginal && !result)
144 {
145 --g_CpuAllocCount;
146 }
147 else if(!pOriginal && result)
148 {
149 ++g_CpuAllocCount;
150 }
151 return result;
152 }
153
CustomCpuFree(void * pUserData,void * pMemory)154 static void CustomCpuFree(void* pUserData, void* pMemory)
155 {
156 assert(pUserData == CUSTOM_CPU_ALLOCATION_CALLBACK_USER_DATA);
157 if(pMemory)
158 {
159 const uint32_t oldAllocCount = g_CpuAllocCount.fetch_sub(1);
160 TEST(oldAllocCount > 0);
161 _aligned_free(pMemory);
162 }
163 }
164
// VkAllocationCallbacks structure wiring the custom CPU allocation functions
// above; the marker value is passed back to every callback as pUserData.
static const VkAllocationCallbacks g_CpuAllocationCallbacks = {
    CUSTOM_CPU_ALLOCATION_CALLBACK_USER_DATA, // pUserData
    &CustomCpuAllocation, // pfnAllocation
    &CustomCpuReallocation, // pfnReallocation
    &CustomCpuFree // pfnFree
};

// Allocation callbacks handed to every Vulkan create/destroy call in this file.
// Presumably set to &g_CpuAllocationCallbacks when
// USE_CUSTOM_CPU_ALLOCATION_CALLBACKS is true, else left null - the
// initialization is not visible in this chunk; TODO confirm.
const VkAllocationCallbacks* g_Allocs;
173
BeginSingleTimeCommands()174 void BeginSingleTimeCommands()
175 {
176 VkCommandBufferBeginInfo cmdBufBeginInfo = { VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO };
177 cmdBufBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
178 ERR_GUARD_VULKAN( vkBeginCommandBuffer(g_hTemporaryCommandBuffer, &cmdBufBeginInfo) );
179 }
180
EndSingleTimeCommands()181 void EndSingleTimeCommands()
182 {
183 ERR_GUARD_VULKAN( vkEndCommandBuffer(g_hTemporaryCommandBuffer) );
184
185 VkSubmitInfo submitInfo = { VK_STRUCTURE_TYPE_SUBMIT_INFO };
186 submitInfo.commandBufferCount = 1;
187 submitInfo.pCommandBuffers = &g_hTemporaryCommandBuffer;
188
189 ERR_GUARD_VULKAN( vkQueueSubmit(g_hGraphicsQueue, 1, &submitInfo, VK_NULL_HANDLE) );
190 ERR_GUARD_VULKAN( vkQueueWaitIdle(g_hGraphicsQueue) );
191 }
192
LoadShader(std::vector<char> & out,const char * fileName)193 void LoadShader(std::vector<char>& out, const char* fileName)
194 {
195 std::ifstream file(std::string(SHADER_PATH1) + fileName, std::ios::ate | std::ios::binary);
196 if(file.is_open() == false)
197 file.open(std::string(SHADER_PATH2) + fileName, std::ios::ate | std::ios::binary);
198 assert(file.is_open());
199 size_t fileSize = (size_t)file.tellg();
200 if(fileSize > 0)
201 {
202 out.resize(fileSize);
203 file.seekg(0);
204 file.read(out.data(), fileSize);
205 file.close();
206 }
207 else
208 out.clear();
209 }
210
/*
VK_EXT_debug_report callback: filters out known-benign validation messages,
color-codes the rest on the console, and mirrors warnings/errors to the
debugger output window. Always returns VK_FALSE so the triggering Vulkan call
is never aborted.
*/
VKAPI_ATTR VkBool32 VKAPI_CALL MyDebugReportCallback(
    VkDebugReportFlagsEXT flags,
    VkDebugReportObjectTypeEXT objectType,
    uint64_t object,
    size_t location,
    int32_t messageCode,
    const char* pLayerPrefix,
    const char* pMessage,
    void* pUserData)
{
    // "Non-linear image 0xebc91 is aliased with linear buffer 0xeb8e4 which may indicate a bug."
    // Suppressed only when the app has explicitly opted out via
    // g_MemoryAliasingWarningEnabled (aliasing is intentional in some tests).
    if(!g_MemoryAliasingWarningEnabled && flags == VK_DEBUG_REPORT_WARNING_BIT_EXT &&
        (strstr(pMessage, " is aliased with non-linear ") || strstr(pMessage, " is aliased with linear ")))
    {
        return VK_FALSE;
    }

    // Ignoring because when VK_KHR_dedicated_allocation extension is enabled,
    // vkGetBufferMemoryRequirements2KHR function is used instead, while Validation
    // Layer seems to be unaware of it.
    if (strstr(pMessage, "but vkGetBufferMemoryRequirements() has not been called on that buffer") != nullptr)
    {
        return VK_FALSE;
    }
    if (strstr(pMessage, "but vkGetImageMemoryRequirements() has not been called on that image") != nullptr)
    {
        return VK_FALSE;
    }

    /*
    "Mapping an image with layout VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL can result in undefined behavior if this memory is used by the device. Only GENERAL or PREINITIALIZED should be used."
    Ignoring because we map entire VkDeviceMemory blocks, where different types of
    images and buffers may end up together, especially on GPUs with unified memory
    like Intel.
    */
    if(strstr(pMessage, "Mapping an image with layout") != nullptr &&
        strstr(pMessage, "can result in undefined behavior if this memory is used by the device") != nullptr)
    {
        return VK_FALSE;
    }

    // Pick console color by severity. Exact == comparisons (not bit tests) are
    // intentional: combined flag sets fall through to INFO.
    switch(flags)
    {
    case VK_DEBUG_REPORT_WARNING_BIT_EXT:
        SetConsoleColor(CONSOLE_COLOR::WARNING);
        break;
    case VK_DEBUG_REPORT_ERROR_BIT_EXT:
        SetConsoleColor(CONSOLE_COLOR::ERROR_);
        break;
    default:
        SetConsoleColor(CONSOLE_COLOR::INFO);
    }

    // \xBA is a separator glyph in the OEM console code page.
    printf("%s \xBA %s\n", pLayerPrefix, pMessage);

    SetConsoleColor(CONSOLE_COLOR::NORMAL);

    // Also route warnings/errors to OutputDebugString so they show up in the
    // Visual Studio output window.
    if(flags == VK_DEBUG_REPORT_WARNING_BIT_EXT ||
        flags == VK_DEBUG_REPORT_ERROR_BIT_EXT)
    {
        OutputDebugStringA(pMessage);
        OutputDebugStringA("\n");
    }

    return VK_FALSE;
}
277
ChooseSurfaceFormat()278 static VkSurfaceFormatKHR ChooseSurfaceFormat()
279 {
280 assert(!g_SurfaceFormats.empty());
281
282 if((g_SurfaceFormats.size() == 1) && (g_SurfaceFormats[0].format == VK_FORMAT_UNDEFINED))
283 {
284 VkSurfaceFormatKHR result = { VK_FORMAT_B8G8R8A8_UNORM, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR };
285 return result;
286 }
287
288 for(const auto& format : g_SurfaceFormats)
289 {
290 if((format.format == VK_FORMAT_B8G8R8A8_UNORM) &&
291 (format.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR))
292 {
293 return format;
294 }
295 }
296
297 return g_SurfaceFormats[0];
298 }
299
ChooseSwapPresentMode()300 VkPresentModeKHR ChooseSwapPresentMode()
301 {
302 VkPresentModeKHR preferredMode = VSYNC ? VK_PRESENT_MODE_MAILBOX_KHR : VK_PRESENT_MODE_IMMEDIATE_KHR;
303
304 if(std::find(g_PresentModes.begin(), g_PresentModes.end(), preferredMode) !=
305 g_PresentModes.end())
306 {
307 return preferredMode;
308 }
309
310 return VK_PRESENT_MODE_FIFO_KHR;
311 }
312
ChooseSwapExtent()313 static VkExtent2D ChooseSwapExtent()
314 {
315 if(g_SurfaceCapabilities.currentExtent.width != UINT_MAX)
316 return g_SurfaceCapabilities.currentExtent;
317
318 VkExtent2D result = {
319 std::max(g_SurfaceCapabilities.minImageExtent.width,
320 std::min(g_SurfaceCapabilities.maxImageExtent.width, (uint32_t)g_SizeX)),
321 std::max(g_SurfaceCapabilities.minImageExtent.height,
322 std::min(g_SurfaceCapabilities.maxImageExtent.height, (uint32_t)g_SizeY)) };
323 return result;
324 }
325
// Vertex layout for the cube mesh; must match the vertex input attribute
// descriptions used by the graphics pipeline.
struct Vertex
{
    float pos[3];      // object-space position
    float color[3];    // per-vertex RGB color
    float texCoord[2]; // UV into the procedural texture
};
332
/*
Creates the cube's vertex and index buffers in GPU-only memory.
Data is first written into persistently mapped CPU staging buffers, then copied
to the device-local buffers with vkCmdCopyBuffer inside a single-time command
batch, after which the staging buffers are destroyed.
*/
static void CreateMesh()
{
    assert(g_hAllocator);

    // 4 vertices per cube face; faces are rendered as triangle strips.
    static Vertex vertices[] = {
        // -X
        { { -1.f, -1.f, -1.f}, {1.0f, 0.0f, 0.0f}, {0.f, 0.f} },
        { { -1.f, -1.f,  1.f}, {1.0f, 0.0f, 0.0f}, {1.f, 0.f} },
        { { -1.f,  1.f, -1.f}, {1.0f, 0.0f, 0.0f}, {0.f, 1.f} },
        { { -1.f,  1.f,  1.f}, {1.0f, 0.0f, 0.0f}, {1.f, 1.f} },
        // +X
        { {  1.f, -1.f,  1.f}, {0.0f, 1.0f, 0.0f}, {0.f, 0.f} },
        { {  1.f, -1.f, -1.f}, {0.0f, 1.0f, 0.0f}, {1.f, 0.f} },
        { {  1.f,  1.f,  1.f}, {0.0f, 1.0f, 0.0f}, {0.f, 1.f} },
        { {  1.f,  1.f, -1.f}, {0.0f, 1.0f, 0.0f}, {1.f, 1.f} },
        // -Z
        { {  1.f, -1.f, -1.f}, {0.0f, 0.0f, 1.0f}, {0.f, 0.f} },
        { { -1.f, -1.f, -1.f}, {0.0f, 0.0f, 1.0f}, {1.f, 0.f} },
        { {  1.f,  1.f, -1.f}, {0.0f, 0.0f, 1.0f}, {0.f, 1.f} },
        { { -1.f,  1.f, -1.f}, {0.0f, 0.0f, 1.0f}, {1.f, 1.f} },
        // +Z
        { { -1.f, -1.f,  1.f}, {1.0f, 1.0f, 0.0f}, {0.f, 0.f} },
        { {  1.f, -1.f,  1.f}, {1.0f, 1.0f, 0.0f}, {1.f, 0.f} },
        { { -1.f,  1.f,  1.f}, {1.0f, 1.0f, 0.0f}, {0.f, 1.f} },
        { {  1.f,  1.f,  1.f}, {1.0f, 1.0f, 0.0f}, {1.f, 1.f} },
        // -Y
        { { -1.f, -1.f, -1.f}, {0.0f, 1.0f, 1.0f}, {0.f, 0.f} },
        { {  1.f, -1.f, -1.f}, {0.0f, 1.0f, 1.0f}, {1.f, 0.f} },
        { { -1.f, -1.f,  1.f}, {0.0f, 1.0f, 1.0f}, {0.f, 1.f} },
        { {  1.f, -1.f,  1.f}, {0.0f, 1.0f, 1.0f}, {1.f, 1.f} },
        // +Y
        { {  1.f,  1.f, -1.f}, {1.0f, 0.0f, 1.0f}, {0.f, 0.f} },
        { { -1.f,  1.f, -1.f}, {1.0f, 0.0f, 1.0f}, {1.f, 0.f} },
        { {  1.f,  1.f,  1.f}, {1.0f, 0.0f, 1.0f}, {0.f, 1.f} },
        { { -1.f,  1.f,  1.f}, {1.0f, 0.0f, 1.0f}, {1.f, 1.f} },
    };
    // USHRT_MAX between faces is presumably the primitive-restart index for the
    // triangle-strip topology - confirm against the pipeline's input assembly state.
    static uint16_t indices[] = {
        0, 1, 2, 3, USHRT_MAX,
        4, 5, 6, 7, USHRT_MAX,
        8, 9, 10, 11, USHRT_MAX,
        12, 13, 14, 15, USHRT_MAX,
        16, 17, 18, 19, USHRT_MAX,
        20, 21, 22, 23, USHRT_MAX,
    };

    size_t vertexBufferSize = sizeof(Vertex) * _countof(vertices);
    size_t indexBufferSize = sizeof(uint16_t) * _countof(indices);
    g_IndexCount = (uint32_t)_countof(indices);

    // Create vertex buffer

    VkBufferCreateInfo vbInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    vbInfo.size = vertexBufferSize;
    vbInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    vbInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

    // CPU_ONLY + MAPPED: staging buffer stays persistently mapped so we can
    // memcpy into it directly without vmaMapMemory.
    VmaAllocationCreateInfo vbAllocCreateInfo = {};
    vbAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    vbAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VkBuffer stagingVertexBuffer = VK_NULL_HANDLE;
    VmaAllocation stagingVertexBufferAlloc = VK_NULL_HANDLE;
    VmaAllocationInfo stagingVertexBufferAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &vbInfo, &vbAllocCreateInfo, &stagingVertexBuffer, &stagingVertexBufferAlloc, &stagingVertexBufferAllocInfo) );

    memcpy(stagingVertexBufferAllocInfo.pMappedData, vertices, vertexBufferSize);

    // No need to flush stagingVertexBuffer memory because CPU_ONLY memory is always HOST_COHERENT.

    // Reuse the same create-info for the device-local destination buffer.
    vbInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
    vbAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    vbAllocCreateInfo.flags = 0;
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &vbInfo, &vbAllocCreateInfo, &g_hVertexBuffer, &g_hVertexBufferAlloc, nullptr) );

    // Create index buffer

    VkBufferCreateInfo ibInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    ibInfo.size = indexBufferSize;
    ibInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    ibInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

    VmaAllocationCreateInfo ibAllocCreateInfo = {};
    ibAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    ibAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VkBuffer stagingIndexBuffer = VK_NULL_HANDLE;
    VmaAllocation stagingIndexBufferAlloc = VK_NULL_HANDLE;
    VmaAllocationInfo stagingIndexBufferAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &ibInfo, &ibAllocCreateInfo, &stagingIndexBuffer, &stagingIndexBufferAlloc, &stagingIndexBufferAllocInfo) );

    memcpy(stagingIndexBufferAllocInfo.pMappedData, indices, indexBufferSize);

    // No need to flush stagingIndexBuffer memory because CPU_ONLY memory is always HOST_COHERENT.

    ibInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
    ibAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    ibAllocCreateInfo.flags = 0;
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &ibInfo, &ibAllocCreateInfo, &g_hIndexBuffer, &g_hIndexBufferAlloc, nullptr) );

    // Copy buffers

    // Single-time batch submits the copies and waits for completion, so the
    // staging buffers can be destroyed immediately afterwards.
    BeginSingleTimeCommands();

    VkBufferCopy vbCopyRegion = {};
    vbCopyRegion.srcOffset = 0;
    vbCopyRegion.dstOffset = 0;
    vbCopyRegion.size = vbInfo.size;
    vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingVertexBuffer, g_hVertexBuffer, 1, &vbCopyRegion);

    VkBufferCopy ibCopyRegion = {};
    ibCopyRegion.srcOffset = 0;
    ibCopyRegion.dstOffset = 0;
    ibCopyRegion.size = ibInfo.size;
    vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingIndexBuffer, g_hIndexBuffer, 1, &ibCopyRegion);

    EndSingleTimeCommands();

    vmaDestroyBuffer(g_hAllocator, stagingIndexBuffer, stagingIndexBufferAlloc);
    vmaDestroyBuffer(g_hAllocator, stagingVertexBuffer, stagingVertexBufferAlloc);
}
453
CreateTexture(uint32_t sizeX,uint32_t sizeY)454 static void CreateTexture(uint32_t sizeX, uint32_t sizeY)
455 {
456 // Create staging buffer.
457
458 const VkDeviceSize imageSize = sizeX * sizeY * 4;
459
460 VkBufferCreateInfo stagingBufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
461 stagingBufInfo.size = imageSize;
462 stagingBufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
463
464 VmaAllocationCreateInfo stagingBufAllocCreateInfo = {};
465 stagingBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
466 stagingBufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
467
468 VkBuffer stagingBuf = VK_NULL_HANDLE;
469 VmaAllocation stagingBufAlloc = VK_NULL_HANDLE;
470 VmaAllocationInfo stagingBufAllocInfo = {};
471 ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &stagingBufInfo, &stagingBufAllocCreateInfo, &stagingBuf, &stagingBufAlloc, &stagingBufAllocInfo) );
472
473 char* const pImageData = (char*)stagingBufAllocInfo.pMappedData;
474 uint8_t* pRowData = (uint8_t*)pImageData;
475 for(uint32_t y = 0; y < sizeY; ++y)
476 {
477 uint32_t* pPixelData = (uint32_t*)pRowData;
478 for(uint32_t x = 0; x < sizeY; ++x)
479 {
480 *pPixelData =
481 ((x & 0x18) == 0x08 ? 0x000000FF : 0x00000000) |
482 ((x & 0x18) == 0x10 ? 0x0000FFFF : 0x00000000) |
483 ((y & 0x18) == 0x08 ? 0x0000FF00 : 0x00000000) |
484 ((y & 0x18) == 0x10 ? 0x00FF0000 : 0x00000000);
485 ++pPixelData;
486 }
487 pRowData += sizeX * 4;
488 }
489
490 // No need to flush stagingImage memory because CPU_ONLY memory is always HOST_COHERENT.
491
492 // Create g_hTextureImage in GPU memory.
493
494 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
495 imageInfo.imageType = VK_IMAGE_TYPE_2D;
496 imageInfo.extent.width = sizeX;
497 imageInfo.extent.height = sizeY;
498 imageInfo.extent.depth = 1;
499 imageInfo.mipLevels = 1;
500 imageInfo.arrayLayers = 1;
501 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
502 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
503 imageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
504 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
505 imageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
506 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
507 imageInfo.flags = 0;
508
509 VmaAllocationCreateInfo imageAllocCreateInfo = {};
510 imageAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
511
512 ERR_GUARD_VULKAN( vmaCreateImage(g_hAllocator, &imageInfo, &imageAllocCreateInfo, &g_hTextureImage, &g_hTextureImageAlloc, nullptr) );
513
514 // Transition image layouts, copy image.
515
516 BeginSingleTimeCommands();
517
518 VkImageMemoryBarrier imgMemBarrier = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
519 imgMemBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
520 imgMemBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
521 imgMemBarrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
522 imgMemBarrier.subresourceRange.baseMipLevel = 0;
523 imgMemBarrier.subresourceRange.levelCount = 1;
524 imgMemBarrier.subresourceRange.baseArrayLayer = 0;
525 imgMemBarrier.subresourceRange.layerCount = 1;
526 imgMemBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
527 imgMemBarrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
528 imgMemBarrier.image = g_hTextureImage;
529 imgMemBarrier.srcAccessMask = 0;
530 imgMemBarrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
531
532 vkCmdPipelineBarrier(
533 g_hTemporaryCommandBuffer,
534 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
535 VK_PIPELINE_STAGE_TRANSFER_BIT,
536 0,
537 0, nullptr,
538 0, nullptr,
539 1, &imgMemBarrier);
540
541 VkBufferImageCopy region = {};
542 region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
543 region.imageSubresource.layerCount = 1;
544 region.imageExtent.width = sizeX;
545 region.imageExtent.height = sizeY;
546 region.imageExtent.depth = 1;
547
548 vkCmdCopyBufferToImage(g_hTemporaryCommandBuffer, stagingBuf, g_hTextureImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion);
549
550 imgMemBarrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
551 imgMemBarrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
552 imgMemBarrier.image = g_hTextureImage;
553 imgMemBarrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
554 imgMemBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
555
556 vkCmdPipelineBarrier(
557 g_hTemporaryCommandBuffer,
558 VK_PIPELINE_STAGE_TRANSFER_BIT,
559 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
560 0,
561 0, nullptr,
562 0, nullptr,
563 1, &imgMemBarrier);
564
565 EndSingleTimeCommands();
566
567 vmaDestroyBuffer(g_hAllocator, stagingBuf, stagingBufAlloc);
568
569 // Create ImageView
570
571 VkImageViewCreateInfo textureImageViewInfo = { VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO };
572 textureImageViewInfo.image = g_hTextureImage;
573 textureImageViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
574 textureImageViewInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
575 textureImageViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
576 textureImageViewInfo.subresourceRange.baseMipLevel = 0;
577 textureImageViewInfo.subresourceRange.levelCount = 1;
578 textureImageViewInfo.subresourceRange.baseArrayLayer = 0;
579 textureImageViewInfo.subresourceRange.layerCount = 1;
580 ERR_GUARD_VULKAN( vkCreateImageView(g_hDevice, &textureImageViewInfo, g_Allocs, &g_hTextureImageView) );
581 }
582
// Per-draw constants pushed to the vertex shader via push constants
// (see the VkPushConstantRange set up with sizeof(UniformBufferObject)).
// mat4 is presumably the project math library's 4x4 matrix type - confirm.
struct UniformBufferObject
{
    mat4 ModelViewProj;
};
587
RegisterDebugCallbacks()588 static void RegisterDebugCallbacks()
589 {
590 g_pvkCreateDebugReportCallbackEXT =
591 reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>
592 (vkGetInstanceProcAddr(g_hVulkanInstance, "vkCreateDebugReportCallbackEXT"));
593 g_pvkDebugReportMessageEXT =
594 reinterpret_cast<PFN_vkDebugReportMessageEXT>
595 (vkGetInstanceProcAddr(g_hVulkanInstance, "vkDebugReportMessageEXT"));
596 g_pvkDestroyDebugReportCallbackEXT =
597 reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>
598 (vkGetInstanceProcAddr(g_hVulkanInstance, "vkDestroyDebugReportCallbackEXT"));
599 assert(g_pvkCreateDebugReportCallbackEXT);
600 assert(g_pvkDebugReportMessageEXT);
601 assert(g_pvkDestroyDebugReportCallbackEXT);
602
603 VkDebugReportCallbackCreateInfoEXT callbackCreateInfo = { VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT };
604 callbackCreateInfo.flags = //VK_DEBUG_REPORT_INFORMATION_BIT_EXT |
605 VK_DEBUG_REPORT_ERROR_BIT_EXT |
606 VK_DEBUG_REPORT_WARNING_BIT_EXT |
607 VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT /*|
608 VK_DEBUG_REPORT_DEBUG_BIT_EXT*/;
609 callbackCreateInfo.pfnCallback = &MyDebugReportCallback;
610
611 ERR_GUARD_VULKAN( g_pvkCreateDebugReportCallbackEXT(g_hVulkanInstance, &callbackCreateInfo, g_Allocs, &g_hCallback) );
612 }
613
IsLayerSupported(const VkLayerProperties * pProps,size_t propCount,const char * pLayerName)614 static bool IsLayerSupported(const VkLayerProperties* pProps, size_t propCount, const char* pLayerName)
615 {
616 const VkLayerProperties* propsEnd = pProps + propCount;
617 return std::find_if(
618 pProps,
619 propsEnd,
620 [pLayerName](const VkLayerProperties& prop) -> bool {
621 return strcmp(pLayerName, prop.layerName) == 0;
622 }) != propsEnd;
623 }
624
FindSupportedFormat(const std::vector<VkFormat> & candidates,VkImageTiling tiling,VkFormatFeatureFlags features)625 static VkFormat FindSupportedFormat(
626 const std::vector<VkFormat>& candidates,
627 VkImageTiling tiling,
628 VkFormatFeatureFlags features)
629 {
630 for (VkFormat format : candidates)
631 {
632 VkFormatProperties props;
633 vkGetPhysicalDeviceFormatProperties(g_hPhysicalDevice, format, &props);
634
635 if ((tiling == VK_IMAGE_TILING_LINEAR) &&
636 ((props.linearTilingFeatures & features) == features))
637 {
638 return format;
639 }
640 else if ((tiling == VK_IMAGE_TILING_OPTIMAL) &&
641 ((props.optimalTilingFeatures & features) == features))
642 {
643 return format;
644 }
645 }
646 return VK_FORMAT_UNDEFINED;
647 }
648
FindDepthFormat()649 static VkFormat FindDepthFormat()
650 {
651 std::vector<VkFormat> formats;
652 formats.push_back(VK_FORMAT_D32_SFLOAT);
653 formats.push_back(VK_FORMAT_D32_SFLOAT_S8_UINT);
654 formats.push_back(VK_FORMAT_D24_UNORM_S8_UINT);
655
656 return FindSupportedFormat(
657 formats,
658 VK_IMAGE_TILING_OPTIMAL,
659 VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT);
660 }
661
CreateSwapchain()662 static void CreateSwapchain()
663 {
664 // Query surface formats.
665
666 ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfaceCapabilitiesKHR(g_hPhysicalDevice, g_hSurface, &g_SurfaceCapabilities) );
667
668 uint32_t formatCount = 0;
669 ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfaceFormatsKHR(g_hPhysicalDevice, g_hSurface, &formatCount, nullptr) );
670 g_SurfaceFormats.resize(formatCount);
671 ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfaceFormatsKHR(g_hPhysicalDevice, g_hSurface, &formatCount, g_SurfaceFormats.data()) );
672
673 uint32_t presentModeCount = 0;
674 ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfacePresentModesKHR(g_hPhysicalDevice, g_hSurface, &presentModeCount, nullptr) );
675 g_PresentModes.resize(presentModeCount);
676 ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfacePresentModesKHR(g_hPhysicalDevice, g_hSurface, &presentModeCount, g_PresentModes.data()) );
677
678 // Create swap chain
679
680 g_SurfaceFormat = ChooseSurfaceFormat();
681 VkPresentModeKHR presentMode = ChooseSwapPresentMode();
682 g_Extent = ChooseSwapExtent();
683
684 uint32_t imageCount = g_SurfaceCapabilities.minImageCount + 1;
685 if((g_SurfaceCapabilities.maxImageCount > 0) &&
686 (imageCount > g_SurfaceCapabilities.maxImageCount))
687 {
688 imageCount = g_SurfaceCapabilities.maxImageCount;
689 }
690
691 VkSwapchainCreateInfoKHR swapChainInfo = { VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR };
692 swapChainInfo.surface = g_hSurface;
693 swapChainInfo.minImageCount = imageCount;
694 swapChainInfo.imageFormat = g_SurfaceFormat.format;
695 swapChainInfo.imageColorSpace = g_SurfaceFormat.colorSpace;
696 swapChainInfo.imageExtent = g_Extent;
697 swapChainInfo.imageArrayLayers = 1;
698 swapChainInfo.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
699 swapChainInfo.preTransform = g_SurfaceCapabilities.currentTransform;
700 swapChainInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
701 swapChainInfo.presentMode = presentMode;
702 swapChainInfo.clipped = VK_TRUE;
703 swapChainInfo.oldSwapchain = g_hSwapchain;
704
705 uint32_t queueFamilyIndices[] = { g_GraphicsQueueFamilyIndex, g_PresentQueueFamilyIndex };
706 if(g_PresentQueueFamilyIndex != g_GraphicsQueueFamilyIndex)
707 {
708 swapChainInfo.imageSharingMode = VK_SHARING_MODE_CONCURRENT;
709 swapChainInfo.queueFamilyIndexCount = 2;
710 swapChainInfo.pQueueFamilyIndices = queueFamilyIndices;
711 }
712 else
713 {
714 swapChainInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
715 }
716
717 VkSwapchainKHR hNewSwapchain = VK_NULL_HANDLE;
718 ERR_GUARD_VULKAN( vkCreateSwapchainKHR(g_hDevice, &swapChainInfo, g_Allocs, &hNewSwapchain) );
719 if(g_hSwapchain != VK_NULL_HANDLE)
720 vkDestroySwapchainKHR(g_hDevice, g_hSwapchain, g_Allocs);
721 g_hSwapchain = hNewSwapchain;
722
723 // Retrieve swapchain images.
724
725 uint32_t swapchainImageCount = 0;
726 ERR_GUARD_VULKAN( vkGetSwapchainImagesKHR(g_hDevice, g_hSwapchain, &swapchainImageCount, nullptr) );
727 g_SwapchainImages.resize(swapchainImageCount);
728 ERR_GUARD_VULKAN( vkGetSwapchainImagesKHR(g_hDevice, g_hSwapchain, &swapchainImageCount, g_SwapchainImages.data()) );
729
730 // Create swapchain image views.
731
732 for(size_t i = g_SwapchainImageViews.size(); i--; )
733 vkDestroyImageView(g_hDevice, g_SwapchainImageViews[i], g_Allocs);
734 g_SwapchainImageViews.clear();
735
736 VkImageViewCreateInfo swapchainImageViewInfo = { VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO };
737 g_SwapchainImageViews.resize(swapchainImageCount);
738 for(uint32_t i = 0; i < swapchainImageCount; ++i)
739 {
740 swapchainImageViewInfo.image = g_SwapchainImages[i];
741 swapchainImageViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
742 swapchainImageViewInfo.format = g_SurfaceFormat.format;
743 swapchainImageViewInfo.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
744 swapchainImageViewInfo.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
745 swapchainImageViewInfo.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
746 swapchainImageViewInfo.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
747 swapchainImageViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
748 swapchainImageViewInfo.subresourceRange.baseMipLevel = 0;
749 swapchainImageViewInfo.subresourceRange.levelCount = 1;
750 swapchainImageViewInfo.subresourceRange.baseArrayLayer = 0;
751 swapchainImageViewInfo.subresourceRange.layerCount = 1;
752 ERR_GUARD_VULKAN( vkCreateImageView(g_hDevice, &swapchainImageViewInfo, g_Allocs, &g_SwapchainImageViews[i]) );
753 }
754
755 // Create depth buffer
756
757 g_DepthFormat = FindDepthFormat();
758 assert(g_DepthFormat != VK_FORMAT_UNDEFINED);
759
760 VkImageCreateInfo depthImageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
761 depthImageInfo.imageType = VK_IMAGE_TYPE_2D;
762 depthImageInfo.extent.width = g_Extent.width;
763 depthImageInfo.extent.height = g_Extent.height;
764 depthImageInfo.extent.depth = 1;
765 depthImageInfo.mipLevels = 1;
766 depthImageInfo.arrayLayers = 1;
767 depthImageInfo.format = g_DepthFormat;
768 depthImageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
769 depthImageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
770 depthImageInfo.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
771 depthImageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
772 depthImageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
773 depthImageInfo.flags = 0;
774
775 VmaAllocationCreateInfo depthImageAllocCreateInfo = {};
776 depthImageAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
777
778 ERR_GUARD_VULKAN( vmaCreateImage(g_hAllocator, &depthImageInfo, &depthImageAllocCreateInfo, &g_hDepthImage, &g_hDepthImageAlloc, nullptr) );
779
780 VkImageViewCreateInfo depthImageViewInfo = { VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO };
781 depthImageViewInfo.image = g_hDepthImage;
782 depthImageViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
783 depthImageViewInfo.format = g_DepthFormat;
784 depthImageViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
785 depthImageViewInfo.subresourceRange.baseMipLevel = 0;
786 depthImageViewInfo.subresourceRange.levelCount = 1;
787 depthImageViewInfo.subresourceRange.baseArrayLayer = 0;
788 depthImageViewInfo.subresourceRange.layerCount = 1;
789
790 ERR_GUARD_VULKAN( vkCreateImageView(g_hDevice, &depthImageViewInfo, g_Allocs, &g_hDepthImageView) );
791
792 // Create pipeline layout
793 {
794 if(g_hPipelineLayout != VK_NULL_HANDLE)
795 {
796 vkDestroyPipelineLayout(g_hDevice, g_hPipelineLayout, g_Allocs);
797 g_hPipelineLayout = VK_NULL_HANDLE;
798 }
799
800 VkPushConstantRange pushConstantRanges[1];
801 ZeroMemory(&pushConstantRanges, sizeof pushConstantRanges);
802 pushConstantRanges[0].offset = 0;
803 pushConstantRanges[0].size = sizeof(UniformBufferObject);
804 pushConstantRanges[0].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
805
806 VkDescriptorSetLayout descriptorSetLayouts[] = { g_hDescriptorSetLayout };
807 VkPipelineLayoutCreateInfo pipelineLayoutInfo = { VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO };
808 pipelineLayoutInfo.setLayoutCount = 1;
809 pipelineLayoutInfo.pSetLayouts = descriptorSetLayouts;
810 pipelineLayoutInfo.pushConstantRangeCount = 1;
811 pipelineLayoutInfo.pPushConstantRanges = pushConstantRanges;
812 ERR_GUARD_VULKAN( vkCreatePipelineLayout(g_hDevice, &pipelineLayoutInfo, g_Allocs, &g_hPipelineLayout) );
813 }
814
815 // Create render pass
816 {
817 if(g_hRenderPass != VK_NULL_HANDLE)
818 {
819 vkDestroyRenderPass(g_hDevice, g_hRenderPass, g_Allocs);
820 g_hRenderPass = VK_NULL_HANDLE;
821 }
822
823 VkAttachmentDescription attachments[2];
824 ZeroMemory(attachments, sizeof(attachments));
825
826 attachments[0].format = g_SurfaceFormat.format;
827 attachments[0].samples = VK_SAMPLE_COUNT_1_BIT;
828 attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
829 attachments[0].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
830 attachments[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
831 attachments[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
832 attachments[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
833 attachments[0].finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
834
835 attachments[1].format = g_DepthFormat;
836 attachments[1].samples = VK_SAMPLE_COUNT_1_BIT;
837 attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
838 attachments[1].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
839 attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
840 attachments[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
841 attachments[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
842 attachments[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
843
844 VkAttachmentReference colorAttachmentRef = {};
845 colorAttachmentRef.attachment = 0;
846 colorAttachmentRef.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
847
848 VkAttachmentReference depthStencilAttachmentRef = {};
849 depthStencilAttachmentRef.attachment = 1;
850 depthStencilAttachmentRef.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
851
852 VkSubpassDescription subpassDesc = {};
853 subpassDesc.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
854 subpassDesc.colorAttachmentCount = 1;
855 subpassDesc.pColorAttachments = &colorAttachmentRef;
856 subpassDesc.pDepthStencilAttachment = &depthStencilAttachmentRef;
857
858 VkRenderPassCreateInfo renderPassInfo = { VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO };
859 renderPassInfo.attachmentCount = (uint32_t)_countof(attachments);
860 renderPassInfo.pAttachments = attachments;
861 renderPassInfo.subpassCount = 1;
862 renderPassInfo.pSubpasses = &subpassDesc;
863 renderPassInfo.dependencyCount = 0;
864 ERR_GUARD_VULKAN( vkCreateRenderPass(g_hDevice, &renderPassInfo, g_Allocs, &g_hRenderPass) );
865 }
866
867 // Create pipeline
868 {
869 std::vector<char> vertShaderCode;
870 LoadShader(vertShaderCode, "Shader.vert.spv");
871 VkShaderModuleCreateInfo shaderModuleInfo = { VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO };
872 shaderModuleInfo.codeSize = vertShaderCode.size();
873 shaderModuleInfo.pCode = (const uint32_t*)vertShaderCode.data();
874 VkShaderModule hVertShaderModule = VK_NULL_HANDLE;
875 ERR_GUARD_VULKAN( vkCreateShaderModule(g_hDevice, &shaderModuleInfo, g_Allocs, &hVertShaderModule) );
876
877 std::vector<char> hFragShaderCode;
878 LoadShader(hFragShaderCode, "Shader.frag.spv");
879 shaderModuleInfo.codeSize = hFragShaderCode.size();
880 shaderModuleInfo.pCode = (const uint32_t*)hFragShaderCode.data();
881 VkShaderModule fragShaderModule = VK_NULL_HANDLE;
882 ERR_GUARD_VULKAN( vkCreateShaderModule(g_hDevice, &shaderModuleInfo, g_Allocs, &fragShaderModule) );
883
884 VkPipelineShaderStageCreateInfo vertPipelineShaderStageInfo = { VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO };
885 vertPipelineShaderStageInfo.stage = VK_SHADER_STAGE_VERTEX_BIT;
886 vertPipelineShaderStageInfo.module = hVertShaderModule;
887 vertPipelineShaderStageInfo.pName = "main";
888
889 VkPipelineShaderStageCreateInfo fragPipelineShaderStageInfo = { VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO };
890 fragPipelineShaderStageInfo.stage = VK_SHADER_STAGE_FRAGMENT_BIT;
891 fragPipelineShaderStageInfo.module = fragShaderModule;
892 fragPipelineShaderStageInfo.pName = "main";
893
894 VkPipelineShaderStageCreateInfo pipelineShaderStageInfos[] = {
895 vertPipelineShaderStageInfo,
896 fragPipelineShaderStageInfo
897 };
898
899 VkVertexInputBindingDescription bindingDescription = {};
900 bindingDescription.binding = 0;
901 bindingDescription.stride = sizeof(Vertex);
902 bindingDescription.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
903
904 VkVertexInputAttributeDescription attributeDescriptions[3];
905 ZeroMemory(attributeDescriptions, sizeof(attributeDescriptions));
906
907 attributeDescriptions[0].binding = 0;
908 attributeDescriptions[0].location = 0;
909 attributeDescriptions[0].format = VK_FORMAT_R32G32B32_SFLOAT;
910 attributeDescriptions[0].offset = offsetof(Vertex, pos);
911
912 attributeDescriptions[1].binding = 0;
913 attributeDescriptions[1].location = 1;
914 attributeDescriptions[1].format = VK_FORMAT_R32G32B32_SFLOAT;
915 attributeDescriptions[1].offset = offsetof(Vertex, color);
916
917 attributeDescriptions[2].binding = 0;
918 attributeDescriptions[2].location = 2;
919 attributeDescriptions[2].format = VK_FORMAT_R32G32_SFLOAT;
920 attributeDescriptions[2].offset = offsetof(Vertex, texCoord);
921
922 VkPipelineVertexInputStateCreateInfo pipelineVertexInputStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO };
923 pipelineVertexInputStateInfo.vertexBindingDescriptionCount = 1;
924 pipelineVertexInputStateInfo.pVertexBindingDescriptions = &bindingDescription;
925 pipelineVertexInputStateInfo.vertexAttributeDescriptionCount = _countof(attributeDescriptions);
926 pipelineVertexInputStateInfo.pVertexAttributeDescriptions = attributeDescriptions;
927
928 VkPipelineInputAssemblyStateCreateInfo pipelineInputAssemblyStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO };
929 pipelineInputAssemblyStateInfo.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
930 pipelineInputAssemblyStateInfo.primitiveRestartEnable = VK_TRUE;
931
932 VkViewport viewport = {};
933 viewport.x = 0.f;
934 viewport.y = 0.f;
935 viewport.width = (float)g_Extent.width;
936 viewport.height = (float)g_Extent.height;
937 viewport.minDepth = 0.f;
938 viewport.maxDepth = 1.f;
939
940 VkRect2D scissor = {};
941 scissor.offset.x = 0;
942 scissor.offset.y = 0;
943 scissor.extent = g_Extent;
944
945 VkPipelineViewportStateCreateInfo pipelineViewportStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO };
946 pipelineViewportStateInfo.viewportCount = 1;
947 pipelineViewportStateInfo.pViewports = &viewport;
948 pipelineViewportStateInfo.scissorCount = 1;
949 pipelineViewportStateInfo.pScissors = &scissor;
950
951 VkPipelineRasterizationStateCreateInfo pipelineRasterizationStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO };
952 pipelineRasterizationStateInfo.depthClampEnable = VK_FALSE;
953 pipelineRasterizationStateInfo.rasterizerDiscardEnable = VK_FALSE;
954 pipelineRasterizationStateInfo.polygonMode = VK_POLYGON_MODE_FILL;
955 pipelineRasterizationStateInfo.lineWidth = 1.f;
956 pipelineRasterizationStateInfo.cullMode = VK_CULL_MODE_BACK_BIT;
957 pipelineRasterizationStateInfo.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
958 pipelineRasterizationStateInfo.depthBiasEnable = VK_FALSE;
959 pipelineRasterizationStateInfo.depthBiasConstantFactor = 0.f;
960 pipelineRasterizationStateInfo.depthBiasClamp = 0.f;
961 pipelineRasterizationStateInfo.depthBiasSlopeFactor = 0.f;
962
963 VkPipelineMultisampleStateCreateInfo pipelineMultisampleStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO };
964 pipelineMultisampleStateInfo.sampleShadingEnable = VK_FALSE;
965 pipelineMultisampleStateInfo.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
966 pipelineMultisampleStateInfo.minSampleShading = 1.f;
967 pipelineMultisampleStateInfo.pSampleMask = nullptr;
968 pipelineMultisampleStateInfo.alphaToCoverageEnable = VK_FALSE;
969 pipelineMultisampleStateInfo.alphaToOneEnable = VK_FALSE;
970
971 VkPipelineColorBlendAttachmentState pipelineColorBlendAttachmentState = {};
972 pipelineColorBlendAttachmentState.colorWriteMask =
973 VK_COLOR_COMPONENT_R_BIT |
974 VK_COLOR_COMPONENT_G_BIT |
975 VK_COLOR_COMPONENT_B_BIT |
976 VK_COLOR_COMPONENT_A_BIT;
977 pipelineColorBlendAttachmentState.blendEnable = VK_FALSE;
978 pipelineColorBlendAttachmentState.srcColorBlendFactor = VK_BLEND_FACTOR_ONE; // Optional
979 pipelineColorBlendAttachmentState.dstColorBlendFactor = VK_BLEND_FACTOR_ZERO; // Optional
980 pipelineColorBlendAttachmentState.colorBlendOp = VK_BLEND_OP_ADD; // Optional
981 pipelineColorBlendAttachmentState.srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE; // Optional
982 pipelineColorBlendAttachmentState.dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO; // Optional
983 pipelineColorBlendAttachmentState.alphaBlendOp = VK_BLEND_OP_ADD; // Optional
984
985 VkPipelineColorBlendStateCreateInfo pipelineColorBlendStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO };
986 pipelineColorBlendStateInfo.logicOpEnable = VK_FALSE;
987 pipelineColorBlendStateInfo.logicOp = VK_LOGIC_OP_COPY;
988 pipelineColorBlendStateInfo.attachmentCount = 1;
989 pipelineColorBlendStateInfo.pAttachments = &pipelineColorBlendAttachmentState;
990
991 VkPipelineDepthStencilStateCreateInfo depthStencilStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO };
992 depthStencilStateInfo.depthTestEnable = VK_TRUE;
993 depthStencilStateInfo.depthWriteEnable = VK_TRUE;
994 depthStencilStateInfo.depthCompareOp = VK_COMPARE_OP_LESS;
995 depthStencilStateInfo.depthBoundsTestEnable = VK_FALSE;
996 depthStencilStateInfo.stencilTestEnable = VK_FALSE;
997
998 VkGraphicsPipelineCreateInfo pipelineInfo = { VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO };
999 pipelineInfo.stageCount = 2;
1000 pipelineInfo.pStages = pipelineShaderStageInfos;
1001 pipelineInfo.pVertexInputState = &pipelineVertexInputStateInfo;
1002 pipelineInfo.pInputAssemblyState = &pipelineInputAssemblyStateInfo;
1003 pipelineInfo.pViewportState = &pipelineViewportStateInfo;
1004 pipelineInfo.pRasterizationState = &pipelineRasterizationStateInfo;
1005 pipelineInfo.pMultisampleState = &pipelineMultisampleStateInfo;
1006 pipelineInfo.pDepthStencilState = &depthStencilStateInfo;
1007 pipelineInfo.pColorBlendState = &pipelineColorBlendStateInfo;
1008 pipelineInfo.pDynamicState = nullptr;
1009 pipelineInfo.layout = g_hPipelineLayout;
1010 pipelineInfo.renderPass = g_hRenderPass;
1011 pipelineInfo.subpass = 0;
1012 pipelineInfo.basePipelineHandle = VK_NULL_HANDLE;
1013 pipelineInfo.basePipelineIndex = -1;
1014 ERR_GUARD_VULKAN( vkCreateGraphicsPipelines(
1015 g_hDevice,
1016 VK_NULL_HANDLE,
1017 1,
1018 &pipelineInfo,
1019 g_Allocs,
1020 &g_hPipeline) );
1021
1022 vkDestroyShaderModule(g_hDevice, fragShaderModule, g_Allocs);
1023 vkDestroyShaderModule(g_hDevice, hVertShaderModule, g_Allocs);
1024 }
1025
1026 // Create frambuffers
1027
1028 for(size_t i = g_Framebuffers.size(); i--; )
1029 vkDestroyFramebuffer(g_hDevice, g_Framebuffers[i], g_Allocs);
1030 g_Framebuffers.clear();
1031
1032 g_Framebuffers.resize(g_SwapchainImageViews.size());
1033 for(size_t i = 0; i < g_SwapchainImages.size(); ++i)
1034 {
1035 VkImageView attachments[] = { g_SwapchainImageViews[i], g_hDepthImageView };
1036
1037 VkFramebufferCreateInfo framebufferInfo = { VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO };
1038 framebufferInfo.renderPass = g_hRenderPass;
1039 framebufferInfo.attachmentCount = (uint32_t)_countof(attachments);
1040 framebufferInfo.pAttachments = attachments;
1041 framebufferInfo.width = g_Extent.width;
1042 framebufferInfo.height = g_Extent.height;
1043 framebufferInfo.layers = 1;
1044 ERR_GUARD_VULKAN( vkCreateFramebuffer(g_hDevice, &framebufferInfo, g_Allocs, &g_Framebuffers[i]) );
1045 }
1046
1047 // Create semaphores
1048
1049 if(g_hImageAvailableSemaphore != VK_NULL_HANDLE)
1050 {
1051 vkDestroySemaphore(g_hDevice, g_hImageAvailableSemaphore, g_Allocs);
1052 g_hImageAvailableSemaphore = VK_NULL_HANDLE;
1053 }
1054 if(g_hRenderFinishedSemaphore != VK_NULL_HANDLE)
1055 {
1056 vkDestroySemaphore(g_hDevice, g_hRenderFinishedSemaphore, g_Allocs);
1057 g_hRenderFinishedSemaphore = VK_NULL_HANDLE;
1058 }
1059
1060 VkSemaphoreCreateInfo semaphoreInfo = { VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO };
1061 ERR_GUARD_VULKAN( vkCreateSemaphore(g_hDevice, &semaphoreInfo, g_Allocs, &g_hImageAvailableSemaphore) );
1062 ERR_GUARD_VULKAN( vkCreateSemaphore(g_hDevice, &semaphoreInfo, g_Allocs, &g_hRenderFinishedSemaphore) );
1063 }
1064
DestroySwapchain(bool destroyActualSwapchain)1065 static void DestroySwapchain(bool destroyActualSwapchain)
1066 {
1067 if(g_hImageAvailableSemaphore != VK_NULL_HANDLE)
1068 {
1069 vkDestroySemaphore(g_hDevice, g_hImageAvailableSemaphore, g_Allocs);
1070 g_hImageAvailableSemaphore = VK_NULL_HANDLE;
1071 }
1072 if(g_hRenderFinishedSemaphore != VK_NULL_HANDLE)
1073 {
1074 vkDestroySemaphore(g_hDevice, g_hRenderFinishedSemaphore, g_Allocs);
1075 g_hRenderFinishedSemaphore = VK_NULL_HANDLE;
1076 }
1077
1078 for(size_t i = g_Framebuffers.size(); i--; )
1079 vkDestroyFramebuffer(g_hDevice, g_Framebuffers[i], g_Allocs);
1080 g_Framebuffers.clear();
1081
1082 if(g_hDepthImageView != VK_NULL_HANDLE)
1083 {
1084 vkDestroyImageView(g_hDevice, g_hDepthImageView, g_Allocs);
1085 g_hDepthImageView = VK_NULL_HANDLE;
1086 }
1087 if(g_hDepthImage != VK_NULL_HANDLE)
1088 {
1089 vmaDestroyImage(g_hAllocator, g_hDepthImage, g_hDepthImageAlloc);
1090 g_hDepthImage = VK_NULL_HANDLE;
1091 }
1092
1093 if(g_hPipeline != VK_NULL_HANDLE)
1094 {
1095 vkDestroyPipeline(g_hDevice, g_hPipeline, g_Allocs);
1096 g_hPipeline = VK_NULL_HANDLE;
1097 }
1098
1099 if(g_hRenderPass != VK_NULL_HANDLE)
1100 {
1101 vkDestroyRenderPass(g_hDevice, g_hRenderPass, g_Allocs);
1102 g_hRenderPass = VK_NULL_HANDLE;
1103 }
1104
1105 if(g_hPipelineLayout != VK_NULL_HANDLE)
1106 {
1107 vkDestroyPipelineLayout(g_hDevice, g_hPipelineLayout, g_Allocs);
1108 g_hPipelineLayout = VK_NULL_HANDLE;
1109 }
1110
1111 for(size_t i = g_SwapchainImageViews.size(); i--; )
1112 vkDestroyImageView(g_hDevice, g_SwapchainImageViews[i], g_Allocs);
1113 g_SwapchainImageViews.clear();
1114
1115 if(destroyActualSwapchain && (g_hSwapchain != VK_NULL_HANDLE))
1116 {
1117 vkDestroySwapchainKHR(g_hDevice, g_hSwapchain, g_Allocs);
1118 g_hSwapchain = VK_NULL_HANDLE;
1119 }
1120 }
1121
// Maps the compile-time VMA_VULKAN_VERSION setting (encoded as e.g. 1002000
// for 1.2) to the corresponding VK_API_VERSION_* constant requested from the
// Vulkan loader. Evaluated at compile time.
static constexpr uint32_t GetVulkanApiVersion()
{
#if VMA_VULKAN_VERSION == 1002000
    return VK_API_VERSION_1_2;
#elif VMA_VULKAN_VERSION == 1001000
    return VK_API_VERSION_1_1;
#elif VMA_VULKAN_VERSION == 1000000
    return VK_API_VERSION_1_0;
#else
#error Invalid VMA_VULKAN_VERSION.
    // Unreachable: the #error above aborts compilation. Kept so the function
    // still has a return statement on this path.
    return UINT32_MAX;
#endif
}
1135
PrintEnabledFeatures()1136 static void PrintEnabledFeatures()
1137 {
1138 wprintf(L"Validation layer: %d\n", g_EnableValidationLayer ? 1 : 0);
1139 wprintf(L"Sparse binding: %d\n", g_SparseBindingEnabled ? 1 : 0);
1140 wprintf(L"Buffer device address: %d\n", g_BufferDeviceAddressEnabled ? 1 : 0);
1141 if(GetVulkanApiVersion() == VK_API_VERSION_1_0)
1142 {
1143 wprintf(L"VK_KHR_get_memory_requirements2: %d\n", VK_KHR_get_memory_requirements2_enabled ? 1 : 0);
1144 wprintf(L"VK_KHR_get_physical_device_properties2: %d\n", VK_KHR_get_physical_device_properties2_enabled ? 1 : 0);
1145 wprintf(L"VK_KHR_dedicated_allocation: %d\n", VK_KHR_dedicated_allocation_enabled ? 1 : 0);
1146 wprintf(L"VK_KHR_bind_memory2: %d\n", VK_KHR_bind_memory2_enabled ? 1 : 0);
1147 }
1148 wprintf(L"VK_EXT_memory_budget: %d\n", VK_EXT_memory_budget_enabled ? 1 : 0);
1149 wprintf(L"VK_AMD_device_coherent_memory: %d\n", VK_AMD_device_coherent_memory_enabled ? 1 : 0);
1150 wprintf(L"VK_KHR_buffer_device_address: %d\n", VK_KHR_buffer_device_address_enabled ? 1 : 0);
1151 wprintf(L"VK_EXT_buffer_device_address: %d\n", VK_EXT_buffer_device_address_enabled ? 1 : 0);
1152 }
1153
// Fills outInfo with the configuration used to create the global VmaAllocator.
// Relies on globals set up earlier during initialization: the instance/device
// handles and the VK_*_enabled flags recording which extensions were enabled.
void SetAllocatorCreateInfo(VmaAllocatorCreateInfo& outInfo)
{
    outInfo = {};

    outInfo.physicalDevice = g_hPhysicalDevice;
    outInfo.device = g_hDevice;
    outInfo.instance = g_hVulkanInstance;
    outInfo.vulkanApiVersion = GetVulkanApiVersion();

    // Opt into VMA features backed by device extensions, but only when the
    // corresponding extension was actually enabled on the device.
    if(VK_KHR_dedicated_allocation_enabled)
    {
        outInfo.flags |= VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT;
    }
    if(VK_KHR_bind_memory2_enabled)
    {
        outInfo.flags |= VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT;
    }
#if !defined(VMA_MEMORY_BUDGET) || VMA_MEMORY_BUDGET == 1
    // Memory budget needs VK_EXT_memory_budget plus either Vulkan >= 1.1 or
    // VK_KHR_get_physical_device_properties2.
    if(VK_EXT_memory_budget_enabled && (
        GetVulkanApiVersion() >= VK_API_VERSION_1_1 || VK_KHR_get_physical_device_properties2_enabled))
    {
        outInfo.flags |= VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT;
    }
#endif
    if(VK_AMD_device_coherent_memory_enabled)
    {
        outInfo.flags |= VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT;
    }
    if(g_BufferDeviceAddressEnabled)
    {
        outInfo.flags |= VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT;
    }

    if(USE_CUSTOM_CPU_ALLOCATION_CALLBACKS)
    {
        // Route VMA's own CPU-side allocations through the custom callbacks.
        outInfo.pAllocationCallbacks = &g_CpuAllocationCallbacks;
    }

    // Uncomment to enable recording to CSV file.
    /*
    static VmaRecordSettings recordSettings = {};
    recordSettings.pFilePath = "VulkanSample.csv";
    outInfo.pRecordSettings = &recordSettings;
    */

    // Uncomment to enable HeapSizeLimit.
    /*
    static std::array<VkDeviceSize, VK_MAX_MEMORY_HEAPS> heapSizeLimit;
    std::fill(heapSizeLimit.begin(), heapSizeLimit.end(), VK_WHOLE_SIZE);
    heapSizeLimit[0] = 512ull * 1024 * 1024;
    outInfo.pHeapSizeLimit = heapSizeLimit.data();
    */
}
1207
PrintPhysicalDeviceProperties(const VkPhysicalDeviceProperties & properties)1208 static void PrintPhysicalDeviceProperties(const VkPhysicalDeviceProperties& properties)
1209 {
1210 wprintf(L"Physical device:\n");
1211 wprintf(L" Driver version: 0x%X\n", properties.driverVersion);
1212 wprintf(L" Vendor ID: 0x%X\n", properties.vendorID);
1213 wprintf(L" Device ID: 0x%X\n", properties.deviceID);
1214 wprintf(L" Device type: %u\n", properties.deviceType);
1215 wprintf(L" Device name: %hs\n", properties.deviceName);
1216 }
1217
InitializeApplication()1218 static void InitializeApplication()
1219 {
1220 if(USE_CUSTOM_CPU_ALLOCATION_CALLBACKS)
1221 {
1222 g_Allocs = &g_CpuAllocationCallbacks;
1223 }
1224
1225 uint32_t instanceLayerPropCount = 0;
1226 ERR_GUARD_VULKAN( vkEnumerateInstanceLayerProperties(&instanceLayerPropCount, nullptr) );
1227 std::vector<VkLayerProperties> instanceLayerProps(instanceLayerPropCount);
1228 if(instanceLayerPropCount > 0)
1229 {
1230 ERR_GUARD_VULKAN( vkEnumerateInstanceLayerProperties(&instanceLayerPropCount, instanceLayerProps.data()) );
1231 }
1232
1233 if(g_EnableValidationLayer == true)
1234 {
1235 if(IsLayerSupported(instanceLayerProps.data(), instanceLayerProps.size(), VALIDATION_LAYER_NAME) == false)
1236 {
1237 wprintf(L"Layer \"%hs\" not supported.", VALIDATION_LAYER_NAME);
1238 g_EnableValidationLayer = false;
1239 }
1240 }
1241
1242 uint32_t availableInstanceExtensionCount = 0;
1243 ERR_GUARD_VULKAN( vkEnumerateInstanceExtensionProperties(nullptr, &availableInstanceExtensionCount, nullptr) );
1244 std::vector<VkExtensionProperties> availableInstanceExtensions(availableInstanceExtensionCount);
1245 if(availableInstanceExtensionCount > 0)
1246 {
1247 ERR_GUARD_VULKAN( vkEnumerateInstanceExtensionProperties(nullptr, &availableInstanceExtensionCount, availableInstanceExtensions.data()) );
1248 }
1249
1250 std::vector<const char*> enabledInstanceExtensions;
1251 enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
1252 enabledInstanceExtensions.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
1253
1254 std::vector<const char*> instanceLayers;
1255 if(g_EnableValidationLayer == true)
1256 {
1257 instanceLayers.push_back(VALIDATION_LAYER_NAME);
1258 enabledInstanceExtensions.push_back("VK_EXT_debug_report");
1259 }
1260
1261 for(const auto& extensionProperties : availableInstanceExtensions)
1262 {
1263 if(strcmp(extensionProperties.extensionName, VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) == 0)
1264 {
1265 if(GetVulkanApiVersion() == VK_API_VERSION_1_0)
1266 {
1267 enabledInstanceExtensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
1268 VK_KHR_get_physical_device_properties2_enabled = true;
1269 }
1270 }
1271 }
1272
1273 VkApplicationInfo appInfo = { VK_STRUCTURE_TYPE_APPLICATION_INFO };
1274 appInfo.pApplicationName = APP_TITLE_A;
1275 appInfo.applicationVersion = VK_MAKE_VERSION(1, 0, 0);
1276 appInfo.pEngineName = "Adam Sawicki Engine";
1277 appInfo.engineVersion = VK_MAKE_VERSION(1, 0, 0);
1278 appInfo.apiVersion = GetVulkanApiVersion();
1279
1280 VkInstanceCreateInfo instInfo = { VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO };
1281 instInfo.pApplicationInfo = &appInfo;
1282 instInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
1283 instInfo.ppEnabledExtensionNames = enabledInstanceExtensions.data();
1284 instInfo.enabledLayerCount = static_cast<uint32_t>(instanceLayers.size());
1285 instInfo.ppEnabledLayerNames = instanceLayers.data();
1286
1287 wprintf(L"Vulkan API version: ");
1288 switch(appInfo.apiVersion)
1289 {
1290 case VK_API_VERSION_1_0: wprintf(L"1.0\n"); break;
1291 case VK_API_VERSION_1_1: wprintf(L"1.1\n"); break;
1292 case VK_API_VERSION_1_2: wprintf(L"1.2\n"); break;
1293 default: assert(0);
1294 }
1295
1296 ERR_GUARD_VULKAN( vkCreateInstance(&instInfo, g_Allocs, &g_hVulkanInstance) );
1297
1298 // Create VkSurfaceKHR.
1299 VkWin32SurfaceCreateInfoKHR surfaceInfo = { VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR };
1300 surfaceInfo.hinstance = g_hAppInstance;
1301 surfaceInfo.hwnd = g_hWnd;
1302 VkResult result = vkCreateWin32SurfaceKHR(g_hVulkanInstance, &surfaceInfo, g_Allocs, &g_hSurface);
1303 assert(result == VK_SUCCESS);
1304
1305 if(g_EnableValidationLayer == true)
1306 RegisterDebugCallbacks();
1307
1308 // Find physical device
1309
1310 uint32_t deviceCount = 0;
1311 ERR_GUARD_VULKAN( vkEnumeratePhysicalDevices(g_hVulkanInstance, &deviceCount, nullptr) );
1312 assert(deviceCount > 0);
1313
1314 std::vector<VkPhysicalDevice> physicalDevices(deviceCount);
1315 ERR_GUARD_VULKAN( vkEnumeratePhysicalDevices(g_hVulkanInstance, &deviceCount, physicalDevices.data()) );
1316
1317 g_hPhysicalDevice = physicalDevices[0];
1318
1319 // Query for device extensions
1320
1321 uint32_t physicalDeviceExtensionPropertyCount = 0;
1322 ERR_GUARD_VULKAN( vkEnumerateDeviceExtensionProperties(g_hPhysicalDevice, nullptr, &physicalDeviceExtensionPropertyCount, nullptr) );
1323 std::vector<VkExtensionProperties> physicalDeviceExtensionProperties{physicalDeviceExtensionPropertyCount};
1324 if(physicalDeviceExtensionPropertyCount)
1325 {
1326 ERR_GUARD_VULKAN( vkEnumerateDeviceExtensionProperties(
1327 g_hPhysicalDevice,
1328 nullptr,
1329 &physicalDeviceExtensionPropertyCount,
1330 physicalDeviceExtensionProperties.data()) );
1331 }
1332
1333 for(uint32_t i = 0; i < physicalDeviceExtensionPropertyCount; ++i)
1334 {
1335 if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME) == 0)
1336 {
1337 if(GetVulkanApiVersion() == VK_API_VERSION_1_0)
1338 {
1339 VK_KHR_get_memory_requirements2_enabled = true;
1340 }
1341 }
1342 else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME) == 0)
1343 {
1344 if(GetVulkanApiVersion() == VK_API_VERSION_1_0)
1345 {
1346 VK_KHR_dedicated_allocation_enabled = true;
1347 }
1348 }
1349 else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME) == 0)
1350 {
1351 if(GetVulkanApiVersion() == VK_API_VERSION_1_0)
1352 {
1353 VK_KHR_bind_memory2_enabled = true;
1354 }
1355 }
1356 else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_EXT_MEMORY_BUDGET_EXTENSION_NAME) == 0)
1357 VK_EXT_memory_budget_enabled = true;
1358 else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME) == 0)
1359 VK_AMD_device_coherent_memory_enabled = true;
1360 else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME) == 0)
1361 {
1362 if(GetVulkanApiVersion() < VK_API_VERSION_1_2)
1363 {
1364 VK_KHR_buffer_device_address_enabled = true;
1365 }
1366 }
1367 else if(strcmp(physicalDeviceExtensionProperties[i].extensionName, VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME) == 0)
1368 {
1369 if(GetVulkanApiVersion() < VK_API_VERSION_1_2)
1370 {
1371 VK_EXT_buffer_device_address_enabled = true;
1372 }
1373 }
1374 }
1375
1376 if(VK_EXT_buffer_device_address_enabled && VK_KHR_buffer_device_address_enabled)
1377 VK_EXT_buffer_device_address_enabled = false;
1378
1379 // Query for features
1380
1381 VkPhysicalDeviceProperties physicalDeviceProperties = {};
1382 vkGetPhysicalDeviceProperties(g_hPhysicalDevice, &physicalDeviceProperties);
1383
1384 PrintPhysicalDeviceProperties(physicalDeviceProperties);
1385
1386 VkPhysicalDeviceFeatures2 physicalDeviceFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 };
1387
1388 VkPhysicalDeviceCoherentMemoryFeaturesAMD physicalDeviceCoherentMemoryFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD };
1389 if(VK_AMD_device_coherent_memory_enabled)
1390 {
1391 PnextChainPushFront(&physicalDeviceFeatures, &physicalDeviceCoherentMemoryFeatures);
1392 }
1393
1394 VkPhysicalDeviceBufferDeviceAddressFeaturesEXT physicalDeviceBufferDeviceAddressFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT };
1395 if(VK_KHR_buffer_device_address_enabled || VK_EXT_buffer_device_address_enabled || GetVulkanApiVersion() >= VK_API_VERSION_1_2)
1396 {
1397 PnextChainPushFront(&physicalDeviceFeatures, &physicalDeviceBufferDeviceAddressFeatures);
1398 }
1399
1400 vkGetPhysicalDeviceFeatures2(g_hPhysicalDevice, &physicalDeviceFeatures);
1401
1402 g_SparseBindingEnabled = physicalDeviceFeatures.features.sparseBinding != 0;
1403
1404 // The extension is supported as fake with no real support for this feature? Don't use it.
1405 if(VK_AMD_device_coherent_memory_enabled && !physicalDeviceCoherentMemoryFeatures.deviceCoherentMemory)
1406 VK_AMD_device_coherent_memory_enabled = false;
1407 if(VK_KHR_buffer_device_address_enabled || VK_EXT_buffer_device_address_enabled || GetVulkanApiVersion() >= VK_API_VERSION_1_2)
1408 g_BufferDeviceAddressEnabled = physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddress != VK_FALSE;
1409
1410 // Find queue family index
1411
1412 uint32_t queueFamilyCount = 0;
1413 vkGetPhysicalDeviceQueueFamilyProperties(g_hPhysicalDevice, &queueFamilyCount, nullptr);
1414 assert(queueFamilyCount > 0);
1415 std::vector<VkQueueFamilyProperties> queueFamilies(queueFamilyCount);
1416 vkGetPhysicalDeviceQueueFamilyProperties(g_hPhysicalDevice, &queueFamilyCount, queueFamilies.data());
1417 for(uint32_t i = 0;
1418 (i < queueFamilyCount) &&
1419 (g_GraphicsQueueFamilyIndex == UINT_MAX ||
1420 g_PresentQueueFamilyIndex == UINT_MAX ||
1421 (g_SparseBindingEnabled && g_SparseBindingQueueFamilyIndex == UINT_MAX));
1422 ++i)
1423 {
1424 if(queueFamilies[i].queueCount > 0)
1425 {
1426 const uint32_t flagsForGraphicsQueue = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
1427 if((g_GraphicsQueueFamilyIndex != 0) &&
1428 ((queueFamilies[i].queueFlags & flagsForGraphicsQueue) == flagsForGraphicsQueue))
1429 {
1430 g_GraphicsQueueFamilyIndex = i;
1431 }
1432
1433 VkBool32 surfaceSupported = 0;
1434 VkResult res = vkGetPhysicalDeviceSurfaceSupportKHR(g_hPhysicalDevice, i, g_hSurface, &surfaceSupported);
1435 if((res >= 0) && (surfaceSupported == VK_TRUE))
1436 {
1437 g_PresentQueueFamilyIndex = i;
1438 }
1439
1440 if(g_SparseBindingEnabled &&
1441 g_SparseBindingQueueFamilyIndex == UINT32_MAX &&
1442 (queueFamilies[i].queueFlags & VK_QUEUE_SPARSE_BINDING_BIT) != 0)
1443 {
1444 g_SparseBindingQueueFamilyIndex = i;
1445 }
1446 }
1447 }
1448 assert(g_GraphicsQueueFamilyIndex != UINT_MAX);
1449
1450 g_SparseBindingEnabled = g_SparseBindingEnabled && g_SparseBindingQueueFamilyIndex != UINT32_MAX;
1451
1452 // Create logical device
1453
1454 const float queuePriority = 1.f;
1455
1456 VkDeviceQueueCreateInfo queueCreateInfo[3] = {};
1457 uint32_t queueCount = 1;
1458 queueCreateInfo[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
1459 queueCreateInfo[0].queueFamilyIndex = g_GraphicsQueueFamilyIndex;
1460 queueCreateInfo[0].queueCount = 1;
1461 queueCreateInfo[0].pQueuePriorities = &queuePriority;
1462
1463 if(g_PresentQueueFamilyIndex != g_GraphicsQueueFamilyIndex)
1464 {
1465
1466 queueCreateInfo[queueCount].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
1467 queueCreateInfo[queueCount].queueFamilyIndex = g_PresentQueueFamilyIndex;
1468 queueCreateInfo[queueCount].queueCount = 1;
1469 queueCreateInfo[queueCount].pQueuePriorities = &queuePriority;
1470 ++queueCount;
1471 }
1472
1473 if(g_SparseBindingEnabled &&
1474 g_SparseBindingQueueFamilyIndex != g_GraphicsQueueFamilyIndex &&
1475 g_SparseBindingQueueFamilyIndex != g_PresentQueueFamilyIndex)
1476 {
1477
1478 queueCreateInfo[queueCount].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
1479 queueCreateInfo[queueCount].queueFamilyIndex = g_SparseBindingQueueFamilyIndex;
1480 queueCreateInfo[queueCount].queueCount = 1;
1481 queueCreateInfo[queueCount].pQueuePriorities = &queuePriority;
1482 ++queueCount;
1483 }
1484
1485 std::vector<const char*> enabledDeviceExtensions;
1486 enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
1487 if(VK_KHR_get_memory_requirements2_enabled)
1488 enabledDeviceExtensions.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
1489 if(VK_KHR_dedicated_allocation_enabled)
1490 enabledDeviceExtensions.push_back(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
1491 if(VK_KHR_bind_memory2_enabled)
1492 enabledDeviceExtensions.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
1493 if(VK_EXT_memory_budget_enabled)
1494 enabledDeviceExtensions.push_back(VK_EXT_MEMORY_BUDGET_EXTENSION_NAME);
1495 if(VK_AMD_device_coherent_memory_enabled)
1496 enabledDeviceExtensions.push_back(VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME);
1497 if(VK_KHR_buffer_device_address_enabled)
1498 enabledDeviceExtensions.push_back(VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME);
1499 if(VK_EXT_buffer_device_address_enabled)
1500 enabledDeviceExtensions.push_back(VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME);
1501
1502 VkPhysicalDeviceFeatures2 deviceFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 };
1503 deviceFeatures.features.samplerAnisotropy = VK_TRUE;
1504 deviceFeatures.features.sparseBinding = g_SparseBindingEnabled ? VK_TRUE : VK_FALSE;
1505
1506 if(VK_AMD_device_coherent_memory_enabled)
1507 {
1508 physicalDeviceCoherentMemoryFeatures.deviceCoherentMemory = VK_TRUE;
1509 PnextChainPushBack(&deviceFeatures, &physicalDeviceCoherentMemoryFeatures);
1510 }
1511 if(g_BufferDeviceAddressEnabled)
1512 {
1513 physicalDeviceBufferDeviceAddressFeatures = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT };
1514 physicalDeviceBufferDeviceAddressFeatures.bufferDeviceAddress = VK_TRUE;
1515 PnextChainPushBack(&deviceFeatures, &physicalDeviceBufferDeviceAddressFeatures);
1516 }
1517
1518 VkDeviceCreateInfo deviceCreateInfo = { VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO };
1519 deviceCreateInfo.pNext = &deviceFeatures;
1520 deviceCreateInfo.enabledLayerCount = 0;
1521 deviceCreateInfo.ppEnabledLayerNames = nullptr;
1522 deviceCreateInfo.enabledExtensionCount = (uint32_t)enabledDeviceExtensions.size();
1523 deviceCreateInfo.ppEnabledExtensionNames = !enabledDeviceExtensions.empty() ? enabledDeviceExtensions.data() : nullptr;
1524 deviceCreateInfo.queueCreateInfoCount = queueCount;
1525 deviceCreateInfo.pQueueCreateInfos = queueCreateInfo;
1526
1527 ERR_GUARD_VULKAN( vkCreateDevice(g_hPhysicalDevice, &deviceCreateInfo, g_Allocs, &g_hDevice) );
1528
1529 // Fetch pointers to extension functions
1530 if(g_BufferDeviceAddressEnabled)
1531 {
1532 if(GetVulkanApiVersion() >= VK_API_VERSION_1_2)
1533 {
1534 g_vkGetBufferDeviceAddressEXT = (PFN_vkGetBufferDeviceAddressEXT)vkGetDeviceProcAddr(g_hDevice, "vkGetBufferDeviceAddress");
1535 //assert(g_vkGetBufferDeviceAddressEXT != nullptr);
1536 /*
1537 For some reason this doesn't work, the pointer is NULL :( None of the below methods help.
1538
1539 Validation layers also report following error:
1540 [ VUID-VkMemoryAllocateInfo-flags-03331 ] Object: VK_NULL_HANDLE (Type = 0) | If VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR is set, bufferDeviceAddress must be enabled. The Vulkan spec states: If VkMemoryAllocateFlagsInfo::flags includes VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT, the bufferDeviceAddress feature must be enabled (https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#VUID-VkMemoryAllocateInfo-flags-03331)
1541 Despite I'm posting VkPhysicalDeviceBufferDeviceAddressFeaturesEXT::bufferDeviceAddress = VK_TRUE in VkDeviceCreateInfo::pNext chain.
1542
1543 if(g_vkGetBufferDeviceAddressEXT == nullptr)
1544 {
1545 g_vkGetBufferDeviceAddressEXT = &vkGetBufferDeviceAddress; // Doesn't run, cannot find entry point...
1546 }
1547
1548 if(g_vkGetBufferDeviceAddressEXT == nullptr)
1549 {
1550 g_vkGetBufferDeviceAddressEXT = (PFN_vkGetBufferDeviceAddressEXT)vkGetInstanceProcAddr(g_hVulkanInstance, "vkGetBufferDeviceAddress");
1551 }
1552 if(g_vkGetBufferDeviceAddressEXT == nullptr)
1553 {
1554 g_vkGetBufferDeviceAddressEXT = (PFN_vkGetBufferDeviceAddressEXT)vkGetDeviceProcAddr(g_hDevice, "vkGetBufferDeviceAddressKHR");
1555 }
1556 if(g_vkGetBufferDeviceAddressEXT == nullptr)
1557 {
1558 g_vkGetBufferDeviceAddressEXT = (PFN_vkGetBufferDeviceAddressEXT)vkGetDeviceProcAddr(g_hDevice, "vkGetBufferDeviceAddressEXT");
1559 }
1560 */
1561 }
1562 else if(VK_KHR_buffer_device_address_enabled)
1563 {
1564 g_vkGetBufferDeviceAddressEXT = (PFN_vkGetBufferDeviceAddressEXT)vkGetDeviceProcAddr(g_hDevice, "vkGetBufferDeviceAddressKHR");
1565 assert(g_vkGetBufferDeviceAddressEXT != nullptr);
1566 }
1567 else if(VK_EXT_buffer_device_address_enabled)
1568 {
1569 g_vkGetBufferDeviceAddressEXT = (PFN_vkGetBufferDeviceAddressEXT)vkGetDeviceProcAddr(g_hDevice, "vkGetBufferDeviceAddressEXT");
1570 assert(g_vkGetBufferDeviceAddressEXT != nullptr);
1571 }
1572 }
1573
1574 // Create memory allocator
1575
1576 VmaAllocatorCreateInfo allocatorInfo = {};
1577 SetAllocatorCreateInfo(allocatorInfo);
1578 ERR_GUARD_VULKAN( vmaCreateAllocator(&allocatorInfo, &g_hAllocator) );
1579
1580 PrintEnabledFeatures();
1581
1582 // Retrieve queues (don't need to be destroyed).
1583
1584 vkGetDeviceQueue(g_hDevice, g_GraphicsQueueFamilyIndex, 0, &g_hGraphicsQueue);
1585 vkGetDeviceQueue(g_hDevice, g_PresentQueueFamilyIndex, 0, &g_hPresentQueue);
1586 assert(g_hGraphicsQueue);
1587 assert(g_hPresentQueue);
1588
1589 if(g_SparseBindingEnabled)
1590 {
1591 vkGetDeviceQueue(g_hDevice, g_SparseBindingQueueFamilyIndex, 0, &g_hSparseBindingQueue);
1592 assert(g_hSparseBindingQueue);
1593 }
1594
1595 // Create command pool
1596
1597 VkCommandPoolCreateInfo commandPoolInfo = { VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO };
1598 commandPoolInfo.queueFamilyIndex = g_GraphicsQueueFamilyIndex;
1599 commandPoolInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
1600 ERR_GUARD_VULKAN( vkCreateCommandPool(g_hDevice, &commandPoolInfo, g_Allocs, &g_hCommandPool) );
1601
1602 VkCommandBufferAllocateInfo commandBufferInfo = { VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO };
1603 commandBufferInfo.commandPool = g_hCommandPool;
1604 commandBufferInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
1605 commandBufferInfo.commandBufferCount = COMMAND_BUFFER_COUNT;
1606 ERR_GUARD_VULKAN( vkAllocateCommandBuffers(g_hDevice, &commandBufferInfo, g_MainCommandBuffers) );
1607
1608 VkFenceCreateInfo fenceInfo = { VK_STRUCTURE_TYPE_FENCE_CREATE_INFO };
1609 fenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
1610 for(size_t i = 0; i < COMMAND_BUFFER_COUNT; ++i)
1611 {
1612 ERR_GUARD_VULKAN( vkCreateFence(g_hDevice, &fenceInfo, g_Allocs, &g_MainCommandBufferExecutedFances[i]) );
1613 }
1614
1615 ERR_GUARD_VULKAN( vkCreateFence(g_hDevice, &fenceInfo, g_Allocs, &g_ImmediateFence) );
1616
1617 commandBufferInfo.commandBufferCount = 1;
1618 ERR_GUARD_VULKAN( vkAllocateCommandBuffers(g_hDevice, &commandBufferInfo, &g_hTemporaryCommandBuffer) );
1619
1620 // Create texture sampler
1621
1622 VkSamplerCreateInfo samplerInfo = { VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO };
1623 samplerInfo.magFilter = VK_FILTER_LINEAR;
1624 samplerInfo.minFilter = VK_FILTER_LINEAR;
1625 samplerInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT;
1626 samplerInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT;
1627 samplerInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT;
1628 samplerInfo.anisotropyEnable = VK_TRUE;
1629 samplerInfo.maxAnisotropy = 16;
1630 samplerInfo.borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
1631 samplerInfo.unnormalizedCoordinates = VK_FALSE;
1632 samplerInfo.compareEnable = VK_FALSE;
1633 samplerInfo.compareOp = VK_COMPARE_OP_ALWAYS;
1634 samplerInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
1635 samplerInfo.mipLodBias = 0.f;
1636 samplerInfo.minLod = 0.f;
1637 samplerInfo.maxLod = FLT_MAX;
1638 ERR_GUARD_VULKAN( vkCreateSampler(g_hDevice, &samplerInfo, g_Allocs, &g_hSampler) );
1639
1640 CreateTexture(128, 128);
1641 CreateMesh();
1642
1643 VkDescriptorSetLayoutBinding samplerLayoutBinding = {};
1644 samplerLayoutBinding.binding = 1;
1645 samplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
1646 samplerLayoutBinding.descriptorCount = 1;
1647 samplerLayoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
1648
1649 VkDescriptorSetLayoutCreateInfo descriptorSetLayoutInfo = { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO };
1650 descriptorSetLayoutInfo.bindingCount = 1;
1651 descriptorSetLayoutInfo.pBindings = &samplerLayoutBinding;
1652 ERR_GUARD_VULKAN( vkCreateDescriptorSetLayout(g_hDevice, &descriptorSetLayoutInfo, g_Allocs, &g_hDescriptorSetLayout) );
1653
1654 // Create descriptor pool
1655
1656 VkDescriptorPoolSize descriptorPoolSizes[2];
1657 ZeroMemory(descriptorPoolSizes, sizeof(descriptorPoolSizes));
1658 descriptorPoolSizes[0].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
1659 descriptorPoolSizes[0].descriptorCount = 1;
1660 descriptorPoolSizes[1].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
1661 descriptorPoolSizes[1].descriptorCount = 1;
1662
1663 VkDescriptorPoolCreateInfo descriptorPoolInfo = { VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO };
1664 descriptorPoolInfo.poolSizeCount = (uint32_t)_countof(descriptorPoolSizes);
1665 descriptorPoolInfo.pPoolSizes = descriptorPoolSizes;
1666 descriptorPoolInfo.maxSets = 1;
1667 ERR_GUARD_VULKAN( vkCreateDescriptorPool(g_hDevice, &descriptorPoolInfo, g_Allocs, &g_hDescriptorPool) );
1668
1669 // Create descriptor set layout
1670
1671 VkDescriptorSetLayout descriptorSetLayouts[] = { g_hDescriptorSetLayout };
1672 VkDescriptorSetAllocateInfo descriptorSetInfo = { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO };
1673 descriptorSetInfo.descriptorPool = g_hDescriptorPool;
1674 descriptorSetInfo.descriptorSetCount = 1;
1675 descriptorSetInfo.pSetLayouts = descriptorSetLayouts;
1676 ERR_GUARD_VULKAN( vkAllocateDescriptorSets(g_hDevice, &descriptorSetInfo, &g_hDescriptorSet) );
1677
1678 VkDescriptorImageInfo descriptorImageInfo = {};
1679 descriptorImageInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
1680 descriptorImageInfo.imageView = g_hTextureImageView;
1681 descriptorImageInfo.sampler = g_hSampler;
1682
1683 VkWriteDescriptorSet writeDescriptorSet = { VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET };
1684 writeDescriptorSet.dstSet = g_hDescriptorSet;
1685 writeDescriptorSet.dstBinding = 1;
1686 writeDescriptorSet.dstArrayElement = 0;
1687 writeDescriptorSet.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
1688 writeDescriptorSet.descriptorCount = 1;
1689 writeDescriptorSet.pImageInfo = &descriptorImageInfo;
1690
1691 vkUpdateDescriptorSets(g_hDevice, 1, &writeDescriptorSet, 0, nullptr);
1692
1693 CreateSwapchain();
1694 }
1695
FinalizeApplication()1696 static void FinalizeApplication()
1697 {
1698 vkDeviceWaitIdle(g_hDevice);
1699
1700 DestroySwapchain(true);
1701
1702 if(g_hDescriptorPool != VK_NULL_HANDLE)
1703 {
1704 vkDestroyDescriptorPool(g_hDevice, g_hDescriptorPool, g_Allocs);
1705 g_hDescriptorPool = VK_NULL_HANDLE;
1706 }
1707
1708 if(g_hDescriptorSetLayout != VK_NULL_HANDLE)
1709 {
1710 vkDestroyDescriptorSetLayout(g_hDevice, g_hDescriptorSetLayout, g_Allocs);
1711 g_hDescriptorSetLayout = VK_NULL_HANDLE;
1712 }
1713
1714 if(g_hTextureImageView != VK_NULL_HANDLE)
1715 {
1716 vkDestroyImageView(g_hDevice, g_hTextureImageView, g_Allocs);
1717 g_hTextureImageView = VK_NULL_HANDLE;
1718 }
1719 if(g_hTextureImage != VK_NULL_HANDLE)
1720 {
1721 vmaDestroyImage(g_hAllocator, g_hTextureImage, g_hTextureImageAlloc);
1722 g_hTextureImage = VK_NULL_HANDLE;
1723 }
1724
1725 if(g_hIndexBuffer != VK_NULL_HANDLE)
1726 {
1727 vmaDestroyBuffer(g_hAllocator, g_hIndexBuffer, g_hIndexBufferAlloc);
1728 g_hIndexBuffer = VK_NULL_HANDLE;
1729 }
1730 if(g_hVertexBuffer != VK_NULL_HANDLE)
1731 {
1732 vmaDestroyBuffer(g_hAllocator, g_hVertexBuffer, g_hVertexBufferAlloc);
1733 g_hVertexBuffer = VK_NULL_HANDLE;
1734 }
1735
1736 if(g_hSampler != VK_NULL_HANDLE)
1737 {
1738 vkDestroySampler(g_hDevice, g_hSampler, g_Allocs);
1739 g_hSampler = VK_NULL_HANDLE;
1740 }
1741
1742 if(g_ImmediateFence)
1743 {
1744 vkDestroyFence(g_hDevice, g_ImmediateFence, g_Allocs);
1745 g_ImmediateFence = VK_NULL_HANDLE;
1746 }
1747
1748 for(size_t i = COMMAND_BUFFER_COUNT; i--; )
1749 {
1750 if(g_MainCommandBufferExecutedFances[i] != VK_NULL_HANDLE)
1751 {
1752 vkDestroyFence(g_hDevice, g_MainCommandBufferExecutedFances[i], g_Allocs);
1753 g_MainCommandBufferExecutedFances[i] = VK_NULL_HANDLE;
1754 }
1755 }
1756 if(g_MainCommandBuffers[0] != VK_NULL_HANDLE)
1757 {
1758 vkFreeCommandBuffers(g_hDevice, g_hCommandPool, COMMAND_BUFFER_COUNT, g_MainCommandBuffers);
1759 ZeroMemory(g_MainCommandBuffers, sizeof(g_MainCommandBuffers));
1760 }
1761 if(g_hTemporaryCommandBuffer != VK_NULL_HANDLE)
1762 {
1763 vkFreeCommandBuffers(g_hDevice, g_hCommandPool, 1, &g_hTemporaryCommandBuffer);
1764 g_hTemporaryCommandBuffer = VK_NULL_HANDLE;
1765 }
1766
1767 if(g_hCommandPool != VK_NULL_HANDLE)
1768 {
1769 vkDestroyCommandPool(g_hDevice, g_hCommandPool, g_Allocs);
1770 g_hCommandPool = VK_NULL_HANDLE;
1771 }
1772
1773 if(g_hAllocator != VK_NULL_HANDLE)
1774 {
1775 vmaDestroyAllocator(g_hAllocator);
1776 g_hAllocator = nullptr;
1777 }
1778
1779 if(g_hDevice != VK_NULL_HANDLE)
1780 {
1781 vkDestroyDevice(g_hDevice, g_Allocs);
1782 g_hDevice = nullptr;
1783 }
1784
1785 if(g_pvkDestroyDebugReportCallbackEXT && g_hCallback != VK_NULL_HANDLE)
1786 {
1787 g_pvkDestroyDebugReportCallbackEXT(g_hVulkanInstance, g_hCallback, g_Allocs);
1788 g_hCallback = VK_NULL_HANDLE;
1789 }
1790
1791 if(g_hSurface != VK_NULL_HANDLE)
1792 {
1793 vkDestroySurfaceKHR(g_hVulkanInstance, g_hSurface, g_Allocs);
1794 g_hSurface = VK_NULL_HANDLE;
1795 }
1796
1797 if(g_hVulkanInstance != VK_NULL_HANDLE)
1798 {
1799 vkDestroyInstance(g_hVulkanInstance, g_Allocs);
1800 g_hVulkanInstance = VK_NULL_HANDLE;
1801 }
1802 }
1803
// Builds a detailed statistics string from the VMA allocator and prints it to
// stdout. Compiles to a no-op when VMA_STATS_STRING_ENABLED is 0.
static void PrintAllocatorStats()
{
#if VMA_STATS_STRING_ENABLED
    char* statsText = nullptr;
    vmaBuildStatsString(g_hAllocator, &statsText, true);
    printf("%s\n", statsText);
    // The string is allocated by VMA and must be released through its API.
    vmaFreeStatsString(g_hAllocator, statsText);
#endif
}
1813
// Destroys and recreates the swapchain and its dependent resources, e.g.
// after a window resize or when acquire/present reports the swapchain out of
// date. Waits for the device to go idle first so nothing is destroyed while
// still in use by the GPU.
static void RecreateSwapChain()
{
    vkDeviceWaitIdle(g_hDevice);
    DestroySwapchain(false);
    CreateSwapchain();
}
1820
DrawFrame()1821 static void DrawFrame()
1822 {
1823 // Begin main command buffer
1824 size_t cmdBufIndex = (g_NextCommandBufferIndex++) % COMMAND_BUFFER_COUNT;
1825 VkCommandBuffer hCommandBuffer = g_MainCommandBuffers[cmdBufIndex];
1826 VkFence hCommandBufferExecutedFence = g_MainCommandBufferExecutedFances[cmdBufIndex];
1827
1828 ERR_GUARD_VULKAN( vkWaitForFences(g_hDevice, 1, &hCommandBufferExecutedFence, VK_TRUE, UINT64_MAX) );
1829 ERR_GUARD_VULKAN( vkResetFences(g_hDevice, 1, &hCommandBufferExecutedFence) );
1830
1831 VkCommandBufferBeginInfo commandBufferBeginInfo = { VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO };
1832 commandBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
1833 ERR_GUARD_VULKAN( vkBeginCommandBuffer(hCommandBuffer, &commandBufferBeginInfo) );
1834
1835 // Acquire swapchain image
1836 uint32_t imageIndex = 0;
1837 VkResult res = vkAcquireNextImageKHR(g_hDevice, g_hSwapchain, UINT64_MAX, g_hImageAvailableSemaphore, VK_NULL_HANDLE, &imageIndex);
1838 if(res == VK_ERROR_OUT_OF_DATE_KHR)
1839 {
1840 RecreateSwapChain();
1841 return;
1842 }
1843 else if(res < 0)
1844 {
1845 ERR_GUARD_VULKAN(res);
1846 }
1847
1848 // Record geometry pass
1849
1850 VkClearValue clearValues[2];
1851 ZeroMemory(clearValues, sizeof(clearValues));
1852 clearValues[0].color.float32[0] = 0.25f;
1853 clearValues[0].color.float32[1] = 0.25f;
1854 clearValues[0].color.float32[2] = 0.5f;
1855 clearValues[0].color.float32[3] = 1.0f;
1856 clearValues[1].depthStencil.depth = 1.0f;
1857
1858 VkRenderPassBeginInfo renderPassBeginInfo = { VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO };
1859 renderPassBeginInfo.renderPass = g_hRenderPass;
1860 renderPassBeginInfo.framebuffer = g_Framebuffers[imageIndex];
1861 renderPassBeginInfo.renderArea.offset.x = 0;
1862 renderPassBeginInfo.renderArea.offset.y = 0;
1863 renderPassBeginInfo.renderArea.extent = g_Extent;
1864 renderPassBeginInfo.clearValueCount = (uint32_t)_countof(clearValues);
1865 renderPassBeginInfo.pClearValues = clearValues;
1866 vkCmdBeginRenderPass(hCommandBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
1867
1868 vkCmdBindPipeline(
1869 hCommandBuffer,
1870 VK_PIPELINE_BIND_POINT_GRAPHICS,
1871 g_hPipeline);
1872
1873 mat4 view = mat4::LookAt(
1874 vec3(0.f, 0.f, 0.f),
1875 vec3(0.f, -2.f, 4.f),
1876 vec3(0.f, 1.f, 0.f));
1877 mat4 proj = mat4::Perspective(
1878 1.0471975511966f, // 60 degrees
1879 (float)g_Extent.width / (float)g_Extent.height,
1880 0.1f,
1881 1000.f);
1882 mat4 viewProj = view * proj;
1883
1884 vkCmdBindDescriptorSets(
1885 hCommandBuffer,
1886 VK_PIPELINE_BIND_POINT_GRAPHICS,
1887 g_hPipelineLayout,
1888 0,
1889 1,
1890 &g_hDescriptorSet,
1891 0,
1892 nullptr);
1893
1894 float rotationAngle = (float)GetTickCount() * 0.001f * (float)PI * 0.2f;
1895 mat4 model = mat4::RotationY(rotationAngle);
1896
1897 UniformBufferObject ubo = {};
1898 ubo.ModelViewProj = model * viewProj;
1899 vkCmdPushConstants(hCommandBuffer, g_hPipelineLayout, VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(UniformBufferObject), &ubo);
1900
1901 VkBuffer vertexBuffers[] = { g_hVertexBuffer };
1902 VkDeviceSize offsets[] = { 0 };
1903 vkCmdBindVertexBuffers(hCommandBuffer, 0, 1, vertexBuffers, offsets);
1904
1905 vkCmdBindIndexBuffer(hCommandBuffer, g_hIndexBuffer, 0, VK_INDEX_TYPE_UINT16);
1906
1907 vkCmdDrawIndexed(hCommandBuffer, g_IndexCount, 1, 0, 0, 0);
1908
1909 vkCmdEndRenderPass(hCommandBuffer);
1910
1911 vkEndCommandBuffer(hCommandBuffer);
1912
1913 // Submit command buffer
1914
1915 VkSemaphore submitWaitSemaphores[] = { g_hImageAvailableSemaphore };
1916 VkPipelineStageFlags submitWaitStages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT };
1917 VkSemaphore submitSignalSemaphores[] = { g_hRenderFinishedSemaphore };
1918 VkSubmitInfo submitInfo = { VK_STRUCTURE_TYPE_SUBMIT_INFO };
1919 submitInfo.waitSemaphoreCount = 1;
1920 submitInfo.pWaitSemaphores = submitWaitSemaphores;
1921 submitInfo.pWaitDstStageMask = submitWaitStages;
1922 submitInfo.commandBufferCount = 1;
1923 submitInfo.pCommandBuffers = &hCommandBuffer;
1924 submitInfo.signalSemaphoreCount = _countof(submitSignalSemaphores);
1925 submitInfo.pSignalSemaphores = submitSignalSemaphores;
1926 ERR_GUARD_VULKAN( vkQueueSubmit(g_hGraphicsQueue, 1, &submitInfo, hCommandBufferExecutedFence) );
1927
1928 VkSemaphore presentWaitSemaphores[] = { g_hRenderFinishedSemaphore };
1929
1930 VkSwapchainKHR swapchains[] = { g_hSwapchain };
1931 VkPresentInfoKHR presentInfo = { VK_STRUCTURE_TYPE_PRESENT_INFO_KHR };
1932 presentInfo.waitSemaphoreCount = _countof(presentWaitSemaphores);
1933 presentInfo.pWaitSemaphores = presentWaitSemaphores;
1934 presentInfo.swapchainCount = 1;
1935 presentInfo.pSwapchains = swapchains;
1936 presentInfo.pImageIndices = &imageIndex;
1937 presentInfo.pResults = nullptr;
1938 res = vkQueuePresentKHR(g_hPresentQueue, &presentInfo);
1939 if(res == VK_ERROR_OUT_OF_DATE_KHR)
1940 {
1941 RecreateSwapChain();
1942 }
1943 else
1944 ERR_GUARD_VULKAN(res);
1945 }
1946
HandlePossibleSizeChange()1947 static void HandlePossibleSizeChange()
1948 {
1949 RECT clientRect;
1950 GetClientRect(g_hWnd, &clientRect);
1951 LONG newSizeX = clientRect.right - clientRect.left;
1952 LONG newSizeY = clientRect.bottom - clientRect.top;
1953 if((newSizeX > 0) &&
1954 (newSizeY > 0) &&
1955 ((newSizeX != g_SizeX) || (newSizeY != g_SizeY)))
1956 {
1957 g_SizeX = newSizeX;
1958 g_SizeY = newSizeY;
1959
1960 RecreateSwapChain();
1961 }
1962 }
1963
// Window procedure for the main application window. Drives the application
// lifecycle: Vulkan is initialized in WM_CREATE and torn down in WM_DESTROY.
// Keyboard shortcuts: Esc = close, 'T' = run tests, 'S' = sparse binding tests.
static LRESULT WINAPI WndProc(HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam)
{
    switch(msg)
    {
    case WM_CREATE:
        // This is intentionally assigned here because we are now inside CreateWindow, before it returns.
        g_hWnd = hWnd;
        InitializeApplication();
        PrintAllocatorStats();
        return 0;

    case WM_DESTROY:
        // Release all Vulkan resources before asking the message loop to quit.
        FinalizeApplication();
        PostQuitMessage(0);
        return 0;

    // This prevents app from freezing when left Alt is pressed
    // (which normally enters modal menu loop).
    case WM_SYSKEYDOWN:
    case WM_SYSKEYUP:
        return 0;

    case WM_SIZE:
        // React only to restore/maximize; minimization is ignored.
        if((wParam == SIZE_MAXIMIZED) || (wParam == SIZE_RESTORED))
            HandlePossibleSizeChange();
        return 0;

    case WM_EXITSIZEMOVE:
        // Interactive resize/move finished - the client area may have changed.
        HandlePossibleSizeChange();
        return 0;

    case WM_KEYDOWN:
        switch(wParam)
        {
        case VK_ESCAPE:
            PostMessage(hWnd, WM_CLOSE, 0, 0);
            break;
        case 'T':
            // Run the test suite; report exceptions instead of letting them
            // escape the window procedure.
            try
            {
                Test();
            }
            catch(const std::exception& ex)
            {
                printf("ERROR: %s\n", ex.what());
            }
            break;
        case 'S':
            // Sparse binding tests require the feature enabled at device creation.
            try
            {
                if(g_SparseBindingEnabled)
                {
                    TestSparseBinding();
                }
                else
                {
                    printf("Sparse binding not supported.\n");
                }
            }
            catch(const std::exception& ex)
            {
                printf("ERROR: %s\n", ex.what());
            }
            break;
        }
        return 0;

    default:
        break;
    }

    return DefWindowProc(hWnd, msg, wParam, lParam);
}
2037
main()2038 int main()
2039 {
2040 g_hAppInstance = (HINSTANCE)GetModuleHandle(NULL);
2041
2042 WNDCLASSEX wndClassDesc = { sizeof(WNDCLASSEX) };
2043 wndClassDesc.style = CS_VREDRAW | CS_HREDRAW | CS_DBLCLKS;
2044 wndClassDesc.hbrBackground = NULL;
2045 wndClassDesc.hCursor = LoadCursor(NULL, IDC_CROSS);
2046 wndClassDesc.hIcon = LoadIcon(NULL, IDI_APPLICATION);
2047 wndClassDesc.hInstance = g_hAppInstance;
2048 wndClassDesc.lpfnWndProc = WndProc;
2049 wndClassDesc.lpszClassName = WINDOW_CLASS_NAME;
2050
2051 const ATOM hWndClass = RegisterClassEx(&wndClassDesc);
2052 assert(hWndClass);
2053
2054 const DWORD style = WS_VISIBLE | WS_OVERLAPPED | WS_CAPTION | WS_SYSMENU | WS_MINIMIZEBOX | WS_MAXIMIZEBOX | WS_THICKFRAME;
2055 const DWORD exStyle = 0;
2056
2057 RECT rect = { 0, 0, g_SizeX, g_SizeY };
2058 AdjustWindowRectEx(&rect, style, FALSE, exStyle);
2059
2060 CreateWindowEx(
2061 exStyle, WINDOW_CLASS_NAME, APP_TITLE_W, style,
2062 CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT,
2063 NULL, NULL, g_hAppInstance, NULL);
2064
2065 MSG msg;
2066 for(;;)
2067 {
2068 if(PeekMessage(&msg, NULL, 0, 0, PM_REMOVE))
2069 {
2070 if(msg.message == WM_QUIT)
2071 break;
2072 TranslateMessage(&msg);
2073 DispatchMessage(&msg);
2074 }
2075 if(g_hDevice != VK_NULL_HANDLE)
2076 DrawFrame();
2077 }
2078
2079 TEST(g_CpuAllocCount.load() == 0);
2080
2081 return 0;
2082 }
2083
2084 #else // #ifdef _WIN32
2085
2086 #include "VmaUsage.h"
2087
// This sample is Windows-only; on other platforms main() is a stub so the
// project still builds and links.
int main()
{
}
2091
2092 #endif // #ifdef _WIN32
2093