1 /*
2 * Copyright (c) 2015-2016 The Khronos Group Inc.
3 * Copyright (c) 2015-2016 Valve Corporation
4 * Copyright (c) 2015-2016 LunarG, Inc.
5 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 *
18 * Author: Chia-I Wu <olvaffe@gmail.com>
19 * Author: Cody Northrop <cody@lunarg.com>
20 * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
21 * Author: Ian Elliott <ian@LunarG.com>
22 * Author: Jon Ashburn <jon@lunarg.com>
23 * Author: Piers Daniell <pdaniell@nvidia.com>
24 * Author: Gwan-gyeong Mun <elongbug@gmail.com>
25 * Porter: Camilla Löwy <elmindreda@glfw.org>
26 */
27 /*
28  * Draw a textured triangle with depth testing. This demo is written against
29  * the Intel ICD. It does not do state transitions or object memory binding
30  * the way it should, and it does no error checking.
31 */
32
33 #include <stdio.h>
34 #include <stdlib.h>
35 #include <string.h>
36 #include <stdbool.h>
37 #include <assert.h>
38 #include <signal.h>
39
40 #ifdef _WIN32
41 #include <windows.h>
42 #endif
43
44 #define GLAD_VULKAN_IMPLEMENTATION
45 #include <glad/vulkan.h>
46 #define GLFW_INCLUDE_NONE
47 #include <GLFW/glfw3.h>
48
49 #define DEMO_TEXTURE_COUNT 1
50 #define VERTEX_BUFFER_BIND_ID 0
51 #define APP_SHORT_NAME "tri"
52 #define APP_LONG_NAME "The Vulkan Triangle Demo Program"
53
54 #define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
55
56 #if defined(NDEBUG) && defined(__GNUC__)
57 #define U_ASSERT_ONLY __attribute__((unused))
58 #else
59 #define U_ASSERT_ONLY
60 #endif
61
62 #define ERR_EXIT(err_msg, err_class)                                          \
63     do {                                                                      \
64         printf(err_msg);                                                      \
65         fflush(stdout);                                                       \
66         exit(1);                                                              \
67     } while (0)
68
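/*
 * Precompiled SPIR-V for the fragment shader. A roughly equivalent GLSL 400
 * source, reconstructed from the names and decorations embedded in the module
 * (not necessarily the exact text it was compiled from):
 *
 *     layout (binding = 0) uniform sampler2D tex;
 *     layout (location = 0) in vec2 texcoord;
 *     layout (location = 0) out vec4 uFragColor;
 *     void main() {
 *         uFragColor = texture(tex, texcoord);
 *     }
 */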
69 static const uint32_t fragShaderCode[] = {
70 0x07230203,0x00010000,0x00080007,0x00000014,0x00000000,0x00020011,0x00000001,0x0006000b,
71 0x00000001,0x4c534c47,0x6474732e,0x3035342e,0x00000000,0x0003000e,0x00000000,0x00000001,
72 0x0007000f,0x00000004,0x00000004,0x6e69616d,0x00000000,0x00000009,0x00000011,0x00030010,
73 0x00000004,0x00000007,0x00030003,0x00000002,0x00000190,0x00090004,0x415f4c47,0x735f4252,
74 0x72617065,0x5f657461,0x64616873,0x6f5f7265,0x63656a62,0x00007374,0x00090004,0x415f4c47,
75 0x735f4252,0x69646168,0x6c5f676e,0x75676e61,0x5f656761,0x70303234,0x006b6361,0x00040005,
76 0x00000004,0x6e69616d,0x00000000,0x00050005,0x00000009,0x61724675,0x6c6f4367,0x0000726f,
77 0x00030005,0x0000000d,0x00786574,0x00050005,0x00000011,0x63786574,0x64726f6f,0x00000000,
78 0x00040047,0x00000009,0x0000001e,0x00000000,0x00040047,0x0000000d,0x00000022,0x00000000,
79 0x00040047,0x0000000d,0x00000021,0x00000000,0x00040047,0x00000011,0x0000001e,0x00000000,
80 0x00020013,0x00000002,0x00030021,0x00000003,0x00000002,0x00030016,0x00000006,0x00000020,
81 0x00040017,0x00000007,0x00000006,0x00000004,0x00040020,0x00000008,0x00000003,0x00000007,
82 0x0004003b,0x00000008,0x00000009,0x00000003,0x00090019,0x0000000a,0x00000006,0x00000001,
83 0x00000000,0x00000000,0x00000000,0x00000001,0x00000000,0x0003001b,0x0000000b,0x0000000a,
84 0x00040020,0x0000000c,0x00000000,0x0000000b,0x0004003b,0x0000000c,0x0000000d,0x00000000,
85 0x00040017,0x0000000f,0x00000006,0x00000002,0x00040020,0x00000010,0x00000001,0x0000000f,
86 0x0004003b,0x00000010,0x00000011,0x00000001,0x00050036,0x00000002,0x00000004,0x00000000,
87 0x00000003,0x000200f8,0x00000005,0x0004003d,0x0000000b,0x0000000e,0x0000000d,0x0004003d,
88 0x0000000f,0x00000012,0x00000011,0x00050057,0x00000007,0x00000013,0x0000000e,0x00000012,
89 0x0003003e,0x00000009,0x00000013,0x000100fd,0x00010038
90 };
91
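/*
 * Precompiled SPIR-V for the vertex shader. A roughly equivalent GLSL 400
 * source, reconstructed from the names and decorations embedded in the module
 * (not necessarily the exact text it was compiled from):
 *
 *     layout (location = 0) in vec4 pos;
 *     layout (location = 1) in vec2 attr;
 *     layout (location = 0) out vec2 texcoord;
 *     void main() {
 *         texcoord = attr;
 *         gl_Position = pos;
 *     }
 */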
92 static const uint32_t vertShaderCode[] = {
93 0x07230203,0x00010000,0x00080007,0x00000018,0x00000000,0x00020011,0x00000001,0x0006000b,
94 0x00000001,0x4c534c47,0x6474732e,0x3035342e,0x00000000,0x0003000e,0x00000000,0x00000001,
95 0x0009000f,0x00000000,0x00000004,0x6e69616d,0x00000000,0x00000009,0x0000000b,0x00000010,
96 0x00000014,0x00030003,0x00000002,0x00000190,0x00090004,0x415f4c47,0x735f4252,0x72617065,
97 0x5f657461,0x64616873,0x6f5f7265,0x63656a62,0x00007374,0x00090004,0x415f4c47,0x735f4252,
98 0x69646168,0x6c5f676e,0x75676e61,0x5f656761,0x70303234,0x006b6361,0x00040005,0x00000004,
99 0x6e69616d,0x00000000,0x00050005,0x00000009,0x63786574,0x64726f6f,0x00000000,0x00040005,
100 0x0000000b,0x72747461,0x00000000,0x00060005,0x0000000e,0x505f6c67,0x65567265,0x78657472,
101 0x00000000,0x00060006,0x0000000e,0x00000000,0x505f6c67,0x7469736f,0x006e6f69,0x00030005,
102 0x00000010,0x00000000,0x00030005,0x00000014,0x00736f70,0x00040047,0x00000009,0x0000001e,
103 0x00000000,0x00040047,0x0000000b,0x0000001e,0x00000001,0x00050048,0x0000000e,0x00000000,
104 0x0000000b,0x00000000,0x00030047,0x0000000e,0x00000002,0x00040047,0x00000014,0x0000001e,
105 0x00000000,0x00020013,0x00000002,0x00030021,0x00000003,0x00000002,0x00030016,0x00000006,
106 0x00000020,0x00040017,0x00000007,0x00000006,0x00000002,0x00040020,0x00000008,0x00000003,
107 0x00000007,0x0004003b,0x00000008,0x00000009,0x00000003,0x00040020,0x0000000a,0x00000001,
108 0x00000007,0x0004003b,0x0000000a,0x0000000b,0x00000001,0x00040017,0x0000000d,0x00000006,
109 0x00000004,0x0003001e,0x0000000e,0x0000000d,0x00040020,0x0000000f,0x00000003,0x0000000e,
110 0x0004003b,0x0000000f,0x00000010,0x00000003,0x00040015,0x00000011,0x00000020,0x00000001,
111 0x0004002b,0x00000011,0x00000012,0x00000000,0x00040020,0x00000013,0x00000001,0x0000000d,
112 0x0004003b,0x00000013,0x00000014,0x00000001,0x00040020,0x00000016,0x00000003,0x0000000d,
113 0x00050036,0x00000002,0x00000004,0x00000000,0x00000003,0x000200f8,0x00000005,0x0004003d,
114 0x00000007,0x0000000c,0x0000000b,0x0003003e,0x00000009,0x0000000c,0x0004003d,0x0000000d,
115 0x00000015,0x00000014,0x00050041,0x00000016,0x00000017,0x00000010,0x00000012,0x0003003e,
116 0x00000017,0x00000015,0x000100fd,0x00010038
117 };
118
119 struct texture_object {
120 VkSampler sampler;
121
122 VkImage image;
123 VkImageLayout imageLayout;
124
125 VkDeviceMemory mem;
126 VkImageView view;
127 int32_t tex_width, tex_height;
128 };
129
130 static int validation_error = 0;
131
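// Debug report callback that traps into the debugger (DebugBreak on Windows,
// SIGTRAP elsewhere) when the validation layers emit a message; presumably
// installed instead of dbgFunc when demo->use_break is set.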
132 VKAPI_ATTR VkBool32 VKAPI_CALL
133 BreakCallback(VkFlags msgFlags, VkDebugReportObjectTypeEXT objType,
134 uint64_t srcObject, size_t location, int32_t msgCode,
135 const char *pLayerPrefix, const char *pMsg,
136 void *pUserData) {
137 #ifdef _WIN32
138 DebugBreak();
139 #else
140 raise(SIGTRAP);
141 #endif
142
143 return false;
144 }
145
146 typedef struct {
147 VkImage image;
148 VkCommandBuffer cmd;
149 VkImageView view;
150 } SwapchainBuffers;
151
152 struct demo {
153 GLFWwindow* window;
154 VkSurfaceKHR surface;
155 bool use_staging_buffer;
156
157 VkInstance inst;
158 VkPhysicalDevice gpu;
159 VkDevice device;
160 VkQueue queue;
161 VkPhysicalDeviceProperties gpu_props;
162 VkPhysicalDeviceFeatures gpu_features;
163 VkQueueFamilyProperties *queue_props;
164 uint32_t graphics_queue_node_index;
165
166 uint32_t enabled_extension_count;
167 uint32_t enabled_layer_count;
168 const char *extension_names[64];
169 const char *enabled_layers[64];
170
171 int width, height;
172 VkFormat format;
173 VkColorSpaceKHR color_space;
174
175 uint32_t swapchainImageCount;
176 VkSwapchainKHR swapchain;
177 SwapchainBuffers *buffers;
178
179 VkCommandPool cmd_pool;
180
181 struct {
182 VkFormat format;
183
184 VkImage image;
185 VkDeviceMemory mem;
186 VkImageView view;
187 } depth;
188
189 struct texture_object textures[DEMO_TEXTURE_COUNT];
190
191 struct {
192 VkBuffer buf;
193 VkDeviceMemory mem;
194
195 VkPipelineVertexInputStateCreateInfo vi;
196 VkVertexInputBindingDescription vi_bindings[1];
197 VkVertexInputAttributeDescription vi_attrs[2];
198 } vertices;
199
200 VkCommandBuffer setup_cmd; // Command Buffer for initialization commands
201 VkCommandBuffer draw_cmd; // Command Buffer for drawing commands
202 VkPipelineLayout pipeline_layout;
203 VkDescriptorSetLayout desc_layout;
204 VkPipelineCache pipelineCache;
205 VkRenderPass render_pass;
206 VkPipeline pipeline;
207
208 VkShaderModule vert_shader_module;
209 VkShaderModule frag_shader_module;
210
211 VkDescriptorPool desc_pool;
212 VkDescriptorSet desc_set;
213
214 VkFramebuffer *framebuffers;
215
216 VkPhysicalDeviceMemoryProperties memory_properties;
217
218 int32_t curFrame;
219 int32_t frameCount;
220 bool validate;
221 bool use_break;
222 VkDebugReportCallbackEXT msg_callback;
223
224 float depthStencil;
225 float depthIncrement;
226
227 uint32_t current_buffer;
228 uint32_t queue_count;
229 };
230
231 VKAPI_ATTR VkBool32 VKAPI_CALL
232 dbgFunc(VkFlags msgFlags, VkDebugReportObjectTypeEXT objType,
233 uint64_t srcObject, size_t location, int32_t msgCode,
234 const char *pLayerPrefix, const char *pMsg, void *pUserData) {
235 char *message = (char *)malloc(strlen(pMsg) + 100);
236
237 assert(message);
238
239 validation_error = 1;
240
241 if (msgFlags & VK_DEBUG_REPORT_ERROR_BIT_EXT) {
242 sprintf(message, "ERROR: [%s] Code %d : %s", pLayerPrefix, msgCode,
243 pMsg);
244 } else if (msgFlags & VK_DEBUG_REPORT_WARNING_BIT_EXT) {
245 sprintf(message, "WARNING: [%s] Code %d : %s", pLayerPrefix, msgCode,
246 pMsg);
247 } else {
248 return false;
249 }
250
251 printf("%s\n", message);
252 fflush(stdout);
253 free(message);
254
255     /*
256      * Returning false tells the layer not to bail out of an API call
257      * that had validation failures. This may mean that the app dies
258      * inside the driver due to invalid parameter(s). That's what would
259      * happen without validation layers, so we'll keep that behavior
260      * here.
261      */
262 return false;
263 }
264
265 // Forward declaration:
266 static void demo_resize(struct demo *demo);
267
268 static bool memory_type_from_properties(struct demo *demo, uint32_t typeBits,
269 VkFlags requirements_mask,
270 uint32_t *typeIndex) {
271 uint32_t i;
272 // Search memtypes to find first index with those properties
273 for (i = 0; i < VK_MAX_MEMORY_TYPES; i++) {
274 if ((typeBits & 1) == 1) {
275 // Type is available, does it match user properties?
276 if ((demo->memory_properties.memoryTypes[i].propertyFlags &
277 requirements_mask) == requirements_mask) {
278 *typeIndex = i;
279 return true;
280 }
281 }
282 typeBits >>= 1;
283 }
284 // No memory types matched, return failure
285 return false;
286 }
287
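// End and submit the lazily created setup command buffer, wait for the queue
// to go idle, then free it so the next demo_set_image_layout() starts fresh.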
288 static void demo_flush_init_cmd(struct demo *demo) {
289 VkResult U_ASSERT_ONLY err;
290
291 if (demo->setup_cmd == VK_NULL_HANDLE)
292 return;
293
294 err = vkEndCommandBuffer(demo->setup_cmd);
295 assert(!err);
296
297 const VkCommandBuffer cmd_bufs[] = {demo->setup_cmd};
298 VkFence nullFence = {VK_NULL_HANDLE};
299 VkSubmitInfo submit_info = {.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
300 .pNext = NULL,
301 .waitSemaphoreCount = 0,
302 .pWaitSemaphores = NULL,
303 .pWaitDstStageMask = NULL,
304 .commandBufferCount = 1,
305 .pCommandBuffers = cmd_bufs,
306 .signalSemaphoreCount = 0,
307 .pSignalSemaphores = NULL};
308
309 err = vkQueueSubmit(demo->queue, 1, &submit_info, nullFence);
310 assert(!err);
311
312 err = vkQueueWaitIdle(demo->queue);
313 assert(!err);
314
315 vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, cmd_bufs);
316 demo->setup_cmd = VK_NULL_HANDLE;
317 }
318
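// Record an image layout transition as a pipeline barrier into the setup
// command buffer, allocating and beginning that buffer on first use. The
// destination access mask is chosen from the new layout.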
319 static void demo_set_image_layout(struct demo *demo, VkImage image,
320 VkImageAspectFlags aspectMask,
321 VkImageLayout old_image_layout,
322 VkImageLayout new_image_layout,
323 VkAccessFlagBits srcAccessMask) {
324
325 VkResult U_ASSERT_ONLY err;
326
327 if (demo->setup_cmd == VK_NULL_HANDLE) {
328 const VkCommandBufferAllocateInfo cmd = {
329 .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
330 .pNext = NULL,
331 .commandPool = demo->cmd_pool,
332 .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
333 .commandBufferCount = 1,
334 };
335
336 err = vkAllocateCommandBuffers(demo->device, &cmd, &demo->setup_cmd);
337 assert(!err);
338
339 VkCommandBufferBeginInfo cmd_buf_info = {
340 .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
341 .pNext = NULL,
342 .flags = 0,
343 .pInheritanceInfo = NULL,
344 };
345 err = vkBeginCommandBuffer(demo->setup_cmd, &cmd_buf_info);
346 assert(!err);
347 }
348
349 VkImageMemoryBarrier image_memory_barrier = {
350 .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
351 .pNext = NULL,
352 .srcAccessMask = srcAccessMask,
353 .dstAccessMask = 0,
354 .oldLayout = old_image_layout,
355 .newLayout = new_image_layout,
356 .image = image,
357 .subresourceRange = {aspectMask, 0, 1, 0, 1}};
358
359 if (new_image_layout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
360 /* Make sure anything that was copying from this image has completed */
361 image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
362 }
363
364 if (new_image_layout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
365 image_memory_barrier.dstAccessMask =
366 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
367 }
368
369 if (new_image_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL) {
370 image_memory_barrier.dstAccessMask =
371 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
372 }
373
374 if (new_image_layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
375 /* Make sure any Copy or CPU writes to image are flushed */
376 image_memory_barrier.dstAccessMask =
377 VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
378 }
379
380 VkImageMemoryBarrier *pmemory_barrier = &image_memory_barrier;
381
382 VkPipelineStageFlags src_stages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
383 VkPipelineStageFlags dest_stages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
384
385 vkCmdPipelineBarrier(demo->setup_cmd, src_stages, dest_stages, 0, 0, NULL,
386 0, NULL, 1, pmemory_barrier);
387 }
388
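// Record the per-frame command buffer: transition the acquired swapchain
// image to COLOR_ATTACHMENT_OPTIMAL, draw the triangle inside the render
// pass, then transition the image to PRESENT_SRC_KHR for presentation.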
389 static void demo_draw_build_cmd(struct demo *demo) {
390 const VkCommandBufferBeginInfo cmd_buf_info = {
391 .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
392 .pNext = NULL,
393 .flags = 0,
394 .pInheritanceInfo = NULL,
395 };
396 const VkClearValue clear_values[2] = {
397 [0] = {.color.float32 = {0.2f, 0.2f, 0.2f, 0.2f}},
398 [1] = {.depthStencil = {demo->depthStencil, 0}},
399 };
400 const VkRenderPassBeginInfo rp_begin = {
401 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
402 .pNext = NULL,
403 .renderPass = demo->render_pass,
404 .framebuffer = demo->framebuffers[demo->current_buffer],
405 .renderArea.offset.x = 0,
406 .renderArea.offset.y = 0,
407 .renderArea.extent.width = demo->width,
408 .renderArea.extent.height = demo->height,
409 .clearValueCount = 2,
410 .pClearValues = clear_values,
411 };
412 VkResult U_ASSERT_ONLY err;
413
414 err = vkBeginCommandBuffer(demo->draw_cmd, &cmd_buf_info);
415 assert(!err);
416
417 // We can use LAYOUT_UNDEFINED as a wildcard here because we don't care what
418 // happens to the previous contents of the image
419 VkImageMemoryBarrier image_memory_barrier = {
420 .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
421 .pNext = NULL,
422 .srcAccessMask = 0,
423 .dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
424 .oldLayout = VK_IMAGE_LAYOUT_UNDEFINED,
425 .newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
426 .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
427 .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
428 .image = demo->buffers[demo->current_buffer].image,
429 .subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}};
430
431 vkCmdPipelineBarrier(demo->draw_cmd, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
432 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, NULL, 0,
433 NULL, 1, &image_memory_barrier);
434 vkCmdBeginRenderPass(demo->draw_cmd, &rp_begin, VK_SUBPASS_CONTENTS_INLINE);
435 vkCmdBindPipeline(demo->draw_cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
436 demo->pipeline);
437 vkCmdBindDescriptorSets(demo->draw_cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
438 demo->pipeline_layout, 0, 1, &demo->desc_set, 0,
439 NULL);
440
441 VkViewport viewport;
442 memset(&viewport, 0, sizeof(viewport));
443 viewport.height = (float)demo->height;
444 viewport.width = (float)demo->width;
445 viewport.minDepth = (float)0.0f;
446 viewport.maxDepth = (float)1.0f;
447 vkCmdSetViewport(demo->draw_cmd, 0, 1, &viewport);
448
449 VkRect2D scissor;
450 memset(&scissor, 0, sizeof(scissor));
451 scissor.extent.width = demo->width;
452 scissor.extent.height = demo->height;
453 scissor.offset.x = 0;
454 scissor.offset.y = 0;
455 vkCmdSetScissor(demo->draw_cmd, 0, 1, &scissor);
456
457 VkDeviceSize offsets[1] = {0};
458 vkCmdBindVertexBuffers(demo->draw_cmd, VERTEX_BUFFER_BIND_ID, 1,
459 &demo->vertices.buf, offsets);
460
461 vkCmdDraw(demo->draw_cmd, 3, 1, 0, 0);
462 vkCmdEndRenderPass(demo->draw_cmd);
463
464 VkImageMemoryBarrier prePresentBarrier = {
465 .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
466 .pNext = NULL,
467 .srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
468 .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT,
469 .oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
470 .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
471 .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
472 .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
473 .subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}};
474
475 prePresentBarrier.image = demo->buffers[demo->current_buffer].image;
476 VkImageMemoryBarrier *pmemory_barrier = &prePresentBarrier;
477 vkCmdPipelineBarrier(demo->draw_cmd, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
478 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, NULL, 0,
479 NULL, 1, pmemory_barrier);
480
481 err = vkEndCommandBuffer(demo->draw_cmd);
482 assert(!err);
483 }
484
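// Acquire the next swapchain image, submit the draw command buffer (waiting
// on the image-acquired semaphore and signaling the draw-complete semaphore),
// and present the result; the swapchain is recreated on OUT_OF_DATE errors.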
485 static void demo_draw(struct demo *demo) {
486 VkResult U_ASSERT_ONLY err;
487 VkSemaphore imageAcquiredSemaphore, drawCompleteSemaphore;
488 VkSemaphoreCreateInfo semaphoreCreateInfo = {
489 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
490 .pNext = NULL,
491 .flags = 0,
492 };
493
494 err = vkCreateSemaphore(demo->device, &semaphoreCreateInfo,
495 NULL, &imageAcquiredSemaphore);
496 assert(!err);
497
498 err = vkCreateSemaphore(demo->device, &semaphoreCreateInfo,
499 NULL, &drawCompleteSemaphore);
500 assert(!err);
501
502 // Get the index of the next available swapchain image:
503 err = vkAcquireNextImageKHR(demo->device, demo->swapchain, UINT64_MAX,
504 imageAcquiredSemaphore,
505 (VkFence)0, // TODO: Show use of fence
506 &demo->current_buffer);
507 if (err == VK_ERROR_OUT_OF_DATE_KHR) {
508 // demo->swapchain is out of date (e.g. the window was resized) and
509 // must be recreated:
510 demo_resize(demo);
511 demo_draw(demo);
512 vkDestroySemaphore(demo->device, imageAcquiredSemaphore, NULL);
513 vkDestroySemaphore(demo->device, drawCompleteSemaphore, NULL);
514 return;
515 } else if (err == VK_SUBOPTIMAL_KHR) {
516 // demo->swapchain is not as optimal as it could be, but the platform's
517 // presentation engine will still present the image correctly.
518 } else {
519 assert(!err);
520 }
521
522 demo_flush_init_cmd(demo);
523
524 // Wait for the present complete semaphore to be signaled to ensure
525 // that the image won't be rendered to until the presentation
526 // engine has fully released ownership to the application, and it is
527 // okay to render to the image.
528
529 demo_draw_build_cmd(demo);
530 VkFence nullFence = VK_NULL_HANDLE;
531 VkPipelineStageFlags pipe_stage_flags =
532 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
533 VkSubmitInfo submit_info = {.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
534 .pNext = NULL,
535 .waitSemaphoreCount = 1,
536 .pWaitSemaphores = &imageAcquiredSemaphore,
537 .pWaitDstStageMask = &pipe_stage_flags,
538 .commandBufferCount = 1,
539 .pCommandBuffers = &demo->draw_cmd,
540 .signalSemaphoreCount = 1,
541 .pSignalSemaphores = &drawCompleteSemaphore};
542
543 err = vkQueueSubmit(demo->queue, 1, &submit_info, nullFence);
544 assert(!err);
545
546 VkPresentInfoKHR present = {
547 .sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
548 .pNext = NULL,
549 .waitSemaphoreCount = 1,
550 .pWaitSemaphores = &drawCompleteSemaphore,
551 .swapchainCount = 1,
552 .pSwapchains = &demo->swapchain,
553 .pImageIndices = &demo->current_buffer,
554 };
555
556 err = vkQueuePresentKHR(demo->queue, &present);
557 if (err == VK_ERROR_OUT_OF_DATE_KHR) {
558 // demo->swapchain is out of date (e.g. the window was resized) and
559 // must be recreated:
560 demo_resize(demo);
561 } else if (err == VK_SUBOPTIMAL_KHR) {
562 // demo->swapchain is not as optimal as it could be, but the platform's
563 // presentation engine will still present the image correctly.
564 } else {
565 assert(!err);
566 }
567
568 err = vkQueueWaitIdle(demo->queue);
569 assert(err == VK_SUCCESS);
570
571 vkDestroySemaphore(demo->device, imageAcquiredSemaphore, NULL);
572 vkDestroySemaphore(demo->device, drawCompleteSemaphore, NULL);
573 }
574
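// (Re)create the swapchain from the surface capabilities and formats,
// destroying any previous swapchain, and create an image view for each
// swapchain image. FIFO is used as the present mode.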
575 static void demo_prepare_buffers(struct demo *demo) {
576 VkResult U_ASSERT_ONLY err;
577 VkSwapchainKHR oldSwapchain = demo->swapchain;
578
579 // Check the surface capabilities and formats
580 VkSurfaceCapabilitiesKHR surfCapabilities;
581 err = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
582 demo->gpu, demo->surface, &surfCapabilities);
583 assert(!err);
584
585 uint32_t presentModeCount;
586 err = vkGetPhysicalDeviceSurfacePresentModesKHR(
587 demo->gpu, demo->surface, &presentModeCount, NULL);
588 assert(!err);
589 VkPresentModeKHR *presentModes =
590 (VkPresentModeKHR *)malloc(presentModeCount * sizeof(VkPresentModeKHR));
591 assert(presentModes);
592 err = vkGetPhysicalDeviceSurfacePresentModesKHR(
593 demo->gpu, demo->surface, &presentModeCount, presentModes);
594 assert(!err);
595
596 VkExtent2D swapchainExtent;
597 // width and height are either both 0xFFFFFFFF, or both not 0xFFFFFFFF.
598 if (surfCapabilities.currentExtent.width == 0xFFFFFFFF) {
599 // If the surface size is undefined, the size is set to the size
600 // of the images requested, which must fit within the minimum and
601 // maximum values.
602 swapchainExtent.width = demo->width;
603 swapchainExtent.height = demo->height;
604
605 if (swapchainExtent.width < surfCapabilities.minImageExtent.width) {
606 swapchainExtent.width = surfCapabilities.minImageExtent.width;
607 } else if (swapchainExtent.width > surfCapabilities.maxImageExtent.width) {
608 swapchainExtent.width = surfCapabilities.maxImageExtent.width;
609 }
610
611 if (swapchainExtent.height < surfCapabilities.minImageExtent.height) {
612 swapchainExtent.height = surfCapabilities.minImageExtent.height;
613 } else if (swapchainExtent.height > surfCapabilities.maxImageExtent.height) {
614 swapchainExtent.height = surfCapabilities.maxImageExtent.height;
615 }
616 } else {
617 // If the surface size is defined, the swap chain size must match
618 swapchainExtent = surfCapabilities.currentExtent;
619 demo->width = surfCapabilities.currentExtent.width;
620 demo->height = surfCapabilities.currentExtent.height;
621 }
622
623 VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;
624
625 // Determine the number of VkImage's to use in the swap chain.
626 // Application desires to only acquire 1 image at a time (which is
627 // "surfCapabilities.minImageCount").
628 uint32_t desiredNumOfSwapchainImages = surfCapabilities.minImageCount;
629 // If maxImageCount is 0, we can ask for as many images as we want;
630 // otherwise we're limited to maxImageCount
631 if ((surfCapabilities.maxImageCount > 0) &&
632 (desiredNumOfSwapchainImages > surfCapabilities.maxImageCount)) {
633 // Application must settle for fewer images than desired:
634 desiredNumOfSwapchainImages = surfCapabilities.maxImageCount;
635 }
636
637 VkSurfaceTransformFlagsKHR preTransform;
638 if (surfCapabilities.supportedTransforms &
639 VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) {
640 preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
641 } else {
642 preTransform = surfCapabilities.currentTransform;
643 }
644
645 const VkSwapchainCreateInfoKHR swapchain = {
646 .sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
647 .pNext = NULL,
648 .surface = demo->surface,
649 .minImageCount = desiredNumOfSwapchainImages,
650 .imageFormat = demo->format,
651 .imageColorSpace = demo->color_space,
652 .imageExtent =
653 {
654 .width = swapchainExtent.width, .height = swapchainExtent.height,
655 },
656 .imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
657 .preTransform = preTransform,
658 .compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
659 .imageArrayLayers = 1,
660 .imageSharingMode = VK_SHARING_MODE_EXCLUSIVE,
661 .queueFamilyIndexCount = 0,
662 .pQueueFamilyIndices = NULL,
663 .presentMode = swapchainPresentMode,
664 .oldSwapchain = oldSwapchain,
665 .clipped = true,
666 };
667 uint32_t i;
668
669 err = vkCreateSwapchainKHR(demo->device, &swapchain, NULL, &demo->swapchain);
670 assert(!err);
671
672 // If we just re-created an existing swapchain, we should destroy the old
673 // swapchain at this point.
674 // Note: destroying the swapchain also cleans up all its associated
675 // presentable images once the platform is done with them.
676 if (oldSwapchain != VK_NULL_HANDLE) {
677 vkDestroySwapchainKHR(demo->device, oldSwapchain, NULL);
678 }
679
680 err = vkGetSwapchainImagesKHR(demo->device, demo->swapchain,
681 &demo->swapchainImageCount, NULL);
682 assert(!err);
683
684 VkImage *swapchainImages =
685 (VkImage *)malloc(demo->swapchainImageCount * sizeof(VkImage));
686 assert(swapchainImages);
687 err = vkGetSwapchainImagesKHR(demo->device, demo->swapchain,
688 &demo->swapchainImageCount,
689 swapchainImages);
690 assert(!err);
691
692 demo->buffers = (SwapchainBuffers *)malloc(sizeof(SwapchainBuffers) *
693 demo->swapchainImageCount);
694 assert(demo->buffers);
695
696 for (i = 0; i < demo->swapchainImageCount; i++) {
697 VkImageViewCreateInfo color_attachment_view = {
698 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
699 .pNext = NULL,
700 .format = demo->format,
701 .components =
702 {
703 .r = VK_COMPONENT_SWIZZLE_R,
704 .g = VK_COMPONENT_SWIZZLE_G,
705 .b = VK_COMPONENT_SWIZZLE_B,
706 .a = VK_COMPONENT_SWIZZLE_A,
707 },
708 .subresourceRange = {.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
709 .baseMipLevel = 0,
710 .levelCount = 1,
711 .baseArrayLayer = 0,
712 .layerCount = 1},
713 .viewType = VK_IMAGE_VIEW_TYPE_2D,
714 .flags = 0,
715 };
716
717 demo->buffers[i].image = swapchainImages[i];
718
719 color_attachment_view.image = demo->buffers[i].image;
720
721 err = vkCreateImageView(demo->device, &color_attachment_view, NULL,
722 &demo->buffers[i].view);
723 assert(!err);
724 }
725
726 demo->current_buffer = 0;
727
728 if (NULL != presentModes) {
729 free(presentModes);
730 }
731 }
732
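// Create the D16_UNORM depth image, allocate and bind its memory, transition
// it to DEPTH_STENCIL_ATTACHMENT_OPTIMAL, and create its image view.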
733 static void demo_prepare_depth(struct demo *demo) {
734 const VkFormat depth_format = VK_FORMAT_D16_UNORM;
735 const VkImageCreateInfo image = {
736 .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
737 .pNext = NULL,
738 .imageType = VK_IMAGE_TYPE_2D,
739 .format = depth_format,
740 .extent = {demo->width, demo->height, 1},
741 .mipLevels = 1,
742 .arrayLayers = 1,
743 .samples = VK_SAMPLE_COUNT_1_BIT,
744 .tiling = VK_IMAGE_TILING_OPTIMAL,
745 .usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
746 .flags = 0,
747 };
748 VkMemoryAllocateInfo mem_alloc = {
749 .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
750 .pNext = NULL,
751 .allocationSize = 0,
752 .memoryTypeIndex = 0,
753 };
754 VkImageViewCreateInfo view = {
755 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
756 .pNext = NULL,
757 .image = VK_NULL_HANDLE,
758 .format = depth_format,
759 .subresourceRange = {.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT,
760 .baseMipLevel = 0,
761 .levelCount = 1,
762 .baseArrayLayer = 0,
763 .layerCount = 1},
764 .flags = 0,
765 .viewType = VK_IMAGE_VIEW_TYPE_2D,
766 };
767
768 VkMemoryRequirements mem_reqs;
769 VkResult U_ASSERT_ONLY err;
770 bool U_ASSERT_ONLY pass;
771
772 demo->depth.format = depth_format;
773
774 /* create image */
775 err = vkCreateImage(demo->device, &image, NULL, &demo->depth.image);
776 assert(!err);
777
778 /* get memory requirements for this object */
779 vkGetImageMemoryRequirements(demo->device, demo->depth.image, &mem_reqs);
780
781 /* select memory size and type */
782 mem_alloc.allocationSize = mem_reqs.size;
783 pass = memory_type_from_properties(demo, mem_reqs.memoryTypeBits,
784 0, /* No requirements */
785 &mem_alloc.memoryTypeIndex);
786 assert(pass);
787
788 /* allocate memory */
789 err = vkAllocateMemory(demo->device, &mem_alloc, NULL, &demo->depth.mem);
790 assert(!err);
791
792 /* bind memory */
793 err =
794 vkBindImageMemory(demo->device, demo->depth.image, demo->depth.mem, 0);
795 assert(!err);
796
797 demo_set_image_layout(demo, demo->depth.image, VK_IMAGE_ASPECT_DEPTH_BIT,
798 VK_IMAGE_LAYOUT_UNDEFINED,
799 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
800 0);
801
802 /* create image view */
803 view.image = demo->depth.image;
804 err = vkCreateImageView(demo->device, &view, NULL, &demo->depth.view);
805 assert(!err);
806 }
807
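// Create a 2x2 B8G8R8A8 texture image with the requested tiling, usage and
// memory properties; when the memory is host-visible, fill it with a
// checkerboard of the two colors passed in tex_colors.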
808 static void
809 demo_prepare_texture_image(struct demo *demo, const uint32_t *tex_colors,
810 struct texture_object *tex_obj, VkImageTiling tiling,
811 VkImageUsageFlags usage, VkFlags required_props) {
812 const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
813 const int32_t tex_width = 2;
814 const int32_t tex_height = 2;
815 VkResult U_ASSERT_ONLY err;
816 bool U_ASSERT_ONLY pass;
817
818 tex_obj->tex_width = tex_width;
819 tex_obj->tex_height = tex_height;
820
821 const VkImageCreateInfo image_create_info = {
822 .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
823 .pNext = NULL,
824 .imageType = VK_IMAGE_TYPE_2D,
825 .format = tex_format,
826 .extent = {tex_width, tex_height, 1},
827 .mipLevels = 1,
828 .arrayLayers = 1,
829 .samples = VK_SAMPLE_COUNT_1_BIT,
830 .tiling = tiling,
831 .usage = usage,
832 .flags = 0,
833 .initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED
834 };
835 VkMemoryAllocateInfo mem_alloc = {
836 .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
837 .pNext = NULL,
838 .allocationSize = 0,
839 .memoryTypeIndex = 0,
840 };
841
842 VkMemoryRequirements mem_reqs;
843
844 err =
845 vkCreateImage(demo->device, &image_create_info, NULL, &tex_obj->image);
846 assert(!err);
847
848 vkGetImageMemoryRequirements(demo->device, tex_obj->image, &mem_reqs);
849
850 mem_alloc.allocationSize = mem_reqs.size;
851 pass =
852 memory_type_from_properties(demo, mem_reqs.memoryTypeBits,
853 required_props, &mem_alloc.memoryTypeIndex);
854 assert(pass);
855
856 /* allocate memory */
857 err = vkAllocateMemory(demo->device, &mem_alloc, NULL, &tex_obj->mem);
858 assert(!err);
859
860 /* bind memory */
861 err = vkBindImageMemory(demo->device, tex_obj->image, tex_obj->mem, 0);
862 assert(!err);
863
864 if (required_props & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
865 const VkImageSubresource subres = {
866 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
867 .mipLevel = 0,
868 .arrayLayer = 0,
869 };
870 VkSubresourceLayout layout;
871 void *data;
872 int32_t x, y;
873
874 vkGetImageSubresourceLayout(demo->device, tex_obj->image, &subres,
875 &layout);
876
877 err = vkMapMemory(demo->device, tex_obj->mem, 0,
878 mem_alloc.allocationSize, 0, &data);
879 assert(!err);
880
881 for (y = 0; y < tex_height; y++) {
882 uint32_t *row = (uint32_t *)((char *)data + layout.rowPitch * y);
883 for (x = 0; x < tex_width; x++)
884 row[x] = tex_colors[(x & 1) ^ (y & 1)];
885 }
886
887 vkUnmapMemory(demo->device, tex_obj->mem);
888 }
889
890 tex_obj->imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
891 demo_set_image_layout(demo, tex_obj->image, VK_IMAGE_ASPECT_COLOR_BIT,
892 VK_IMAGE_LAYOUT_PREINITIALIZED, tex_obj->imageLayout,
893 VK_ACCESS_HOST_WRITE_BIT);
894 /* setting the image layout does not reference the actual memory so no need
895 * to add a mem ref */
896 }
897
898 static void demo_destroy_texture_image(struct demo *demo,
899 struct texture_object *tex_obj) {
900 /* clean up staging resources */
901 vkDestroyImage(demo->device, tex_obj->image, NULL);
902 vkFreeMemory(demo->device, tex_obj->mem, NULL);
903 }
904
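// Create the demo texture(s): sample directly from a linear, host-visible
// image when the format supports it and staging is not requested, otherwise
// stage through a linear image and copy into an optimally tiled one. A
// sampler and an image view are created either way.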
905 static void demo_prepare_textures(struct demo *demo) {
906 const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
907 VkFormatProperties props;
908 const uint32_t tex_colors[DEMO_TEXTURE_COUNT][2] = {
909 {0xffff0000, 0xff00ff00},
910 };
911 uint32_t i;
912 VkResult U_ASSERT_ONLY err;
913
914 vkGetPhysicalDeviceFormatProperties(demo->gpu, tex_format, &props);
915
916 for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
917 if ((props.linearTilingFeatures &
918 VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) &&
919 !demo->use_staging_buffer) {
920 /* Device can texture using linear textures */
921 demo_prepare_texture_image(
922 demo, tex_colors[i], &demo->textures[i], VK_IMAGE_TILING_LINEAR,
923 VK_IMAGE_USAGE_SAMPLED_BIT,
924 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
925 VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
926 } else if (props.optimalTilingFeatures &
927 VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) {
928 /* Must use staging buffer to copy linear texture to optimized */
929 struct texture_object staging_texture;
930
931 memset(&staging_texture, 0, sizeof(staging_texture));
932 demo_prepare_texture_image(
933 demo, tex_colors[i], &staging_texture, VK_IMAGE_TILING_LINEAR,
934 VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
935 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
936 VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
937
938 demo_prepare_texture_image(
939 demo, tex_colors[i], &demo->textures[i],
940 VK_IMAGE_TILING_OPTIMAL,
941 (VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),
942 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
943
944 demo_set_image_layout(demo, staging_texture.image,
945 VK_IMAGE_ASPECT_COLOR_BIT,
946 staging_texture.imageLayout,
947 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
948 0);
949
950 demo_set_image_layout(demo, demo->textures[i].image,
951 VK_IMAGE_ASPECT_COLOR_BIT,
952 demo->textures[i].imageLayout,
953 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
954 0);
955
956 VkImageCopy copy_region = {
957 .srcSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1},
958 .srcOffset = {0, 0, 0},
959 .dstSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1},
960 .dstOffset = {0, 0, 0},
961 .extent = {staging_texture.tex_width,
962 staging_texture.tex_height, 1},
963 };
964 vkCmdCopyImage(
965 demo->setup_cmd, staging_texture.image,
966 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, demo->textures[i].image,
967             VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy_region);
968
969 demo_set_image_layout(demo, demo->textures[i].image,
970 VK_IMAGE_ASPECT_COLOR_BIT,
971 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
972 demo->textures[i].imageLayout,
973 0);
974
975 demo_flush_init_cmd(demo);
976
977 demo_destroy_texture_image(demo, &staging_texture);
978 } else {
979 /* Can't support VK_FORMAT_B8G8R8A8_UNORM !? */
980 assert(!"No support for B8G8R8A8_UNORM as texture image format");
981 }
982
983 const VkSamplerCreateInfo sampler = {
984 .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
985 .pNext = NULL,
986 .magFilter = VK_FILTER_NEAREST,
987 .minFilter = VK_FILTER_NEAREST,
988 .mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST,
989 .addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT,
990 .addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT,
991 .addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT,
992 .mipLodBias = 0.0f,
993 .anisotropyEnable = VK_FALSE,
994 .maxAnisotropy = 1,
995 .compareOp = VK_COMPARE_OP_NEVER,
996 .minLod = 0.0f,
997 .maxLod = 0.0f,
998 .borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
999 .unnormalizedCoordinates = VK_FALSE,
1000 };
1001 VkImageViewCreateInfo view = {
1002 .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
1003 .pNext = NULL,
1004 .image = VK_NULL_HANDLE,
1005 .viewType = VK_IMAGE_VIEW_TYPE_2D,
1006 .format = tex_format,
1007 .components =
1008 {
1009 VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G,
1010 VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A,
1011 },
1012 .subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
1013 .flags = 0,
1014 };
1015
1016 /* create sampler */
1017 err = vkCreateSampler(demo->device, &sampler, NULL,
1018 &demo->textures[i].sampler);
1019 assert(!err);
1020
1021 /* create image view */
1022 view.image = demo->textures[i].image;
1023 err = vkCreateImageView(demo->device, &view, NULL,
1024 &demo->textures[i].view);
1025 assert(!err);
1026 }
1027 }
1028
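// Create and fill the vertex buffer for a single triangle (x, y, z position
// plus u, v texcoord per vertex) through a host-visible mapping, and describe
// the binding and the two vertex attributes used by the pipeline.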
1029 static void demo_prepare_vertices(struct demo *demo) {
1030 // clang-format off
1031 const float vb[3][5] = {
1032 /* position texcoord */
1033 { -1.0f, -1.0f, 0.25f, 0.0f, 0.0f },
1034 { 1.0f, -1.0f, 0.25f, 1.0f, 0.0f },
1035 { 0.0f, 1.0f, 1.0f, 0.5f, 1.0f },
1036 };
1037 // clang-format on
1038 const VkBufferCreateInfo buf_info = {
1039 .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
1040 .pNext = NULL,
1041 .size = sizeof(vb),
1042 .usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
1043 .flags = 0,
1044 };
1045 VkMemoryAllocateInfo mem_alloc = {
1046 .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
1047 .pNext = NULL,
1048 .allocationSize = 0,
1049 .memoryTypeIndex = 0,
1050 };
1051 VkMemoryRequirements mem_reqs;
1052 VkResult U_ASSERT_ONLY err;
1053 bool U_ASSERT_ONLY pass;
1054 void *data;
1055
1056 memset(&demo->vertices, 0, sizeof(demo->vertices));
1057
1058 err = vkCreateBuffer(demo->device, &buf_info, NULL, &demo->vertices.buf);
1059 assert(!err);
1060
1061 vkGetBufferMemoryRequirements(demo->device, demo->vertices.buf, &mem_reqs);
1062 assert(!err);
1063
1064 mem_alloc.allocationSize = mem_reqs.size;
1065 pass = memory_type_from_properties(demo, mem_reqs.memoryTypeBits,
1066 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
1067 VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
1068 &mem_alloc.memoryTypeIndex);
1069 assert(pass);
1070
1071 err = vkAllocateMemory(demo->device, &mem_alloc, NULL, &demo->vertices.mem);
1072 assert(!err);
1073
1074 err = vkMapMemory(demo->device, demo->vertices.mem, 0,
1075 mem_alloc.allocationSize, 0, &data);
1076 assert(!err);
1077
1078 memcpy(data, vb, sizeof(vb));
1079
1080 vkUnmapMemory(demo->device, demo->vertices.mem);
1081
1082 err = vkBindBufferMemory(demo->device, demo->vertices.buf,
1083 demo->vertices.mem, 0);
1084 assert(!err);
1085
1086 demo->vertices.vi.sType =
1087 VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
1088 demo->vertices.vi.pNext = NULL;
1089 demo->vertices.vi.vertexBindingDescriptionCount = 1;
1090 demo->vertices.vi.pVertexBindingDescriptions = demo->vertices.vi_bindings;
1091 demo->vertices.vi.vertexAttributeDescriptionCount = 2;
1092 demo->vertices.vi.pVertexAttributeDescriptions = demo->vertices.vi_attrs;
1093
1094 demo->vertices.vi_bindings[0].binding = VERTEX_BUFFER_BIND_ID;
1095 demo->vertices.vi_bindings[0].stride = sizeof(vb[0]);
1096 demo->vertices.vi_bindings[0].inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
1097
1098 demo->vertices.vi_attrs[0].binding = VERTEX_BUFFER_BIND_ID;
1099 demo->vertices.vi_attrs[0].location = 0;
1100 demo->vertices.vi_attrs[0].format = VK_FORMAT_R32G32B32_SFLOAT;
1101 demo->vertices.vi_attrs[0].offset = 0;
1102
1103 demo->vertices.vi_attrs[1].binding = VERTEX_BUFFER_BIND_ID;
1104 demo->vertices.vi_attrs[1].location = 1;
1105 demo->vertices.vi_attrs[1].format = VK_FORMAT_R32G32_SFLOAT;
1106 demo->vertices.vi_attrs[1].offset = sizeof(float) * 3;
1107 }
1108
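// Create the descriptor set layout (a single combined image sampler visible
// to the fragment stage) and the pipeline layout built on top of it.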
1109 static void demo_prepare_descriptor_layout(struct demo *demo) {
1110 const VkDescriptorSetLayoutBinding layout_binding = {
1111 .binding = 0,
1112 .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
1113 .descriptorCount = DEMO_TEXTURE_COUNT,
1114 .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT,
1115 .pImmutableSamplers = NULL,
1116 };
1117 const VkDescriptorSetLayoutCreateInfo descriptor_layout = {
1118 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
1119 .pNext = NULL,
1120 .bindingCount = 1,
1121 .pBindings = &layout_binding,
1122 };
1123 VkResult U_ASSERT_ONLY err;
1124
1125 err = vkCreateDescriptorSetLayout(demo->device, &descriptor_layout, NULL,
1126 &demo->desc_layout);
1127 assert(!err);
1128
1129 const VkPipelineLayoutCreateInfo pPipelineLayoutCreateInfo = {
1130 .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
1131 .pNext = NULL,
1132 .setLayoutCount = 1,
1133 .pSetLayouts = &demo->desc_layout,
1134 };
1135
1136 err = vkCreatePipelineLayout(demo->device, &pPipelineLayoutCreateInfo, NULL,
1137 &demo->pipeline_layout);
1138 assert(!err);
1139 }
1140
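// Create a render pass with one color attachment (cleared, then stored) and
// one depth attachment (cleared, then discarded) in a single subpass.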
1141 static void demo_prepare_render_pass(struct demo *demo) {
1142 const VkAttachmentDescription attachments[2] = {
1143 [0] =
1144 {
1145 .format = demo->format,
1146 .samples = VK_SAMPLE_COUNT_1_BIT,
1147 .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
1148 .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
1149 .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
1150 .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
1151 .initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
1152 .finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
1153 },
1154 [1] =
1155 {
1156 .format = demo->depth.format,
1157 .samples = VK_SAMPLE_COUNT_1_BIT,
1158 .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
1159 .storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
1160 .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
1161 .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
1162 .initialLayout =
1163 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
1164 .finalLayout =
1165 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
1166 },
1167 };
1168 const VkAttachmentReference color_reference = {
1169 .attachment = 0, .layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
1170 };
1171 const VkAttachmentReference depth_reference = {
1172 .attachment = 1,
1173 .layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
1174 };
1175 const VkSubpassDescription subpass = {
1176 .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
1177 .flags = 0,
1178 .inputAttachmentCount = 0,
1179 .pInputAttachments = NULL,
1180 .colorAttachmentCount = 1,
1181 .pColorAttachments = &color_reference,
1182 .pResolveAttachments = NULL,
1183 .pDepthStencilAttachment = &depth_reference,
1184 .preserveAttachmentCount = 0,
1185 .pPreserveAttachments = NULL,
1186 };
1187 const VkRenderPassCreateInfo rp_info = {
1188 .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
1189 .pNext = NULL,
1190 .attachmentCount = 2,
1191 .pAttachments = attachments,
1192 .subpassCount = 1,
1193 .pSubpasses = &subpass,
1194 .dependencyCount = 0,
1195 .pDependencies = NULL,
1196 };
1197 VkResult U_ASSERT_ONLY err;
1198
1199 err = vkCreateRenderPass(demo->device, &rp_info, NULL, &demo->render_pass);
1200 assert(!err);
1201 }
1202
1203 static VkShaderModule
1204 demo_prepare_shader_module(struct demo *demo, const void *code, size_t size) {
1205 VkShaderModuleCreateInfo moduleCreateInfo;
1206 VkShaderModule module;
1207 VkResult U_ASSERT_ONLY err;
1208
1209 moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
1210 moduleCreateInfo.pNext = NULL;
1211
1212 moduleCreateInfo.codeSize = size;
1213 moduleCreateInfo.pCode = code;
1214 moduleCreateInfo.flags = 0;
1215 err = vkCreateShaderModule(demo->device, &moduleCreateInfo, NULL, &module);
1216 assert(!err);
1217
1218 return module;
1219 }
1220
1221 static VkShaderModule demo_prepare_vs(struct demo *demo) {
1222 size_t size = sizeof(vertShaderCode);
1223
1224 demo->vert_shader_module =
1225 demo_prepare_shader_module(demo, vertShaderCode, size);
1226
1227 return demo->vert_shader_module;
1228 }
1229
1230 static VkShaderModule demo_prepare_fs(struct demo *demo) {
1231 size_t size = sizeof(fragShaderCode);
1232
1233 demo->frag_shader_module =
1234 demo_prepare_shader_module(demo, fragShaderCode, size);
1235
1236 return demo->frag_shader_module;
1237 }
1238
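// Build the graphics pipeline: triangle list, back-face culling, depth test
// and write enabled, no blending, dynamic viewport/scissor, and the vertex
// and fragment stages prepared above. The shader modules and the temporary
// pipeline cache are destroyed once the pipeline exists.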
1239 static void demo_prepare_pipeline(struct demo *demo) {
1240 VkGraphicsPipelineCreateInfo pipeline;
1241 VkPipelineCacheCreateInfo pipelineCache;
1242
1243 VkPipelineVertexInputStateCreateInfo vi;
1244 VkPipelineInputAssemblyStateCreateInfo ia;
1245 VkPipelineRasterizationStateCreateInfo rs;
1246 VkPipelineColorBlendStateCreateInfo cb;
1247 VkPipelineDepthStencilStateCreateInfo ds;
1248 VkPipelineViewportStateCreateInfo vp;
1249 VkPipelineMultisampleStateCreateInfo ms;
1250 VkDynamicState dynamicStateEnables[(VK_DYNAMIC_STATE_STENCIL_REFERENCE - VK_DYNAMIC_STATE_VIEWPORT + 1)];
1251 VkPipelineDynamicStateCreateInfo dynamicState;
1252
1253 VkResult U_ASSERT_ONLY err;
1254
1255 memset(dynamicStateEnables, 0, sizeof dynamicStateEnables);
1256 memset(&dynamicState, 0, sizeof dynamicState);
1257 dynamicState.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
1258 dynamicState.pDynamicStates = dynamicStateEnables;
1259
1260 memset(&pipeline, 0, sizeof(pipeline));
1261 pipeline.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
1262 pipeline.layout = demo->pipeline_layout;
1263
1264 vi = demo->vertices.vi;
1265
1266 memset(&ia, 0, sizeof(ia));
1267 ia.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
1268 ia.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
1269
1270 memset(&rs, 0, sizeof(rs));
1271 rs.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
1272 rs.polygonMode = VK_POLYGON_MODE_FILL;
1273 rs.cullMode = VK_CULL_MODE_BACK_BIT;
1274 rs.frontFace = VK_FRONT_FACE_CLOCKWISE;
1275 rs.depthClampEnable = VK_FALSE;
1276 rs.rasterizerDiscardEnable = VK_FALSE;
1277 rs.depthBiasEnable = VK_FALSE;
1278 rs.lineWidth = 1.0f;
1279
1280 memset(&cb, 0, sizeof(cb));
1281 cb.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
1282 VkPipelineColorBlendAttachmentState att_state[1];
1283 memset(att_state, 0, sizeof(att_state));
1284 att_state[0].colorWriteMask = 0xf;
1285 att_state[0].blendEnable = VK_FALSE;
1286 cb.attachmentCount = 1;
1287 cb.pAttachments = att_state;
1288
1289 memset(&vp, 0, sizeof(vp));
1290 vp.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
1291 vp.viewportCount = 1;
1292 dynamicStateEnables[dynamicState.dynamicStateCount++] =
1293 VK_DYNAMIC_STATE_VIEWPORT;
1294 vp.scissorCount = 1;
1295 dynamicStateEnables[dynamicState.dynamicStateCount++] =
1296 VK_DYNAMIC_STATE_SCISSOR;
1297
1298 memset(&ds, 0, sizeof(ds));
1299 ds.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
1300 ds.depthTestEnable = VK_TRUE;
1301 ds.depthWriteEnable = VK_TRUE;
1302 ds.depthCompareOp = VK_COMPARE_OP_LESS_OR_EQUAL;
1303 ds.depthBoundsTestEnable = VK_FALSE;
1304 ds.back.failOp = VK_STENCIL_OP_KEEP;
1305 ds.back.passOp = VK_STENCIL_OP_KEEP;
1306 ds.back.compareOp = VK_COMPARE_OP_ALWAYS;
1307 ds.stencilTestEnable = VK_FALSE;
1308 ds.front = ds.back;
1309
1310 memset(&ms, 0, sizeof(ms));
1311 ms.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
1312 ms.pSampleMask = NULL;
1313 ms.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
1314
1315 // Two stages: vs and fs
1316 pipeline.stageCount = 2;
1317 VkPipelineShaderStageCreateInfo shaderStages[2];
1318 memset(&shaderStages, 0, 2 * sizeof(VkPipelineShaderStageCreateInfo));
1319
1320 shaderStages[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
1321 shaderStages[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
1322 shaderStages[0].module = demo_prepare_vs(demo);
1323 shaderStages[0].pName = "main";
1324
1325 shaderStages[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
1326 shaderStages[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
1327 shaderStages[1].module = demo_prepare_fs(demo);
1328 shaderStages[1].pName = "main";
1329
1330 pipeline.pVertexInputState = &vi;
1331 pipeline.pInputAssemblyState = &ia;
1332 pipeline.pRasterizationState = &rs;
1333 pipeline.pColorBlendState = &cb;
1334 pipeline.pMultisampleState = &ms;
1335 pipeline.pViewportState = &vp;
1336 pipeline.pDepthStencilState = &ds;
1337 pipeline.pStages = shaderStages;
1338 pipeline.renderPass = demo->render_pass;
1339 pipeline.pDynamicState = &dynamicState;
1340
1341 memset(&pipelineCache, 0, sizeof(pipelineCache));
1342 pipelineCache.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
1343
1344 err = vkCreatePipelineCache(demo->device, &pipelineCache, NULL,
1345 &demo->pipelineCache);
1346 assert(!err);
1347 err = vkCreateGraphicsPipelines(demo->device, demo->pipelineCache, 1,
1348 &pipeline, NULL, &demo->pipeline);
1349 assert(!err);
1350
1351 vkDestroyPipelineCache(demo->device, demo->pipelineCache, NULL);
1352
1353 vkDestroyShaderModule(demo->device, demo->frag_shader_module, NULL);
1354 vkDestroyShaderModule(demo->device, demo->vert_shader_module, NULL);
1355 }
1356
1357 static void demo_prepare_descriptor_pool(struct demo *demo) {
1358 const VkDescriptorPoolSize type_count = {
1359 .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
1360 .descriptorCount = DEMO_TEXTURE_COUNT,
1361 };
1362 const VkDescriptorPoolCreateInfo descriptor_pool = {
1363 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
1364 .pNext = NULL,
1365 .maxSets = 1,
1366 .poolSizeCount = 1,
1367 .pPoolSizes = &type_count,
1368 };
1369 VkResult U_ASSERT_ONLY err;
1370
1371 err = vkCreateDescriptorPool(demo->device, &descriptor_pool, NULL,
1372 &demo->desc_pool);
1373 assert(!err);
1374 }
1375
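// Allocate the descriptor set from the pool and point it at the texture
// sampler(s) and image view(s).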
1376 static void demo_prepare_descriptor_set(struct demo *demo) {
1377 VkDescriptorImageInfo tex_descs[DEMO_TEXTURE_COUNT];
1378 VkWriteDescriptorSet write;
1379 VkResult U_ASSERT_ONLY err;
1380 uint32_t i;
1381
1382 VkDescriptorSetAllocateInfo alloc_info = {
1383 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
1384 .pNext = NULL,
1385 .descriptorPool = demo->desc_pool,
1386 .descriptorSetCount = 1,
1387 .pSetLayouts = &demo->desc_layout};
1388 err = vkAllocateDescriptorSets(demo->device, &alloc_info, &demo->desc_set);
1389 assert(!err);
1390
1391 memset(&tex_descs, 0, sizeof(tex_descs));
1392 for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
1393 tex_descs[i].sampler = demo->textures[i].sampler;
1394 tex_descs[i].imageView = demo->textures[i].view;
1395 tex_descs[i].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
1396 }
1397
1398 memset(&write, 0, sizeof(write));
1399 write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
1400 write.dstSet = demo->desc_set;
1401 write.descriptorCount = DEMO_TEXTURE_COUNT;
1402 write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
1403 write.pImageInfo = tex_descs;
1404
1405 vkUpdateDescriptorSets(demo->device, 1, &write, 0, NULL);
1406 }
1407
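// Create one framebuffer per swapchain image, pairing each color view with
// the shared depth view.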
1408 static void demo_prepare_framebuffers(struct demo *demo) {
1409 VkImageView attachments[2];
1410 attachments[1] = demo->depth.view;
1411
1412 const VkFramebufferCreateInfo fb_info = {
1413 .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
1414 .pNext = NULL,
1415 .renderPass = demo->render_pass,
1416 .attachmentCount = 2,
1417 .pAttachments = attachments,
1418 .width = demo->width,
1419 .height = demo->height,
1420 .layers = 1,
1421 };
1422 VkResult U_ASSERT_ONLY err;
1423 uint32_t i;
1424
1425 demo->framebuffers = (VkFramebuffer *)malloc(demo->swapchainImageCount *
1426 sizeof(VkFramebuffer));
1427 assert(demo->framebuffers);
1428
1429 for (i = 0; i < demo->swapchainImageCount; i++) {
1430 attachments[0] = demo->buffers[i].view;
1431 err = vkCreateFramebuffer(demo->device, &fb_info, NULL,
1432 &demo->framebuffers[i]);
1433 assert(!err);
1434 }
1435 }
1436
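// Top-level setup: create the command pool and the draw command buffer, then
// build the swapchain, depth buffer, textures, vertex buffer, descriptors,
// render pass, pipeline and framebuffers.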
1437 static void demo_prepare(struct demo *demo) {
1438 VkResult U_ASSERT_ONLY err;
1439
1440 const VkCommandPoolCreateInfo cmd_pool_info = {
1441 .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
1442 .pNext = NULL,
1443 .queueFamilyIndex = demo->graphics_queue_node_index,
1444 .flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
1445 };
1446 err = vkCreateCommandPool(demo->device, &cmd_pool_info, NULL,
1447 &demo->cmd_pool);
1448 assert(!err);
1449
1450 const VkCommandBufferAllocateInfo cmd = {
1451 .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
1452 .pNext = NULL,
1453 .commandPool = demo->cmd_pool,
1454 .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
1455 .commandBufferCount = 1,
1456 };
1457 err = vkAllocateCommandBuffers(demo->device, &cmd, &demo->draw_cmd);
1458 assert(!err);
1459
1460 demo_prepare_buffers(demo);
1461 demo_prepare_depth(demo);
1462 demo_prepare_textures(demo);
1463 demo_prepare_vertices(demo);
1464 demo_prepare_descriptor_layout(demo);
1465 demo_prepare_render_pass(demo);
1466 demo_prepare_pipeline(demo);
1467
1468 demo_prepare_descriptor_pool(demo);
1469 demo_prepare_descriptor_set(demo);
1470
1471 demo_prepare_framebuffers(demo);
1472 }
1473
1474 static void demo_error_callback(int error, const char* description) {
1475 printf("GLFW error: %s\n", description);
1476 fflush(stdout);
1477 }
1478
1479 static void demo_key_callback(GLFWwindow* window, int key, int scancode, int action, int mods) {
1480 if (key == GLFW_KEY_ESCAPE && action == GLFW_RELEASE)
1481 glfwSetWindowShouldClose(window, GLFW_TRUE);
1482 }
1483
1484 static void demo_refresh_callback(GLFWwindow* window) {
1485 struct demo* demo = glfwGetWindowUserPointer(window);
1486 demo_draw(demo);
1487 }
1488
1489 static void demo_resize_callback(GLFWwindow* window, int width, int height) {
1490 struct demo* demo = glfwGetWindowUserPointer(window);
1491 demo->width = width;
1492 demo->height = height;
1493 demo_resize(demo);
1494 }
1495
1496 static void demo_run(struct demo *demo) {
1497 while (!glfwWindowShouldClose(demo->window)) {
1498 glfwPollEvents();
1499
1500 demo_draw(demo);
1501
1502 if (demo->depthStencil > 0.99f)
1503 demo->depthIncrement = -0.001f;
1504 if (demo->depthStencil < 0.8f)
1505 demo->depthIncrement = 0.001f;
1506
1507 demo->depthStencil += demo->depthIncrement;
1508
1509 // Wait for work to finish before updating MVP.
1510 vkDeviceWaitIdle(demo->device);
1511 demo->curFrame++;
1512 if (demo->frameCount != INT32_MAX && demo->curFrame == demo->frameCount)
1513 glfwSetWindowShouldClose(demo->window, GLFW_TRUE);
1514 }
1515 }
1516
1517 static void demo_create_window(struct demo *demo) {
1518 glfwWindowHint(GLFW_CLIENT_API, GLFW_NO_API);
1519
1520 demo->window = glfwCreateWindow(demo->width,
1521 demo->height,
1522 APP_LONG_NAME,
1523 NULL,
1524 NULL);
1525 if (!demo->window) {
1526 // It didn't work, so try to give a useful error:
1527 printf("Cannot create a window in which to draw!\n");
1528 fflush(stdout);
1529 exit(1);
1530 }
1531
1532 glfwSetWindowUserPointer(demo->window, demo);
1533 glfwSetWindowRefreshCallback(demo->window, demo_refresh_callback);
1534 glfwSetFramebufferSizeCallback(demo->window, demo_resize_callback);
1535 glfwSetKeyCallback(demo->window, demo_key_callback);
1536 }
1537
1538 /*
1539 * Return 1 (true) if all layer names specified in check_names
1540 * can be found in given layer properties.
1541 */
1542 static VkBool32 demo_check_layers(uint32_t check_count, const char **check_names,
1543 uint32_t layer_count,
1544 VkLayerProperties *layers) {
1545 uint32_t i, j;
1546 for (i = 0; i < check_count; i++) {
1547 VkBool32 found = 0;
1548 for (j = 0; j < layer_count; j++) {
1549 if (!strcmp(check_names[i], layers[j].layerName)) {
1550 found = 1;
1551 break;
1552 }
1553 }
1554 if (!found) {
1555 fprintf(stderr, "Cannot find layer: %s\n", check_names[i]);
1556 return 0;
1557 }
1558 }
1559 return 1;
1560 }
1561
1562 static void demo_init_vk(struct demo *demo) {
1563 VkResult err;
1564 VkBool32 portability_enumeration = VK_FALSE;
1565 uint32_t i = 0;
1566 uint32_t required_extension_count = 0;
1567 uint32_t instance_extension_count = 0;
1568 uint32_t instance_layer_count = 0;
1569 uint32_t validation_layer_count = 0;
1570 const char **required_extensions = NULL;
1571 const char **instance_validation_layers = NULL;
1572 demo->enabled_extension_count = 0;
1573 demo->enabled_layer_count = 0;
1574
1575 char *instance_validation_layers_alt1[] = {
1576 "VK_LAYER_LUNARG_standard_validation"
1577 };
1578
1579 char *instance_validation_layers_alt2[] = {
1580 "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation",
1581 "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_image",
1582 "VK_LAYER_LUNARG_core_validation", "VK_LAYER_LUNARG_swapchain",
1583 "VK_LAYER_GOOGLE_unique_objects"
1584 };
1585
1586 /* Look for validation layers */
1587 VkBool32 validation_found = 0;
1588 if (demo->validate) {
1589
1590 err = vkEnumerateInstanceLayerProperties(&instance_layer_count, NULL);
1591 assert(!err);
1592
1593 instance_validation_layers = (const char**) instance_validation_layers_alt1;
1594 if (instance_layer_count > 0) {
1595 VkLayerProperties *instance_layers =
1596 malloc(sizeof (VkLayerProperties) * instance_layer_count);
1597 err = vkEnumerateInstanceLayerProperties(&instance_layer_count,
1598 instance_layers);
1599 assert(!err);
1600
1601
1602 validation_found = demo_check_layers(
1603 ARRAY_SIZE(instance_validation_layers_alt1),
1604 instance_validation_layers, instance_layer_count,
1605 instance_layers);
1606 if (validation_found) {
1607 demo->enabled_layer_count = ARRAY_SIZE(instance_validation_layers_alt1);
1608 demo->enabled_layers[0] = "VK_LAYER_LUNARG_standard_validation";
1609 validation_layer_count = 1;
1610 } else {
1611 // Use the alternative set of validation layers.
1612 instance_validation_layers =
1613 (const char**) instance_validation_layers_alt2;
1614 demo->enabled_layer_count = ARRAY_SIZE(instance_validation_layers_alt2);
1615 validation_found = demo_check_layers(
1616 ARRAY_SIZE(instance_validation_layers_alt2),
1617 instance_validation_layers, instance_layer_count,
1618 instance_layers);
1619 validation_layer_count =
1620 ARRAY_SIZE(instance_validation_layers_alt2);
1621 for (i = 0; i < validation_layer_count; i++) {
1622 demo->enabled_layers[i] = instance_validation_layers[i];
1623 }
1624 }
1625 free(instance_layers);
1626 }
1627
1628 if (!validation_found) {
1629 ERR_EXIT("vkEnumerateInstanceLayerProperties failed to find "
1630 "required validation layer.\n\n"
1631 "Please look at the Getting Started guide for additional "
1632 "information.\n",
1633 "vkCreateInstance Failure");
1634 }
1635 }
1636
1637 /* Look for instance extensions */
1638 required_extensions = glfwGetRequiredInstanceExtensions(&required_extension_count);
1639 if (!required_extensions) {
1640 ERR_EXIT("glfwGetRequiredInstanceExtensions failed to find the "
1641 "platform surface extensions.\n\nDo you have a compatible "
1642 "Vulkan installable client driver (ICD) installed?\nPlease "
1643 "look at the Getting Started guide for additional "
1644 "information.\n",
1645 "vkCreateInstance Failure");
1646 }
1647
1648 for (i = 0; i < required_extension_count; i++) {
1649 demo->extension_names[demo->enabled_extension_count++] = required_extensions[i];
1650 assert(demo->enabled_extension_count < 64);
1651 }
1652
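// Enable optional instance extensions when available: debug report for
// validation, and portability enumeration where the driver advertises it.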
1653 err = vkEnumerateInstanceExtensionProperties(
1654 NULL, &instance_extension_count, NULL);
1655 assert(!err);
1656
1657 if (instance_extension_count > 0) {
1658 VkExtensionProperties *instance_extensions =
1659 malloc(sizeof(VkExtensionProperties) * instance_extension_count);
1660 err = vkEnumerateInstanceExtensionProperties(
1661 NULL, &instance_extension_count, instance_extensions);
1662 assert(!err);
1663 for (i = 0; i < instance_extension_count; i++) {
1664 if (!strcmp(VK_EXT_DEBUG_REPORT_EXTENSION_NAME,
1665 instance_extensions[i].extensionName)) {
1666 if (demo->validate) {
1667 demo->extension_names[demo->enabled_extension_count++] =
1668 VK_EXT_DEBUG_REPORT_EXTENSION_NAME;
1669 }
1670 }
1671 assert(demo->enabled_extension_count < 64);
1672 if (!strcmp(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME,
1673 instance_extensions[i].extensionName)) {
1674 demo->extension_names[demo->enabled_extension_count++] =
1675 VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME;
1676 portability_enumeration = VK_TRUE;
1677 }
1678 assert(demo->enabled_extension_count < 64);
1679 }
1680
1681 free(instance_extensions);
1682 }
1683
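// Describe the application, then create the instance with the layers and
// extensions gathered above.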
1684 const VkApplicationInfo app = {
1685 .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
1686 .pNext = NULL,
1687 .pApplicationName = APP_SHORT_NAME,
1688 .applicationVersion = 0,
1689 .pEngineName = APP_SHORT_NAME,
1690 .engineVersion = 0,
1691 .apiVersion = VK_API_VERSION_1_0,
1692 };
1693 VkInstanceCreateInfo inst_info = {
1694 .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
1695 .pNext = NULL,
1696 .pApplicationInfo = &app,
1697 .enabledLayerCount = demo->enabled_layer_count,
1698 .ppEnabledLayerNames = (const char *const *)instance_validation_layers,
1699 .enabledExtensionCount = demo->enabled_extension_count,
1700 .ppEnabledExtensionNames = (const char *const *)demo->extension_names,
1701 };
1702
1703 if (portability_enumeration)
1704 inst_info.flags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
1705
1706 uint32_t gpu_count;
1707
1708 err = vkCreateInstance(&inst_info, NULL, &demo->inst);
1709 if (err == VK_ERROR_INCOMPATIBLE_DRIVER) {
1710 ERR_EXIT("Cannot find a compatible Vulkan installable client driver "
1711 "(ICD).\n\nPlease look at the Getting Started guide for "
1712 "additional information.\n",
1713 "vkCreateInstance Failure");
1714 } else if (err == VK_ERROR_EXTENSION_NOT_PRESENT) {
1715 ERR_EXIT("Cannot find a specified extension library"
1716 ".\nMake sure your layers path is set appropriately\n",
1717 "vkCreateInstance Failure");
1718 } else if (err) {
1719 ERR_EXIT("vkCreateInstance failed.\n\nDo you have a compatible Vulkan "
1720 "installable client driver (ICD) installed?\nPlease look at "
1721 "the Getting Started guide for additional information.\n",
1722 "vkCreateInstance Failure");
1723 }
1724
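// Reload the Vulkan entry points through glad now that an instance exists,
// so instance-level functions resolved via glfwGetInstanceProcAddress are available.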
1725 gladLoadVulkanUserPtr(NULL, (GLADuserptrloadfunc) glfwGetInstanceProcAddress, demo->inst);
1726
1727 /* Make an initial call to query gpu_count, then a second call to retrieve the gpu info */
1728 err = vkEnumeratePhysicalDevices(demo->inst, &gpu_count, NULL);
1729 assert(!err && gpu_count > 0);
1730
1731 if (gpu_count > 0) {
1732 VkPhysicalDevice *physical_devices =
1733 malloc(sizeof(VkPhysicalDevice) * gpu_count);
1734 err = vkEnumeratePhysicalDevices(demo->inst, &gpu_count,
1735 physical_devices);
1736 assert(!err);
1737 /* For the tri demo we just grab the first physical device */
1738 demo->gpu = physical_devices[0];
1739 free(physical_devices);
1740 } else {
1741 ERR_EXIT("vkEnumeratePhysicalDevices reported zero accessible devices."
1742 "\n\nDo you have a compatible Vulkan installable client"
1743 " driver (ICD) installed?\nPlease look at the Getting Started"
1744 " guide for additional information.\n",
1745 "vkEnumeratePhysicalDevices Failure");
1746 }
1747
1748 gladLoadVulkanUserPtr(demo->gpu, (GLADuserptrloadfunc) glfwGetInstanceProcAddress, demo->inst);
1749
1750 /* Look for device extensions */
1751 uint32_t device_extension_count = 0;
1752 VkBool32 swapchainExtFound = 0;
1753 demo->enabled_extension_count = 0;
1754
1755 err = vkEnumerateDeviceExtensionProperties(demo->gpu, NULL,
1756 &device_extension_count, NULL);
1757 assert(!err);
1758
1759 if (device_extension_count > 0) {
1760 VkExtensionProperties *device_extensions =
1761 malloc(sizeof(VkExtensionProperties) * device_extension_count);
1762 err = vkEnumerateDeviceExtensionProperties(
1763 demo->gpu, NULL, &device_extension_count, device_extensions);
1764 assert(!err);
1765
1766 for (i = 0; i < device_extension_count; i++) {
1767 if (!strcmp(VK_KHR_SWAPCHAIN_EXTENSION_NAME,
1768 device_extensions[i].extensionName)) {
1769 swapchainExtFound = 1;
1770 demo->extension_names[demo->enabled_extension_count++] =
1771 VK_KHR_SWAPCHAIN_EXTENSION_NAME;
1772 }
1773 assert(demo->enabled_extension_count < 64);
1774 }
1775
1776 free(device_extensions);
1777 }
1778
1779 if (!swapchainExtFound) {
1780 ERR_EXIT("vkEnumerateDeviceExtensionProperties failed to find "
1781 "the " VK_KHR_SWAPCHAIN_EXTENSION_NAME
1782 " extension.\n\nDo you have a compatible "
1783 "Vulkan installable client driver (ICD) installed?\nPlease "
1784 "look at the Getting Started guide for additional "
1785 "information.\n",
1786 "vkCreateInstance Failure");
1787 }
1788
1789 if (demo->validate) {
1790 VkDebugReportCallbackCreateInfoEXT dbgCreateInfo;
1791 dbgCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
1792 dbgCreateInfo.flags =
1793 VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT;
1794 dbgCreateInfo.pfnCallback = demo->use_break ? BreakCallback : dbgFunc;
1795 dbgCreateInfo.pUserData = demo;
1796 dbgCreateInfo.pNext = NULL;
1797 err = vkCreateDebugReportCallbackEXT(demo->inst, &dbgCreateInfo, NULL,
1798 &demo->msg_callback);
1799 switch (err) {
1800 case VK_SUCCESS:
1801 break;
1802 case VK_ERROR_OUT_OF_HOST_MEMORY:
1803 ERR_EXIT("CreateDebugReportCallback: out of host memory\n",
1804 "CreateDebugReportCallback Failure");
1805 break;
1806 default:
1807 ERR_EXIT("CreateDebugReportCallback: unknown failure\n",
1808 "CreateDebugReportCallback Failure");
1809 break;
1810 }
1811 }
1812
1813 vkGetPhysicalDeviceProperties(demo->gpu, &demo->gpu_props);
1814
1815 // Query with NULL data to get count
1816 vkGetPhysicalDeviceQueueFamilyProperties(demo->gpu, &demo->queue_count,
1817 NULL);
1818
1819 demo->queue_props = (VkQueueFamilyProperties *)malloc(
1820 demo->queue_count * sizeof(VkQueueFamilyProperties));
1821 vkGetPhysicalDeviceQueueFamilyProperties(demo->gpu, &demo->queue_count,
1822 demo->queue_props);
1823 assert(demo->queue_count >= 1);
1824
1825 vkGetPhysicalDeviceFeatures(demo->gpu, &demo->gpu_features);
1826
1827 // Graphics queue and MemMgr queue can be separate.
1828 // TODO: Add support for separate queues, including synchronization,
1829 // and appropriate tracking for QueueSubmit
1830 }
1831
1832 static void demo_init_device(struct demo *demo) {
1833 VkResult U_ASSERT_ONLY err;
1834
1835 float queue_priorities[1] = {0.0};
1836 const VkDeviceQueueCreateInfo queue = {
1837 .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
1838 .pNext = NULL,
1839 .queueFamilyIndex = demo->graphics_queue_node_index,
1840 .queueCount = 1,
1841 .pQueuePriorities = queue_priorities};
1842
1843
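// Request only optional device features the demo may use; here just
// shaderClipDistance, and only when the physical device supports it.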
1844 VkPhysicalDeviceFeatures features;
1845 memset(&features, 0, sizeof(features));
1846 if (demo->gpu_features.shaderClipDistance) {
1847 features.shaderClipDistance = VK_TRUE;
1848 }
1849
1850 VkDeviceCreateInfo device = {
1851 .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
1852 .pNext = NULL,
1853 .queueCreateInfoCount = 1,
1854 .pQueueCreateInfos = &queue,
1855 .enabledLayerCount = 0,
1856 .ppEnabledLayerNames = NULL,
1857 .enabledExtensionCount = demo->enabled_extension_count,
1858 .ppEnabledExtensionNames = (const char *const *)demo->extension_names,
1859 .pEnabledFeatures = &features,
1860 };
1861
1862 err = vkCreateDevice(demo->gpu, &device, NULL, &demo->device);
1863 assert(!err);
1864 }
1865
1866 static void demo_init_vk_swapchain(struct demo *demo) {
1867 VkResult U_ASSERT_ONLY err;
1868 uint32_t i;
1869
1870 // Create a WSI surface for the window:
1871 glfwCreateWindowSurface(demo->inst, demo->window, NULL, &demo->surface);
1872
1873 // Iterate over each queue to learn whether it supports presenting:
1874 VkBool32 *supportsPresent =
1875 (VkBool32 *)malloc(demo->queue_count * sizeof(VkBool32));
1876 for (i = 0; i < demo->queue_count; i++) {
1877 vkGetPhysicalDeviceSurfaceSupportKHR(demo->gpu, i, demo->surface,
1878 &supportsPresent[i]);
1879 }
1880
1881 // Search for a graphics and a present queue in the array of queue
1882 // families, preferring one that supports both.
1883 uint32_t graphicsQueueNodeIndex = UINT32_MAX;
1884 uint32_t presentQueueNodeIndex = UINT32_MAX;
1885 for (i = 0; i < demo->queue_count; i++) {
1886 if ((demo->queue_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
1887 if (graphicsQueueNodeIndex == UINT32_MAX) {
1888 graphicsQueueNodeIndex = i;
1889 }
1890
1891 if (supportsPresent[i] == VK_TRUE) {
1892 graphicsQueueNodeIndex = i;
1893 presentQueueNodeIndex = i;
1894 break;
1895 }
1896 }
1897 }
1898 if (presentQueueNodeIndex == UINT32_MAX) {
1899 // If we did not find a queue that supports both graphics and present,
1900 // look for a separate present queue.
1901 for (i = 0; i < demo->queue_count; ++i) {
1902 if (supportsPresent[i] == VK_TRUE) {
1903 presentQueueNodeIndex = i;
1904 break;
1905 }
1906 }
1907 }
1908 free(supportsPresent);
1909
1910 // Report an error if we could not find both a graphics and a present queue.
1911 if (graphicsQueueNodeIndex == UINT32_MAX ||
1912 presentQueueNodeIndex == UINT32_MAX) {
1913 ERR_EXIT("Could not find a graphics and a present queue\n",
1914 "Swapchain Initialization Failure");
1915 }
1916
1917 // TODO: Add support for separate queues, including presentation,
1918 // synchronization, and appropriate tracking for QueueSubmit.
1919 // NOTE: While it is possible for an application to use separate graphics
1920 // and present queues, this demo program assumes it is only using
1921 // one:
1922 if (graphicsQueueNodeIndex != presentQueueNodeIndex) {
1923 ERR_EXIT("Could not find a common graphics and a present queue\n",
1924 "Swapchain Initialization Failure");
1925 }
1926
1927 demo->graphics_queue_node_index = graphicsQueueNodeIndex;
1928
1929 demo_init_device(demo);
1930
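// Fetch the queue handle; the same family is used for graphics and present.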
1931 vkGetDeviceQueue(demo->device, demo->graphics_queue_node_index, 0,
1932 &demo->queue);
1933
1934 // Get the list of VkFormats supported by the surface:
1935 uint32_t formatCount;
1936 err = vkGetPhysicalDeviceSurfaceFormatsKHR(demo->gpu, demo->surface,
1937 &formatCount, NULL);
1938 assert(!err);
1939 VkSurfaceFormatKHR *surfFormats =
1940 (VkSurfaceFormatKHR *)malloc(formatCount * sizeof(VkSurfaceFormatKHR));
1941 err = vkGetPhysicalDeviceSurfaceFormatsKHR(demo->gpu, demo->surface,
1942 &formatCount, surfFormats);
1943 assert(!err);
1944 // If the format list includes just one entry of VK_FORMAT_UNDEFINED,
1945 // the surface has no preferred format. Otherwise, at least one
1946 // supported format will be returned.
1947 if (formatCount == 1 && surfFormats[0].format == VK_FORMAT_UNDEFINED) {
1948 demo->format = VK_FORMAT_B8G8R8A8_UNORM;
1949 } else {
1950 assert(formatCount >= 1);
1951 demo->format = surfFormats[0].format;
1952 }
1953 demo->color_space = surfFormats[0].colorSpace;
1954
1955 demo->curFrame = 0;
1956
1957 // Get Memory information and properties
1958 vkGetPhysicalDeviceMemoryProperties(demo->gpu, &demo->memory_properties);
1959 }
1960
1961 static void demo_init_connection(struct demo *demo) {
1962 glfwSetErrorCallback(demo_error_callback);
1963
1964 if (!glfwInit()) {
1965 printf("Cannot initialize GLFW.\nExiting ...\n");
1966 fflush(stdout);
1967 exit(1);
1968 }
1969
1970 if (!glfwVulkanSupported()) {
1971 printf("GLFW failed to find the Vulkan loader.\nExiting ...\n");
1972 fflush(stdout);
1973 exit(1);
1974 }
1975
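// Load the global-level Vulkan entry points through GLFW's loader; they are
// reloaded later once the instance and physical device exist.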
1976 gladLoadVulkanUserPtr(NULL, (GLADuserptrloadfunc) glfwGetInstanceProcAddress, NULL);
1977 }
1978
1979 static void demo_init(struct demo *demo, const int argc, const char *argv[])
1980 {
1981 int i;
1982 memset(demo, 0, sizeof(*demo));
1983 demo->frameCount = INT32_MAX;
1984
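// Parse command-line options, e.g. a hypothetical invocation such as
// "tri --validate --c 600"; any unrecognized argument prints usage and exits.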
1985 for (i = 1; i < argc; i++) {
1986 if (strcmp(argv[i], "--use_staging") == 0) {
1987 demo->use_staging_buffer = true;
1988 continue;
1989 }
1990 if (strcmp(argv[i], "--break") == 0) {
1991 demo->use_break = true;
1992 continue;
1993 }
1994 if (strcmp(argv[i], "--validate") == 0) {
1995 demo->validate = true;
1996 continue;
1997 }
1998 if (strcmp(argv[i], "--c") == 0 && demo->frameCount == INT32_MAX &&
1999 i < argc - 1 && sscanf(argv[i + 1], "%d", &demo->frameCount) == 1 &&
2000 demo->frameCount >= 0) {
2001 i++;
2002 continue;
2003 }
2004
2005 fprintf(stderr, "Usage:\n %s [--use_staging] [--validate] [--break] "
2006 "[--c <framecount>]\n",
2007 APP_SHORT_NAME);
2008 fflush(stderr);
2009 exit(1);
2010 }
2011
2012 demo_init_connection(demo);
2013 demo_init_vk(demo);
2014
2015 demo->width = 300;
2016 demo->height = 300;
2017 demo->depthStencil = 1.0;
2018 demo->depthIncrement = -0.01f;
2019 }
2020
2021 static void demo_cleanup(struct demo *demo) {
2022 uint32_t i;
2023
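// Destroy Vulkan objects roughly in reverse order of creation, then the
// device, debug callback, surface, instance, and finally the GLFW window.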
2024 for (i = 0; i < demo->swapchainImageCount; i++) {
2025 vkDestroyFramebuffer(demo->device, demo->framebuffers[i], NULL);
2026 }
2027 free(demo->framebuffers);
2028 vkDestroyDescriptorPool(demo->device, demo->desc_pool, NULL);
2029
2030 if (demo->setup_cmd) {
2031 vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->setup_cmd);
2032 }
2033 vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->draw_cmd);
2034 vkDestroyCommandPool(demo->device, demo->cmd_pool, NULL);
2035
2036 vkDestroyPipeline(demo->device, demo->pipeline, NULL);
2037 vkDestroyRenderPass(demo->device, demo->render_pass, NULL);
2038 vkDestroyPipelineLayout(demo->device, demo->pipeline_layout, NULL);
2039 vkDestroyDescriptorSetLayout(demo->device, demo->desc_layout, NULL);
2040
2041 vkDestroyBuffer(demo->device, demo->vertices.buf, NULL);
2042 vkFreeMemory(demo->device, demo->vertices.mem, NULL);
2043
2044 for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
2045 vkDestroyImageView(demo->device, demo->textures[i].view, NULL);
2046 vkDestroyImage(demo->device, demo->textures[i].image, NULL);
2047 vkFreeMemory(demo->device, demo->textures[i].mem, NULL);
2048 vkDestroySampler(demo->device, demo->textures[i].sampler, NULL);
2049 }
2050
2051 for (i = 0; i < demo->swapchainImageCount; i++) {
2052 vkDestroyImageView(demo->device, demo->buffers[i].view, NULL);
2053 }
2054
2055 vkDestroyImageView(demo->device, demo->depth.view, NULL);
2056 vkDestroyImage(demo->device, demo->depth.image, NULL);
2057 vkFreeMemory(demo->device, demo->depth.mem, NULL);
2058
2059 vkDestroySwapchainKHR(demo->device, demo->swapchain, NULL);
2060 free(demo->buffers);
2061
2062 vkDestroyDevice(demo->device, NULL);
2063 if (demo->validate) {
2064 vkDestroyDebugReportCallbackEXT(demo->inst, demo->msg_callback, NULL);
2065 }
2066 vkDestroySurfaceKHR(demo->inst, demo->surface, NULL);
2067 vkDestroyInstance(demo->inst, NULL);
2068
2069 free(demo->queue_props);
2070
2071 glfwDestroyWindow(demo->window);
2072 glfwTerminate();
2073 }
2074
2075 static void demo_resize(struct demo *demo) {
2076 uint32_t i;
2077
2078 // In order to properly resize the window, we must re-create the swapchain
2079 // AND redo the command buffers, etc.
2080 //
2081 // First, perform part of the demo_cleanup() function:
2082
2083 for (i = 0; i < demo->swapchainImageCount; i++) {
2084 vkDestroyFramebuffer(demo->device, demo->framebuffers[i], NULL);
2085 }
2086 free(demo->framebuffers);
2087 vkDestroyDescriptorPool(demo->device, demo->desc_pool, NULL);
2088
2089 if (demo->setup_cmd) {
2090 vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->setup_cmd);
2091 demo->setup_cmd = VK_NULL_HANDLE;
2092 }
2093 vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->draw_cmd);
2094 vkDestroyCommandPool(demo->device, demo->cmd_pool, NULL);
2095
2096 vkDestroyPipeline(demo->device, demo->pipeline, NULL);
2097 vkDestroyRenderPass(demo->device, demo->render_pass, NULL);
2098 vkDestroyPipelineLayout(demo->device, demo->pipeline_layout, NULL);
2099 vkDestroyDescriptorSetLayout(demo->device, demo->desc_layout, NULL);
2100
2101 vkDestroyBuffer(demo->device, demo->vertices.buf, NULL);
2102 vkFreeMemory(demo->device, demo->vertices.mem, NULL);
2103
2104 for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
2105 vkDestroyImageView(demo->device, demo->textures[i].view, NULL);
2106 vkDestroyImage(demo->device, demo->textures[i].image, NULL);
2107 vkFreeMemory(demo->device, demo->textures[i].mem, NULL);
2108 vkDestroySampler(demo->device, demo->textures[i].sampler, NULL);
2109 }
2110
2111 for (i = 0; i < demo->swapchainImageCount; i++) {
2112 vkDestroyImageView(demo->device, demo->buffers[i].view, NULL);
2113 }
2114
2115 vkDestroyImageView(demo->device, demo->depth.view, NULL);
2116 vkDestroyImage(demo->device, demo->depth.image, NULL);
2117 vkFreeMemory(demo->device, demo->depth.mem, NULL);
2118
2119 free(demo->buffers);
2120
2121 // Second, re-perform the demo_prepare() function, which will re-create the
2122 // swapchain:
2123 demo_prepare(demo);
2124 }
2125
2126 int main(const int argc, const char *argv[]) {
2127 struct demo demo;
2128
2129 demo_init(&demo, argc, argv);
2130 demo_create_window(&demo);
2131 demo_init_vk_swapchain(&demo);
2132
2133 demo_prepare(&demo);
2134 demo_run(&demo);
2135
2136 demo_cleanup(&demo);
2137
2138 return validation_error;
2139 }
2140
2141