1 /*
2  * Vulkan Samples
3  *
4  * Copyright (C) 2015-2016 Valve Corporation
5  * Copyright (C) 2015-2016 LunarG, Inc.
6  * Copyright (C) 2015-2018 Google, Inc.
7  *
8  * Licensed under the Apache License, Version 2.0 (the "License");
9  * you may not use this file except in compliance with the License.
10  * You may obtain a copy of the License at
11  *
12  *     http://www.apache.org/licenses/LICENSE-2.0
13  *
14  * Unless required by applicable law or agreed to in writing, software
15  * distributed under the License is distributed on an "AS IS" BASIS,
16  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17  * See the License for the specific language governing permissions and
18  * limitations under the License.
19  */
20 
21 /*
22 VULKAN_SAMPLE_DESCRIPTION
23 samples utility functions
24 */
25 
26 #include "vulkan_command_buffer_utils.h"
27 
28 #include <assert.h>
29 #include <string.h>
30 #include <cstdlib>
31 #include <iterator>
32 
33 #if defined(VK_USE_PLATFORM_WAYLAND_KHR)
34 #    include <linux/input.h>
35 #endif
36 
37 using namespace std;
38 
39 /*
40  * Enumerate the instance extension properties provided by the given layer.
41  */
VkResult init_global_extension_properties(layer_properties &layer_props)
43 {
44     VkExtensionProperties *instance_extensions;
45     uint32_t instance_extension_count;
46     VkResult res;
47     char *layer_name = NULL;
48 
49     layer_name = layer_props.properties.layerName;
50 
51     do
52     {
53         res = vkEnumerateInstanceExtensionProperties(layer_name, &instance_extension_count, NULL);
54         if (res)
55             return res;
56 
57         if (instance_extension_count == 0)
58         {
59             return VK_SUCCESS;
60         }
61 
62         layer_props.instance_extensions.resize(instance_extension_count);
63         instance_extensions = layer_props.instance_extensions.data();
64         res = vkEnumerateInstanceExtensionProperties(layer_name, &instance_extension_count,
65                                                      instance_extensions);
66     } while (res == VK_INCOMPLETE);
67 
68     return res;
69 }
70 
71 /*
72  * Enumerate the global instance layers and, for each layer, its instance extensions.
73  */
VkResult init_global_layer_properties(struct sample_info &info)
75 {
76     uint32_t instance_layer_count;
77     VkLayerProperties *vk_props = NULL;
78     VkResult res;
79 
80     /*
81      * It's possible, though very rare, that the number of
82      * instance layers could change. For example, installing something
83      * could include new layers that the loader would pick up
84      * between the initial query for the count and the
85      * request for VkLayerProperties. The loader indicates that
86  * by returning a VK_INCOMPLETE status and will update
87  * the count parameter.
88      * The count parameter will be updated with the number of
89      * entries loaded into the data pointer - in case the number
90      * of layers went down or is smaller than the size given.
91      */
92     do
93     {
94         res = vkEnumerateInstanceLayerProperties(&instance_layer_count, NULL);
95         if (res)
96             return res;
97 
98         if (instance_layer_count == 0)
99         {
100             return VK_SUCCESS;
101         }
102 
103         vk_props = (VkLayerProperties *)realloc(vk_props,
104                                                 instance_layer_count * sizeof(VkLayerProperties));
105 
106         res = vkEnumerateInstanceLayerProperties(&instance_layer_count, vk_props);
107     } while (res == VK_INCOMPLETE);
108 
109     /*
110      * Now gather the extension list for each instance layer.
111      */
112     for (uint32_t i = 0; i < instance_layer_count; i++)
113     {
114         layer_properties layer_props;
115         layer_props.properties = vk_props[i];
116         res                    = init_global_extension_properties(layer_props);
117         if (res)
118             return res;
119         info.instance_layer_properties.push_back(layer_props);
120     }
121     free(vk_props);
122 
123     return res;
124 }
125 
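/*
 * Enumerate the device extensions provided by the given layer on the first
 * enumerated GPU, retrying while the loader reports VK_INCOMPLETE.
 */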
VkResult init_device_extension_properties(struct sample_info &info, layer_properties &layer_props)
127 {
128     VkExtensionProperties *device_extensions;
129     uint32_t device_extension_count;
130     VkResult res;
131     char *layer_name = NULL;
132 
133     layer_name = layer_props.properties.layerName;
134 
135     do
136     {
137         res = vkEnumerateDeviceExtensionProperties(info.gpus[0], layer_name,
138                                                    &device_extension_count, NULL);
139         if (res)
140             return res;
141 
142         if (device_extension_count == 0)
143         {
144             return VK_SUCCESS;
145         }
146 
147         layer_props.device_extensions.resize(device_extension_count);
148         device_extensions = layer_props.device_extensions.data();
149         res               = vkEnumerateDeviceExtensionProperties(info.gpus[0], layer_name,
150                                                    &device_extension_count, device_extensions);
151     } while (res == VK_INCOMPLETE);
152 
153     return res;
154 }
155 
156 /*
157  * Return 1 (true) if all layer names specified in layer_names
158  * can be found in the given layer properties.
159  */
VkBool32 demo_check_layers(const std::vector<layer_properties> &layer_props,
161                            const std::vector<const char *> &layer_names)
162 {
163     uint32_t check_count = layer_names.size();
164     uint32_t layer_count = layer_props.size();
165     for (uint32_t i = 0; i < check_count; i++)
166     {
167         VkBool32 found = 0;
168         for (uint32_t j = 0; j < layer_count; j++)
169         {
170             if (!strcmp(layer_names[i], layer_props[j].properties.layerName))
171             {
172                 found = 1;
173             }
174         }
175         if (!found)
176         {
177             std::cout << "Cannot find layer: " << layer_names[i] << std::endl;
178             return 0;
179         }
180     }
181     return 1;
182 }
183 
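/*
 * Select the surface extension for the current platform and, except on
 * Android, the validation layers plus the debug report extension.
 */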
void init_instance_extension_names(struct sample_info &info)
185 {
186     info.instance_extension_names.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
187 #ifdef __ANDROID__
188     info.instance_extension_names.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
189 #elif defined(_WIN32)
190     info.instance_extension_names.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
191 #elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
192     info.instance_extension_names.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
193 #else
194     info.instance_extension_names.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
195 #endif
196 #ifndef __ANDROID__
197     info.instance_layer_names.push_back("VK_LAYER_LUNARG_standard_validation");
198     if (!demo_check_layers(info.instance_layer_properties, info.instance_layer_names))
199     {
200         // If standard validation is not present, search instead for the
201         // individual layers that make it up, in the correct order.
202         //
203 
204         info.instance_layer_names.clear();
205         info.instance_layer_names.push_back("VK_LAYER_GOOGLE_threading");
206         info.instance_layer_names.push_back("VK_LAYER_LUNARG_parameter_validation");
207         info.instance_layer_names.push_back("VK_LAYER_LUNARG_object_tracker");
208         info.instance_layer_names.push_back("VK_LAYER_LUNARG_core_validation");
209         info.instance_layer_names.push_back("VK_LAYER_LUNARG_image");
210         info.instance_layer_names.push_back("VK_LAYER_LUNARG_swapchain");
211         info.instance_layer_names.push_back("VK_LAYER_GOOGLE_unique_objects");
212 
213         if (!demo_check_layers(info.instance_layer_properties, info.instance_layer_names))
214         {
215             exit(1);
216         }
217     }
218 
219     // Enable debug callback extension
220     info.instance_extension_names.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
221 #endif
222 }
223 
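/*
 * Initialize the loader via volk and create the VkInstance with the
 * previously chosen layers and extensions.
 */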
VkResult init_instance(struct sample_info &info, char const *const app_short_name)
225 {
226     VkResult res = volkInitialize();
227     ASSERT(res == VK_SUCCESS);
228     VkApplicationInfo app_info  = {};
229     app_info.sType              = VK_STRUCTURE_TYPE_APPLICATION_INFO;
230     app_info.pNext              = NULL;
231     app_info.pApplicationName   = app_short_name;
232     app_info.applicationVersion = 1;
233     app_info.pEngineName        = app_short_name;
234     app_info.engineVersion      = 1;
235     app_info.apiVersion         = VK_API_VERSION_1_0;
236 
237     VkInstanceCreateInfo inst_info = {};
238     inst_info.sType                = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
239     inst_info.pNext                = NULL;
240     inst_info.flags                = 0;
241     inst_info.pApplicationInfo     = &app_info;
242     inst_info.enabledLayerCount    = info.instance_layer_names.size();
243     inst_info.ppEnabledLayerNames =
244         info.instance_layer_names.size() ? info.instance_layer_names.data() : NULL;
245     inst_info.enabledExtensionCount   = info.instance_extension_names.size();
246     inst_info.ppEnabledExtensionNames = info.instance_extension_names.data();
247 
248     res = vkCreateInstance(&inst_info, NULL, &info.inst);
249     ASSERT(res == VK_SUCCESS);
250     volkLoadInstance(info.inst);
251 
252     return res;
253 }
254 
void init_device_extension_names(struct sample_info &info)
256 {
257     info.device_extension_names.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
258 }
259 
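/*
 * Enumerate the physical devices, then cache queue family, memory and device
 * properties for gpus[0] and query device extensions for each enabled layer.
 */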
VkResult init_enumerate_device(struct sample_info &info, uint32_t gpu_count)
261 {
262     VkResult res = vkEnumeratePhysicalDevices(info.inst, &gpu_count, NULL);
263     ASSERT(gpu_count);
264     info.gpus.resize(gpu_count);
265 
266     res = vkEnumeratePhysicalDevices(info.inst, &gpu_count, info.gpus.data());
267     ASSERT(!res);
268 
269     vkGetPhysicalDeviceQueueFamilyProperties(info.gpus[0], &info.queue_family_count, NULL);
270     ASSERT(info.queue_family_count >= 1);
271 
272     info.queue_props.resize(info.queue_family_count);
273     vkGetPhysicalDeviceQueueFamilyProperties(info.gpus[0], &info.queue_family_count,
274                                              info.queue_props.data());
275     ASSERT(info.queue_family_count >= 1);
276 
277     /* This is as good a place as any to do this */
278     vkGetPhysicalDeviceMemoryProperties(info.gpus[0], &info.memory_properties);
279     vkGetPhysicalDeviceProperties(info.gpus[0], &info.gpu_props);
280     /* query device extensions for enabled layers */
281     for (auto &layer_props : info.instance_layer_properties)
282     {
283         init_device_extension_properties(info, layer_props);
284     }
285 
286     return res;
287 }
288 
289 #if defined(VK_USE_PLATFORM_WAYLAND_KHR)
290 
static void handle_ping(void *data, wl_shell_surface *shell_surface, uint32_t serial)
292 {
293     wl_shell_surface_pong(shell_surface, serial);
294 }
295 
static void handle_configure(void *data,
297                              wl_shell_surface *shell_surface,
298                              uint32_t edges,
299                              int32_t width,
300                              int32_t height)
301 {}
302 
static void handle_popup_done(void *data, wl_shell_surface *shell_surface) {}
304 
305 static const wl_shell_surface_listener shell_surface_listener = {handle_ping, handle_configure,
306                                                                  handle_popup_done};
307 
static void registry_handle_global(void *data,
309                                    wl_registry *registry,
310                                    uint32_t id,
311                                    const char *interface,
312                                    uint32_t version)
313 {
314     sample_info *info = (sample_info *)data;
315     // pick up Wayland objects when they appear
316     if (strcmp(interface, "wl_compositor") == 0)
317     {
318         info->compositor =
319             (wl_compositor *)wl_registry_bind(registry, id, &wl_compositor_interface, 1);
320     }
321     else if (strcmp(interface, "wl_shell") == 0)
322     {
323         info->shell = (wl_shell *)wl_registry_bind(registry, id, &wl_shell_interface, 1);
324     }
325 }
326 
static void registry_handle_global_remove(void *data, wl_registry *registry, uint32_t name) {}
328 
329 static const wl_registry_listener registry_listener = {registry_handle_global,
330                                                        registry_handle_global_remove};
331 
332 #endif
333 
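/*
 * Establish the window-system connection (XCB or Wayland); a no-op on
 * other platforms.
 */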
void init_connection(struct sample_info &info)
335 {
336 #if defined(VK_USE_PLATFORM_XCB_KHR)
337     const xcb_setup_t *setup;
338     xcb_screen_iterator_t iter;
339     int scr;
340 
341     info.connection = xcb_connect(NULL, &scr);
342     if (info.connection == NULL || xcb_connection_has_error(info.connection))
343     {
344         std::cout << "Unable to make an XCB connection\n";
345         exit(-1);
346     }
347 
348     setup = xcb_get_setup(info.connection);
349     iter  = xcb_setup_roots_iterator(setup);
350     while (scr-- > 0)
351         xcb_screen_next(&iter);
352 
353     info.screen = iter.data;
354 #elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
355     info.display = wl_display_connect(nullptr);
356 
357     if (info.display == nullptr)
358     {
359         printf(
360             "Cannot find a compatible Vulkan installable client driver "
361             "(ICD).\nExiting ...\n");
362         fflush(stdout);
363         exit(1);
364     }
365 
366     info.registry = wl_display_get_registry(info.display);
367     wl_registry_add_listener(info.registry, &registry_listener, &info);
368     wl_display_dispatch(info.display);
369 #endif
370 }
371 #ifdef _WIN32
static void run(struct sample_info *info)
373 { /* Placeholder for samples that want to show dynamic content */
374 }
375 
376 // MS-Windows event handling function:
LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
378 {
379     struct sample_info *info =
380         reinterpret_cast<struct sample_info *>(GetWindowLongPtr(hWnd, GWLP_USERDATA));
381 
382     switch (uMsg)
383     {
384         case WM_CLOSE:
385             PostQuitMessage(0);
386             break;
387         case WM_PAINT:
388             run(info);
389             return 0;
390         default:
391             break;
392     }
393     return (DefWindowProc(hWnd, uMsg, wParam, lParam));
394 }
395 
void init_window(struct sample_info &info)
397 {
398     WNDCLASSEXA win_class;
399     ASSERT(info.width > 0);
400     ASSERT(info.height > 0);
401 
402     info.connection = GetModuleHandle(NULL);
403     sprintf(info.name, "Sample");
404 
405     // Initialize the window class structure:
406     win_class.cbSize        = sizeof(WNDCLASSEX);
407     win_class.style         = CS_HREDRAW | CS_VREDRAW;
408     win_class.lpfnWndProc   = WndProc;
409     win_class.cbClsExtra    = 0;
410     win_class.cbWndExtra    = 0;
411     win_class.hInstance     = info.connection;  // hInstance
412     win_class.hIcon         = LoadIcon(NULL, IDI_APPLICATION);
413     win_class.hCursor       = LoadCursor(NULL, IDC_ARROW);
414     win_class.hbrBackground = (HBRUSH)GetStockObject(WHITE_BRUSH);
415     win_class.lpszMenuName  = NULL;
416     win_class.lpszClassName = info.name;
417     win_class.hIconSm       = LoadIcon(NULL, IDI_WINLOGO);
418     // Register window class:
419     if (!RegisterClassExA(&win_class))
420     {
421         // It didn't work, so try to give a useful error:
422         printf("Unexpected error trying to start the application!\n");
423         fflush(stdout);
424         exit(1);
425     }
426     // Create window with the registered class:
427     RECT wr = {0, 0, info.width, info.height};
428     AdjustWindowRect(&wr, WS_OVERLAPPEDWINDOW, FALSE);
429     info.window = CreateWindowExA(0,
430                                   info.name,             // class name
431                                   info.name,             // app name
432                                   WS_OVERLAPPEDWINDOW |  // window style
433                                       WS_VISIBLE | WS_SYSMENU,
434                                   100, 100,            // x/y coords
435                                   wr.right - wr.left,  // width
436                                   wr.bottom - wr.top,  // height
437                                   NULL,                // handle to parent
438                                   NULL,                // handle to menu
439                                   info.connection,     // hInstance
440                                   NULL);               // no extra parameters
441     if (!info.window)
442     {
443         // It didn't work, so try to give a useful error:
444         printf("Cannot create a window in which to draw!\n");
445         fflush(stdout);
446         exit(1);
447     }
448     SetWindowLongPtr(info.window, GWLP_USERDATA, (LONG_PTR)&info);
449 }
450 
void destroy_window(struct sample_info &info)
452 {
453     vkDestroySurfaceKHR(info.inst, info.surface, NULL);
454     DestroyWindow(info.window);
455     UnregisterClassA(info.name, GetModuleHandle(NULL));
456 }
457 
458 #elif defined(__ANDROID__)
459 // Android implementation.
void init_window(struct sample_info &info) {}
461 
void destroy_window(struct sample_info &info)
463 {
464     vkDestroySurfaceKHR(info.inst, info.surface, NULL);
465 }
466 
467 #elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
468 
void init_window(struct sample_info &info)
470 {
471     ASSERT(info.width > 0);
472     ASSERT(info.height > 0);
473 
474     info.window = wl_compositor_create_surface(info.compositor);
475     if (!info.window)
476     {
477         printf("Can not create wayland_surface from compositor!\n");
478         fflush(stdout);
479         exit(1);
480     }
481 
482     info.shell_surface = wl_shell_get_shell_surface(info.shell, info.window);
483     if (!info.shell_surface)
484     {
485         printf("Can not get shell_surface from wayland_surface!\n");
486         fflush(stdout);
487         exit(1);
488     }
489 
490     wl_shell_surface_add_listener(info.shell_surface, &shell_surface_listener, &info);
491     wl_shell_surface_set_toplevel(info.shell_surface);
492 }
493 
void destroy_window(struct sample_info &info)
495 {
496     wl_shell_surface_destroy(info.shell_surface);
497     wl_surface_destroy(info.window);
498     wl_shell_destroy(info.shell);
499     wl_compositor_destroy(info.compositor);
500     wl_registry_destroy(info.registry);
501     wl_display_disconnect(info.display);
502 }
503 
504 #else
505 
void init_window(struct sample_info &info)
507 {
508     ASSERT(info.width > 0);
509     ASSERT(info.height > 0);
510 
511     uint32_t value_mask, value_list[32];
512 
513     info.window = xcb_generate_id(info.connection);
514 
515     value_mask = XCB_CW_BACK_PIXEL | XCB_CW_EVENT_MASK;
516     value_list[0] = info.screen->black_pixel;
517     value_list[1] = XCB_EVENT_MASK_KEY_RELEASE | XCB_EVENT_MASK_EXPOSURE;
518 
519     xcb_create_window(info.connection, XCB_COPY_FROM_PARENT, info.window, info.screen->root, 0, 0,
520                       info.width, info.height, 0, XCB_WINDOW_CLASS_INPUT_OUTPUT,
521                       info.screen->root_visual, value_mask, value_list);
522 
523     /* Magic code that will send notification when window is destroyed */
524     xcb_intern_atom_cookie_t cookie = xcb_intern_atom(info.connection, 1, 12, "WM_PROTOCOLS");
525     xcb_intern_atom_reply_t *reply = xcb_intern_atom_reply(info.connection, cookie, 0);
526 
527     xcb_intern_atom_cookie_t cookie2 = xcb_intern_atom(info.connection, 0, 16, "WM_DELETE_WINDOW");
528     info.atom_wm_delete_window = xcb_intern_atom_reply(info.connection, cookie2, 0);
529 
530     xcb_change_property(info.connection, XCB_PROP_MODE_REPLACE, info.window, (*reply).atom, 4, 32,
531                         1, &(*info.atom_wm_delete_window).atom);
532     free(reply);
533 
534     xcb_map_window(info.connection, info.window);
535 
536     // Force the x/y coordinates to 100,100 so that results are identical in
537     // consecutive runs
538     const uint32_t coords[] = {100, 100};
539     xcb_configure_window(info.connection, info.window, XCB_CONFIG_WINDOW_X | XCB_CONFIG_WINDOW_Y,
540                          coords);
541     xcb_flush(info.connection);
542 
543     xcb_generic_event_t *e;
544     while ((e = xcb_wait_for_event(info.connection)))
545     {
546         if ((e->response_type & ~0x80) == XCB_EXPOSE)
547             break;
548     }
549 }
550 
void destroy_window(struct sample_info &info)
552 {
553     vkDestroySurfaceKHR(info.inst, info.surface, NULL);
554     xcb_destroy_window(info.connection, info.window);
555     xcb_disconnect(info.connection);
556 }
557 
558 #endif  // _WIN32
559 
void init_window_size(struct sample_info &info, int32_t default_width, int32_t default_height)
561 {
562 #ifdef __ANDROID__
563     info.mOSWindow = OSWindow::New();
564     ASSERT(info.mOSWindow != nullptr);
565     info.mOSWindow->initialize("VulkanTest", default_width, default_height);
566 #endif
567     info.width  = default_width;
568     info.height = default_height;
569 }
570 
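/*
 * Create the platform-specific VkSurfaceKHR, find queue families that support
 * graphics and present, and choose the surface color format.
 */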
void init_swapchain_extension(struct sample_info &info)
572 {
573     /* DEPENDS on init_connection() and init_window() */
574 
575     VkResult res;
576 
577 // Construct the surface description:
578 #ifdef _WIN32
579     VkWin32SurfaceCreateInfoKHR createInfo = {};
580     createInfo.sType                       = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
581     createInfo.pNext                       = NULL;
582     createInfo.hinstance                   = info.connection;
583     createInfo.hwnd                        = info.window;
584     res = vkCreateWin32SurfaceKHR(info.inst, &createInfo, NULL, &info.surface);
585 #elif defined(__ANDROID__)
586     GET_INSTANCE_PROC_ADDR(info.inst, CreateAndroidSurfaceKHR);
587     VkAndroidSurfaceCreateInfoKHR createInfo;
588     createInfo.sType = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
589     createInfo.pNext = nullptr;
590     createInfo.flags = 0;
591     createInfo.window = info.mOSWindow->getNativeWindow();
592     res = info.fpCreateAndroidSurfaceKHR(info.inst, &createInfo, nullptr, &info.surface);
593 #elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
594     VkWaylandSurfaceCreateInfoKHR createInfo = {};
595     createInfo.sType = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR;
596     createInfo.pNext = NULL;
597     createInfo.display = info.display;
598     createInfo.surface = info.window;
599     res = vkCreateWaylandSurfaceKHR(info.inst, &createInfo, NULL, &info.surface);
600 #else
601     VkXcbSurfaceCreateInfoKHR createInfo = {};
602     createInfo.sType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
603     createInfo.pNext = NULL;
604     createInfo.connection = info.connection;
605     createInfo.window = info.window;
606     res = vkCreateXcbSurfaceKHR(info.inst, &createInfo, NULL, &info.surface);
607 #endif  // __ANDROID__  && _WIN32
608     ASSERT(res == VK_SUCCESS);
609 
610     // Iterate over each queue to learn whether it supports presenting:
611     VkBool32 *pSupportsPresent = (VkBool32 *)malloc(info.queue_family_count * sizeof(VkBool32));
612     for (uint32_t i = 0; i < info.queue_family_count; i++)
613     {
614         vkGetPhysicalDeviceSurfaceSupportKHR(info.gpus[0], i, info.surface, &pSupportsPresent[i]);
615     }
616 
617     // Search for a graphics and a present queue in the array of queue
618     // families, try to find one that supports both
619     info.graphics_queue_family_index = UINT32_MAX;
620     info.present_queue_family_index  = UINT32_MAX;
621     for (uint32_t i = 0; i < info.queue_family_count; ++i)
622     {
623         if ((info.queue_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
624         {
625             if (info.graphics_queue_family_index == UINT32_MAX)
626                 info.graphics_queue_family_index = i;
627 
628             if (pSupportsPresent[i] == VK_TRUE)
629             {
630                 info.graphics_queue_family_index = i;
631                 info.present_queue_family_index  = i;
632                 break;
633             }
634         }
635     }
636 
637     if (info.present_queue_family_index == UINT32_MAX)
638     {
639         // If we didn't find a queue that supports both graphics and present, then
640         // find a separate present queue.
641         for (size_t i = 0; i < info.queue_family_count; ++i)
642             if (pSupportsPresent[i] == VK_TRUE)
643             {
644                 info.present_queue_family_index = i;
645                 break;
646             }
647     }
648     free(pSupportsPresent);
649 
650     // Generate error if could not find queues that support graphics
651     // and present
652     if (info.graphics_queue_family_index == UINT32_MAX ||
653         info.present_queue_family_index == UINT32_MAX)
654     {
655         std::cout << "Could not find a queues for both graphics and present";
656         exit(-1);
657     }
658 
659     // Get the list of VkFormats that are supported:
660     uint32_t formatCount;
661     res = vkGetPhysicalDeviceSurfaceFormatsKHR(info.gpus[0], info.surface, &formatCount, NULL);
662     ASSERT(res == VK_SUCCESS);
663     VkSurfaceFormatKHR *surfFormats =
664         (VkSurfaceFormatKHR *)malloc(formatCount * sizeof(VkSurfaceFormatKHR));
665     res =
666         vkGetPhysicalDeviceSurfaceFormatsKHR(info.gpus[0], info.surface, &formatCount, surfFormats);
667     ASSERT(res == VK_SUCCESS);
668     // If the format list includes just one entry of VK_FORMAT_UNDEFINED,
669     // the surface has no preferred format.  Otherwise, at least one
670     // supported format will be returned.
671     if (formatCount == 1 && surfFormats[0].format == VK_FORMAT_UNDEFINED)
672     {
673         info.format = VK_FORMAT_B8G8R8A8_UNORM;
674     }
675     else
676     {
677         ASSERT(formatCount >= 1);
678         info.format = surfFormats[0].format;
679     }
680     free(surfFormats);
681 }
682 
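/*
 * Create the logical device with a single graphics queue and the requested
 * device extensions, then load its entry points via volk.
 */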
VkResult init_device(struct sample_info &info)
684 {
685     VkResult res;
686     VkDeviceQueueCreateInfo queue_info = {};
687 
688     float queue_priorities[1]   = {0.0};
689     queue_info.sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
690     queue_info.pNext            = NULL;
691     queue_info.queueCount       = 1;
692     queue_info.pQueuePriorities = queue_priorities;
693     queue_info.queueFamilyIndex = info.graphics_queue_family_index;
694 
695     VkDeviceCreateInfo device_info    = {};
696     device_info.sType                 = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
697     device_info.pNext                 = NULL;
698     device_info.queueCreateInfoCount  = 1;
699     device_info.pQueueCreateInfos     = &queue_info;
700     device_info.enabledExtensionCount = info.device_extension_names.size();
701     device_info.ppEnabledExtensionNames =
702         device_info.enabledExtensionCount ? info.device_extension_names.data() : NULL;
703     device_info.pEnabledFeatures = NULL;
704 
705     res = vkCreateDevice(info.gpus[0], &device_info, NULL, &info.device);
706     ASSERT(res == VK_SUCCESS);
707     volkLoadDevice(info.device);
708 
709     return res;
710 }
711 
void init_command_pool(struct sample_info &info, VkCommandPoolCreateFlags cmd_pool_create_flags)
713 {
714     /* DEPENDS on init_swapchain_extension() */
715     VkResult res;
716 
717     VkCommandPoolCreateInfo cmd_pool_info = {};
718     cmd_pool_info.sType                   = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
719     cmd_pool_info.pNext                   = NULL;
720     cmd_pool_info.queueFamilyIndex        = info.graphics_queue_family_index;
721     cmd_pool_info.flags                   = cmd_pool_create_flags;
722 
723     res = vkCreateCommandPool(info.device, &cmd_pool_info, NULL, &info.cmd_pool);
724     ASSERT(res == VK_SUCCESS);
725 }
726 
void init_command_buffer(struct sample_info &info)
728 {
729     /* DEPENDS on init_swapchain_extension() and init_command_pool() */
730     VkResult res;
731 
732     VkCommandBufferAllocateInfo cmd = {};
733     cmd.sType                       = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
734     cmd.pNext                       = NULL;
735     cmd.commandPool                 = info.cmd_pool;
736     cmd.level                       = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
737     cmd.commandBufferCount          = 1;
738 
739     res = vkAllocateCommandBuffers(info.device, &cmd, &info.cmd);
740     ASSERT(res == VK_SUCCESS);
741 }
742 
void init_command_buffer_array(struct sample_info &info, int numBuffers)
744 {
745     /* DEPENDS on init_swapchain_extension() and init_command_pool() */
746     VkResult res;
747     info.cmds.resize(numBuffers);
748     ASSERT(info.cmds.data() != NULL);
749 
750     VkCommandBufferAllocateInfo cmd = {};
751     cmd.sType                       = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
752     cmd.pNext                       = NULL;
753     cmd.commandPool                 = info.cmd_pool;
754     cmd.level                       = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
755     cmd.commandBufferCount          = numBuffers;
756 
757     res = vkAllocateCommandBuffers(info.device, &cmd, info.cmds.data());
758     ASSERT(res == VK_SUCCESS);
759 }
760 
void init_command_buffer2_array(struct sample_info &info, int numBuffers)
762 {
763     /* DEPENDS on init_swapchain_extension() and init_command_pool() */
764     VkResult res;
765     info.cmd2s.resize(numBuffers);
766     VkCommandBufferAllocateInfo cmd = {};
767     cmd.sType                       = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
768     cmd.pNext                       = NULL;
769     cmd.commandPool                 = info.cmd_pool;
770     cmd.level                       = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
771     cmd.commandBufferCount          = numBuffers;
772 
773     res = vkAllocateCommandBuffers(info.device, &cmd, info.cmd2s.data());
774     ASSERT(res == VK_SUCCESS);
775 }
776 
void init_device_queue(struct sample_info &info)
778 {
779     /* DEPENDS on init_swapchain_extension() */
780 
781     vkGetDeviceQueue(info.device, info.graphics_queue_family_index, 0, &info.graphics_queue);
782     if (info.graphics_queue_family_index == info.present_queue_family_index)
783     {
784         info.present_queue = info.graphics_queue;
785     }
786     else
787     {
788         vkGetDeviceQueue(info.device, info.present_queue_family_index, 0, &info.present_queue);
789     }
790 }
791 
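/*
 * Create the swapchain, clamping its extent to the surface capabilities, and
 * create an image view for each swapchain image.
 */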
void init_swap_chain(struct sample_info &info, VkImageUsageFlags usageFlags)
793 {
794     /* DEPENDS on info.cmd and info.queue initialized */
795 
796     VkResult res;
797     VkSurfaceCapabilitiesKHR surfCapabilities;
798 
799     res = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(info.gpus[0], info.surface, &surfCapabilities);
800     ASSERT(res == VK_SUCCESS);
801 
802     uint32_t presentModeCount;
803     res = vkGetPhysicalDeviceSurfacePresentModesKHR(info.gpus[0], info.surface, &presentModeCount,
804                                                     NULL);
805     ASSERT(res == VK_SUCCESS);
806     VkPresentModeKHR *presentModes =
807         (VkPresentModeKHR *)malloc(presentModeCount * sizeof(VkPresentModeKHR));
808     ASSERT(presentModes);
809     res = vkGetPhysicalDeviceSurfacePresentModesKHR(info.gpus[0], info.surface, &presentModeCount,
810                                                     presentModes);
811     ASSERT(res == VK_SUCCESS);
812 
813     VkExtent2D swapchainExtent;
814     // width and height are either both 0xFFFFFFFF, or both not 0xFFFFFFFF.
815     if (surfCapabilities.currentExtent.width == 0xFFFFFFFF)
816     {
817         // If the surface size is undefined, the size is set to
818         // the size of the images requested.
819         swapchainExtent.width  = info.width;
820         swapchainExtent.height = info.height;
821         if (swapchainExtent.width < surfCapabilities.minImageExtent.width)
822         {
823             swapchainExtent.width = surfCapabilities.minImageExtent.width;
824         }
825         else if (swapchainExtent.width > surfCapabilities.maxImageExtent.width)
826         {
827             swapchainExtent.width = surfCapabilities.maxImageExtent.width;
828         }
829 
830         if (swapchainExtent.height < surfCapabilities.minImageExtent.height)
831         {
832             swapchainExtent.height = surfCapabilities.minImageExtent.height;
833         }
834         else if (swapchainExtent.height > surfCapabilities.maxImageExtent.height)
835         {
836             swapchainExtent.height = surfCapabilities.maxImageExtent.height;
837         }
838     }
839     else
840     {
841         // If the surface size is defined, the swap chain size must match
842         swapchainExtent = surfCapabilities.currentExtent;
843     }
844 
845     // The FIFO present mode is guaranteed by the spec to be supported
846     // Also note that the current Android driver only supports FIFO
847     VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;
848 
849     for (uint32_t presentModeIndex = 0; presentModeIndex < presentModeCount; ++presentModeIndex)
850     {
851         if (presentModes[presentModeIndex] == VK_PRESENT_MODE_IMMEDIATE_KHR)
852         {
853             swapchainPresentMode = VK_PRESENT_MODE_IMMEDIATE_KHR;
854             break;
855         }
856     }
857 
858     // Determine the number of VkImage's to use in the swap chain.
859     // We need to acquire only 1 presentable image at a time.
860     // Asking for minImageCount images ensures that we can acquire
861     // 1 presentable image as long as we present it before attempting
862     // to acquire another.
863     uint32_t desiredNumberOfSwapChainImages = surfCapabilities.minImageCount;
864 
865     VkSurfaceTransformFlagBitsKHR preTransform;
866     if (surfCapabilities.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR)
867     {
868         preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
869     }
870     else
871     {
872         preTransform = surfCapabilities.currentTransform;
873     }
874 
875     // Find a supported composite alpha mode - one of these is guaranteed to be set
876     VkCompositeAlphaFlagBitsKHR compositeAlpha         = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
877     VkCompositeAlphaFlagBitsKHR compositeAlphaFlags[4] = {
878         VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
879         VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
880         VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
881         VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR,
882     };
883     for (uint32_t i = 0; i < sizeof(compositeAlphaFlags) / sizeof(compositeAlphaFlags[0]); i++)
884     {
885         if (surfCapabilities.supportedCompositeAlpha & compositeAlphaFlags[i])
886         {
887             compositeAlpha = compositeAlphaFlags[i];
888             break;
889         }
890     }
891 
892     VkSwapchainCreateInfoKHR swapchain_ci = {};
893     swapchain_ci.sType                    = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
894     swapchain_ci.pNext                    = NULL;
895     swapchain_ci.surface                  = info.surface;
896     swapchain_ci.minImageCount            = desiredNumberOfSwapChainImages;
897     swapchain_ci.imageFormat              = info.format;
898     swapchain_ci.imageExtent.width        = swapchainExtent.width;
899     swapchain_ci.imageExtent.height       = swapchainExtent.height;
900     swapchain_ci.preTransform             = preTransform;
901     swapchain_ci.compositeAlpha           = compositeAlpha;
902     swapchain_ci.imageArrayLayers         = 1;
903     swapchain_ci.presentMode              = swapchainPresentMode;
904     swapchain_ci.oldSwapchain             = VK_NULL_HANDLE;
905 #ifndef __ANDROID__
906     swapchain_ci.clipped = true;
907 #else
908     swapchain_ci.clipped = false;
909 #endif
910     swapchain_ci.imageColorSpace       = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
911     swapchain_ci.imageUsage            = usageFlags;
912     swapchain_ci.imageSharingMode      = VK_SHARING_MODE_EXCLUSIVE;
913     swapchain_ci.queueFamilyIndexCount = 0;
914     swapchain_ci.pQueueFamilyIndices   = NULL;
915     uint32_t queueFamilyIndices[2]     = {(uint32_t)info.graphics_queue_family_index,
916                                       (uint32_t)info.present_queue_family_index};
917     if (info.graphics_queue_family_index != info.present_queue_family_index)
918     {
919         // If the graphics and present queues are from different queue families,
920         // we either have to explicitly transfer ownership of images between the
921         // queues, or we have to create the swapchain with imageSharingMode
922         // as VK_SHARING_MODE_CONCURRENT
923         swapchain_ci.imageSharingMode      = VK_SHARING_MODE_CONCURRENT;
924         swapchain_ci.queueFamilyIndexCount = 2;
925         swapchain_ci.pQueueFamilyIndices   = queueFamilyIndices;
926     }
927 
928     res = vkCreateSwapchainKHR(info.device, &swapchain_ci, NULL, &info.swap_chain);
929     ASSERT(res == VK_SUCCESS);
930 
931     res = vkGetSwapchainImagesKHR(info.device, info.swap_chain, &info.swapchainImageCount, NULL);
932     ASSERT(res == VK_SUCCESS);
933 
934     VkImage *swapchainImages = (VkImage *)malloc(info.swapchainImageCount * sizeof(VkImage));
935     ASSERT(swapchainImages);
936     res = vkGetSwapchainImagesKHR(info.device, info.swap_chain, &info.swapchainImageCount,
937                                   swapchainImages);
938     ASSERT(res == VK_SUCCESS);
939 
940     for (uint32_t i = 0; i < info.swapchainImageCount; i++)
941     {
942         swap_chain_buffer sc_buffer;
943 
944         VkImageViewCreateInfo color_image_view           = {};
945         color_image_view.sType                           = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
946         color_image_view.pNext                           = NULL;
947         color_image_view.format                          = info.format;
948         color_image_view.components.r                    = VK_COMPONENT_SWIZZLE_R;
949         color_image_view.components.g                    = VK_COMPONENT_SWIZZLE_G;
950         color_image_view.components.b                    = VK_COMPONENT_SWIZZLE_B;
951         color_image_view.components.a                    = VK_COMPONENT_SWIZZLE_A;
952         color_image_view.subresourceRange.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
953         color_image_view.subresourceRange.baseMipLevel   = 0;
954         color_image_view.subresourceRange.levelCount     = 1;
955         color_image_view.subresourceRange.baseArrayLayer = 0;
956         color_image_view.subresourceRange.layerCount     = 1;
957         color_image_view.viewType                        = VK_IMAGE_VIEW_TYPE_2D;
958         color_image_view.flags                           = 0;
959 
960         sc_buffer.image = swapchainImages[i];
961 
962         color_image_view.image = sc_buffer.image;
963 
964         res = vkCreateImageView(info.device, &color_image_view, NULL, &sc_buffer.view);
965         info.buffers.push_back(sc_buffer);
966         ASSERT(res == VK_SUCCESS);
967     }
968     free(swapchainImages);
969     info.current_buffer = 0;
970 
971     if (NULL != presentModes)
972     {
973         free(presentModes);
974     }
975 }
976 
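/*
 * Find a memory type index that is permitted by typeBits and has all the
 * bits in requirements_mask set; returns false if none matches.
 */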
bool memory_type_from_properties(struct sample_info &info,
978                                  uint32_t typeBits,
979                                  VkFlags requirements_mask,
980                                  uint32_t *typeIndex)
981 {
982     // Search memtypes to find first index with those properties
983     for (uint32_t i = 0; i < info.memory_properties.memoryTypeCount; i++)
984     {
985         if ((typeBits & 1) == 1)
986         {
987             // Type is available, does it match user properties?
988             if ((info.memory_properties.memoryTypes[i].propertyFlags & requirements_mask) ==
989                 requirements_mask)
990             {
991                 *typeIndex = i;
992                 return true;
993             }
994         }
995         typeBits >>= 1;
996     }
997     // No memory types matched, return failure
998     return false;
999 }
1000 
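/*
 * Create the depth image, back it with device-local memory and create a
 * depth (and, where applicable, stencil) image view.
 */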
void init_depth_buffer(struct sample_info &info)
1002 {
1003     VkResult res;
1004     bool pass;
1005     VkImageCreateInfo image_info = {};
1006 
1007 /* allow custom depth formats */
1008 #ifdef __ANDROID__
1009     // Depth format needs to be VK_FORMAT_D24_UNORM_S8_UINT on Android.
1010     info.depth.format = VK_FORMAT_D24_UNORM_S8_UINT;
1011 #else
1012     if (info.depth.format == VK_FORMAT_UNDEFINED)
1013         info.depth.format = VK_FORMAT_D16_UNORM;
1014 #endif
1015 
1016     const VkFormat depth_format = info.depth.format;
1017     VkFormatProperties props;
1018     vkGetPhysicalDeviceFormatProperties(info.gpus[0], depth_format, &props);
1019     if (props.linearTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
1020     {
1021         image_info.tiling = VK_IMAGE_TILING_LINEAR;
1022     }
1023     else if (props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
1024     {
1025         image_info.tiling = VK_IMAGE_TILING_OPTIMAL;
1026     }
1027     else
1028     {
1029         /* Try other depth formats? */
1030         std::cout << "depth_format " << depth_format << " Unsupported.\n";
1031         exit(-1);
1032     }
1033 
1034     image_info.sType                 = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
1035     image_info.pNext                 = NULL;
1036     image_info.imageType             = VK_IMAGE_TYPE_2D;
1037     image_info.format                = depth_format;
1038     image_info.extent.width          = info.width;
1039     image_info.extent.height         = info.height;
1040     image_info.extent.depth          = 1;
1041     image_info.mipLevels             = 1;
1042     image_info.arrayLayers           = 1;
1043     image_info.samples               = NUM_SAMPLES;
1044     image_info.initialLayout         = VK_IMAGE_LAYOUT_UNDEFINED;
1045     image_info.queueFamilyIndexCount = 0;
1046     image_info.pQueueFamilyIndices   = NULL;
1047     image_info.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
1048     image_info.usage                 = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
1049     image_info.flags                 = 0;
1050 
1051     VkMemoryAllocateInfo mem_alloc = {};
1052     mem_alloc.sType                = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1053     mem_alloc.pNext                = NULL;
1054     mem_alloc.allocationSize       = 0;
1055     mem_alloc.memoryTypeIndex      = 0;
1056 
1057     VkImageViewCreateInfo view_info           = {};
1058     view_info.sType                           = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
1059     view_info.pNext                           = NULL;
1060     view_info.image                           = VK_NULL_HANDLE;
1061     view_info.format                          = depth_format;
1062     view_info.components.r                    = VK_COMPONENT_SWIZZLE_R;
1063     view_info.components.g                    = VK_COMPONENT_SWIZZLE_G;
1064     view_info.components.b                    = VK_COMPONENT_SWIZZLE_B;
1065     view_info.components.a                    = VK_COMPONENT_SWIZZLE_A;
1066     view_info.subresourceRange.aspectMask     = VK_IMAGE_ASPECT_DEPTH_BIT;
1067     view_info.subresourceRange.baseMipLevel   = 0;
1068     view_info.subresourceRange.levelCount     = 1;
1069     view_info.subresourceRange.baseArrayLayer = 0;
1070     view_info.subresourceRange.layerCount     = 1;
1071     view_info.viewType                        = VK_IMAGE_VIEW_TYPE_2D;
1072     view_info.flags                           = 0;
1073 
1074     if (depth_format == VK_FORMAT_D16_UNORM_S8_UINT ||
1075         depth_format == VK_FORMAT_D24_UNORM_S8_UINT || depth_format == VK_FORMAT_D32_SFLOAT_S8_UINT)
1076     {
1077         view_info.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
1078     }
1079 
1080     VkMemoryRequirements mem_reqs;
1081 
1082     /* Create image */
1083     res = vkCreateImage(info.device, &image_info, NULL, &info.depth.image);
1084     ASSERT(res == VK_SUCCESS);
1085 
1086     vkGetImageMemoryRequirements(info.device, info.depth.image, &mem_reqs);
1087 
1088     mem_alloc.allocationSize = mem_reqs.size;
1089     /* Use the memory properties to determine the type of memory required */
1090     pass = memory_type_from_properties(info, mem_reqs.memoryTypeBits,
1091                                        VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
1092                                        &mem_alloc.memoryTypeIndex);
1093     ASSERT(pass);
1094 
1095     /* Allocate memory */
1096     res = vkAllocateMemory(info.device, &mem_alloc, NULL, &info.depth.mem);
1097     ASSERT(res == VK_SUCCESS);
1098 
1099     /* Bind memory */
1100     res = vkBindImageMemory(info.device, info.depth.image, info.depth.mem, 0);
1101     ASSERT(res == VK_SUCCESS);
1102 
1103     /* Create image view */
1104     view_info.image = info.depth.image;
1105     res             = vkCreateImageView(info.device, &view_info, NULL, &info.depth.view);
1106     ASSERT(res == VK_SUCCESS);
1107 }
1108 
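/*
 * Create a host-visible, coherent uniform buffer holding the 4x4 MVP matrix
 * and copy the matrix into it.
 */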
void init_uniform_buffer(struct sample_info &info)
1110 {
1111     VkResult res;
1112     bool pass;
1113 
1114     info.MVP = {1.0f, 0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f,
1115                 0.0f, 0.0f, 0.5f, 0.0f, 0.0f, 0.0f,  0.5f, 1.0f};
1116 
1117     /* VULKAN_KEY_START */
1118     VkBufferCreateInfo buf_info    = {};
1119     buf_info.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
1120     buf_info.pNext                 = NULL;
1121     buf_info.usage                 = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
1122     buf_info.size                  = sizeof(float) * 16;  // info.MVP.data() size
1123     buf_info.queueFamilyIndexCount = 0;
1124     buf_info.pQueueFamilyIndices   = NULL;
1125     buf_info.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
1126     buf_info.flags                 = 0;
1127     res = vkCreateBuffer(info.device, &buf_info, NULL, &info.uniform_data.buf);
1128     ASSERT(res == VK_SUCCESS);
1129 
1130     VkMemoryRequirements mem_reqs;
1131     vkGetBufferMemoryRequirements(info.device, info.uniform_data.buf, &mem_reqs);
1132 
1133     VkMemoryAllocateInfo alloc_info = {};
1134     alloc_info.sType                = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1135     alloc_info.pNext                = NULL;
1136     alloc_info.memoryTypeIndex      = 0;
1137 
1138     alloc_info.allocationSize = mem_reqs.size;
1139     pass                      = memory_type_from_properties(
1140         info, mem_reqs.memoryTypeBits,
1141         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
1142         &alloc_info.memoryTypeIndex);
1143     ASSERT(pass && "No mappable, coherent memory");
1144 
1145     res = vkAllocateMemory(info.device, &alloc_info, NULL, &(info.uniform_data.mem));
1146     ASSERT(res == VK_SUCCESS);
1147 
1148     uint8_t *pData;
1149     res = vkMapMemory(info.device, info.uniform_data.mem, 0, mem_reqs.size, 0, (void **)&pData);
1150     ASSERT(res == VK_SUCCESS);
1151 
1152     memcpy(pData, info.MVP.data(), sizeof(float) * 16);  // info.MVP.data() size
1153 
1154     vkUnmapMemory(info.device, info.uniform_data.mem);
1155 
1156     res = vkBindBufferMemory(info.device, info.uniform_data.buf, info.uniform_data.mem, 0);
1157     ASSERT(res == VK_SUCCESS);
1158 
1159     info.uniform_data.buffer_info.buffer = info.uniform_data.buf;
1160     info.uniform_data.buffer_info.offset = 0;
1161     info.uniform_data.buffer_info.range  = sizeof(float) * 16;  // info.MVP.data() size
1162 }
1163 
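/*
 * Create the descriptor set layout (a uniform buffer binding plus an optional
 * combined image sampler) and a pipeline layout that uses it.
 */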
void init_descriptor_and_pipeline_layouts(struct sample_info &info,
1165                                           bool use_texture,
1166                                           VkDescriptorSetLayoutCreateFlags descSetLayoutCreateFlags)
1167 {
1168     VkDescriptorSetLayoutBinding layout_bindings[2];
1169     layout_bindings[0].binding            = 0;
1170     layout_bindings[0].descriptorType     = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
1171     layout_bindings[0].descriptorCount    = 1;
1172     layout_bindings[0].stageFlags         = VK_SHADER_STAGE_VERTEX_BIT;
1173     layout_bindings[0].pImmutableSamplers = NULL;
1174 
1175     if (use_texture)
1176     {
1177         layout_bindings[1].binding            = 1;
1178         layout_bindings[1].descriptorType     = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
1179         layout_bindings[1].descriptorCount    = 1;
1180         layout_bindings[1].stageFlags         = VK_SHADER_STAGE_FRAGMENT_BIT;
1181         layout_bindings[1].pImmutableSamplers = NULL;
1182     }
1183 
1184     /* Next take layout bindings and use them to create a descriptor set layout
1185      */
1186     VkDescriptorSetLayoutCreateInfo descriptor_layout = {};
1187     descriptor_layout.sType        = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
1188     descriptor_layout.pNext        = NULL;
1189     descriptor_layout.flags        = descSetLayoutCreateFlags;
1190     descriptor_layout.bindingCount = use_texture ? 2 : 1;
1191     descriptor_layout.pBindings    = layout_bindings;
1192 
1193     VkResult res;
1194 
1195     info.desc_layout.resize(NUM_DESCRIPTOR_SETS);
1196     res =
1197         vkCreateDescriptorSetLayout(info.device, &descriptor_layout, NULL, info.desc_layout.data());
1198     ASSERT(res == VK_SUCCESS);
1199 
1200     /* Now use the descriptor layout to create a pipeline layout */
1201     VkPipelineLayoutCreateInfo pPipelineLayoutCreateInfo = {};
1202     pPipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
1203     pPipelineLayoutCreateInfo.pNext = NULL;
1204     pPipelineLayoutCreateInfo.pushConstantRangeCount = 0;
1205     pPipelineLayoutCreateInfo.pPushConstantRanges    = NULL;
1206     pPipelineLayoutCreateInfo.setLayoutCount         = NUM_DESCRIPTOR_SETS;
1207     pPipelineLayoutCreateInfo.pSetLayouts            = info.desc_layout.data();
1208 
1209     res = vkCreatePipelineLayout(info.device, &pPipelineLayoutCreateInfo, NULL,
1210                                  &info.pipeline_layout);
1211     ASSERT(res == VK_SUCCESS);
1212 }
1213 
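/*
 * Create a render pass with one color attachment, an optional depth/stencil
 * attachment and a single graphics subpass.
 */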
void init_renderpass(struct sample_info &info,
1215                      bool include_depth,
1216                      bool clear,
1217                      VkImageLayout finalLayout)
1218 {
1219     /* DEPENDS on init_swap_chain() and init_depth_buffer() */
1220 
1221     VkResult res;
1222     /* Need attachments for render target and depth buffer */
1223     VkAttachmentDescription attachments[2];
1224     attachments[0].format        = info.format;
1225     attachments[0].samples       = NUM_SAMPLES;
1226     attachments[0].loadOp        = clear ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
1227     attachments[0].storeOp       = VK_ATTACHMENT_STORE_OP_STORE;
1228     attachments[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
1229     attachments[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
1230     attachments[0].initialLayout  = VK_IMAGE_LAYOUT_UNDEFINED;
1231     attachments[0].finalLayout    = finalLayout;
1232     attachments[0].flags          = 0;
1233 
1234     if (include_depth)
1235     {
1236         attachments[1].format  = info.depth.format;
1237         attachments[1].samples = NUM_SAMPLES;
1238         attachments[1].loadOp  = clear ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
1239         attachments[1].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
1240         attachments[1].stencilLoadOp  = VK_ATTACHMENT_LOAD_OP_LOAD;
1241         attachments[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
1242         attachments[1].initialLayout  = VK_IMAGE_LAYOUT_UNDEFINED;
1243         attachments[1].finalLayout    = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
1244         attachments[1].flags          = 0;
1245     }
1246 
1247     VkAttachmentReference color_reference = {};
1248     color_reference.attachment            = 0;
1249     color_reference.layout                = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
1250 
1251     VkAttachmentReference depth_reference = {};
1252     depth_reference.attachment            = 1;
1253     depth_reference.layout                = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
1254 
1255     VkSubpassDescription subpass    = {};
1256     subpass.pipelineBindPoint       = VK_PIPELINE_BIND_POINT_GRAPHICS;
1257     subpass.flags                   = 0;
1258     subpass.inputAttachmentCount    = 0;
1259     subpass.pInputAttachments       = NULL;
1260     subpass.colorAttachmentCount    = 1;
1261     subpass.pColorAttachments       = &color_reference;
1262     subpass.pResolveAttachments     = NULL;
1263     subpass.pDepthStencilAttachment = include_depth ? &depth_reference : NULL;
1264     subpass.preserveAttachmentCount = 0;
1265     subpass.pPreserveAttachments    = NULL;
1266 
1267     VkRenderPassCreateInfo rp_info = {};
1268     rp_info.sType                  = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
1269     rp_info.pNext                  = NULL;
1270     rp_info.attachmentCount        = include_depth ? 2 : 1;
1271     rp_info.pAttachments           = attachments;
1272     rp_info.subpassCount           = 1;
1273     rp_info.pSubpasses             = &subpass;
1274     rp_info.dependencyCount        = 0;
1275     rp_info.pDependencies          = NULL;
1276 
1277     res = vkCreateRenderPass(info.device, &rp_info, NULL, &info.render_pass);
1278     ASSERT(res == VK_SUCCESS);
1279 }
1280 
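/*
 * Create one framebuffer per swapchain image, attaching its color view and,
 * optionally, the shared depth view.
 */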
void init_framebuffers(struct sample_info &info, bool include_depth)
1282 {
1283     /* DEPENDS on init_depth_buffer(), init_renderpass() and
1284      * init_swapchain_extension() */
1285 
1286     VkResult res;
1287     VkImageView attachments[2];
1288     attachments[1] = info.depth.view;
1289 
1290     VkFramebufferCreateInfo fb_info = {};
1291     fb_info.sType                   = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
1292     fb_info.pNext                   = NULL;
1293     fb_info.renderPass              = info.render_pass;
1294     fb_info.attachmentCount         = include_depth ? 2 : 1;
1295     fb_info.pAttachments            = attachments;
1296     fb_info.width                   = info.width;
1297     fb_info.height                  = info.height;
1298     fb_info.layers                  = 1;
1299 
1300     uint32_t i;
1301 
1302     info.framebuffers = (VkFramebuffer *)malloc(info.swapchainImageCount * sizeof(VkFramebuffer));
1303 
1304     for (i = 0; i < info.swapchainImageCount; i++)
1305     {
1306         attachments[0] = info.buffers[i].view;
1307         res            = vkCreateFramebuffer(info.device, &fb_info, NULL, &info.framebuffers[i]);
1308         ASSERT(res == VK_SUCCESS);
1309     }
1310 }
1311 
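/*
 * Creates a host-visible, host-coherent vertex buffer, copies vertexData
 * into it, and records the vertex input state used by the samples:
 * binding 0 with a vec4 position at offset 0 and either a vec2 UV
 * (use_texture) or a vec4 color at offset 16. A matching vertex layout
 * (a sketch, not defined in this file) would look like:
 *
 *     struct Vertex { float posX, posY, posZ, posW;   // location 0
 *                     float r, g, b, a; };            // location 1
 */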
1312 void init_vertex_buffer(struct sample_info &info,
1313                         const void *vertexData,
1314                         uint32_t dataSize,
1315                         uint32_t dataStride,
1316                         bool use_texture)
1317 {
1318     VkResult res;
1319     bool pass;
1320 
1321     VkBufferCreateInfo buf_info    = {};
1322     buf_info.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
1323     buf_info.pNext                 = NULL;
1324     buf_info.usage                 = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
1325     buf_info.size                  = dataSize;
1326     buf_info.queueFamilyIndexCount = 0;
1327     buf_info.pQueueFamilyIndices   = NULL;
1328     buf_info.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
1329     buf_info.flags                 = 0;
1330     res = vkCreateBuffer(info.device, &buf_info, NULL, &info.vertex_buffer.buf);
1331     ASSERT(res == VK_SUCCESS);
1332 
1333     VkMemoryRequirements mem_reqs;
1334     vkGetBufferMemoryRequirements(info.device, info.vertex_buffer.buf, &mem_reqs);
1335 
1336     VkMemoryAllocateInfo alloc_info = {};
1337     alloc_info.sType                = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1338     alloc_info.pNext                = NULL;
1339     alloc_info.memoryTypeIndex      = 0;
1340 
1341     alloc_info.allocationSize = mem_reqs.size;
1342     pass                      = memory_type_from_properties(
1343         info, mem_reqs.memoryTypeBits,
1344         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
1345         &alloc_info.memoryTypeIndex);
1346     ASSERT(pass && "No mappable, coherent memory");
1347 
1348     res = vkAllocateMemory(info.device, &alloc_info, NULL, &(info.vertex_buffer.mem));
1349     ASSERT(res == VK_SUCCESS);
1350     info.vertex_buffer.buffer_info.range  = mem_reqs.size;
1351     info.vertex_buffer.buffer_info.offset = 0;
1352 
1353     uint8_t *pData;
1354     res = vkMapMemory(info.device, info.vertex_buffer.mem, 0, mem_reqs.size, 0, (void **)&pData);
1355     ASSERT(res == VK_SUCCESS);
1356 
1357     memcpy(pData, vertexData, dataSize);
1358 
1359     vkUnmapMemory(info.device, info.vertex_buffer.mem);
1360 
1361     res = vkBindBufferMemory(info.device, info.vertex_buffer.buf, info.vertex_buffer.mem, 0);
1362     ASSERT(res == VK_SUCCESS);
1363 
1364     info.vi_binding.binding   = 0;
1365     info.vi_binding.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
1366     info.vi_binding.stride    = dataStride;
1367 
1368     info.vi_attribs[0].binding  = 0;
1369     info.vi_attribs[0].location = 0;
1370     info.vi_attribs[0].format   = VK_FORMAT_R32G32B32A32_SFLOAT;
1371     info.vi_attribs[0].offset   = 0;
1372     info.vi_attribs[1].binding  = 0;
1373     info.vi_attribs[1].location = 1;
1374     info.vi_attribs[1].format =
1375         use_texture ? VK_FORMAT_R32G32_SFLOAT : VK_FORMAT_R32G32B32A32_SFLOAT;
1376     info.vi_attribs[1].offset = 16;
1377 }
1378 
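/*
 * Creates a descriptor pool sized for a single descriptor set holding one
 * uniform buffer and, when use_texture is set, one combined image sampler.
 */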
1379 void init_descriptor_pool(struct sample_info &info, bool use_texture)
1380 {
1381     /* DEPENDS on init_uniform_buffer() and
1382      * init_descriptor_and_pipeline_layouts() */
1383 
1384     VkResult res;
1385     VkDescriptorPoolSize type_count[2];
1386     type_count[0].type            = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
1387     type_count[0].descriptorCount = 1;
1388     if (use_texture)
1389     {
1390         type_count[1].type            = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
1391         type_count[1].descriptorCount = 1;
1392     }
1393 
1394     VkDescriptorPoolCreateInfo descriptor_pool = {};
1395     descriptor_pool.sType                      = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
1396     descriptor_pool.pNext                      = NULL;
1397     descriptor_pool.maxSets                    = 1;
1398     descriptor_pool.poolSizeCount              = use_texture ? 2 : 1;
1399     descriptor_pool.pPoolSizes                 = type_count;
1400 
1401     res = vkCreateDescriptorPool(info.device, &descriptor_pool, NULL, &info.desc_pool);
1402     ASSERT(res == VK_SUCCESS);
1403 }
1404 
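/*
 * Allocates NUM_DESCRIPTOR_SETS descriptor sets from the pool created by
 * init_descriptor_pool() and writes the uniform-buffer binding (binding 0).
 * Only the uniform-buffer write is issued here; a sampler write would have
 * to be added for textured pipelines.
 */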
1405 void init_descriptor_set(struct sample_info &info)
1406 {
1407     /* DEPENDS on init_descriptor_pool() */
1408 
1409     VkResult res;
1410 
1411     VkDescriptorSetAllocateInfo alloc_info[1];
1412     alloc_info[0].sType              = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
1413     alloc_info[0].pNext              = NULL;
1414     alloc_info[0].descriptorPool     = info.desc_pool;
1415     alloc_info[0].descriptorSetCount = NUM_DESCRIPTOR_SETS;
1416     alloc_info[0].pSetLayouts        = info.desc_layout.data();
1417 
1418     info.desc_set.resize(NUM_DESCRIPTOR_SETS);
1419     res = vkAllocateDescriptorSets(info.device, alloc_info, info.desc_set.data());
1420     ASSERT(res == VK_SUCCESS);
1421 
1422     VkWriteDescriptorSet writes[2];
1423 
1424     writes[0]                 = {};
1425     writes[0].sType           = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
1426     writes[0].pNext           = NULL;
1427     writes[0].dstSet          = info.desc_set[0];
1428     writes[0].descriptorCount = 1;
1429     writes[0].descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
1430     writes[0].pBufferInfo     = &info.uniform_data.buffer_info;
1431     writes[0].dstArrayElement = 0;
1432     writes[0].dstBinding      = 0;
1433 
1434     vkUpdateDescriptorSets(info.device, 1, writes, 0, NULL);
1435 }
1436 
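/*
 * Compiles a GLSL shader string for the given stage into SPIR-V using
 * glslang, with SPIR-V and Vulkan semantic rules enabled. On a parse or
 * link failure the info logs are printed and false is returned.
 */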
1437 bool GLSLtoSPV(const VkShaderStageFlagBits shader_type,
1438                const char *pshader,
1439                std::vector<unsigned int> &spirv)
1440 {
1441     EShLanguage stage = FindLanguage(shader_type);
1442     glslang::TShader shader(stage);
1443     glslang::TProgram program;
1444     const char *shaderStrings[1];
1445     TBuiltInResource Resources;
1446     init_resources(Resources);
1447 
1448     // Enable SPIR-V and Vulkan rules when parsing GLSL
1449     EShMessages messages = (EShMessages)(EShMsgSpvRules | EShMsgVulkanRules);
1450 
1451     shaderStrings[0] = pshader;
1452     shader.setStrings(shaderStrings, 1);
1453 
1454     if (!shader.parse(&Resources, 100, false, messages))
1455     {
1456         puts(shader.getInfoLog());
1457         puts(shader.getInfoDebugLog());
1458         return false;  // something didn't work
1459     }
1460 
1461     program.addShader(&shader);
1462 
1463     //
1464     // Program-level processing...
1465     //
1466 
1467     if (!program.link(messages))
1468     {
1469         puts(shader.getInfoLog());
1470         puts(shader.getInfoDebugLog());
1471         fflush(stdout);
1472         return false;
1473     }
1474 
1475     glslang::GlslangToSpv(*program.getIntermediate(stage), spirv);
1476     return true;
1477 }
1478 
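/*
 * Compiles the optional vertex and fragment GLSL sources with GLSLtoSPV()
 * and wraps the resulting SPIR-V in shader modules stored in
 * info.shaderStages[0] and info.shaderStages[1].
 */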
1479 void init_shaders(struct sample_info &info, const char *vertShaderText, const char *fragShaderText)
1480 {
1481     VkResult res;
1482     bool retVal;
1483 
1484     // If no shaders were submitted, just return
1485     if (!(vertShaderText || fragShaderText))
1486         return;
1487 
1488     glslang::InitializeProcess();
1489     VkShaderModuleCreateInfo moduleCreateInfo;
1490 
1491     if (vertShaderText)
1492     {
1493         std::vector<unsigned int> vtx_spv;
1494         info.shaderStages[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
1495         info.shaderStages[0].pNext = NULL;
1496         info.shaderStages[0].pSpecializationInfo = NULL;
1497         info.shaderStages[0].flags               = 0;
1498         info.shaderStages[0].stage               = VK_SHADER_STAGE_VERTEX_BIT;
1499         info.shaderStages[0].pName               = "main";
1500 
1501         retVal = GLSLtoSPV(VK_SHADER_STAGE_VERTEX_BIT, vertShaderText, vtx_spv);
1502         ASSERT(retVal);
1503 
1504         moduleCreateInfo.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
1505         moduleCreateInfo.pNext    = NULL;
1506         moduleCreateInfo.flags    = 0;
1507         moduleCreateInfo.codeSize = vtx_spv.size() * sizeof(unsigned int);
1508         moduleCreateInfo.pCode    = vtx_spv.data();
1509         res                       = vkCreateShaderModule(info.device, &moduleCreateInfo, NULL,
1510                                    &info.shaderStages[0].module);
1511         ASSERT(res == VK_SUCCESS);
1512     }
1513 
1514     if (fragShaderText)
1515     {
1516         std::vector<unsigned int> frag_spv;
1517         info.shaderStages[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
1518         info.shaderStages[1].pNext = NULL;
1519         info.shaderStages[1].pSpecializationInfo = NULL;
1520         info.shaderStages[1].flags               = 0;
1521         info.shaderStages[1].stage               = VK_SHADER_STAGE_FRAGMENT_BIT;
1522         info.shaderStages[1].pName               = "main";
1523 
1524         retVal = GLSLtoSPV(VK_SHADER_STAGE_FRAGMENT_BIT, fragShaderText, frag_spv);
1525         ASSERT(retVal);
1526 
1527         moduleCreateInfo.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
1528         moduleCreateInfo.pNext    = NULL;
1529         moduleCreateInfo.flags    = 0;
1530         moduleCreateInfo.codeSize = frag_spv.size() * sizeof(unsigned int);
1531         moduleCreateInfo.pCode    = frag_spv.data();
1532         res                       = vkCreateShaderModule(info.device, &moduleCreateInfo, NULL,
1533                                    &info.shaderStages[1].module);
1534         ASSERT(res == VK_SUCCESS);
1535     }
1536 
1537     glslang::FinalizeProcess();
1538 }
1539 
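/*
 * Creates an empty pipeline cache. A production application would usually
 * seed pInitialData with a blob saved from an earlier run; a sketch of how
 * such a blob could be retrieved (not done anywhere in these samples):
 *
 *     // size_t size = 0;
 *     // vkGetPipelineCacheData(info.device, info.pipelineCache, &size, NULL);
 *     // std::vector<uint8_t> blob(size);
 *     // vkGetPipelineCacheData(info.device, info.pipelineCache, &size, blob.data());
 */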
1540 void init_pipeline_cache(struct sample_info &info)
1541 {
1542     VkResult res;
1543 
1544     VkPipelineCacheCreateInfo pipelineCache;
1545     pipelineCache.sType           = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
1546     pipelineCache.pNext           = NULL;
1547     pipelineCache.initialDataSize = 0;
1548     pipelineCache.pInitialData    = NULL;
1549     pipelineCache.flags           = 0;
1550     res = vkCreatePipelineCache(info.device, &pipelineCache, NULL, &info.pipelineCache);
1551     ASSERT(res == VK_SUCCESS);
1552 }
1553 
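/*
 * Builds the graphics pipeline used by the samples: triangle lists,
 * back-face culling with clockwise front faces, blending disabled,
 * depth test/write controlled by include_depth, and viewport/scissor
 * left dynamic (except on Android, where static state is used).
 */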
1554 void init_pipeline(struct sample_info &info, VkBool32 include_depth, VkBool32 include_vi)
1555 {
1556     VkResult res;
1557 
1558     VkDynamicState dynamicStateEnables[VK_DYNAMIC_STATE_RANGE_SIZE];
1559     VkPipelineDynamicStateCreateInfo dynamicState = {};
1560     memset(dynamicStateEnables, 0, sizeof dynamicStateEnables);
1561     dynamicState.sType             = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
1562     dynamicState.pNext             = NULL;
1563     dynamicState.pDynamicStates    = dynamicStateEnables;
1564     dynamicState.dynamicStateCount = 0;
1565 
1566     VkPipelineVertexInputStateCreateInfo vi;
1567     memset(&vi, 0, sizeof(vi));
1568     vi.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
1569     if (include_vi)
1570     {
1571         vi.pNext                           = NULL;
1572         vi.flags                           = 0;
1573         vi.vertexBindingDescriptionCount   = 1;
1574         vi.pVertexBindingDescriptions      = &info.vi_binding;
1575         vi.vertexAttributeDescriptionCount = 2;
1576         vi.pVertexAttributeDescriptions    = info.vi_attribs;
1577     }
1578     VkPipelineInputAssemblyStateCreateInfo ia;
1579     ia.sType                  = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
1580     ia.pNext                  = NULL;
1581     ia.flags                  = 0;
1582     ia.primitiveRestartEnable = VK_FALSE;
1583     ia.topology               = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
1584 
1585     VkPipelineRasterizationStateCreateInfo rs;
1586     rs.sType                   = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
1587     rs.pNext                   = NULL;
1588     rs.flags                   = 0;
1589     rs.polygonMode             = VK_POLYGON_MODE_FILL;
1590     rs.cullMode                = VK_CULL_MODE_BACK_BIT;
1591     rs.frontFace               = VK_FRONT_FACE_CLOCKWISE;
1592     rs.depthClampEnable        = VK_FALSE;
1593     rs.rasterizerDiscardEnable = VK_FALSE;
1594     rs.depthBiasEnable         = VK_FALSE;
1595     rs.depthBiasConstantFactor = 0;
1596     rs.depthBiasClamp          = 0;
1597     rs.depthBiasSlopeFactor    = 0;
1598     rs.lineWidth               = 1.0f;
1599 
1600     VkPipelineColorBlendStateCreateInfo cb;
1601     cb.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
1602     cb.flags = 0;
1603     cb.pNext = NULL;
1604     VkPipelineColorBlendAttachmentState att_state[1];
1605     att_state[0].colorWriteMask      = 0xf;
1606     att_state[0].blendEnable         = VK_FALSE;
1607     att_state[0].alphaBlendOp        = VK_BLEND_OP_ADD;
1608     att_state[0].colorBlendOp        = VK_BLEND_OP_ADD;
1609     att_state[0].srcColorBlendFactor = VK_BLEND_FACTOR_ZERO;
1610     att_state[0].dstColorBlendFactor = VK_BLEND_FACTOR_ZERO;
1611     att_state[0].srcAlphaBlendFactor = VK_BLEND_FACTOR_ZERO;
1612     att_state[0].dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO;
1613     cb.attachmentCount               = 1;
1614     cb.pAttachments                  = att_state;
1615     cb.logicOpEnable                 = VK_FALSE;
1616     cb.logicOp                       = VK_LOGIC_OP_NO_OP;
1617     cb.blendConstants[0]             = 1.0f;
1618     cb.blendConstants[1]             = 1.0f;
1619     cb.blendConstants[2]             = 1.0f;
1620     cb.blendConstants[3]             = 1.0f;
1621 
1622     VkPipelineViewportStateCreateInfo vp = {};
1623     vp.sType                             = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
1624     vp.pNext                             = NULL;
1625     vp.flags                             = 0;
1626 #ifndef __ANDROID__
1627     vp.viewportCount                                      = NUM_VIEWPORTS;
1628     dynamicStateEnables[dynamicState.dynamicStateCount++] = VK_DYNAMIC_STATE_VIEWPORT;
1629     vp.scissorCount                                       = NUM_SCISSORS;
1630     dynamicStateEnables[dynamicState.dynamicStateCount++] = VK_DYNAMIC_STATE_SCISSOR;
1631     vp.pScissors                                          = NULL;
1632     vp.pViewports                                         = NULL;
1633 #else
1634     // Temporarily disable dynamic viewport on Android because some drivers do not
1635     // support the feature.
1636     VkViewport viewports;
1637     viewports.minDepth = 0.0f;
1638     viewports.maxDepth = 1.0f;
1639     viewports.x = 0;
1640     viewports.y = 0;
1641     viewports.width = info.width;
1642     viewports.height = info.height;
1643     VkRect2D scissor;
1644     scissor.extent.width = info.width;
1645     scissor.extent.height = info.height;
1646     scissor.offset.x = 0;
1647     scissor.offset.y = 0;
1648     vp.viewportCount = NUM_VIEWPORTS;
1649     vp.scissorCount = NUM_SCISSORS;
1650     vp.pScissors = &scissor;
1651     vp.pViewports = &viewports;
1652 #endif
1653     VkPipelineDepthStencilStateCreateInfo ds;
1654     ds.sType                 = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
1655     ds.pNext                 = NULL;
1656     ds.flags                 = 0;
1657     ds.depthTestEnable       = include_depth;
1658     ds.depthWriteEnable      = include_depth;
1659     ds.depthCompareOp        = VK_COMPARE_OP_LESS_OR_EQUAL;
1660     ds.depthBoundsTestEnable = VK_FALSE;
1661     ds.stencilTestEnable     = VK_FALSE;
1662     ds.back.failOp           = VK_STENCIL_OP_KEEP;
1663     ds.back.passOp           = VK_STENCIL_OP_KEEP;
1664     ds.back.compareOp        = VK_COMPARE_OP_ALWAYS;
1665     ds.back.compareMask      = 0;
1666     ds.back.reference        = 0;
1667     ds.back.depthFailOp      = VK_STENCIL_OP_KEEP;
1668     ds.back.writeMask        = 0;
1669     ds.minDepthBounds        = 0;
1670     ds.maxDepthBounds        = 0;
1671     ds.stencilTestEnable     = VK_FALSE;
1672     ds.front                 = ds.back;
1673 
1674     VkPipelineMultisampleStateCreateInfo ms;
1675     ms.sType                 = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
1676     ms.pNext                 = NULL;
1677     ms.flags                 = 0;
1678     ms.pSampleMask           = NULL;
1679     ms.rasterizationSamples  = NUM_SAMPLES;
1680     ms.sampleShadingEnable   = VK_FALSE;
1681     ms.alphaToCoverageEnable = VK_FALSE;
1682     ms.alphaToOneEnable      = VK_FALSE;
1683     ms.minSampleShading      = 0.0;
1684 
1685     VkGraphicsPipelineCreateInfo pipeline;
1686     pipeline.sType               = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
1687     pipeline.pNext               = NULL;
1688     pipeline.layout              = info.pipeline_layout;
1689     pipeline.basePipelineHandle  = VK_NULL_HANDLE;
1690     pipeline.basePipelineIndex   = 0;
1691     pipeline.flags               = 0;
1692     pipeline.pVertexInputState   = &vi;
1693     pipeline.pInputAssemblyState = &ia;
1694     pipeline.pRasterizationState = &rs;
1695     pipeline.pColorBlendState    = &cb;
1696     pipeline.pTessellationState  = NULL;
1697     pipeline.pMultisampleState   = &ms;
1698     pipeline.pDynamicState       = &dynamicState;
1699     pipeline.pViewportState      = &vp;
1700     pipeline.pDepthStencilState  = &ds;
1701     pipeline.pStages             = info.shaderStages;
1702     pipeline.stageCount          = 2;
1703     pipeline.renderPass          = info.render_pass;
1704     pipeline.subpass             = 0;
1705 
1706     res = vkCreateGraphicsPipelines(info.device, info.pipelineCache, 1, &pipeline, NULL,
1707                                     &info.pipeline);
1708     ASSERT(res == VK_SUCCESS);
1709 }
1710 
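/*
 * The viewport and scissor helpers below record dynamic state covering the
 * full framebuffer. The three variants of each differ only in which command
 * buffer they target: info.cmd, info.cmds[index], or info.cmd2s[index].
 */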
1711 void init_viewports(struct sample_info &info)
1712 {
1713 #ifdef __ANDROID__
1714 // Disable dynamic viewport on Android. Some drivers have an issue with the dynamic viewport
1715 // feature.
1716 #else
1717     info.viewport.height = (float)info.height;
1718     info.viewport.width = (float)info.width;
1719     info.viewport.minDepth = (float)0.0f;
1720     info.viewport.maxDepth = (float)1.0f;
1721     info.viewport.x = 0;
1722     info.viewport.y = 0;
1723     vkCmdSetViewport(info.cmd, 0, NUM_VIEWPORTS, &info.viewport);
1724 #endif
1725 }
1726 
1727 void init_viewports_array(struct sample_info &info, int index)
1728 {
1729 #ifdef __ANDROID__
1730 // Disable dynamic viewport on Android. Some drivers have an issue with the dynamic viewport
1731 // feature.
1732 #else
1733     info.viewport.height = (float)info.height;
1734     info.viewport.width = (float)info.width;
1735     info.viewport.minDepth = (float)0.0f;
1736     info.viewport.maxDepth = (float)1.0f;
1737     info.viewport.x = 0;
1738     info.viewport.y = 0;
1739     vkCmdSetViewport(info.cmds[index], 0, NUM_VIEWPORTS, &info.viewport);
1740 #endif
1741 }
1742 
1743 void init_viewports2_array(struct sample_info &info, int index)
1744 {
1745 #ifdef __ANDROID__
1746 // Disable dynamic viewport on Android. Some drivers have an issue with the dynamic viewport
1747 // feature.
1748 #else
1749     info.viewport.height = (float)info.height;
1750     info.viewport.width = (float)info.width;
1751     info.viewport.minDepth = (float)0.0f;
1752     info.viewport.maxDepth = (float)1.0f;
1753     info.viewport.x = 0;
1754     info.viewport.y = 0;
1755     vkCmdSetViewport(info.cmd2s[index], 0, NUM_VIEWPORTS, &info.viewport);
1756 #endif
1757 }
1758 
1759 void init_scissors(struct sample_info &info)
1760 {
1761 #ifdef __ANDROID__
1762 // Disable dynamic scissor on Android. Some drivers have an issue with the dynamic scissor
1763 // feature.
1764 #else
1765     info.scissor.extent.width = info.width;
1766     info.scissor.extent.height = info.height;
1767     info.scissor.offset.x = 0;
1768     info.scissor.offset.y = 0;
1769     vkCmdSetScissor(info.cmd, 0, NUM_SCISSORS, &info.scissor);
1770 #endif
1771 }
1772 
1773 void init_scissors_array(struct sample_info &info, int index)
1774 {
1775 #ifdef __ANDROID__
1776 // Disable dynamic scissor on Android. Some drivers have an issue with the dynamic scissor
1777 // feature.
1778 #else
1779     info.scissor.extent.width = info.width;
1780     info.scissor.extent.height = info.height;
1781     info.scissor.offset.x = 0;
1782     info.scissor.offset.y = 0;
1783     vkCmdSetScissor(info.cmds[index], 0, NUM_SCISSORS, &info.scissor);
1784 #endif
1785 }
1786 
1787 void init_scissors2_array(struct sample_info &info, int index)
1788 {
1789 #ifdef __ANDROID__
1790 // Disable dynamic scissor on Android. Some drivers have an issue with the dynamic scissor
1791 // feature.
1792 #else
1793     info.scissor.extent.width = info.width;
1794     info.scissor.extent.height = info.height;
1795     info.scissor.offset.x = 0;
1796     info.scissor.offset.y = 0;
1797     vkCmdSetScissor(info.cmd2s[index], 0, NUM_SCISSORS, &info.scissor);
1798 #endif
1799 }
1800 
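/*
 * The destroy_* and reset_* helpers below release the resources created by
 * the matching init_* functions. Callers are expected to tear things down
 * in roughly the reverse order of creation, e.g. (illustrative only):
 *
 *     destroy_pipeline(info);
 *     destroy_pipeline_cache(info);
 *     destroy_framebuffers(info);
 *     destroy_renderpass(info);
 *     destroy_device(info);
 *     destroy_instance(info);
 */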
1801 void destroy_pipeline(struct sample_info &info)
1802 {
1803     vkDestroyPipeline(info.device, info.pipeline, NULL);
1804 }
1805 
1806 void destroy_pipeline_cache(struct sample_info &info)
1807 {
1808     vkDestroyPipelineCache(info.device, info.pipelineCache, NULL);
1809 }
1810 
1811 void destroy_uniform_buffer(struct sample_info &info)
1812 {
1813     vkDestroyBuffer(info.device, info.uniform_data.buf, NULL);
1814     vkFreeMemory(info.device, info.uniform_data.mem, NULL);
1815 }
1816 
1817 void destroy_descriptor_and_pipeline_layouts(struct sample_info &info)
1818 {
1819     for (int i = 0; i < NUM_DESCRIPTOR_SETS; i++)
1820         vkDestroyDescriptorSetLayout(info.device, info.desc_layout[i], NULL);
1821     vkDestroyPipelineLayout(info.device, info.pipeline_layout, NULL);
1822 }
1823 
1824 void destroy_descriptor_pool(struct sample_info &info)
1825 {
1826     vkDestroyDescriptorPool(info.device, info.desc_pool, NULL);
1827 }
1828 
1829 void destroy_shaders(struct sample_info &info)
1830 {
1831     vkDestroyShaderModule(info.device, info.shaderStages[0].module, NULL);
1832     vkDestroyShaderModule(info.device, info.shaderStages[1].module, NULL);
1833 }
1834 
1835 void destroy_command_buffer(struct sample_info &info)
1836 {
1837     VkCommandBuffer cmd_bufs[1] = {info.cmd};
1838     vkFreeCommandBuffers(info.device, info.cmd_pool, 1, cmd_bufs);
1839 }
1840 
1841 void destroy_command_buffer_array(struct sample_info &info, int numBuffers)
1842 {
1843     vkFreeCommandBuffers(info.device, info.cmd_pool, numBuffers, info.cmds.data());
1844 }
1845 
1846 void reset_command_buffer2_array(struct sample_info &info,
1847                                  VkCommandBufferResetFlags cmd_buffer_reset_flags)
1848 {
1849     for (auto cb : info.cmd2s)
1850     {
1851         vkResetCommandBuffer(cb, cmd_buffer_reset_flags);
1852     }
1853 }
1854 
1855 void destroy_command_buffer2_array(struct sample_info &info, int numBuffers)
1856 {
1857     vkFreeCommandBuffers(info.device, info.cmd_pool, numBuffers, info.cmd2s.data());
1858 }
1859 
1860 void reset_command_pool(struct sample_info &info, VkCommandPoolResetFlags cmd_pool_reset_flags)
1861 {
1862     vkResetCommandPool(info.device, info.cmd_pool, cmd_pool_reset_flags);
1863 }
1864 
1865 void destroy_command_pool(struct sample_info &info)
1866 {
1867     vkDestroyCommandPool(info.device, info.cmd_pool, NULL);
1868 }
1869 
1870 void destroy_depth_buffer(struct sample_info &info)
1871 {
1872     vkDestroyImageView(info.device, info.depth.view, NULL);
1873     vkDestroyImage(info.device, info.depth.image, NULL);
1874     vkFreeMemory(info.device, info.depth.mem, NULL);
1875 }
1876 
1877 void destroy_vertex_buffer(struct sample_info &info)
1878 {
1879     vkDestroyBuffer(info.device, info.vertex_buffer.buf, NULL);
1880     vkFreeMemory(info.device, info.vertex_buffer.mem, NULL);
1881 }
1882 
1883 void destroy_swap_chain(struct sample_info &info)
1884 {
1885     for (uint32_t i = 0; i < info.swapchainImageCount; i++)
1886     {
1887         vkDestroyImageView(info.device, info.buffers[i].view, NULL);
1888     }
1889     vkDestroySwapchainKHR(info.device, info.swap_chain, NULL);
1890 }
1891 
1892 void destroy_framebuffers(struct sample_info &info)
1893 {
1894     for (uint32_t i = 0; i < info.swapchainImageCount; i++)
1895     {
1896         vkDestroyFramebuffer(info.device, info.framebuffers[i], NULL);
1897     }
1898     free(info.framebuffers);
1899 }
1900 
1901 void destroy_renderpass(struct sample_info &info)
1902 {
1903     vkDestroyRenderPass(info.device, info.render_pass, NULL);
1904 }
1905 
1906 void destroy_device(struct sample_info &info)
1907 {
1908     vkDeviceWaitIdle(info.device);
1909     vkDestroyDevice(info.device, NULL);
1910 }
1911 
1912 void destroy_instance(struct sample_info &info)
1913 {
1914     vkDestroyInstance(info.inst, NULL);
1915 }
1916