1 /*
2  * Vulkan Samples
3  *
4  * Copyright (C) 2015-2016 Valve Corporation
5  * Copyright (C) 2015-2016 LunarG, Inc.
6  * Copyright (C) 2015-2018 Google, Inc.
7  *
8  * Licensed under the Apache License, Version 2.0 (the "License");
9  * you may not use this file except in compliance with the License.
10  * You may obtain a copy of the License at
11  *
12  *     http://www.apache.org/licenses/LICENSE-2.0
13  *
14  * Unless required by applicable law or agreed to in writing, software
15  * distributed under the License is distributed on an "AS IS" BASIS,
16  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17  * See the License for the specific language governing permissions and
18  * limitations under the License.
19  */
20 
21 /*
22 VULKAN_SAMPLE_DESCRIPTION
23 samples utility functions
24 */
25 
26 #include "vulkan_command_buffer_utils.h"
27 
28 #include <assert.h>
29 #include <string.h>
30 #include <cstdlib>
31 #include <iterator>
32 
33 #if defined(VK_USE_PLATFORM_WAYLAND_KHR)
34 #    include <linux/input.h>
35 #endif
36 
37 using namespace std;
38 
39 /*
40  * Enumerate the instance extensions provided by the given layer and store them in layer_props.
41  */
42 VkResult init_global_extension_properties(layer_properties &layer_props)
43 {
44     VkExtensionProperties *instance_extensions;
45     uint32_t instance_extension_count;
46     VkResult res;
47     char *layer_name = NULL;
48 
49     layer_name = layer_props.properties.layerName;
50 
51     do
52     {
53         res = vkEnumerateInstanceExtensionProperties(layer_name, &instance_extension_count, NULL);
54         if (res)
55             return res;
56 
57         if (instance_extension_count == 0)
58         {
59             return VK_SUCCESS;
60         }
61 
62         layer_props.instance_extensions.resize(instance_extension_count);
63         instance_extensions = layer_props.instance_extensions.data();
64         res = vkEnumerateInstanceExtensionProperties(layer_name, &instance_extension_count,
65                                                      instance_extensions);
66     } while (res == VK_INCOMPLETE);
67 
68     return res;
69 }
70 
71 /*
72  * Enumerate the available instance layers and gather the instance extensions for each one.
73  */
74 VkResult init_global_layer_properties(struct sample_info &info)
75 {
76     uint32_t instance_layer_count;
77     VkLayerProperties *vk_props = NULL;
78     VkResult res;
79 
80     /*
81      * It's possible, though very rare, that the number of
82      * instance layers could change. For example, installing something
83      * could include new layers that the loader would pick up
84      * between the initial query for the count and the
85      * request for VkLayerProperties. The loader indicates that
86      * by returning a VK_INCOMPLETE status and will update
87      * the count parameter.
88      * The count parameter will be updated with the number of
89      * entries loaded into the data pointer - in case the number
90      * of layers went down or is smaller than the size given.
91      */
92     do
93     {
94         res = vkEnumerateInstanceLayerProperties(&instance_layer_count, NULL);
95         if (res)
96             return res;
97 
98         if (instance_layer_count == 0)
99         {
100             return VK_SUCCESS;
101         }
102 
103         vk_props = (VkLayerProperties *)realloc(vk_props,
104                                                 instance_layer_count * sizeof(VkLayerProperties));
105 
106         res = vkEnumerateInstanceLayerProperties(&instance_layer_count, vk_props);
107     } while (res == VK_INCOMPLETE);
108 
109     /*
110      * Now gather the extension list for each instance layer.
111      */
112     for (uint32_t i = 0; i < instance_layer_count; i++)
113     {
114         layer_properties layer_props;
115         layer_props.properties = vk_props[i];
116         res                    = init_global_extension_properties(layer_props);
117         if (res)
118             return res;
119         info.instance_layer_properties.push_back(layer_props);
120     }
121     free(vk_props);
122 
123     return res;
124 }
125 
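/*
 * Enumerate and store the device extensions provided by the given layer,
 * queried against the first enumerated GPU.
 */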
126 VkResult init_device_extension_properties(struct sample_info &info, layer_properties &layer_props)
127 {
128     VkExtensionProperties *device_extensions;
129     uint32_t device_extension_count;
130     VkResult res;
131     char *layer_name = NULL;
132 
133     layer_name = layer_props.properties.layerName;
134 
135     do
136     {
137         res = vkEnumerateDeviceExtensionProperties(info.gpus[0], layer_name,
138                                                    &device_extension_count, NULL);
139         if (res)
140             return res;
141 
142         if (device_extension_count == 0)
143         {
144             return VK_SUCCESS;
145         }
146 
147         layer_props.device_extensions.resize(device_extension_count);
148         device_extensions = layer_props.device_extensions.data();
149         res               = vkEnumerateDeviceExtensionProperties(info.gpus[0], layer_name,
150                                                    &device_extension_count, device_extensions);
151     } while (res == VK_INCOMPLETE);
152 
153     return res;
154 }
155 
156 /*
157  * Return 1 (true) if all layer names specified in check_names
158  * can be found in given layer properties.
159  */
160 VkBool32 demo_check_layers(const std::vector<layer_properties> &layer_props,
161                            const std::vector<const char *> &layer_names)
162 {
163     uint32_t check_count = layer_names.size();
164     uint32_t layer_count = layer_props.size();
165     for (uint32_t i = 0; i < check_count; i++)
166     {
167         VkBool32 found = 0;
168         for (uint32_t j = 0; j < layer_count; j++)
169         {
170             if (!strcmp(layer_names[i], layer_props[j].properties.layerName))
171             {
172                 found = 1;
173             }
174         }
175         if (!found)
176         {
177             std::cout << "Cannot find layer: " << layer_names[i] << std::endl;
178             return 0;
179         }
180     }
181     return 1;
182 }
183 
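/*
 * Select the platform-specific surface extension and, on non-Android builds,
 * the validation layers (falling back to the individual layers when the
 * standard validation meta-layer is not available).
 */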
184 void init_instance_extension_names(struct sample_info &info)
185 {
186     info.instance_extension_names.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
187 #ifdef __ANDROID__
188     info.instance_extension_names.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
189 #elif defined(_WIN32)
190     info.instance_extension_names.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
191 #elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
192     info.instance_extension_names.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
193 #else
194     info.instance_extension_names.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
195 #endif
196 #ifndef __ANDROID__
197     info.instance_layer_names.push_back("VK_LAYER_LUNARG_standard_validation");
198     if (!demo_check_layers(info.instance_layer_properties, info.instance_layer_names))
199     {
200         // If standard validation is not present, search instead for the
201         // individual layers that make it up, in the correct order.
202         //
203 
204         info.instance_layer_names.clear();
205         info.instance_layer_names.push_back("VK_LAYER_GOOGLE_threading");
206         info.instance_layer_names.push_back("VK_LAYER_LUNARG_parameter_validation");
207         info.instance_layer_names.push_back("VK_LAYER_LUNARG_object_tracker");
208         info.instance_layer_names.push_back("VK_LAYER_LUNARG_core_validation");
209         info.instance_layer_names.push_back("VK_LAYER_LUNARG_image");
210         info.instance_layer_names.push_back("VK_LAYER_LUNARG_swapchain");
211         info.instance_layer_names.push_back("VK_LAYER_GOOGLE_unique_objects");
212 
213         if (!demo_check_layers(info.instance_layer_properties, info.instance_layer_names))
214         {
215             exit(1);
216         }
217     }
218 
219     // Enable debug callback extension
220     info.instance_extension_names.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
221 #endif
222 }
223 
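/*
 * Create the VkInstance using the layer and extension names gathered above.
 */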
224 VkResult init_instance(struct sample_info &info, char const *const app_short_name)
225 {
226     VkResult res = VK_SUCCESS;
227 #if ANGLE_SHARED_LIBVULKAN
228     res = volkInitialize();
229     ASSERT(res == VK_SUCCESS);
230 #endif  // ANGLE_SHARED_LIBVULKAN
231     VkApplicationInfo app_info  = {};
232     app_info.sType              = VK_STRUCTURE_TYPE_APPLICATION_INFO;
233     app_info.pNext              = NULL;
234     app_info.pApplicationName   = app_short_name;
235     app_info.applicationVersion = 1;
236     app_info.pEngineName        = app_short_name;
237     app_info.engineVersion      = 1;
238     app_info.apiVersion         = VK_API_VERSION_1_0;
239 
240     VkInstanceCreateInfo inst_info = {};
241     inst_info.sType                = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
242     inst_info.pNext                = NULL;
243     inst_info.flags                = 0;
244     inst_info.pApplicationInfo     = &app_info;
245     inst_info.enabledLayerCount    = info.instance_layer_names.size();
246     inst_info.ppEnabledLayerNames =
247         info.instance_layer_names.size() ? info.instance_layer_names.data() : NULL;
248     inst_info.enabledExtensionCount   = info.instance_extension_names.size();
249     inst_info.ppEnabledExtensionNames = info.instance_extension_names.data();
250 
251     res = vkCreateInstance(&inst_info, NULL, &info.inst);
252     ASSERT(res == VK_SUCCESS);
253 #if ANGLE_SHARED_LIBVULKAN
254     volkLoadInstance(info.inst);
255 #endif  // ANGLE_SHARED_LIBVULKAN
256 
257     return res;
258 }
259 
260 void init_device_extension_names(struct sample_info &info)
261 {
262     info.device_extension_names.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
263 }
264 
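/*
 * Enumerate the physical devices, cache queue family, memory and device
 * properties for the first GPU, and query device extensions for each
 * enabled layer.
 */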
265 VkResult init_enumerate_device(struct sample_info &info, uint32_t gpu_count)
266 {
267     VkResult res = vkEnumeratePhysicalDevices(info.inst, &gpu_count, NULL);
268     ASSERT(gpu_count);
269     info.gpus.resize(gpu_count);
270 
271     res = vkEnumeratePhysicalDevices(info.inst, &gpu_count, info.gpus.data());
272     ASSERT(!res);
273 
274     vkGetPhysicalDeviceQueueFamilyProperties(info.gpus[0], &info.queue_family_count, NULL);
275     ASSERT(info.queue_family_count >= 1);
276 
277     info.queue_props.resize(info.queue_family_count);
278     vkGetPhysicalDeviceQueueFamilyProperties(info.gpus[0], &info.queue_family_count,
279                                              info.queue_props.data());
280     ASSERT(info.queue_family_count >= 1);
281 
282     /* This is as good a place as any to do this */
283     vkGetPhysicalDeviceMemoryProperties(info.gpus[0], &info.memory_properties);
284     vkGetPhysicalDeviceProperties(info.gpus[0], &info.gpu_props);
285     /* query device extensions for enabled layers */
286     for (auto &layer_props : info.instance_layer_properties)
287     {
288         init_device_extension_properties(info, layer_props);
289     }
290 
291     return res;
292 }
293 
294 #if defined(VK_USE_PLATFORM_WAYLAND_KHR)
295 
296 static void handle_ping(void *data, wl_shell_surface *shell_surface, uint32_t serial)
297 {
298     wl_shell_surface_pong(shell_surface, serial);
299 }
300 
301 static void handle_configure(void *data,
302                              wl_shell_surface *shell_surface,
303                              uint32_t edges,
304                              int32_t width,
305                              int32_t height)
306 {}
307 
308 static void handle_popup_done(void *data, wl_shell_surface *shell_surface) {}
309 
310 static const wl_shell_surface_listener shell_surface_listener = {handle_ping, handle_configure,
311                                                                  handle_popup_done};
312 
313 static void registry_handle_global(void *data,
314                                    wl_registry *registry,
315                                    uint32_t id,
316                                    const char *interface,
317                                    uint32_t version)
318 {
319     sample_info *info = (sample_info *)data;
320     // pick up Wayland objects when they appear
321     if (strcmp(interface, "wl_compositor") == 0)
322     {
323         info->compositor =
324             (wl_compositor *)wl_registry_bind(registry, id, &wl_compositor_interface, 1);
325     }
326     else if (strcmp(interface, "wl_shell") == 0)
327     {
328         info->shell = (wl_shell *)wl_registry_bind(registry, id, &wl_shell_interface, 1);
329     }
330 }
331 
332 static void registry_handle_global_remove(void *data, wl_registry *registry, uint32_t name) {}
333 
334 static const wl_registry_listener registry_listener = {registry_handle_global,
335                                                        registry_handle_global_remove};
336 
337 #endif
338 
339 void init_connection(struct sample_info &info)
340 {
341 #if defined(VK_USE_PLATFORM_XCB_KHR)
342     const xcb_setup_t *setup;
343     xcb_screen_iterator_t iter;
344     int scr;
345 
346     info.connection = xcb_connect(NULL, &scr);
347     if (info.connection == NULL || xcb_connection_has_error(info.connection))
348     {
349         std::cout << "Unable to make an XCB connection\n";
350         exit(-1);
351     }
352 
353     setup = xcb_get_setup(info.connection);
354     iter  = xcb_setup_roots_iterator(setup);
355     while (scr-- > 0)
356         xcb_screen_next(&iter);
357 
358     info.screen = iter.data;
359 #elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
360     info.display = wl_display_connect(nullptr);
361 
362     if (info.display == nullptr)
363     {
364         printf(
365             "Cannot find a compatible Vulkan installable client driver "
366             "(ICD).\nExiting ...\n");
367         fflush(stdout);
368         exit(1);
369     }
370 
371     info.registry = wl_display_get_registry(info.display);
372     wl_registry_add_listener(info.registry, &registry_listener, &info);
373     wl_display_dispatch(info.display);
374 #endif
375 }
376 #ifdef _WIN32
377 static void run(struct sample_info *info)
378 { /* Placeholder for samples that want to show dynamic content */
379 }
380 
381 // MS-Windows event handling function:
382 LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
383 {
384     struct sample_info *info =
385         reinterpret_cast<struct sample_info *>(GetWindowLongPtr(hWnd, GWLP_USERDATA));
386 
387     switch (uMsg)
388     {
389         case WM_CLOSE:
390             PostQuitMessage(0);
391             break;
392         case WM_PAINT:
393             run(info);
394             return 0;
395         default:
396             break;
397     }
398     return (DefWindowProc(hWnd, uMsg, wParam, lParam));
399 }
400 
401 void init_window(struct sample_info &info)
402 {
403     WNDCLASSEXA win_class;
404     ASSERT(info.width > 0);
405     ASSERT(info.height > 0);
406 
407     info.connection = GetModuleHandle(NULL);
408     sprintf(info.name, "Sample");
409 
410     // Initialize the window class structure:
411     win_class.cbSize        = sizeof(WNDCLASSEX);
412     win_class.style         = CS_HREDRAW | CS_VREDRAW;
413     win_class.lpfnWndProc   = WndProc;
414     win_class.cbClsExtra    = 0;
415     win_class.cbWndExtra    = 0;
416     win_class.hInstance     = info.connection;  // hInstance
417     win_class.hIcon         = LoadIcon(NULL, IDI_APPLICATION);
418     win_class.hCursor       = LoadCursor(NULL, IDC_ARROW);
419     win_class.hbrBackground = (HBRUSH)GetStockObject(WHITE_BRUSH);
420     win_class.lpszMenuName  = NULL;
421     win_class.lpszClassName = info.name;
422     win_class.hIconSm       = LoadIcon(NULL, IDI_WINLOGO);
423     // Register window class:
424     if (!RegisterClassExA(&win_class))
425     {
426         // It didn't work, so try to give a useful error:
427         printf("Unexpected error trying to start the application!\n");
428         fflush(stdout);
429         exit(1);
430     }
431     // Create window with the registered class:
432     RECT wr = {0, 0, info.width, info.height};
433     AdjustWindowRect(&wr, WS_OVERLAPPEDWINDOW, FALSE);
434     info.window = CreateWindowExA(0,
435                                   info.name,             // class name
436                                   info.name,             // app name
437                                   WS_OVERLAPPEDWINDOW |  // window style
438                                       WS_VISIBLE | WS_SYSMENU,
439                                   100, 100,            // x/y coords
440                                   wr.right - wr.left,  // width
441                                   wr.bottom - wr.top,  // height
442                                   NULL,                // handle to parent
443                                   NULL,                // handle to menu
444                                   info.connection,     // hInstance
445                                   NULL);               // no extra parameters
446     if (!info.window)
447     {
448         // It didn't work, so try to give a useful error:
449         printf("Cannot create a window in which to draw!\n");
450         fflush(stdout);
451         exit(1);
452     }
453     SetWindowLongPtr(info.window, GWLP_USERDATA, (LONG_PTR)&info);
454 }
455 
456 void destroy_window(struct sample_info &info)
457 {
458     vkDestroySurfaceKHR(info.inst, info.surface, NULL);
459     DestroyWindow(info.window);
460     UnregisterClassA(info.name, GetModuleHandle(NULL));
461 }
462 
463 #elif defined(__ANDROID__)
464 // Android implementation.
465 void init_window(struct sample_info &info) {}
466 
467 void destroy_window(struct sample_info &info)
468 {
469     vkDestroySurfaceKHR(info.inst, info.surface, NULL);
470 }
471 
472 #elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
473 
474 void init_window(struct sample_info &info)
475 {
476     ASSERT(info.width > 0);
477     ASSERT(info.height > 0);
478 
479     info.window = wl_compositor_create_surface(info.compositor);
480     if (!info.window)
481     {
482         printf("Can not create wayland_surface from compositor!\n");
483         fflush(stdout);
484         exit(1);
485     }
486 
487     info.shell_surface = wl_shell_get_shell_surface(info.shell, info.window);
488     if (!info.shell_surface)
489     {
490         printf("Can not get shell_surface from wayland_surface!\n");
491         fflush(stdout);
492         exit(1);
493     }
494 
495     wl_shell_surface_add_listener(info.shell_surface, &shell_surface_listener, &info);
496     wl_shell_surface_set_toplevel(info.shell_surface);
497 }
498 
499 void destroy_window(struct sample_info &info)
500 {
501     wl_shell_surface_destroy(info.shell_surface);
502     wl_surface_destroy(info.window);
503     wl_shell_destroy(info.shell);
504     wl_compositor_destroy(info.compositor);
505     wl_registry_destroy(info.registry);
506     wl_display_disconnect(info.display);
507 }
508 
509 #else
510 
511 void init_window(struct sample_info &info)
512 {
513     ASSERT(info.width > 0);
514     ASSERT(info.height > 0);
515 
516     uint32_t value_mask, value_list[32];
517 
518     info.window = xcb_generate_id(info.connection);
519 
520     value_mask = XCB_CW_BACK_PIXEL | XCB_CW_EVENT_MASK;
521     value_list[0] = info.screen->black_pixel;
522     value_list[1] = XCB_EVENT_MASK_KEY_RELEASE | XCB_EVENT_MASK_EXPOSURE;
523 
524     xcb_create_window(info.connection, XCB_COPY_FROM_PARENT, info.window, info.screen->root, 0, 0,
525                       info.width, info.height, 0, XCB_WINDOW_CLASS_INPUT_OUTPUT,
526                       info.screen->root_visual, value_mask, value_list);
527 
528     /* Magic code that will send notification when window is destroyed */
529     xcb_intern_atom_cookie_t cookie = xcb_intern_atom(info.connection, 1, 12, "WM_PROTOCOLS");
530     xcb_intern_atom_reply_t *reply = xcb_intern_atom_reply(info.connection, cookie, 0);
531 
532     xcb_intern_atom_cookie_t cookie2 = xcb_intern_atom(info.connection, 0, 16, "WM_DELETE_WINDOW");
533     info.atom_wm_delete_window = xcb_intern_atom_reply(info.connection, cookie2, 0);
534 
535     xcb_change_property(info.connection, XCB_PROP_MODE_REPLACE, info.window, (*reply).atom, 4, 32,
536                         1, &(*info.atom_wm_delete_window).atom);
537     free(reply);
538 
539     xcb_map_window(info.connection, info.window);
540 
541     // Force the x/y coordinates to 100,100 so that results are identical in
542     // consecutive runs
543     const uint32_t coords[] = {100, 100};
544     xcb_configure_window(info.connection, info.window, XCB_CONFIG_WINDOW_X | XCB_CONFIG_WINDOW_Y,
545                          coords);
546     xcb_flush(info.connection);
547 
548     xcb_generic_event_t *e;
549     while ((e = xcb_wait_for_event(info.connection)))
550     {
551         if ((e->response_type & ~0x80) == XCB_EXPOSE)
552             break;
553     }
554 }
555 
556 void destroy_window(struct sample_info &info)
557 {
558     vkDestroySurfaceKHR(info.inst, info.surface, NULL);
559     xcb_destroy_window(info.connection, info.window);
560     xcb_disconnect(info.connection);
561 }
562 
563 #endif  // _WIN32
564 
565 void init_window_size(struct sample_info &info, int32_t default_width, int32_t default_height)
566 {
567 #ifdef __ANDROID__
568     info.mOSWindow = OSWindow::New();
569     ASSERT(info.mOSWindow != nullptr);
570     info.mOSWindow->initialize("VulkanTest", default_width, default_height);
571 #endif
572     info.width  = default_width;
573     info.height = default_height;
574 }
575 
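/*
 * Create the window-system surface, pick graphics and present queue
 * families, and choose a surface format.
 */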
576 void init_swapchain_extension(struct sample_info &info)
577 {
578     /* DEPENDS on init_connection() and init_window() */
579 
580     VkResult res;
581 
582 // Construct the surface description:
583 #ifdef _WIN32
584     VkWin32SurfaceCreateInfoKHR createInfo = {};
585     createInfo.sType                       = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
586     createInfo.pNext                       = NULL;
587     createInfo.hinstance                   = info.connection;
588     createInfo.hwnd                        = info.window;
589     res = vkCreateWin32SurfaceKHR(info.inst, &createInfo, NULL, &info.surface);
590 #elif defined(__ANDROID__)
591     GET_INSTANCE_PROC_ADDR(info.inst, CreateAndroidSurfaceKHR);
592     VkAndroidSurfaceCreateInfoKHR createInfo;
593     createInfo.sType = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
594     createInfo.pNext = nullptr;
595     createInfo.flags = 0;
596     createInfo.window = info.mOSWindow->getNativeWindow();
597     res = info.fpCreateAndroidSurfaceKHR(info.inst, &createInfo, nullptr, &info.surface);
598 #elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
599     VkWaylandSurfaceCreateInfoKHR createInfo = {};
600     createInfo.sType = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR;
601     createInfo.pNext = NULL;
602     createInfo.display = info.display;
603     createInfo.surface = info.window;
604     res = vkCreateWaylandSurfaceKHR(info.inst, &createInfo, NULL, &info.surface);
605 #else
606     VkXcbSurfaceCreateInfoKHR createInfo = {};
607     createInfo.sType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
608     createInfo.pNext = NULL;
609     createInfo.connection = info.connection;
610     createInfo.window = info.window;
611     res = vkCreateXcbSurfaceKHR(info.inst, &createInfo, NULL, &info.surface);
612 #endif  // __ANDROID__  && _WIN32
613     ASSERT(res == VK_SUCCESS);
614 
615     // Iterate over each queue to learn whether it supports presenting:
616     VkBool32 *pSupportsPresent = (VkBool32 *)malloc(info.queue_family_count * sizeof(VkBool32));
617     for (uint32_t i = 0; i < info.queue_family_count; i++)
618     {
619         vkGetPhysicalDeviceSurfaceSupportKHR(info.gpus[0], i, info.surface, &pSupportsPresent[i]);
620     }
621 
622     // Search for a graphics and a present queue in the array of queue
623     // families, try to find one that supports both
624     info.graphics_queue_family_index = UINT32_MAX;
625     info.present_queue_family_index  = UINT32_MAX;
626     for (uint32_t i = 0; i < info.queue_family_count; ++i)
627     {
628         if ((info.queue_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
629         {
630             if (info.graphics_queue_family_index == UINT32_MAX)
631                 info.graphics_queue_family_index = i;
632 
633             if (pSupportsPresent[i] == VK_TRUE)
634             {
635                 info.graphics_queue_family_index = i;
636                 info.present_queue_family_index  = i;
637                 break;
638             }
639         }
640     }
641 
642     if (info.present_queue_family_index == UINT32_MAX)
643     {
644         // If we didn't find a queue that supports both graphics and present, then
645         // find a separate present queue.
646         for (size_t i = 0; i < info.queue_family_count; ++i)
647             if (pSupportsPresent[i] == VK_TRUE)
648             {
649                 info.present_queue_family_index = i;
650                 break;
651             }
652     }
653     free(pSupportsPresent);
654 
655     // Generate error if could not find queues that support graphics
656     // and present
657     if (info.graphics_queue_family_index == UINT32_MAX ||
658         info.present_queue_family_index == UINT32_MAX)
659     {
660         std::cout << "Could not find queues for both graphics and present";
661         exit(-1);
662     }
663 
664     // Get the list of VkFormats that are supported:
665     uint32_t formatCount;
666     res = vkGetPhysicalDeviceSurfaceFormatsKHR(info.gpus[0], info.surface, &formatCount, NULL);
667     ASSERT(res == VK_SUCCESS);
668     VkSurfaceFormatKHR *surfFormats =
669         (VkSurfaceFormatKHR *)malloc(formatCount * sizeof(VkSurfaceFormatKHR));
670     res =
671         vkGetPhysicalDeviceSurfaceFormatsKHR(info.gpus[0], info.surface, &formatCount, surfFormats);
672     ASSERT(res == VK_SUCCESS);
673     // If the format list includes just one entry of VK_FORMAT_UNDEFINED,
674     // the surface has no preferred format.  Otherwise, at least one
675     // supported format will be returned.
676     if (formatCount == 1 && surfFormats[0].format == VK_FORMAT_UNDEFINED)
677     {
678         info.format = VK_FORMAT_B8G8R8A8_UNORM;
679     }
680     else
681     {
682         ASSERT(formatCount >= 1);
683         info.format = surfFormats[0].format;
684     }
685     free(surfFormats);
686 }
687 
688 VkResult init_device(struct sample_info &info)
689 {
690     VkResult res;
691     VkDeviceQueueCreateInfo queue_info = {};
692 
693     float queue_priorities[1]   = {0.0};
694     queue_info.sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
695     queue_info.pNext            = NULL;
696     queue_info.queueCount       = 1;
697     queue_info.pQueuePriorities = queue_priorities;
698     queue_info.queueFamilyIndex = info.graphics_queue_family_index;
699 
700     VkDeviceCreateInfo device_info    = {};
701     device_info.sType                 = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
702     device_info.pNext                 = NULL;
703     device_info.queueCreateInfoCount  = 1;
704     device_info.pQueueCreateInfos     = &queue_info;
705     device_info.enabledExtensionCount = info.device_extension_names.size();
706     device_info.ppEnabledExtensionNames =
707         device_info.enabledExtensionCount ? info.device_extension_names.data() : NULL;
708     device_info.pEnabledFeatures = NULL;
709 
710     res = vkCreateDevice(info.gpus[0], &device_info, NULL, &info.device);
711     ASSERT(res == VK_SUCCESS);
712 #if ANGLE_SHARED_LIBVULKAN
713     volkLoadDevice(info.device);
714 #endif  // ANGLE_SHARED_LIBVULKAN
715 
716     return res;
717 }
718 
719 void init_command_pool(struct sample_info &info, VkCommandPoolCreateFlags cmd_pool_create_flags)
720 {
721     /* DEPENDS on init_swapchain_extension() */
722     VkResult res;
723 
724     VkCommandPoolCreateInfo cmd_pool_info = {};
725     cmd_pool_info.sType                   = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
726     cmd_pool_info.pNext                   = NULL;
727     cmd_pool_info.queueFamilyIndex        = info.graphics_queue_family_index;
728     cmd_pool_info.flags                   = cmd_pool_create_flags;
729 
730     res = vkCreateCommandPool(info.device, &cmd_pool_info, NULL, &info.cmd_pool);
731     ASSERT(res == VK_SUCCESS);
732 }
733 
734 void init_command_buffer(struct sample_info &info)
735 {
736     /* DEPENDS on init_swapchain_extension() and init_command_pool() */
737     VkResult res;
738 
739     VkCommandBufferAllocateInfo cmd = {};
740     cmd.sType                       = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
741     cmd.pNext                       = NULL;
742     cmd.commandPool                 = info.cmd_pool;
743     cmd.level                       = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
744     cmd.commandBufferCount          = 1;
745 
746     res = vkAllocateCommandBuffers(info.device, &cmd, &info.cmd);
747     ASSERT(res == VK_SUCCESS);
748 }
749 
750 void init_command_buffer_array(struct sample_info &info, int numBuffers)
751 {
752     /* DEPENDS on init_swapchain_extension() and init_command_pool() */
753     VkResult res;
754     info.cmds.resize(numBuffers);
755     ASSERT(info.cmds.data() != NULL);
756 
757     VkCommandBufferAllocateInfo cmd = {};
758     cmd.sType                       = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
759     cmd.pNext                       = NULL;
760     cmd.commandPool                 = info.cmd_pool;
761     cmd.level                       = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
762     cmd.commandBufferCount          = numBuffers;
763 
764     res = vkAllocateCommandBuffers(info.device, &cmd, info.cmds.data());
765     ASSERT(res == VK_SUCCESS);
766 }
767 
768 void init_command_buffer2_array(struct sample_info &info, int numBuffers)
769 {
770     /* DEPENDS on init_swapchain_extension() and init_command_pool() */
771     VkResult res;
772     info.cmd2s.resize(numBuffers);
773     VkCommandBufferAllocateInfo cmd = {};
774     cmd.sType                       = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
775     cmd.pNext                       = NULL;
776     cmd.commandPool                 = info.cmd_pool;
777     cmd.level                       = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
778     cmd.commandBufferCount          = numBuffers;
779 
780     res = vkAllocateCommandBuffers(info.device, &cmd, info.cmd2s.data());
781     ASSERT(res == VK_SUCCESS);
782 }
783 
784 void init_device_queue(struct sample_info &info)
785 {
786     /* DEPENDS on init_swapchain_extension() */
787 
788     vkGetDeviceQueue(info.device, info.graphics_queue_family_index, 0, &info.graphics_queue);
789     if (info.graphics_queue_family_index == info.present_queue_family_index)
790     {
791         info.present_queue = info.graphics_queue;
792     }
793     else
794     {
795         vkGetDeviceQueue(info.device, info.present_queue_family_index, 0, &info.present_queue);
796     }
797 }
798 
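/*
 * Create the swapchain and an image view for each swapchain image.
 */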
799 void init_swap_chain(struct sample_info &info, VkImageUsageFlags usageFlags)
800 {
801     /* DEPENDS on info.cmd and info.queue initialized */
802 
803     VkResult res;
804     VkSurfaceCapabilitiesKHR surfCapabilities;
805 
806     res = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(info.gpus[0], info.surface, &surfCapabilities);
807     ASSERT(res == VK_SUCCESS);
808 
809     uint32_t presentModeCount;
810     res = vkGetPhysicalDeviceSurfacePresentModesKHR(info.gpus[0], info.surface, &presentModeCount,
811                                                     NULL);
812     ASSERT(res == VK_SUCCESS);
813     VkPresentModeKHR *presentModes =
814         (VkPresentModeKHR *)malloc(presentModeCount * sizeof(VkPresentModeKHR));
815     ASSERT(presentModes);
816     res = vkGetPhysicalDeviceSurfacePresentModesKHR(info.gpus[0], info.surface, &presentModeCount,
817                                                     presentModes);
818     ASSERT(res == VK_SUCCESS);
819 
820     VkExtent2D swapchainExtent;
821     // width and height are either both 0xFFFFFFFF, or both not 0xFFFFFFFF.
822     if (surfCapabilities.currentExtent.width == 0xFFFFFFFF)
823     {
824         // If the surface size is undefined, the size is set to
825         // the size of the images requested.
826         swapchainExtent.width  = info.width;
827         swapchainExtent.height = info.height;
828         if (swapchainExtent.width < surfCapabilities.minImageExtent.width)
829         {
830             swapchainExtent.width = surfCapabilities.minImageExtent.width;
831         }
832         else if (swapchainExtent.width > surfCapabilities.maxImageExtent.width)
833         {
834             swapchainExtent.width = surfCapabilities.maxImageExtent.width;
835         }
836 
837         if (swapchainExtent.height < surfCapabilities.minImageExtent.height)
838         {
839             swapchainExtent.height = surfCapabilities.minImageExtent.height;
840         }
841         else if (swapchainExtent.height > surfCapabilities.maxImageExtent.height)
842         {
843             swapchainExtent.height = surfCapabilities.maxImageExtent.height;
844         }
845     }
846     else
847     {
848         // If the surface size is defined, the swap chain size must match
849         swapchainExtent = surfCapabilities.currentExtent;
850     }
851 
852     // The FIFO present mode is guaranteed by the spec to be supported
853     // Also note that the current Android driver only supports FIFO
854     VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;
855 
856     for (uint32_t presentModeIndex = 0; presentModeIndex < presentModeCount; ++presentModeIndex)
857     {
858         if (presentModes[presentModeIndex] == VK_PRESENT_MODE_IMMEDIATE_KHR)
859         {
860             swapchainPresentMode = VK_PRESENT_MODE_IMMEDIATE_KHR;
861             break;
862         }
863     }
864 
865     // Determine the number of VkImage's to use in the swap chain.
866     // We need to acquire only 1 presentable image at a time.
867     // Asking for minImageCount images ensures that we can acquire
868     // 1 presentable image as long as we present it before attempting
869     // to acquire another.
870     uint32_t desiredNumberOfSwapChainImages = surfCapabilities.minImageCount;
871 
872     VkSurfaceTransformFlagBitsKHR preTransform;
873     if (surfCapabilities.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR)
874     {
875         preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
876     }
877     else
878     {
879         preTransform = surfCapabilities.currentTransform;
880     }
881 
882     // Find a supported composite alpha mode - one of these is guaranteed to be set
883     VkCompositeAlphaFlagBitsKHR compositeAlpha         = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
884     VkCompositeAlphaFlagBitsKHR compositeAlphaFlags[4] = {
885         VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
886         VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
887         VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
888         VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR,
889     };
890     for (uint32_t i = 0; i < sizeof(compositeAlphaFlags) / sizeof(compositeAlphaFlags[0]); i++)
891     {
892         if (surfCapabilities.supportedCompositeAlpha & compositeAlphaFlags[i])
893         {
894             compositeAlpha = compositeAlphaFlags[i];
895             break;
896         }
897     }
898 
899     VkSwapchainCreateInfoKHR swapchain_ci = {};
900     swapchain_ci.sType                    = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
901     swapchain_ci.pNext                    = NULL;
902     swapchain_ci.surface                  = info.surface;
903     swapchain_ci.minImageCount            = desiredNumberOfSwapChainImages;
904     swapchain_ci.imageFormat              = info.format;
905     swapchain_ci.imageExtent.width        = swapchainExtent.width;
906     swapchain_ci.imageExtent.height       = swapchainExtent.height;
907     swapchain_ci.preTransform             = preTransform;
908     swapchain_ci.compositeAlpha           = compositeAlpha;
909     swapchain_ci.imageArrayLayers         = 1;
910     swapchain_ci.presentMode              = swapchainPresentMode;
911     swapchain_ci.oldSwapchain             = VK_NULL_HANDLE;
912 #ifndef __ANDROID__
913     swapchain_ci.clipped = true;
914 #else
915     swapchain_ci.clipped = false;
916 #endif
917     swapchain_ci.imageColorSpace       = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
918     swapchain_ci.imageUsage            = usageFlags;
919     swapchain_ci.imageSharingMode      = VK_SHARING_MODE_EXCLUSIVE;
920     swapchain_ci.queueFamilyIndexCount = 0;
921     swapchain_ci.pQueueFamilyIndices   = NULL;
922     uint32_t queueFamilyIndices[2]     = {(uint32_t)info.graphics_queue_family_index,
923                                       (uint32_t)info.present_queue_family_index};
924     if (info.graphics_queue_family_index != info.present_queue_family_index)
925     {
926         // If the graphics and present queues are from different queue families,
927         // we either have to explicitly transfer ownership of images between the
928         // queues, or we have to create the swapchain with imageSharingMode
929         // as VK_SHARING_MODE_CONCURRENT
930         swapchain_ci.imageSharingMode      = VK_SHARING_MODE_CONCURRENT;
931         swapchain_ci.queueFamilyIndexCount = 2;
932         swapchain_ci.pQueueFamilyIndices   = queueFamilyIndices;
933     }
934 
935     res = vkCreateSwapchainKHR(info.device, &swapchain_ci, NULL, &info.swap_chain);
936     ASSERT(res == VK_SUCCESS);
937 
938     res = vkGetSwapchainImagesKHR(info.device, info.swap_chain, &info.swapchainImageCount, NULL);
939     ASSERT(res == VK_SUCCESS);
940 
941     VkImage *swapchainImages = (VkImage *)malloc(info.swapchainImageCount * sizeof(VkImage));
942     ASSERT(swapchainImages);
943     res = vkGetSwapchainImagesKHR(info.device, info.swap_chain, &info.swapchainImageCount,
944                                   swapchainImages);
945     ASSERT(res == VK_SUCCESS);
946 
947     for (uint32_t i = 0; i < info.swapchainImageCount; i++)
948     {
949         swap_chain_buffer sc_buffer;
950 
951         VkImageViewCreateInfo color_image_view           = {};
952         color_image_view.sType                           = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
953         color_image_view.pNext                           = NULL;
954         color_image_view.format                          = info.format;
955         color_image_view.components.r                    = VK_COMPONENT_SWIZZLE_R;
956         color_image_view.components.g                    = VK_COMPONENT_SWIZZLE_G;
957         color_image_view.components.b                    = VK_COMPONENT_SWIZZLE_B;
958         color_image_view.components.a                    = VK_COMPONENT_SWIZZLE_A;
959         color_image_view.subresourceRange.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
960         color_image_view.subresourceRange.baseMipLevel   = 0;
961         color_image_view.subresourceRange.levelCount     = 1;
962         color_image_view.subresourceRange.baseArrayLayer = 0;
963         color_image_view.subresourceRange.layerCount     = 1;
964         color_image_view.viewType                        = VK_IMAGE_VIEW_TYPE_2D;
965         color_image_view.flags                           = 0;
966 
967         sc_buffer.image = swapchainImages[i];
968 
969         color_image_view.image = sc_buffer.image;
970 
971         res = vkCreateImageView(info.device, &color_image_view, NULL, &sc_buffer.view);
972         info.buffers.push_back(sc_buffer);
973         ASSERT(res == VK_SUCCESS);
974     }
975     free(swapchainImages);
976     info.current_buffer = 0;
977 
978     if (NULL != presentModes)
979     {
980         free(presentModes);
981     }
982 }
983 
984 bool memory_type_from_properties(struct sample_info &info,
985                                  uint32_t typeBits,
986                                  VkFlags requirements_mask,
987                                  uint32_t *typeIndex)
988 {
989     // Search memtypes to find first index with those properties
990     for (uint32_t i = 0; i < info.memory_properties.memoryTypeCount; i++)
991     {
992         if ((typeBits & 1) == 1)
993         {
994             // Type is available, does it match user properties?
995             if ((info.memory_properties.memoryTypes[i].propertyFlags & requirements_mask) ==
996                 requirements_mask)
997             {
998                 *typeIndex = i;
999                 return true;
1000             }
1001         }
1002         typeBits >>= 1;
1003     }
1004     // No memory types matched, return failure
1005     return false;
1006 }
1007 
1008 void init_depth_buffer(struct sample_info &info)
1009 {
1010     VkResult res;
1011     bool pass;
1012     VkImageCreateInfo image_info = {};
1013 
1014 /* allow custom depth formats */
1015 #ifdef __ANDROID__
1016     // Depth format needs to be VK_FORMAT_D24_UNORM_S8_UINT on Android.
1017     info.depth.format = VK_FORMAT_D24_UNORM_S8_UINT;
1018 #else
1019     if (info.depth.format == VK_FORMAT_UNDEFINED)
1020         info.depth.format = VK_FORMAT_D16_UNORM;
1021 #endif
1022 
1023     const VkFormat depth_format = info.depth.format;
1024     VkFormatProperties props;
1025     vkGetPhysicalDeviceFormatProperties(info.gpus[0], depth_format, &props);
1026     if (props.linearTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
1027     {
1028         image_info.tiling = VK_IMAGE_TILING_LINEAR;
1029     }
1030     else if (props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
1031     {
1032         image_info.tiling = VK_IMAGE_TILING_OPTIMAL;
1033     }
1034     else
1035     {
1036         /* Try other depth formats? */
1037         std::cout << "depth_format " << depth_format << " Unsupported.\n";
1038         exit(-1);
1039     }
1040 
1041     image_info.sType                 = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
1042     image_info.pNext                 = NULL;
1043     image_info.imageType             = VK_IMAGE_TYPE_2D;
1044     image_info.format                = depth_format;
1045     image_info.extent.width          = info.width;
1046     image_info.extent.height         = info.height;
1047     image_info.extent.depth          = 1;
1048     image_info.mipLevels             = 1;
1049     image_info.arrayLayers           = 1;
1050     image_info.samples               = NUM_SAMPLES;
1051     image_info.initialLayout         = VK_IMAGE_LAYOUT_UNDEFINED;
1052     image_info.queueFamilyIndexCount = 0;
1053     image_info.pQueueFamilyIndices   = NULL;
1054     image_info.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
1055     image_info.usage                 = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
1056     image_info.flags                 = 0;
1057 
1058     VkMemoryAllocateInfo mem_alloc = {};
1059     mem_alloc.sType                = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1060     mem_alloc.pNext                = NULL;
1061     mem_alloc.allocationSize       = 0;
1062     mem_alloc.memoryTypeIndex      = 0;
1063 
1064     VkImageViewCreateInfo view_info           = {};
1065     view_info.sType                           = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
1066     view_info.pNext                           = NULL;
1067     view_info.image                           = VK_NULL_HANDLE;
1068     view_info.format                          = depth_format;
1069     view_info.components.r                    = VK_COMPONENT_SWIZZLE_R;
1070     view_info.components.g                    = VK_COMPONENT_SWIZZLE_G;
1071     view_info.components.b                    = VK_COMPONENT_SWIZZLE_B;
1072     view_info.components.a                    = VK_COMPONENT_SWIZZLE_A;
1073     view_info.subresourceRange.aspectMask     = VK_IMAGE_ASPECT_DEPTH_BIT;
1074     view_info.subresourceRange.baseMipLevel   = 0;
1075     view_info.subresourceRange.levelCount     = 1;
1076     view_info.subresourceRange.baseArrayLayer = 0;
1077     view_info.subresourceRange.layerCount     = 1;
1078     view_info.viewType                        = VK_IMAGE_VIEW_TYPE_2D;
1079     view_info.flags                           = 0;
1080 
1081     if (depth_format == VK_FORMAT_D16_UNORM_S8_UINT ||
1082         depth_format == VK_FORMAT_D24_UNORM_S8_UINT || depth_format == VK_FORMAT_D32_SFLOAT_S8_UINT)
1083     {
1084         view_info.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
1085     }
1086 
1087     VkMemoryRequirements mem_reqs;
1088 
1089     /* Create image */
1090     res = vkCreateImage(info.device, &image_info, NULL, &info.depth.image);
1091     ASSERT(res == VK_SUCCESS);
1092 
1093     vkGetImageMemoryRequirements(info.device, info.depth.image, &mem_reqs);
1094 
1095     mem_alloc.allocationSize = mem_reqs.size;
1096     /* Use the memory properties to determine the type of memory required */
1097     pass = memory_type_from_properties(info, mem_reqs.memoryTypeBits,
1098                                        VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
1099                                        &mem_alloc.memoryTypeIndex);
1100     ASSERT(pass);
1101 
1102     /* Allocate memory */
1103     res = vkAllocateMemory(info.device, &mem_alloc, NULL, &info.depth.mem);
1104     ASSERT(res == VK_SUCCESS);
1105 
1106     /* Bind memory */
1107     res = vkBindImageMemory(info.device, info.depth.image, info.depth.mem, 0);
1108     ASSERT(res == VK_SUCCESS);
1109 
1110     /* Create image view */
1111     view_info.image = info.depth.image;
1112     res             = vkCreateImageView(info.device, &view_info, NULL, &info.depth.view);
1113     ASSERT(res == VK_SUCCESS);
1114 }
1115 
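/*
 * Create a host-visible uniform buffer and copy the MVP matrix into it.
 */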
1116 void init_uniform_buffer(struct sample_info &info)
1117 {
1118     VkResult res;
1119     bool pass;
1120 
1121     info.MVP = {1.0f, 0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f,
1122                 0.0f, 0.0f, 0.5f, 0.0f, 0.0f, 0.0f,  0.5f, 1.0f};
1123 
1124     /* VULKAN_KEY_START */
1125     VkBufferCreateInfo buf_info    = {};
1126     buf_info.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
1127     buf_info.pNext                 = NULL;
1128     buf_info.usage                 = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
1129     buf_info.size                  = sizeof(float) * 16;  // info.MVP.data() size
1130     buf_info.queueFamilyIndexCount = 0;
1131     buf_info.pQueueFamilyIndices   = NULL;
1132     buf_info.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
1133     buf_info.flags                 = 0;
1134     res = vkCreateBuffer(info.device, &buf_info, NULL, &info.uniform_data.buf);
1135     ASSERT(res == VK_SUCCESS);
1136 
1137     VkMemoryRequirements mem_reqs;
1138     vkGetBufferMemoryRequirements(info.device, info.uniform_data.buf, &mem_reqs);
1139 
1140     VkMemoryAllocateInfo alloc_info = {};
1141     alloc_info.sType                = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1142     alloc_info.pNext                = NULL;
1143     alloc_info.memoryTypeIndex      = 0;
1144 
1145     alloc_info.allocationSize = mem_reqs.size;
1146     pass                      = memory_type_from_properties(
1147         info, mem_reqs.memoryTypeBits,
1148         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
1149         &alloc_info.memoryTypeIndex);
1150     ASSERT(pass && "No mappable, coherent memory");
1151 
1152     res = vkAllocateMemory(info.device, &alloc_info, NULL, &(info.uniform_data.mem));
1153     ASSERT(res == VK_SUCCESS);
1154 
1155     uint8_t *pData;
1156     res = vkMapMemory(info.device, info.uniform_data.mem, 0, mem_reqs.size, 0, (void **)&pData);
1157     ASSERT(res == VK_SUCCESS);
1158 
1159     memcpy(pData, info.MVP.data(), sizeof(float) * 16);  // info.MVP.data() size
1160 
1161     vkUnmapMemory(info.device, info.uniform_data.mem);
1162 
1163     res = vkBindBufferMemory(info.device, info.uniform_data.buf, info.uniform_data.mem, 0);
1164     ASSERT(res == VK_SUCCESS);
1165 
1166     info.uniform_data.buffer_info.buffer = info.uniform_data.buf;
1167     info.uniform_data.buffer_info.offset = 0;
1168     info.uniform_data.buffer_info.range  = sizeof(float) * 16;  // info.MVP.data() size
1169 }
1170 
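/*
 * Create the descriptor set layout (a uniform buffer plus an optional
 * combined image sampler) and the matching pipeline layout.
 */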
1171 void init_descriptor_and_pipeline_layouts(struct sample_info &info,
1172                                           bool use_texture,
1173                                           VkDescriptorSetLayoutCreateFlags descSetLayoutCreateFlags)
1174 {
1175     VkDescriptorSetLayoutBinding layout_bindings[2];
1176     layout_bindings[0].binding            = 0;
1177     layout_bindings[0].descriptorType     = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
1178     layout_bindings[0].descriptorCount    = 1;
1179     layout_bindings[0].stageFlags         = VK_SHADER_STAGE_VERTEX_BIT;
1180     layout_bindings[0].pImmutableSamplers = NULL;
1181 
1182     if (use_texture)
1183     {
1184         layout_bindings[1].binding            = 1;
1185         layout_bindings[1].descriptorType     = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
1186         layout_bindings[1].descriptorCount    = 1;
1187         layout_bindings[1].stageFlags         = VK_SHADER_STAGE_FRAGMENT_BIT;
1188         layout_bindings[1].pImmutableSamplers = NULL;
1189     }
1190 
1191     /* Next take layout bindings and use them to create a descriptor set layout
1192      */
1193     VkDescriptorSetLayoutCreateInfo descriptor_layout = {};
1194     descriptor_layout.sType        = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
1195     descriptor_layout.pNext        = NULL;
1196     descriptor_layout.flags        = descSetLayoutCreateFlags;
1197     descriptor_layout.bindingCount = use_texture ? 2 : 1;
1198     descriptor_layout.pBindings    = layout_bindings;
1199 
1200     VkResult res;
1201 
1202     info.desc_layout.resize(NUM_DESCRIPTOR_SETS);
1203     res =
1204         vkCreateDescriptorSetLayout(info.device, &descriptor_layout, NULL, info.desc_layout.data());
1205     ASSERT(res == VK_SUCCESS);
1206 
1207     /* Now use the descriptor layout to create a pipeline layout */
1208     VkPipelineLayoutCreateInfo pPipelineLayoutCreateInfo = {};
1209     pPipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
1210     pPipelineLayoutCreateInfo.pNext = NULL;
1211     pPipelineLayoutCreateInfo.pushConstantRangeCount = 0;
1212     pPipelineLayoutCreateInfo.pPushConstantRanges    = NULL;
1213     pPipelineLayoutCreateInfo.setLayoutCount         = NUM_DESCRIPTOR_SETS;
1214     pPipelineLayoutCreateInfo.pSetLayouts            = info.desc_layout.data();
1215 
1216     res = vkCreatePipelineLayout(info.device, &pPipelineLayoutCreateInfo, NULL,
1217                                  &info.pipeline_layout);
1218     ASSERT(res == VK_SUCCESS);
1219 }
1220 
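/*
 * Create a render pass with one color attachment and an optional
 * depth/stencil attachment.
 */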
1221 void init_renderpass(struct sample_info &info,
1222                      bool include_depth,
1223                      bool clear,
1224                      VkImageLayout finalLayout)
1225 {
1226     /* DEPENDS on init_swap_chain() and init_depth_buffer() */
1227 
1228     VkResult res;
1229     /* Need attachments for render target and depth buffer */
1230     VkAttachmentDescription attachments[2];
1231     attachments[0].format        = info.format;
1232     attachments[0].samples       = NUM_SAMPLES;
1233     attachments[0].loadOp        = clear ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
1234     attachments[0].storeOp       = VK_ATTACHMENT_STORE_OP_STORE;
1235     attachments[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
1236     attachments[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
1237     attachments[0].initialLayout  = VK_IMAGE_LAYOUT_UNDEFINED;
1238     attachments[0].finalLayout    = finalLayout;
1239     attachments[0].flags          = 0;
1240 
1241     if (include_depth)
1242     {
1243         attachments[1].format  = info.depth.format;
1244         attachments[1].samples = NUM_SAMPLES;
1245         attachments[1].loadOp  = clear ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
1246         attachments[1].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
1247         attachments[1].stencilLoadOp  = VK_ATTACHMENT_LOAD_OP_LOAD;
1248         attachments[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
1249         attachments[1].initialLayout  = VK_IMAGE_LAYOUT_UNDEFINED;
1250         attachments[1].finalLayout    = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
1251         attachments[1].flags          = 0;
1252     }
1253 
1254     VkAttachmentReference color_reference = {};
1255     color_reference.attachment            = 0;
1256     color_reference.layout                = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
1257 
1258     VkAttachmentReference depth_reference = {};
1259     depth_reference.attachment            = 1;
1260     depth_reference.layout                = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
1261 
1262     VkSubpassDescription subpass    = {};
1263     subpass.pipelineBindPoint       = VK_PIPELINE_BIND_POINT_GRAPHICS;
1264     subpass.flags                   = 0;
1265     subpass.inputAttachmentCount    = 0;
1266     subpass.pInputAttachments       = NULL;
1267     subpass.colorAttachmentCount    = 1;
1268     subpass.pColorAttachments       = &color_reference;
1269     subpass.pResolveAttachments     = NULL;
1270     subpass.pDepthStencilAttachment = include_depth ? &depth_reference : NULL;
1271     subpass.preserveAttachmentCount = 0;
1272     subpass.pPreserveAttachments    = NULL;
1273 
1274     VkRenderPassCreateInfo rp_info = {};
1275     rp_info.sType                  = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
1276     rp_info.pNext                  = NULL;
1277     rp_info.attachmentCount        = include_depth ? 2 : 1;
1278     rp_info.pAttachments           = attachments;
1279     rp_info.subpassCount           = 1;
1280     rp_info.pSubpasses             = &subpass;
1281     rp_info.dependencyCount        = 0;
1282     rp_info.pDependencies          = NULL;
1283 
1284     res = vkCreateRenderPass(info.device, &rp_info, NULL, &info.render_pass);
1285     ASSERT(res == VK_SUCCESS);
1286 }
1287 
1288 void init_framebuffers(struct sample_info &info, bool include_depth)
1289 {
1290     /* DEPENDS on init_depth_buffer(), init_renderpass() and
1291      * init_swapchain_extension() */
1292 
1293     VkResult res;
1294     VkImageView attachments[2];
1295     attachments[1] = info.depth.view;
1296 
1297     VkFramebufferCreateInfo fb_info = {};
1298     fb_info.sType                   = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
1299     fb_info.pNext                   = NULL;
1300     fb_info.renderPass              = info.render_pass;
1301     fb_info.attachmentCount         = include_depth ? 2 : 1;
1302     fb_info.pAttachments            = attachments;
1303     fb_info.width                   = info.width;
1304     fb_info.height                  = info.height;
1305     fb_info.layers                  = 1;
1306 
1307     uint32_t i;
1308 
1309     info.framebuffers = (VkFramebuffer *)malloc(info.swapchainImageCount * sizeof(VkFramebuffer));
1310 
1311     for (i = 0; i < info.swapchainImageCount; i++)
1312     {
1313         attachments[0] = info.buffers[i].view;
1314         res            = vkCreateFramebuffer(info.device, &fb_info, NULL, &info.framebuffers[i]);
1315         ASSERT(res == VK_SUCCESS);
1316     }
1317 }
1318 
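/*
 * Create a host-visible, host-coherent vertex buffer, copy vertexData into it,
 * and describe the vertex layout in info.vi_binding / info.vi_attribs:
 * attribute 0 is a vec4 position at offset 0; attribute 1 at offset 16 is a
 * vec2 texture coordinate when use_texture is set, otherwise a second vec4
 * (typically a color).
 *
 * A minimal usage sketch with a hypothetical interleaved array g_vb_data of
 * position + color vertices (the name and layout are illustrative only):
 *
 *     init_vertex_buffer(info, g_vb_data, sizeof(g_vb_data),
 *                        sizeof(g_vb_data[0]), false);
 */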
void init_vertex_buffer(struct sample_info &info,
                        const void *vertexData,
                        uint32_t dataSize,
                        uint32_t dataStride,
                        bool use_texture)
{
    VkResult res;
    bool pass;

    VkBufferCreateInfo buf_info    = {};
    buf_info.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    buf_info.pNext                 = NULL;
    buf_info.usage                 = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
    buf_info.size                  = dataSize;
    buf_info.queueFamilyIndexCount = 0;
    buf_info.pQueueFamilyIndices   = NULL;
    buf_info.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
    buf_info.flags                 = 0;
    res = vkCreateBuffer(info.device, &buf_info, NULL, &info.vertex_buffer.buf);
    ASSERT(res == VK_SUCCESS);

    VkMemoryRequirements mem_reqs;
    vkGetBufferMemoryRequirements(info.device, info.vertex_buffer.buf, &mem_reqs);

    VkMemoryAllocateInfo alloc_info = {};
    alloc_info.sType                = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    alloc_info.pNext                = NULL;
    alloc_info.memoryTypeIndex      = 0;

    alloc_info.allocationSize = mem_reqs.size;
    pass                      = memory_type_from_properties(
        info, mem_reqs.memoryTypeBits,
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
        &alloc_info.memoryTypeIndex);
    ASSERT(pass && "No mappable, coherent memory");

    res = vkAllocateMemory(info.device, &alloc_info, NULL, &(info.vertex_buffer.mem));
    ASSERT(res == VK_SUCCESS);
    info.vertex_buffer.buffer_info.range  = mem_reqs.size;
    info.vertex_buffer.buffer_info.offset = 0;

    uint8_t *pData;
    res = vkMapMemory(info.device, info.vertex_buffer.mem, 0, mem_reqs.size, 0, (void **)&pData);
    ASSERT(res == VK_SUCCESS);

    memcpy(pData, vertexData, dataSize);

    vkUnmapMemory(info.device, info.vertex_buffer.mem);

    res = vkBindBufferMemory(info.device, info.vertex_buffer.buf, info.vertex_buffer.mem, 0);
    ASSERT(res == VK_SUCCESS);

    info.vi_binding.binding   = 0;
    info.vi_binding.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
    info.vi_binding.stride    = dataStride;

    info.vi_attribs[0].binding  = 0;
    info.vi_attribs[0].location = 0;
    info.vi_attribs[0].format   = VK_FORMAT_R32G32B32A32_SFLOAT;
    info.vi_attribs[0].offset   = 0;
    info.vi_attribs[1].binding  = 0;
    info.vi_attribs[1].location = 1;
    info.vi_attribs[1].format =
        use_texture ? VK_FORMAT_R32G32_SFLOAT : VK_FORMAT_R32G32B32A32_SFLOAT;
    info.vi_attribs[1].offset = 16;
}

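/*
 * Create a descriptor pool large enough for a single descriptor set containing
 * one uniform buffer and, if use_texture is set, one combined image sampler.
 */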
void init_descriptor_pool(struct sample_info &info, bool use_texture)
{
    /* DEPENDS on init_uniform_buffer() and
     * init_descriptor_and_pipeline_layouts() */

    VkResult res;
    VkDescriptorPoolSize type_count[2];
    type_count[0].type            = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    type_count[0].descriptorCount = 1;
    if (use_texture)
    {
        type_count[1].type            = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        type_count[1].descriptorCount = 1;
    }

    VkDescriptorPoolCreateInfo descriptor_pool = {};
    descriptor_pool.sType                      = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    descriptor_pool.pNext                      = NULL;
    descriptor_pool.maxSets                    = 1;
    descriptor_pool.poolSizeCount              = use_texture ? 2 : 1;
    descriptor_pool.pPoolSizes                 = type_count;

    res = vkCreateDescriptorPool(info.device, &descriptor_pool, NULL, &info.desc_pool);
    ASSERT(res == VK_SUCCESS);
}

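/*
 * Allocate NUM_DESCRIPTOR_SETS descriptor sets from info.desc_pool using the
 * layouts in info.desc_layout, then write binding 0 of the first set to point
 * at the uniform buffer. Only the uniform-buffer binding is written here; any
 * sampler binding must be written separately by the caller.
 */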
void init_descriptor_set(struct sample_info &info)
{
    /* DEPENDS on init_descriptor_pool() */

    VkResult res;

    VkDescriptorSetAllocateInfo alloc_info[1];
    alloc_info[0].sType              = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    alloc_info[0].pNext              = NULL;
    alloc_info[0].descriptorPool     = info.desc_pool;
    alloc_info[0].descriptorSetCount = NUM_DESCRIPTOR_SETS;
    alloc_info[0].pSetLayouts        = info.desc_layout.data();

    info.desc_set.resize(NUM_DESCRIPTOR_SETS);
    res = vkAllocateDescriptorSets(info.device, alloc_info, info.desc_set.data());
    ASSERT(res == VK_SUCCESS);

    VkWriteDescriptorSet writes[2];

    writes[0]                 = {};
    writes[0].sType           = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    writes[0].pNext           = NULL;
    writes[0].dstSet          = info.desc_set[0];
    writes[0].descriptorCount = 1;
    writes[0].descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    writes[0].pBufferInfo     = &info.uniform_data.buffer_info;
    writes[0].dstArrayElement = 0;
    writes[0].dstBinding      = 0;

    vkUpdateDescriptorSets(info.device, 1, writes, 0, NULL);
}

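/*
 * Compile a single GLSL shader string to SPIR-V with glslang, using Vulkan
 * semantics. Returns false (after printing the info logs) if parsing or
 * linking fails.
 *
 * A minimal usage sketch, where vertShaderText is assumed to hold GLSL source:
 *
 *     std::vector<unsigned int> spirv;
 *     bool ok = GLSLtoSPV(VK_SHADER_STAGE_VERTEX_BIT, vertShaderText, spirv);
 *     ASSERT(ok);
 */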
bool GLSLtoSPV(const VkShaderStageFlagBits shader_type,
               const char *pshader,
               std::vector<unsigned int> &spirv)
{
    EShLanguage stage = FindLanguage(shader_type);
    glslang::TShader shader(stage);
    glslang::TProgram program;
    const char *shaderStrings[1];
    TBuiltInResource Resources;
    init_resources(Resources);

    // Enable SPIR-V and Vulkan rules when parsing GLSL
    EShMessages messages = (EShMessages)(EShMsgSpvRules | EShMsgVulkanRules);

    shaderStrings[0] = pshader;
    shader.setStrings(shaderStrings, 1);

    if (!shader.parse(&Resources, 100, false, messages))
    {
        puts(shader.getInfoLog());
        puts(shader.getInfoDebugLog());
        return false;  // compilation failed
    }

    program.addShader(&shader);

    //
    // Program-level processing...
    //

    if (!program.link(messages))
    {
        puts(shader.getInfoLog());
        puts(shader.getInfoDebugLog());
        fflush(stdout);
        return false;
    }

    glslang::GlslangToSpv(*program.getIntermediate(stage), spirv);
    return true;
}

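/*
 * Compile the optional vertex and fragment GLSL strings to SPIR-V and wrap
 * them in VkShaderModules, filling info.shaderStages[0] (vertex) and
 * info.shaderStages[1] (fragment) for later use by init_pipeline().
 */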
void init_shaders(struct sample_info &info, const char *vertShaderText, const char *fragShaderText)
{
    VkResult res;
    bool retVal;

    // If no shaders were submitted, just return
    if (!(vertShaderText || fragShaderText))
        return;

    glslang::InitializeProcess();
    VkShaderModuleCreateInfo moduleCreateInfo;

    if (vertShaderText)
    {
        std::vector<unsigned int> vtx_spv;
        info.shaderStages[0].sType               = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
        info.shaderStages[0].pNext               = NULL;
        info.shaderStages[0].pSpecializationInfo = NULL;
        info.shaderStages[0].flags               = 0;
        info.shaderStages[0].stage               = VK_SHADER_STAGE_VERTEX_BIT;
        info.shaderStages[0].pName               = "main";

        retVal = GLSLtoSPV(VK_SHADER_STAGE_VERTEX_BIT, vertShaderText, vtx_spv);
        ASSERT(retVal);

        moduleCreateInfo.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        moduleCreateInfo.pNext    = NULL;
        moduleCreateInfo.flags    = 0;
        moduleCreateInfo.codeSize = vtx_spv.size() * sizeof(unsigned int);
        moduleCreateInfo.pCode    = vtx_spv.data();
        res                       = vkCreateShaderModule(info.device, &moduleCreateInfo, NULL,
                                                          &info.shaderStages[0].module);
        ASSERT(res == VK_SUCCESS);
    }

    if (fragShaderText)
    {
        std::vector<unsigned int> frag_spv;
        info.shaderStages[1].sType               = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
        info.shaderStages[1].pNext               = NULL;
        info.shaderStages[1].pSpecializationInfo = NULL;
        info.shaderStages[1].flags               = 0;
        info.shaderStages[1].stage               = VK_SHADER_STAGE_FRAGMENT_BIT;
        info.shaderStages[1].pName               = "main";

        retVal = GLSLtoSPV(VK_SHADER_STAGE_FRAGMENT_BIT, fragShaderText, frag_spv);
        ASSERT(retVal);

        moduleCreateInfo.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        moduleCreateInfo.pNext    = NULL;
        moduleCreateInfo.flags    = 0;
        moduleCreateInfo.codeSize = frag_spv.size() * sizeof(unsigned int);
        moduleCreateInfo.pCode    = frag_spv.data();
        res                       = vkCreateShaderModule(info.device, &moduleCreateInfo, NULL,
                                                          &info.shaderStages[1].module);
        ASSERT(res == VK_SUCCESS);
    }

    glslang::FinalizeProcess();
}

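/*
 * Create an empty pipeline cache (no initial data) in info.pipelineCache.
 */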
void init_pipeline_cache(struct sample_info &info)
{
    VkResult res;

    VkPipelineCacheCreateInfo pipelineCache;
    pipelineCache.sType           = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    pipelineCache.pNext           = NULL;
    pipelineCache.initialDataSize = 0;
    pipelineCache.pInitialData    = NULL;
    pipelineCache.flags           = 0;
    res = vkCreatePipelineCache(info.device, &pipelineCache, NULL, &info.pipelineCache);
    ASSERT(res == VK_SUCCESS);
}

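/*
 * DEPENDS on init_renderpass(), init_shaders(), init_pipeline_cache(),
 * init_descriptor_and_pipeline_layouts() and, when include_vi is set,
 * init_vertex_buffer().
 *
 * Assemble the fixed-function state (vertex input, input assembly,
 * rasterization, blending, viewport, depth/stencil, multisampling) and create
 * the graphics pipeline. Except on Android, viewport and scissor are left
 * dynamic and must be set at draw time via init_viewports()/init_scissors().
 */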
void init_pipeline(struct sample_info &info, VkBool32 include_depth, VkBool32 include_vi)
{
    VkResult res;

    std::vector<VkDynamicState> dynamicStateEnables;
    VkPipelineDynamicStateCreateInfo dynamicState = {};
    dynamicState.sType             = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
    dynamicState.pNext             = NULL;
    dynamicState.pDynamicStates    = NULL;
    dynamicState.dynamicStateCount = 0;

    VkPipelineVertexInputStateCreateInfo vi;
    memset(&vi, 0, sizeof(vi));
    vi.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
    if (include_vi)
    {
        vi.pNext                           = NULL;
        vi.flags                           = 0;
        vi.vertexBindingDescriptionCount   = 1;
        vi.pVertexBindingDescriptions      = &info.vi_binding;
        vi.vertexAttributeDescriptionCount = 2;
        vi.pVertexAttributeDescriptions    = info.vi_attribs;
    }
    VkPipelineInputAssemblyStateCreateInfo ia;
    ia.sType                  = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
    ia.pNext                  = NULL;
    ia.flags                  = 0;
    ia.primitiveRestartEnable = VK_FALSE;
    ia.topology               = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;

    VkPipelineRasterizationStateCreateInfo rs;
    rs.sType                   = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
    rs.pNext                   = NULL;
    rs.flags                   = 0;
    rs.polygonMode             = VK_POLYGON_MODE_FILL;
    rs.cullMode                = VK_CULL_MODE_BACK_BIT;
    rs.frontFace               = VK_FRONT_FACE_CLOCKWISE;
    rs.depthClampEnable        = VK_FALSE;
    rs.rasterizerDiscardEnable = VK_FALSE;
    rs.depthBiasEnable         = VK_FALSE;
    rs.depthBiasConstantFactor = 0;
    rs.depthBiasClamp          = 0;
    rs.depthBiasSlopeFactor    = 0;
    rs.lineWidth               = 1.0f;

    VkPipelineColorBlendStateCreateInfo cb;
    cb.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
    cb.flags = 0;
    cb.pNext = NULL;
    VkPipelineColorBlendAttachmentState att_state[1];
    att_state[0].colorWriteMask      = 0xf;
    att_state[0].blendEnable         = VK_FALSE;
    att_state[0].alphaBlendOp        = VK_BLEND_OP_ADD;
    att_state[0].colorBlendOp        = VK_BLEND_OP_ADD;
    att_state[0].srcColorBlendFactor = VK_BLEND_FACTOR_ZERO;
    att_state[0].dstColorBlendFactor = VK_BLEND_FACTOR_ZERO;
    att_state[0].srcAlphaBlendFactor = VK_BLEND_FACTOR_ZERO;
    att_state[0].dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO;
    cb.attachmentCount               = 1;
    cb.pAttachments                  = att_state;
    cb.logicOpEnable                 = VK_FALSE;
    cb.logicOp                       = VK_LOGIC_OP_NO_OP;
    cb.blendConstants[0]             = 1.0f;
    cb.blendConstants[1]             = 1.0f;
    cb.blendConstants[2]             = 1.0f;
    cb.blendConstants[3]             = 1.0f;

    VkPipelineViewportStateCreateInfo vp = {};
    vp.sType                             = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
    vp.pNext                             = NULL;
    vp.flags                             = 0;
#ifndef __ANDROID__
    vp.viewportCount = NUM_VIEWPORTS;
    dynamicState.dynamicStateCount++;
    dynamicStateEnables.push_back(VK_DYNAMIC_STATE_VIEWPORT);
    vp.scissorCount = NUM_SCISSORS;
    dynamicState.dynamicStateCount++;
    dynamicStateEnables.push_back(VK_DYNAMIC_STATE_SCISSOR);
    vp.pScissors  = NULL;
    vp.pViewports = NULL;
#else
    // Temporarily use a static viewport and scissor on Android because some
    // drivers don't support the dynamic viewport/scissor states.
    VkViewport viewports;
    viewports.minDepth = 0.0f;
    viewports.maxDepth = 1.0f;
    viewports.x = 0;
    viewports.y = 0;
    viewports.width = info.width;
    viewports.height = info.height;
    VkRect2D scissor;
    scissor.extent.width = info.width;
    scissor.extent.height = info.height;
    scissor.offset.x = 0;
    scissor.offset.y = 0;
    vp.viewportCount = NUM_VIEWPORTS;
    vp.scissorCount = NUM_SCISSORS;
    vp.pScissors = &scissor;
    vp.pViewports = &viewports;
#endif
    VkPipelineDepthStencilStateCreateInfo ds;
    ds.sType                 = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
    ds.pNext                 = NULL;
    ds.flags                 = 0;
    ds.depthTestEnable       = include_depth;
    ds.depthWriteEnable      = include_depth;
    ds.depthCompareOp        = VK_COMPARE_OP_LESS_OR_EQUAL;
    ds.depthBoundsTestEnable = VK_FALSE;
    ds.stencilTestEnable     = VK_FALSE;
    ds.back.failOp           = VK_STENCIL_OP_KEEP;
    ds.back.passOp           = VK_STENCIL_OP_KEEP;
    ds.back.compareOp        = VK_COMPARE_OP_ALWAYS;
    ds.back.compareMask      = 0;
    ds.back.reference        = 0;
    ds.back.depthFailOp      = VK_STENCIL_OP_KEEP;
    ds.back.writeMask        = 0;
    ds.minDepthBounds        = 0;
    ds.maxDepthBounds        = 0;
    ds.front                 = ds.back;

    VkPipelineMultisampleStateCreateInfo ms;
    ms.sType                 = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
    ms.pNext                 = NULL;
    ms.flags                 = 0;
    ms.pSampleMask           = NULL;
    ms.rasterizationSamples  = NUM_SAMPLES;
    ms.sampleShadingEnable   = VK_FALSE;
    ms.alphaToCoverageEnable = VK_FALSE;
    ms.alphaToOneEnable      = VK_FALSE;
    ms.minSampleShading      = 0.0;

    VkGraphicsPipelineCreateInfo pipeline;
    pipeline.sType               = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
    pipeline.pNext               = NULL;
    pipeline.layout              = info.pipeline_layout;
    pipeline.basePipelineHandle  = VK_NULL_HANDLE;
    pipeline.basePipelineIndex   = 0;
    pipeline.flags               = 0;
    pipeline.pVertexInputState   = &vi;
    pipeline.pInputAssemblyState = &ia;
    pipeline.pRasterizationState = &rs;
    pipeline.pColorBlendState    = &cb;
    pipeline.pTessellationState  = NULL;
    pipeline.pMultisampleState   = &ms;
    pipeline.pDynamicState       = &dynamicState;
    pipeline.pViewportState      = &vp;
    pipeline.pDepthStencilState  = &ds;
    pipeline.pStages             = info.shaderStages;
    pipeline.stageCount          = 2;
    pipeline.renderPass          = info.render_pass;
    pipeline.subpass             = 0;

    if (dynamicStateEnables.size() > 0)
    {
        dynamicState.pDynamicStates    = dynamicStateEnables.data();
        dynamicState.dynamicStateCount = dynamicStateEnables.size();
    }

    res = vkCreateGraphicsPipelines(info.device, info.pipelineCache, 1, &pipeline, NULL,
                                    &info.pipeline);
    ASSERT(res == VK_SUCCESS);
}

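/*
 * Record a vkCmdSetViewport covering the full render surface into info.cmd.
 * The *_array variants below do the same for the command buffers in info.cmds
 * and info.cmd2s. On Android the dynamic viewport state is unused, so these
 * functions are no-ops there.
 */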
void init_viewports(struct sample_info &info)
{
#ifdef __ANDROID__
// Dynamic viewport state is not used on Android. Some drivers have issues with the
// dynamic viewport feature.
#else
    info.viewport.height = (float)info.height;
    info.viewport.width = (float)info.width;
    info.viewport.minDepth = 0.0f;
    info.viewport.maxDepth = 1.0f;
    info.viewport.x = 0;
    info.viewport.y = 0;
    vkCmdSetViewport(info.cmd, 0, NUM_VIEWPORTS, &info.viewport);
#endif
}

void init_viewports_array(struct sample_info &info, int index)
{
#ifdef __ANDROID__
// Dynamic viewport state is not used on Android. Some drivers have issues with the
// dynamic viewport feature.
#else
    info.viewport.height = (float)info.height;
    info.viewport.width = (float)info.width;
    info.viewport.minDepth = 0.0f;
    info.viewport.maxDepth = 1.0f;
    info.viewport.x = 0;
    info.viewport.y = 0;
    vkCmdSetViewport(info.cmds[index], 0, NUM_VIEWPORTS, &info.viewport);
#endif
}

void init_viewports2_array(struct sample_info &info, int index)
{
#ifdef __ANDROID__
// Dynamic viewport state is not used on Android. Some drivers have issues with the
// dynamic viewport feature.
#else
    info.viewport.height = (float)info.height;
    info.viewport.width = (float)info.width;
    info.viewport.minDepth = 0.0f;
    info.viewport.maxDepth = 1.0f;
    info.viewport.x = 0;
    info.viewport.y = 0;
    vkCmdSetViewport(info.cmd2s[index], 0, NUM_VIEWPORTS, &info.viewport);
#endif
}

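/*
 * Record a vkCmdSetScissor covering the full render surface into info.cmd;
 * the *_array variants target info.cmds and info.cmd2s. These are no-ops on
 * Android, where dynamic scissor state is unused.
 */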
void init_scissors(struct sample_info &info)
{
#ifdef __ANDROID__
// Dynamic scissor state is not used on Android. Some drivers have issues with the
// dynamic scissor feature.
#else
    info.scissor.extent.width = info.width;
    info.scissor.extent.height = info.height;
    info.scissor.offset.x = 0;
    info.scissor.offset.y = 0;
    vkCmdSetScissor(info.cmd, 0, NUM_SCISSORS, &info.scissor);
#endif
}

void init_scissors_array(struct sample_info &info, int index)
{
#ifdef __ANDROID__
// Dynamic scissor state is not used on Android. Some drivers have issues with the
// dynamic scissor feature.
#else
    info.scissor.extent.width = info.width;
    info.scissor.extent.height = info.height;
    info.scissor.offset.x = 0;
    info.scissor.offset.y = 0;
    vkCmdSetScissor(info.cmds[index], 0, NUM_SCISSORS, &info.scissor);
#endif
}

void init_scissors2_array(struct sample_info &info, int index)
{
#ifdef __ANDROID__
// Dynamic scissor state is not used on Android. Some drivers have issues with the
// dynamic scissor feature.
#else
    info.scissor.extent.width = info.width;
    info.scissor.extent.height = info.height;
    info.scissor.offset.x = 0;
    info.scissor.offset.y = 0;
    vkCmdSetScissor(info.cmd2s[index], 0, NUM_SCISSORS, &info.scissor);
#endif
}

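/*
 * The destroy_* helpers below release the objects created by the matching
 * init_* calls. A teardown sketch, roughly the reverse of initialization
 * (the exact order depends on which init_* calls a sample actually used):
 *
 *     vkDeviceWaitIdle(info.device);
 *     destroy_pipeline(info);
 *     destroy_pipeline_cache(info);
 *     destroy_descriptor_pool(info);
 *     destroy_vertex_buffer(info);
 *     destroy_framebuffers(info);
 *     destroy_shaders(info);
 *     destroy_renderpass(info);
 *     destroy_depth_buffer(info);
 *     destroy_swap_chain(info);
 *     destroy_uniform_buffer(info);
 *     destroy_descriptor_and_pipeline_layouts(info);
 *     destroy_command_buffer(info);
 *     destroy_command_pool(info);
 *     destroy_device(info);
 *     destroy_instance(info);
 */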
void destroy_pipeline(struct sample_info &info)
{
    vkDestroyPipeline(info.device, info.pipeline, NULL);
}

void destroy_pipeline_cache(struct sample_info &info)
{
    vkDestroyPipelineCache(info.device, info.pipelineCache, NULL);
}

void destroy_uniform_buffer(struct sample_info &info)
{
    vkDestroyBuffer(info.device, info.uniform_data.buf, NULL);
    vkFreeMemory(info.device, info.uniform_data.mem, NULL);
}

void destroy_descriptor_and_pipeline_layouts(struct sample_info &info)
{
    for (int i = 0; i < NUM_DESCRIPTOR_SETS; i++)
        vkDestroyDescriptorSetLayout(info.device, info.desc_layout[i], NULL);
    vkDestroyPipelineLayout(info.device, info.pipeline_layout, NULL);
}

void destroy_descriptor_pool(struct sample_info &info)
{
    vkDestroyDescriptorPool(info.device, info.desc_pool, NULL);
}

void destroy_shaders(struct sample_info &info)
{
    vkDestroyShaderModule(info.device, info.shaderStages[0].module, NULL);
    vkDestroyShaderModule(info.device, info.shaderStages[1].module, NULL);
}

void destroy_command_buffer(struct sample_info &info)
{
    VkCommandBuffer cmd_bufs[1] = {info.cmd};
    vkFreeCommandBuffers(info.device, info.cmd_pool, 1, cmd_bufs);
}

void destroy_command_buffer_array(struct sample_info &info, int numBuffers)
{
    vkFreeCommandBuffers(info.device, info.cmd_pool, numBuffers, info.cmds.data());
}

void reset_command_buffer2_array(struct sample_info &info,
                                 VkCommandBufferResetFlags cmd_buffer_reset_flags)
{
    for (auto cb : info.cmd2s)
    {
        vkResetCommandBuffer(cb, cmd_buffer_reset_flags);
    }
}

void destroy_command_buffer2_array(struct sample_info &info, int numBuffers)
{
    vkFreeCommandBuffers(info.device, info.cmd_pool, numBuffers, info.cmd2s.data());
}

void reset_command_pool(struct sample_info &info, VkCommandPoolResetFlags cmd_pool_reset_flags)
{
    vkResetCommandPool(info.device, info.cmd_pool, cmd_pool_reset_flags);
}

void destroy_command_pool(struct sample_info &info)
{
    vkDestroyCommandPool(info.device, info.cmd_pool, NULL);
}

void destroy_depth_buffer(struct sample_info &info)
{
    vkDestroyImageView(info.device, info.depth.view, NULL);
    vkDestroyImage(info.device, info.depth.image, NULL);
    vkFreeMemory(info.device, info.depth.mem, NULL);
}

void destroy_vertex_buffer(struct sample_info &info)
{
    vkDestroyBuffer(info.device, info.vertex_buffer.buf, NULL);
    vkFreeMemory(info.device, info.vertex_buffer.mem, NULL);
}

void destroy_swap_chain(struct sample_info &info)
{
    for (uint32_t i = 0; i < info.swapchainImageCount; i++)
    {
        vkDestroyImageView(info.device, info.buffers[i].view, NULL);
    }
    vkDestroySwapchainKHR(info.device, info.swap_chain, NULL);
}

void destroy_framebuffers(struct sample_info &info)
{
    for (uint32_t i = 0; i < info.swapchainImageCount; i++)
    {
        vkDestroyFramebuffer(info.device, info.framebuffers[i], NULL);
    }
    free(info.framebuffers);
}

void destroy_renderpass(struct sample_info &info)
{
    vkDestroyRenderPass(info.device, info.render_pass, NULL);
}

void destroy_device(struct sample_info &info)
{
    vkDeviceWaitIdle(info.device);
    vkDestroyDevice(info.device, NULL);
}

void destroy_instance(struct sample_info &info)
{
    vkDestroyInstance(info.inst, NULL);
}