1 /*
2  * Vulkan Samples
3  *
4  * Copyright (C) 2015-2016 Valve Corporation
5  * Copyright (C) 2015-2016 LunarG, Inc.
6  * Copyright (C) 2015-2018 Google, Inc.
7  *
8  * Licensed under the Apache License, Version 2.0 (the "License");
9  * you may not use this file except in compliance with the License.
10  * You may obtain a copy of the License at
11  *
12  *     http://www.apache.org/licenses/LICENSE-2.0
13  *
14  * Unless required by applicable law or agreed to in writing, software
15  * distributed under the License is distributed on an "AS IS" BASIS,
16  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17  * See the License for the specific language governing permissions and
18  * limitations under the License.
19  */
20 
21 /*
22 VULKAN_SAMPLE_DESCRIPTION
23 samples utility functions
24 */
25 
26 #include "vulkan_command_buffer_utils.h"
27 
28 #include <assert.h>
29 #include <string.h>
30 #include <cstdlib>
31 #include <iterator>
32 
33 #include "SPIRV/GlslangToSpv.h"
34 
35 #if defined(VK_USE_PLATFORM_WAYLAND_KHR)
36 #    include <linux/input.h>
37 #endif
38 
39 using namespace std;
40 
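/*
 * Rough usage sketch, inferred from the DEPENDS comments throughout this
 * file.  The argument values shown are illustrative only; a real sample
 * chooses its own window size, usage flags and layouts:
 *
 *     sample_info info = {};
 *     init_global_layer_properties(info);
 *     init_instance_extension_names(info);
 *     init_device_extension_names(info);
 *     init_instance(info, "sample");
 *     init_enumerate_device(info, 1);
 *     init_window_size(info, 512, 512);
 *     init_connection(info);
 *     init_window(info);
 *     init_swapchain_extension(info);
 *     init_device(info);
 *     init_command_pool(info, 0);
 *     init_command_buffer(info);
 *     init_device_queue(info);
 *     init_swap_chain(info, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT);
 *     init_depth_buffer(info);
 *     init_renderpass(info, true, true, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
 *     init_framebuffers(info, true);
 */
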
41 /*
42  * Gather the instance extension properties exposed by the given layer into layer_props.
43  */
44 VkResult init_global_extension_properties(layer_properties &layer_props)
45 {
46     VkExtensionProperties *instance_extensions;
47     uint32_t instance_extension_count;
48     VkResult res;
49     char *layer_name = NULL;
50 
51     layer_name = layer_props.properties.layerName;
52 
53     do
54     {
55         res = vkEnumerateInstanceExtensionProperties(layer_name, &instance_extension_count, NULL);
56         if (res)
57             return res;
58 
59         if (instance_extension_count == 0)
60         {
61             return VK_SUCCESS;
62         }
63 
64         layer_props.instance_extensions.resize(instance_extension_count);
65         instance_extensions = layer_props.instance_extensions.data();
66         res = vkEnumerateInstanceExtensionProperties(layer_name, &instance_extension_count,
67                                                      instance_extensions);
68     } while (res == VK_INCOMPLETE);
69 
70     return res;
71 }
72 
73 /*
74  * Enumerate the available instance layers and gather each layer's instance extensions into info.instance_layer_properties.
75  */
76 VkResult init_global_layer_properties(struct sample_info &info)
77 {
78     uint32_t instance_layer_count;
79     VkLayerProperties *vk_props = NULL;
80     VkResult res;
81 
82     /*
83      * It's possible, though very rare, that the number of
84      * instance layers could change. For example, installing something
85      * could include new layers that the loader would pick up
86      * between the initial query for the count and the
87      * request for VkLayerProperties. The loader indicates that
88      * by returning a VK_INCOMPLETE status and will update the
89      * count parameter.
90      * The count parameter will be updated with the number of
91      * entries loaded into the data pointer - in case the number
92      * of layers went down or is smaller than the size given.
93      */
94     do
95     {
96         res = vkEnumerateInstanceLayerProperties(&instance_layer_count, NULL);
97         if (res)
98             return res;
99 
100         if (instance_layer_count == 0)
101         {
102             return VK_SUCCESS;
103         }
104 
105         vk_props = (VkLayerProperties *)realloc(vk_props,
106                                                 instance_layer_count * sizeof(VkLayerProperties));
107 
108         res = vkEnumerateInstanceLayerProperties(&instance_layer_count, vk_props);
109     } while (res == VK_INCOMPLETE);
110 
111     /*
112      * Now gather the extension list for each instance layer.
113      */
114     for (uint32_t i = 0; i < instance_layer_count; i++)
115     {
116         layer_properties layer_props;
117         layer_props.properties = vk_props[i];
118         res                    = init_global_extension_properties(layer_props);
119         if (res)
120             return res;
121         info.instance_layer_properties.push_back(layer_props);
122     }
123     free(vk_props);
124 
125     return res;
126 }
127 
128 VkResult init_device_extension_properties(struct sample_info &info, layer_properties &layer_props)
129 {
130     VkExtensionProperties *device_extensions;
131     uint32_t device_extension_count;
132     VkResult res;
133     char *layer_name = NULL;
134 
135     layer_name = layer_props.properties.layerName;
136 
137     do
138     {
139         res = vkEnumerateDeviceExtensionProperties(info.gpus[0], layer_name,
140                                                    &device_extension_count, NULL);
141         if (res)
142             return res;
143 
144         if (device_extension_count == 0)
145         {
146             return VK_SUCCESS;
147         }
148 
149         layer_props.device_extensions.resize(device_extension_count);
150         device_extensions = layer_props.device_extensions.data();
151         res               = vkEnumerateDeviceExtensionProperties(info.gpus[0], layer_name,
152                                                                  &device_extension_count, device_extensions);
153     } while (res == VK_INCOMPLETE);
154 
155     return res;
156 }
157 
158 /*
159  * Return 1 (true) if all layer names specified in check_names
160  * can be found in given layer properties.
161  */
162 VkBool32 demo_check_layers(const std::vector<layer_properties> &layer_props,
163                            const std::vector<const char *> &layer_names)
164 {
165     uint32_t check_count = layer_names.size();
166     uint32_t layer_count = layer_props.size();
167     for (uint32_t i = 0; i < check_count; i++)
168     {
169         VkBool32 found = 0;
170         for (uint32_t j = 0; j < layer_count; j++)
171         {
172             if (!strcmp(layer_names[i], layer_props[j].properties.layerName))
173             {
174                 found = 1;
175             }
176         }
177         if (!found)
178         {
179             std::cout << "Cannot find layer: " << layer_names[i] << std::endl;
180             return 0;
181         }
182     }
183     return 1;
184 }
185 
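/*
 * Select the platform-specific surface extension and, on everything except
 * Android, the validation layers to enable plus the debug report extension.
 */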
186 void init_instance_extension_names(struct sample_info &info)
187 {
188     info.instance_extension_names.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
189 #ifdef __ANDROID__
190     info.instance_extension_names.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
191 #elif defined(_WIN32)
192     info.instance_extension_names.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
193 #elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
194     info.instance_extension_names.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
195 #else
196     info.instance_extension_names.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
197 #endif
198 #ifndef __ANDROID__
199     info.instance_layer_names.push_back("VK_LAYER_LUNARG_standard_validation");
200     if (!demo_check_layers(info.instance_layer_properties, info.instance_layer_names))
201     {
202         // If standard validation is not present, search instead for the
203         // individual layers that make it up, in the correct order.
204         //
205 
206         info.instance_layer_names.clear();
207         info.instance_layer_names.push_back("VK_LAYER_GOOGLE_threading");
208         info.instance_layer_names.push_back("VK_LAYER_LUNARG_parameter_validation");
209         info.instance_layer_names.push_back("VK_LAYER_LUNARG_object_tracker");
210         info.instance_layer_names.push_back("VK_LAYER_LUNARG_core_validation");
211         info.instance_layer_names.push_back("VK_LAYER_LUNARG_image");
212         info.instance_layer_names.push_back("VK_LAYER_LUNARG_swapchain");
213         info.instance_layer_names.push_back("VK_LAYER_GOOGLE_unique_objects");
214 
215         if (!demo_check_layers(info.instance_layer_properties, info.instance_layer_names))
216         {
217             exit(1);
218         }
219     }
220 
221     // Enable debug callback extension
222     info.instance_extension_names.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
223 #endif
224 }
225 
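/*
 * Create the Vulkan instance using the layer and extension names gathered
 * by the init_*_names() helpers above.
 */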
226 VkResult init_instance(struct sample_info &info, char const *const app_short_name)
227 {
228     VkResult res = VK_SUCCESS;
229 #if ANGLE_SHARED_LIBVULKAN
230     res = volkInitialize();
231     ASSERT(res == VK_SUCCESS);
232 #endif  // ANGLE_SHARED_LIBVULKAN
233     VkApplicationInfo app_info  = {};
234     app_info.sType              = VK_STRUCTURE_TYPE_APPLICATION_INFO;
235     app_info.pNext              = NULL;
236     app_info.pApplicationName   = app_short_name;
237     app_info.applicationVersion = 1;
238     app_info.pEngineName        = app_short_name;
239     app_info.engineVersion      = 1;
240     app_info.apiVersion         = VK_API_VERSION_1_0;
241 
242     VkInstanceCreateInfo inst_info = {};
243     inst_info.sType                = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
244     inst_info.pNext                = NULL;
245     inst_info.flags                = 0;
246     inst_info.pApplicationInfo     = &app_info;
247     inst_info.enabledLayerCount    = info.instance_layer_names.size();
248     inst_info.ppEnabledLayerNames =
249         info.instance_layer_names.size() ? info.instance_layer_names.data() : NULL;
250     inst_info.enabledExtensionCount   = info.instance_extension_names.size();
251     inst_info.ppEnabledExtensionNames = info.instance_extension_names.data();
252 
253     res = vkCreateInstance(&inst_info, NULL, &info.inst);
254     ASSERT(res == VK_SUCCESS);
255 #if ANGLE_SHARED_LIBVULKAN
256     volkLoadInstance(info.inst);
257 #endif  // ANGLE_SHARED_LIBVULKAN
258 
259     return res;
260 }
261 
262 void init_device_extension_names(struct sample_info &info)
263 {
264     info.device_extension_names.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
265 }
266 
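/*
 * Enumerate the physical devices, then cache queue family, memory and device
 * properties for gpus[0] and query device extensions for the enabled layers.
 */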
267 VkResult init_enumerate_device(struct sample_info &info, uint32_t gpu_count)
268 {
269     VkResult res = vkEnumeratePhysicalDevices(info.inst, &gpu_count, NULL);
270     ASSERT(gpu_count);
271     info.gpus.resize(gpu_count);
272 
273     res = vkEnumeratePhysicalDevices(info.inst, &gpu_count, info.gpus.data());
274     ASSERT(!res);
275 
276     vkGetPhysicalDeviceQueueFamilyProperties(info.gpus[0], &info.queue_family_count, NULL);
277     ASSERT(info.queue_family_count >= 1);
278 
279     info.queue_props.resize(info.queue_family_count);
280     vkGetPhysicalDeviceQueueFamilyProperties(info.gpus[0], &info.queue_family_count,
281                                              info.queue_props.data());
282     ASSERT(info.queue_family_count >= 1);
283 
284     /* This is as good a place as any to do this */
285     vkGetPhysicalDeviceMemoryProperties(info.gpus[0], &info.memory_properties);
286     vkGetPhysicalDeviceProperties(info.gpus[0], &info.gpu_props);
287     /* query device extensions for enabled layers */
288     for (auto &layer_props : info.instance_layer_properties)
289     {
290         init_device_extension_properties(info, layer_props);
291     }
292 
293     return res;
294 }
295 
296 #if defined(VK_USE_PLATFORM_WAYLAND_KHR)
297 
298 static void handle_ping(void *data, wl_shell_surface *shell_surface, uint32_t serial)
299 {
300     wl_shell_surface_pong(shell_surface, serial);
301 }
302 
303 static void handle_configure(void *data,
304                              wl_shell_surface *shell_surface,
305                              uint32_t edges,
306                              int32_t width,
307                              int32_t height)
308 {}
309 
310 static void handle_popup_done(void *data, wl_shell_surface *shell_surface) {}
311 
312 static const wl_shell_surface_listener shell_surface_listener = {handle_ping, handle_configure,
313                                                                  handle_popup_done};
314 
315 static void registry_handle_global(void *data,
316                                    wl_registry *registry,
317                                    uint32_t id,
318                                    const char *interface,
319                                    uint32_t version)
320 {
321     sample_info *info = (sample_info *)data;
322     // pick up Wayland objects when they appear
323     if (strcmp(interface, "wl_compositor") == 0)
324     {
325         info->compositor =
326             (wl_compositor *)wl_registry_bind(registry, id, &wl_compositor_interface, 1);
327     }
328     else if (strcmp(interface, "wl_shell") == 0)
329     {
330         info->shell = (wl_shell *)wl_registry_bind(registry, id, &wl_shell_interface, 1);
331     }
332 }
333 
334 static void registry_handle_global_remove(void *data, wl_registry *registry, uint32_t name) {}
335 
336 static const wl_registry_listener registry_listener = {registry_handle_global,
337                                                        registry_handle_global_remove};
338 
339 #endif
340 
341 void init_connection(struct sample_info &info)
342 {
343 #if defined(VK_USE_PLATFORM_XCB_KHR)
344     const xcb_setup_t *setup;
345     xcb_screen_iterator_t iter;
346     int scr;
347 
348     info.connection = xcb_connect(NULL, &scr);
349     if (info.connection == NULL || xcb_connection_has_error(info.connection))
350     {
351         std::cout << "Unable to make an XCB connection\n";
352         exit(-1);
353     }
354 
355     setup = xcb_get_setup(info.connection);
356     iter  = xcb_setup_roots_iterator(setup);
357     while (scr-- > 0)
358         xcb_screen_next(&iter);
359 
360     info.screen = iter.data;
361 #elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
362     info.display = wl_display_connect(nullptr);
363 
364     if (info.display == nullptr)
365     {
366         printf(
367             "Cannot find a compatible Vulkan installable client driver "
368             "(ICD).\nExiting ...\n");
369         fflush(stdout);
370         exit(1);
371     }
372 
373     info.registry = wl_display_get_registry(info.display);
374     wl_registry_add_listener(info.registry, &registry_listener, &info);
375     wl_display_dispatch(info.display);
376 #endif
377 }
378 #ifdef _WIN32
379 static void run(struct sample_info *info)
380 { /* Placeholder for samples that want to show dynamic content */
381 }
382 
383 // MS-Windows event handling function:
384 LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
385 {
386     struct sample_info *info =
387         reinterpret_cast<struct sample_info *>(GetWindowLongPtr(hWnd, GWLP_USERDATA));
388 
389     switch (uMsg)
390     {
391         case WM_CLOSE:
392             PostQuitMessage(0);
393             break;
394         case WM_PAINT:
395             run(info);
396             return 0;
397         default:
398             break;
399     }
400     return (DefWindowProc(hWnd, uMsg, wParam, lParam));
401 }
402 
403 void init_window(struct sample_info &info)
404 {
405     WNDCLASSEXA win_class;
406     ASSERT(info.width > 0);
407     ASSERT(info.height > 0);
408 
409     info.connection = GetModuleHandle(NULL);
410     sprintf(info.name, "Sample");
411 
412     // Initialize the window class structure:
413     win_class.cbSize        = sizeof(WNDCLASSEX);
414     win_class.style         = CS_HREDRAW | CS_VREDRAW;
415     win_class.lpfnWndProc   = WndProc;
416     win_class.cbClsExtra    = 0;
417     win_class.cbWndExtra    = 0;
418     win_class.hInstance     = info.connection;  // hInstance
419     win_class.hIcon         = LoadIcon(NULL, IDI_APPLICATION);
420     win_class.hCursor       = LoadCursor(NULL, IDC_ARROW);
421     win_class.hbrBackground = (HBRUSH)GetStockObject(WHITE_BRUSH);
422     win_class.lpszMenuName  = NULL;
423     win_class.lpszClassName = info.name;
424     win_class.hIconSm       = LoadIcon(NULL, IDI_WINLOGO);
425     // Register window class:
426     if (!RegisterClassExA(&win_class))
427     {
428         // It didn't work, so try to give a useful error:
429         printf("Unexpected error trying to start the application!\n");
430         fflush(stdout);
431         exit(1);
432     }
433     // Create window with the registered class:
434     RECT wr = {0, 0, info.width, info.height};
435     AdjustWindowRect(&wr, WS_OVERLAPPEDWINDOW, FALSE);
436     info.window = CreateWindowExA(0,
437                                   info.name,             // class name
438                                   info.name,             // app name
439                                   WS_OVERLAPPEDWINDOW |  // window style
440                                       WS_VISIBLE | WS_SYSMENU,
441                                   100, 100,            // x/y coords
442                                   wr.right - wr.left,  // width
443                                   wr.bottom - wr.top,  // height
444                                   NULL,                // handle to parent
445                                   NULL,                // handle to menu
446                                   info.connection,     // hInstance
447                                   NULL);               // no extra parameters
448     if (!info.window)
449     {
450         // It didn't work, so try to give a useful error:
451         printf("Cannot create a window in which to draw!\n");
452         fflush(stdout);
453         exit(1);
454     }
455     SetWindowLongPtr(info.window, GWLP_USERDATA, (LONG_PTR)&info);
456 }
457 
458 void destroy_window(struct sample_info &info)
459 {
460     vkDestroySurfaceKHR(info.inst, info.surface, NULL);
461     DestroyWindow(info.window);
462     UnregisterClassA(info.name, GetModuleHandle(NULL));
463 }
464 
465 #elif defined(__ANDROID__)
466 // Android implementation.
467 void init_window(struct sample_info &info) {}
468 
469 void destroy_window(struct sample_info &info)
470 {
471     vkDestroySurfaceKHR(info.inst, info.surface, NULL);
472 }
473 
474 #elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
475 
476 void init_window(struct sample_info &info)
477 {
478     ASSERT(info.width > 0);
479     ASSERT(info.height > 0);
480 
481     info.window = wl_compositor_create_surface(info.compositor);
482     if (!info.window)
483     {
484         printf("Cannot create wayland_surface from compositor!\n");
485         fflush(stdout);
486         exit(1);
487     }
488 
489     info.shell_surface = wl_shell_get_shell_surface(info.shell, info.window);
490     if (!info.shell_surface)
491     {
492         printf("Cannot get shell_surface from wayland_surface!\n");
493         fflush(stdout);
494         exit(1);
495     }
496 
497     wl_shell_surface_add_listener(info.shell_surface, &shell_surface_listener, &info);
498     wl_shell_surface_set_toplevel(info.shell_surface);
499 }
500 
501 void destroy_window(struct sample_info &info)
502 {
503     wl_shell_surface_destroy(info.shell_surface);
504     wl_surface_destroy(info.window);
505     wl_shell_destroy(info.shell);
506     wl_compositor_destroy(info.compositor);
507     wl_registry_destroy(info.registry);
508     wl_display_disconnect(info.display);
509 }
510 
511 #else
512 
513 void init_window(struct sample_info &info)
514 {
515     ASSERT(info.width > 0);
516     ASSERT(info.height > 0);
517 
518     uint32_t value_mask, value_list[32];
519 
520     info.window = xcb_generate_id(info.connection);
521 
522     value_mask    = XCB_CW_BACK_PIXEL | XCB_CW_EVENT_MASK;
523     value_list[0] = info.screen->black_pixel;
524     value_list[1] = XCB_EVENT_MASK_KEY_RELEASE | XCB_EVENT_MASK_EXPOSURE;
525 
526     xcb_create_window(info.connection, XCB_COPY_FROM_PARENT, info.window, info.screen->root, 0, 0,
527                       info.width, info.height, 0, XCB_WINDOW_CLASS_INPUT_OUTPUT,
528                       info.screen->root_visual, value_mask, value_list);
529 
530     /* Magic code that will send notification when window is destroyed */
531     xcb_intern_atom_cookie_t cookie = xcb_intern_atom(info.connection, 1, 12, "WM_PROTOCOLS");
532     xcb_intern_atom_reply_t *reply  = xcb_intern_atom_reply(info.connection, cookie, 0);
533 
534     xcb_intern_atom_cookie_t cookie2 = xcb_intern_atom(info.connection, 0, 16, "WM_DELETE_WINDOW");
535     info.atom_wm_delete_window       = xcb_intern_atom_reply(info.connection, cookie2, 0);
536 
537     xcb_change_property(info.connection, XCB_PROP_MODE_REPLACE, info.window, (*reply).atom, 4, 32,
538                         1, &(*info.atom_wm_delete_window).atom);
539     free(reply);
540 
541     xcb_map_window(info.connection, info.window);
542 
543     // Force the x/y coordinates to 100,100 so that results are identical in consecutive
544     // runs
545     const uint32_t coords[] = {100, 100};
546     xcb_configure_window(info.connection, info.window, XCB_CONFIG_WINDOW_X | XCB_CONFIG_WINDOW_Y,
547                          coords);
548     xcb_flush(info.connection);
549 
550     xcb_generic_event_t *e;
551     while ((e = xcb_wait_for_event(info.connection)))
552     {
553         if ((e->response_type & ~0x80) == XCB_EXPOSE)
554             break;
555     }
556 }
557 
558 void destroy_window(struct sample_info &info)
559 {
560     vkDestroySurfaceKHR(info.inst, info.surface, NULL);
561     xcb_destroy_window(info.connection, info.window);
562     xcb_disconnect(info.connection);
563 }
564 
565 #endif  // _WIN32
566 
567 void init_window_size(struct sample_info &info, int32_t default_width, int32_t default_height)
568 {
569 #ifdef __ANDROID__
570     info.mOSWindow = OSWindow::New();
571     ASSERT(info.mOSWindow != nullptr);
572     info.mOSWindow->initialize("VulkanTest", default_width, default_height);
573 #endif
574     info.width  = default_width;
575     info.height = default_height;
576 }
577 
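/*
 * Create the platform window surface, pick graphics and present queue
 * families, and select a surface format.
 */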
578 void init_swapchain_extension(struct sample_info &info)
579 {
580     /* DEPENDS on init_connection() and init_window() */
581 
582     VkResult res;
583 
584 // Construct the surface description:
585 #ifdef _WIN32
586     VkWin32SurfaceCreateInfoKHR createInfo = {};
587     createInfo.sType                       = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
588     createInfo.pNext                       = NULL;
589     createInfo.hinstance                   = info.connection;
590     createInfo.hwnd                        = info.window;
591     res = vkCreateWin32SurfaceKHR(info.inst, &createInfo, NULL, &info.surface);
592 #elif defined(__ANDROID__)
593     GET_INSTANCE_PROC_ADDR(info.inst, CreateAndroidSurfaceKHR);
594     VkAndroidSurfaceCreateInfoKHR createInfo;
595     createInfo.sType  = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
596     createInfo.pNext  = nullptr;
597     createInfo.flags  = 0;
598     createInfo.window = info.mOSWindow->getNativeWindow();
599     res = info.fpCreateAndroidSurfaceKHR(info.inst, &createInfo, nullptr, &info.surface);
600 #elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
601     VkWaylandSurfaceCreateInfoKHR createInfo = {};
602     createInfo.sType                         = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR;
603     createInfo.pNext                         = NULL;
604     createInfo.display                       = info.display;
605     createInfo.surface                       = info.window;
606     res = vkCreateWaylandSurfaceKHR(info.inst, &createInfo, NULL, &info.surface);
607 #else
608     VkXcbSurfaceCreateInfoKHR createInfo = {};
609     createInfo.sType                     = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
610     createInfo.pNext                     = NULL;
611     createInfo.connection                = info.connection;
612     createInfo.window                    = info.window;
613     res = vkCreateXcbSurfaceKHR(info.inst, &createInfo, NULL, &info.surface);
614 #endif  // __ANDROID__  && _WIN32
615     ASSERT(res == VK_SUCCESS);
616 
617     // Iterate over each queue to learn whether it supports presenting:
618     VkBool32 *pSupportsPresent = (VkBool32 *)malloc(info.queue_family_count * sizeof(VkBool32));
619     for (uint32_t i = 0; i < info.queue_family_count; i++)
620     {
621         vkGetPhysicalDeviceSurfaceSupportKHR(info.gpus[0], i, info.surface, &pSupportsPresent[i]);
622     }
623 
624     // Search for a graphics and a present queue in the array of queue
625     // families, try to find one that supports both
626     info.graphics_queue_family_index = UINT32_MAX;
627     info.present_queue_family_index  = UINT32_MAX;
628     for (uint32_t i = 0; i < info.queue_family_count; ++i)
629     {
630         if ((info.queue_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0)
631         {
632             if (info.graphics_queue_family_index == UINT32_MAX)
633                 info.graphics_queue_family_index = i;
634 
635             if (pSupportsPresent[i] == VK_TRUE)
636             {
637                 info.graphics_queue_family_index = i;
638                 info.present_queue_family_index  = i;
639                 break;
640             }
641         }
642     }
643 
644     if (info.present_queue_family_index == UINT32_MAX)
645     {
646         // If we didn't find a queue that supports both graphics and present, then
647         // find a separate present queue.
648         for (size_t i = 0; i < info.queue_family_count; ++i)
649             if (pSupportsPresent[i] == VK_TRUE)
650             {
651                 info.present_queue_family_index = i;
652                 break;
653             }
654     }
655     free(pSupportsPresent);
656 
657     // Generate error if could not find queues that support graphics
658     // and present
659     if (info.graphics_queue_family_index == UINT32_MAX ||
660         info.present_queue_family_index == UINT32_MAX)
661     {
662         std::cout << "Could not find queues for both graphics and present";
663         exit(-1);
664     }
665 
666     // Get the list of VkFormats that are supported:
667     uint32_t formatCount;
668     res = vkGetPhysicalDeviceSurfaceFormatsKHR(info.gpus[0], info.surface, &formatCount, NULL);
669     ASSERT(res == VK_SUCCESS);
670     VkSurfaceFormatKHR *surfFormats =
671         (VkSurfaceFormatKHR *)malloc(formatCount * sizeof(VkSurfaceFormatKHR));
672     res =
673         vkGetPhysicalDeviceSurfaceFormatsKHR(info.gpus[0], info.surface, &formatCount, surfFormats);
674     ASSERT(res == VK_SUCCESS);
675     // If the format list includes just one entry of VK_FORMAT_UNDEFINED,
676     // the surface has no preferred format.  Otherwise, at least one
677     // supported format will be returned.
678     if (formatCount == 1 && surfFormats[0].format == VK_FORMAT_UNDEFINED)
679     {
680         info.format = VK_FORMAT_B8G8R8A8_UNORM;
681     }
682     else
683     {
684         ASSERT(formatCount >= 1);
685         info.format = surfFormats[0].format;
686     }
687     free(surfFormats);
688 }
689 
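/*
 * Create the logical device with a single graphics queue and the requested
 * device extensions.
 */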
690 VkResult init_device(struct sample_info &info)
691 {
692     VkResult res;
693     VkDeviceQueueCreateInfo queue_info = {};
694 
695     float queue_priorities[1]   = {0.0};
696     queue_info.sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
697     queue_info.pNext            = NULL;
698     queue_info.queueCount       = 1;
699     queue_info.pQueuePriorities = queue_priorities;
700     queue_info.queueFamilyIndex = info.graphics_queue_family_index;
701 
702     VkDeviceCreateInfo device_info    = {};
703     device_info.sType                 = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
704     device_info.pNext                 = NULL;
705     device_info.queueCreateInfoCount  = 1;
706     device_info.pQueueCreateInfos     = &queue_info;
707     device_info.enabledExtensionCount = info.device_extension_names.size();
708     device_info.ppEnabledExtensionNames =
709         device_info.enabledExtensionCount ? info.device_extension_names.data() : NULL;
710     device_info.pEnabledFeatures = NULL;
711 
712     res = vkCreateDevice(info.gpus[0], &device_info, NULL, &info.device);
713     ASSERT(res == VK_SUCCESS);
714 #if ANGLE_SHARED_LIBVULKAN
715     volkLoadDevice(info.device);
716 #endif  // ANGLE_SHARED_LIBVULKAN
717 
718     return res;
719 }
720 
721 void init_command_pool(struct sample_info &info, VkCommandPoolCreateFlags cmd_pool_create_flags)
722 {
723     /* DEPENDS on init_swapchain_extension() */
724     VkResult res;
725 
726     VkCommandPoolCreateInfo cmd_pool_info = {};
727     cmd_pool_info.sType                   = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
728     cmd_pool_info.pNext                   = NULL;
729     cmd_pool_info.queueFamilyIndex        = info.graphics_queue_family_index;
730     cmd_pool_info.flags                   = cmd_pool_create_flags;
731 
732     res = vkCreateCommandPool(info.device, &cmd_pool_info, NULL, &info.cmd_pool);
733     ASSERT(res == VK_SUCCESS);
734 }
735 
736 void init_command_buffer(struct sample_info &info)
737 {
738     /* DEPENDS on init_swapchain_extension() and init_command_pool() */
739     VkResult res;
740 
741     VkCommandBufferAllocateInfo cmd = {};
742     cmd.sType                       = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
743     cmd.pNext                       = NULL;
744     cmd.commandPool                 = info.cmd_pool;
745     cmd.level                       = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
746     cmd.commandBufferCount          = 1;
747 
748     res = vkAllocateCommandBuffers(info.device, &cmd, &info.cmd);
749     ASSERT(res == VK_SUCCESS);
750 }
751 
752 void init_command_buffer_array(struct sample_info &info, int numBuffers)
753 {
754     /* DEPENDS on init_swapchain_extension() and init_command_pool() */
755     VkResult res;
756     info.cmds.resize(numBuffers);
757     ASSERT(info.cmds.data() != NULL);
758 
759     VkCommandBufferAllocateInfo cmd = {};
760     cmd.sType                       = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
761     cmd.pNext                       = NULL;
762     cmd.commandPool                 = info.cmd_pool;
763     cmd.level                       = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
764     cmd.commandBufferCount          = numBuffers;
765 
766     res = vkAllocateCommandBuffers(info.device, &cmd, info.cmds.data());
767     ASSERT(res == VK_SUCCESS);
768 }
769 
770 void init_command_buffer2_array(struct sample_info &info, int numBuffers)
771 {
772     /* DEPENDS on init_swapchain_extension() and init_command_pool() */
773     VkResult res;
774     info.cmd2s.resize(numBuffers);
775     VkCommandBufferAllocateInfo cmd = {};
776     cmd.sType                       = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
777     cmd.pNext                       = NULL;
778     cmd.commandPool                 = info.cmd_pool;
779     cmd.level                       = VK_COMMAND_BUFFER_LEVEL_SECONDARY;
780     cmd.commandBufferCount          = numBuffers;
781 
782     res = vkAllocateCommandBuffers(info.device, &cmd, info.cmd2s.data());
783     ASSERT(res == VK_SUCCESS);
784 }
785 
786 void init_device_queue(struct sample_info &info)
787 {
788     /* DEPENDS on init_swapchain_extension() */
789 
790     vkGetDeviceQueue(info.device, info.graphics_queue_family_index, 0, &info.graphics_queue);
791     if (info.graphics_queue_family_index == info.present_queue_family_index)
792     {
793         info.present_queue = info.graphics_queue;
794     }
795     else
796     {
797         vkGetDeviceQueue(info.device, info.present_queue_family_index, 0, &info.present_queue);
798     }
799 }
800 
801 void init_swap_chain(struct sample_info &info, VkImageUsageFlags usageFlags)
802 {
803     /* DEPENDS on info.cmd and info.queue initialized */
804 
805     VkResult res;
806     VkSurfaceCapabilitiesKHR surfCapabilities;
807 
808     res = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(info.gpus[0], info.surface, &surfCapabilities);
809     ASSERT(res == VK_SUCCESS);
810 
811     uint32_t presentModeCount;
812     res = vkGetPhysicalDeviceSurfacePresentModesKHR(info.gpus[0], info.surface, &presentModeCount,
813                                                     NULL);
814     ASSERT(res == VK_SUCCESS);
815     VkPresentModeKHR *presentModes =
816         (VkPresentModeKHR *)malloc(presentModeCount * sizeof(VkPresentModeKHR));
817     ASSERT(presentModes);
818     res = vkGetPhysicalDeviceSurfacePresentModesKHR(info.gpus[0], info.surface, &presentModeCount,
819                                                     presentModes);
820     ASSERT(res == VK_SUCCESS);
821 
822     VkExtent2D swapchainExtent;
823     // width and height are either both 0xFFFFFFFF, or both not 0xFFFFFFFF.
824     if (surfCapabilities.currentExtent.width == 0xFFFFFFFF)
825     {
826         // If the surface size is undefined, the size is set to
827         // the size of the images requested.
828         swapchainExtent.width  = info.width;
829         swapchainExtent.height = info.height;
830         if (swapchainExtent.width < surfCapabilities.minImageExtent.width)
831         {
832             swapchainExtent.width = surfCapabilities.minImageExtent.width;
833         }
834         else if (swapchainExtent.width > surfCapabilities.maxImageExtent.width)
835         {
836             swapchainExtent.width = surfCapabilities.maxImageExtent.width;
837         }
838 
839         if (swapchainExtent.height < surfCapabilities.minImageExtent.height)
840         {
841             swapchainExtent.height = surfCapabilities.minImageExtent.height;
842         }
843         else if (swapchainExtent.height > surfCapabilities.maxImageExtent.height)
844         {
845             swapchainExtent.height = surfCapabilities.maxImageExtent.height;
846         }
847     }
848     else
849     {
850         // If the surface size is defined, the swap chain size must match
851         swapchainExtent = surfCapabilities.currentExtent;
852     }
853 
854     // The FIFO present mode is guaranteed by the spec to be supported
855     // Also note that the current Android driver only supports FIFO
856     VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;
857 
858     for (uint32_t presentModeIndex = 0; presentModeIndex < presentModeCount; ++presentModeIndex)
859     {
860         if (presentModes[presentModeIndex] == VK_PRESENT_MODE_IMMEDIATE_KHR)
861         {
862             swapchainPresentMode = VK_PRESENT_MODE_IMMEDIATE_KHR;
863             break;
864         }
865     }
866 
867     // Determine the number of VkImage's to use in the swap chain.
868     // We need to acquire only 1 presentable image at a time.
869     // Asking for minImageCount images ensures that we can acquire
870     // 1 presentable image as long as we present it before attempting
871     // to acquire another.
872     uint32_t desiredNumberOfSwapChainImages = surfCapabilities.minImageCount;
873 
874     VkSurfaceTransformFlagBitsKHR preTransform;
875     if (surfCapabilities.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR)
876     {
877         preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
878     }
879     else
880     {
881         preTransform = surfCapabilities.currentTransform;
882     }
883 
884     // Find a supported composite alpha mode - one of these is guaranteed to be set
885     VkCompositeAlphaFlagBitsKHR compositeAlpha         = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
886     VkCompositeAlphaFlagBitsKHR compositeAlphaFlags[4] = {
887         VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
888         VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
889         VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
890         VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR,
891     };
892     for (uint32_t i = 0; i < sizeof(compositeAlphaFlags) / sizeof(compositeAlphaFlags[0]); i++)
893     {
894         if (surfCapabilities.supportedCompositeAlpha & compositeAlphaFlags[i])
895         {
896             compositeAlpha = compositeAlphaFlags[i];
897             break;
898         }
899     }
900 
901     VkSwapchainCreateInfoKHR swapchain_ci = {};
902     swapchain_ci.sType                    = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
903     swapchain_ci.pNext                    = NULL;
904     swapchain_ci.surface                  = info.surface;
905     swapchain_ci.minImageCount            = desiredNumberOfSwapChainImages;
906     swapchain_ci.imageFormat              = info.format;
907     swapchain_ci.imageExtent.width        = swapchainExtent.width;
908     swapchain_ci.imageExtent.height       = swapchainExtent.height;
909     swapchain_ci.preTransform             = preTransform;
910     swapchain_ci.compositeAlpha           = compositeAlpha;
911     swapchain_ci.imageArrayLayers         = 1;
912     swapchain_ci.presentMode              = swapchainPresentMode;
913     swapchain_ci.oldSwapchain             = VK_NULL_HANDLE;
914 #ifndef __ANDROID__
915     swapchain_ci.clipped = true;
916 #else
917     swapchain_ci.clipped = false;
918 #endif
919     swapchain_ci.imageColorSpace       = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
920     swapchain_ci.imageUsage            = usageFlags;
921     swapchain_ci.imageSharingMode      = VK_SHARING_MODE_EXCLUSIVE;
922     swapchain_ci.queueFamilyIndexCount = 0;
923     swapchain_ci.pQueueFamilyIndices   = NULL;
924     uint32_t queueFamilyIndices[2]     = {(uint32_t)info.graphics_queue_family_index,
925                                           (uint32_t)info.present_queue_family_index};
926     if (info.graphics_queue_family_index != info.present_queue_family_index)
927     {
928         // If the graphics and present queues are from different queue families,
929         // we either have to explicitly transfer ownership of images between the
930         // queues, or we have to create the swapchain with imageSharingMode
931         // as VK_SHARING_MODE_CONCURRENT
932         swapchain_ci.imageSharingMode      = VK_SHARING_MODE_CONCURRENT;
933         swapchain_ci.queueFamilyIndexCount = 2;
934         swapchain_ci.pQueueFamilyIndices   = queueFamilyIndices;
935     }
936 
937     res = vkCreateSwapchainKHR(info.device, &swapchain_ci, NULL, &info.swap_chain);
938     ASSERT(res == VK_SUCCESS);
939 
940     res = vkGetSwapchainImagesKHR(info.device, info.swap_chain, &info.swapchainImageCount, NULL);
941     ASSERT(res == VK_SUCCESS);
942 
943     VkImage *swapchainImages = (VkImage *)malloc(info.swapchainImageCount * sizeof(VkImage));
944     ASSERT(swapchainImages);
945     res = vkGetSwapchainImagesKHR(info.device, info.swap_chain, &info.swapchainImageCount,
946                                   swapchainImages);
947     ASSERT(res == VK_SUCCESS);
948 
949     for (uint32_t i = 0; i < info.swapchainImageCount; i++)
950     {
951         swap_chain_buffer sc_buffer;
952 
953         VkImageViewCreateInfo color_image_view           = {};
954         color_image_view.sType                           = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
955         color_image_view.pNext                           = NULL;
956         color_image_view.format                          = info.format;
957         color_image_view.components.r                    = VK_COMPONENT_SWIZZLE_R;
958         color_image_view.components.g                    = VK_COMPONENT_SWIZZLE_G;
959         color_image_view.components.b                    = VK_COMPONENT_SWIZZLE_B;
960         color_image_view.components.a                    = VK_COMPONENT_SWIZZLE_A;
961         color_image_view.subresourceRange.aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT;
962         color_image_view.subresourceRange.baseMipLevel   = 0;
963         color_image_view.subresourceRange.levelCount     = 1;
964         color_image_view.subresourceRange.baseArrayLayer = 0;
965         color_image_view.subresourceRange.layerCount     = 1;
966         color_image_view.viewType                        = VK_IMAGE_VIEW_TYPE_2D;
967         color_image_view.flags                           = 0;
968 
969         sc_buffer.image = swapchainImages[i];
970 
971         color_image_view.image = sc_buffer.image;
972 
973         res = vkCreateImageView(info.device, &color_image_view, NULL, &sc_buffer.view);
974         info.buffers.push_back(sc_buffer);
975         ASSERT(res == VK_SUCCESS);
976     }
977     free(swapchainImages);
978     info.current_buffer = 0;
979 
980     if (NULL != presentModes)
981     {
982         free(presentModes);
983     }
984 }
985 
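/*
 * Find a memory type index that is allowed by typeBits and has all of the
 * properties in requirements_mask; returns false if none matches.
 */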
986 bool memory_type_from_properties(struct sample_info &info,
987                                  uint32_t typeBits,
988                                  VkFlags requirements_mask,
989                                  uint32_t *typeIndex)
990 {
991     // Search memtypes to find first index with those properties
992     for (uint32_t i = 0; i < info.memory_properties.memoryTypeCount; i++)
993     {
994         if ((typeBits & 1) == 1)
995         {
996             // Type is available, does it match user properties?
997             if ((info.memory_properties.memoryTypes[i].propertyFlags & requirements_mask) ==
998                 requirements_mask)
999             {
1000                 *typeIndex = i;
1001                 return true;
1002             }
1003         }
1004         typeBits >>= 1;
1005     }
1006     // No memory types matched, return failure
1007     return false;
1008 }
1009 
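/*
 * Create the depth image, allocate and bind device-local memory for it, and
 * create its image view.
 */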
1010 void init_depth_buffer(struct sample_info &info)
1011 {
1012     VkResult res;
1013     bool pass;
1014     VkImageCreateInfo image_info = {};
1015 
1016 /* allow custom depth formats */
1017 #ifdef __ANDROID__
1018     // Depth format needs to be VK_FORMAT_D24_UNORM_S8_UINT on Android.
1019     info.depth.format = VK_FORMAT_D24_UNORM_S8_UINT;
1020 #else
1021     if (info.depth.format == VK_FORMAT_UNDEFINED)
1022         info.depth.format = VK_FORMAT_D16_UNORM;
1023 #endif
1024 
1025     const VkFormat depth_format = info.depth.format;
1026     VkFormatProperties props;
1027     vkGetPhysicalDeviceFormatProperties(info.gpus[0], depth_format, &props);
1028     if (props.linearTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
1029     {
1030         image_info.tiling = VK_IMAGE_TILING_LINEAR;
1031     }
1032     else if (props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
1033     {
1034         image_info.tiling = VK_IMAGE_TILING_OPTIMAL;
1035     }
1036     else
1037     {
1038         /* Try other depth formats? */
1039         std::cout << "depth_format " << depth_format << " Unsupported.\n";
1040         exit(-1);
1041     }
1042 
1043     image_info.sType                 = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
1044     image_info.pNext                 = NULL;
1045     image_info.imageType             = VK_IMAGE_TYPE_2D;
1046     image_info.format                = depth_format;
1047     image_info.extent.width          = info.width;
1048     image_info.extent.height         = info.height;
1049     image_info.extent.depth          = 1;
1050     image_info.mipLevels             = 1;
1051     image_info.arrayLayers           = 1;
1052     image_info.samples               = NUM_SAMPLES;
1053     image_info.initialLayout         = VK_IMAGE_LAYOUT_UNDEFINED;
1054     image_info.queueFamilyIndexCount = 0;
1055     image_info.pQueueFamilyIndices   = NULL;
1056     image_info.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
1057     image_info.usage                 = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
1058     image_info.flags                 = 0;
1059 
1060     VkMemoryAllocateInfo mem_alloc = {};
1061     mem_alloc.sType                = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1062     mem_alloc.pNext                = NULL;
1063     mem_alloc.allocationSize       = 0;
1064     mem_alloc.memoryTypeIndex      = 0;
1065 
1066     VkImageViewCreateInfo view_info           = {};
1067     view_info.sType                           = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
1068     view_info.pNext                           = NULL;
1069     view_info.image                           = VK_NULL_HANDLE;
1070     view_info.format                          = depth_format;
1071     view_info.components.r                    = VK_COMPONENT_SWIZZLE_R;
1072     view_info.components.g                    = VK_COMPONENT_SWIZZLE_G;
1073     view_info.components.b                    = VK_COMPONENT_SWIZZLE_B;
1074     view_info.components.a                    = VK_COMPONENT_SWIZZLE_A;
1075     view_info.subresourceRange.aspectMask     = VK_IMAGE_ASPECT_DEPTH_BIT;
1076     view_info.subresourceRange.baseMipLevel   = 0;
1077     view_info.subresourceRange.levelCount     = 1;
1078     view_info.subresourceRange.baseArrayLayer = 0;
1079     view_info.subresourceRange.layerCount     = 1;
1080     view_info.viewType                        = VK_IMAGE_VIEW_TYPE_2D;
1081     view_info.flags                           = 0;
1082 
1083     if (depth_format == VK_FORMAT_D16_UNORM_S8_UINT ||
1084         depth_format == VK_FORMAT_D24_UNORM_S8_UINT || depth_format == VK_FORMAT_D32_SFLOAT_S8_UINT)
1085     {
1086         view_info.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
1087     }
1088 
1089     VkMemoryRequirements mem_reqs;
1090 
1091     /* Create image */
1092     res = vkCreateImage(info.device, &image_info, NULL, &info.depth.image);
1093     ASSERT(res == VK_SUCCESS);
1094 
1095     vkGetImageMemoryRequirements(info.device, info.depth.image, &mem_reqs);
1096 
1097     mem_alloc.allocationSize = mem_reqs.size;
1098     /* Use the memory properties to determine the type of memory required */
1099     pass = memory_type_from_properties(info, mem_reqs.memoryTypeBits,
1100                                        VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
1101                                        &mem_alloc.memoryTypeIndex);
1102     ASSERT(pass);
1103 
1104     /* Allocate memory */
1105     res = vkAllocateMemory(info.device, &mem_alloc, NULL, &info.depth.mem);
1106     ASSERT(res == VK_SUCCESS);
1107 
1108     /* Bind memory */
1109     res = vkBindImageMemory(info.device, info.depth.image, info.depth.mem, 0);
1110     ASSERT(res == VK_SUCCESS);
1111 
1112     /* Create image view */
1113     view_info.image = info.depth.image;
1114     res             = vkCreateImageView(info.device, &view_info, NULL, &info.depth.view);
1115     ASSERT(res == VK_SUCCESS);
1116 }
1117 
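/*
 * Create a uniform buffer holding a 4x4 MVP matrix, back it with host-visible
 * coherent memory, and copy the matrix into it.
 */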
1118 void init_uniform_buffer(struct sample_info &info)
1119 {
1120     VkResult res;
1121     bool pass;
1122 
1123     info.MVP = {1.0f, 0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f,
1124                 0.0f, 0.0f, 0.5f, 0.0f, 0.0f, 0.0f,  0.5f, 1.0f};
1125 
1126     /* VULKAN_KEY_START */
1127     VkBufferCreateInfo buf_info    = {};
1128     buf_info.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
1129     buf_info.pNext                 = NULL;
1130     buf_info.usage                 = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
1131     buf_info.size                  = sizeof(float) * 16;  // info.MVP.data() size
1132     buf_info.queueFamilyIndexCount = 0;
1133     buf_info.pQueueFamilyIndices   = NULL;
1134     buf_info.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
1135     buf_info.flags                 = 0;
1136     res = vkCreateBuffer(info.device, &buf_info, NULL, &info.uniform_data.buf);
1137     ASSERT(res == VK_SUCCESS);
1138 
1139     VkMemoryRequirements mem_reqs;
1140     vkGetBufferMemoryRequirements(info.device, info.uniform_data.buf, &mem_reqs);
1141 
1142     VkMemoryAllocateInfo alloc_info = {};
1143     alloc_info.sType                = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
1144     alloc_info.pNext                = NULL;
1145     alloc_info.memoryTypeIndex      = 0;
1146 
1147     alloc_info.allocationSize = mem_reqs.size;
1148     pass                      = memory_type_from_properties(
1149         info, mem_reqs.memoryTypeBits,
1150         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
1151         &alloc_info.memoryTypeIndex);
1152     ASSERT(pass && "No mappable, coherent memory");
1153 
1154     res = vkAllocateMemory(info.device, &alloc_info, NULL, &(info.uniform_data.mem));
1155     ASSERT(res == VK_SUCCESS);
1156 
1157     uint8_t *pData;
1158     res = vkMapMemory(info.device, info.uniform_data.mem, 0, mem_reqs.size, 0, (void **)&pData);
1159     ASSERT(res == VK_SUCCESS);
1160 
1161     memcpy(pData, info.MVP.data(), sizeof(float) * 16);  // info.MVP.data() size
1162 
1163     vkUnmapMemory(info.device, info.uniform_data.mem);
1164 
1165     res = vkBindBufferMemory(info.device, info.uniform_data.buf, info.uniform_data.mem, 0);
1166     ASSERT(res == VK_SUCCESS);
1167 
1168     info.uniform_data.buffer_info.buffer = info.uniform_data.buf;
1169     info.uniform_data.buffer_info.offset = 0;
1170     info.uniform_data.buffer_info.range  = sizeof(float) * 16;  // info.MVP.data() size
1171 }
1172 
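/*
 * Create a descriptor set layout with a uniform buffer binding (plus a
 * combined image sampler binding when use_texture is set) and the matching
 * pipeline layout.
 */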
1173 void init_descriptor_and_pipeline_layouts(struct sample_info &info,
1174                                           bool use_texture,
1175                                           VkDescriptorSetLayoutCreateFlags descSetLayoutCreateFlags)
1176 {
1177     VkDescriptorSetLayoutBinding layout_bindings[2];
1178     layout_bindings[0].binding            = 0;
1179     layout_bindings[0].descriptorType     = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
1180     layout_bindings[0].descriptorCount    = 1;
1181     layout_bindings[0].stageFlags         = VK_SHADER_STAGE_VERTEX_BIT;
1182     layout_bindings[0].pImmutableSamplers = NULL;
1183 
1184     if (use_texture)
1185     {
1186         layout_bindings[1].binding            = 1;
1187         layout_bindings[1].descriptorType     = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
1188         layout_bindings[1].descriptorCount    = 1;
1189         layout_bindings[1].stageFlags         = VK_SHADER_STAGE_FRAGMENT_BIT;
1190         layout_bindings[1].pImmutableSamplers = NULL;
1191     }
1192 
1193     /* Next take layout bindings and use them to create a descriptor set layout
1194      */
1195     VkDescriptorSetLayoutCreateInfo descriptor_layout = {};
1196     descriptor_layout.sType        = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
1197     descriptor_layout.pNext        = NULL;
1198     descriptor_layout.flags        = descSetLayoutCreateFlags;
1199     descriptor_layout.bindingCount = use_texture ? 2 : 1;
1200     descriptor_layout.pBindings    = layout_bindings;
1201 
1202     VkResult res;
1203 
1204     info.desc_layout.resize(NUM_DESCRIPTOR_SETS);
1205     res =
1206         vkCreateDescriptorSetLayout(info.device, &descriptor_layout, NULL, info.desc_layout.data());
1207     ASSERT(res == VK_SUCCESS);
1208 
1209     /* Now use the descriptor layout to create a pipeline layout */
1210     VkPipelineLayoutCreateInfo pPipelineLayoutCreateInfo = {};
1211     pPipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
1212     pPipelineLayoutCreateInfo.pNext = NULL;
1213     pPipelineLayoutCreateInfo.pushConstantRangeCount = 0;
1214     pPipelineLayoutCreateInfo.pPushConstantRanges    = NULL;
1215     pPipelineLayoutCreateInfo.setLayoutCount         = NUM_DESCRIPTOR_SETS;
1216     pPipelineLayoutCreateInfo.pSetLayouts            = info.desc_layout.data();
1217 
1218     res = vkCreatePipelineLayout(info.device, &pPipelineLayoutCreateInfo, NULL,
1219                                  &info.pipeline_layout);
1220     ASSERT(res == VK_SUCCESS);
1221 }
1222 
1223 void init_renderpass(struct sample_info &info,
1224                      bool include_depth,
1225                      bool clear,
1226                      VkImageLayout finalLayout)
1227 {
1228     /* DEPENDS on init_swap_chain() and init_depth_buffer() */
1229 
1230     VkResult res;
1231     /* Need attachments for render target and depth buffer */
1232     VkAttachmentDescription attachments[2];
1233     attachments[0].format        = info.format;
1234     attachments[0].samples       = NUM_SAMPLES;
1235     attachments[0].loadOp        = clear ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
1236     attachments[0].storeOp       = VK_ATTACHMENT_STORE_OP_STORE;
1237     attachments[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
1238     attachments[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
1239     attachments[0].initialLayout  = VK_IMAGE_LAYOUT_UNDEFINED;
1240     attachments[0].finalLayout    = finalLayout;
1241     attachments[0].flags          = 0;
1242 
1243     if (include_depth)
1244     {
1245         attachments[1].format  = info.depth.format;
1246         attachments[1].samples = NUM_SAMPLES;
1247         attachments[1].loadOp  = clear ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD;
1248         attachments[1].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
1249         attachments[1].stencilLoadOp  = VK_ATTACHMENT_LOAD_OP_LOAD;
1250         attachments[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_STORE;
1251         attachments[1].initialLayout  = VK_IMAGE_LAYOUT_UNDEFINED;
1252         attachments[1].finalLayout    = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
1253         attachments[1].flags          = 0;
1254     }
1255 
1256     VkAttachmentReference color_reference = {};
1257     color_reference.attachment            = 0;
1258     color_reference.layout                = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
1259 
1260     VkAttachmentReference depth_reference = {};
1261     depth_reference.attachment            = 1;
1262     depth_reference.layout                = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
1263 
1264     VkSubpassDescription subpass    = {};
1265     subpass.pipelineBindPoint       = VK_PIPELINE_BIND_POINT_GRAPHICS;
1266     subpass.flags                   = 0;
1267     subpass.inputAttachmentCount    = 0;
1268     subpass.pInputAttachments       = NULL;
1269     subpass.colorAttachmentCount    = 1;
1270     subpass.pColorAttachments       = &color_reference;
1271     subpass.pResolveAttachments     = NULL;
1272     subpass.pDepthStencilAttachment = include_depth ? &depth_reference : NULL;
1273     subpass.preserveAttachmentCount = 0;
1274     subpass.pPreserveAttachments    = NULL;
1275 
1276     VkRenderPassCreateInfo rp_info = {};
1277     rp_info.sType                  = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
1278     rp_info.pNext                  = NULL;
1279     rp_info.attachmentCount        = include_depth ? 2 : 1;
1280     rp_info.pAttachments           = attachments;
1281     rp_info.subpassCount           = 1;
1282     rp_info.pSubpasses             = &subpass;
1283     rp_info.dependencyCount        = 0;
1284     rp_info.pDependencies          = NULL;
1285 
1286     res = vkCreateRenderPass(info.device, &rp_info, NULL, &info.render_pass);
1287     ASSERT(res == VK_SUCCESS);
1288 }
1289 
1290 void init_framebuffers(struct sample_info &info, bool include_depth)
1291 {
1292     /* DEPENDS on init_depth_buffer(), init_renderpass() and
1293      * init_swapchain_extension() */
1294 
1295     VkResult res;
1296     VkImageView attachments[2];
1297     attachments[1] = info.depth.view;
1298 
1299     VkFramebufferCreateInfo fb_info = {};
1300     fb_info.sType                   = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
1301     fb_info.pNext                   = NULL;
1302     fb_info.renderPass              = info.render_pass;
1303     fb_info.attachmentCount         = include_depth ? 2 : 1;
1304     fb_info.pAttachments            = attachments;
1305     fb_info.width                   = info.width;
1306     fb_info.height                  = info.height;
1307     fb_info.layers                  = 1;
1308 
1309     uint32_t i;
1310 
1311     info.framebuffers = (VkFramebuffer *)malloc(info.swapchainImageCount * sizeof(VkFramebuffer));
1312 
1313     for (i = 0; i < info.swapchainImageCount; i++)
1314     {
1315         attachments[0] = info.buffers[i].view;
1316         res            = vkCreateFramebuffer(info.device, &fb_info, NULL, &info.framebuffers[i]);
1317         ASSERT(res == VK_SUCCESS);
1318     }
1319 }
1320 
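/*
 * Create a vertex buffer, back it with host-visible coherent memory, copy
 * vertexData into it, and describe the two vertex attributes (a position plus
 * either a texture coordinate or a color, depending on use_texture).
 */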
void init_vertex_buffer(struct sample_info &info,
                        const void *vertexData,
                        uint32_t dataSize,
                        uint32_t dataStride,
                        bool use_texture)
{
    VkResult res;
    bool pass;

    VkBufferCreateInfo buf_info    = {};
    buf_info.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    buf_info.pNext                 = NULL;
    buf_info.usage                 = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
    buf_info.size                  = dataSize;
    buf_info.queueFamilyIndexCount = 0;
    buf_info.pQueueFamilyIndices   = NULL;
    buf_info.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
    buf_info.flags                 = 0;
    res = vkCreateBuffer(info.device, &buf_info, NULL, &info.vertex_buffer.buf);
    ASSERT(res == VK_SUCCESS);

    VkMemoryRequirements mem_reqs;
    vkGetBufferMemoryRequirements(info.device, info.vertex_buffer.buf, &mem_reqs);

    VkMemoryAllocateInfo alloc_info = {};
    alloc_info.sType                = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    alloc_info.pNext                = NULL;
    alloc_info.memoryTypeIndex      = 0;

    alloc_info.allocationSize = mem_reqs.size;
    pass                      = memory_type_from_properties(
        info, mem_reqs.memoryTypeBits,
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
        &alloc_info.memoryTypeIndex);
    ASSERT(pass && "No mappable, coherent memory");

    res = vkAllocateMemory(info.device, &alloc_info, NULL, &(info.vertex_buffer.mem));
    ASSERT(res == VK_SUCCESS);
    info.vertex_buffer.buffer_info.range  = mem_reqs.size;
    info.vertex_buffer.buffer_info.offset = 0;

    uint8_t *pData;
    res = vkMapMemory(info.device, info.vertex_buffer.mem, 0, mem_reqs.size, 0, (void **)&pData);
    ASSERT(res == VK_SUCCESS);

    memcpy(pData, vertexData, dataSize);

    vkUnmapMemory(info.device, info.vertex_buffer.mem);

    res = vkBindBufferMemory(info.device, info.vertex_buffer.buf, info.vertex_buffer.mem, 0);
    ASSERT(res == VK_SUCCESS);

    info.vi_binding.binding   = 0;
    info.vi_binding.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
    info.vi_binding.stride    = dataStride;

    info.vi_attribs[0].binding  = 0;
    info.vi_attribs[0].location = 0;
    info.vi_attribs[0].format   = VK_FORMAT_R32G32B32A32_SFLOAT;
    info.vi_attribs[0].offset   = 0;
    info.vi_attribs[1].binding  = 0;
    info.vi_attribs[1].location = 1;
    info.vi_attribs[1].format =
        use_texture ? VK_FORMAT_R32G32_SFLOAT : VK_FORMAT_R32G32B32A32_SFLOAT;
    info.vi_attribs[1].offset = 16;
}

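/*
 * Create a descriptor pool sized for a single descriptor set containing one
 * uniform buffer and, when use_texture is set, one combined image sampler.
 */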
void init_descriptor_pool(struct sample_info &info, bool use_texture)
{
    /* DEPENDS on init_uniform_buffer() and
     * init_descriptor_and_pipeline_layouts() */

    VkResult res;
    VkDescriptorPoolSize type_count[2];
    type_count[0].type            = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    type_count[0].descriptorCount = 1;
    if (use_texture)
    {
        type_count[1].type            = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
        type_count[1].descriptorCount = 1;
    }

    VkDescriptorPoolCreateInfo descriptor_pool = {};
    descriptor_pool.sType                      = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    descriptor_pool.pNext                      = NULL;
    descriptor_pool.maxSets                    = 1;
    descriptor_pool.poolSizeCount              = use_texture ? 2 : 1;
    descriptor_pool.pPoolSizes                 = type_count;

    res = vkCreateDescriptorPool(info.device, &descriptor_pool, NULL, &info.desc_pool);
    ASSERT(res == VK_SUCCESS);
}

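/*
 * Allocate NUM_DESCRIPTOR_SETS descriptor sets from the pool and point
 * binding 0 of the first set at the uniform buffer.
 */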
void init_descriptor_set(struct sample_info &info)
{
    /* DEPENDS on init_descriptor_pool() */

    VkResult res;

    VkDescriptorSetAllocateInfo alloc_info[1];
    alloc_info[0].sType              = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    alloc_info[0].pNext              = NULL;
    alloc_info[0].descriptorPool     = info.desc_pool;
    alloc_info[0].descriptorSetCount = NUM_DESCRIPTOR_SETS;
    alloc_info[0].pSetLayouts        = info.desc_layout.data();

    info.desc_set.resize(NUM_DESCRIPTOR_SETS);
    res = vkAllocateDescriptorSets(info.device, alloc_info, info.desc_set.data());
    ASSERT(res == VK_SUCCESS);

    VkWriteDescriptorSet writes[2];

    writes[0]                 = {};
    writes[0].sType           = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    writes[0].pNext           = NULL;
    writes[0].dstSet          = info.desc_set[0];
    writes[0].descriptorCount = 1;
    writes[0].descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    writes[0].pBufferInfo     = &info.uniform_data.buffer_info;
    writes[0].dstArrayElement = 0;
    writes[0].dstBinding      = 0;

    vkUpdateDescriptorSets(info.device, 1, writes, 0, NULL);
}

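/*
 * Compile a single GLSL shader stage to SPIR-V with glslang. The caller is
 * responsible for wrapping calls in glslang::InitializeProcess() /
 * glslang::FinalizeProcess(), as init_shaders() below does.
 *
 * A minimal usage sketch (the shader source here is illustrative only):
 *
 *     std::vector<unsigned int> spirv;
 *     const char *vs = "#version 450\n"
 *                      "void main() { gl_Position = vec4(0.0, 0.0, 0.0, 1.0); }\n";
 *     if (GLSLtoSPV(VK_SHADER_STAGE_VERTEX_BIT, vs, spirv))
 *     {
 *         // spirv.data() and spirv.size() * sizeof(unsigned int) feed
 *         // VkShaderModuleCreateInfo::pCode and ::codeSize.
 *     }
 */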
bool GLSLtoSPV(const VkShaderStageFlagBits shader_type,
               const char *pshader,
               std::vector<unsigned int> &spirv)
{
    EShLanguage stage = FindLanguage(shader_type);
    glslang::TShader shader(stage);
    glslang::TProgram program;
    const char *shaderStrings[1];
    TBuiltInResource Resources;
    init_resources(Resources);

    // Enable SPIR-V and Vulkan rules when parsing GLSL
    EShMessages messages = (EShMessages)(EShMsgSpvRules | EShMsgVulkanRules);

    shaderStrings[0] = pshader;
    shader.setStrings(shaderStrings, 1);

    if (!shader.parse(&Resources, 100, false, messages))
    {
        puts(shader.getInfoLog());
        puts(shader.getInfoDebugLog());
        return false;  // something didn't work
    }

    program.addShader(&shader);

    //
    // Program-level processing...
    //

    if (!program.link(messages))
    {
        puts(shader.getInfoLog());
        puts(shader.getInfoDebugLog());
        fflush(stdout);
        return false;
    }

    glslang::GlslangToSpv(*program.getIntermediate(stage), spirv);
    return true;
}

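/*
 * Build VkShaderModules for the optional vertex and fragment shaders and
 * record them in info.shaderStages[0] and [1], which init_pipeline() consumes.
 */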
void init_shaders(struct sample_info &info, const char *vertShaderText, const char *fragShaderText)
{
    VkResult res;
    bool retVal;

    // If no shaders were submitted, just return
    if (!(vertShaderText || fragShaderText))
        return;

    glslang::InitializeProcess();
    VkShaderModuleCreateInfo moduleCreateInfo;

    if (vertShaderText)
    {
        std::vector<unsigned int> vtx_spv;
        info.shaderStages[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
        info.shaderStages[0].pNext = NULL;
        info.shaderStages[0].pSpecializationInfo = NULL;
        info.shaderStages[0].flags               = 0;
        info.shaderStages[0].stage               = VK_SHADER_STAGE_VERTEX_BIT;
        info.shaderStages[0].pName               = "main";

        retVal = GLSLtoSPV(VK_SHADER_STAGE_VERTEX_BIT, vertShaderText, vtx_spv);
        ASSERT(retVal);

        moduleCreateInfo.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        moduleCreateInfo.pNext    = NULL;
        moduleCreateInfo.flags    = 0;
        moduleCreateInfo.codeSize = vtx_spv.size() * sizeof(unsigned int);
        moduleCreateInfo.pCode    = vtx_spv.data();
        res                       = vkCreateShaderModule(info.device, &moduleCreateInfo, NULL,
                                                         &info.shaderStages[0].module);
        ASSERT(res == VK_SUCCESS);
    }

    if (fragShaderText)
    {
        std::vector<unsigned int> frag_spv;
        info.shaderStages[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
        info.shaderStages[1].pNext = NULL;
        info.shaderStages[1].pSpecializationInfo = NULL;
        info.shaderStages[1].flags               = 0;
        info.shaderStages[1].stage               = VK_SHADER_STAGE_FRAGMENT_BIT;
        info.shaderStages[1].pName               = "main";

        retVal = GLSLtoSPV(VK_SHADER_STAGE_FRAGMENT_BIT, fragShaderText, frag_spv);
        ASSERT(retVal);

        moduleCreateInfo.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        moduleCreateInfo.pNext    = NULL;
        moduleCreateInfo.flags    = 0;
        moduleCreateInfo.codeSize = frag_spv.size() * sizeof(unsigned int);
        moduleCreateInfo.pCode    = frag_spv.data();
        res                       = vkCreateShaderModule(info.device, &moduleCreateInfo, NULL,
                                                         &info.shaderStages[1].module);
        ASSERT(res == VK_SUCCESS);
    }

    glslang::FinalizeProcess();
}

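/*
 * Create an empty pipeline cache for init_pipeline() to pass to
 * vkCreateGraphicsPipelines().
 */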
void init_pipeline_cache(struct sample_info &info)
{
    VkResult res;

    VkPipelineCacheCreateInfo pipelineCache;
    pipelineCache.sType           = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    pipelineCache.pNext           = NULL;
    pipelineCache.initialDataSize = 0;
    pipelineCache.pInitialData    = NULL;
    pipelineCache.flags           = 0;
    res = vkCreatePipelineCache(info.device, &pipelineCache, NULL, &info.pipelineCache);
    ASSERT(res == VK_SUCCESS);
}

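/*
 * Assemble the fixed-function state and create the graphics pipeline.
 * Viewport and scissor are dynamic state on desktop builds; on Android they
 * are baked into the pipeline because some drivers lack dynamic viewport
 * support.
 */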
void init_pipeline(struct sample_info &info, VkBool32 include_depth, VkBool32 include_vi)
{
    VkResult res;

    std::vector<VkDynamicState> dynamicStateEnables;
    VkPipelineDynamicStateCreateInfo dynamicState = {};
    dynamicState.sType             = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
    dynamicState.pNext             = NULL;
    dynamicState.pDynamicStates    = NULL;
    dynamicState.dynamicStateCount = 0;

    VkPipelineVertexInputStateCreateInfo vi;
    memset(&vi, 0, sizeof(vi));
    vi.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
    if (include_vi)
    {
        vi.pNext                           = NULL;
        vi.flags                           = 0;
        vi.vertexBindingDescriptionCount   = 1;
        vi.pVertexBindingDescriptions      = &info.vi_binding;
        vi.vertexAttributeDescriptionCount = 2;
        vi.pVertexAttributeDescriptions    = info.vi_attribs;
    }
    VkPipelineInputAssemblyStateCreateInfo ia;
    ia.sType                  = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
    ia.pNext                  = NULL;
    ia.flags                  = 0;
    ia.primitiveRestartEnable = VK_FALSE;
    ia.topology               = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;

    VkPipelineRasterizationStateCreateInfo rs;
    rs.sType                   = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
    rs.pNext                   = NULL;
    rs.flags                   = 0;
    rs.polygonMode             = VK_POLYGON_MODE_FILL;
    rs.cullMode                = VK_CULL_MODE_BACK_BIT;
    rs.frontFace               = VK_FRONT_FACE_CLOCKWISE;
    rs.depthClampEnable        = VK_FALSE;
    rs.rasterizerDiscardEnable = VK_FALSE;
    rs.depthBiasEnable         = VK_FALSE;
    rs.depthBiasConstantFactor = 0;
    rs.depthBiasClamp          = 0;
    rs.depthBiasSlopeFactor    = 0;
    rs.lineWidth               = 1.0f;

    VkPipelineColorBlendStateCreateInfo cb;
    cb.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
    cb.flags = 0;
    cb.pNext = NULL;
    VkPipelineColorBlendAttachmentState att_state[1];
    att_state[0].colorWriteMask      = 0xf;
    att_state[0].blendEnable         = VK_FALSE;
    att_state[0].alphaBlendOp        = VK_BLEND_OP_ADD;
    att_state[0].colorBlendOp        = VK_BLEND_OP_ADD;
    att_state[0].srcColorBlendFactor = VK_BLEND_FACTOR_ZERO;
    att_state[0].dstColorBlendFactor = VK_BLEND_FACTOR_ZERO;
    att_state[0].srcAlphaBlendFactor = VK_BLEND_FACTOR_ZERO;
    att_state[0].dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO;
    cb.attachmentCount               = 1;
    cb.pAttachments                  = att_state;
    cb.logicOpEnable                 = VK_FALSE;
    cb.logicOp                       = VK_LOGIC_OP_NO_OP;
    cb.blendConstants[0]             = 1.0f;
    cb.blendConstants[1]             = 1.0f;
    cb.blendConstants[2]             = 1.0f;
    cb.blendConstants[3]             = 1.0f;

    VkPipelineViewportStateCreateInfo vp = {};
    vp.sType                             = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
    vp.pNext                             = NULL;
    vp.flags                             = 0;
#ifndef __ANDROID__
    vp.viewportCount = NUM_VIEWPORTS;
    dynamicState.dynamicStateCount++;
    dynamicStateEnables.push_back(VK_DYNAMIC_STATE_VIEWPORT);
    vp.scissorCount = NUM_SCISSORS;
    dynamicState.dynamicStateCount++;
    dynamicStateEnables.push_back(VK_DYNAMIC_STATE_SCISSOR);
    vp.pScissors  = NULL;
    vp.pViewports = NULL;
#else
    // Temporarily disable the dynamic viewport on Android because some drivers don't
    // support the feature.
    VkViewport viewports;
    viewports.minDepth = 0.0f;
    viewports.maxDepth = 1.0f;
    viewports.x        = 0;
    viewports.y        = 0;
    viewports.width    = info.width;
    viewports.height   = info.height;
    VkRect2D scissor;
    scissor.extent.width  = info.width;
    scissor.extent.height = info.height;
    scissor.offset.x      = 0;
    scissor.offset.y      = 0;
    vp.viewportCount      = NUM_VIEWPORTS;
    vp.scissorCount       = NUM_SCISSORS;
    vp.pScissors          = &scissor;
    vp.pViewports         = &viewports;
#endif
    VkPipelineDepthStencilStateCreateInfo ds;
    ds.sType                 = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
    ds.pNext                 = NULL;
    ds.flags                 = 0;
    ds.depthTestEnable       = include_depth;
    ds.depthWriteEnable      = include_depth;
    ds.depthCompareOp        = VK_COMPARE_OP_LESS_OR_EQUAL;
    ds.depthBoundsTestEnable = VK_FALSE;
    ds.stencilTestEnable     = VK_FALSE;
    ds.back.failOp           = VK_STENCIL_OP_KEEP;
    ds.back.passOp           = VK_STENCIL_OP_KEEP;
    ds.back.compareOp        = VK_COMPARE_OP_ALWAYS;
    ds.back.compareMask      = 0;
    ds.back.reference        = 0;
    ds.back.depthFailOp      = VK_STENCIL_OP_KEEP;
    ds.back.writeMask        = 0;
    ds.minDepthBounds        = 0;
    ds.maxDepthBounds        = 0;
    ds.front                 = ds.back;

    VkPipelineMultisampleStateCreateInfo ms;
    ms.sType                 = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
    ms.pNext                 = NULL;
    ms.flags                 = 0;
    ms.pSampleMask           = NULL;
    ms.rasterizationSamples  = NUM_SAMPLES;
    ms.sampleShadingEnable   = VK_FALSE;
    ms.alphaToCoverageEnable = VK_FALSE;
    ms.alphaToOneEnable      = VK_FALSE;
    ms.minSampleShading      = 0.0;

    VkGraphicsPipelineCreateInfo pipeline;
    pipeline.sType               = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
    pipeline.pNext               = NULL;
    pipeline.layout              = info.pipeline_layout;
    pipeline.basePipelineHandle  = VK_NULL_HANDLE;
    pipeline.basePipelineIndex   = 0;
    pipeline.flags               = 0;
    pipeline.pVertexInputState   = &vi;
    pipeline.pInputAssemblyState = &ia;
    pipeline.pRasterizationState = &rs;
    pipeline.pColorBlendState    = &cb;
    pipeline.pTessellationState  = NULL;
    pipeline.pMultisampleState   = &ms;
    pipeline.pDynamicState       = &dynamicState;
    pipeline.pViewportState      = &vp;
    pipeline.pDepthStencilState  = &ds;
    pipeline.pStages             = info.shaderStages;
    pipeline.stageCount          = 2;
    pipeline.renderPass          = info.render_pass;
    pipeline.subpass             = 0;

    if (dynamicStateEnables.size() > 0)
    {
        dynamicState.pDynamicStates    = dynamicStateEnables.data();
        dynamicState.dynamicStateCount = dynamicStateEnables.size();
    }

    res = vkCreateGraphicsPipelines(info.device, info.pipelineCache, 1, &pipeline, NULL,
                                    &info.pipeline);
    ASSERT(res == VK_SUCCESS);
}

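/*
 * The init_viewports and init_scissors helpers (and their _array and 2_array
 * variants) record the same dynamic viewport/scissor state into different
 * command buffers: info.cmd, an entry of info.cmds, or an entry of info.cmd2s.
 * On Android they are no-ops because the pipeline bakes this state in instead.
 */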
void init_viewports(struct sample_info &info)
{
#ifdef __ANDROID__
// Disable the dynamic viewport on Android. Some drivers have an issue with the dynamic
// viewport feature.
#else
    info.viewport.height   = (float)info.height;
    info.viewport.width    = (float)info.width;
    info.viewport.minDepth = (float)0.0f;
    info.viewport.maxDepth = (float)1.0f;
    info.viewport.x        = 0;
    info.viewport.y        = 0;
    vkCmdSetViewport(info.cmd, 0, NUM_VIEWPORTS, &info.viewport);
#endif
}

void init_viewports_array(struct sample_info &info, int index)
{
#ifdef __ANDROID__
// Disable the dynamic viewport on Android. Some drivers have an issue with the dynamic
// viewport feature.
#else
    info.viewport.height   = (float)info.height;
    info.viewport.width    = (float)info.width;
    info.viewport.minDepth = (float)0.0f;
    info.viewport.maxDepth = (float)1.0f;
    info.viewport.x        = 0;
    info.viewport.y        = 0;
    vkCmdSetViewport(info.cmds[index], 0, NUM_VIEWPORTS, &info.viewport);
#endif
}

void init_viewports2_array(struct sample_info &info, int index)
{
#ifdef __ANDROID__
// Disable the dynamic viewport on Android. Some drivers have an issue with the dynamic
// viewport feature.
#else
    info.viewport.height   = (float)info.height;
    info.viewport.width    = (float)info.width;
    info.viewport.minDepth = (float)0.0f;
    info.viewport.maxDepth = (float)1.0f;
    info.viewport.x        = 0;
    info.viewport.y        = 0;
    vkCmdSetViewport(info.cmd2s[index], 0, NUM_VIEWPORTS, &info.viewport);
#endif
}

void init_scissors(struct sample_info &info)
{
#ifdef __ANDROID__
// Disable dynamic scissors on Android. Some drivers have an issue with the dynamic
// scissors feature.
#else
    info.scissor.extent.width  = info.width;
    info.scissor.extent.height = info.height;
    info.scissor.offset.x      = 0;
    info.scissor.offset.y      = 0;
    vkCmdSetScissor(info.cmd, 0, NUM_SCISSORS, &info.scissor);
#endif
}

void init_scissors_array(struct sample_info &info, int index)
{
#ifdef __ANDROID__
// Disable dynamic scissors on Android. Some drivers have an issue with the dynamic
// scissors feature.
#else
    info.scissor.extent.width  = info.width;
    info.scissor.extent.height = info.height;
    info.scissor.offset.x      = 0;
    info.scissor.offset.y      = 0;
    vkCmdSetScissor(info.cmds[index], 0, NUM_SCISSORS, &info.scissor);
#endif
}

void init_scissors2_array(struct sample_info &info, int index)
{
#ifdef __ANDROID__
// Disable dynamic scissors on Android. Some drivers have an issue with the dynamic
// scissors feature.
#else
    info.scissor.extent.width  = info.width;
    info.scissor.extent.height = info.height;
    info.scissor.offset.x      = 0;
    info.scissor.offset.y      = 0;
    vkCmdSetScissor(info.cmd2s[index], 0, NUM_SCISSORS, &info.scissor);
#endif
}

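/*
 * The destroy_* and reset_* helpers below release or recycle the objects
 * created by the corresponding init_* functions, typically invoked in
 * roughly reverse creation order.
 */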
void destroy_pipeline(struct sample_info &info)
{
    vkDestroyPipeline(info.device, info.pipeline, NULL);
}

void destroy_pipeline_cache(struct sample_info &info)
{
    vkDestroyPipelineCache(info.device, info.pipelineCache, NULL);
}

void destroy_uniform_buffer(struct sample_info &info)
{
    vkDestroyBuffer(info.device, info.uniform_data.buf, NULL);
    vkFreeMemory(info.device, info.uniform_data.mem, NULL);
}

void destroy_descriptor_and_pipeline_layouts(struct sample_info &info)
{
    for (int i = 0; i < NUM_DESCRIPTOR_SETS; i++)
        vkDestroyDescriptorSetLayout(info.device, info.desc_layout[i], NULL);
    vkDestroyPipelineLayout(info.device, info.pipeline_layout, NULL);
}

void destroy_descriptor_pool(struct sample_info &info)
{
    vkDestroyDescriptorPool(info.device, info.desc_pool, NULL);
}

void destroy_shaders(struct sample_info &info)
{
    vkDestroyShaderModule(info.device, info.shaderStages[0].module, NULL);
    vkDestroyShaderModule(info.device, info.shaderStages[1].module, NULL);
}

void destroy_command_buffer(struct sample_info &info)
{
    VkCommandBuffer cmd_bufs[1] = {info.cmd};
    vkFreeCommandBuffers(info.device, info.cmd_pool, 1, cmd_bufs);
}

void destroy_command_buffer_array(struct sample_info &info, int numBuffers)
{
    vkFreeCommandBuffers(info.device, info.cmd_pool, numBuffers, info.cmds.data());
}

void reset_command_buffer2_array(struct sample_info &info,
                                 VkCommandBufferResetFlags cmd_buffer_reset_flags)
{
    for (auto cb : info.cmd2s)
    {
        vkResetCommandBuffer(cb, cmd_buffer_reset_flags);
    }
}

void destroy_command_buffer2_array(struct sample_info &info, int numBuffers)
{
    vkFreeCommandBuffers(info.device, info.cmd_pool, numBuffers, info.cmd2s.data());
}

void reset_command_pool(struct sample_info &info, VkCommandPoolResetFlags cmd_pool_reset_flags)
{
    vkResetCommandPool(info.device, info.cmd_pool, cmd_pool_reset_flags);
}

void destroy_command_pool(struct sample_info &info)
{
    vkDestroyCommandPool(info.device, info.cmd_pool, NULL);
}

void destroy_depth_buffer(struct sample_info &info)
{
    vkDestroyImageView(info.device, info.depth.view, NULL);
    vkDestroyImage(info.device, info.depth.image, NULL);
    vkFreeMemory(info.device, info.depth.mem, NULL);
}

void destroy_vertex_buffer(struct sample_info &info)
{
    vkDestroyBuffer(info.device, info.vertex_buffer.buf, NULL);
    vkFreeMemory(info.device, info.vertex_buffer.mem, NULL);
}

void destroy_swap_chain(struct sample_info &info)
{
    for (uint32_t i = 0; i < info.swapchainImageCount; i++)
    {
        vkDestroyImageView(info.device, info.buffers[i].view, NULL);
    }
    vkDestroySwapchainKHR(info.device, info.swap_chain, NULL);
}

void destroy_framebuffers(struct sample_info &info)
{
    for (uint32_t i = 0; i < info.swapchainImageCount; i++)
    {
        vkDestroyFramebuffer(info.device, info.framebuffers[i], NULL);
    }
    free(info.framebuffers);
}

void destroy_renderpass(struct sample_info &info)
{
    vkDestroyRenderPass(info.device, info.render_pass, NULL);
}

void destroy_device(struct sample_info &info)
{
    vkDeviceWaitIdle(info.device);
    vkDestroyDevice(info.device, NULL);
}

void destroy_instance(struct sample_info &info)
{
    vkDestroyInstance(info.inst, NULL);
}