/*
 * Copyright (C) 2016 Google, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <cassert>
#include <array>
#include <iostream>
#include <string>
#include <sstream>
#include <set>
#include "Helpers.h"
#include "Shell.h"
#include "Game.h"

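// The constructor only requests the cross-platform WSI pieces
// (VK_KHR_surface / VK_KHR_swapchain). The platform-specific Shell subclass
// that implements load_vk(), can_present() and create_surface() is presumably
// the one that appends its own surface extension (for example
// VK_KHR_xcb_surface or VK_KHR_android_surface) before init_vk() runs; that
// code is not part of this file.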
Shell::Shell(Game &game)
    : game_(game), settings_(game.settings()), ctx_(),
      game_tick_(1.0f / settings_.ticks_per_second), game_time_(game_tick_)
{
    // require generic WSI extensions
    instance_extensions_.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
    device_extensions_.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    // require "standard" validation layers
    if (settings_.validate) {
        instance_layers_.push_back("VK_LAYER_LUNARG_standard_validation");
        instance_extensions_.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
    }
}

void Shell::log(LogPriority priority, const char *msg)
{
    std::ostream &st = (priority >= LOG_ERR) ? std::cerr : std::cout;
    st << msg << "\n";
}

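// Vulkan entry points are resolved in three stages. The init_dispatch_table_*
// helpers come from this sample (Helpers.h), so their exact behavior is an
// assumption here, but the usual split is:
//   init_dispatch_table_top    - loader-level functions that need no instance
//                                (vkCreateInstance, enumerations), fed by the
//                                platform's load_vk()
//   init_dispatch_table_middle - instance-level functions, once ctx_.instance
//                                exists
//   init_dispatch_table_bottom - device-level functions, once ctx_.dev exists
//                                (see create_context())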
void Shell::init_vk()
{
    vk::init_dispatch_table_top(load_vk());

    init_instance();
    vk::init_dispatch_table_middle(ctx_.instance, false);

    init_debug_report();
    init_physical_dev();
}

void Shell::cleanup_vk()
{
    if (settings_.validate)
        vk::DestroyDebugReportCallbackEXT(ctx_.instance, ctx_.debug_report, nullptr);

    vk::DestroyInstance(ctx_.instance, nullptr);
}

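// Note that this member does not match PFN_vkDebugReportCallbackEXT (it is
// non-static and has no user_data parameter). A static VKAPI_CALL wrapper,
// presumably declared in Shell.h, is what init_debug_report() actually
// registers; it is expected to cast pUserData back to the Shell instance and
// forward here.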
bool Shell::debug_report_callback(VkDebugReportFlagsEXT flags,
                                  VkDebugReportObjectTypeEXT obj_type,
                                  uint64_t object,
                                  size_t location,
                                  int32_t msg_code,
                                  const char *layer_prefix,
                                  const char *msg)
{
    LogPriority prio = LOG_WARN;
    if (flags & VK_DEBUG_REPORT_ERROR_BIT_EXT)
        prio = LOG_ERR;
    else if (flags & (VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT))
        prio = LOG_WARN;
    else if (flags & VK_DEBUG_REPORT_INFORMATION_BIT_EXT)
        prio = LOG_INFO;
    else if (flags & VK_DEBUG_REPORT_DEBUG_BIT_EXT)
        prio = LOG_DEBUG;

    std::stringstream ss;
    ss << layer_prefix << ": " << msg;

    log(prio, ss.str().c_str());

    return false;
}

void Shell::assert_all_instance_layers() const
{
    // enumerate instance layers
    std::vector<VkLayerProperties> layers;
    vk::enumerate(layers);

    std::set<std::string> layer_names;
    for (const auto &layer : layers)
        layer_names.insert(layer.layerName);

    // all listed instance layers are required
    for (const auto &name : instance_layers_) {
        if (layer_names.find(name) == layer_names.end()) {
            std::stringstream ss;
            ss << "instance layer " << name << " is missing";
            throw std::runtime_error(ss.str());
        }
    }
}

void Shell::assert_all_instance_extensions() const
{
    // enumerate instance extensions
    std::vector<VkExtensionProperties> exts;
    vk::enumerate(nullptr, exts);

    std::set<std::string> ext_names;
    for (const auto &ext : exts)
        ext_names.insert(ext.extensionName);

    // all listed instance extensions are required
    for (const auto &name : instance_extensions_) {
        if (ext_names.find(name) == ext_names.end()) {
            std::stringstream ss;
            ss << "instance extension " << name << " is missing";
            throw std::runtime_error(ss.str());
        }
    }
}

bool Shell::has_all_device_extensions(VkPhysicalDevice phy) const
{
    // enumerate device extensions
    std::vector<VkExtensionProperties> exts;
    vk::enumerate(phy, nullptr, exts);

    std::set<std::string> ext_names;
    for (const auto &ext : exts)
        ext_names.insert(ext.extensionName);

    // all listed device extensions are required
    for (const auto &name : device_extensions_) {
        if (ext_names.find(name) == ext_names.end())
            return false;
    }

    return true;
}

void Shell::init_instance()
{
    assert_all_instance_layers();
    assert_all_instance_extensions();

    VkApplicationInfo app_info = {};
    app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    app_info.pApplicationName = settings_.name.c_str();
    app_info.applicationVersion = 0;
    app_info.apiVersion = VK_API_VERSION_1_0;

    VkInstanceCreateInfo instance_info = {};
    instance_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    instance_info.pApplicationInfo = &app_info;
    instance_info.enabledLayerCount = static_cast<uint32_t>(instance_layers_.size());
    instance_info.ppEnabledLayerNames = instance_layers_.data();
    instance_info.enabledExtensionCount = static_cast<uint32_t>(instance_extensions_.size());
    instance_info.ppEnabledExtensionNames = instance_extensions_.data();

    vk::assert_success(vk::CreateInstance(&instance_info, nullptr, &ctx_.instance));
}

void Shell::init_debug_report()
{
    if (!settings_.validate)
        return;

    VkDebugReportCallbackCreateInfoEXT debug_report_info = {};
    debug_report_info.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;

    debug_report_info.flags = VK_DEBUG_REPORT_WARNING_BIT_EXT |
                              VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT |
                              VK_DEBUG_REPORT_ERROR_BIT_EXT;
    if (settings_.validate_verbose) {
        // add the verbose categories on top of the defaults instead of
        // replacing them, so errors and warnings are still reported
        debug_report_info.flags |= VK_DEBUG_REPORT_INFORMATION_BIT_EXT |
                                   VK_DEBUG_REPORT_DEBUG_BIT_EXT;
    }

    debug_report_info.pfnCallback = debug_report_callback;
    debug_report_info.pUserData = reinterpret_cast<void *>(this);

    vk::assert_success(vk::CreateDebugReportCallbackEXT(ctx_.instance,
            &debug_report_info, nullptr, &ctx_.debug_report));
}

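// Pick the first physical device that (a) exposes every extension listed in
// device_extensions_, (b) has a queue family with VK_QUEUE_GRAPHICS_BIT for
// the game, and (c) has a queue family that the platform-specific
// can_present() reports as able to present. The two families may or may not
// be the same one; create_dev() and resize_swapchain() handle both cases.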
void Shell::init_physical_dev()
{
    // enumerate physical devices
    std::vector<VkPhysicalDevice> phys;
    vk::assert_success(vk::enumerate(ctx_.instance, phys));

    ctx_.physical_dev = VK_NULL_HANDLE;
    for (auto phy : phys) {
        if (!has_all_device_extensions(phy))
            continue;

        // get queue properties
        std::vector<VkQueueFamilyProperties> queues;
        vk::get(phy, queues);

        int game_queue_family = -1, present_queue_family = -1;
        for (uint32_t i = 0; i < queues.size(); i++) {
            const VkQueueFamilyProperties &q = queues[i];

            // the game queue requires only GRAPHICS
            const VkFlags game_queue_flags = VK_QUEUE_GRAPHICS_BIT;
            if (game_queue_family < 0 &&
                (q.queueFlags & game_queue_flags) == game_queue_flags)
                game_queue_family = i;

            // the present queue must support the surface
            if (present_queue_family < 0 && can_present(phy, i))
                present_queue_family = i;

            if (game_queue_family >= 0 && present_queue_family >= 0)
                break;
        }

        if (game_queue_family >= 0 && present_queue_family >= 0) {
            ctx_.physical_dev = phy;
            ctx_.game_queue_family = game_queue_family;
            ctx_.present_queue_family = present_queue_family;
            break;
        }
    }

    if (ctx_.physical_dev == VK_NULL_HANDLE)
        throw std::runtime_error("failed to find any capable Vulkan physical device");
}

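// Expected call sequence, driven by the platform-specific shell (a sketch,
// not code from this file -- the actual run loop lives in the subclasses):
//
//   init_vk();
//   create_context();
//   resize_swapchain(width, height);   // and again on window resizes
//   while (running) {
//       acquire_back_buffer();
//       add_game_time(elapsed_seconds);
//       present_back_buffer();
//   }
//   destroy_context();
//   cleanup_vk();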
void Shell::create_context()
{
    create_dev();
    vk::init_dispatch_table_bottom(ctx_.instance, ctx_.dev);

    vk::GetDeviceQueue(ctx_.dev, ctx_.game_queue_family, 0, &ctx_.game_queue);
    vk::GetDeviceQueue(ctx_.dev, ctx_.present_queue_family, 0, &ctx_.present_queue);

    create_back_buffers();

    // initialize ctx_.{surface,format} before attach_shell
    create_swapchain();

    game_.attach_shell(*this);
}

void Shell::destroy_context()
{
    if (ctx_.dev == VK_NULL_HANDLE)
        return;

    vk::DeviceWaitIdle(ctx_.dev);

    destroy_swapchain();

    game_.detach_shell();

    destroy_back_buffers();

    ctx_.game_queue = VK_NULL_HANDLE;
    ctx_.present_queue = VK_NULL_HANDLE;

    vk::DestroyDevice(ctx_.dev, nullptr);
    ctx_.dev = VK_NULL_HANDLE;
}

void Shell::create_dev()
{
    VkDeviceCreateInfo dev_info = {};
    dev_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;

    const std::vector<float> queue_priorities(settings_.queue_count, 0.0f);
    std::array<VkDeviceQueueCreateInfo, 2> queue_info = {};
    queue_info[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queue_info[0].queueFamilyIndex = ctx_.game_queue_family;
    queue_info[0].queueCount = settings_.queue_count;
    queue_info[0].pQueuePriorities = queue_priorities.data();

    if (ctx_.game_queue_family != ctx_.present_queue_family) {
        queue_info[1].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
        queue_info[1].queueFamilyIndex = ctx_.present_queue_family;
        queue_info[1].queueCount = 1;
        queue_info[1].pQueuePriorities = queue_priorities.data();

        dev_info.queueCreateInfoCount = 2;
    } else {
        dev_info.queueCreateInfoCount = 1;
    }

    dev_info.pQueueCreateInfos = queue_info.data();

    dev_info.enabledExtensionCount = static_cast<uint32_t>(device_extensions_.size());
    dev_info.ppEnabledExtensionNames = device_extensions_.data();

    // disable all features
    VkPhysicalDeviceFeatures features = {};
    dev_info.pEnabledFeatures = &features;

    vk::assert_success(vk::CreateDevice(ctx_.physical_dev, &dev_info, nullptr, &ctx_.dev));
}

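// One extra BackBuffer beyond back_buffer_count is created below. With, say,
// back_buffer_count == 3 there are 4 sets of sync objects in the queue, so
// there is always a spare acquire_semaphore that is not still tied to an
// in-flight present when vk::AcquireNextImageKHR is called.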
void Shell::create_back_buffers()
{
    VkSemaphoreCreateInfo sem_info = {};
    sem_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;

    VkFenceCreateInfo fence_info = {};
    fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT;

    // A BackBuffer tracks a swapchain image and the sync primitives that may
    // still be busy with it.  Having more BackBuffers than swapchain images
    // may allow us to replace the CPU wait on present_fence with a GPU wait
    // on acquire_semaphore.
    const int count = settings_.back_buffer_count + 1;
    for (int i = 0; i < count; i++) {
        BackBuffer buf = {};
        vk::assert_success(vk::CreateSemaphore(ctx_.dev, &sem_info, nullptr, &buf.acquire_semaphore));
        vk::assert_success(vk::CreateSemaphore(ctx_.dev, &sem_info, nullptr, &buf.render_semaphore));
        vk::assert_success(vk::CreateFence(ctx_.dev, &fence_info, nullptr, &buf.present_fence));

        ctx_.back_buffers.push(buf);
    }
}

void Shell::destroy_back_buffers()
{
    while (!ctx_.back_buffers.empty()) {
        const auto &buf = ctx_.back_buffers.front();

        vk::DestroySemaphore(ctx_.dev, buf.acquire_semaphore, nullptr);
        vk::DestroySemaphore(ctx_.dev, buf.render_semaphore, nullptr);
        vk::DestroyFence(ctx_.dev, buf.present_fence, nullptr);

        ctx_.back_buffers.pop();
    }
}

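// The surface format is simply the first one the surface reports; that is
// good enough for this sample, though an application that cares about the
// swapchain color space would normally search the list for a preferred pair
// such as VK_FORMAT_B8G8R8A8_UNORM with VK_COLOR_SPACE_SRGB_NONLINEAR_KHR.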
void Shell::create_swapchain()
{
    ctx_.surface = create_surface(ctx_.instance);

    VkBool32 supported;
    vk::assert_success(vk::GetPhysicalDeviceSurfaceSupportKHR(ctx_.physical_dev,
            ctx_.present_queue_family, ctx_.surface, &supported));
    // this should be guaranteed by the platform-specific can_present call
    assert(supported);

    std::vector<VkSurfaceFormatKHR> formats;
    vk::get(ctx_.physical_dev, ctx_.surface, formats);
    ctx_.format = formats[0];

    // defer to resize_swapchain()
    ctx_.swapchain = VK_NULL_HANDLE;
    ctx_.extent.width = (uint32_t) -1;
    ctx_.extent.height = (uint32_t) -1;
}

void Shell::destroy_swapchain()
{
    if (ctx_.swapchain != VK_NULL_HANDLE) {
        game_.detach_swapchain();

        vk::DestroySwapchainKHR(ctx_.dev, ctx_.swapchain, nullptr);
        ctx_.swapchain = VK_NULL_HANDLE;
    }

    vk::DestroySurfaceKHR(ctx_.instance, ctx_.surface, nullptr);
    ctx_.surface = VK_NULL_HANDLE;
}

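// Per the surface capabilities query below, a currentExtent of
// (0xFFFFFFFF, 0xFFFFFFFF) means the surface size is determined by the
// swapchain, which is when the width/height hints are honored; otherwise the
// surface dictates the extent. The platform shells presumably call this on
// window size changes as well as right after create_context().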
void Shell::resize_swapchain(uint32_t width_hint, uint32_t height_hint)
{
    VkSurfaceCapabilitiesKHR caps;
    vk::assert_success(vk::GetPhysicalDeviceSurfaceCapabilitiesKHR(ctx_.physical_dev,
            ctx_.surface, &caps));

    VkExtent2D extent = caps.currentExtent;
    // use the hints
    if (extent.width == (uint32_t) -1) {
        extent.width = width_hint;
        extent.height = height_hint;
    }
    // clamp the extent to the supported range, in case the hints are bogus
    if (extent.width < caps.minImageExtent.width)
        extent.width = caps.minImageExtent.width;
    else if (extent.width > caps.maxImageExtent.width)
        extent.width = caps.maxImageExtent.width;
    if (extent.height < caps.minImageExtent.height)
        extent.height = caps.minImageExtent.height;
    else if (extent.height > caps.maxImageExtent.height)
        extent.height = caps.maxImageExtent.height;

    if (ctx_.extent.width == extent.width && ctx_.extent.height == extent.height)
        return;

    uint32_t image_count = settings_.back_buffer_count;
    if (image_count < caps.minImageCount)
        image_count = caps.minImageCount;
    else if (caps.maxImageCount > 0 && image_count > caps.maxImageCount)
        image_count = caps.maxImageCount;   // maxImageCount == 0 means no upper limit

    assert(caps.supportedUsageFlags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT);
    assert(caps.supportedTransforms & caps.currentTransform);
    assert(caps.supportedCompositeAlpha & (VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
                                           VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR));
    VkCompositeAlphaFlagBitsKHR composite_alpha =
        (caps.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR) ?
        VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR : VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;

    std::vector<VkPresentModeKHR> modes;
    vk::get(ctx_.physical_dev, ctx_.surface, modes);

    // FIFO is the only mode universally supported
    VkPresentModeKHR mode = VK_PRESENT_MODE_FIFO_KHR;
    for (auto m : modes) {
        if ((settings_.vsync && m == VK_PRESENT_MODE_MAILBOX_KHR) ||
            (!settings_.vsync && m == VK_PRESENT_MODE_IMMEDIATE_KHR)) {
            mode = m;
            break;
        }
    }

    VkSwapchainCreateInfoKHR swapchain_info = {};
    swapchain_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
    swapchain_info.surface = ctx_.surface;
    swapchain_info.minImageCount = image_count;
    swapchain_info.imageFormat = ctx_.format.format;
    swapchain_info.imageColorSpace = ctx_.format.colorSpace;
    swapchain_info.imageExtent = extent;
    swapchain_info.imageArrayLayers = 1;
    swapchain_info.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;

    std::vector<uint32_t> queue_families(1, ctx_.game_queue_family);
    if (ctx_.game_queue_family != ctx_.present_queue_family) {
        queue_families.push_back(ctx_.present_queue_family);

        swapchain_info.imageSharingMode = VK_SHARING_MODE_CONCURRENT;
        swapchain_info.queueFamilyIndexCount = static_cast<uint32_t>(queue_families.size());
        swapchain_info.pQueueFamilyIndices = queue_families.data();
    } else {
        swapchain_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
    }
    swapchain_info.preTransform = caps.currentTransform;
    swapchain_info.compositeAlpha = composite_alpha;
    swapchain_info.presentMode = mode;
    swapchain_info.clipped = true;
    swapchain_info.oldSwapchain = ctx_.swapchain;

    vk::assert_success(vk::CreateSwapchainKHR(ctx_.dev, &swapchain_info, nullptr, &ctx_.swapchain));
    ctx_.extent = extent;

    // destroy the old swapchain
    if (swapchain_info.oldSwapchain != VK_NULL_HANDLE) {
        game_.detach_swapchain();

        vk::DeviceWaitIdle(ctx_.dev);
        vk::DestroySwapchainKHR(ctx_.dev, swapchain_info.oldSwapchain, nullptr);
    }

    game_.attach_swapchain();
}

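// Fixed-timestep simulation: real time accumulates in game_time_ and is
// consumed in game_tick_-sized steps, at most 3 ticks per call so a long
// stall cannot snowball. For example, with ticks_per_second == 30
// (game_tick_ ~= 0.0333 s), adding 0.05 s runs one on_tick() and carries
// ~0.0167 s over; the leftover fraction is what present_back_buffer()
// passes to on_frame() as an interpolation factor.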
void Shell::add_game_time(float time)
{
    int max_ticks = 3;

    if (!settings_.no_tick)
        game_time_ += time;

    while (game_time_ >= game_tick_ && max_ticks--) {
        game_.on_tick();
        game_time_ -= game_tick_;
    }
}

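// Recycle the oldest BackBuffer in the queue. Its present_fence was signaled
// by the empty submit in present_back_buffer(), so once the wait below
// returns, that buffer's acquire/render semaphores are no longer in use and
// can safely be handed to vk::AcquireNextImageKHR again.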
void Shell::acquire_back_buffer()
{
    // acquire just once when not presenting
    if (settings_.no_present &&
        ctx_.acquired_back_buffer.acquire_semaphore != VK_NULL_HANDLE)
        return;

    auto &buf = ctx_.back_buffers.front();

    // wait until this buffer's acquire and render semaphores have been
    // waited on (are unsignaled) and can be reused
    vk::assert_success(vk::WaitForFences(ctx_.dev, 1, &buf.present_fence,
            true, UINT64_MAX));
    // reset the fence for the next present
    vk::assert_success(vk::ResetFences(ctx_.dev, 1, &buf.present_fence));

    vk::assert_success(vk::AcquireNextImageKHR(ctx_.dev, ctx_.swapchain,
            UINT64_MAX, buf.acquire_semaphore, VK_NULL_HANDLE,
            &buf.image_index));

    ctx_.acquired_back_buffer = buf;
    ctx_.back_buffers.pop();
}

void Shell::present_back_buffer()
{
    const auto &buf = ctx_.acquired_back_buffer;

    if (!settings_.no_render)
        game_.on_frame(game_time_ / game_tick_);

    if (settings_.no_present) {
        fake_present();
        return;
    }

    VkPresentInfoKHR present_info = {};
    present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
    present_info.waitSemaphoreCount = 1;
    present_info.pWaitSemaphores = (settings_.no_render) ?
        &buf.acquire_semaphore : &buf.render_semaphore;
    present_info.swapchainCount = 1;
    present_info.pSwapchains = &ctx_.swapchain;
    present_info.pImageIndices = &buf.image_index;

    vk::assert_success(vk::QueuePresentKHR(ctx_.present_queue, &present_info));

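    // vkQueuePresentKHR takes no fence, so queue an empty submit right after
    // it; present_fence is then signaled once the queue has worked through
    // the present, which is what acquire_back_buffer() waits on before
    // reusing this BackBuffer's semaphores.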
    vk::assert_success(vk::QueueSubmit(ctx_.present_queue, 0, nullptr, buf.present_fence));
    ctx_.back_buffers.push(buf);
}

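// With no_present set there is no vkQueuePresentKHR to consume
// render_semaphore, so an empty submit stands in for it: it waits on
// render_semaphore and re-signals acquire_semaphore, keeping the single
// reused BackBuffer's semaphores in the same state a real present would
// leave them in.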
void Shell::fake_present()
{
    const auto &buf = ctx_.acquired_back_buffer;

    assert(settings_.no_present);

    // wait on the render semaphore and signal the acquire semaphore
    if (!settings_.no_render) {
        VkPipelineStageFlags stage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
        VkSubmitInfo submit_info = {};
        submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submit_info.waitSemaphoreCount = 1;
        submit_info.pWaitSemaphores = &buf.render_semaphore;
        submit_info.pWaitDstStageMask = &stage;
        submit_info.signalSemaphoreCount = 1;
        submit_info.pSignalSemaphores = &buf.acquire_semaphore;
        vk::assert_success(vk::QueueSubmit(ctx_.game_queue, 1, &submit_info, VK_NULL_HANDLE));
    }

    // push the buffer back just once, so destroy_back_buffers can still
    // destroy its sync objects
    if (buf.acquire_semaphore != ctx_.back_buffers.back().acquire_semaphore)
        ctx_.back_buffers.push(buf);
}