//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// RendererVk.cpp:
//    Implements the class methods for RendererVk.
//

#include "libANGLE/renderer/vulkan/RendererVk.h"

// Placing this first seems to solve an intellisense bug.
#include "libANGLE/renderer/vulkan/vk_utils.h"

#include <EGL/eglext.h>

#include "common/debug.h"
#include "common/platform.h"
#include "common/system_utils.h"
#include "libANGLE/Context.h"
#include "libANGLE/Display.h"
#include "libANGLE/renderer/driver_utils.h"
#include "libANGLE/renderer/vulkan/CommandGraph.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/DisplayVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/GlslangWrapper.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
#include "libANGLE/renderer/vulkan/vk_caps_utils.h"
#include "libANGLE/renderer/vulkan/vk_format_utils.h"
#include "libANGLE/trace.h"
#include "platform/Platform.h"

// Consts
namespace
{
const uint32_t kMockVendorID = 0xba5eba11;
const uint32_t kMockDeviceID = 0xf005ba11;
constexpr char kMockDeviceName[] = "Vulkan Mock Device";
constexpr VkFormatFeatureFlags kInvalidFormatFeatureFlags = static_cast<VkFormatFeatureFlags>(-1);
}  // anonymous namespace

namespace rx
{

namespace
{
// Update the pipeline cache every this many swaps.
constexpr uint32_t kPipelineCacheVkUpdatePeriod = 60;
// Wait a maximum of 10s. If that times out, we declare it a failure.
constexpr uint64_t kMaxFenceWaitTimeNs = 10'000'000'000llu;
// Per the Vulkan specification, as long as Vulkan 1.1+ is returned by vkEnumerateInstanceVersion,
// ANGLE must indicate the highest version of Vulkan functionality that it uses. The Vulkan
// validation layers will issue messages for any core functionality that requires a higher version.
// This value must be increased whenever ANGLE starts using functionality from a newer core
// version of Vulkan.
constexpr uint32_t kPreferredVulkanAPIVersion = VK_API_VERSION_1_1;

bool ShouldEnableMockICD(const egl::AttributeMap &attribs)
{
#if !defined(ANGLE_PLATFORM_ANDROID)
    // Mock ICD does not currently run on Android
    return (attribs.get(EGL_PLATFORM_ANGLE_DEVICE_TYPE_ANGLE,
                        EGL_PLATFORM_ANGLE_DEVICE_TYPE_HARDWARE_ANGLE) ==
            EGL_PLATFORM_ANGLE_DEVICE_TYPE_NULL_ANGLE);
#else
    return false;
#endif  // !defined(ANGLE_PLATFORM_ANDROID)
}

bool StrLess(const char *a, const char *b)
{
    return strcmp(a, b) < 0;
}

bool ExtensionFound(const char *needle, const RendererVk::ExtensionNameList &haystack)
{
    // NOTE: The list must be sorted.
    return std::binary_search(haystack.begin(), haystack.end(), needle, StrLess);
}

VkResult VerifyExtensionsPresent(const RendererVk::ExtensionNameList &haystack,
                                 const RendererVk::ExtensionNameList &needles)
{
    // NOTE: The lists must be sorted.
    if (std::includes(haystack.begin(), haystack.end(), needles.begin(), needles.end(), StrLess))
    {
        return VK_SUCCESS;
    }
    for (const char *needle : needles)
    {
        if (!ExtensionFound(needle, haystack))
        {
            ERR() << "Extension not supported: " << needle;
        }
    }
    return VK_ERROR_EXTENSION_NOT_PRESENT;
}

// Array of validation error/warning messages that will be ignored; each entry should reference a
// bug ID.
constexpr const char *kSkippedMessages[] = {
    // http://anglebug.com/2866
    "UNASSIGNED-CoreValidation-Shader-OutputNotConsumed",
    // http://anglebug.com/2796
    "UNASSIGNED-CoreValidation-Shader-PointSizeMissing",
};

// Suppress validation errors that are known. Returns "true" if the given message is known and
// should be ignored, "false" otherwise.
bool IsIgnoredDebugMessage(const char *message)
{
    if (!message)
    {
        return false;
    }
    for (const char *msg : kSkippedMessages)
    {
        if (strstr(message, msg) != nullptr)
        {
            return true;
        }
    }
    return false;
}

const char *GetVkObjectTypeName(VkObjectType type)
{
    switch (type)
    {
        case VK_OBJECT_TYPE_UNKNOWN:
            return "Unknown";
        case VK_OBJECT_TYPE_INSTANCE:
            return "Instance";
        case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
            return "Physical Device";
        case VK_OBJECT_TYPE_DEVICE:
            return "Device";
        case VK_OBJECT_TYPE_QUEUE:
            return "Queue";
        case VK_OBJECT_TYPE_SEMAPHORE:
            return "Semaphore";
        case VK_OBJECT_TYPE_COMMAND_BUFFER:
            return "Command Buffer";
        case VK_OBJECT_TYPE_FENCE:
            return "Fence";
        case VK_OBJECT_TYPE_DEVICE_MEMORY:
            return "Device Memory";
        case VK_OBJECT_TYPE_BUFFER:
            return "Buffer";
        case VK_OBJECT_TYPE_IMAGE:
            return "Image";
        case VK_OBJECT_TYPE_EVENT:
            return "Event";
        case VK_OBJECT_TYPE_QUERY_POOL:
            return "Query Pool";
        case VK_OBJECT_TYPE_BUFFER_VIEW:
            return "Buffer View";
        case VK_OBJECT_TYPE_IMAGE_VIEW:
            return "Image View";
        case VK_OBJECT_TYPE_SHADER_MODULE:
            return "Shader Module";
        case VK_OBJECT_TYPE_PIPELINE_CACHE:
            return "Pipeline Cache";
        case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
            return "Pipeline Layout";
        case VK_OBJECT_TYPE_RENDER_PASS:
            return "Render Pass";
        case VK_OBJECT_TYPE_PIPELINE:
            return "Pipeline";
        case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
            return "Descriptor Set Layout";
        case VK_OBJECT_TYPE_SAMPLER:
            return "Sampler";
        case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
            return "Descriptor Pool";
        case VK_OBJECT_TYPE_DESCRIPTOR_SET:
            return "Descriptor Set";
        case VK_OBJECT_TYPE_FRAMEBUFFER:
            return "Framebuffer";
        case VK_OBJECT_TYPE_COMMAND_POOL:
            return "Command Pool";
        case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION:
            return "Sampler YCbCr Conversion";
        case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE:
            return "Descriptor Update Template";
        case VK_OBJECT_TYPE_SURFACE_KHR:
            return "Surface";
        case VK_OBJECT_TYPE_SWAPCHAIN_KHR:
            return "Swapchain";
        case VK_OBJECT_TYPE_DISPLAY_KHR:
            return "Display";
        case VK_OBJECT_TYPE_DISPLAY_MODE_KHR:
            return "Display Mode";
        case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT:
            return "Debug Report Callback";
        case VK_OBJECT_TYPE_OBJECT_TABLE_NVX:
            return "Object Table";
        case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX:
            return "Indirect Commands Layout";
        case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT:
            return "Debug Utils Messenger";
        case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT:
            return "Validation Cache";
        case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV:
            return "Acceleration Structure";
        default:
            return "<Unrecognized>";
    }
}

VKAPI_ATTR VkBool32 VKAPI_CALL
DebugUtilsMessenger(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                    VkDebugUtilsMessageTypeFlagsEXT messageTypes,
                    const VkDebugUtilsMessengerCallbackDataEXT *callbackData,
                    void *userData)
{
    // See if it's an issue we are aware of and don't want to be spammed about.
    if (IsIgnoredDebugMessage(callbackData->pMessageIdName))
    {
        return VK_FALSE;
    }

    std::ostringstream log;
    if (callbackData->pMessageIdName)
    {
        log << "[ " << callbackData->pMessageIdName << " ] ";
    }
    log << callbackData->pMessage << std::endl;

    // Aesthetic value based on length of the function name, line number, etc.
    constexpr size_t kStartIndent = 28;

    // Output the debug marker hierarchy under which this error has occurred.
    size_t indent = kStartIndent;
    if (callbackData->queueLabelCount > 0)
    {
        log << std::string(indent++, ' ') << "<Queue Label Hierarchy:>" << std::endl;
        for (uint32_t i = 0; i < callbackData->queueLabelCount; ++i)
        {
            log << std::string(indent++, ' ') << callbackData->pQueueLabels[i].pLabelName
                << std::endl;
        }
    }
    if (callbackData->cmdBufLabelCount > 0)
    {
        log << std::string(indent++, ' ') << "<Command Buffer Label Hierarchy:>" << std::endl;
        for (uint32_t i = 0; i < callbackData->cmdBufLabelCount; ++i)
        {
            log << std::string(indent++, ' ') << callbackData->pCmdBufLabels[i].pLabelName
                << std::endl;
        }
    }
    // Output the objects involved in this error message.
    if (callbackData->objectCount > 0)
    {
        for (uint32_t i = 0; i < callbackData->objectCount; ++i)
        {
            const char *objectName = callbackData->pObjects[i].pObjectName;
            const char *objectType = GetVkObjectTypeName(callbackData->pObjects[i].objectType);
            uint64_t objectHandle = callbackData->pObjects[i].objectHandle;
            log << std::string(indent, ' ') << "Object: ";
            if (objectHandle == 0)
            {
                log << "VK_NULL_HANDLE";
            }
            else
            {
                log << "0x" << std::hex << objectHandle << std::dec;
            }
            log << " (type = " << objectType << "(" << callbackData->pObjects[i].objectType << "))";
            if (objectName)
            {
                log << " [" << objectName << "]";
            }
            log << std::endl;
        }
    }

    bool isError = (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) != 0;
    std::string msg = log.str();

    if (isError)
    {
        ERR() << msg;
    }
    else
    {
        WARN() << msg;
    }

    return VK_FALSE;
}

VKAPI_ATTR VkBool32 VKAPI_CALL DebugReportCallback(VkDebugReportFlagsEXT flags,
                                                   VkDebugReportObjectTypeEXT objectType,
                                                   uint64_t object,
                                                   size_t location,
                                                   int32_t messageCode,
                                                   const char *layerPrefix,
                                                   const char *message,
                                                   void *userData)
{
    if (IsIgnoredDebugMessage(message))
    {
        return VK_FALSE;
    }
    if ((flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0)
    {
        ERR() << message;
#if !defined(NDEBUG)
        // Abort the call in Debug builds.
        return VK_TRUE;
#endif
    }
    else if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0)
    {
        WARN() << message;
    }
    else
    {
        // Uncomment this if you want Vulkan spam.
        // WARN() << message;
    }

    return VK_FALSE;
}

// If we're loading the validation layers, we could be running from any random directory.
// Change to the executable directory so we can find the layers, then change back to the
// previous directory so we don't disrupt the application.
class ScopedVkLoaderEnvironment : angle::NonCopyable
{
  public:
    ScopedVkLoaderEnvironment(bool enableValidationLayers, bool enableMockICD)
        : mEnableValidationLayers(enableValidationLayers),
          mEnableMockICD(enableMockICD),
          mChangedCWD(false),
          mChangedICDPath(false)
    {
        // Changing CWD and setting environment variables makes no sense on Android,
        // since this code is part of a Java application there.
        // The Android Vulkan loader doesn't need this either.
#if !defined(ANGLE_PLATFORM_ANDROID) && !defined(ANGLE_PLATFORM_FUCHSIA)
        if (enableMockICD)
        {
            // Override environment variable to use built Mock ICD
            // ANGLE_VK_ICD_JSON gets set to the built mock ICD in BUILD.gn
            mPreviousICDPath = angle::GetEnvironmentVar(g_VkICDPathEnv);
            mChangedICDPath = angle::SetEnvironmentVar(g_VkICDPathEnv, ANGLE_VK_ICD_JSON);
            if (!mChangedICDPath)
            {
                ERR() << "Error setting Path for Mock/Null Driver.";
                mEnableMockICD = false;
            }
        }
        if (mEnableValidationLayers || mEnableMockICD)
        {
            const auto &cwd = angle::GetCWD();
            if (!cwd.valid())
            {
                ERR() << "Error getting CWD for Vulkan layers init.";
                mEnableValidationLayers = false;
                mEnableMockICD = false;
            }
            else
            {
                mPreviousCWD = cwd.value();
                std::string exeDir = angle::GetExecutableDirectory();
                mChangedCWD = angle::SetCWD(exeDir.c_str());
                if (!mChangedCWD)
                {
                    ERR() << "Error setting CWD for Vulkan layers init.";
                    mEnableValidationLayers = false;
                    mEnableMockICD = false;
                }
            }
        }

        // Override environment variable to use the ANGLE layers.
        if (mEnableValidationLayers)
        {
            if (!angle::PrependPathToEnvironmentVar(g_VkLoaderLayersPathEnv, ANGLE_VK_DATA_DIR))
            {
                ERR() << "Error setting environment for Vulkan layers init.";
                mEnableValidationLayers = false;
            }
        }
#endif  // !defined(ANGLE_PLATFORM_ANDROID) && !defined(ANGLE_PLATFORM_FUCHSIA)
    }

    ~ScopedVkLoaderEnvironment()
    {
        if (mChangedCWD)
        {
#if !defined(ANGLE_PLATFORM_ANDROID)
            ASSERT(mPreviousCWD.valid());
            angle::SetCWD(mPreviousCWD.value().c_str());
#endif  // !defined(ANGLE_PLATFORM_ANDROID)
        }
        if (mChangedICDPath)
        {
            if (mPreviousICDPath.value().empty())
            {
                angle::UnsetEnvironmentVar(g_VkICDPathEnv);
            }
            else
            {
                angle::SetEnvironmentVar(g_VkICDPathEnv, mPreviousICDPath.value().c_str());
            }
        }
    }

    bool canEnableValidationLayers() const { return mEnableValidationLayers; }

    bool canEnableMockICD() const { return mEnableMockICD; }

  private:
    bool mEnableValidationLayers;
    bool mEnableMockICD;
    bool mChangedCWD;
    Optional<std::string> mPreviousCWD;
    bool mChangedICDPath;
    Optional<std::string> mPreviousICDPath;
};

void ChoosePhysicalDevice(const std::vector<VkPhysicalDevice> &physicalDevices,
                          bool preferMockICD,
                          VkPhysicalDevice *physicalDeviceOut,
                          VkPhysicalDeviceProperties *physicalDevicePropertiesOut)
{
    ASSERT(!physicalDevices.empty());
    if (preferMockICD)
    {
        for (const VkPhysicalDevice &physicalDevice : physicalDevices)
        {
            vkGetPhysicalDeviceProperties(physicalDevice, physicalDevicePropertiesOut);
            if ((kMockVendorID == physicalDevicePropertiesOut->vendorID) &&
                (kMockDeviceID == physicalDevicePropertiesOut->deviceID) &&
                (strcmp(kMockDeviceName, physicalDevicePropertiesOut->deviceName) == 0))
            {
                *physicalDeviceOut = physicalDevice;
                return;
            }
        }
        WARN() << "Vulkan Mock Driver was requested but Mock Device was not found. Using default "
                  "physicalDevice instead.";
    }

    // Fall back to first device.
    *physicalDeviceOut = physicalDevices[0];
    vkGetPhysicalDeviceProperties(*physicalDeviceOut, physicalDevicePropertiesOut);
}

angle::Result WaitFences(vk::Context *context,
                         std::vector<vk::Shared<vk::Fence>> *fences,
                         bool block)
{
    uint64_t timeout = block ? kMaxFenceWaitTimeNs : 0;
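    // Note: a timeout of zero makes vkWaitForFences poll: it returns immediately with VK_TIMEOUT
    // if the fence is not yet signaled, which the loop below uses to stop early when not blocking.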

    // Iterate backwards over the fences, removing them from the list in constant time when they
    // are complete.
    while (!fences->empty())
    {
        VkResult result = fences->back().get().wait(context->getDevice(), timeout);
        if (result == VK_TIMEOUT)
        {
            return angle::Result::Continue;
        }
        ANGLE_VK_TRY(context, result);

        context->getRenderer()->resetSharedFence(&fences->back());
        fences->pop_back();
    }

    return angle::Result::Continue;
}

}  // anonymous namespace

// RendererVk implementation.
RendererVk::RendererVk()
    : mDisplay(nullptr),
      mCapsInitialized(false),
      mFeaturesInitialized(false),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mEnableMockICD(false),
      mDebugUtilsMessenger(VK_NULL_HANDLE),
      mDebugReportCallback(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mPhysicalDeviceSubgroupProperties{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES},
      mQueue(VK_NULL_HANDLE),
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mMaxVertexAttribDivisor(1),
      mDevice(VK_NULL_HANDLE),
      mDeviceLost(false),
      mPipelineCacheVkUpdateTimeout(kPipelineCacheVkUpdatePeriod),
      mPipelineCacheDirty(false),
      mPipelineCacheInitialized(false)
{
    VkFormatProperties invalid = {0, 0, kInvalidFormatFeatureFlags};
    mFormatProperties.fill(invalid);
}

RendererVk::~RendererVk()
{
    ASSERT(mFencedGarbage.empty());
}

void RendererVk::onDestroy(vk::Context *context)
{
    (void)cleanupGarbage(context, true);

    mFenceRecycler.destroy(mDevice);

    mPipelineLayoutCache.destroy(mDevice);
    mDescriptorSetLayoutCache.destroy(mDevice);

    mPipelineCache.destroy(mDevice);

    GlslangWrapper::Release();

    if (mDevice)
    {
        vkDestroyDevice(mDevice, nullptr);
        mDevice = VK_NULL_HANDLE;
    }

    if (mDebugUtilsMessenger)
    {
        ASSERT(mInstance && vkDestroyDebugUtilsMessengerEXT);
        vkDestroyDebugUtilsMessengerEXT(mInstance, mDebugUtilsMessenger, nullptr);

        ASSERT(mDebugReportCallback == VK_NULL_HANDLE);
    }
    else if (mDebugReportCallback)
    {
        ASSERT(mInstance && vkDestroyDebugReportCallbackEXT);
        vkDestroyDebugReportCallbackEXT(mInstance, mDebugReportCallback, nullptr);
    }

    if (mInstance)
    {
        vkDestroyInstance(mInstance, nullptr);
        mInstance = VK_NULL_HANDLE;
    }

    mMemoryProperties.destroy();
    mPhysicalDevice = VK_NULL_HANDLE;
}

void RendererVk::notifyDeviceLost()
{
    mDeviceLost = true;
    mDisplay->notifyDeviceLost();
}

bool RendererVk::isDeviceLost() const
{
    return mDeviceLost;
}

angle::Result RendererVk::initialize(DisplayVk *displayVk,
                                     egl::Display *display,
                                     const char *wsiExtension,
                                     const char *wsiLayer)
{
    mDisplay = display;
    const egl::AttributeMap &attribs = mDisplay->getAttributeMap();
    ScopedVkLoaderEnvironment scopedEnvironment(ShouldUseDebugLayers(attribs),
                                                ShouldEnableMockICD(attribs));
    mEnableValidationLayers = scopedEnvironment.canEnableValidationLayers();
    mEnableMockICD = scopedEnvironment.canEnableMockICD();

    // Gather global layer properties.
    uint32_t instanceLayerCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));

    std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
    if (instanceLayerCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateInstanceLayerProperties(&instanceLayerCount,
                                                                   instanceLayerProps.data()));
    }

    VulkanLayerVector enabledInstanceLayerNames;
    if (mEnableValidationLayers)
    {
        bool layersRequested =
            (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) == EGL_TRUE);
        mEnableValidationLayers = GetAvailableValidationLayers(instanceLayerProps, layersRequested,
                                                               &enabledInstanceLayerNames);
    }

    if (wsiLayer)
    {
        enabledInstanceLayerNames.push_back(wsiLayer);
    }

    // Enumerate instance extensions that are provided by the vulkan
    // implementation and implicit layers.
    uint32_t instanceExtensionCount = 0;
    ANGLE_VK_TRY(displayVk,
                 vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
    if (instanceExtensionCount > 0)
    {
        ANGLE_VK_TRY(displayVk,
                     vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
                                                            instanceExtensionProps.data()));
    }

    // Enumerate instance extensions that are provided by explicit layers.
    for (const char *layerName : enabledInstanceLayerNames)
    {
        uint32_t previousExtensionCount = static_cast<uint32_t>(instanceExtensionProps.size());
        uint32_t instanceLayerExtensionCount = 0;
        ANGLE_VK_TRY(displayVk, vkEnumerateInstanceExtensionProperties(
                                    layerName, &instanceLayerExtensionCount, nullptr));
        instanceExtensionProps.resize(previousExtensionCount + instanceLayerExtensionCount);
        ANGLE_VK_TRY(displayVk, vkEnumerateInstanceExtensionProperties(
                                    layerName, &instanceLayerExtensionCount,
                                    instanceExtensionProps.data() + previousExtensionCount));
    }

    ExtensionNameList instanceExtensionNames;
    if (!instanceExtensionProps.empty())
    {
        for (const VkExtensionProperties &i : instanceExtensionProps)
        {
            instanceExtensionNames.push_back(i.extensionName);
        }
        std::sort(instanceExtensionNames.begin(), instanceExtensionNames.end(), StrLess);
    }

    ExtensionNameList enabledInstanceExtensions;
    enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
    enabledInstanceExtensions.push_back(wsiExtension);

    bool enableDebugUtils =
        mEnableValidationLayers &&
        ExtensionFound(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, instanceExtensionNames);
    bool enableDebugReport =
        mEnableValidationLayers && !enableDebugUtils &&
        ExtensionFound(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, instanceExtensionNames);

    if (enableDebugUtils)
    {
        enabledInstanceExtensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
    }
    else if (enableDebugReport)
    {
        enabledInstanceExtensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
    }

    // Verify the required extensions are in the extension names set. Fail if not.
    std::sort(enabledInstanceExtensions.begin(), enabledInstanceExtensions.end(), StrLess);
    ANGLE_VK_TRY(displayVk,
                 VerifyExtensionsPresent(instanceExtensionNames, enabledInstanceExtensions));

    // Enable VK_KHR_get_physical_device_properties_2 if available.
    if (ExtensionFound(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
                       instanceExtensionNames))
    {
        enabledInstanceExtensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }

    VkApplicationInfo applicationInfo = {};
    applicationInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    applicationInfo.pApplicationName = "ANGLE";
    applicationInfo.applicationVersion = 1;
    applicationInfo.pEngineName = "ANGLE";
    applicationInfo.engineVersion = 1;

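    // Note: mInstance is still VK_NULL_HANDLE here. vkEnumerateInstanceVersion is one of the few
    // global-level entry points that vkGetInstanceProcAddr may be queried for with a null
    // instance; it is also absent from 1.0 loaders, hence the null check below.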
    auto enumerateInstanceVersion = reinterpret_cast<PFN_vkEnumerateInstanceVersion>(
        vkGetInstanceProcAddr(mInstance, "vkEnumerateInstanceVersion"));
    if (!enumerateInstanceVersion)
    {
        applicationInfo.apiVersion = VK_API_VERSION_1_0;
    }
    else
    {
        uint32_t apiVersion = VK_API_VERSION_1_0;
        ANGLE_VK_TRY(displayVk, enumerateInstanceVersion(&apiVersion));
        if ((VK_VERSION_MAJOR(apiVersion) > 1) || (VK_VERSION_MINOR(apiVersion) >= 1))
        {
            // This is the highest version of core Vulkan functionality that ANGLE uses.
            applicationInfo.apiVersion = kPreferredVulkanAPIVersion;
        }
        else
        {
            // Since only 1.0 instance-level functionality is available, this must be set to 1.0.
            applicationInfo.apiVersion = VK_API_VERSION_1_0;
        }
    }

    VkInstanceCreateInfo instanceInfo = {};
    instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    instanceInfo.flags = 0;
    instanceInfo.pApplicationInfo = &applicationInfo;

    // Enable requested layers and extensions.
    instanceInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
    instanceInfo.ppEnabledExtensionNames =
        enabledInstanceExtensions.empty() ? nullptr : enabledInstanceExtensions.data();
    instanceInfo.enabledLayerCount = static_cast<uint32_t>(enabledInstanceLayerNames.size());
    instanceInfo.ppEnabledLayerNames = enabledInstanceLayerNames.data();

    ANGLE_VK_TRY(displayVk, vkCreateInstance(&instanceInfo, nullptr, &mInstance));

    if (enableDebugUtils)
    {
        // Try to use the newer EXT_debug_utils if it exists.
        InitDebugUtilsEXTFunctions(mInstance);

        // Create the messenger callback.
        VkDebugUtilsMessengerCreateInfoEXT messengerInfo = {};

        constexpr VkDebugUtilsMessageSeverityFlagsEXT kSeveritiesToLog =
            VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT |
            VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;

        constexpr VkDebugUtilsMessageTypeFlagsEXT kMessagesToLog =
            VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
            VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
            VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;

        messengerInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
        messengerInfo.messageSeverity = kSeveritiesToLog;
        messengerInfo.messageType = kMessagesToLog;
        messengerInfo.pfnUserCallback = &DebugUtilsMessenger;
        messengerInfo.pUserData = this;

        ANGLE_VK_TRY(displayVk, vkCreateDebugUtilsMessengerEXT(mInstance, &messengerInfo, nullptr,
                                                               &mDebugUtilsMessenger));
    }
    else if (enableDebugReport)
    {
        // Fallback to EXT_debug_report.
        InitDebugReportEXTFunctions(mInstance);

        VkDebugReportCallbackCreateInfoEXT debugReportInfo = {};

        debugReportInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
        debugReportInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT;
        debugReportInfo.pfnCallback = &DebugReportCallback;
        debugReportInfo.pUserData = this;

        ANGLE_VK_TRY(displayVk, vkCreateDebugReportCallbackEXT(mInstance, &debugReportInfo, nullptr,
                                                               &mDebugReportCallback));
    }

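    // Note: std::find over a list of C strings compares pointers, not contents. This relies on
    // the compiler pooling identical string literals: the same extension-name macro expands to
    // the same literal here as where it was pushed above.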
    if (std::find(enabledInstanceExtensions.begin(), enabledInstanceExtensions.end(),
                  VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) !=
        enabledInstanceExtensions.end())
    {
        InitGetPhysicalDeviceProperties2KHRFunctions(mInstance);
        ASSERT(vkGetPhysicalDeviceProperties2KHR);
    }

    uint32_t physicalDeviceCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
    ANGLE_VK_CHECK(displayVk, physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
    std::vector<VkPhysicalDevice> physicalDevices(physicalDeviceCount);
    ANGLE_VK_TRY(displayVk, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount,
                                                       physicalDevices.data()));
    ChoosePhysicalDevice(physicalDevices, mEnableMockICD, &mPhysicalDevice,
                         &mPhysicalDeviceProperties);

    vkGetPhysicalDeviceFeatures(mPhysicalDevice, &mPhysicalDeviceFeatures);

    // Ensure we can find a graphics queue family.
    uint32_t queueCount = 0;
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount, nullptr);

    ANGLE_VK_CHECK(displayVk, queueCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    mQueueFamilyProperties.resize(queueCount);
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount,
                                             mQueueFamilyProperties.data());

    size_t graphicsQueueFamilyCount = 0;
    uint32_t firstGraphicsQueueFamily = 0;
    constexpr VkQueueFlags kGraphicsAndCompute = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
    for (uint32_t familyIndex = 0; familyIndex < queueCount; ++familyIndex)
    {
        const auto &queueInfo = mQueueFamilyProperties[familyIndex];
        if ((queueInfo.queueFlags & kGraphicsAndCompute) == kGraphicsAndCompute)
        {
            ASSERT(queueInfo.queueCount > 0);
            // Record the first matching family, but keep counting: the single-family fast path
            // below depends on an accurate count.
            if (graphicsQueueFamilyCount == 0)
            {
                firstGraphicsQueueFamily = familyIndex;
            }
            graphicsQueueFamilyCount++;
        }
    }

    ANGLE_VK_CHECK(displayVk, graphicsQueueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    // If only one queue family, go ahead and initialize the device. If there is more than one
    // queue, we'll have to wait until we see a WindowSurface to know which supports present.
    if (graphicsQueueFamilyCount == 1)
    {
        ANGLE_TRY(initializeDevice(displayVk, firstGraphicsQueueFamily));
    }

    // Store the physical device memory properties so we can find the right memory pools.
    mMemoryProperties.init(mPhysicalDevice);

    GlslangWrapper::Initialize();

    // Initialize the format table.
    mFormatTable.initialize(this, &mNativeTextureCaps, &mNativeCaps.compressedTextureFormats);

    return angle::Result::Continue;
}

angle::Result RendererVk::initializeDevice(DisplayVk *displayVk, uint32_t queueFamilyIndex)
{
    uint32_t deviceLayerCount = 0;
    ANGLE_VK_TRY(displayVk,
                 vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));

    std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
    if (deviceLayerCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
                                                                 deviceLayerProps.data()));
    }

    VulkanLayerVector enabledDeviceLayerNames;
    if (mEnableValidationLayers)
    {
        mEnableValidationLayers =
            GetAvailableValidationLayers(deviceLayerProps, false, &enabledDeviceLayerNames);
    }

    const char *wsiLayer = displayVk->getWSILayer();
    if (wsiLayer)
    {
        enabledDeviceLayerNames.push_back(wsiLayer);
    }

    // Enumerate device extensions that are provided by the vulkan
    // implementation and implicit layers.
    uint32_t deviceExtensionCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                                 &deviceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
    if (deviceExtensionCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                                     &deviceExtensionCount,
                                                                     deviceExtensionProps.data()));
    }

    // Enumerate device extensions that are provided by explicit layers.
    for (const char *layerName : enabledDeviceLayerNames)
    {
        uint32_t previousExtensionCount = static_cast<uint32_t>(deviceExtensionProps.size());
        uint32_t deviceLayerExtensionCount = 0;
        ANGLE_VK_TRY(displayVk,
                     vkEnumerateDeviceExtensionProperties(mPhysicalDevice, layerName,
                                                          &deviceLayerExtensionCount, nullptr));
        deviceExtensionProps.resize(previousExtensionCount + deviceLayerExtensionCount);
        ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(
                                    mPhysicalDevice, layerName, &deviceLayerExtensionCount,
                                    deviceExtensionProps.data() + previousExtensionCount));
    }

    ExtensionNameList deviceExtensionNames;
    if (!deviceExtensionProps.empty())
    {
        ASSERT(deviceExtensionNames.size() <= deviceExtensionProps.size());
        for (const VkExtensionProperties &prop : deviceExtensionProps)
        {
            deviceExtensionNames.push_back(prop.extensionName);
        }
        std::sort(deviceExtensionNames.begin(), deviceExtensionNames.end(), StrLess);
    }

    ExtensionNameList enabledDeviceExtensions;
    enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    initFeatures(deviceExtensionNames);
    OverrideFeaturesWithDisplayState(&mFeatures, displayVk->getState());
    mFeaturesInitialized = true;

    // Selectively enable KHR_MAINTENANCE1 to support viewport flipping.
    if ((getFeatures().flipViewportY.enabled) &&
        (mPhysicalDeviceProperties.apiVersion < VK_MAKE_VERSION(1, 1, 0)))
    {
        enabledDeviceExtensions.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
    }
    if (getFeatures().supportsIncrementalPresent.enabled)
    {
        enabledDeviceExtensions.push_back(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME);
    }

    if (getFeatures().supportsAndroidHardwareBuffer.enabled ||
        getFeatures().supportsExternalMemoryFd.enabled)
    {
        enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
    }

#if defined(ANGLE_PLATFORM_ANDROID)
    if (getFeatures().supportsAndroidHardwareBuffer.enabled)
    {
        enabledDeviceExtensions.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
        enabledDeviceExtensions.push_back(
            VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
        InitExternalMemoryHardwareBufferANDROIDFunctions(mInstance);
    }
#else
    ASSERT(!getFeatures().supportsAndroidHardwareBuffer.enabled);
#endif

    if (getFeatures().supportsExternalMemoryFd.enabled)
    {
        enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
    }

    if (getFeatures().supportsExternalSemaphoreFd.enabled)
    {
        enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
        enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
        InitExternalSemaphoreFdFunctions(mInstance);
    }

    if (getFeatures().supportsShaderStencilExport.enabled)
    {
        enabledDeviceExtensions.push_back(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME);
    }

    std::sort(enabledDeviceExtensions.begin(), enabledDeviceExtensions.end(), StrLess);
    ANGLE_VK_TRY(displayVk, VerifyExtensionsPresent(deviceExtensionNames, enabledDeviceExtensions));

    // Select additional features to be enabled
    VkPhysicalDeviceFeatures2KHR enabledFeatures = {};
    enabledFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    enabledFeatures.features.independentBlend = mPhysicalDeviceFeatures.independentBlend;
    enabledFeatures.features.robustBufferAccess = mPhysicalDeviceFeatures.robustBufferAccess;
    enabledFeatures.features.samplerAnisotropy = mPhysicalDeviceFeatures.samplerAnisotropy;
    enabledFeatures.features.vertexPipelineStoresAndAtomics =
        mPhysicalDeviceFeatures.vertexPipelineStoresAndAtomics;
    enabledFeatures.features.fragmentStoresAndAtomics =
        mPhysicalDeviceFeatures.fragmentStoresAndAtomics;
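    // Inherited queries are only needed when commands are recorded into secondary command
    // buffers, i.e. when they don't execute inline in the primary command buffer.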
    if (!vk::CommandBuffer::ExecutesInline())
    {
        enabledFeatures.features.inheritedQueries = mPhysicalDeviceFeatures.inheritedQueries;
    }

    VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT divisorFeatures = {};
    divisorFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
    divisorFeatures.vertexAttributeInstanceRateDivisor = true;

    float zeroPriority = 0.0f;
    VkDeviceQueueCreateInfo queueCreateInfo = {};
    queueCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.flags = 0;
    queueCreateInfo.queueFamilyIndex = queueFamilyIndex;
    queueCreateInfo.queueCount = 1;
    queueCreateInfo.pQueuePriorities = &zeroPriority;

    // Initialize the device
    VkDeviceCreateInfo createInfo = {};

    createInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    createInfo.flags = 0;
    createInfo.queueCreateInfoCount = 1;
    createInfo.pQueueCreateInfos = &queueCreateInfo;
    createInfo.enabledLayerCount = static_cast<uint32_t>(enabledDeviceLayerNames.size());
    createInfo.ppEnabledLayerNames = enabledDeviceLayerNames.data();

    if (vkGetPhysicalDeviceProperties2KHR &&
        ExtensionFound(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME, deviceExtensionNames))
    {
        enabledDeviceExtensions.push_back(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
        enabledFeatures.pNext = &divisorFeatures;

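        // Query the implementation's maximum vertex attribute divisor through the properties2
        // pNext chain.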
        VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT divisorProperties = {};
        divisorProperties.sType =
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT;

        VkPhysicalDeviceProperties2 deviceProperties = {};
        deviceProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
        deviceProperties.pNext = &divisorProperties;

        vkGetPhysicalDeviceProperties2KHR(mPhysicalDevice, &deviceProperties);
        mMaxVertexAttribDivisor = divisorProperties.maxVertexAttribDivisor;

        createInfo.pNext = &enabledFeatures;
    }
    else
    {
        createInfo.pEnabledFeatures = &enabledFeatures.features;
    }

    if (vkGetPhysicalDeviceProperties2KHR)
    {
        VkPhysicalDeviceProperties2 deviceProperties = {};
        deviceProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
        deviceProperties.pNext = &mPhysicalDeviceSubgroupProperties;

        vkGetPhysicalDeviceProperties2KHR(mPhysicalDevice, &deviceProperties);
    }

    createInfo.enabledExtensionCount = static_cast<uint32_t>(enabledDeviceExtensions.size());
    createInfo.ppEnabledExtensionNames =
        enabledDeviceExtensions.empty() ? nullptr : enabledDeviceExtensions.data();

    ANGLE_VK_TRY(displayVk, vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));

    mCurrentQueueFamilyIndex = queueFamilyIndex;

    vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, 0, &mQueue);

    // Initialize the vulkan pipeline cache.
    bool success = false;
    ANGLE_TRY(initPipelineCache(displayVk, &mPipelineCache, &success));

    return angle::Result::Continue;
}

angle::Result RendererVk::selectPresentQueueForSurface(DisplayVk *displayVk,
                                                       VkSurfaceKHR surface,
                                                       uint32_t *presentQueueOut)
{
    // We've already initialized a device, and can't re-create it unless it's never been used.
    // TODO(jmadill): Handle the re-creation case if necessary.
    if (mDevice != VK_NULL_HANDLE)
    {
        ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());

        // Check if the current device supports present on this surface.
        VkBool32 supportsPresent = VK_FALSE;
        ANGLE_VK_TRY(displayVk,
                     vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
                                                          surface, &supportsPresent));

        if (supportsPresent == VK_TRUE)
        {
            *presentQueueOut = mCurrentQueueFamilyIndex;
            return angle::Result::Continue;
        }
    }

    // Find a graphics and present queue.
    Optional<uint32_t> newPresentQueue;
    uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
    constexpr VkQueueFlags kGraphicsAndCompute = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
    for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
    {
        const auto &queueInfo = mQueueFamilyProperties[queueIndex];
        if ((queueInfo.queueFlags & kGraphicsAndCompute) == kGraphicsAndCompute)
        {
            VkBool32 supportsPresent = VK_FALSE;
            ANGLE_VK_TRY(displayVk, vkGetPhysicalDeviceSurfaceSupportKHR(
                                        mPhysicalDevice, queueIndex, surface, &supportsPresent));

            if (supportsPresent == VK_TRUE)
            {
                newPresentQueue = queueIndex;
                break;
            }
        }
    }

    ANGLE_VK_CHECK(displayVk, newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
    ANGLE_TRY(initializeDevice(displayVk, newPresentQueue.value()));

    *presentQueueOut = newPresentQueue.value();
    return angle::Result::Continue;
}

std::string RendererVk::getVendorString() const
{
    return GetVendorString(mPhysicalDeviceProperties.vendorID);
}

std::string RendererVk::getRendererDescription() const
{
    std::stringstream strstr;

    uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;

    strstr << "Vulkan ";
    strstr << VK_VERSION_MAJOR(apiVersion) << ".";
    strstr << VK_VERSION_MINOR(apiVersion) << ".";
    strstr << VK_VERSION_PATCH(apiVersion);

    strstr << "(";

    // In the case of NVIDIA, deviceName does not necessarily contain "NVIDIA". Add "NVIDIA" so
    // that Vulkan end2end tests can be selectively disabled on NVIDIA. TODO(jmadill): should not
    // be needed after http://anglebug.com/1874 is fixed and end2end_tests use more sophisticated
    // driver detection.
    if (mPhysicalDeviceProperties.vendorID == VENDOR_ID_NVIDIA)
    {
        strstr << GetVendorString(mPhysicalDeviceProperties.vendorID) << " ";
    }

    strstr << mPhysicalDeviceProperties.deviceName;
    strstr << " (" << gl::FmtHex(mPhysicalDeviceProperties.deviceID) << ")";

    strstr << ")";

    return strstr.str();
}

gl::Version RendererVk::getMaxSupportedESVersion() const
{
    // Current highest supported version
    gl::Version maxVersion = gl::Version(3, 1);

    // Limit to ES3.0 if there are any blockers for 3.1.

    // ES3.1 requires at least one atomic counter buffer and four storage buffers in compute.
    // Atomic counter buffers are emulated with storage buffers. For simplicity, we always support
    // either none or IMPLEMENTATION_MAX_ATOMIC_COUNTER_BUFFERS atomic counter buffers. So if
    // Vulkan doesn't support at least that many storage buffers in compute, we don't support 3.1.
    const uint32_t kMinimumStorageBuffersForES31 =
        gl::limits::kMinimumComputeStorageBuffers + gl::IMPLEMENTATION_MAX_ATOMIC_COUNTER_BUFFERS;
    if (mPhysicalDeviceProperties.limits.maxPerStageDescriptorStorageBuffers <
        kMinimumStorageBuffersForES31)
    {
        maxVersion = std::min(maxVersion, gl::Version(3, 0));
    }

    // ES3.1 requires a maximum vertex attribute offset of at least 2047.
    // If the Vulkan implementation can't support that, we cannot support 3.1.
    if (mPhysicalDeviceProperties.limits.maxVertexInputAttributeOffset < 2047)
    {
        maxVersion = std::min(maxVersion, gl::Version(3, 0));
    }

    // Limit to ES2.0 if there are any blockers for 3.0.

    // If the command buffer doesn't support queries, we can't support ES3.
    if (!vk::CommandBuffer::SupportsQueries(mPhysicalDeviceFeatures))
    {
        maxVersion = std::min(maxVersion, gl::Version(2, 0));
    }

    // If independentBlend is not supported, we can't have a mix of has-alpha and emulated-alpha
    // render targets in a framebuffer. We also cannot perform masked clears of multiple render
    // targets.
    if (!mPhysicalDeviceFeatures.independentBlend)
    {
        maxVersion = std::min(maxVersion, gl::Version(2, 0));
    }

    // If vertexPipelineStoresAndAtomics is not supported, we can't currently support transform
    // feedback. TODO(syoussefi): this should be conditioned to the extension not being present as
    // well, when that code path is implemented. http://anglebug.com/3206
    if (!mPhysicalDeviceFeatures.vertexPipelineStoresAndAtomics)
    {
        maxVersion = std::min(maxVersion, gl::Version(2, 0));
    }

    return maxVersion;
}

gl::Version RendererVk::getMaxConformantESVersion() const
{
    return std::min(getMaxSupportedESVersion(), gl::Version(2, 0));
}

void RendererVk::initFeatures(const ExtensionNameList &deviceExtensionNames)
{
    // Use OpenGL line rasterization rules by default.
    mFeatures.basicGLLineRasterization.enabled = true;

    if ((mPhysicalDeviceProperties.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) ||
        ExtensionFound(VK_KHR_MAINTENANCE1_EXTENSION_NAME, deviceExtensionNames))
    {
        // TODO(lucferron): Currently disabled on Intel only since many tests are failing and need
        // investigation. http://anglebug.com/2728
        mFeatures.flipViewportY.enabled = !IsIntel(mPhysicalDeviceProperties.vendorID);
    }

#ifdef ANGLE_PLATFORM_WINDOWS
    // http://anglebug.com/2838
    mFeatures.extraCopyBufferRegion.enabled = IsIntel(mPhysicalDeviceProperties.vendorID);

    // http://anglebug.com/3055
    mFeatures.forceCpuPathForCubeMapCopy.enabled = IsIntel(mPhysicalDeviceProperties.vendorID);
#endif

    angle::PlatformMethods *platform = ANGLEPlatformCurrent();
    platform->overrideFeaturesVk(platform, &mFeatures);

    // Work around incorrect NVIDIA point size range clamping.
    // TODO(jmadill): Narrow driver range once fixed. http://anglebug.com/2970
    if (IsNvidia(mPhysicalDeviceProperties.vendorID))
    {
        mFeatures.clampPointSize.enabled = true;
    }

#if defined(ANGLE_PLATFORM_ANDROID)
    // Work around ineffective compute-graphics barriers on Nexus 5X.
    // TODO(syoussefi): Figure out which other vendors and driver versions are affected.
    // http://anglebug.com/3019
    mFeatures.flushAfterVertexConversion.enabled =
        IsNexus5X(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID);
#endif

    if (ExtensionFound(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME, deviceExtensionNames))
    {
        mFeatures.supportsIncrementalPresent.enabled = true;
    }

#if defined(ANGLE_PLATFORM_ANDROID)
    mFeatures.supportsAndroidHardwareBuffer.enabled =
        ExtensionFound(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME,
                       deviceExtensionNames) &&
        ExtensionFound(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, deviceExtensionNames);
#endif

    if (ExtensionFound(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME, deviceExtensionNames))
    {
        mFeatures.supportsExternalMemoryFd.enabled = true;
    }

    if (ExtensionFound(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, deviceExtensionNames))
    {
        mFeatures.supportsExternalSemaphoreFd.enabled = true;
    }

    if (ExtensionFound(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, deviceExtensionNames))
    {
        mFeatures.supportsShaderStencilExport.enabled = true;
    }

    // TODO(syoussefi): when the code path using the extension is implemented, this should be
    // conditioned to the extension not being present as well. http://anglebug.com/3206
    if (mPhysicalDeviceFeatures.vertexPipelineStoresAndAtomics)
    {
        mFeatures.emulateTransformFeedback.enabled = true;
    }

    if (IsLinux() && IsIntel(mPhysicalDeviceProperties.vendorID))
    {
        mFeatures.disableFifoPresentMode.enabled = true;
    }

    if (IsAndroid() && IsQualcomm(mPhysicalDeviceProperties.vendorID))
    {
        if (vk::CommandBuffer::ExecutesInline())
        {
            mFeatures.restartRenderPassAfterLoadOpClear.enabled = true;
        }

        mFeatures.bindEmptyForUnusedDescriptorSets.enabled = true;
    }

    if (IsWindows() && IsIntel(mPhysicalDeviceProperties.vendorID))
    {
        mFeatures.forceNonZeroScissor.enabled = true;
    }

    if (IsIntel(mPhysicalDeviceProperties.vendorID) ||
        (IsWindows() && IsAMD(mPhysicalDeviceProperties.vendorID)))
    {
        mFeatures.perFrameWindowSizeQuery.enabled = true;
    }

    if (IsWindows() && IsAMD(mPhysicalDeviceProperties.vendorID))
    {
        // Disabled on AMD/windows due to buggy behavior.
        mFeatures.disallowSeamfulCubeMapEmulation.enabled = true;
    }

    if (IsAndroid() && IsQualcomm(mPhysicalDeviceProperties.vendorID))
    {
        mFeatures.forceD16TexFilter.enabled = true;
    }

    if (IsAndroid() && IsQualcomm(mPhysicalDeviceProperties.vendorID))
    {
        mFeatures.disableFlippingBlitWithCommand.enabled = true;
    }

    if (IsPixel2(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID) ||
        IsPixel1XL(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID))
    {
        mFeatures.transientCommandBuffer.enabled = true;
    }
}

void RendererVk::initPipelineCacheVkKey()
{
    std::ostringstream hashStream("ANGLE Pipeline Cache: ", std::ios_base::ate);
    // Add the pipeline cache UUID to make sure the blob cache always gives a compatible pipeline
    // cache. It's not particularly necessary to write it as a hex number as done here, so long as
    // there is no '\0' in the result.
    for (const uint32_t c : mPhysicalDeviceProperties.pipelineCacheUUID)
    {
        hashStream << std::hex << c;
    }
    // Add the vendor and device id too for good measure.
    hashStream << std::hex << mPhysicalDeviceProperties.vendorID;
    hashStream << std::hex << mPhysicalDeviceProperties.deviceID;

    const std::string &hashString = hashStream.str();
    angle::base::SHA1HashBytes(reinterpret_cast<const unsigned char *>(hashString.c_str()),
                               hashString.length(), mPipelineCacheVkBlobKey.data());
}

angle::Result RendererVk::initPipelineCache(DisplayVk *display,
                                            vk::PipelineCache *pipelineCache,
                                            bool *success)
{
    initPipelineCacheVkKey();

    egl::BlobCache::Value initialData;
    *success = display->getBlobCache()->get(display->getScratchBuffer(), mPipelineCacheVkBlobKey,
                                            &initialData);

    VkPipelineCacheCreateInfo pipelineCacheCreateInfo = {};

    pipelineCacheCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    pipelineCacheCreateInfo.flags = 0;
    pipelineCacheCreateInfo.initialDataSize = *success ? initialData.size() : 0;
    pipelineCacheCreateInfo.pInitialData = *success ? initialData.data() : nullptr;

    ANGLE_VK_TRY(display, pipelineCache->init(mDevice, pipelineCacheCreateInfo));

    return angle::Result::Continue;
}

angle::Result RendererVk::getPipelineCache(vk::PipelineCache **pipelineCache)
{
    if (mPipelineCacheInitialized)
    {
        *pipelineCache = &mPipelineCache;
        return angle::Result::Continue;
    }

    // We should now recreate the pipeline cache with the blob cache pipeline data.
    vk::PipelineCache pCache;
    bool success = false;
    ANGLE_TRY(initPipelineCache(vk::GetImpl(mDisplay), &pCache, &success));
    if (success)
    {
        // Merge the newly created pipeline cache into the existing one.
        mPipelineCache.merge(mDevice, mPipelineCache.getHandle(), 1, pCache.ptr());
    }
    mPipelineCacheInitialized = true;
    pCache.destroy(mDevice);

    *pipelineCache = &mPipelineCache;
    return angle::Result::Continue;
}

const gl::Caps &RendererVk::getNativeCaps() const
{
    ensureCapsInitialized();
    return mNativeCaps;
}

const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
{
    ensureCapsInitialized();
    return mNativeTextureCaps;
}

const gl::Extensions &RendererVk::getNativeExtensions() const
{
    ensureCapsInitialized();
    return mNativeExtensions;
}

const gl::Limitations &RendererVk::getNativeLimitations() const
{
    ensureCapsInitialized();
    return mNativeLimitations;
}

angle::Result RendererVk::getDescriptorSetLayout(
    vk::Context *context,
    const vk::DescriptorSetLayoutDesc &desc,
    vk::BindingPointer<vk::DescriptorSetLayout> *descriptorSetLayoutOut)
{
    std::lock_guard<decltype(mDescriptorSetLayoutCacheMutex)> lock(mDescriptorSetLayoutCacheMutex);
    return mDescriptorSetLayoutCache.getDescriptorSetLayout(context, desc, descriptorSetLayoutOut);
}

angle::Result RendererVk::getPipelineLayout(
    vk::Context *context,
    const vk::PipelineLayoutDesc &desc,
    const vk::DescriptorSetLayoutPointerArray &descriptorSetLayouts,
    vk::BindingPointer<vk::PipelineLayout> *pipelineLayoutOut)
{
    std::lock_guard<decltype(mPipelineLayoutCacheMutex)> lock(mPipelineLayoutCacheMutex);
    return mPipelineLayoutCache.getPipelineLayout(context, desc, descriptorSetLayouts,
                                                  pipelineLayoutOut);
}

angle::Result RendererVk::syncPipelineCacheVk(DisplayVk *displayVk)
{
    // TODO: Synchronize access to the pipeline/blob caches?
    ASSERT(mPipelineCache.valid());

    if (--mPipelineCacheVkUpdateTimeout > 0)
    {
        return angle::Result::Continue;
    }
    if (!mPipelineCacheDirty)
    {
        mPipelineCacheVkUpdateTimeout = kPipelineCacheVkUpdatePeriod;
        return angle::Result::Continue;
    }

    mPipelineCacheVkUpdateTimeout = kPipelineCacheVkUpdatePeriod;

    // Get the size of the cache.
    size_t pipelineCacheSize = 0;
    VkResult result = mPipelineCache.getCacheData(mDevice, &pipelineCacheSize, nullptr);
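    // A size-only query (null pData) is expected to return VK_SUCCESS; VK_INCOMPLETE is tolerated
    // here defensively rather than treated as an error.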
    if (result != VK_INCOMPLETE)
    {
        ANGLE_VK_TRY(displayVk, result);
    }

    angle::MemoryBuffer *pipelineCacheData = nullptr;
    ANGLE_VK_CHECK_ALLOC(displayVk,
                         displayVk->getScratchBuffer(pipelineCacheSize, &pipelineCacheData));

    size_t originalPipelineCacheSize = pipelineCacheSize;
    result = mPipelineCache.getCacheData(mDevice, &pipelineCacheSize, pipelineCacheData->data());
    // Note: we don't accept VK_INCOMPLETE here since we don't expect it (the full size of the
    // cache was determined just above); receiving it would hint at an implementation bug we would
    // want to know about early.
    ASSERT(result != VK_INCOMPLETE);
    ANGLE_VK_TRY(displayVk, result);

    // If vkGetPipelineCacheData ends up writing fewer bytes than requested, zero out the rest of
    // the buffer to avoid leaking garbage memory.
    ASSERT(pipelineCacheSize <= originalPipelineCacheSize);
    if (pipelineCacheSize < originalPipelineCacheSize)
    {
        memset(pipelineCacheData->data() + pipelineCacheSize, 0,
               originalPipelineCacheSize - pipelineCacheSize);
    }

    displayVk->getBlobCache()->putApplication(mPipelineCacheVkBlobKey, *pipelineCacheData);
    mPipelineCacheDirty = false;

    return angle::Result::Continue;
}

Serial RendererVk::issueShaderSerial()
{
    return mShaderSerialFactory.generate();
}

// These functions look at the mandatory format for support, and fall back to querying the device
// (if necessary) to test the availability of the bits.
bool RendererVk::hasLinearImageFormatFeatureBits(VkFormat format,
                                                 const VkFormatFeatureFlags featureBits)
{
    return hasFormatFeatureBits<&VkFormatProperties::linearTilingFeatures>(format, featureBits);
}

VkFormatFeatureFlags RendererVk::getImageFormatFeatureBits(VkFormat format,
                                                           const VkFormatFeatureFlags featureBits)
{
    return getFormatFeatureBits<&VkFormatProperties::optimalTilingFeatures>(format, featureBits);
}

bool RendererVk::hasImageFormatFeatureBits(VkFormat format, const VkFormatFeatureFlags featureBits)
{
    return hasFormatFeatureBits<&VkFormatProperties::optimalTilingFeatures>(format, featureBits);
}

bool RendererVk::hasBufferFormatFeatureBits(VkFormat format, const VkFormatFeatureFlags featureBits)
{
    return hasFormatFeatureBits<&VkFormatProperties::bufferFeatures>(format, featureBits);
}

angle::Result RendererVk::queueSubmit(vk::Context *context,
                                      const VkSubmitInfo &submitInfo,
                                      const vk::Fence &fence)
{
    {
        std::lock_guard<decltype(mQueueMutex)> lock(mQueueMutex);
        ANGLE_VK_TRY(context, vkQueueSubmit(mQueue, 1, &submitInfo, fence.getHandle()));
    }

    ANGLE_TRY(cleanupGarbage(context, false));

    return angle::Result::Continue;
}

angle::Result RendererVk::queueWaitIdle(vk::Context *context)
{
    {
        std::lock_guard<decltype(mQueueMutex)> lock(mQueueMutex);
        ANGLE_VK_TRY(context, vkQueueWaitIdle(mQueue));
    }

    ANGLE_TRY(cleanupGarbage(context, false));

    return angle::Result::Continue;
}

VkResult RendererVk::queuePresent(const VkPresentInfoKHR &presentInfo)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "RendererVk::queuePresent");

    std::lock_guard<decltype(mQueueMutex)> lock(mQueueMutex);

    {
        ANGLE_TRACE_EVENT0("gpu.angle", "vkQueuePresentKHR");
        return vkQueuePresentKHR(mQueue, &presentInfo);
    }
}

Serial RendererVk::nextSerial()
{
    return mQueueSerialFactory.generate();
}

angle::Result RendererVk::newSharedFence(vk::Context *context,
                                         vk::Shared<vk::Fence> *sharedFenceOut)
{
    vk::Fence fence;
    if (mFenceRecycler.empty())
    {
        VkFenceCreateInfo fenceCreateInfo = {};
        fenceCreateInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        fenceCreateInfo.flags = 0;
        ANGLE_VK_TRY(context, fence.init(mDevice, fenceCreateInfo));
    }
    else
    {
        mFenceRecycler.fetch(mDevice, &fence);
        ANGLE_VK_TRY(context, fence.reset(mDevice));
    }
    sharedFenceOut->assign(mDevice, std::move(fence));
    return angle::Result::Continue;
}

void RendererVk::addGarbage(vk::Shared<vk::Fence> &&fence,
                            std::vector<vk::GarbageObjectBase> &&garbage)
{
    std::vector<vk::Shared<vk::Fence>> fences;
    fences.push_back(std::move(fence));
    addGarbage(std::move(fences), std::move(garbage));
}

void RendererVk::addGarbage(std::vector<vk::Shared<vk::Fence>> &&fences,
                            std::vector<vk::GarbageObjectBase> &&garbage)
{
    std::lock_guard<decltype(mGarbageMutex)> lock(mGarbageMutex);
    mFencedGarbage.emplace_back(std::move(fences), std::move(garbage));
}

template <VkFormatFeatureFlags VkFormatProperties::*features>
VkFormatFeatureFlags RendererVk::getFormatFeatureBits(VkFormat format,
                                                      const VkFormatFeatureFlags featureBits)
{
    ASSERT(static_cast<uint32_t>(format) < vk::kNumVkFormats);
    VkFormatProperties &deviceProperties = mFormatProperties[format];
    if (deviceProperties.bufferFeatures == kInvalidFormatFeatureFlags)
    {
        // If we don't have the actual device features, see if the requested features are
        // mandatory. If so, there's no need to query the device.
        const VkFormatProperties &mandatoryProperties = vk::GetMandatoryFormatSupport(format);
        if (IsMaskFlagSet(mandatoryProperties.*features, featureBits))
        {
            return featureBits;
        }

        // Otherwise query the format features and cache it.
        vkGetPhysicalDeviceFormatProperties(mPhysicalDevice, format, &deviceProperties);
        // Workaround for some Android devices that should, but don't, indicate filtering
        // support for D16_UNORM.
        if (mFeatures.forceD16TexFilter.enabled && format == VK_FORMAT_D16_UNORM)
        {
            deviceProperties.*features |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
        }
    }

    return deviceProperties.*features & featureBits;
}

template <VkFormatFeatureFlags VkFormatProperties::*features>
bool RendererVk::hasFormatFeatureBits(VkFormat format, const VkFormatFeatureFlags featureBits)
{
    return IsMaskFlagSet(getFormatFeatureBits<features>(format, featureBits), featureBits);
}

angle::Result RendererVk::cleanupGarbage(vk::Context *context, bool block)
{
    std::lock_guard<decltype(mGarbageMutex)> lock(mGarbageMutex);

    auto garbageIter = mFencedGarbage.begin();
    while (garbageIter != mFencedGarbage.end())
    {
        ANGLE_TRY(WaitFences(context, &garbageIter->first, block));
        if (garbageIter->first.empty())
        {
            for (vk::GarbageObjectBase &garbageObject : garbageIter->second)
            {
                garbageObject.destroy(mDevice);
            }
            garbageIter = mFencedGarbage.erase(garbageIter);
        }
        else
        {
            garbageIter++;
        }
    }

    return angle::Result::Continue;
}

}  // namespace rx