1 //
2 // Copyright 2016 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // RendererVk.cpp:
7 // Implements the class methods for RendererVk.
8 //
9
10 #include "libANGLE/renderer/vulkan/RendererVk.h"
11
12 // Placing this first seems to solve an intellisense bug.
13 #include "libANGLE/renderer/vulkan/vk_utils.h"
14
15 #include <EGL/eglext.h>
16
17 #include "common/debug.h"
18 #include "common/platform.h"
19 #include "common/system_utils.h"
20 #include "gpu_info_util/SystemInfo.h"
21 #include "libANGLE/Context.h"
22 #include "libANGLE/Display.h"
23 #include "libANGLE/renderer/driver_utils.h"
24 #include "libANGLE/renderer/glslang_wrapper_utils.h"
25 #include "libANGLE/renderer/vulkan/CompilerVk.h"
26 #include "libANGLE/renderer/vulkan/ContextVk.h"
27 #include "libANGLE/renderer/vulkan/DisplayVk.h"
28 #include "libANGLE/renderer/vulkan/FramebufferVk.h"
29 #include "libANGLE/renderer/vulkan/ProgramVk.h"
30 #include "libANGLE/renderer/vulkan/ResourceVk.h"
31 #include "libANGLE/renderer/vulkan/VertexArrayVk.h"
32 #include "libANGLE/renderer/vulkan/vk_caps_utils.h"
33 #include "libANGLE/renderer/vulkan/vk_format_utils.h"
34 #include "libANGLE/trace.h"
35 #include "platform/Platform.h"
36
37 // Consts
namespace
{
// Vendor/device IDs advertised by the Vulkan mock ICD; used to recognize it when filtering
// physical devices (see GetFilterForICD).
const uint32_t kMockVendorID = 0xba5eba11;
const uint32_t kMockDeviceID = 0xf005ba11;
constexpr char kMockDeviceName[] = "Vulkan Mock Device";
constexpr char kSwiftShaderDeviceName[] = "SwiftShader Device";
// Sentinel stored in mFormatProperties entries that have not been queried yet (the RendererVk
// constructor fills the table with this value).
constexpr VkFormatFeatureFlags kInvalidFormatFeatureFlags = static_cast<VkFormatFeatureFlags>(-1);
}  // anonymous namespace
46
47 namespace rx
48 {
49
50 namespace
51 {
// Update the pipeline cache every this many swaps.  Consumed through
// mPipelineCacheVkUpdateTimeout in the RendererVk constructor.
constexpr uint32_t kPipelineCacheVkUpdatePeriod = 60;
// Per the Vulkan specification, as long as Vulkan 1.1+ is returned by vkEnumerateInstanceVersion,
// ANGLE must indicate the highest version of Vulkan functionality that it uses.  The Vulkan
// validation layers will issue messages for any core functionality that requires a higher version.
// This value must be increased whenever ANGLE starts using functionality from a newer core
// version of Vulkan.
constexpr uint32_t kPreferredVulkanAPIVersion = VK_API_VERSION_1_1;
60
ChooseICDFromAttribs(const egl::AttributeMap & attribs)61 vk::ICD ChooseICDFromAttribs(const egl::AttributeMap &attribs)
62 {
63 #if !defined(ANGLE_PLATFORM_ANDROID)
64 // Mock ICD does not currently run on Android
65 EGLAttrib deviceType = attribs.get(EGL_PLATFORM_ANGLE_DEVICE_TYPE_ANGLE,
66 EGL_PLATFORM_ANGLE_DEVICE_TYPE_HARDWARE_ANGLE);
67
68 switch (deviceType)
69 {
70 case EGL_PLATFORM_ANGLE_DEVICE_TYPE_HARDWARE_ANGLE:
71 break;
72 case EGL_PLATFORM_ANGLE_DEVICE_TYPE_NULL_ANGLE:
73 return vk::ICD::Mock;
74 case EGL_PLATFORM_ANGLE_DEVICE_TYPE_SWIFTSHADER_ANGLE:
75 return vk::ICD::SwiftShader;
76 default:
77 UNREACHABLE();
78 break;
79 }
80 #endif // !defined(ANGLE_PLATFORM_ANDROID)
81
82 return vk::ICD::Default;
83 }
84
// Lexicographic "less than" for C strings; the comparator used to keep extension-name lists
// sorted and to search them.
bool StrLess(const char *first, const char *second)
{
    return strcmp(first, second) < 0;
}
89
ExtensionFound(const char * needle,const RendererVk::ExtensionNameList & haystack)90 bool ExtensionFound(const char *needle, const RendererVk::ExtensionNameList &haystack)
91 {
92 // NOTE: The list must be sorted.
93 return std::binary_search(haystack.begin(), haystack.end(), needle, StrLess);
94 }
95
VerifyExtensionsPresent(const RendererVk::ExtensionNameList & haystack,const RendererVk::ExtensionNameList & needles)96 VkResult VerifyExtensionsPresent(const RendererVk::ExtensionNameList &haystack,
97 const RendererVk::ExtensionNameList &needles)
98 {
99 // NOTE: The lists must be sorted.
100 if (std::includes(haystack.begin(), haystack.end(), needles.begin(), needles.end(), StrLess))
101 {
102 return VK_SUCCESS;
103 }
104 for (const char *needle : needles)
105 {
106 if (!ExtensionFound(needle, haystack))
107 {
108 ERR() << "Extension not supported: " << needle;
109 }
110 }
111 return VK_ERROR_EXTENSION_NOT_PRESENT;
112 }
113
// Array of Validation error/warning messages that will be ignored, should include bugID.
// Entries are matched as substrings (via strstr in IsIgnoredDebugMessage), so a full message
// ID is not required.
constexpr const char *kSkippedMessages[] = {
    // http://anglebug.com/2866
    "UNASSIGNED-CoreValidation-Shader-OutputNotConsumed",
    // http://anglebug.com/2796
    "UNASSIGNED-CoreValidation-Shader-PointSizeMissing",
    // http://anglebug.com/3832
    "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428",
    // http://anglebug.com/3450
    "VUID-vkDestroySemaphore-semaphore-parameter",
    // http://anglebug.com/4063
    "VUID-VkDeviceCreateInfo-pNext-pNext",
    "VUID-VkPipelineRasterizationStateCreateInfo-pNext-pNext",
    "VUID_Undefined",
    // http://anglebug.com/3078
    "UNASSIGNED-CoreValidation-Shader-InterfaceTypeMismatch",
    // http://anglebug.com/4510
    "VUID-vkQueuePresentKHR-pWaitSemaphores-03268",
    // http://anglebug.com/4572
    "VUID-vkCmdCopyImageToBuffer-srcImage-01998",
    // http://anglebug.com/4577
    "VUID-vkCmdClearColorImage-image-01993",
    // http://anglebug.com/4578 for next two
    "VUID-vkCmdBlitImage-srcImage-01999",
    "VUID-vkCmdBlitImage-filter-02001",
    // http://anglebug.com/4579
    "VUID-vkCmdBlitImage-dstImage-02000",
    // http://anglebug.com/4580
    "VUID-vkCmdResolveImage-dstImage-02003",
};
144
145 // Suppress validation errors that are known
146 // return "true" if given code/prefix/message is known, else return "false"
IsIgnoredDebugMessage(const char * message)147 bool IsIgnoredDebugMessage(const char *message)
148 {
149 if (!message)
150 {
151 return false;
152 }
153 for (const char *msg : kSkippedMessages)
154 {
155 if (strstr(message, msg) != nullptr)
156 {
157 return true;
158 }
159 }
160 return false;
161 }
162
// Maps a VkObjectType enum to a human-readable name, used when formatting the object list of
// a validation message in DebugUtilsMessenger.
const char *GetVkObjectTypeName(VkObjectType type)
{
    switch (type)
    {
        case VK_OBJECT_TYPE_UNKNOWN:
            return "Unknown";
        case VK_OBJECT_TYPE_INSTANCE:
            return "Instance";
        case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
            return "Physical Device";
        case VK_OBJECT_TYPE_DEVICE:
            return "Device";
        case VK_OBJECT_TYPE_QUEUE:
            return "Queue";
        case VK_OBJECT_TYPE_SEMAPHORE:
            return "Semaphore";
        case VK_OBJECT_TYPE_COMMAND_BUFFER:
            return "Command Buffer";
        case VK_OBJECT_TYPE_FENCE:
            return "Fence";
        case VK_OBJECT_TYPE_DEVICE_MEMORY:
            return "Device Memory";
        case VK_OBJECT_TYPE_BUFFER:
            return "Buffer";
        case VK_OBJECT_TYPE_IMAGE:
            return "Image";
        case VK_OBJECT_TYPE_EVENT:
            return "Event";
        case VK_OBJECT_TYPE_QUERY_POOL:
            return "Query Pool";
        case VK_OBJECT_TYPE_BUFFER_VIEW:
            return "Buffer View";
        case VK_OBJECT_TYPE_IMAGE_VIEW:
            return "Image View";
        case VK_OBJECT_TYPE_SHADER_MODULE:
            return "Shader Module";
        case VK_OBJECT_TYPE_PIPELINE_CACHE:
            return "Pipeline Cache";
        case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
            return "Pipeline Layout";
        case VK_OBJECT_TYPE_RENDER_PASS:
            return "Render Pass";
        case VK_OBJECT_TYPE_PIPELINE:
            return "Pipeline";
        case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
            return "Descriptor Set Layout";
        case VK_OBJECT_TYPE_SAMPLER:
            return "Sampler";
        case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
            return "Descriptor Pool";
        case VK_OBJECT_TYPE_DESCRIPTOR_SET:
            return "Descriptor Set";
        case VK_OBJECT_TYPE_FRAMEBUFFER:
            return "Framebuffer";
        case VK_OBJECT_TYPE_COMMAND_POOL:
            return "Command Pool";
        case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION:
            return "Sampler YCbCr Conversion";
        case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE:
            return "Descriptor Update Template";
        case VK_OBJECT_TYPE_SURFACE_KHR:
            return "Surface";
        case VK_OBJECT_TYPE_SWAPCHAIN_KHR:
            return "Swapchain";
        case VK_OBJECT_TYPE_DISPLAY_KHR:
            return "Display";
        case VK_OBJECT_TYPE_DISPLAY_MODE_KHR:
            return "Display Mode";
        case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT:
            return "Debug Report Callback";
        case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV:
            return "Indirect Commands Layout";
        case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT:
            return "Debug Utils Messenger";
        case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT:
            return "Validation Cache";
        case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV:
            return "Acceleration Structure";
        default:
            return "<Unrecognized>";
    }
}
245
// This function is unused on Android/Fuschia/GGP.
// Turns an ICD JSON path into the value to place in the loader's environment variable.  On
// macOS the driver JSON is bundled into the application directory, so the helper-executable
// directory is prepended; elsewhere the path is used as-is.
#if !defined(ANGLE_PLATFORM_ANDROID) && !defined(ANGLE_PLATFORM_FUCHSIA) && \
    !defined(ANGLE_PLATFORM_GGP)
const std::string WrapICDEnvironment(const char *icdEnvironment)
{
#if defined(ANGLE_PLATFORM_APPLE)
    // On MacOS the libraries are bundled into the application directory.  Using #else below
    // (rather than falling through) avoids an unreachable return on this path.
    return angle::GetHelperExecutableDir() + icdEnvironment;
#else
    return icdEnvironment;
#endif  // defined(ANGLE_PLATFORM_APPLE)
}
#endif  // !defined(ANGLE_PLATFORM_ANDROID) && !defined(ANGLE_PLATFORM_FUCHSIA) &&
        // !defined(ANGLE_PLATFORM_GGP)
260
// Callback registered with VK_EXT_debug_utils.  Formats the validation message together with
// the queue/command-buffer debug-label hierarchy and the list of involved objects, hands the
// formatted text to the owning RendererVk (passed via |userData|), and logs it as an error or
// warning depending on |messageSeverity|.  Always returns VK_FALSE so the triggering Vulkan
// call is not aborted.
VKAPI_ATTR VkBool32 VKAPI_CALL
DebugUtilsMessenger(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                    VkDebugUtilsMessageTypeFlagsEXT messageTypes,
                    const VkDebugUtilsMessengerCallbackDataEXT *callbackData,
                    void *userData)
{
    // See if it's an issue we are aware of and don't want to be spammed about.
    if (IsIgnoredDebugMessage(callbackData->pMessageIdName))
    {
        return VK_FALSE;
    }

    std::ostringstream log;
    if (callbackData->pMessageIdName)
    {
        log << "[ " << callbackData->pMessageIdName << " ] ";
    }
    log << callbackData->pMessage << std::endl;

    // Aesthetic value based on length of the function name, line number, etc.
    constexpr size_t kStartIndent = 28;

    // Output the debug marker hierarchy under which this error has occured.
    // Each nested label is indented one extra column (hence indent++ per line).
    size_t indent = kStartIndent;
    if (callbackData->queueLabelCount > 0)
    {
        log << std::string(indent++, ' ') << "<Queue Label Hierarchy:>" << std::endl;
        for (uint32_t i = 0; i < callbackData->queueLabelCount; ++i)
        {
            log << std::string(indent++, ' ') << callbackData->pQueueLabels[i].pLabelName
                << std::endl;
        }
    }
    if (callbackData->cmdBufLabelCount > 0)
    {
        log << std::string(indent++, ' ') << "<Command Buffer Label Hierarchy:>" << std::endl;
        for (uint32_t i = 0; i < callbackData->cmdBufLabelCount; ++i)
        {
            log << std::string(indent++, ' ') << callbackData->pCmdBufLabels[i].pLabelName
                << std::endl;
        }
    }
    // Output the objects involved in this error message.
    if (callbackData->objectCount > 0)
    {
        for (uint32_t i = 0; i < callbackData->objectCount; ++i)
        {
            const char *objectName = callbackData->pObjects[i].pObjectName;
            const char *objectType = GetVkObjectTypeName(callbackData->pObjects[i].objectType);
            uint64_t objectHandle = callbackData->pObjects[i].objectHandle;
            log << std::string(indent, ' ') << "Object: ";
            if (objectHandle == 0)
            {
                log << "VK_NULL_HANDLE";
            }
            else
            {
                log << "0x" << std::hex << objectHandle << std::dec;
            }
            log << " (type = " << objectType << "(" << callbackData->pObjects[i].objectType << "))";
            if (objectName)
            {
                log << " [" << objectName << "]";
            }
            log << std::endl;
        }
    }

    bool isError = (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) != 0;
    std::string msg = log.str();

    // Hand the formatted message to the owning renderer before logging it.
    RendererVk *rendererVk = static_cast<RendererVk *>(userData);
    rendererVk->onNewValidationMessage(msg);

    if (isError)
    {
        ERR() << msg;
    }
    else
    {
        WARN() << msg;
    }

    return VK_FALSE;
}
346
DebugReportCallback(VkDebugReportFlagsEXT flags,VkDebugReportObjectTypeEXT objectType,uint64_t object,size_t location,int32_t messageCode,const char * layerPrefix,const char * message,void * userData)347 VKAPI_ATTR VkBool32 VKAPI_CALL DebugReportCallback(VkDebugReportFlagsEXT flags,
348 VkDebugReportObjectTypeEXT objectType,
349 uint64_t object,
350 size_t location,
351 int32_t messageCode,
352 const char *layerPrefix,
353 const char *message,
354 void *userData)
355 {
356 if (IsIgnoredDebugMessage(message))
357 {
358 return VK_FALSE;
359 }
360 if ((flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0)
361 {
362 ERR() << message;
363 #if !defined(NDEBUG)
364 // Abort the call in Debug builds.
365 return VK_TRUE;
366 #endif
367 }
368 else if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0)
369 {
370 WARN() << message;
371 }
372 else
373 {
374 // Uncomment this if you want Vulkan spam.
375 // WARN() << message;
376 }
377
378 return VK_FALSE;
379 }
380
381 // If we're loading the validation layers, we could be running from any random directory.
382 // Change to the executable directory so we can find the layers, then change back to the
383 // previous directory to be safe we don't disrupt the application.
384 class ScopedVkLoaderEnvironment : angle::NonCopyable
385 {
386 public:
ScopedVkLoaderEnvironment(bool enableValidationLayers,vk::ICD icd)387 ScopedVkLoaderEnvironment(bool enableValidationLayers, vk::ICD icd)
388 : mEnableValidationLayers(enableValidationLayers),
389 mICD(icd),
390 mChangedCWD(false),
391 mChangedICDEnv(false)
392 {
393 // Changing CWD and setting environment variables makes no sense on Android,
394 // since this code is a part of Java application there.
395 // Android Vulkan loader doesn't need this either.
396 #if !defined(ANGLE_PLATFORM_ANDROID) && !defined(ANGLE_PLATFORM_FUCHSIA) && \
397 !defined(ANGLE_PLATFORM_GGP)
398 if (icd == vk::ICD::Mock)
399 {
400 if (!setICDEnvironment(WrapICDEnvironment(ANGLE_VK_MOCK_ICD_JSON).c_str()))
401 {
402 ERR() << "Error setting environment for Mock/Null Driver.";
403 }
404 }
405 # if defined(ANGLE_VK_SWIFTSHADER_ICD_JSON)
406 else if (icd == vk::ICD::SwiftShader)
407 {
408 if (!setICDEnvironment(WrapICDEnvironment(ANGLE_VK_SWIFTSHADER_ICD_JSON).c_str()))
409 {
410 ERR() << "Error setting environment for SwiftShader.";
411 }
412 }
413 # endif // defined(ANGLE_VK_SWIFTSHADER_ICD_JSON)
414 if (mEnableValidationLayers || icd != vk::ICD::Default)
415 {
416 const auto &cwd = angle::GetCWD();
417 if (!cwd.valid())
418 {
419 ERR() << "Error getting CWD for Vulkan layers init.";
420 mEnableValidationLayers = false;
421 mICD = vk::ICD::Default;
422 }
423 else
424 {
425 mPreviousCWD = cwd.value();
426 std::string exeDir = angle::GetExecutableDirectory();
427 mChangedCWD = angle::SetCWD(exeDir.c_str());
428 if (!mChangedCWD)
429 {
430 ERR() << "Error setting CWD for Vulkan layers init.";
431 mEnableValidationLayers = false;
432 mICD = vk::ICD::Default;
433 }
434 }
435 }
436
437 // Override environment variable to use the ANGLE layers.
438 if (mEnableValidationLayers)
439 {
440 if (!angle::PrependPathToEnvironmentVar(vk::gLoaderLayersPathEnv, ANGLE_VK_LAYERS_DIR))
441 {
442 ERR() << "Error setting environment for Vulkan layers init.";
443 mEnableValidationLayers = false;
444 }
445 }
446 #endif // !defined(ANGLE_PLATFORM_ANDROID)
447 }
448
~ScopedVkLoaderEnvironment()449 ~ScopedVkLoaderEnvironment()
450 {
451 if (mChangedCWD)
452 {
453 #if !defined(ANGLE_PLATFORM_ANDROID)
454 ASSERT(mPreviousCWD.valid());
455 angle::SetCWD(mPreviousCWD.value().c_str());
456 #endif // !defined(ANGLE_PLATFORM_ANDROID)
457 }
458 if (mChangedICDEnv)
459 {
460 if (mPreviousICDEnv.value().empty())
461 {
462 angle::UnsetEnvironmentVar(vk::gLoaderICDFilenamesEnv);
463 }
464 else
465 {
466 angle::SetEnvironmentVar(vk::gLoaderICDFilenamesEnv,
467 mPreviousICDEnv.value().c_str());
468 }
469 }
470 }
471
canEnableValidationLayers() const472 bool canEnableValidationLayers() const { return mEnableValidationLayers; }
getEnabledICD() const473 vk::ICD getEnabledICD() const { return mICD; }
474
475 private:
setICDEnvironment(const char * icd)476 bool setICDEnvironment(const char *icd)
477 {
478 // Override environment variable to use built Mock ICD
479 // ANGLE_VK_ICD_JSON gets set to the built mock ICD in BUILD.gn
480 mPreviousICDEnv = angle::GetEnvironmentVar(vk::gLoaderICDFilenamesEnv);
481 mChangedICDEnv = angle::SetEnvironmentVar(vk::gLoaderICDFilenamesEnv, icd);
482
483 if (!mChangedICDEnv)
484 {
485 mICD = vk::ICD::Default;
486 }
487 return mChangedICDEnv;
488 }
489
490 bool mEnableValidationLayers;
491 vk::ICD mICD;
492 bool mChangedCWD;
493 Optional<std::string> mPreviousCWD;
494 bool mChangedICDEnv;
495 Optional<std::string> mPreviousICDEnv;
496 };
497
498 using ICDFilterFunc = std::function<bool(const VkPhysicalDeviceProperties &)>;
499
GetFilterForICD(vk::ICD preferredICD)500 ICDFilterFunc GetFilterForICD(vk::ICD preferredICD)
501 {
502 switch (preferredICD)
503 {
504 case vk::ICD::Mock:
505 return [](const VkPhysicalDeviceProperties &deviceProperties) {
506 return ((deviceProperties.vendorID == kMockVendorID) &&
507 (deviceProperties.deviceID == kMockDeviceID) &&
508 (strcmp(deviceProperties.deviceName, kMockDeviceName) == 0));
509 };
510 case vk::ICD::SwiftShader:
511 return [](const VkPhysicalDeviceProperties &deviceProperties) {
512 return (IsSwiftshader(deviceProperties.vendorID, deviceProperties.deviceID) &&
513 (strncmp(deviceProperties.deviceName, kSwiftShaderDeviceName,
514 strlen(kSwiftShaderDeviceName)) == 0));
515 };
516 default:
517 const std::string anglePreferredDevice =
518 angle::GetEnvironmentVar(vk::gANGLEPreferredDevice);
519 return [anglePreferredDevice](const VkPhysicalDeviceProperties &deviceProperties) {
520 return (anglePreferredDevice.empty() ||
521 anglePreferredDevice == deviceProperties.deviceName);
522 };
523 }
524 }
525
ChoosePhysicalDevice(const std::vector<VkPhysicalDevice> & physicalDevices,vk::ICD preferredICD,VkPhysicalDevice * physicalDeviceOut,VkPhysicalDeviceProperties * physicalDevicePropertiesOut)526 void ChoosePhysicalDevice(const std::vector<VkPhysicalDevice> &physicalDevices,
527 vk::ICD preferredICD,
528 VkPhysicalDevice *physicalDeviceOut,
529 VkPhysicalDeviceProperties *physicalDevicePropertiesOut)
530 {
531 ASSERT(!physicalDevices.empty());
532
533 ICDFilterFunc filter = GetFilterForICD(preferredICD);
534
535 for (const VkPhysicalDevice &physicalDevice : physicalDevices)
536 {
537 vkGetPhysicalDeviceProperties(physicalDevice, physicalDevicePropertiesOut);
538 if (filter(*physicalDevicePropertiesOut))
539 {
540 *physicalDeviceOut = physicalDevice;
541 return;
542 }
543 }
544 WARN() << "Preferred device ICD not found. Using default physicalDevice instead.";
545
546 // Fall back to first device.
547 *physicalDeviceOut = physicalDevices[0];
548 vkGetPhysicalDeviceProperties(*physicalDeviceOut, physicalDevicePropertiesOut);
549 }
550
ShouldUseValidationLayers(const egl::AttributeMap & attribs)551 bool ShouldUseValidationLayers(const egl::AttributeMap &attribs)
552 {
553 #if defined(ANGLE_ENABLE_VULKAN_VALIDATION_LAYERS_BY_DEFAULT)
554 return ShouldUseDebugLayers(attribs);
555 #else
556 EGLAttrib debugSetting =
557 attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE);
558 return debugSetting == EGL_TRUE;
559 #endif // defined(ANGLE_ENABLE_VULKAN_VALIDATION_LAYERS_BY_DEFAULT)
560 }
561
// Clamps |current| so the returned version never exceeds |lower|.
gl::Version LimitVersionTo(const gl::Version &current, const gl::Version &lower)
{
    return (lower < current) ? lower : current;
}
566 } // namespace
567
// RendererVk implementation.
RendererVk::RendererVk()
    : mDisplay(nullptr),
      mCapsInitialized(false),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mEnableDebugUtils(false),
      mEnabledICD(vk::ICD::Default),
      mDebugUtilsMessenger(VK_NULL_HANDLE),
      mDebugReportCallback(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mMaxVertexAttribDivisor(1),
      mMaxVertexAttribStride(0),
      mMinImportedHostPointerAlignment(1),
      mDevice(VK_NULL_HANDLE),
      mLastCompletedQueueSerial(mQueueSerialFactory.generate()),
      mCurrentQueueSerial(mQueueSerialFactory.generate()),
      mDeviceLost(false),
      mPipelineCacheVkUpdateTimeout(kPipelineCacheVkUpdatePeriod),
      mPipelineCacheDirty(false),
      mPipelineCacheInitialized(false),
      mGlslangInitialized(false)
{
    // Mark every cached format-properties entry as "not yet queried".
    VkFormatProperties invalid = {0, 0, kInvalidFormatFeatureFlags};
    mFormatProperties.fill(invalid);

    // We currently don't have any big-endian devices in the list of supported platforms. There are
    // a number of places in the Vulkan backend that make this assumption. This assertion is made
    // early to fail immediately on big-endian platforms.
    ASSERT(IsLittleEndian());
}
600
RendererVk::~RendererVk()
{
    // All shared garbage is expected to have been drained by onDestroy() before destruction.
    ASSERT(mSharedGarbage.empty());
}
605
// Tears down all renderer-owned Vulkan objects.  Destruction order matters: queues are
// drained first so garbage can be cleaned up, then caches/pools, then the device, the debug
// callback, and finally the instance.
void RendererVk::onDestroy()
{
    // Force all commands to finish by flushing all queues.
    for (VkQueue queue : mQueues)
    {
        if (queue != VK_NULL_HANDLE)
        {
            vkQueueWaitIdle(queue);
        }
    }

    // Then assign an infinite "last completed" serial to force garbage to delete.
    mLastCompletedQueueSerial = Serial::Infinite();
    (void)cleanupGarbage(true);
    ASSERT(mSharedGarbage.empty());

    for (PendingOneOffCommands &pending : mPendingOneOffCommands)
    {
        pending.commandBuffer.releaseHandle();
    }

    mOneOffCommandPool.destroy(mDevice);

    mFenceRecycler.destroy(mDevice);

    mPipelineLayoutCache.destroy(mDevice);
    mDescriptorSetLayoutCache.destroy(mDevice);

    mPipelineCache.destroy(mDevice);

    vma::DestroyAllocator(mAllocator);

    if (mGlslangInitialized)
    {
        GlslangRelease();
        mGlslangInitialized = false;
    }

    if (mDevice)
    {
        vkDestroyDevice(mDevice, nullptr);
        mDevice = VK_NULL_HANDLE;
    }

    if (mDebugUtilsMessenger)
    {
        ASSERT(mInstance && vkDestroyDebugUtilsMessengerEXT);
        vkDestroyDebugUtilsMessengerEXT(mInstance, mDebugUtilsMessenger, nullptr);

        // Only one of the debug-utils / debug-report callbacks is ever created.
        ASSERT(mDebugReportCallback == VK_NULL_HANDLE);
    }
    else if (mDebugReportCallback)
    {
        ASSERT(mInstance && vkDestroyDebugReportCallbackEXT);
        vkDestroyDebugReportCallbackEXT(mInstance, mDebugReportCallback, nullptr);
    }

    if (mInstance)
    {
        vkDestroyInstance(mInstance, nullptr);
        mInstance = VK_NULL_HANDLE;
    }

    mMemoryProperties.destroy();
    mPhysicalDevice = VK_NULL_HANDLE;
}
672
// Marks the renderer as device-lost: fast-forwards the completed serial to everything that
// was submitted (so no work is waited on), then informs the display.
void RendererVk::notifyDeviceLost()
{
    mLastCompletedQueueSerial = mLastSubmittedQueueSerial;
    mDeviceLost = true;
    mDisplay->notifyDeviceLost();
}
679
// Returns whether notifyDeviceLost() has been called on this renderer.
bool RendererVk::isDeviceLost() const
{
    return mDeviceLost;
}
684
initialize(DisplayVk * displayVk,egl::Display * display,const char * wsiExtension,const char * wsiLayer)685 angle::Result RendererVk::initialize(DisplayVk *displayVk,
686 egl::Display *display,
687 const char *wsiExtension,
688 const char *wsiLayer)
689 {
690 // Set all vk* function ptrs
691 ANGLE_VK_TRY(displayVk, volkInitialize());
692 mDisplay = display;
693 const egl::AttributeMap &attribs = mDisplay->getAttributeMap();
694 ScopedVkLoaderEnvironment scopedEnvironment(ShouldUseValidationLayers(attribs),
695 ChooseICDFromAttribs(attribs));
696 mEnableValidationLayers = scopedEnvironment.canEnableValidationLayers();
697 mEnabledICD = scopedEnvironment.getEnabledICD();
698
699 // Gather global layer properties.
700 uint32_t instanceLayerCount = 0;
701 ANGLE_VK_TRY(displayVk, vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));
702
703 std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
704 if (instanceLayerCount > 0)
705 {
706 ANGLE_VK_TRY(displayVk, vkEnumerateInstanceLayerProperties(&instanceLayerCount,
707 instanceLayerProps.data()));
708 }
709
710 VulkanLayerVector enabledInstanceLayerNames;
711 if (mEnableValidationLayers)
712 {
713 bool layersRequested =
714 (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) == EGL_TRUE);
715 mEnableValidationLayers = GetAvailableValidationLayers(instanceLayerProps, layersRequested,
716 &enabledInstanceLayerNames);
717 }
718
719 if (wsiLayer)
720 {
721 enabledInstanceLayerNames.push_back(wsiLayer);
722 }
723
724 // Enumerate instance extensions that are provided by the vulkan
725 // implementation and implicit layers.
726 uint32_t instanceExtensionCount = 0;
727 ANGLE_VK_TRY(displayVk,
728 vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr));
729
730 std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
731 if (instanceExtensionCount > 0)
732 {
733 ANGLE_VK_TRY(displayVk,
734 vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
735 instanceExtensionProps.data()));
736 }
737
738 // Enumerate instance extensions that are provided by explicit layers.
739 for (const char *layerName : enabledInstanceLayerNames)
740 {
741 uint32_t previousExtensionCount = static_cast<uint32_t>(instanceExtensionProps.size());
742 uint32_t instanceLayerExtensionCount = 0;
743 ANGLE_VK_TRY(displayVk, vkEnumerateInstanceExtensionProperties(
744 layerName, &instanceLayerExtensionCount, nullptr));
745 instanceExtensionProps.resize(previousExtensionCount + instanceLayerExtensionCount);
746 ANGLE_VK_TRY(displayVk, vkEnumerateInstanceExtensionProperties(
747 layerName, &instanceLayerExtensionCount,
748 instanceExtensionProps.data() + previousExtensionCount));
749 }
750
751 ExtensionNameList instanceExtensionNames;
752 if (!instanceExtensionProps.empty())
753 {
754 for (const VkExtensionProperties &i : instanceExtensionProps)
755 {
756 instanceExtensionNames.push_back(i.extensionName);
757 }
758 std::sort(instanceExtensionNames.begin(), instanceExtensionNames.end(), StrLess);
759 }
760
761 ExtensionNameList enabledInstanceExtensions;
762 enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
763 enabledInstanceExtensions.push_back(wsiExtension);
764 mEnableDebugUtils = mEnableValidationLayers &&
765 ExtensionFound(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, instanceExtensionNames);
766
767 bool enableDebugReport =
768 mEnableValidationLayers && !mEnableDebugUtils &&
769 ExtensionFound(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, instanceExtensionNames);
770
771 if (mEnableDebugUtils)
772 {
773 enabledInstanceExtensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
774 }
775 else if (enableDebugReport)
776 {
777 enabledInstanceExtensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
778 }
779
780 if (ExtensionFound(VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME, instanceExtensionNames))
781 {
782 enabledInstanceExtensions.push_back(VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME);
783 ANGLE_FEATURE_CONDITION(&mFeatures, supportsSwapchainColorspace, true);
784 }
785
786 // Verify the required extensions are in the extension names set. Fail if not.
787 std::sort(enabledInstanceExtensions.begin(), enabledInstanceExtensions.end(), StrLess);
788 ANGLE_VK_TRY(displayVk,
789 VerifyExtensionsPresent(instanceExtensionNames, enabledInstanceExtensions));
790
791 // Enable VK_KHR_get_physical_device_properties_2 if available.
792 if (ExtensionFound(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
793 instanceExtensionNames))
794 {
795 enabledInstanceExtensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
796 }
797
798 VkApplicationInfo applicationInfo = {};
799 applicationInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
800 applicationInfo.pApplicationName = "ANGLE";
801 applicationInfo.applicationVersion = 1;
802 applicationInfo.pEngineName = "ANGLE";
803 applicationInfo.engineVersion = 1;
804
805 auto enumerateInstanceVersion = reinterpret_cast<PFN_vkEnumerateInstanceVersion>(
806 vkGetInstanceProcAddr(mInstance, "vkEnumerateInstanceVersion"));
807 if (!enumerateInstanceVersion)
808 {
809 applicationInfo.apiVersion = VK_API_VERSION_1_0;
810 }
811 else
812 {
813 uint32_t apiVersion = VK_API_VERSION_1_0;
814 ANGLE_VK_TRY(displayVk, enumerateInstanceVersion(&apiVersion));
815 if ((VK_VERSION_MAJOR(apiVersion) > 1) || (VK_VERSION_MINOR(apiVersion) >= 1))
816 {
817 // This is the highest version of core Vulkan functionality that ANGLE uses.
818 applicationInfo.apiVersion = kPreferredVulkanAPIVersion;
819 }
820 else
821 {
822 // Since only 1.0 instance-level functionality is available, this must set to 1.0.
823 applicationInfo.apiVersion = VK_API_VERSION_1_0;
824 }
825 }
826
827 VkInstanceCreateInfo instanceInfo = {};
828 instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
829 instanceInfo.flags = 0;
830 instanceInfo.pApplicationInfo = &applicationInfo;
831
832 // Enable requested layers and extensions.
833 instanceInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
834 instanceInfo.ppEnabledExtensionNames =
835 enabledInstanceExtensions.empty() ? nullptr : enabledInstanceExtensions.data();
836 instanceInfo.enabledLayerCount = static_cast<uint32_t>(enabledInstanceLayerNames.size());
837 instanceInfo.ppEnabledLayerNames = enabledInstanceLayerNames.data();
838 ANGLE_VK_TRY(displayVk, vkCreateInstance(&instanceInfo, nullptr, &mInstance));
839 volkLoadInstance(mInstance);
840
841 if (mEnableDebugUtils)
842 {
843 // Use the newer EXT_debug_utils if it exists.
844 // Create the messenger callback.
845 VkDebugUtilsMessengerCreateInfoEXT messengerInfo = {};
846
847 constexpr VkDebugUtilsMessageSeverityFlagsEXT kSeveritiesToLog =
848 VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT |
849 VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;
850
851 constexpr VkDebugUtilsMessageTypeFlagsEXT kMessagesToLog =
852 VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
853 VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
854 VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
855
856 messengerInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
857 messengerInfo.messageSeverity = kSeveritiesToLog;
858 messengerInfo.messageType = kMessagesToLog;
859 messengerInfo.pfnUserCallback = &DebugUtilsMessenger;
860 messengerInfo.pUserData = this;
861
862 ANGLE_VK_TRY(displayVk, vkCreateDebugUtilsMessengerEXT(mInstance, &messengerInfo, nullptr,
863 &mDebugUtilsMessenger));
864 }
865 else if (enableDebugReport)
866 {
867 // Fallback to EXT_debug_report.
868 VkDebugReportCallbackCreateInfoEXT debugReportInfo = {};
869
870 debugReportInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
871 debugReportInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT;
872 debugReportInfo.pfnCallback = &DebugReportCallback;
873 debugReportInfo.pUserData = this;
874
875 ANGLE_VK_TRY(displayVk, vkCreateDebugReportCallbackEXT(mInstance, &debugReportInfo, nullptr,
876 &mDebugReportCallback));
877 }
878
879 if (std::find(enabledInstanceExtensions.begin(), enabledInstanceExtensions.end(),
880 VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) !=
881 enabledInstanceExtensions.end())
882 {
883 ASSERT(vkGetPhysicalDeviceProperties2KHR);
884 }
885
886 uint32_t physicalDeviceCount = 0;
887 ANGLE_VK_TRY(displayVk, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
888 ANGLE_VK_CHECK(displayVk, physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);
889
890 // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
891 std::vector<VkPhysicalDevice> physicalDevices(physicalDeviceCount);
892 ANGLE_VK_TRY(displayVk, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount,
893 physicalDevices.data()));
894 ChoosePhysicalDevice(physicalDevices, mEnabledICD, &mPhysicalDevice,
895 &mPhysicalDeviceProperties);
896
897 mGarbageCollectionFlushThreshold =
898 static_cast<uint32_t>(mPhysicalDeviceProperties.limits.maxMemoryAllocationCount *
899 kPercentMaxMemoryAllocationCount);
900
901 vkGetPhysicalDeviceFeatures(mPhysicalDevice, &mPhysicalDeviceFeatures);
902
903 // Ensure we can find a graphics queue family.
904 uint32_t queueCount = 0;
905 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount, nullptr);
906
907 ANGLE_VK_CHECK(displayVk, queueCount > 0, VK_ERROR_INITIALIZATION_FAILED);
908
909 mQueueFamilyProperties.resize(queueCount);
910 vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount,
911 mQueueFamilyProperties.data());
912
913 size_t graphicsQueueFamilyCount = false;
914 uint32_t firstGraphicsQueueFamily = 0;
915 constexpr VkQueueFlags kGraphicsAndCompute = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
916 for (uint32_t familyIndex = 0; familyIndex < queueCount; ++familyIndex)
917 {
918 const auto &queueInfo = mQueueFamilyProperties[familyIndex];
919 if ((queueInfo.queueFlags & kGraphicsAndCompute) == kGraphicsAndCompute)
920 {
921 ASSERT(queueInfo.queueCount > 0);
922 graphicsQueueFamilyCount++;
923 if (firstGraphicsQueueFamily == 0)
924 {
925 firstGraphicsQueueFamily = familyIndex;
926 }
927 break;
928 }
929 }
930
931 ANGLE_VK_CHECK(displayVk, graphicsQueueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);
932
933 // If only one queue family, go ahead and initialize the device. If there is more than one
934 // queue, we'll have to wait until we see a WindowSurface to know which supports present.
935 if (graphicsQueueFamilyCount == 1)
936 {
937 ANGLE_TRY(initializeDevice(displayVk, firstGraphicsQueueFamily));
938 }
939
940 // Create VMA allocator
941 ANGLE_VK_TRY(displayVk, vma::InitAllocator(mPhysicalDevice, mDevice, mInstance, &mAllocator));
942
943 // Store the physical device memory properties so we can find the right memory pools.
944 mMemoryProperties.init(mPhysicalDevice);
945
946 if (!mGlslangInitialized)
947 {
948 GlslangInitialize();
949 mGlslangInitialized = true;
950 }
951
952 // Initialize the format table.
953 mFormatTable.initialize(this, &mNativeTextureCaps, &mNativeCaps.compressedTextureFormats);
954
955 return angle::Result::Continue;
956 }
957
queryDeviceExtensionFeatures(const ExtensionNameList & deviceExtensionNames)958 void RendererVk::queryDeviceExtensionFeatures(const ExtensionNameList &deviceExtensionNames)
959 {
960 // Default initialize all extension features to false.
961 mLineRasterizationFeatures = {};
962 mLineRasterizationFeatures.sType =
963 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT;
964
965 mProvokingVertexFeatures = {};
966 mProvokingVertexFeatures.sType =
967 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT;
968
969 mVertexAttributeDivisorFeatures = {};
970 mVertexAttributeDivisorFeatures.sType =
971 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
972
973 mVertexAttributeDivisorProperties = {};
974 mVertexAttributeDivisorProperties.sType =
975 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT;
976
977 mTransformFeedbackFeatures = {};
978 mTransformFeedbackFeatures.sType =
979 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT;
980
981 mIndexTypeUint8Features = {};
982 mIndexTypeUint8Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT;
983
984 mPhysicalDeviceSubgroupProperties = {};
985 mPhysicalDeviceSubgroupProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;
986
987 mPhysicalDeviceExternalMemoryHostProperties = {};
988 mPhysicalDeviceExternalMemoryHostProperties.sType =
989 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT;
990
991 if (!vkGetPhysicalDeviceProperties2KHR || !vkGetPhysicalDeviceFeatures2KHR)
992 {
993 return;
994 }
995
996 // Query features and properties.
997 VkPhysicalDeviceFeatures2KHR deviceFeatures = {};
998 deviceFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
999
1000 VkPhysicalDeviceProperties2 deviceProperties = {};
1001 deviceProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
1002
1003 // Query line rasterization features
1004 if (ExtensionFound(VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME, deviceExtensionNames))
1005 {
1006 vk::AddToPNextChain(&deviceFeatures, &mLineRasterizationFeatures);
1007 }
1008
1009 // Query provoking vertex features
1010 if (ExtensionFound(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME, deviceExtensionNames))
1011 {
1012 vk::AddToPNextChain(&deviceFeatures, &mProvokingVertexFeatures);
1013 }
1014
1015 // Query attribute divisor features and properties
1016 if (ExtensionFound(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME, deviceExtensionNames))
1017 {
1018 vk::AddToPNextChain(&deviceFeatures, &mVertexAttributeDivisorFeatures);
1019 vk::AddToPNextChain(&deviceProperties, &mVertexAttributeDivisorProperties);
1020 }
1021
1022 // Query transform feedback features
1023 if (ExtensionFound(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME, deviceExtensionNames))
1024 {
1025 vk::AddToPNextChain(&deviceFeatures, &mTransformFeedbackFeatures);
1026 }
1027
1028 // Query uint8 index type features
1029 if (ExtensionFound(VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME, deviceExtensionNames))
1030 {
1031 vk::AddToPNextChain(&deviceFeatures, &mIndexTypeUint8Features);
1032 }
1033
1034 // Query external memory host properties
1035 if (ExtensionFound(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME, deviceExtensionNames))
1036 {
1037 vk::AddToPNextChain(&deviceProperties, &mPhysicalDeviceExternalMemoryHostProperties);
1038 }
1039
1040 // Query subgroup properties
1041 vk::AddToPNextChain(&deviceProperties, &mPhysicalDeviceSubgroupProperties);
1042
1043 vkGetPhysicalDeviceFeatures2KHR(mPhysicalDevice, &deviceFeatures);
1044 vkGetPhysicalDeviceProperties2KHR(mPhysicalDevice, &deviceProperties);
1045
1046 // Clean up pNext chains
1047 mLineRasterizationFeatures.pNext = nullptr;
1048 mProvokingVertexFeatures.pNext = nullptr;
1049 mVertexAttributeDivisorFeatures.pNext = nullptr;
1050 mVertexAttributeDivisorProperties.pNext = nullptr;
1051 mTransformFeedbackFeatures.pNext = nullptr;
1052 mIndexTypeUint8Features.pNext = nullptr;
1053 mPhysicalDeviceSubgroupProperties.pNext = nullptr;
1054 mPhysicalDeviceExternalMemoryHostProperties.pNext = nullptr;
1055 }
1056
initializeDevice(DisplayVk * displayVk,uint32_t queueFamilyIndex)1057 angle::Result RendererVk::initializeDevice(DisplayVk *displayVk, uint32_t queueFamilyIndex)
1058 {
1059 uint32_t deviceLayerCount = 0;
1060 ANGLE_VK_TRY(displayVk,
1061 vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));
1062
1063 std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
1064 if (deviceLayerCount > 0)
1065 {
1066 ANGLE_VK_TRY(displayVk, vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
1067 deviceLayerProps.data()));
1068 }
1069
1070 VulkanLayerVector enabledDeviceLayerNames;
1071 if (mEnableValidationLayers)
1072 {
1073 mEnableValidationLayers =
1074 GetAvailableValidationLayers(deviceLayerProps, false, &enabledDeviceLayerNames);
1075 }
1076
1077 const char *wsiLayer = displayVk->getWSILayer();
1078 if (wsiLayer)
1079 {
1080 enabledDeviceLayerNames.push_back(wsiLayer);
1081 }
1082
1083 // Enumerate device extensions that are provided by the vulkan
1084 // implementation and implicit layers.
1085 uint32_t deviceExtensionCount = 0;
1086 ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
1087 &deviceExtensionCount, nullptr));
1088
1089 std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
1090 if (deviceExtensionCount > 0)
1091 {
1092 ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
1093 &deviceExtensionCount,
1094 deviceExtensionProps.data()));
1095 }
1096
1097 // Enumerate device extensions that are provided by explicit layers.
1098 for (const char *layerName : enabledDeviceLayerNames)
1099 {
1100 uint32_t previousExtensionCount = static_cast<uint32_t>(deviceExtensionProps.size());
1101 uint32_t deviceLayerExtensionCount = 0;
1102 ANGLE_VK_TRY(displayVk,
1103 vkEnumerateDeviceExtensionProperties(mPhysicalDevice, layerName,
1104 &deviceLayerExtensionCount, nullptr));
1105 deviceExtensionProps.resize(previousExtensionCount + deviceLayerExtensionCount);
1106 ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(
1107 mPhysicalDevice, layerName, &deviceLayerExtensionCount,
1108 deviceExtensionProps.data() + previousExtensionCount));
1109 }
1110
1111 ExtensionNameList deviceExtensionNames;
1112 if (!deviceExtensionProps.empty())
1113 {
1114 ASSERT(deviceExtensionNames.size() <= deviceExtensionProps.size());
1115 for (const VkExtensionProperties &prop : deviceExtensionProps)
1116 {
1117 deviceExtensionNames.push_back(prop.extensionName);
1118 }
1119 std::sort(deviceExtensionNames.begin(), deviceExtensionNames.end(), StrLess);
1120 }
1121
1122 ExtensionNameList enabledDeviceExtensions;
1123 enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
1124
1125 // Queues: map low, med, high priority to whatever is supported up to 3 queues
1126 uint32_t queueCount = std::min(mQueueFamilyProperties[queueFamilyIndex].queueCount,
1127 static_cast<uint32_t>(egl::ContextPriority::EnumCount));
1128
1129 constexpr float kVulkanQueuePriorityLow = 0.0;
1130 constexpr float kVulkanQueuePriorityMedium = 0.4;
1131 constexpr float kVulkanQueuePriorityHigh = 1.0;
1132
1133 // Index order: Low, High, Medium - so no need to rearrange according to count:
1134 // If we have 1 queue - all same, if 2 - Low and High, if 3 Low, High and Medium.
1135 constexpr uint32_t kQueueIndexLow = 0;
1136 constexpr uint32_t kQueueIndexHigh = 1;
1137 constexpr uint32_t kQueueIndexMedium = 2;
1138
1139 constexpr float queuePriorities[static_cast<uint32_t>(egl::ContextPriority::EnumCount)] = {
1140 kVulkanQueuePriorityMedium, kVulkanQueuePriorityHigh, kVulkanQueuePriorityLow};
1141
1142 VkDeviceQueueCreateInfo queueCreateInfo = {};
1143 queueCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
1144 queueCreateInfo.flags = 0;
1145 queueCreateInfo.queueFamilyIndex = queueFamilyIndex;
1146 queueCreateInfo.queueCount = queueCount;
1147 queueCreateInfo.pQueuePriorities = queuePriorities;
1148
1149 // Query extensions and their features.
1150 queryDeviceExtensionFeatures(deviceExtensionNames);
1151
1152 // Initialize features and workarounds.
1153 initFeatures(displayVk, deviceExtensionNames);
1154
1155 // Selectively enable KHR_MAINTENANCE1 to support viewport flipping.
1156 if ((getFeatures().flipViewportY.enabled) &&
1157 (mPhysicalDeviceProperties.apiVersion < VK_MAKE_VERSION(1, 1, 0)))
1158 {
1159 enabledDeviceExtensions.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
1160 }
1161 if (getFeatures().supportsIncrementalPresent.enabled)
1162 {
1163 enabledDeviceExtensions.push_back(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME);
1164 }
1165
1166 #if defined(ANGLE_PLATFORM_ANDROID)
1167 if (getFeatures().supportsAndroidHardwareBuffer.enabled)
1168 {
1169 enabledDeviceExtensions.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
1170 enabledDeviceExtensions.push_back(
1171 VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
1172 }
1173 #else
1174 ASSERT(!getFeatures().supportsAndroidHardwareBuffer.enabled);
1175 #endif
1176
1177 if (getFeatures().supportsAndroidHardwareBuffer.enabled ||
1178 getFeatures().supportsExternalMemoryFd.enabled ||
1179 getFeatures().supportsExternalMemoryFuchsia.enabled)
1180 {
1181 enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
1182 }
1183
1184 if (getFeatures().supportsExternalMemoryFd.enabled)
1185 {
1186 enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
1187 }
1188
1189 if (getFeatures().supportsExternalMemoryFuchsia.enabled)
1190 {
1191 enabledDeviceExtensions.push_back(VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME);
1192 }
1193
1194 if (getFeatures().supportsExternalSemaphoreFd.enabled ||
1195 getFeatures().supportsExternalSemaphoreFuchsia.enabled)
1196 {
1197 enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
1198 }
1199
1200 if (getFeatures().supportsExternalSemaphoreFd.enabled)
1201 {
1202 enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
1203 }
1204
1205 if (getFeatures().supportsExternalSemaphoreFuchsia.enabled)
1206 {
1207 enabledDeviceExtensions.push_back(VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
1208 }
1209
1210 if (getFeatures().supportsShaderStencilExport.enabled)
1211 {
1212 enabledDeviceExtensions.push_back(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME);
1213 }
1214
1215 std::sort(enabledDeviceExtensions.begin(), enabledDeviceExtensions.end(), StrLess);
1216 ANGLE_VK_TRY(displayVk, VerifyExtensionsPresent(deviceExtensionNames, enabledDeviceExtensions));
1217
1218 // Select additional features to be enabled.
1219 VkPhysicalDeviceFeatures2KHR enabledFeatures = {};
1220 enabledFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
1221 // Used to support framebuffers with multiple attachments:
1222 enabledFeatures.features.independentBlend = mPhysicalDeviceFeatures.independentBlend;
1223 // Used to support robust buffer access:
1224 enabledFeatures.features.robustBufferAccess = mPhysicalDeviceFeatures.robustBufferAccess;
1225 // Used to support Anisotropic filtering:
1226 enabledFeatures.features.samplerAnisotropy = mPhysicalDeviceFeatures.samplerAnisotropy;
1227 // Used to emulate transform feedback:
1228 enabledFeatures.features.vertexPipelineStoresAndAtomics =
1229 mPhysicalDeviceFeatures.vertexPipelineStoresAndAtomics;
1230 // Used to implement storage buffers and images in the fragment shader:
1231 enabledFeatures.features.fragmentStoresAndAtomics =
1232 mPhysicalDeviceFeatures.fragmentStoresAndAtomics;
1233 // Used to support geometry shaders:
1234 enabledFeatures.features.geometryShader = mPhysicalDeviceFeatures.geometryShader;
1235 // Used to support EXT_gpu_shader5:
1236 enabledFeatures.features.shaderImageGatherExtended =
1237 mPhysicalDeviceFeatures.shaderImageGatherExtended;
1238 // Used to support EXT_gpu_shader5:
1239 enabledFeatures.features.shaderUniformBufferArrayDynamicIndexing =
1240 mPhysicalDeviceFeatures.shaderUniformBufferArrayDynamicIndexing;
1241 // Used to support EXT_gpu_shader5 and sampler array of array emulation:
1242 enabledFeatures.features.shaderSampledImageArrayDynamicIndexing =
1243 mPhysicalDeviceFeatures.shaderSampledImageArrayDynamicIndexing;
1244 // Used to support atomic counter emulation:
1245 enabledFeatures.features.shaderStorageBufferArrayDynamicIndexing =
1246 mPhysicalDeviceFeatures.shaderStorageBufferArrayDynamicIndexing;
1247
1248 if (!vk::CommandBuffer::ExecutesInline())
1249 {
1250 enabledFeatures.features.inheritedQueries = mPhysicalDeviceFeatures.inheritedQueries;
1251 }
1252
1253 // Setup device initialization struct
1254 VkDeviceCreateInfo createInfo = {};
1255
1256 // Based on available extension features, decide on which extensions and features to enable.
1257
1258 if (mLineRasterizationFeatures.bresenhamLines)
1259 {
1260 enabledDeviceExtensions.push_back(VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME);
1261 vk::AddToPNextChain(&createInfo, &mLineRasterizationFeatures);
1262 }
1263
1264 if (mProvokingVertexFeatures.provokingVertexLast)
1265 {
1266 enabledDeviceExtensions.push_back(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME);
1267 vk::AddToPNextChain(&createInfo, &mProvokingVertexFeatures);
1268 }
1269
1270 if (mVertexAttributeDivisorFeatures.vertexAttributeInstanceRateDivisor)
1271 {
1272 enabledDeviceExtensions.push_back(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
1273 vk::AddToPNextChain(&createInfo, &mVertexAttributeDivisorFeatures);
1274
1275 // We only store 8 bit divisor in GraphicsPipelineDesc so capping value & we emulate if
1276 // exceeded
1277 mMaxVertexAttribDivisor =
1278 std::min(mVertexAttributeDivisorProperties.maxVertexAttribDivisor,
1279 static_cast<uint32_t>(std::numeric_limits<uint8_t>::max()));
1280 }
1281
1282 if (getFeatures().supportsTransformFeedbackExtension.enabled)
1283 {
1284 enabledDeviceExtensions.push_back(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
1285 vk::AddToPNextChain(&createInfo, &mTransformFeedbackFeatures);
1286 }
1287
1288 if (getFeatures().supportsIndexTypeUint8.enabled)
1289 {
1290 enabledDeviceExtensions.push_back(VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME);
1291 vk::AddToPNextChain(&createInfo, &mIndexTypeUint8Features);
1292 }
1293
1294 if (getFeatures().supportsExternalMemoryHost.enabled)
1295 {
1296 enabledDeviceExtensions.push_back(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME);
1297 mMinImportedHostPointerAlignment =
1298 mPhysicalDeviceExternalMemoryHostProperties.minImportedHostPointerAlignment;
1299 }
1300
1301 createInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
1302 createInfo.flags = 0;
1303 createInfo.queueCreateInfoCount = 1;
1304 createInfo.pQueueCreateInfos = &queueCreateInfo;
1305 createInfo.enabledLayerCount = static_cast<uint32_t>(enabledDeviceLayerNames.size());
1306 createInfo.ppEnabledLayerNames = enabledDeviceLayerNames.data();
1307 createInfo.enabledExtensionCount = static_cast<uint32_t>(enabledDeviceExtensions.size());
1308 createInfo.ppEnabledExtensionNames =
1309 enabledDeviceExtensions.empty() ? nullptr : enabledDeviceExtensions.data();
1310 // Enable core features without assuming VkPhysicalDeviceFeatures2KHR is accepted in the pNext
1311 // chain of VkDeviceCreateInfo.
1312 createInfo.pEnabledFeatures = &enabledFeatures.features;
1313
1314 ANGLE_VK_TRY(displayVk, vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));
1315 volkLoadDevice(mDevice);
1316
1317 mCurrentQueueFamilyIndex = queueFamilyIndex;
1318
1319 // When only 1 Queue, use same for all, Low index. Identify as Medium, since it's default.
1320 VkQueue queue;
1321 vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, kQueueIndexLow, &queue);
1322 mQueues[egl::ContextPriority::Low] = queue;
1323 mQueues[egl::ContextPriority::Medium] = queue;
1324 mQueues[egl::ContextPriority::High] = queue;
1325 mPriorities[egl::ContextPriority::Low] = egl::ContextPriority::Medium;
1326 mPriorities[egl::ContextPriority::Medium] = egl::ContextPriority::Medium;
1327 mPriorities[egl::ContextPriority::High] = egl::ContextPriority::Medium;
1328
1329 // If at least 2 queues, High has its own queue
1330 if (queueCount > 1)
1331 {
1332 vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, kQueueIndexHigh,
1333 &mQueues[egl::ContextPriority::High]);
1334 mPriorities[egl::ContextPriority::High] = egl::ContextPriority::High;
1335 }
1336 // If at least 3 queues, Medium has its own queue. Adjust Low priority.
1337 if (queueCount > 2)
1338 {
1339 vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, kQueueIndexMedium,
1340 &mQueues[egl::ContextPriority::Medium]);
1341 mPriorities[egl::ContextPriority::Low] = egl::ContextPriority::Low;
1342 }
1343
1344 // Initialize the vulkan pipeline cache.
1345 bool success = false;
1346 ANGLE_TRY(initPipelineCache(displayVk, &mPipelineCache, &success));
1347
1348 return angle::Result::Continue;
1349 }
1350
selectPresentQueueForSurface(DisplayVk * displayVk,VkSurfaceKHR surface,uint32_t * presentQueueOut)1351 angle::Result RendererVk::selectPresentQueueForSurface(DisplayVk *displayVk,
1352 VkSurfaceKHR surface,
1353 uint32_t *presentQueueOut)
1354 {
1355 // We've already initialized a device, and can't re-create it unless it's never been used.
1356 // TODO(jmadill): Handle the re-creation case if necessary.
1357 if (mDevice != VK_NULL_HANDLE)
1358 {
1359 ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());
1360
1361 // Check if the current device supports present on this surface.
1362 VkBool32 supportsPresent = VK_FALSE;
1363 ANGLE_VK_TRY(displayVk,
1364 vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
1365 surface, &supportsPresent));
1366
1367 if (supportsPresent == VK_TRUE)
1368 {
1369 *presentQueueOut = mCurrentQueueFamilyIndex;
1370 return angle::Result::Continue;
1371 }
1372 }
1373
1374 // Find a graphics and present queue.
1375 Optional<uint32_t> newPresentQueue;
1376 uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
1377 constexpr VkQueueFlags kGraphicsAndCompute = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
1378 for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
1379 {
1380 const auto &queueInfo = mQueueFamilyProperties[queueIndex];
1381 if ((queueInfo.queueFlags & kGraphicsAndCompute) == kGraphicsAndCompute)
1382 {
1383 VkBool32 supportsPresent = VK_FALSE;
1384 ANGLE_VK_TRY(displayVk, vkGetPhysicalDeviceSurfaceSupportKHR(
1385 mPhysicalDevice, queueIndex, surface, &supportsPresent));
1386
1387 if (supportsPresent == VK_TRUE)
1388 {
1389 newPresentQueue = queueIndex;
1390 break;
1391 }
1392 }
1393 }
1394
1395 ANGLE_VK_CHECK(displayVk, newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
1396 ANGLE_TRY(initializeDevice(displayVk, newPresentQueue.value()));
1397
1398 *presentQueueOut = newPresentQueue.value();
1399 return angle::Result::Continue;
1400 }
1401
getVendorString() const1402 std::string RendererVk::getVendorString() const
1403 {
1404 return GetVendorString(mPhysicalDeviceProperties.vendorID);
1405 }
1406
getRendererDescription() const1407 std::string RendererVk::getRendererDescription() const
1408 {
1409 std::stringstream strstr;
1410
1411 uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;
1412
1413 strstr << "Vulkan ";
1414 strstr << VK_VERSION_MAJOR(apiVersion) << ".";
1415 strstr << VK_VERSION_MINOR(apiVersion) << ".";
1416 strstr << VK_VERSION_PATCH(apiVersion);
1417
1418 strstr << "(";
1419
1420 // In the case of NVIDIA, deviceName does not necessarily contain "NVIDIA". Add "NVIDIA" so that
1421 // Vulkan end2end tests can be selectively disabled on NVIDIA. TODO(jmadill): should not be
1422 // needed after http://anglebug.com/1874 is fixed and end2end_tests use more sophisticated
1423 // driver detection.
1424 if (mPhysicalDeviceProperties.vendorID == VENDOR_ID_NVIDIA)
1425 {
1426 strstr << GetVendorString(mPhysicalDeviceProperties.vendorID) << " ";
1427 }
1428
1429 strstr << mPhysicalDeviceProperties.deviceName;
1430 strstr << " (" << gl::FmtHex(mPhysicalDeviceProperties.deviceID) << ")";
1431
1432 strstr << ")";
1433
1434 return strstr.str();
1435 }
1436
getMaxSupportedESVersion() const1437 gl::Version RendererVk::getMaxSupportedESVersion() const
1438 {
1439 // Current highest supported version
1440 gl::Version maxVersion = gl::Version(3, 1);
1441
1442 // Early out without downgrading ES version if mock ICD enabled.
1443 // Mock ICD doesn't expose sufficient capabilities yet.
1444 // https://github.com/KhronosGroup/Vulkan-Tools/issues/84
1445 if (isMockICDEnabled())
1446 {
1447 return maxVersion;
1448 }
1449
1450 // Limit to ES3.1 if there are any blockers for 3.2.
1451 if (!vk::CanSupportGPUShader5EXT(mPhysicalDeviceFeatures))
1452 {
1453 maxVersion = LimitVersionTo(maxVersion, {3, 1});
1454 }
1455
1456 // Limit to ES3.0 if there are any blockers for 3.1.
1457
1458 // ES3.1 requires at least one atomic counter buffer and four storage buffers in compute.
1459 // Atomic counter buffers are emulated with storage buffers. For simplicity, we always support
1460 // either none or IMPLEMENTATION_MAX_ATOMIC_COUNTER_BUFFERS atomic counter buffers. So if
1461 // Vulkan doesn't support at least that many storage buffers in compute, we don't support 3.1.
1462 const uint32_t kMinimumStorageBuffersForES31 =
1463 gl::limits::kMinimumComputeStorageBuffers + gl::IMPLEMENTATION_MAX_ATOMIC_COUNTER_BUFFERS;
1464 if (mPhysicalDeviceProperties.limits.maxPerStageDescriptorStorageBuffers <
1465 kMinimumStorageBuffersForES31)
1466 {
1467 maxVersion = LimitVersionTo(maxVersion, {3, 0});
1468 }
1469
1470 // ES3.1 requires at least a maximum offset of at least 2047.
1471 // If the Vulkan implementation can't support that, we cannot support 3.1.
1472 if (mPhysicalDeviceProperties.limits.maxVertexInputAttributeOffset < 2047)
1473 {
1474 maxVersion = LimitVersionTo(maxVersion, {3, 0});
1475 }
1476
1477 // Limit to ES2.0 if there are any blockers for 3.0.
1478 // TODO: http://anglebug.com/3972 Limit to GLES 2.0 if flat shading can't be emulated
1479
1480 // Multisample textures (ES3.1) and multisample renderbuffers (ES3.0) require the Vulkan driver
1481 // to support the standard sample locations (in order to pass dEQP tests that check these
1482 // locations). If the Vulkan implementation can't support that, we cannot support 3.0/3.1.
1483 if (mPhysicalDeviceProperties.limits.standardSampleLocations != VK_TRUE)
1484 {
1485 maxVersion = LimitVersionTo(maxVersion, {2, 0});
1486 }
1487
1488 // If the command buffer doesn't support queries, we can't support ES3.
1489 if (!vk::CommandBuffer::SupportsQueries(mPhysicalDeviceFeatures))
1490 {
1491 maxVersion = LimitVersionTo(maxVersion, {2, 0});
1492 }
1493
1494 // If independentBlend is not supported, we can't have a mix of has-alpha and emulated-alpha
1495 // render targets in a framebuffer. We also cannot perform masked clears of multiple render
1496 // targets.
1497 if (!mPhysicalDeviceFeatures.independentBlend)
1498 {
1499 maxVersion = LimitVersionTo(maxVersion, {2, 0});
1500 }
1501
1502 // If the Vulkan transform feedback extension is not present, we use an emulation path that
1503 // requires the vertexPipelineStoresAndAtomics feature. Without the extension or this feature,
1504 // we can't currently support transform feedback.
1505 if (!mFeatures.supportsTransformFeedbackExtension.enabled &&
1506 !mFeatures.emulateTransformFeedback.enabled)
1507 {
1508 maxVersion = LimitVersionTo(maxVersion, {2, 0});
1509 }
1510
1511 // Limit to GLES 2.0 if maxPerStageDescriptorUniformBuffers is too low.
1512 // Table 6.31 MAX_VERTEX_UNIFORM_BLOCKS minimum value = 12
1513 // Table 6.32 MAX_FRAGMENT_UNIFORM_BLOCKS minimum value = 12
1514 // NOTE: We reserve some uniform buffers for emulation, so use the NativeCaps which takes this
1515 // into account, rather than the physical device maxPerStageDescriptorUniformBuffers limits.
1516 for (gl::ShaderType shaderType : gl::AllShaderTypes())
1517 {
1518 if (static_cast<GLuint>(getNativeCaps().maxShaderUniformBlocks[shaderType]) <
1519 gl::limits::kMinimumShaderUniformBlocks)
1520 {
1521 maxVersion = LimitVersionTo(maxVersion, {2, 0});
1522 }
1523 }
1524
1525 // Limit to GLES 2.0 if maxVertexOutputComponents is too low.
1526 // Table 6.31 MAX VERTEX OUTPUT COMPONENTS minimum value = 64
1527 // NOTE: We reserve some vertex output components for emulation, so use the NativeCaps which
1528 // takes this into account, rather than the physical device maxVertexOutputComponents limits.
1529 if (static_cast<GLuint>(getNativeCaps().maxVertexOutputComponents) <
1530 gl::limits::kMinimumVertexOutputComponents)
1531 {
1532 maxVersion = LimitVersionTo(maxVersion, {2, 0});
1533 }
1534
1535 return maxVersion;
1536 }
1537
getMaxConformantESVersion() const1538 gl::Version RendererVk::getMaxConformantESVersion() const
1539 {
1540 return LimitVersionTo(getMaxSupportedESVersion(), {3, 0});
1541 }
1542
initFeatures(DisplayVk * displayVk,const ExtensionNameList & deviceExtensionNames)1543 void RendererVk::initFeatures(DisplayVk *displayVk, const ExtensionNameList &deviceExtensionNames)
1544 {
1545 if (displayVk->getState().featuresAllDisabled)
1546 {
1547 ApplyFeatureOverrides(&mFeatures, displayVk->getState());
1548 return;
1549 }
1550
1551 bool isAMD = IsAMD(mPhysicalDeviceProperties.vendorID);
1552 bool isIntel = IsIntel(mPhysicalDeviceProperties.vendorID);
1553 bool isNvidia = IsNvidia(mPhysicalDeviceProperties.vendorID);
1554 bool isQualcomm = IsQualcomm(mPhysicalDeviceProperties.vendorID);
1555 bool isARM = IsARM(mPhysicalDeviceProperties.vendorID);
1556 bool isSwS =
1557 IsSwiftshader(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID);
1558
1559 if (mLineRasterizationFeatures.bresenhamLines == VK_TRUE)
1560 {
1561 ASSERT(mLineRasterizationFeatures.sType ==
1562 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT);
1563 ANGLE_FEATURE_CONDITION(&mFeatures, bresenhamLineRasterization, true);
1564 }
1565 else
1566 {
1567 // Use OpenGL line rasterization rules if extension not available by default.
1568 // TODO(jmadill): Fix Android support. http://anglebug.com/2830
1569 ANGLE_FEATURE_CONDITION(&mFeatures, basicGLLineRasterization, !IsAndroid());
1570 }
1571
1572 if (mProvokingVertexFeatures.provokingVertexLast == VK_TRUE)
1573 {
1574 ASSERT(mProvokingVertexFeatures.sType ==
1575 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT);
1576 ANGLE_FEATURE_CONDITION(&mFeatures, provokingVertex, true);
1577 }
1578
1579 // TODO(lucferron): Currently disabled on Intel only since many tests are failing and need
1580 // investigation. http://anglebug.com/2728
1581 ANGLE_FEATURE_CONDITION(
1582 &mFeatures, flipViewportY,
1583 !IsIntel(mPhysicalDeviceProperties.vendorID) &&
1584 (mPhysicalDeviceProperties.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) ||
1585 ExtensionFound(VK_KHR_MAINTENANCE1_EXTENSION_NAME, deviceExtensionNames));
1586
1587 // http://anglebug.com/2838
1588 ANGLE_FEATURE_CONDITION(&mFeatures, extraCopyBufferRegion, IsWindows() && isIntel);
1589
1590 // http://anglebug.com/3055
1591 ANGLE_FEATURE_CONDITION(&mFeatures, forceCPUPathForCubeMapCopy, IsWindows() && isIntel);
1592
1593 // Work around incorrect NVIDIA point size range clamping.
1594 // http://anglebug.com/2970#c10
1595 // Clamp if driver version is:
1596 // < 430 on Windows
1597 // < 421 otherwise
1598 angle::VersionInfo nvidiaVersion;
1599 if (isNvidia)
1600 {
1601 nvidiaVersion =
1602 angle::ParseNvidiaDriverVersion(this->mPhysicalDeviceProperties.driverVersion);
1603 }
1604 ANGLE_FEATURE_CONDITION(&mFeatures, clampPointSize,
1605 isNvidia && nvidiaVersion.major < uint32_t(IsWindows() ? 430 : 421));
1606
1607 // Work around ineffective compute-graphics barriers on Nexus 5X.
1608 // TODO(syoussefi): Figure out which other vendors and driver versions are affected.
1609 // http://anglebug.com/3019
1610 ANGLE_FEATURE_CONDITION(&mFeatures, flushAfterVertexConversion,
1611 IsAndroid() && IsNexus5X(mPhysicalDeviceProperties.vendorID,
1612 mPhysicalDeviceProperties.deviceID));
1613
1614 ANGLE_FEATURE_CONDITION(
1615 &mFeatures, supportsIncrementalPresent,
1616 ExtensionFound(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME, deviceExtensionNames));
1617
1618 #if defined(ANGLE_PLATFORM_ANDROID)
1619 ANGLE_FEATURE_CONDITION(
1620 &mFeatures, supportsAndroidHardwareBuffer,
1621 IsAndroid() &&
1622 ExtensionFound(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME,
1623 deviceExtensionNames) &&
1624 ExtensionFound(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, deviceExtensionNames));
1625 #endif
1626
1627 ANGLE_FEATURE_CONDITION(
1628 &mFeatures, supportsExternalMemoryFd,
1629 ExtensionFound(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME, deviceExtensionNames));
1630
1631 ANGLE_FEATURE_CONDITION(
1632 &mFeatures, supportsExternalMemoryFuchsia,
1633 ExtensionFound(VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME, deviceExtensionNames));
1634
1635 ANGLE_FEATURE_CONDITION(
1636 &mFeatures, supportsExternalSemaphoreFd,
1637 ExtensionFound(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, deviceExtensionNames));
1638
1639 ANGLE_FEATURE_CONDITION(
1640 &mFeatures, supportsExternalSemaphoreFuchsia,
1641 ExtensionFound(VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME, deviceExtensionNames));
1642
1643 ANGLE_FEATURE_CONDITION(
1644 &mFeatures, supportsShaderStencilExport,
1645 ExtensionFound(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, deviceExtensionNames));
1646
1647 ANGLE_FEATURE_CONDITION(&mFeatures, supportsTransformFeedbackExtension,
1648 mTransformFeedbackFeatures.transformFeedback == VK_TRUE);
1649
1650 ANGLE_FEATURE_CONDITION(&mFeatures, supportsIndexTypeUint8,
1651 mIndexTypeUint8Features.indexTypeUint8 == VK_TRUE);
1652
1653 ANGLE_FEATURE_CONDITION(&mFeatures, emulateTransformFeedback,
1654 (mFeatures.supportsTransformFeedbackExtension.enabled == VK_FALSE &&
1655 mPhysicalDeviceFeatures.vertexPipelineStoresAndAtomics == VK_TRUE));
1656
1657 ANGLE_FEATURE_CONDITION(&mFeatures, disableFifoPresentMode, IsLinux() && isIntel);
1658
1659 ANGLE_FEATURE_CONDITION(&mFeatures, bindEmptyForUnusedDescriptorSets,
1660 IsAndroid() && isQualcomm);
1661
1662 ANGLE_FEATURE_CONDITION(&mFeatures, forceOldRewriteStructSamplers, IsAndroid() && !isSwS);
1663
1664 ANGLE_FEATURE_CONDITION(&mFeatures, perFrameWindowSizeQuery,
1665 isIntel || (IsWindows() && isAMD) || IsFuchsia() || isARM);
1666
1667 // Disabled on AMD/windows due to buggy behavior.
1668 ANGLE_FEATURE_CONDITION(&mFeatures, disallowSeamfulCubeMapEmulation, IsWindows() && isAMD);
1669
1670 ANGLE_FEATURE_CONDITION(&mFeatures, padBuffersToMaxVertexAttribStride, isAMD);
1671 mMaxVertexAttribStride = std::min(static_cast<uint32_t>(gl::limits::kMaxVertexAttribStride),
1672 mPhysicalDeviceProperties.limits.maxVertexInputBindingStride);
1673
1674 ANGLE_FEATURE_CONDITION(&mFeatures, forceD16TexFilter, IsAndroid() && isQualcomm);
1675
1676 ANGLE_FEATURE_CONDITION(&mFeatures, disableFlippingBlitWithCommand, IsAndroid() && isQualcomm);
1677
1678 // Allocation sanitization disabled by default because of a heaveyweight implementation
1679 // that can cause OOM and timeouts.
1680 ANGLE_FEATURE_CONDITION(&mFeatures, allocateNonZeroMemory, false);
1681
1682 ANGLE_FEATURE_CONDITION(&mFeatures, persistentlyMappedBuffers, true);
1683
1684 ANGLE_FEATURE_CONDITION(
1685 &mFeatures, supportsExternalMemoryHost,
1686 ExtensionFound(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME, deviceExtensionNames));
1687
1688 // Pre-rotation support is not fully ready to be enabled.
1689 ANGLE_FEATURE_CONDITION(&mFeatures, enablePreRotateSurfaces, false);
1690
1691 // Currently disable FramebufferVk cache on Apple: http://anglebug.com/4442
1692 ANGLE_FEATURE_CONDITION(&mFeatures, enableFramebufferVkCache, !IsApple());
1693
1694 // Currently disabled by default: http://anglebug.com/3078
1695 ANGLE_FEATURE_CONDITION(&mFeatures, enablePrecisionQualifiers, false);
1696
1697 ANGLE_FEATURE_CONDITION(&mFeatures, supportDepthStencilRenderingFeedbackLoops, true);
1698
1699 angle::PlatformMethods *platform = ANGLEPlatformCurrent();
1700 platform->overrideFeaturesVk(platform, &mFeatures);
1701
1702 ApplyFeatureOverrides(&mFeatures, displayVk->getState());
1703 }
1704
initPipelineCacheVkKey()1705 void RendererVk::initPipelineCacheVkKey()
1706 {
1707 std::ostringstream hashStream("ANGLE Pipeline Cache: ", std::ios_base::ate);
1708 // Add the pipeline cache UUID to make sure the blob cache always gives a compatible pipeline
1709 // cache. It's not particularly necessary to write it as a hex number as done here, so long as
1710 // there is no '\0' in the result.
1711 for (const uint32_t c : mPhysicalDeviceProperties.pipelineCacheUUID)
1712 {
1713 hashStream << std::hex << c;
1714 }
1715 // Add the vendor and device id too for good measure.
1716 hashStream << std::hex << mPhysicalDeviceProperties.vendorID;
1717 hashStream << std::hex << mPhysicalDeviceProperties.deviceID;
1718
1719 const std::string &hashString = hashStream.str();
1720 angle::base::SHA1HashBytes(reinterpret_cast<const unsigned char *>(hashString.c_str()),
1721 hashString.length(), mPipelineCacheVkBlobKey.data());
1722 }
1723
initPipelineCache(DisplayVk * display,vk::PipelineCache * pipelineCache,bool * success)1724 angle::Result RendererVk::initPipelineCache(DisplayVk *display,
1725 vk::PipelineCache *pipelineCache,
1726 bool *success)
1727 {
1728 initPipelineCacheVkKey();
1729
1730 egl::BlobCache::Value initialData;
1731 *success = display->getBlobCache()->get(display->getScratchBuffer(), mPipelineCacheVkBlobKey,
1732 &initialData);
1733
1734 VkPipelineCacheCreateInfo pipelineCacheCreateInfo = {};
1735
1736 pipelineCacheCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
1737 pipelineCacheCreateInfo.flags = 0;
1738 pipelineCacheCreateInfo.initialDataSize = *success ? initialData.size() : 0;
1739 pipelineCacheCreateInfo.pInitialData = *success ? initialData.data() : nullptr;
1740
1741 ANGLE_VK_TRY(display, pipelineCache->init(mDevice, pipelineCacheCreateInfo));
1742
1743 return angle::Result::Continue;
1744 }
1745
// Returns the renderer-wide pipeline cache, lazily seeding it from the blob cache on first use.
// NOTE(review): assumes mPipelineCache was created before this is first called (its creation is
// not in this function) — the merge below targets it unconditionally; confirm against init code.
angle::Result RendererVk::getPipelineCache(vk::PipelineCache **pipelineCache)
{
    if (mPipelineCacheInitialized)
    {
        *pipelineCache = &mPipelineCache;
        return angle::Result::Continue;
    }

    // We should now recreate the pipeline cache with the blob cache pipeline data.
    vk::PipelineCache pCache;
    bool success = false;
    ANGLE_TRY(initPipelineCache(vk::GetImpl(mDisplay), &pCache, &success));
    if (success)
    {
        // Merge the newly created pipeline cache into the existing one.
        // NOTE(review): the VkResult of merge() is not checked here — TODO confirm intentional.
        mPipelineCache.merge(mDevice, mPipelineCache.getHandle(), 1, pCache.ptr());
    }
    // The temporary cache is destroyed whether or not the merge happened; only the merged
    // contents (if any) survive in mPipelineCache.
    mPipelineCacheInitialized = true;
    pCache.destroy(mDevice);

    *pipelineCache = &mPipelineCache;
    return angle::Result::Continue;
}
1769
// Returns the native GL caps, computing them on first access via ensureCapsInitialized().
const gl::Caps &RendererVk::getNativeCaps() const
{
    ensureCapsInitialized();
    return mNativeCaps;
}
1775
// Returns the native texture caps, computing them on first access via ensureCapsInitialized().
const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
{
    ensureCapsInitialized();
    return mNativeTextureCaps;
}
1781
// Returns the native GL extensions, computing them on first access via ensureCapsInitialized().
const gl::Extensions &RendererVk::getNativeExtensions() const
{
    ensureCapsInitialized();
    return mNativeExtensions;
}
1787
// Returns the native GL limitations, computing them on first access via ensureCapsInitialized().
const gl::Limitations &RendererVk::getNativeLimitations() const
{
    ensureCapsInitialized();
    return mNativeLimitations;
}
1793
// Looks up (and, presumably, creates on miss — the cache implementation is elsewhere) the
// descriptor set layout matching |desc|.  The shared cache is guarded by a mutex because multiple
// contexts may reach it concurrently.
angle::Result RendererVk::getDescriptorSetLayout(
    vk::Context *context,
    const vk::DescriptorSetLayoutDesc &desc,
    vk::BindingPointer<vk::DescriptorSetLayout> *descriptorSetLayoutOut)
{
    std::lock_guard<decltype(mDescriptorSetLayoutCacheMutex)> lock(mDescriptorSetLayoutCacheMutex);
    return mDescriptorSetLayoutCache.getDescriptorSetLayout(context, desc, descriptorSetLayoutOut);
}
1802
// Looks up the pipeline layout matching |desc| in the shared cache, under the cache mutex (see
// getDescriptorSetLayout for the same pattern).
angle::Result RendererVk::getPipelineLayout(
    vk::Context *context,
    const vk::PipelineLayoutDesc &desc,
    const vk::DescriptorSetLayoutPointerArray &descriptorSetLayouts,
    vk::BindingPointer<vk::PipelineLayout> *pipelineLayoutOut)
{
    std::lock_guard<decltype(mPipelineLayoutCacheMutex)> lock(mPipelineLayoutCacheMutex);
    return mPipelineLayoutCache.getPipelineLayout(context, desc, descriptorSetLayouts,
                                                  pipelineLayoutOut);
}
1813
getPipelineCacheSize(DisplayVk * displayVk,size_t * pipelineCacheSizeOut)1814 angle::Result RendererVk::getPipelineCacheSize(DisplayVk *displayVk, size_t *pipelineCacheSizeOut)
1815 {
1816 VkResult result = mPipelineCache.getCacheData(mDevice, pipelineCacheSizeOut, nullptr);
1817 ANGLE_VK_TRY(displayVk, result);
1818
1819 return angle::Result::Continue;
1820 }
1821
// Periodically serializes the Vulkan pipeline cache into the EGL blob cache.  The work is rate
// limited: it runs only every kPipelineCacheVkUpdatePeriod calls, and only when new pipelines
// were created since the last sync (mPipelineCacheDirty).
angle::Result RendererVk::syncPipelineCacheVk(DisplayVk *displayVk)
{
    // TODO: Synchronize access to the pipeline/blob caches?
    ASSERT(mPipelineCache.valid());

    // Countdown; do nothing until the period elapses.
    if (--mPipelineCacheVkUpdateTimeout > 0)
    {
        return angle::Result::Continue;
    }
    if (!mPipelineCacheDirty)
    {
        mPipelineCacheVkUpdateTimeout = kPipelineCacheVkUpdatePeriod;
        return angle::Result::Continue;
    }

    mPipelineCacheVkUpdateTimeout = kPipelineCacheVkUpdatePeriod;

    size_t pipelineCacheSize = 0;
    ANGLE_TRY(getPipelineCacheSize(displayVk, &pipelineCacheSize));
    // Make sure we will receive enough data to hold the pipeline cache header
    // Table 7. Layout for pipeline cache header version VK_PIPELINE_CACHE_HEADER_VERSION_ONE
    const size_t kPipelineCacheHeaderSize = 16 + VK_UUID_SIZE;
    if (pipelineCacheSize < kPipelineCacheHeaderSize)
    {
        // No pipeline cache data to read, so return
        return angle::Result::Continue;
    }

    angle::MemoryBuffer *pipelineCacheData = nullptr;
    ANGLE_VK_CHECK_ALLOC(displayVk,
                         displayVk->getScratchBuffer(pipelineCacheSize, &pipelineCacheData));

    // The cache can grow between the size query above and the read below, in which case the
    // driver returns VK_INCOMPLETE and writes back the (smaller) amount actually copied.
    size_t oldPipelineCacheSize = pipelineCacheSize;
    VkResult result =
        mPipelineCache.getCacheData(mDevice, &pipelineCacheSize, pipelineCacheData->data());
    // We don't need all of the cache data, so just make sure we at least got the header
    // Vulkan Spec 9.6. Pipeline Cache
    // https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/chap9.html#pipelines-cache
    // If pDataSize is less than what is necessary to store this header, nothing will be written to
    // pData and zero will be written to pDataSize.
    // Any data written to pData is valid and can be provided as the pInitialData member of the
    // VkPipelineCacheCreateInfo structure passed to vkCreatePipelineCache.
    if (ANGLE_UNLIKELY(pipelineCacheSize < kPipelineCacheHeaderSize))
    {
        WARN() << "Not enough pipeline cache data read.";
        return angle::Result::Continue;
    }
    else if (ANGLE_UNLIKELY(result == VK_INCOMPLETE))
    {
        // Partial data is still valid as pInitialData (see spec quote above), so proceed.
        WARN() << "Received VK_INCOMPLETE: Old: " << oldPipelineCacheSize
               << ", New: " << pipelineCacheSize;
    }
    else
    {
        ANGLE_VK_TRY(displayVk, result);
    }

    // If vkGetPipelineCacheData ends up writing fewer bytes than requested, zero out the rest of
    // the buffer to avoid leaking garbage memory.
    ASSERT(pipelineCacheSize <= pipelineCacheData->size());
    if (pipelineCacheSize < pipelineCacheData->size())
    {
        memset(pipelineCacheData->data() + pipelineCacheSize, 0,
               pipelineCacheData->size() - pipelineCacheSize);
    }

    displayVk->getBlobCache()->putApplication(mPipelineCacheVkBlobKey, *pipelineCacheData);
    mPipelineCacheDirty = false;

    return angle::Result::Continue;
}
1893
// Generates a fresh serial from the shader serial factory.
Serial RendererVk::issueShaderSerial()
{
    return mShaderSerialFactory.generate();
}
1898
// These functions look at the mandatory format for support, and fallback to querying the device (if
// necessary) to test the availability of the bits.
// Checks |featureBits| against the format's linear-tiling image features.
bool RendererVk::hasLinearImageFormatFeatureBits(VkFormat format,
                                                 const VkFormatFeatureFlags featureBits)
{
    return hasFormatFeatureBits<&VkFormatProperties::linearTilingFeatures>(format, featureBits);
}
1906
// Returns the subset of |featureBits| supported for the format's optimal-tiling image features.
VkFormatFeatureFlags RendererVk::getImageFormatFeatureBits(VkFormat format,
                                                           const VkFormatFeatureFlags featureBits)
{
    return getFormatFeatureBits<&VkFormatProperties::optimalTilingFeatures>(format, featureBits);
}
1912
// Checks |featureBits| against the format's optimal-tiling image features.
bool RendererVk::hasImageFormatFeatureBits(VkFormat format, const VkFormatFeatureFlags featureBits)
{
    return hasFormatFeatureBits<&VkFormatProperties::optimalTilingFeatures>(format, featureBits);
}
1917
// Checks |featureBits| against the format's buffer features.
bool RendererVk::hasBufferFormatFeatureBits(VkFormat format, const VkFormatFeatureFlags featureBits)
{
    return hasFormatFeatureBits<&VkFormatProperties::bufferFeatures>(format, featureBits);
}
1922
queueSubmit(vk::Context * context,egl::ContextPriority priority,const VkSubmitInfo & submitInfo,const vk::Fence * fence,Serial * serialOut)1923 angle::Result RendererVk::queueSubmit(vk::Context *context,
1924 egl::ContextPriority priority,
1925 const VkSubmitInfo &submitInfo,
1926 const vk::Fence *fence,
1927 Serial *serialOut)
1928 {
1929 {
1930 std::lock_guard<decltype(mQueueMutex)> lock(mQueueMutex);
1931 VkFence handle = fence ? fence->getHandle() : VK_NULL_HANDLE;
1932 ANGLE_VK_TRY(context, vkQueueSubmit(mQueues[priority], 1, &submitInfo, handle));
1933 }
1934
1935 ANGLE_TRY(cleanupGarbage(false));
1936
1937 *serialOut = mCurrentQueueSerial;
1938 mLastSubmittedQueueSerial = mCurrentQueueSerial;
1939 mCurrentQueueSerial = mQueueSerialFactory.generate();
1940
1941 return angle::Result::Continue;
1942 }
1943
queueSubmitOneOff(vk::Context * context,vk::PrimaryCommandBuffer && primary,egl::ContextPriority priority,Serial * serialOut)1944 angle::Result RendererVk::queueSubmitOneOff(vk::Context *context,
1945 vk::PrimaryCommandBuffer &&primary,
1946 egl::ContextPriority priority,
1947 Serial *serialOut)
1948 {
1949 VkSubmitInfo submitInfo = {};
1950 submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
1951 submitInfo.commandBufferCount = 1;
1952 submitInfo.pCommandBuffers = primary.ptr();
1953
1954 ANGLE_TRY(queueSubmit(context, priority, submitInfo, nullptr, serialOut));
1955
1956 mPendingOneOffCommands.push_back({*serialOut, std::move(primary)});
1957
1958 return angle::Result::Continue;
1959 }
1960
// Blocks until all work on the queue of the given priority has finished, then reclaims any
// garbage whose serials have completed.
angle::Result RendererVk::queueWaitIdle(vk::Context *context, egl::ContextPriority priority)
{
    {
        // Queues require external synchronization.
        std::lock_guard<decltype(mQueueMutex)> lock(mQueueMutex);
        ANGLE_VK_TRY(context, vkQueueWaitIdle(mQueues[priority]));
    }

    ANGLE_TRY(cleanupGarbage(false));

    return angle::Result::Continue;
}
1972
// Blocks until the entire device is idle (all queues), then reclaims completed garbage.
angle::Result RendererVk::deviceWaitIdle(vk::Context *context)
{
    {
        // Held for consistency with queue submission paths that use the same mutex.
        std::lock_guard<decltype(mQueueMutex)> lock(mQueueMutex);
        ANGLE_VK_TRY(context, vkDeviceWaitIdle(mDevice));
    }

    ANGLE_TRY(cleanupGarbage(false));

    return angle::Result::Continue;
}
1984
// Presents via vkQueuePresentKHR while holding the queue mutex (queues require external
// synchronization).  Returns the raw VkResult to the caller rather than an angle::Result.
VkResult RendererVk::queuePresent(egl::ContextPriority priority,
                                  const VkPresentInfoKHR &presentInfo)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "RendererVk::queuePresent");

    std::lock_guard<decltype(mQueueMutex)> lock(mQueueMutex);

    // The inner scope bounds the lifetime of the nested trace event to the present call itself.
    {
        ANGLE_TRACE_EVENT0("gpu.angle", "vkQueuePresentKHR");
        return vkQueuePresentKHR(mQueues[priority], &presentInfo);
    }
}
1997
newSharedFence(vk::Context * context,vk::Shared<vk::Fence> * sharedFenceOut)1998 angle::Result RendererVk::newSharedFence(vk::Context *context,
1999 vk::Shared<vk::Fence> *sharedFenceOut)
2000 {
2001 vk::Fence fence;
2002 if (mFenceRecycler.empty())
2003 {
2004 VkFenceCreateInfo fenceCreateInfo = {};
2005 fenceCreateInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
2006 fenceCreateInfo.flags = 0;
2007 ANGLE_VK_TRY(context, fence.init(mDevice, fenceCreateInfo));
2008 }
2009 else
2010 {
2011 mFenceRecycler.fetch(&fence);
2012 ANGLE_VK_TRY(context, fence.reset(mDevice));
2013 }
2014 sharedFenceOut->assign(mDevice, std::move(fence));
2015 return angle::Result::Continue;
2016 }
2017
// Returns the subset of |featureBits| supported for |format| in the feature set selected by the
// |features| member pointer (linear/optimal/buffer).  Results are cached in mFormatProperties.
// The bufferFeatures field doubles as the "not yet queried" sentinel; entries are presumably
// initialized to kInvalidFormatFeatureFlags elsewhere — TODO confirm against the constructor.
template <VkFormatFeatureFlags VkFormatProperties::*features>
VkFormatFeatureFlags RendererVk::getFormatFeatureBits(VkFormat format,
                                                      const VkFormatFeatureFlags featureBits)
{
    ASSERT(static_cast<uint32_t>(format) < vk::kNumVkFormats);
    VkFormatProperties &deviceProperties = mFormatProperties[format];

    if (deviceProperties.bufferFeatures == kInvalidFormatFeatureFlags)
    {
        // If we don't have the actual device features, see if the requested features are mandatory.
        // If so, there's no need to query the device.
        const VkFormatProperties &mandatoryProperties = vk::GetMandatoryFormatSupport(format);
        if (IsMaskFlagSet(mandatoryProperties.*features, featureBits))
        {
            // Note: mandatory-support hits do not populate the cache, so the next query for a
            // different bit set will still fall through to the device query below.
            return featureBits;
        }

        // Otherwise query the format features and cache it.
        vkGetPhysicalDeviceFormatProperties(mPhysicalDevice, format, &deviceProperties);
        // Workaround for some Android devices that don't indicate filtering
        // support on D16_UNORM and they should.
        if (mFeatures.forceD16TexFilter.enabled && format == VK_FORMAT_D16_UNORM)
        {
            deviceProperties.*features |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
        }
    }

    return deviceProperties.*features & featureBits;
}
2047
// True if ALL of |featureBits| are supported for |format| in the selected feature set.
template <VkFormatFeatureFlags VkFormatProperties::*features>
bool RendererVk::hasFormatFeatureBits(VkFormat format, const VkFormatFeatureFlags featureBits)
{
    return IsMaskFlagSet(getFormatFeatureBits<features>(format, featureBits), featureBits);
}
2053
cleanupGarbage(bool block)2054 angle::Result RendererVk::cleanupGarbage(bool block)
2055 {
2056 std::lock_guard<decltype(mGarbageMutex)> lock(mGarbageMutex);
2057
2058 for (auto garbageIter = mSharedGarbage.begin(); garbageIter != mSharedGarbage.end();)
2059 {
2060 // Possibly 'counter' should be always zero when we add the object to garbage.
2061 vk::SharedGarbage &garbage = *garbageIter;
2062 if (garbage.destroyIfComplete(this, mLastCompletedQueueSerial))
2063 {
2064 garbageIter = mSharedGarbage.erase(garbageIter);
2065 }
2066 else
2067 {
2068 garbageIter++;
2069 }
2070 }
2071
2072 return angle::Result::Continue;
2073 }
2074
// Records a validation-layer message and bumps the running count; both are consumed and reset by
// getAndClearLastValidationMessage().  Only the most recent message text is kept.
void RendererVk::onNewValidationMessage(const std::string &message)
{
    mLastValidationMessage = message;
    ++mValidationMessageCount;
}
2080
getAndClearLastValidationMessage(uint32_t * countSinceLastClear)2081 std::string RendererVk::getAndClearLastValidationMessage(uint32_t *countSinceLastClear)
2082 {
2083 *countSinceLastClear = mValidationMessageCount;
2084 mValidationMessageCount = 0;
2085
2086 return std::move(mLastValidationMessage);
2087 }
2088
getMaxFenceWaitTimeNs() const2089 uint64_t RendererVk::getMaxFenceWaitTimeNs() const
2090 {
2091 constexpr uint64_t kMaxFenceWaitTimeNs = 120'000'000'000llu;
2092
2093 return kMaxFenceWaitTimeNs;
2094 }
2095
// Records that GPU work up to |serial| has completed.  The completed serial only ever moves
// forward; stale (out-of-order) notifications are ignored.
void RendererVk::onCompletedSerial(Serial serial)
{
    if (serial > mLastCompletedQueueSerial)
    {
        mLastCompletedQueueSerial = serial;
    }
}
2103
// Re-points volk's function-pointer dispatch at this renderer's instance and device if a
// different one has been loaded since (volk tracks a single loaded instance/device globally,
// per volkGetLoadedInstance/volkGetLoadedDevice).
void RendererVk::reloadVolkIfNeeded() const
{
    if ((mInstance != VK_NULL_HANDLE) && (volkGetLoadedInstance() != mInstance))
    {
        volkLoadInstance(mInstance);
    }

    if ((mDevice != VK_NULL_HANDLE) && (volkGetLoadedDevice() != mDevice))
    {
        volkLoadDevice(mDevice);
    }
}
2116
// Returns a primary command buffer already in the recording state (one-time-submit).  Recycles
// the oldest pending one-off buffer when the GPU has passed its serial; otherwise allocates a new
// buffer from the lazily-created one-off command pool.
angle::Result RendererVk::getCommandBufferOneOff(vk::Context *context,
                                                 vk::PrimaryCommandBuffer *commandBufferOut)
{
    if (!mOneOffCommandPool.valid())
    {
        VkCommandPoolCreateInfo createInfo = {};
        createInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
        // Individual buffers from this pool are reset for reuse below.
        createInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
        ANGLE_VK_TRY(context, mOneOffCommandPool.init(mDevice, createInfo));
    }

    // NOTE(review): the completion test uses strict '<' — a buffer whose serial equals the last
    // completed serial is not recycled; confirm whether '<=' would also be safe.
    if (!mPendingOneOffCommands.empty() &&
        mPendingOneOffCommands.front().serial < mLastCompletedQueueSerial)
    {
        *commandBufferOut = std::move(mPendingOneOffCommands.front().commandBuffer);
        mPendingOneOffCommands.pop_front();
        ANGLE_VK_TRY(context, commandBufferOut->reset());
    }
    else
    {
        VkCommandBufferAllocateInfo allocInfo = {};
        allocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
        allocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
        allocInfo.commandBufferCount = 1;
        allocInfo.commandPool = mOneOffCommandPool.getHandle();

        ANGLE_VK_TRY(context, commandBufferOut->init(context->getDevice(), allocInfo));
    }

    VkCommandBufferBeginInfo beginInfo = {};
    beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    beginInfo.pInheritanceInfo = nullptr;
    ANGLE_VK_TRY(context, commandBufferOut->begin(beginInfo));

    return angle::Result::Continue;
}
2154 } // namespace rx
2155