//
// Copyright 2016 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// RendererVk.cpp:
//    Implements the class methods for RendererVk.
//

#include "libANGLE/renderer/vulkan/RendererVk.h"

// Placing this first seems to solve an intellisense bug.
#include "libANGLE/renderer/vulkan/vk_utils.h"

#include <EGL/eglext.h>

#include "common/debug.h"
#include "common/platform.h"
#include "common/system_utils.h"
#include "common/vulkan/vulkan_icd.h"
#include "gpu_info_util/SystemInfo.h"
#include "libANGLE/Context.h"
#include "libANGLE/Display.h"
#include "libANGLE/renderer/driver_utils.h"
#include "libANGLE/renderer/glslang_wrapper_utils.h"
#include "libANGLE/renderer/vulkan/CompilerVk.h"
#include "libANGLE/renderer/vulkan/ContextVk.h"
#include "libANGLE/renderer/vulkan/DisplayVk.h"
#include "libANGLE/renderer/vulkan/FramebufferVk.h"
#include "libANGLE/renderer/vulkan/ProgramVk.h"
#include "libANGLE/renderer/vulkan/ResourceVk.h"
#include "libANGLE/renderer/vulkan/VertexArrayVk.h"
#include "libANGLE/renderer/vulkan/vk_caps_utils.h"
#include "libANGLE/renderer/vulkan/vk_format_utils.h"
#include "libANGLE/renderer/vulkan/vk_google_filtering_precision.h"
#include "libANGLE/trace.h"
#include "platform/PlatformMethods.h"

// Consts
namespace
{
constexpr VkFormatFeatureFlags kInvalidFormatFeatureFlags = static_cast<VkFormatFeatureFlags>(-1);
}  // anonymous namespace

namespace rx
{

namespace
{
// Update the pipeline cache every kPipelineCacheVkUpdatePeriod swaps.
constexpr uint32_t kPipelineCacheVkUpdatePeriod = 60;
// Per the Vulkan specification, as long as Vulkan 1.1+ is returned by vkEnumerateInstanceVersion,
// ANGLE must indicate the highest version of Vulkan functionality that it uses.  The Vulkan
// validation layers will issue messages for any core functionality that requires a higher version.
// This value must be increased whenever ANGLE starts using functionality from a newer core
// version of Vulkan.
constexpr uint32_t kPreferredVulkanAPIVersion = VK_API_VERSION_1_1;

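// Choose the Vulkan ICD to load based on the EGL_PLATFORM_ANGLE_DEVICE_TYPE_ANGLE attribute: the
// null device type maps to the mock ICD, the SwiftShader device type to the SwiftShader ICD, and
// anything else uses the system default.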
angle::vk::ICD ChooseICDFromAttribs(const egl::AttributeMap &attribs)
{
#if !defined(ANGLE_PLATFORM_ANDROID)
    // Mock ICD does not currently run on Android
    EGLAttrib deviceType = attribs.get(EGL_PLATFORM_ANGLE_DEVICE_TYPE_ANGLE,
                                       EGL_PLATFORM_ANGLE_DEVICE_TYPE_HARDWARE_ANGLE);

    switch (deviceType)
    {
        case EGL_PLATFORM_ANGLE_DEVICE_TYPE_HARDWARE_ANGLE:
            break;
        case EGL_PLATFORM_ANGLE_DEVICE_TYPE_NULL_ANGLE:
            return angle::vk::ICD::Mock;
        case EGL_PLATFORM_ANGLE_DEVICE_TYPE_SWIFTSHADER_ANGLE:
            return angle::vk::ICD::SwiftShader;
        default:
            UNREACHABLE();
            break;
    }
#endif  // !defined(ANGLE_PLATFORM_ANDROID)

    return angle::vk::ICD::Default;
}

bool StrLess(const char *a, const char *b)
{
    return strcmp(a, b) < 0;
}

bool ExtensionFound(const char *needle, const RendererVk::ExtensionNameList &haystack)
{
    // NOTE: The list must be sorted.
    return std::binary_search(haystack.begin(), haystack.end(), needle, StrLess);
}

VkResult VerifyExtensionsPresent(const RendererVk::ExtensionNameList &haystack,
                                 const RendererVk::ExtensionNameList &needles)
{
    // NOTE: The lists must be sorted.
    if (std::includes(haystack.begin(), haystack.end(), needles.begin(), needles.end(), StrLess))
    {
        return VK_SUCCESS;
    }
    for (const char *needle : needles)
    {
        if (!ExtensionFound(needle, haystack))
        {
            ERR() << "Extension not supported: " << needle;
        }
    }
    return VK_ERROR_EXTENSION_NOT_PRESENT;
}

// Array of validation error/warning messages that will be ignored; each entry should reference
// the bug that tracks it.
constexpr const char *kSkippedMessages[] = {
    // http://anglebug.com/2866
    "UNASSIGNED-CoreValidation-Shader-OutputNotConsumed",
    // http://anglebug.com/2796
    "UNASSIGNED-CoreValidation-Shader-PointSizeMissing",
    // http://anglebug.com/3832
    "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428",
    // http://anglebug.com/4063
    "VUID-VkDeviceCreateInfo-pNext-pNext",
    "VUID-VkPipelineRasterizationStateCreateInfo-pNext-pNext",
    "VUID_Undefined",
    // http://anglebug.com/3078
    "UNASSIGNED-CoreValidation-Shader-InterfaceTypeMismatch",
    // http://anglebug.com/4583
    "VUID-VkGraphicsPipelineCreateInfo-blendEnable-02023",
};

// Suppress known validation errors.  Returns true if the given message is known and should be
// ignored, false otherwise.
bool IsIgnoredDebugMessage(const char *message)
{
    if (!message)
    {
        return false;
    }
    for (const char *msg : kSkippedMessages)
    {
        if (strstr(message, msg) != nullptr)
        {
            return true;
        }
    }
    return false;
}

const char *GetVkObjectTypeName(VkObjectType type)
{
    switch (type)
    {
        case VK_OBJECT_TYPE_UNKNOWN:
            return "Unknown";
        case VK_OBJECT_TYPE_INSTANCE:
            return "Instance";
        case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
            return "Physical Device";
        case VK_OBJECT_TYPE_DEVICE:
            return "Device";
        case VK_OBJECT_TYPE_QUEUE:
            return "Queue";
        case VK_OBJECT_TYPE_SEMAPHORE:
            return "Semaphore";
        case VK_OBJECT_TYPE_COMMAND_BUFFER:
            return "Command Buffer";
        case VK_OBJECT_TYPE_FENCE:
            return "Fence";
        case VK_OBJECT_TYPE_DEVICE_MEMORY:
            return "Device Memory";
        case VK_OBJECT_TYPE_BUFFER:
            return "Buffer";
        case VK_OBJECT_TYPE_IMAGE:
            return "Image";
        case VK_OBJECT_TYPE_EVENT:
            return "Event";
        case VK_OBJECT_TYPE_QUERY_POOL:
            return "Query Pool";
        case VK_OBJECT_TYPE_BUFFER_VIEW:
            return "Buffer View";
        case VK_OBJECT_TYPE_IMAGE_VIEW:
            return "Image View";
        case VK_OBJECT_TYPE_SHADER_MODULE:
            return "Shader Module";
        case VK_OBJECT_TYPE_PIPELINE_CACHE:
            return "Pipeline Cache";
        case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
            return "Pipeline Layout";
        case VK_OBJECT_TYPE_RENDER_PASS:
            return "Render Pass";
        case VK_OBJECT_TYPE_PIPELINE:
            return "Pipeline";
        case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
            return "Descriptor Set Layout";
        case VK_OBJECT_TYPE_SAMPLER:
            return "Sampler";
        case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
            return "Descriptor Pool";
        case VK_OBJECT_TYPE_DESCRIPTOR_SET:
            return "Descriptor Set";
        case VK_OBJECT_TYPE_FRAMEBUFFER:
            return "Framebuffer";
        case VK_OBJECT_TYPE_COMMAND_POOL:
            return "Command Pool";
        case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION:
            return "Sampler YCbCr Conversion";
        case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE:
            return "Descriptor Update Template";
        case VK_OBJECT_TYPE_SURFACE_KHR:
            return "Surface";
        case VK_OBJECT_TYPE_SWAPCHAIN_KHR:
            return "Swapchain";
        case VK_OBJECT_TYPE_DISPLAY_KHR:
            return "Display";
        case VK_OBJECT_TYPE_DISPLAY_MODE_KHR:
            return "Display Mode";
        case VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT:
            return "Debug Report Callback";
        case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV:
            return "Indirect Commands Layout";
        case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT:
            return "Debug Utils Messenger";
        case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT:
            return "Validation Cache";
        case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV:
            return "Acceleration Structure";
        default:
            return "<Unrecognized>";
    }
}

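// VK_EXT_debug_utils messenger callback.  Per the Vulkan spec, the return value only controls
// whether the triggering Vulkan call is aborted, so normal logging returns VK_FALSE and the
// application keeps running while the message is recorded.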
VKAPI_ATTR VkBool32 VKAPI_CALL
DebugUtilsMessenger(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                    VkDebugUtilsMessageTypeFlagsEXT messageTypes,
                    const VkDebugUtilsMessengerCallbackDataEXT *callbackData,
                    void *userData)
{
    // See if it's an issue we are aware of and don't want to be spammed about.
    if (IsIgnoredDebugMessage(callbackData->pMessageIdName))
    {
        return VK_FALSE;
    }

    std::ostringstream log;
    if (callbackData->pMessageIdName)
    {
        log << "[ " << callbackData->pMessageIdName << " ] ";
    }
    log << callbackData->pMessage << std::endl;

    // Aesthetic value based on length of the function name, line number, etc.
    constexpr size_t kStartIndent = 28;

    // Output the debug marker hierarchy under which this error has occurred.
    size_t indent = kStartIndent;
    if (callbackData->queueLabelCount > 0)
    {
        log << std::string(indent++, ' ') << "<Queue Label Hierarchy:>" << std::endl;
        for (uint32_t i = 0; i < callbackData->queueLabelCount; ++i)
        {
            log << std::string(indent++, ' ') << callbackData->pQueueLabels[i].pLabelName
                << std::endl;
        }
    }
    if (callbackData->cmdBufLabelCount > 0)
    {
        log << std::string(indent++, ' ') << "<Command Buffer Label Hierarchy:>" << std::endl;
        for (uint32_t i = 0; i < callbackData->cmdBufLabelCount; ++i)
        {
            log << std::string(indent++, ' ') << callbackData->pCmdBufLabels[i].pLabelName
                << std::endl;
        }
    }
    // Output the objects involved in this error message.
    if (callbackData->objectCount > 0)
    {
        for (uint32_t i = 0; i < callbackData->objectCount; ++i)
        {
            const char *objectName = callbackData->pObjects[i].pObjectName;
            const char *objectType = GetVkObjectTypeName(callbackData->pObjects[i].objectType);
            uint64_t objectHandle  = callbackData->pObjects[i].objectHandle;
            log << std::string(indent, ' ') << "Object: ";
            if (objectHandle == 0)
            {
                log << "VK_NULL_HANDLE";
            }
            else
            {
                log << "0x" << std::hex << objectHandle << std::dec;
            }
            log << " (type = " << objectType << "(" << callbackData->pObjects[i].objectType
                << "))";
            if (objectName)
            {
                log << " [" << objectName << "]";
            }
            log << std::endl;
        }
    }

    bool isError    = (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) != 0;
    std::string msg = log.str();

    RendererVk *rendererVk = static_cast<RendererVk *>(userData);
    rendererVk->onNewValidationMessage(msg);

    if (isError)
    {
        ERR() << msg;
    }
    else
    {
        WARN() << msg;
    }

    return VK_FALSE;
}

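// Legacy VK_EXT_debug_report callback, used only when VK_EXT_debug_utils is not available.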
VKAPI_ATTR VkBool32 VKAPI_CALL DebugReportCallback(VkDebugReportFlagsEXT flags,
                                                   VkDebugReportObjectTypeEXT objectType,
                                                   uint64_t object,
                                                   size_t location,
                                                   int32_t messageCode,
                                                   const char *layerPrefix,
                                                   const char *message,
                                                   void *userData)
{
    if (IsIgnoredDebugMessage(message))
    {
        return VK_FALSE;
    }
    if ((flags & VK_DEBUG_REPORT_ERROR_BIT_EXT) != 0)
    {
        ERR() << message;
#if !defined(NDEBUG)
        // Abort the call in Debug builds.
        return VK_TRUE;
#endif
    }
    else if ((flags & VK_DEBUG_REPORT_WARNING_BIT_EXT) != 0)
    {
        WARN() << message;
    }
    else
    {
        // Uncomment this if you want Vulkan spam.
        // WARN() << message;
    }

    return VK_FALSE;
}

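// When validation layers are enabled by default at build time, defer to ShouldUseDebugLayers;
// otherwise the layers must be explicitly requested via
// EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE.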
bool ShouldUseValidationLayers(const egl::AttributeMap &attribs)
{
#if defined(ANGLE_ENABLE_VULKAN_VALIDATION_LAYERS_BY_DEFAULT)
    return ShouldUseDebugLayers(attribs);
#else
    EGLAttrib debugSetting =
        attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE);
    return debugSetting == EGL_TRUE;
#endif  // defined(ANGLE_ENABLE_VULKAN_VALIDATION_LAYERS_BY_DEFAULT)
}

gl::Version LimitVersionTo(const gl::Version &current, const gl::Version &lower)
{
    return std::min(current, lower);
}

ANGLE_MAYBE_UNUSED bool FencePropertiesCompatibleWithAndroid(
    const VkExternalFenceProperties &externalFenceProperties)
{
    // handleType here is the external fence type - we want a type that is compatible with
    // creating, exporting, and dup()ing an Android FD.

    // Imported handle types that can be exported - needed for vkGetFenceFdKHR().
    if ((externalFenceProperties.exportFromImportedHandleTypes &
         VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) == 0)
    {
        return false;
    }

    // Handle types that can be specified when creating a fence.
    if ((externalFenceProperties.compatibleHandleTypes &
         VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) == 0)
    {
        return false;
    }

    constexpr VkExternalFenceFeatureFlags kFeatureFlags =
        (VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR |
         VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR);
    if ((externalFenceProperties.externalFenceFeatures & kFeatureFlags) != kFeatureFlags)
    {
        return false;
    }

    return true;
}

ANGLE_MAYBE_UNUSED bool SemaphorePropertiesCompatibleWithAndroid(
    const VkExternalSemaphoreProperties &externalSemaphoreProperties)
{
    // handleType here is the external semaphore type - we want a type that is compatible with
    // importing an Android FD.

    constexpr VkExternalSemaphoreFeatureFlags kFeatureFlags =
        (VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR);
    if ((externalSemaphoreProperties.externalSemaphoreFeatures & kFeatureFlags) != kFeatureFlags)
    {
        return false;
    }

    return true;
}

}  // namespace

// RendererVk implementation.
RendererVk::RendererVk()
    : mDisplay(nullptr),
      mCapsInitialized(false),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mEnableDebugUtils(false),
      mEnabledICD(angle::vk::ICD::Default),
      mDebugUtilsMessenger(VK_NULL_HANDLE),
      mDebugReportCallback(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mExternalFenceProperties{},
      mExternalSemaphoreProperties{},
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mMaxVertexAttribDivisor(1),
      mMaxVertexAttribStride(0),
      mMinImportedHostPointerAlignment(1),
      mDevice(VK_NULL_HANDLE),
      mLastCompletedQueueSerial(mQueueSerialFactory.generate()),
      mCurrentQueueSerial(mQueueSerialFactory.generate()),
      mDeviceLost(false),
      mPipelineCacheVkUpdateTimeout(kPipelineCacheVkUpdatePeriod),
      mPipelineCacheDirty(false),
      mPipelineCacheInitialized(false),
      mGlslangInitialized(false)
{
    VkFormatProperties invalid = {0, 0, kInvalidFormatFeatureFlags};
    mFormatProperties.fill(invalid);

    // We currently don't have any big-endian devices in the list of supported platforms.  There
    // are a number of places in the Vulkan backend that make this assumption.  This assertion is
    // made early to fail immediately on big-endian platforms.
    ASSERT(IsLittleEndian());
}

RendererVk::~RendererVk()
{
    ASSERT(mSharedGarbage.empty());
}

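// Tears down everything the renderer owns.  Note the ordering: queues are drained first so all
// pending work completes, garbage and caches are destroyed next, then the device, then the debug
// callbacks, and finally the instance that owns them.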
void RendererVk::onDestroy()
{
    if (getFeatures().enableCommandProcessingThread.enabled)
    {
        // Shut down the worker thread.
        mCommandProcessor.shutdown(&mCommandProcessorThread);
    }

    // Force all commands to finish by flushing all queues.
    for (VkQueue queue : mQueues)
    {
        if (queue != VK_NULL_HANDLE)
        {
            vkQueueWaitIdle(queue);
        }
    }

    // Then assign an infinite "last completed" serial to force garbage to delete.
    mLastCompletedQueueSerial = Serial::Infinite();
    (void)cleanupGarbage(true);
    ASSERT(mSharedGarbage.empty());

    for (PendingOneOffCommands &pending : mPendingOneOffCommands)
    {
        pending.commandBuffer.releaseHandle();
    }

    mOneOffCommandPool.destroy(mDevice);

    mFenceRecycler.destroy(mDevice);

    mPipelineLayoutCache.destroy(mDevice);
    mDescriptorSetLayoutCache.destroy(mDevice);

    mPipelineCache.destroy(mDevice);
    mSamplerCache.destroy(this);
    mTheNullBuffer.destroy(this);

    mAllocator.destroy();

    if (mGlslangInitialized)
    {
        GlslangRelease();
        mGlslangInitialized = false;
    }

    if (mDevice)
    {
        vkDestroyDevice(mDevice, nullptr);
        mDevice = VK_NULL_HANDLE;
    }

    if (mDebugUtilsMessenger)
    {
        vkDestroyDebugUtilsMessengerEXT(mInstance, mDebugUtilsMessenger, nullptr);

        ASSERT(mDebugReportCallback == VK_NULL_HANDLE);
    }
    else if (mDebugReportCallback)
    {
        vkDestroyDebugReportCallbackEXT(mInstance, mDebugReportCallback, nullptr);
    }

    if (mInstance)
    {
        vkDestroyInstance(mInstance, nullptr);
        mInstance = VK_NULL_HANDLE;
    }

    mMemoryProperties.destroy();
    mPhysicalDevice = VK_NULL_HANDLE;
}

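// Called when VK_ERROR_DEVICE_LOST is encountered.  Advancing the completed serial to the last
// submitted serial lets in-flight resources be reclaimed even though their work never finished.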
void RendererVk::notifyDeviceLost()
{
    mLastCompletedQueueSerial = mLastSubmittedQueueSerial;
    mDeviceLost               = true;
    mDisplay->notifyDeviceLost();
}

bool RendererVk::isDeviceLost() const
{
    return mDeviceLost;
}

angle::Result RendererVk::initialize(DisplayVk *displayVk,
                                     egl::Display *display,
                                     const char *wsiExtension,
                                     const char *wsiLayer)
{
#if defined(ANGLE_SHARED_LIBVULKAN)
    // Set all vk* function ptrs
    ANGLE_VK_TRY(displayVk, volkInitialize());
#endif  // defined(ANGLE_SHARED_LIBVULKAN)

    mDisplay                         = display;
    const egl::AttributeMap &attribs = mDisplay->getAttributeMap();
    angle::vk::ScopedVkLoaderEnvironment scopedEnvironment(ShouldUseValidationLayers(attribs),
                                                           ChooseICDFromAttribs(attribs));
    mEnableValidationLayers = scopedEnvironment.canEnableValidationLayers();
    mEnabledICD             = scopedEnvironment.getEnabledICD();

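    // Note: The Vulkan enumeration entry points below follow the standard two-call idiom: the
    // first call with a null output array queries the count, the second fills the sized array.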
    // Gather global layer properties.
    uint32_t instanceLayerCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));

    std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
    if (instanceLayerCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateInstanceLayerProperties(&instanceLayerCount,
                                                                   instanceLayerProps.data()));
    }

    VulkanLayerVector enabledInstanceLayerNames;
    if (mEnableValidationLayers)
    {
        bool layersRequested =
            (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) == EGL_TRUE);
        mEnableValidationLayers = GetAvailableValidationLayers(instanceLayerProps, layersRequested,
                                                               &enabledInstanceLayerNames);
    }

    if (wsiLayer)
    {
        enabledInstanceLayerNames.push_back(wsiLayer);
    }

    // Enumerate instance extensions that are provided by the Vulkan implementation and implicit
    // layers.
    uint32_t instanceExtensionCount = 0;
    ANGLE_VK_TRY(displayVk,
                 vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
    if (instanceExtensionCount > 0)
    {
        ANGLE_VK_TRY(displayVk,
                     vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
                                                            instanceExtensionProps.data()));
    }

    // Enumerate instance extensions that are provided by explicit layers.
    for (const char *layerName : enabledInstanceLayerNames)
    {
        uint32_t previousExtensionCount      = static_cast<uint32_t>(instanceExtensionProps.size());
        uint32_t instanceLayerExtensionCount = 0;
        ANGLE_VK_TRY(displayVk, vkEnumerateInstanceExtensionProperties(
                                    layerName, &instanceLayerExtensionCount, nullptr));
        instanceExtensionProps.resize(previousExtensionCount + instanceLayerExtensionCount);
        ANGLE_VK_TRY(displayVk, vkEnumerateInstanceExtensionProperties(
                                    layerName, &instanceLayerExtensionCount,
                                    instanceExtensionProps.data() + previousExtensionCount));
    }

    ExtensionNameList instanceExtensionNames;
    if (!instanceExtensionProps.empty())
    {
        for (const VkExtensionProperties &i : instanceExtensionProps)
        {
            instanceExtensionNames.push_back(i.extensionName);
        }
        std::sort(instanceExtensionNames.begin(), instanceExtensionNames.end(), StrLess);
    }

    ExtensionNameList enabledInstanceExtensions;
    enabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
    enabledInstanceExtensions.push_back(wsiExtension);
    mEnableDebugUtils = mEnableValidationLayers &&
                        ExtensionFound(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, instanceExtensionNames);

    bool enableDebugReport =
        mEnableValidationLayers && !mEnableDebugUtils &&
        ExtensionFound(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, instanceExtensionNames);

    if (mEnableDebugUtils)
    {
        enabledInstanceExtensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
    }
    else if (enableDebugReport)
    {
        enabledInstanceExtensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
    }

    if (ExtensionFound(VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME, instanceExtensionNames))
    {
        enabledInstanceExtensions.push_back(VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME);
        ANGLE_FEATURE_CONDITION(&mFeatures, supportsSwapchainColorspace, true);
    }

    // Verify the required extensions are in the extension names set.  Fail if not.
    std::sort(enabledInstanceExtensions.begin(), enabledInstanceExtensions.end(), StrLess);
    ANGLE_VK_TRY(displayVk,
                 VerifyExtensionsPresent(instanceExtensionNames, enabledInstanceExtensions));

    // Enable VK_KHR_get_physical_device_properties_2 if available.
    if (ExtensionFound(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
                       instanceExtensionNames))
    {
        enabledInstanceExtensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }

    VkApplicationInfo applicationInfo  = {};
    applicationInfo.sType              = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    applicationInfo.pApplicationName   = "ANGLE";
    applicationInfo.applicationVersion = 1;
    applicationInfo.pEngineName        = "ANGLE";
    applicationInfo.engineVersion      = 1;

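    // vkEnumerateInstanceVersion was introduced in Vulkan 1.1, so if the loader does not expose
    // it, only a Vulkan 1.0 instance can be created.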
    auto enumerateInstanceVersion = reinterpret_cast<PFN_vkEnumerateInstanceVersion>(
        vkGetInstanceProcAddr(mInstance, "vkEnumerateInstanceVersion"));
    if (!enumerateInstanceVersion)
    {
        applicationInfo.apiVersion = VK_API_VERSION_1_0;
    }
    else
    {
        uint32_t apiVersion = VK_API_VERSION_1_0;
        ANGLE_VK_TRY(displayVk, enumerateInstanceVersion(&apiVersion));
        if ((VK_VERSION_MAJOR(apiVersion) > 1) || (VK_VERSION_MINOR(apiVersion) >= 1))
        {
            // This is the highest version of core Vulkan functionality that ANGLE uses.
            applicationInfo.apiVersion = kPreferredVulkanAPIVersion;
        }
        else
        {
            // Since only 1.0 instance-level functionality is available, this must be set to 1.0.
            applicationInfo.apiVersion = VK_API_VERSION_1_0;
        }
    }

    VkInstanceCreateInfo instanceInfo = {};
    instanceInfo.sType                = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    instanceInfo.flags                = 0;
    instanceInfo.pApplicationInfo     = &applicationInfo;

    // Enable requested layers and extensions.
    instanceInfo.enabledExtensionCount = static_cast<uint32_t>(enabledInstanceExtensions.size());
    instanceInfo.ppEnabledExtensionNames =
        enabledInstanceExtensions.empty() ? nullptr : enabledInstanceExtensions.data();
    instanceInfo.enabledLayerCount   = static_cast<uint32_t>(enabledInstanceLayerNames.size());
    instanceInfo.ppEnabledLayerNames = enabledInstanceLayerNames.data();
    ANGLE_VK_TRY(displayVk, vkCreateInstance(&instanceInfo, nullptr, &mInstance));
#if defined(ANGLE_SHARED_LIBVULKAN)
    // Load volk if we are linking dynamically
    volkLoadInstance(mInstance);
#endif  // defined(ANGLE_SHARED_LIBVULKAN)

    if (mEnableDebugUtils)
    {
        // Use the newer EXT_debug_utils if it exists.
#if !defined(ANGLE_SHARED_LIBVULKAN)
        InitDebugUtilsEXTFunctions(mInstance);
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)

        // Create the messenger callback.
        VkDebugUtilsMessengerCreateInfoEXT messengerInfo = {};

        constexpr VkDebugUtilsMessageSeverityFlagsEXT kSeveritiesToLog =
            VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT |
            VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;

        constexpr VkDebugUtilsMessageTypeFlagsEXT kMessagesToLog =
            VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
            VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
            VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;

        messengerInfo.sType           = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
        messengerInfo.messageSeverity = kSeveritiesToLog;
        messengerInfo.messageType     = kMessagesToLog;
        messengerInfo.pfnUserCallback = &DebugUtilsMessenger;
        messengerInfo.pUserData       = this;

        ANGLE_VK_TRY(displayVk, vkCreateDebugUtilsMessengerEXT(mInstance, &messengerInfo, nullptr,
                                                               &mDebugUtilsMessenger));
    }
    else if (enableDebugReport)
    {
        // Fall back to EXT_debug_report.
#if !defined(ANGLE_SHARED_LIBVULKAN)
        InitDebugReportEXTFunctions(mInstance);
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)

        VkDebugReportCallbackCreateInfoEXT debugReportInfo = {};

        debugReportInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
        debugReportInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT;
        debugReportInfo.pfnCallback = &DebugReportCallback;
        debugReportInfo.pUserData   = this;

        ANGLE_VK_TRY(displayVk, vkCreateDebugReportCallbackEXT(mInstance, &debugReportInfo, nullptr,
                                                               &mDebugReportCallback));
    }

    if (std::find(enabledInstanceExtensions.begin(), enabledInstanceExtensions.end(),
                  VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) !=
        enabledInstanceExtensions.end())
    {
#if !defined(ANGLE_SHARED_LIBVULKAN)
        InitGetPhysicalDeviceProperties2KHRFunctions(mInstance);
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)
        ASSERT(vkGetPhysicalDeviceProperties2KHR);
    }

    uint32_t physicalDeviceCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
    ANGLE_VK_CHECK(displayVk, physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    // TODO(jmadill): Handle multiple physical devices.  For now, use the first device.
    std::vector<VkPhysicalDevice> physicalDevices(physicalDeviceCount);
    ANGLE_VK_TRY(displayVk, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount,
                                                       physicalDevices.data()));
    ChoosePhysicalDevice(physicalDevices, mEnabledICD, &mPhysicalDevice,
                         &mPhysicalDeviceProperties);

    mGarbageCollectionFlushThreshold =
        static_cast<uint32_t>(mPhysicalDeviceProperties.limits.maxMemoryAllocationCount *
                              kPercentMaxMemoryAllocationCount);

    vkGetPhysicalDeviceFeatures(mPhysicalDevice, &mPhysicalDeviceFeatures);

    // Ensure we can find a graphics queue family.
    uint32_t queueCount = 0;
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount, nullptr);

    ANGLE_VK_CHECK(displayVk, queueCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    mQueueFamilyProperties.resize(queueCount);
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueCount,
                                             mQueueFamilyProperties.data());

    size_t graphicsQueueFamilyCount            = 0;
    uint32_t firstGraphicsQueueFamily          = 0;
    constexpr VkQueueFlags kGraphicsAndCompute = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
    for (uint32_t familyIndex = 0; familyIndex < queueCount; ++familyIndex)
    {
        const auto &queueInfo = mQueueFamilyProperties[familyIndex];
        if ((queueInfo.queueFlags & kGraphicsAndCompute) == kGraphicsAndCompute)
        {
            ASSERT(queueInfo.queueCount > 0);
            graphicsQueueFamilyCount++;
            if (firstGraphicsQueueFamily == 0)
            {
                firstGraphicsQueueFamily = familyIndex;
            }
            break;
        }
    }

    ANGLE_VK_CHECK(displayVk, graphicsQueueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    // If only one queue family, go ahead and initialize the device.  If there is more than one
    // queue, we'll have to wait until we see a WindowSurface to know which supports present.
    if (graphicsQueueFamilyCount == 1)
    {
        ANGLE_TRY(initializeDevice(displayVk, firstGraphicsQueueFamily));
    }

    // Create VMA allocator
    ANGLE_VK_TRY(displayVk,
                 mAllocator.init(mPhysicalDevice, mDevice, mInstance, applicationInfo.apiVersion));

    // Store the physical device memory properties so we can find the right memory pools.
    mMemoryProperties.init(mPhysicalDevice);

    // Must be initialized after the allocator and memory properties.
    {
        VkBufferCreateInfo bufferCreateInfo = {};
        bufferCreateInfo.sType              = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        bufferCreateInfo.size               = 16;
        bufferCreateInfo.usage              = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
        ANGLE_TRY(
            mTheNullBuffer.init(displayVk, bufferCreateInfo, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT));
    }

    if (!mGlslangInitialized)
    {
        GlslangInitialize();
        mGlslangInitialized = true;
    }

    // Initialize the format table.
    mFormatTable.initialize(this, &mNativeTextureCaps, &mNativeCaps.compressedTextureFormats);

    if (getFeatures().enableCommandProcessingThread.enabled)
    {
        mCommandProcessorThread =
            std::thread(&CommandProcessor::processCommandProcessorTasks, &mCommandProcessor);
    }
    return angle::Result::Continue;
}

void RendererVk::queryDeviceExtensionFeatures(const ExtensionNameList &deviceExtensionNames)
{
    // Default-initialize all extension features to false.
    mLineRasterizationFeatures = {};
    mLineRasterizationFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT;

    mProvokingVertexFeatures = {};
    mProvokingVertexFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT;

    mVertexAttributeDivisorFeatures = {};
    mVertexAttributeDivisorFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;

    mVertexAttributeDivisorProperties = {};
    mVertexAttributeDivisorProperties.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT;

    mTransformFeedbackFeatures = {};
    mTransformFeedbackFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT;

    mIndexTypeUint8Features       = {};
    mIndexTypeUint8Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT;

    mPhysicalDeviceSubgroupProperties       = {};
    mPhysicalDeviceSubgroupProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;

    mPhysicalDeviceExternalMemoryHostProperties = {};
    mPhysicalDeviceExternalMemoryHostProperties.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT;

    if (!vkGetPhysicalDeviceProperties2KHR || !vkGetPhysicalDeviceFeatures2KHR)
    {
        return;
    }

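    // Each supported extension's feature/property struct is chained onto a single query below, so
    // one vkGetPhysicalDeviceFeatures2KHR/vkGetPhysicalDeviceProperties2KHR call fills them all.
    // The pNext links are severed again at the end of this function, allowing the structs to be
    // re-chained onto VkDeviceCreateInfo individually later.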
    // Query features and properties.
    VkPhysicalDeviceFeatures2KHR deviceFeatures = {};
    deviceFeatures.sType                        = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;

    VkPhysicalDeviceProperties2 deviceProperties = {};
    deviceProperties.sType                       = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;

    // Query line rasterization features
    if (ExtensionFound(VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME, deviceExtensionNames))
    {
        vk::AddToPNextChain(&deviceFeatures, &mLineRasterizationFeatures);
    }

    // Query provoking vertex features
    if (ExtensionFound(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME, deviceExtensionNames))
    {
        vk::AddToPNextChain(&deviceFeatures, &mProvokingVertexFeatures);
    }

    // Query attribute divisor features and properties
    if (ExtensionFound(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME, deviceExtensionNames))
    {
        vk::AddToPNextChain(&deviceFeatures, &mVertexAttributeDivisorFeatures);
        vk::AddToPNextChain(&deviceProperties, &mVertexAttributeDivisorProperties);
    }

    // Query transform feedback features
    if (ExtensionFound(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME, deviceExtensionNames))
    {
        vk::AddToPNextChain(&deviceFeatures, &mTransformFeedbackFeatures);
    }

    // Query uint8 index type features
    if (ExtensionFound(VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME, deviceExtensionNames))
    {
        vk::AddToPNextChain(&deviceFeatures, &mIndexTypeUint8Features);
    }

    // Query external memory host properties
    if (ExtensionFound(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME, deviceExtensionNames))
    {
        vk::AddToPNextChain(&deviceProperties, &mPhysicalDeviceExternalMemoryHostProperties);
    }

    // Query subgroup properties
    vk::AddToPNextChain(&deviceProperties, &mPhysicalDeviceSubgroupProperties);

    vkGetPhysicalDeviceFeatures2KHR(mPhysicalDevice, &deviceFeatures);
    vkGetPhysicalDeviceProperties2KHR(mPhysicalDevice, &deviceProperties);

    // Fence properties
    if (mFeatures.supportsExternalFenceCapabilities.enabled)
    {
        mExternalFenceProperties.sType = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES;

        VkPhysicalDeviceExternalFenceInfo externalFenceInfo = {};
        externalFenceInfo.sType      = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO;
        externalFenceInfo.handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR;

        vkGetPhysicalDeviceExternalFencePropertiesKHR(mPhysicalDevice, &externalFenceInfo,
                                                      &mExternalFenceProperties);
    }

    // Semaphore properties
    if (mFeatures.supportsExternalSemaphoreCapabilities.enabled)
    {
        mExternalSemaphoreProperties.sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES;

        VkPhysicalDeviceExternalSemaphoreInfo externalSemaphoreInfo = {};
        externalSemaphoreInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO;
        externalSemaphoreInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR;

        vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(mPhysicalDevice, &externalSemaphoreInfo,
                                                          &mExternalSemaphoreProperties);
    }

    // Clean up pNext chains
    mLineRasterizationFeatures.pNext                  = nullptr;
    mProvokingVertexFeatures.pNext                    = nullptr;
    mVertexAttributeDivisorFeatures.pNext             = nullptr;
    mVertexAttributeDivisorProperties.pNext           = nullptr;
    mTransformFeedbackFeatures.pNext                  = nullptr;
    mIndexTypeUint8Features.pNext                     = nullptr;
    mPhysicalDeviceSubgroupProperties.pNext           = nullptr;
    mPhysicalDeviceExternalMemoryHostProperties.pNext = nullptr;
}

angle::Result RendererVk::initializeDevice(DisplayVk *displayVk, uint32_t queueFamilyIndex)
{
    uint32_t deviceLayerCount = 0;
    ANGLE_VK_TRY(displayVk,
                 vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));

    std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
    if (deviceLayerCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
                                                                 deviceLayerProps.data()));
    }

    VulkanLayerVector enabledDeviceLayerNames;
    if (mEnableValidationLayers)
    {
        mEnableValidationLayers =
            GetAvailableValidationLayers(deviceLayerProps, false, &enabledDeviceLayerNames);
    }

    const char *wsiLayer = displayVk->getWSILayer();
    if (wsiLayer)
    {
        enabledDeviceLayerNames.push_back(wsiLayer);
    }

    // Enumerate device extensions that are provided by the Vulkan implementation and implicit
    // layers.
    uint32_t deviceExtensionCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                                 &deviceExtensionCount, nullptr));

    std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
    if (deviceExtensionCount > 0)
    {
        ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
                                                                     &deviceExtensionCount,
                                                                     deviceExtensionProps.data()));
    }

    // Enumerate device extensions that are provided by explicit layers.
    for (const char *layerName : enabledDeviceLayerNames)
    {
        uint32_t previousExtensionCount    = static_cast<uint32_t>(deviceExtensionProps.size());
        uint32_t deviceLayerExtensionCount = 0;
        ANGLE_VK_TRY(displayVk,
                     vkEnumerateDeviceExtensionProperties(mPhysicalDevice, layerName,
                                                          &deviceLayerExtensionCount, nullptr));
        deviceExtensionProps.resize(previousExtensionCount + deviceLayerExtensionCount);
        ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(
                                    mPhysicalDevice, layerName, &deviceLayerExtensionCount,
                                    deviceExtensionProps.data() + previousExtensionCount));
    }

    ExtensionNameList deviceExtensionNames;
    if (!deviceExtensionProps.empty())
    {
        ASSERT(deviceExtensionNames.size() <= deviceExtensionProps.size());
        for (const VkExtensionProperties &prop : deviceExtensionProps)
        {
            deviceExtensionNames.push_back(prop.extensionName);
        }
        std::sort(deviceExtensionNames.begin(), deviceExtensionNames.end(), StrLess);
    }

    ExtensionNameList enabledDeviceExtensions;
    enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    // Queues: map low, medium, and high priority to whatever is supported, up to 3 queues.
    uint32_t queueCount = std::min(mQueueFamilyProperties[queueFamilyIndex].queueCount,
                                   static_cast<uint32_t>(egl::ContextPriority::EnumCount));

    constexpr float kVulkanQueuePriorityLow    = 0.0;
    constexpr float kVulkanQueuePriorityMedium = 0.4;
    constexpr float kVulkanQueuePriorityHigh   = 1.0;

    // Index order: Low, High, Medium - so no need to rearrange according to count:
    // If we have 1 queue - all are the same; if 2 - Low and High; if 3 - Low, High, and Medium.
    constexpr uint32_t kQueueIndexLow    = 0;
    constexpr uint32_t kQueueIndexHigh   = 1;
    constexpr uint32_t kQueueIndexMedium = 2;

    constexpr float queuePriorities[static_cast<uint32_t>(egl::ContextPriority::EnumCount)] = {
        kVulkanQueuePriorityMedium, kVulkanQueuePriorityHigh, kVulkanQueuePriorityLow};

    VkDeviceQueueCreateInfo queueCreateInfo = {};
    queueCreateInfo.sType                   = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.flags                   = 0;
    queueCreateInfo.queueFamilyIndex        = queueFamilyIndex;
    queueCreateInfo.queueCount              = queueCount;
    queueCreateInfo.pQueuePriorities        = queuePriorities;

    // Query extensions and their features.
    queryDeviceExtensionFeatures(deviceExtensionNames);

    // Initialize features and workarounds.
    initFeatures(displayVk, deviceExtensionNames);

    // Selectively enable KHR_MAINTENANCE1 to support viewport flipping.
    if ((getFeatures().flipViewportY.enabled) &&
        (mPhysicalDeviceProperties.apiVersion < VK_MAKE_VERSION(1, 1, 0)))
    {
        enabledDeviceExtensions.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
    }
    if (getFeatures().supportsIncrementalPresent.enabled)
    {
        enabledDeviceExtensions.push_back(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME);
    }

#if defined(ANGLE_PLATFORM_ANDROID)
    if (getFeatures().supportsAndroidHardwareBuffer.enabled)
    {
        enabledDeviceExtensions.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
        enabledDeviceExtensions.push_back(
            VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
#    if !defined(ANGLE_SHARED_LIBVULKAN)
        InitExternalMemoryHardwareBufferANDROIDFunctions(mInstance);
#    endif  // !defined(ANGLE_SHARED_LIBVULKAN)
    }
#else
    ASSERT(!getFeatures().supportsAndroidHardwareBuffer.enabled);
#endif

#if defined(ANGLE_PLATFORM_GGP)
    if (getFeatures().supportsGGPFrameToken.enabled)
    {
        enabledDeviceExtensions.push_back(VK_GGP_FRAME_TOKEN_EXTENSION_NAME);
    }
    ANGLE_VK_CHECK(displayVk, getFeatures().supportsGGPFrameToken.enabled,
                   VK_ERROR_EXTENSION_NOT_PRESENT);
#else
    ASSERT(!getFeatures().supportsGGPFrameToken.enabled);
#endif

    if (getFeatures().supportsAndroidHardwareBuffer.enabled ||
        getFeatures().supportsExternalMemoryFd.enabled ||
        getFeatures().supportsExternalMemoryFuchsia.enabled)
    {
        enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
    }

    if (getFeatures().supportsExternalMemoryFd.enabled)
    {
        enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
    }

    if (getFeatures().supportsExternalMemoryFuchsia.enabled)
    {
        enabledDeviceExtensions.push_back(VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME);
    }

    if (getFeatures().supportsExternalSemaphoreFd.enabled ||
        getFeatures().supportsExternalSemaphoreFuchsia.enabled)
    {
        enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
#if !defined(ANGLE_SHARED_LIBVULKAN)
        InitExternalSemaphoreFdFunctions(mInstance);
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)
    }

    if (getFeatures().supportsExternalSemaphoreFd.enabled)
    {
        enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
    }

    if (getFeatures().supportsExternalSemaphoreCapabilities.enabled)
    {
        enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
#if !defined(ANGLE_SHARED_LIBVULKAN)
        InitExternalSemaphoreCapabilitiesFunctions(mInstance);
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)
    }

    if (getFeatures().supportsExternalFenceFd.enabled)
    {
        enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME);
#if !defined(ANGLE_SHARED_LIBVULKAN)
        InitExternalFenceFdFunctions(mInstance);
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)
    }

    if (getFeatures().supportsExternalFenceCapabilities.enabled)
    {
        enabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME);
#if !defined(ANGLE_SHARED_LIBVULKAN)
        InitExternalFenceCapabilitiesFunctions(mInstance);
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)
    }

    if (getFeatures().supportsExternalSemaphoreFuchsia.enabled)
    {
        enabledDeviceExtensions.push_back(VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
    }

    if (getFeatures().supportsShaderStencilExport.enabled)
    {
        enabledDeviceExtensions.push_back(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME);
    }

    std::sort(enabledDeviceExtensions.begin(), enabledDeviceExtensions.end(), StrLess);
    ANGLE_VK_TRY(displayVk, VerifyExtensionsPresent(deviceExtensionNames, enabledDeviceExtensions));

    // Select additional features to be enabled.
    VkPhysicalDeviceFeatures2KHR enabledFeatures = {};
    enabledFeatures.sType                        = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    // Used to support framebuffers with multiple attachments:
    enabledFeatures.features.independentBlend = mPhysicalDeviceFeatures.independentBlend;
    // Used to support robust buffer access:
    enabledFeatures.features.robustBufferAccess = mPhysicalDeviceFeatures.robustBufferAccess;
    // Used to support anisotropic filtering:
    enabledFeatures.features.samplerAnisotropy = mPhysicalDeviceFeatures.samplerAnisotropy;
    // Used to emulate transform feedback:
    enabledFeatures.features.vertexPipelineStoresAndAtomics =
        mPhysicalDeviceFeatures.vertexPipelineStoresAndAtomics;
    // Used to implement storage buffers and images in the fragment shader:
    enabledFeatures.features.fragmentStoresAndAtomics =
        mPhysicalDeviceFeatures.fragmentStoresAndAtomics;
    // Used to support geometry shaders:
    enabledFeatures.features.geometryShader = mPhysicalDeviceFeatures.geometryShader;
    // Used to support EXT_gpu_shader5:
    enabledFeatures.features.shaderImageGatherExtended =
        mPhysicalDeviceFeatures.shaderImageGatherExtended;
    // Used to support EXT_gpu_shader5:
    enabledFeatures.features.shaderUniformBufferArrayDynamicIndexing =
        mPhysicalDeviceFeatures.shaderUniformBufferArrayDynamicIndexing;
    // Used to support EXT_gpu_shader5 and sampler-array-of-array emulation:
    enabledFeatures.features.shaderSampledImageArrayDynamicIndexing =
        mPhysicalDeviceFeatures.shaderSampledImageArrayDynamicIndexing;
    // Used to support atomic counter emulation:
    enabledFeatures.features.shaderStorageBufferArrayDynamicIndexing =
        mPhysicalDeviceFeatures.shaderStorageBufferArrayDynamicIndexing;
    // Used to support APPLE_clip_distance:
    enabledFeatures.features.shaderClipDistance = mPhysicalDeviceFeatures.shaderClipDistance;

    if (!vk::CommandBuffer::ExecutesInline())
    {
        enabledFeatures.features.inheritedQueries = mPhysicalDeviceFeatures.inheritedQueries;
    }

    // Setup device initialization struct
    VkDeviceCreateInfo createInfo = {};

    // Based on available extension features, decide on which extensions and features to enable.

    if (mLineRasterizationFeatures.bresenhamLines)
    {
        enabledDeviceExtensions.push_back(VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME);
        vk::AddToPNextChain(&createInfo, &mLineRasterizationFeatures);
    }

    if (mProvokingVertexFeatures.provokingVertexLast)
    {
        enabledDeviceExtensions.push_back(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME);
        vk::AddToPNextChain(&createInfo, &mProvokingVertexFeatures);
    }

    if (mVertexAttributeDivisorFeatures.vertexAttributeInstanceRateDivisor)
    {
        enabledDeviceExtensions.push_back(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
        vk::AddToPNextChain(&createInfo, &mVertexAttributeDivisorFeatures);

        // We only store an 8-bit divisor in GraphicsPipelineDesc, so cap the advertised value;
        // greater divisors are emulated.
        mMaxVertexAttribDivisor =
            std::min(mVertexAttributeDivisorProperties.maxVertexAttribDivisor,
                     static_cast<uint32_t>(std::numeric_limits<uint8_t>::max()));
    }

    if (getFeatures().supportsTransformFeedbackExtension.enabled)
    {
        enabledDeviceExtensions.push_back(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
        vk::AddToPNextChain(&createInfo, &mTransformFeedbackFeatures);
    }

    if (getFeatures().supportsIndexTypeUint8.enabled)
    {
        enabledDeviceExtensions.push_back(VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME);
        vk::AddToPNextChain(&createInfo, &mIndexTypeUint8Features);
    }

    if (getFeatures().supportsExternalMemoryHost.enabled)
    {
        enabledDeviceExtensions.push_back(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME);
        mMinImportedHostPointerAlignment =
            mPhysicalDeviceExternalMemoryHostProperties.minImportedHostPointerAlignment;
#if !defined(ANGLE_SHARED_LIBVULKAN)
        InitExternalMemoryHostFunctions(mInstance);
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)
    }

    createInfo.sType                 = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    createInfo.flags                 = 0;
    createInfo.queueCreateInfoCount  = 1;
    createInfo.pQueueCreateInfos     = &queueCreateInfo;
    createInfo.enabledLayerCount     = static_cast<uint32_t>(enabledDeviceLayerNames.size());
    createInfo.ppEnabledLayerNames   = enabledDeviceLayerNames.data();
    createInfo.enabledExtensionCount = static_cast<uint32_t>(enabledDeviceExtensions.size());
    createInfo.ppEnabledExtensionNames =
        enabledDeviceExtensions.empty() ? nullptr : enabledDeviceExtensions.data();
    // Enable core features without assuming VkPhysicalDeviceFeatures2KHR is accepted in the pNext
    // chain of VkDeviceCreateInfo.
    createInfo.pEnabledFeatures = &enabledFeatures.features;

    ANGLE_VK_TRY(displayVk, vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));
#if defined(ANGLE_SHARED_LIBVULKAN)
    // Load volk if we are loading dynamically
    volkLoadDevice(mDevice);
#endif  // defined(ANGLE_SHARED_LIBVULKAN)

    mCurrentQueueFamilyIndex = queueFamilyIndex;

    // When there is only 1 queue, use it (the Low index) for all priorities.  Identify all of
    // them as Medium, since that is the default.
    VkQueue queue;
    vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, kQueueIndexLow, &queue);
    mQueues[egl::ContextPriority::Low]        = queue;
    mQueues[egl::ContextPriority::Medium]     = queue;
    mQueues[egl::ContextPriority::High]       = queue;
    mPriorities[egl::ContextPriority::Low]    = egl::ContextPriority::Medium;
    mPriorities[egl::ContextPriority::Medium] = egl::ContextPriority::Medium;
    mPriorities[egl::ContextPriority::High]   = egl::ContextPriority::Medium;

    // If there are at least 2 queues, High gets its own queue.
    if (queueCount > 1)
    {
        vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, kQueueIndexHigh,
                         &mQueues[egl::ContextPriority::High]);
        mPriorities[egl::ContextPriority::High] = egl::ContextPriority::High;
    }
    // If there are at least 3 queues, Medium gets its own queue.  Adjust Low priority.
    if (queueCount > 2)
    {
        vkGetDeviceQueue(mDevice, mCurrentQueueFamilyIndex, kQueueIndexMedium,
                         &mQueues[egl::ContextPriority::Medium]);
        mPriorities[egl::ContextPriority::Low] = egl::ContextPriority::Low;
    }

#if !defined(ANGLE_SHARED_LIBVULKAN)
    if (getFeatures().supportsTransformFeedbackExtension.enabled)
    {
        InitTransformFeedbackEXTFunctions(mDevice);
    }
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)

    // Initialize the Vulkan pipeline cache.
    bool success = false;
    ANGLE_TRY(initPipelineCache(displayVk, &mPipelineCache, &success));

    return angle::Result::Continue;
}

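// Selects a queue family that supports both graphics and present for the given surface,
// initializing the device on that family if it has not been created yet.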
angle::Result RendererVk::selectPresentQueueForSurface(DisplayVk *displayVk,
                                                       VkSurfaceKHR surface,
                                                       uint32_t *presentQueueOut)
{
    // We've already initialized a device, and can't re-create it unless it's never been used.
    // TODO(jmadill): Handle the re-creation case if necessary.
    if (mDevice != VK_NULL_HANDLE)
    {
        ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());

        // Check if the current device supports present on this surface.
        VkBool32 supportsPresent = VK_FALSE;
        ANGLE_VK_TRY(displayVk,
                     vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
                                                          surface, &supportsPresent));

        if (supportsPresent == VK_TRUE)
        {
            *presentQueueOut = mCurrentQueueFamilyIndex;
            return angle::Result::Continue;
        }
    }

    // Find a graphics and present queue.
    Optional<uint32_t> newPresentQueue;
    uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
    constexpr VkQueueFlags kGraphicsAndCompute = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
    for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
    {
        const auto &queueInfo = mQueueFamilyProperties[queueIndex];
        if ((queueInfo.queueFlags & kGraphicsAndCompute) == kGraphicsAndCompute)
        {
            VkBool32 supportsPresent = VK_FALSE;
            ANGLE_VK_TRY(displayVk, vkGetPhysicalDeviceSurfaceSupportKHR(
                                        mPhysicalDevice, queueIndex, surface, &supportsPresent));

            if (supportsPresent == VK_TRUE)
            {
                newPresentQueue = queueIndex;
                break;
            }
        }
    }

    ANGLE_VK_CHECK(displayVk, newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
    ANGLE_TRY(initializeDevice(displayVk, newPresentQueue.value()));

    *presentQueueOut = newPresentQueue.value();
    return angle::Result::Continue;
}

std::string RendererVk::getVendorString() const
{
    return GetVendorString(mPhysicalDeviceProperties.vendorID);
}

std::string RendererVk::getRendererDescription() const
{
    std::stringstream strstr;

    uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;

    strstr << "Vulkan ";
    strstr << VK_VERSION_MAJOR(apiVersion) << ".";
    strstr << VK_VERSION_MINOR(apiVersion) << ".";
    strstr << VK_VERSION_PATCH(apiVersion);

    strstr << "(";

    // In the case of NVIDIA, deviceName does not necessarily contain "NVIDIA".  Add "NVIDIA" so
    // that Vulkan end2end tests can be selectively disabled on NVIDIA.  TODO(jmadill): should not
    // be needed after http://anglebug.com/1874 is fixed and end2end_tests use more sophisticated
    // driver detection.
    if (mPhysicalDeviceProperties.vendorID == VENDOR_ID_NVIDIA)
    {
        strstr << GetVendorString(mPhysicalDeviceProperties.vendorID) << " ";
    }

    strstr << mPhysicalDeviceProperties.deviceName;
    strstr << " (" << gl::FmtHex(mPhysicalDeviceProperties.deviceID) << ")";

    strstr << ")";

    return strstr.str();
}

gl::Version RendererVk::getMaxSupportedESVersion() const
{
    // Current highest supported version
    gl::Version maxVersion = gl::Version(3, 1);

    // Early out without downgrading the ES version if the mock ICD is enabled.
    // The mock ICD doesn't expose sufficient capabilities yet.
    // https://github.com/KhronosGroup/Vulkan-Tools/issues/84
    if (isMockICDEnabled())
    {
        return maxVersion;
    }

    // Limit to ES3.1 if there are any blockers for 3.2.
    if (!vk::CanSupportGPUShader5EXT(mPhysicalDeviceFeatures))
    {
        maxVersion = LimitVersionTo(maxVersion, {3, 1});
    }

    // Limit to ES3.0 if there are any blockers for 3.1.

    // ES3.1 requires at least one atomic counter buffer and four storage buffers in compute.
    // Atomic counter buffers are emulated with storage buffers.  For simplicity, we always support
    // either none or IMPLEMENTATION_MAX_ATOMIC_COUNTER_BUFFERS atomic counter buffers.  So if
    // Vulkan doesn't support at least that many storage buffers in compute, we don't support 3.1.
    const uint32_t kMinimumStorageBuffersForES31 =
        gl::limits::kMinimumComputeStorageBuffers + gl::IMPLEMENTATION_MAX_ATOMIC_COUNTER_BUFFERS;
    if (mPhysicalDeviceProperties.limits.maxPerStageDescriptorStorageBuffers <
        kMinimumStorageBuffersForES31)
    {
        maxVersion = LimitVersionTo(maxVersion, {3, 0});
    }

    // ES3.1 requires a maximum vertex attribute offset of at least 2047.
    // If the Vulkan implementation can't support that, we cannot support 3.1.
    if (mPhysicalDeviceProperties.limits.maxVertexInputAttributeOffset < 2047)
    {
        maxVersion = LimitVersionTo(maxVersion, {3, 0});
    }

    // Limit to ES2.0 if there are any blockers for 3.0.
    // TODO: http://anglebug.com/3972 Limit to GLES 2.0 if flat shading can't be emulated

    // Multisample textures (ES3.1) and multisample renderbuffers (ES3.0) require the Vulkan driver
    // to support the standard sample locations (in order to pass dEQP tests that check these
    // locations).  If the Vulkan implementation can't support that, we cannot support 3.0/3.1.
    if (mPhysicalDeviceProperties.limits.standardSampleLocations != VK_TRUE)
    {
        maxVersion = LimitVersionTo(maxVersion, {2, 0});
    }

    // If the command buffer doesn't support queries, we can't support ES3.
    if (!vk::CommandBuffer::SupportsQueries(mPhysicalDeviceFeatures))
    {
        maxVersion = LimitVersionTo(maxVersion, {2, 0});
    }

    // If independentBlend is not supported, we can't have a mix of has-alpha and emulated-alpha
    // render targets in a framebuffer.  We also cannot perform masked clears of multiple render
    // targets.
    if (!mPhysicalDeviceFeatures.independentBlend)
    {
        maxVersion = LimitVersionTo(maxVersion, {2, 0});
    }

    // If the Vulkan transform feedback extension is not present, we use an emulation path that
    // requires the vertexPipelineStoresAndAtomics feature.  Without the extension or this feature,
    // we can't currently support transform feedback.
    if (!mFeatures.supportsTransformFeedbackExtension.enabled &&
        !mFeatures.emulateTransformFeedback.enabled)
    {
        maxVersion = LimitVersionTo(maxVersion, {2, 0});
    }

    // Limit to GLES 2.0 if maxPerStageDescriptorUniformBuffers is too low.
    // Table 6.31 MAX_VERTEX_UNIFORM_BLOCKS minimum value = 12
    // Table 6.32 MAX_FRAGMENT_UNIFORM_BLOCKS minimum value = 12
    // NOTE: We reserve some uniform buffers for emulation, so use the NativeCaps which take this
    // into account, rather than the physical device maxPerStageDescriptorUniformBuffers limits.
    for (gl::ShaderType shaderType : gl::AllShaderTypes())
    {
        if (static_cast<GLuint>(getNativeCaps().maxShaderUniformBlocks[shaderType]) <
            gl::limits::kMinimumShaderUniformBlocks)
        {
            maxVersion = LimitVersionTo(maxVersion, {2, 0});
        }
    }

    // Limit to GLES 2.0 if maxVertexOutputComponents is too low.
    // Table 6.31 MAX VERTEX OUTPUT COMPONENTS minimum value = 64
    // NOTE: We reserve some vertex output components for emulation, so use the NativeCaps which
    // take this into account, rather than the physical device maxVertexOutputComponents limits.
    if (static_cast<GLuint>(getNativeCaps().maxVertexOutputComponents) <
        gl::limits::kMinimumVertexOutputComponents)
    {
        maxVersion = LimitVersionTo(maxVersion, {2, 0});
    }

    return maxVersion;
}

gl::Version RendererVk::getMaxConformantESVersion() const
{
    return LimitVersionTo(getMaxSupportedESVersion(), {3, 1});
}

void RendererVk::initFeatures(DisplayVk *displayVk, const ExtensionNameList &deviceExtensionNames)
{
    if (displayVk->getState().featuresAllDisabled)
    {
        ApplyFeatureOverrides(&mFeatures, displayVk->getState());
        return;
    }

    bool isAMD      = IsAMD(mPhysicalDeviceProperties.vendorID);
    bool isIntel    = IsIntel(mPhysicalDeviceProperties.vendorID);
    bool isNvidia   = IsNvidia(mPhysicalDeviceProperties.vendorID);
    bool isQualcomm = IsQualcomm(mPhysicalDeviceProperties.vendorID);
    bool isARM      = IsARM(mPhysicalDeviceProperties.vendorID);
    bool isSwS =
        IsSwiftshader(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID);

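    // Each ANGLE_FEATURE_CONDITION below stores the result of its condition in the corresponding
    // feature's "enabled" flag (read back elsewhere as mFeatures.<name>.enabled); the platform
    // and display overrides applied at the end of this function may still change these defaults.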
    if (mLineRasterizationFeatures.bresenhamLines == VK_TRUE)
    {
        ASSERT(mLineRasterizationFeatures.sType ==
               VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT);
        ANGLE_FEATURE_CONDITION(&mFeatures, bresenhamLineRasterization, true);
    }
    else
    {
        // By default, use OpenGL line rasterization rules when the extension is not available.
        // TODO(jmadill): Fix Android support. http://anglebug.com/2830
        ANGLE_FEATURE_CONDITION(&mFeatures, basicGLLineRasterization, !IsAndroid());
    }

    if (mProvokingVertexFeatures.provokingVertexLast == VK_TRUE)
    {
        ASSERT(mProvokingVertexFeatures.sType ==
               VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT);
        ANGLE_FEATURE_CONDITION(&mFeatures, provokingVertex, true);
    }

    // TODO(lucferron): Currently disabled on Intel only since many tests are failing and need
    // investigation. http://anglebug.com/2728
    ANGLE_FEATURE_CONDITION(
        &mFeatures, flipViewportY,
        !IsIntel(mPhysicalDeviceProperties.vendorID) &&
            ((mPhysicalDeviceProperties.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) ||
             ExtensionFound(VK_KHR_MAINTENANCE1_EXTENSION_NAME, deviceExtensionNames)));

    // http://anglebug.com/2838
    ANGLE_FEATURE_CONDITION(&mFeatures, extraCopyBufferRegion, IsWindows() && isIntel);

    // http://anglebug.com/3055
    ANGLE_FEATURE_CONDITION(&mFeatures, forceCPUPathForCubeMapCopy, IsWindows() && isIntel);

    // Work around incorrect NVIDIA point size range clamping.
    // http://anglebug.com/2970#c10
    // Clamp if driver version is:
    //   < 430 on Windows
    //   < 421 otherwise
    angle::VersionInfo nvidiaVersion;
    if (isNvidia)
    {
        nvidiaVersion = angle::ParseNvidiaDriverVersion(mPhysicalDeviceProperties.driverVersion);
    }
    ANGLE_FEATURE_CONDITION(&mFeatures, clampPointSize,
                            isNvidia && nvidiaVersion.major < uint32_t(IsWindows() ? 430 : 421));

    // Work around ineffective compute-graphics barriers on Nexus 5X.
    // TODO(syoussefi): Figure out which other vendors and driver versions are affected.
    // http://anglebug.com/3019
    ANGLE_FEATURE_CONDITION(&mFeatures, flushAfterVertexConversion,
                            IsAndroid() && IsNexus5X(mPhysicalDeviceProperties.vendorID,
                                                     mPhysicalDeviceProperties.deviceID));

    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsIncrementalPresent,
        ExtensionFound(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME, deviceExtensionNames));

#if defined(ANGLE_PLATFORM_ANDROID)
    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsAndroidHardwareBuffer,
        IsAndroid() &&
            ExtensionFound(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME,
                           deviceExtensionNames) &&
            ExtensionFound(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, deviceExtensionNames));
#endif

#if defined(ANGLE_PLATFORM_GGP)
    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsGGPFrameToken,
        ExtensionFound(VK_GGP_FRAME_TOKEN_EXTENSION_NAME, deviceExtensionNames));
#endif

    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsExternalMemoryFd,
        ExtensionFound(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME, deviceExtensionNames));

    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsExternalMemoryFuchsia,
        ExtensionFound(VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME, deviceExtensionNames));

    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsFilteringPrecision,
        ExtensionFound(VK_GOOGLE_SAMPLER_FILTERING_PRECISION_EXTENSION_NAME, deviceExtensionNames));

    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsExternalFenceCapabilities,
        ExtensionFound(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME, deviceExtensionNames));

    ANGLE_FEATURE_CONDITION(&mFeatures, supportsExternalSemaphoreCapabilities,
                            ExtensionFound(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME,
                                           deviceExtensionNames));

    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsExternalSemaphoreFd,
        ExtensionFound(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, deviceExtensionNames));

    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsExternalSemaphoreFuchsia,
        ExtensionFound(VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME, deviceExtensionNames));

    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsExternalFenceFd,
        ExtensionFound(VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME, deviceExtensionNames));

#if defined(ANGLE_PLATFORM_ANDROID)
    if (mFeatures.supportsExternalFenceCapabilities.enabled &&
        mFeatures.supportsExternalSemaphoreCapabilities.enabled)
    {
        ANGLE_FEATURE_CONDITION(
            &mFeatures, supportsAndroidNativeFenceSync,
            (mFeatures.supportsExternalFenceFd.enabled &&
             FencePropertiesCompatibleWithAndroid(mExternalFenceProperties) &&
             mFeatures.supportsExternalSemaphoreFd.enabled &&
             SemaphorePropertiesCompatibleWithAndroid(mExternalSemaphoreProperties)));
    }
    else
    {
        ANGLE_FEATURE_CONDITION(&mFeatures, supportsAndroidNativeFenceSync,
                                (mFeatures.supportsExternalFenceFd.enabled &&
                                 mFeatures.supportsExternalSemaphoreFd.enabled));
    }
#endif  // defined(ANGLE_PLATFORM_ANDROID)

    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsShaderStencilExport,
        ExtensionFound(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, deviceExtensionNames));

    ANGLE_FEATURE_CONDITION(&mFeatures, supportsTransformFeedbackExtension,
                            mTransformFeedbackFeatures.transformFeedback == VK_TRUE);

    ANGLE_FEATURE_CONDITION(&mFeatures, supportsIndexTypeUint8,
                            mIndexTypeUint8Features.indexTypeUint8 == VK_TRUE);

    ANGLE_FEATURE_CONDITION(&mFeatures, emulateTransformFeedback,
                            (!mFeatures.supportsTransformFeedbackExtension.enabled &&
                             mPhysicalDeviceFeatures.vertexPipelineStoresAndAtomics == VK_TRUE));

    ANGLE_FEATURE_CONDITION(&mFeatures, disableFifoPresentMode, IsLinux() && isIntel);

    ANGLE_FEATURE_CONDITION(&mFeatures, bindEmptyForUnusedDescriptorSets,
                            IsAndroid() && isQualcomm);

    ANGLE_FEATURE_CONDITION(&mFeatures, forceOldRewriteStructSamplers, IsAndroid() && !isSwS);

    ANGLE_FEATURE_CONDITION(&mFeatures, perFrameWindowSizeQuery,
                            isIntel || (IsWindows() && isAMD) || IsFuchsia() || isARM);

    // Disabled on AMD/Windows due to buggy behavior.
    ANGLE_FEATURE_CONDITION(&mFeatures, disallowSeamfulCubeMapEmulation, IsWindows() && isAMD);

    ANGLE_FEATURE_CONDITION(&mFeatures, padBuffersToMaxVertexAttribStride, isAMD);
    mMaxVertexAttribStride = std::min(static_cast<uint32_t>(gl::limits::kMaxVertexAttribStride),
                                      mPhysicalDeviceProperties.limits.maxVertexInputBindingStride);

    ANGLE_FEATURE_CONDITION(&mFeatures, forceD16TexFilter, IsAndroid() && isQualcomm);

    ANGLE_FEATURE_CONDITION(&mFeatures, disableFlippingBlitWithCommand, IsAndroid() && isQualcomm);

    // Allocation sanitization is disabled by default because the implementation is heavyweight
    // and can cause OOM errors and timeouts.
    ANGLE_FEATURE_CONDITION(&mFeatures, allocateNonZeroMemory, false);

    ANGLE_FEATURE_CONDITION(&mFeatures, persistentlyMappedBuffers, true);

    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsExternalMemoryHost,
        ExtensionFound(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME, deviceExtensionNames));

    // Pre-rotation support is not fully ready to be enabled.
    ANGLE_FEATURE_CONDITION(&mFeatures, enablePreRotateSurfaces, false);

    // Currently disable FramebufferVk cache on Apple: http://anglebug.com/4442
    ANGLE_FEATURE_CONDITION(&mFeatures, enableFramebufferVkCache, !IsApple());

    // Currently disabled by default: http://anglebug.com/3078
    ANGLE_FEATURE_CONDITION(&mFeatures, enablePrecisionQualifiers, false);

    ANGLE_FEATURE_CONDITION(&mFeatures, supportDepthStencilRenderingFeedbackLoops, true);

    ANGLE_FEATURE_CONDITION(&mFeatures, preferAggregateBarrierCalls, isNvidia || isAMD || isIntel);

    // Currently disabled by default: http://anglebug.com/4324
    ANGLE_FEATURE_CONDITION(&mFeatures, enableCommandProcessingThread, false);

    angle::PlatformMethods *platform = ANGLEPlatformCurrent();
    platform->overrideFeaturesVk(platform, &mFeatures);

    ApplyFeatureOverrides(&mFeatures, displayVk->getState());
}

void RendererVk::initPipelineCacheVkKey()
{
    std::ostringstream hashStream("ANGLE Pipeline Cache: ", std::ios_base::ate);
    // Add the pipeline cache UUID to make sure the blob cache always gives a compatible pipeline
    // cache. It's not particularly necessary to write it as a hex number as done here, so long as
    // there is no '\0' in the result.
    for (const uint32_t c : mPhysicalDeviceProperties.pipelineCacheUUID)
    {
        hashStream << std::hex << c;
    }
    // Add the vendor and device id too for good measure.
    hashStream << std::hex << mPhysicalDeviceProperties.vendorID;
    hashStream << std::hex << mPhysicalDeviceProperties.deviceID;

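    // Hash the accumulated description with SHA1; the 20-byte digest becomes the blob cache key
    // under which the pipeline cache contents are stored and retrieved.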
    const std::string &hashString = hashStream.str();
    angle::base::SHA1HashBytes(reinterpret_cast<const unsigned char *>(hashString.c_str()),
                               hashString.length(), mPipelineCacheVkBlobKey.data());
}

angle::Result RendererVk::initPipelineCache(DisplayVk *display,
                                            vk::PipelineCache *pipelineCache,
                                            bool *success)
{
    initPipelineCacheVkKey();

    egl::BlobCache::Value initialData;
    *success = display->getBlobCache()->get(display->getScratchBuffer(), mPipelineCacheVkBlobKey,
                                            &initialData);

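    // Seed the new pipeline cache with the data retrieved from the blob cache, if any was found.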
    VkPipelineCacheCreateInfo pipelineCacheCreateInfo = {};

    pipelineCacheCreateInfo.sType           = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    pipelineCacheCreateInfo.flags           = 0;
    pipelineCacheCreateInfo.initialDataSize = *success ? initialData.size() : 0;
    pipelineCacheCreateInfo.pInitialData    = *success ? initialData.data() : nullptr;

    ANGLE_VK_TRY(display, pipelineCache->init(mDevice, pipelineCacheCreateInfo));

    return angle::Result::Continue;
}

angle::Result RendererVk::getPipelineCache(vk::PipelineCache **pipelineCache)
{
    if (mPipelineCacheInitialized)
    {
        *pipelineCache = &mPipelineCache;
        return angle::Result::Continue;
    }

    // We should now recreate the pipeline cache with the blob cache pipeline data.
    vk::PipelineCache pCache;
    bool success = false;
    ANGLE_TRY(initPipelineCache(vk::GetImpl(mDisplay), &pCache, &success));
    if (success)
    {
        // Merge the newly created pipeline cache into the existing one.
        mPipelineCache.merge(mDevice, mPipelineCache.getHandle(), 1, pCache.ptr());
    }
    mPipelineCacheInitialized = true;
    pCache.destroy(mDevice);

    *pipelineCache = &mPipelineCache;
    return angle::Result::Continue;
}

const gl::Caps &RendererVk::getNativeCaps() const
{
    ensureCapsInitialized();
    return mNativeCaps;
}

const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
{
    ensureCapsInitialized();
    return mNativeTextureCaps;
}

const gl::Extensions &RendererVk::getNativeExtensions() const
{
    ensureCapsInitialized();
    return mNativeExtensions;
}

const gl::Limitations &RendererVk::getNativeLimitations() const
{
    ensureCapsInitialized();
    return mNativeLimitations;
}

angle::Result RendererVk::getDescriptorSetLayout(
    vk::Context *context,
    const vk::DescriptorSetLayoutDesc &desc,
    vk::BindingPointer<vk::DescriptorSetLayout> *descriptorSetLayoutOut)
{
    std::lock_guard<decltype(mDescriptorSetLayoutCacheMutex)> lock(mDescriptorSetLayoutCacheMutex);
    return mDescriptorSetLayoutCache.getDescriptorSetLayout(context, desc, descriptorSetLayoutOut);
}

angle::Result RendererVk::getPipelineLayout(
    vk::Context *context,
    const vk::PipelineLayoutDesc &desc,
    const vk::DescriptorSetLayoutPointerArray &descriptorSetLayouts,
    vk::BindingPointer<vk::PipelineLayout> *pipelineLayoutOut)
{
    std::lock_guard<decltype(mPipelineLayoutCacheMutex)> lock(mPipelineLayoutCacheMutex);
    return mPipelineLayoutCache.getPipelineLayout(context, desc, descriptorSetLayouts,
                                                  pipelineLayoutOut);
}

angle::Result RendererVk::getPipelineCacheSize(DisplayVk *displayVk, size_t *pipelineCacheSizeOut)
{
    VkResult result = mPipelineCache.getCacheData(mDevice, pipelineCacheSizeOut, nullptr);
    ANGLE_VK_TRY(displayVk, result);

    return angle::Result::Continue;
}

angle::Result RendererVk::syncPipelineCacheVk(DisplayVk *displayVk)
{
    // TODO: Synchronize access to the pipeline/blob caches?
    ASSERT(mPipelineCache.valid());

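    // Throttle blob-cache updates: sync at most once every kPipelineCacheVkUpdatePeriod calls,
    // and skip the sync entirely when no new pipelines have been created since the last one.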
    if (--mPipelineCacheVkUpdateTimeout > 0)
    {
        return angle::Result::Continue;
    }
    if (!mPipelineCacheDirty)
    {
        mPipelineCacheVkUpdateTimeout = kPipelineCacheVkUpdatePeriod;
        return angle::Result::Continue;
    }

    mPipelineCacheVkUpdateTimeout = kPipelineCacheVkUpdatePeriod;

    size_t pipelineCacheSize = 0;
    ANGLE_TRY(getPipelineCacheSize(displayVk, &pipelineCacheSize));
    // Make sure we will receive enough data to hold the pipeline cache header
    // Table 7. Layout for pipeline cache header version VK_PIPELINE_CACHE_HEADER_VERSION_ONE
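    // The header consists of four 32-bit fields (header length, header version, vendor ID and
    // device ID) followed by the pipeline cache UUID, hence 16 + VK_UUID_SIZE bytes.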
    const size_t kPipelineCacheHeaderSize = 16 + VK_UUID_SIZE;
    if (pipelineCacheSize < kPipelineCacheHeaderSize)
    {
        // No pipeline cache data to read, so return
        return angle::Result::Continue;
    }

    angle::MemoryBuffer *pipelineCacheData = nullptr;
    ANGLE_VK_CHECK_ALLOC(displayVk,
                         displayVk->getScratchBuffer(pipelineCacheSize, &pipelineCacheData));

    size_t oldPipelineCacheSize = pipelineCacheSize;
    VkResult result =
        mPipelineCache.getCacheData(mDevice, &pipelineCacheSize, pipelineCacheData->data());
    // We don't need all of the cache data, so just make sure we at least got the header
    // Vulkan Spec 9.6. Pipeline Cache
    // https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/chap9.html#pipelines-cache
    // If pDataSize is less than what is necessary to store this header, nothing will be written to
    // pData and zero will be written to pDataSize.
    // Any data written to pData is valid and can be provided as the pInitialData member of the
    // VkPipelineCacheCreateInfo structure passed to vkCreatePipelineCache.
    if (ANGLE_UNLIKELY(pipelineCacheSize < kPipelineCacheHeaderSize))
    {
        WARN() << "Not enough pipeline cache data read.";
        return angle::Result::Continue;
    }
    else if (ANGLE_UNLIKELY(result == VK_INCOMPLETE))
    {
        WARN() << "Received VK_INCOMPLETE: Old: " << oldPipelineCacheSize
               << ", New: " << pipelineCacheSize;
    }
    else
    {
        ANGLE_VK_TRY(displayVk, result);
    }

    // If vkGetPipelineCacheData ends up writing fewer bytes than requested, zero out the rest of
    // the buffer to avoid leaking garbage memory.
    ASSERT(pipelineCacheSize <= pipelineCacheData->size());
    if (pipelineCacheSize < pipelineCacheData->size())
    {
        memset(pipelineCacheData->data() + pipelineCacheSize, 0,
               pipelineCacheData->size() - pipelineCacheSize);
    }

    displayVk->getBlobCache()->putApplication(mPipelineCacheVkBlobKey, *pipelineCacheData);
    mPipelineCacheDirty = false;

    return angle::Result::Continue;
}

Serial RendererVk::issueShaderSerial()
{
    return mShaderSerialFactory.generate();
}

// These functions check the mandatory format support table first, and fall back to querying the
// device (if necessary) to test the availability of the bits.
bool RendererVk::hasLinearImageFormatFeatureBits(VkFormat format,
                                                 const VkFormatFeatureFlags featureBits)
{
    return hasFormatFeatureBits<&VkFormatProperties::linearTilingFeatures>(format, featureBits);
}

VkFormatFeatureFlags RendererVk::getImageFormatFeatureBits(VkFormat format,
                                                           const VkFormatFeatureFlags featureBits)
{
    return getFormatFeatureBits<&VkFormatProperties::optimalTilingFeatures>(format, featureBits);
}

bool RendererVk::hasImageFormatFeatureBits(VkFormat format, const VkFormatFeatureFlags featureBits)
{
    return hasFormatFeatureBits<&VkFormatProperties::optimalTilingFeatures>(format, featureBits);
}

bool RendererVk::hasBufferFormatFeatureBits(VkFormat format, const VkFormatFeatureFlags featureBits)
{
    return hasFormatFeatureBits<&VkFormatProperties::bufferFeatures>(format, featureBits);
}

angle::Result RendererVk::queueSubmit(vk::Context *context,
                                      egl::ContextPriority priority,
                                      const VkSubmitInfo &submitInfo,
                                      const vk::Fence *fence,
                                      Serial *serialOut)
{
    if (getFeatures().enableCommandProcessingThread.enabled)
    {
        // For the initial threading phase 1 code, make sure any outstanding command processing
        // is complete.
        // TODO: b/153666475 For phase 2, investigate whether this is required, as most submits
        // will take place through the worker thread except for the one-off submits below.
        mCommandProcessor.waitForWorkComplete();
    }
    {
        std::lock_guard<decltype(mQueueMutex)> lock(mQueueMutex);
        VkFence handle = fence ? fence->getHandle() : VK_NULL_HANDLE;
        ANGLE_VK_TRY(context, vkQueueSubmit(mQueues[priority], 1, &submitInfo, handle));
    }

    ANGLE_TRY(cleanupGarbage(false));

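    // Advance the serials: the serial that was current while recording identifies this
    // submission, and a fresh serial is generated for subsequent work.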
    *serialOut                = mCurrentQueueSerial;
    mLastSubmittedQueueSerial = mCurrentQueueSerial;
    mCurrentQueueSerial       = mQueueSerialFactory.generate();

    return angle::Result::Continue;
}

angle::Result RendererVk::queueSubmitOneOff(vk::Context *context,
                                            vk::PrimaryCommandBuffer &&primary,
                                            egl::ContextPriority priority,
                                            const vk::Fence *fence,
                                            Serial *serialOut)
{
    VkSubmitInfo submitInfo       = {};
    submitInfo.sType              = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers    = primary.ptr();

    ANGLE_TRY(queueSubmit(context, priority, submitInfo, fence, serialOut));

    mPendingOneOffCommands.push_back({*serialOut, std::move(primary)});

    return angle::Result::Continue;
}

angle::Result RendererVk::queueWaitIdle(vk::Context *context, egl::ContextPriority priority)
{
    if (getFeatures().enableCommandProcessingThread.enabled)
    {
        // First make sure command processor is complete when waiting for queue idle.
        mCommandProcessor.waitForWorkComplete();
    }
    {
        std::lock_guard<decltype(mQueueMutex)> lock(mQueueMutex);
        ANGLE_VK_TRY(context, vkQueueWaitIdle(mQueues[priority]));
    }

    ANGLE_TRY(cleanupGarbage(false));

    return angle::Result::Continue;
}

angle::Result RendererVk::deviceWaitIdle(vk::Context *context)
{
    if (getFeatures().enableCommandProcessingThread.enabled)
    {
        // First make sure command processor is complete when waiting for device idle.
        mCommandProcessor.waitForWorkComplete();
    }
    {
        std::lock_guard<decltype(mQueueMutex)> lock(mQueueMutex);
        ANGLE_VK_TRY(context, vkDeviceWaitIdle(mDevice));
    }

    ANGLE_TRY(cleanupGarbage(false));

    return angle::Result::Continue;
}

VkResult RendererVk::queuePresent(egl::ContextPriority priority,
                                  const VkPresentInfoKHR &presentInfo)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "RendererVk::queuePresent");

    if (getFeatures().enableCommandProcessingThread.enabled)
    {
        // First make sure command processor is complete before queue present as
        // present may have dependencies on that thread.
        mCommandProcessor.waitForWorkComplete();
    }

    std::lock_guard<decltype(mQueueMutex)> lock(mQueueMutex);

    {
        ANGLE_TRACE_EVENT0("gpu.angle", "vkQueuePresentKHR");
        return vkQueuePresentKHR(mQueues[priority], &presentInfo);
    }
}

angle::Result RendererVk::newSharedFence(vk::Context *context,
                                         vk::Shared<vk::Fence> *sharedFenceOut)
{
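    // Reuse a fence from the recycler when one is available; a recycled fence must be reset
    // before it can be submitted again. Otherwise create a new one.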
    vk::Fence fence;
    if (mFenceRecycler.empty())
    {
        VkFenceCreateInfo fenceCreateInfo = {};
        fenceCreateInfo.sType             = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        fenceCreateInfo.flags             = 0;
        ANGLE_VK_TRY(context, fence.init(mDevice, fenceCreateInfo));
    }
    else
    {
        mFenceRecycler.fetch(&fence);
        ANGLE_VK_TRY(context, fence.reset(mDevice));
    }
    sharedFenceOut->assign(mDevice, std::move(fence));
    return angle::Result::Continue;
}

template <VkFormatFeatureFlags VkFormatProperties::*features>
VkFormatFeatureFlags RendererVk::getFormatFeatureBits(VkFormat format,
                                                      const VkFormatFeatureFlags featureBits)
{
    ASSERT(static_cast<uint32_t>(format) < vk::kNumVkFormats);
    VkFormatProperties &deviceProperties = mFormatProperties[format];

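    // bufferFeatures is initialized to kInvalidFormatFeatureFlags, which marks entries whose
    // format properties have not been queried from the device yet.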
    if (deviceProperties.bufferFeatures == kInvalidFormatFeatureFlags)
    {
        // If we don't have the actual device features, see if the requested features are
        // mandatory. If so, there's no need to query the device.
        const VkFormatProperties &mandatoryProperties = vk::GetMandatoryFormatSupport(format);
        if (IsMaskFlagSet(mandatoryProperties.*features, featureBits))
        {
            return featureBits;
        }

        // Otherwise query the format features and cache it.
        vkGetPhysicalDeviceFormatProperties(mPhysicalDevice, format, &deviceProperties);
        // Workaround for some Android devices that don't indicate filtering
        // support on D16_UNORM when they should.
        if (mFeatures.forceD16TexFilter.enabled && format == VK_FORMAT_D16_UNORM)
        {
            deviceProperties.*features |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
        }
    }

    return deviceProperties.*features & featureBits;
}

template <VkFormatFeatureFlags VkFormatProperties::*features>
bool RendererVk::hasFormatFeatureBits(VkFormat format, const VkFormatFeatureFlags featureBits)
{
    return IsMaskFlagSet(getFormatFeatureBits<features>(format, featureBits), featureBits);
}

angle::Result RendererVk::cleanupGarbage(bool block)
{
    std::lock_guard<decltype(mGarbageMutex)> lock(mGarbageMutex);

    for (auto garbageIter = mSharedGarbage.begin(); garbageIter != mSharedGarbage.end();)
    {
        // The shared refcount ('counter') should possibly always be zero by the time an object
        // is added to the garbage list.
        vk::SharedGarbage &garbage = *garbageIter;
        if (garbage.destroyIfComplete(this, mLastCompletedQueueSerial))
        {
            garbageIter = mSharedGarbage.erase(garbageIter);
        }
        else
        {
            garbageIter++;
        }
    }

    return angle::Result::Continue;
}

void RendererVk::onNewValidationMessage(const std::string &message)
{
    mLastValidationMessage = message;
    ++mValidationMessageCount;
}

std::string RendererVk::getAndClearLastValidationMessage(uint32_t *countSinceLastClear)
{
    *countSinceLastClear    = mValidationMessageCount;
    mValidationMessageCount = 0;

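    // Moving out of the member leaves mLastValidationMessage empty, which effectively clears it
    // for the next message.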
    return std::move(mLastValidationMessage);
}

uint64_t RendererVk::getMaxFenceWaitTimeNs() const
{
    constexpr uint64_t kMaxFenceWaitTimeNs = 120'000'000'000llu;

    return kMaxFenceWaitTimeNs;
}

void RendererVk::onCompletedSerial(Serial serial)
{
    if (serial > mLastCompletedQueueSerial)
    {
        mLastCompletedQueueSerial = serial;
    }
}

void RendererVk::reloadVolkIfNeeded() const
{
#if defined(ANGLE_SHARED_LIBVULKAN)
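    // volk holds function pointers for a single instance and device at a time; if they were last
    // loaded for a different instance/device, load them again for the current one.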
    if ((mInstance != VK_NULL_HANDLE) && (volkGetLoadedInstance() != mInstance))
    {
        volkLoadInstance(mInstance);
    }

    if ((mDevice != VK_NULL_HANDLE) && (volkGetLoadedDevice() != mDevice))
    {
        volkLoadDevice(mDevice);
    }
#endif  // defined(ANGLE_SHARED_LIBVULKAN)
}

angle::Result RendererVk::getCommandBufferOneOff(vk::Context *context,
                                                 vk::PrimaryCommandBuffer *commandBufferOut)
{
    if (!mOneOffCommandPool.valid())
    {
        VkCommandPoolCreateInfo createInfo = {};
        createInfo.sType                   = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
        createInfo.flags                   = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
        ANGLE_VK_TRY(context, mOneOffCommandPool.init(mDevice, createInfo));
    }

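    // Recycle the oldest pending one-off command buffer if the GPU is known to be done with it
    // (i.e. its serial has been completed); otherwise allocate a fresh one from the pool.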
    if (!mPendingOneOffCommands.empty() &&
        mPendingOneOffCommands.front().serial < mLastCompletedQueueSerial)
    {
        *commandBufferOut = std::move(mPendingOneOffCommands.front().commandBuffer);
        mPendingOneOffCommands.pop_front();
        ANGLE_VK_TRY(context, commandBufferOut->reset());
    }
    else
    {
        VkCommandBufferAllocateInfo allocInfo = {};
        allocInfo.sType                       = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
        allocInfo.level                       = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
        allocInfo.commandBufferCount          = 1;
        allocInfo.commandPool                 = mOneOffCommandPool.getHandle();

        ANGLE_VK_TRY(context, commandBufferOut->init(context->getDevice(), allocInfo));
    }

    VkCommandBufferBeginInfo beginInfo = {};
    beginInfo.sType                    = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    beginInfo.flags                    = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    beginInfo.pInheritanceInfo         = nullptr;
    ANGLE_VK_TRY(context, commandBufferOut->begin(beginInfo));

    return angle::Result::Continue;
}
}  // namespace rx