• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 //
2 // Copyright 2016 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // RendererVk.cpp:
7 //    Implements the class methods for RendererVk.
8 //
9 
10 #include "libANGLE/renderer/vulkan/RendererVk.h"
11 
12 // Placing this first seems to solve an intellisense bug.
13 #include "libANGLE/renderer/vulkan/vk_utils.h"
14 
15 #include <EGL/eglext.h>
16 
17 #include "common/debug.h"
18 #include "common/platform.h"
19 #include "common/system_utils.h"
20 #include "common/vulkan/libvulkan_loader.h"
21 #include "common/vulkan/vk_google_filtering_precision.h"
22 #include "common/vulkan/vulkan_icd.h"
23 #include "gpu_info_util/SystemInfo.h"
24 #include "libANGLE/Context.h"
25 #include "libANGLE/Display.h"
26 #include "libANGLE/renderer/driver_utils.h"
27 #include "libANGLE/renderer/vulkan/CompilerVk.h"
28 #include "libANGLE/renderer/vulkan/ContextVk.h"
29 #include "libANGLE/renderer/vulkan/DisplayVk.h"
30 #include "libANGLE/renderer/vulkan/FramebufferVk.h"
31 #include "libANGLE/renderer/vulkan/ProgramVk.h"
32 #include "libANGLE/renderer/vulkan/ResourceVk.h"
33 #include "libANGLE/renderer/vulkan/SyncVk.h"
34 #include "libANGLE/renderer/vulkan/VertexArrayVk.h"
35 #include "libANGLE/renderer/vulkan/vk_caps_utils.h"
36 #include "libANGLE/renderer/vulkan/vk_format_utils.h"
37 #include "libANGLE/trace.h"
38 #include "platform/PlatformMethods.h"
39 
40 // Consts
namespace
{
// Sentinel value meaning "format features not yet queried/cached".
constexpr VkFormatFeatureFlags kInvalidFormatFeatureFlags = static_cast<VkFormatFeatureFlags>(-1);

// Whether non-conformant extensions/versions may be exposed; build-time switch.
#if defined(ANGLE_EXPOSE_NON_CONFORMANT_EXTENSIONS_AND_VERSIONS)
constexpr bool kExposeNonConformantExtensionsAndVersions = true;
#else
constexpr bool kExposeNonConformantExtensionsAndVersions = false;
#endif

// Whether pipeline-cache blobs are protected with a CRC; build-time switch.
#if defined(ANGLE_ENABLE_CRC_FOR_PIPELINE_CACHE)
constexpr bool kEnableCRCForPipelineCache = true;
#else
constexpr bool kEnableCRCForPipelineCache                = false;
#endif
}  // anonymous namespace
57 
58 namespace rx
59 {
60 
61 namespace
62 {
// Lower bound for the per-context default uniform buffer allocation.
constexpr uint32_t kMinDefaultUniformBufferSize = 16 * 1024u;
// This size is picked based on experience. The majority of devices support 64K
// maxUniformBufferSize. Since this is a per context buffer, a bigger buffer size reduces the
// number of descriptor set allocations, so we picked the maxUniformBufferSize that most
// devices support. It may need further tuning based on specific device needs and balance
// between performance and memory usage.
constexpr uint32_t kPreferredDefaultUniformBufferSize = 64 * 1024u;

// Maximum size to use VMA image suballocation. Any allocation greater than or equal to this
// value will use a dedicated VkDeviceMemory.
constexpr size_t kImageSizeThresholdForDedicatedMemoryAllocation = 4 * 1024 * 1024;

// Pipeline cache header version. It should be incremented any time there is an update to the cache
// header or data structure.
constexpr uint16_t kPipelineCacheVersion = 1;

// Update the pipeline cache every this many swaps.
constexpr uint32_t kPipelineCacheVkUpdatePeriod = 60;
// Per the Vulkan specification, ANGLE must indicate the highest version of Vulkan functionality
// that it uses.  The Vulkan validation layers will issue messages for any core functionality that
// requires a higher version.
//
// ANGLE specifically limits its core version to Vulkan 1.1 and relies on availability of
// extensions.  While implementations are not required to expose an extension that is promoted to
// later versions, they always do so in practice.  Avoiding later core versions helps keep the
// initialization logic simpler.
constexpr uint32_t kPreferredVulkanAPIVersion = VK_API_VERSION_1_1;
90 
IsVulkan11(uint32_t apiVersion)91 bool IsVulkan11(uint32_t apiVersion)
92 {
93     return apiVersion >= VK_API_VERSION_1_1;
94 }
95 
IsVenus(uint32_t driverId,const char * deviceName)96 bool IsVenus(uint32_t driverId, const char *deviceName)
97 {
98     // Where driver id is available, check against Venus driver id:
99     if (driverId != 0)
100     {
101         return driverId == VK_DRIVER_ID_MESA_VENUS;
102     }
103 
104     // Otherwise, look for Venus in the device name.
105     return strstr(deviceName, "Venus") != nullptr;
106 }
107 
IsQualcommOpenSource(uint32_t vendorId,uint32_t driverId,const char * deviceName)108 bool IsQualcommOpenSource(uint32_t vendorId, uint32_t driverId, const char *deviceName)
109 {
110     if (!IsQualcomm(vendorId))
111     {
112         return false;
113     }
114 
115     // Where driver id is available, distinguish by driver id:
116     if (driverId != 0)
117     {
118         return driverId != VK_DRIVER_ID_QUALCOMM_PROPRIETARY;
119     }
120 
121     // Otherwise, look for Venus or Turnip in the device name.
122     return strstr(deviceName, "Venus") != nullptr || strstr(deviceName, "Turnip") != nullptr;
123 }
124 
IsPixel()125 bool IsPixel()
126 {
127     if (!IsAndroid())
128     {
129         return false;
130     }
131 
132     angle::SystemInfo info;
133     if (!angle::GetSystemInfo(&info))
134     {
135         return false;
136     }
137 
138     return strstr(info.machineModelName.c_str(), "Pixel") != nullptr;
139 }
140 
ChooseICDFromAttribs(const egl::AttributeMap & attribs)141 angle::vk::ICD ChooseICDFromAttribs(const egl::AttributeMap &attribs)
142 {
143 #if !defined(ANGLE_PLATFORM_ANDROID)
144     // Mock ICD does not currently run on Android
145     EGLAttrib deviceType = attribs.get(EGL_PLATFORM_ANGLE_DEVICE_TYPE_ANGLE,
146                                        EGL_PLATFORM_ANGLE_DEVICE_TYPE_HARDWARE_ANGLE);
147 
148     switch (deviceType)
149     {
150         case EGL_PLATFORM_ANGLE_DEVICE_TYPE_HARDWARE_ANGLE:
151             break;
152         case EGL_PLATFORM_ANGLE_DEVICE_TYPE_NULL_ANGLE:
153             return angle::vk::ICD::Mock;
154         case EGL_PLATFORM_ANGLE_DEVICE_TYPE_SWIFTSHADER_ANGLE:
155             return angle::vk::ICD::SwiftShader;
156         default:
157             UNREACHABLE();
158             break;
159     }
160 #endif  // !defined(ANGLE_PLATFORM_ANDROID)
161 
162     return angle::vk::ICD::Default;
163 }
164 
// Strict-weak-ordering comparator for C strings, usable with the <algorithm> functions.
bool StrLess(const char *a, const char *b)
{
    const int order = strcmp(a, b);
    return order < 0;
}
169 
ExtensionFound(const char * needle,const vk::ExtensionNameList & haystack)170 bool ExtensionFound(const char *needle, const vk::ExtensionNameList &haystack)
171 {
172     // NOTE: The list must be sorted.
173     return std::binary_search(haystack.begin(), haystack.end(), needle, StrLess);
174 }
175 
VerifyExtensionsPresent(const vk::ExtensionNameList & haystack,const vk::ExtensionNameList & needles)176 VkResult VerifyExtensionsPresent(const vk::ExtensionNameList &haystack,
177                                  const vk::ExtensionNameList &needles)
178 {
179     // NOTE: The lists must be sorted.
180     if (std::includes(haystack.begin(), haystack.end(), needles.begin(), needles.end(), StrLess))
181     {
182         return VK_SUCCESS;
183     }
184     for (const char *needle : needles)
185     {
186         if (!ExtensionFound(needle, haystack))
187         {
188             ERR() << "Extension not supported: " << needle;
189         }
190     }
191     return VK_ERROR_EXTENSION_NOT_PRESENT;
192 }
193 
// Array of Validation error/warning messages that will be ignored, should include bugID.
// Matching is a plain substring search against the message text (see IsMessageInSkipList).
constexpr const char *kSkippedMessages[] = {
    // http://anglebug.com/2866
    "UNASSIGNED-CoreValidation-Shader-OutputNotConsumed",
    // http://anglebug.com/4928
    "VUID-vkMapMemory-memory-00683",
    // http://anglebug.com/5027
    "UNASSIGNED-CoreValidation-Shader-PushConstantOutOfRange",
    // http://anglebug.com/5304
    "VUID-vkCmdDraw-magFilter-04553",
    "VUID-vkCmdDrawIndexed-magFilter-04553",
    // http://anglebug.com/5309
    "VUID-VkImageViewCreateInfo-usage-02652",
    // http://issuetracker.google.com/175584609
    "VUID-vkCmdDraw-None-04584",
    "VUID-vkCmdDrawIndexed-None-04584",
    "VUID-vkCmdDrawIndirect-None-04584",
    "VUID-vkCmdDrawIndirectCount-None-04584",
    "VUID-vkCmdDrawIndexedIndirect-None-04584",
    "VUID-vkCmdDrawIndexedIndirectCount-None-04584",
    // http://anglebug.com/5912
    "VUID-VkImageViewCreateInfo-pNext-01585",
    // http://anglebug.com/6514
    "vkEnumeratePhysicalDevices: One or more layers modified physical devices",
    // When using Vulkan secondary command buffers, the command buffer is begun with the current
    // framebuffer specified in pInheritanceInfo::framebuffer.  If the framebuffer is multisampled
    // and is resolved, an optimization would change the framebuffer to add the resolve target and
    // use a subpass resolve operation instead.  The following error complains that the framebuffer
    // used to start the render pass and the one specified in pInheritanceInfo::framebuffer must be
    // equal, which is not true in that case.  In practice, this is benign, as the part of the
    // framebuffer that's accessed by the command buffer is identically laid out.
    // http://anglebug.com/6811
    "VUID-vkCmdExecuteCommands-pCommandBuffers-00099",
    // http://anglebug.com/7325
    "VUID-vkCmdBindVertexBuffers2-pStrides-06209",
    // http://anglebug.com/7729
    "VUID-vkDestroySemaphore-semaphore-01137",
    // http://anglebug.com/7843
    "VUID-VkGraphicsPipelineCreateInfo-Vertex-07722",
    // http://anglebug.com/7861
    "VUID-vkCmdDraw-None-06887",
    "VUID-vkCmdDraw-None-06886",
    "VUID-vkCmdDrawIndexed-None-06887",
    // http://anglebug.com/7865
    "VUID-VkDescriptorImageInfo-imageView-06711",
    "VUID-VkDescriptorImageInfo-descriptorType-06713",
    // http://crbug.com/1412096
    "VUID-VkImageCreateInfo-pNext-00990",
    // http://crbug.com/1420265
    "VUID-vkCmdEndDebugUtilsLabelEXT-commandBuffer-01912",
    // http://anglebug.com/8076
    "VUID-VkGraphicsPipelineCreateInfo-None-06573",
    // http://anglebug.com/8119
    "VUID-VkGraphicsPipelineCreateInfo-Input-07904",
    "VUID-VkGraphicsPipelineCreateInfo-Input-07905",
    "VUID-vkCmdDrawIndexed-None-07835",
    "VUID-VkGraphicsPipelineCreateInfo-Input-08733",
    // http://anglebug.com/8151
    "VUID-vkCmdDraw-None-07844",
    "VUID-vkCmdDraw-None-07845",
    "VUID-vkCmdDraw-None-07848",
    // https://anglebug.com/8128#c3
    "VUID-VkBufferViewCreateInfo-buffer-00934",
    // https://anglebug.com/8203
    "VUID-VkVertexInputBindingDivisorDescriptionEXT-divisor-01870",
    // https://anglebug.com/8237
    "VUID-VkGraphicsPipelineCreateInfo-topology-08890",
    // https://anglebug.com/8242
    "VUID-vkCmdDraw-None-08608",
    "VUID-vkCmdDrawIndexed-None-08608",
    "VUID-vkCmdDraw-None-08753",
    "VUID-vkCmdDrawIndexed-None-08753",
    "VUID-vkCmdDraw-None-09003",
    "VUID-vkCmdDrawIndexed-None-09003",
};
269 
// Validation messages that should be ignored only when VK_EXT_primitive_topology_list_restart is
// not present.  Appended to the skip list at initialization when that extension is unavailable.
constexpr const char *kNoListRestartSkippedMessages[] = {
    // http://anglebug.com/3832
    "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00428",
};
276 
// Some syncval errors are resolved in the presence of the NONE load or store render pass ops.  For
// those, ANGLE makes no further attempt to resolve them and expects vendor support for the
// extensions instead.  The list of skipped messages is split based on this support.
//
// An entry matches a message when the message id and both content fragments all appear as
// substrings (see ShouldReportDebugMessage).  A trailing `true` marks entries that are only
// skipped because of non-conformant coherent framebuffer fetch.
constexpr vk::SkippedSyncvalMessage kSkippedSyncvalMessages[] = {
    // http://anglebug.com/6416
    // http://anglebug.com/6421
    {
        "SYNC-HAZARD-WRITE-AFTER-WRITE",
        "Access info (usage: SYNC_IMAGE_LAYOUT_TRANSITION, prior_usage: "
        "SYNC_IMAGE_LAYOUT_TRANSITION, "
        "write_barriers: 0, command: vkCmdEndRenderPass",
    },
    // These errors are caused by feedback loop tests that don't produce correct Vulkan to begin
    // with.
    // http://anglebug.com/6417
    // http://anglebug.com/7070
    //
    // Occasionally, this is due to VVL's lack of support for some extensions.  For example,
    // syncval doesn't properly account for VK_EXT_fragment_shader_interlock, which gives
    // synchronization guarantees without the need for an image barrier.
    // https://github.com/KhronosGroup/Vulkan-ValidationLayers/issues/4387
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "imageLayout: VK_IMAGE_LAYOUT_GENERAL",
        "usage: SYNC_FRAGMENT_SHADER_SHADER_",
    },
    // http://anglebug.com/6551
    {
        "SYNC-HAZARD-WRITE-AFTER-WRITE",
        "Access info (usage: SYNC_IMAGE_LAYOUT_TRANSITION, prior_usage: "
        "SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, write_barriers: "
        "SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_READ|SYNC_EARLY_FRAGMENT_TESTS_DEPTH_"
        "STENCIL_ATTACHMENT_WRITE|SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_READ|SYNC_LATE_"
        "FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE|SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_"
        "ATTACHMENT_"
        "READ|SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, command: vkCmdEndRenderPass",
    },
    {
        "SYNC-HAZARD-WRITE-AFTER-WRITE",
        "Access info (usage: SYNC_IMAGE_LAYOUT_TRANSITION, prior_usage: "
        "SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, write_barriers: "
        "SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ|SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_"
        "ATTACHMENT_WRITE, command: vkCmdEndRenderPass",
    },
    // From: TraceTest.manhattan_31 with SwiftShader and
    // VulkanPerformanceCounterTest.NewTextureDoesNotBreakRenderPass for both depth and stencil
    // aspect. http://anglebug.com/6701
    {
        "SYNC-HAZARD-WRITE-AFTER-WRITE",
        "Hazard WRITE_AFTER_WRITE in subpass 0 for attachment 1 aspect ",
        "during load with loadOp VK_ATTACHMENT_LOAD_OP_DONT_CARE. Access info (usage: "
        "SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, prior_usage: "
        "SYNC_IMAGE_LAYOUT_TRANSITION",
    },
    // From various tests. The validation layer does not calculate the exact vertexCounts that's
    // being accessed. http://anglebug.com/6725
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "vkCmdDrawIndexed: Hazard READ_AFTER_WRITE for vertex",
        "usage: SYNC_VERTEX_ATTRIBUTE_INPUT_VERTEX_ATTRIBUTE_READ",
    },
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "vkCmdDrawIndexedIndirect: Hazard READ_AFTER_WRITE for vertex",
        "usage: SYNC_VERTEX_ATTRIBUTE_INPUT_VERTEX_ATTRIBUTE_READ",
    },
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "vkCmdDrawIndirect: Hazard READ_AFTER_WRITE for vertex",
        "usage: SYNC_VERTEX_ATTRIBUTE_INPUT_VERTEX_ATTRIBUTE_READ",
    },
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "vkCmdDrawIndexedIndirect: Hazard READ_AFTER_WRITE for index",
        "usage: SYNC_INDEX_INPUT_INDEX_READ",
    },
    {
        "SYNC-HAZARD-WRITE-AFTER-READ",
        "vkCmdDraw: Hazard WRITE_AFTER_READ for",
        "Access info (usage: SYNC_VERTEX_SHADER_SHADER_STORAGE_WRITE, prior_usage: "
        "SYNC_VERTEX_ATTRIBUTE_INPUT_VERTEX_ATTRIBUTE_READ",
    },
    {
        "SYNC-HAZARD-WRITE-AFTER-READ",
        "vkCmdCopyImageToBuffer: Hazard WRITE_AFTER_READ for dstBuffer VkBuffer",
        "Access info (usage: SYNC_COPY_TRANSFER_WRITE, prior_usage: "
        "SYNC_VERTEX_ATTRIBUTE_INPUT_VERTEX_ATTRIBUTE_READ",
    },
    {
        "SYNC-HAZARD-WRITE-AFTER-READ",
        "vkCmdCopyBuffer: Hazard WRITE_AFTER_READ for dstBuffer VkBuffer",
        "Access info (usage: SYNC_COPY_TRANSFER_WRITE, prior_usage: "
        "SYNC_VERTEX_ATTRIBUTE_INPUT_VERTEX_ATTRIBUTE_READ",
    },
    {
        "SYNC-HAZARD-WRITE-AFTER-READ",
        "vkCmdDispatch: Hazard WRITE_AFTER_READ for VkBuffer",
        "Access info (usage: SYNC_COMPUTE_SHADER_SHADER_STORAGE_WRITE, prior_usage: "
        "SYNC_VERTEX_ATTRIBUTE_INPUT_VERTEX_ATTRIBUTE_READ",
    },
    // From: MultisampledRenderToTextureES3Test.TransformFeedbackTest. http://anglebug.com/6725
    {
        "SYNC-HAZARD-WRITE-AFTER-WRITE",
        "vkCmdBeginRenderPass: Hazard WRITE_AFTER_WRITE in subpass",
        "write_barriers: "
        "SYNC_TRANSFORM_FEEDBACK_EXT_TRANSFORM_FEEDBACK_COUNTER_READ_EXT|SYNC_TRANSFORM_FEEDBACK_"
        "EXT_"
        "TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT",
    },
    // http://anglebug.com/8054 (VkNonDispatchableHandle on x86 bots)
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "vkCmdDraw: Hazard READ_AFTER_WRITE for VkBuffer",
        "usage: SYNC_VERTEX_SHADER_SHADER_STORAGE_READ",
    },
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "vkCmdDraw: Hazard READ_AFTER_WRITE for VkNonDispatchableHandle",
        "usage: SYNC_VERTEX_SHADER_SHADER_STORAGE_READ",
    },
    // From: TraceTest.manhattan_31 with SwiftShader. These failures appear related to
    // dynamic uniform buffers. The failures are gone if I force mUniformBufferDescriptorType to
    // VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER. My guess is that syncval is not doing a fine grain enough
    // range tracking with dynamic uniform buffers. http://anglebug.com/6725
    {
        "SYNC-HAZARD-WRITE-AFTER-READ",
        "usage: SYNC_VERTEX_SHADER_UNIFORM_READ",
    },
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "usage: SYNC_VERTEX_SHADER_UNIFORM_READ",
    },
    {
        "SYNC-HAZARD-WRITE-AFTER-READ",
        "type: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC",
    },
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "type: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC",
    },
    // Coherent framebuffer fetch is enabled on some platforms that are known a priori to have the
    // needed behavior, even though this is not specified in the Vulkan spec.  These generate
    // syncval errors that are benign on those platforms.
    // http://anglebug.com/6870
    // From: TraceTest.dead_by_daylight
    // From: TraceTest.genshin_impact
    {"SYNC-HAZARD-READ-AFTER-WRITE",
     "vkCmdBeginRenderPass: Hazard READ_AFTER_WRITE in subpass 0 for attachment ",
     "aspect color during load with loadOp VK_ATTACHMENT_LOAD_OP_LOAD. Access info (usage: "
     "SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_READ, prior_usage: "
     "SYNC_IMAGE_LAYOUT_TRANSITION, write_barriers: 0, command: vkCmdEndRenderPass",
     true},
    {"SYNC-HAZARD-WRITE-AFTER-WRITE",
     "vkCmdBeginRenderPass: Hazard WRITE_AFTER_WRITE in subpass 0 for attachment ",
     "image layout transition (old_layout: VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, new_layout: "
     "VK_IMAGE_LAYOUT_GENERAL). Access info (usage: SYNC_IMAGE_LAYOUT_TRANSITION, prior_usage: "
     "SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, write_barriers:",
     true},
    // From: TraceTest.special_forces_group_2 http://anglebug.com/5592
    {
        "SYNC-HAZARD-WRITE-AFTER-READ",
        "Access info (usage: SYNC_IMAGE_LAYOUT_TRANSITION, prior_usage: "
        "SYNC_FRAGMENT_SHADER_SHADER_",
    },
    // http://anglebug.com/7031
    {"SYNC-HAZARD-READ-AFTER-WRITE",
     "type: VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, imageLayout: "
     "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, binding #0, index 0. Access info (usage: "
     "SYNC_COMPUTE_SHADER_SHADER_",
     "", false},
    // http://anglebug.com/7456
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "type: VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, "
        "imageLayout: VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL",
        "Access info (usage: SYNC_FRAGMENT_SHADER_SHADER_",
    },
    // From: TraceTest.life_is_strange http://anglebug.com/7711
    {"SYNC-HAZARD-WRITE-AFTER-READ",
     "vkCmdEndRenderPass: Hazard WRITE_AFTER_READ in subpass 0 for attachment 1 "
     "depth aspect during store with storeOp VK_ATTACHMENT_STORE_OP_DONT_CARE. "
     "Access info (usage: SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, "
     "prior_usage: SYNC_FRAGMENT_SHADER_SHADER_"},
    // From: TraceTest.life_is_strange http://anglebug.com/7711
    {"SYNC-HAZARD-READ-AFTER-WRITE",
     "type: VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, "
     "imageLayout: VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL",
     "usage: SYNC_FRAGMENT_SHADER_SHADER_"},
    // From: TraceTest.diablo_immortal http://anglebug.com/7837
    {"SYNC-HAZARD-WRITE-AFTER-WRITE", "vkCmdDrawIndexed: Hazard WRITE_AFTER_WRITE for VkImageView ",
     "Subpass #0, and pColorAttachments #0. Access info (usage: "
     "SYNC_COLOR_ATTACHMENT_OUTPUT_COLOR_ATTACHMENT_WRITE, prior_usage: "
     "SYNC_IMAGE_LAYOUT_TRANSITION, write_barriers: 0, command: vkCmdEndRenderPass"},
    // From: TraceTest.diablo_immortal http://anglebug.com/7837
    {"SYNC-HAZARD-WRITE-AFTER-READ",
     "load with loadOp VK_ATTACHMENT_LOAD_OP_DONT_CARE. Access info (usage: "
     "SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, prior_usage: "
     "SYNC_FRAGMENT_SHADER_SHADER_"},
    // From: TraceTest.catalyst_black http://anglebug.com/7924
    {"SYNC-HAZARD-WRITE-AFTER-READ",
     "store with storeOp VK_ATTACHMENT_STORE_OP_STORE. Access info (usage: "
     "SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, prior_usage: "
     "SYNC_FRAGMENT_SHADER_SHADER_"},
};
481 
// Messages that shouldn't be generated if storeOp=NONE is supported, otherwise they are expected.
// Same substring-matching semantics as kSkippedSyncvalMessages.
constexpr vk::SkippedSyncvalMessage kSkippedSyncvalMessagesWithoutStoreOpNone[] = {
    // These errors are generated when simultaneously using a read-only depth/stencil attachment as
    // sampler.  This is valid Vulkan.
    //
    // When storeOp=NONE is not present, ANGLE uses storeOp=STORE, but considers the image read-only
    // and produces a hazard.  ANGLE relies on storeOp=NONE and so this is not expected to be worked
    // around.
    //
    // With storeOp=NONE, there is another bug where a depth/stencil attachment may use storeOp=NONE
    // for depth while storeOp=DONT_CARE for stencil, and the latter causes a synchronization error
    // (similarly to the previous case as DONT_CARE is also a write operation).
    // http://anglebug.com/5962
    {
        "SYNC-HAZARD-WRITE-AFTER-READ",
        "depth aspect during store with storeOp VK_ATTACHMENT_STORE_OP_STORE. Access info (usage: "
        "SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE",
        "usage: SYNC_FRAGMENT_SHADER_SHADER_",
    },
    {
        "SYNC-HAZARD-WRITE-AFTER-READ",
        "stencil aspect during store with stencilStoreOp VK_ATTACHMENT_STORE_OP_STORE. Access info "
        "(usage: SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE",
        "usage: SYNC_FRAGMENT_SHADER_SHADER_",
    },
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "imageLayout: VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL",
        "usage: SYNC_FRAGMENT_SHADER_SHADER_",
    },
    // From: TraceTest.antutu_refinery http://anglebug.com/6663
    {
        "SYNC-HAZARD-READ-AFTER-WRITE",
        "imageLayout: VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL",
        "usage: SYNC_COMPUTE_SHADER_SHADER_SAMPLED_READ",
    },
};
519 
520 // Messages that shouldn't be generated if both loadOp=NONE and storeOp=NONE are supported,
521 // otherwise they are expected.
522 constexpr vk::SkippedSyncvalMessage kSkippedSyncvalMessagesWithoutLoadStoreOpNone[] = {
523     // This error is generated for multiple reasons:
524     //
525     // - http://anglebug.com/6411
526     // - http://anglebug.com/5371: This is resolved with storeOp=NONE
527     {
528         "SYNC-HAZARD-WRITE-AFTER-WRITE",
529         "Access info (usage: SYNC_IMAGE_LAYOUT_TRANSITION, prior_usage: "
530         "SYNC_LATE_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, write_barriers: 0, command: "
531         "vkCmdEndRenderPass",
532     },
533     // http://anglebug.com/6411
534     // http://anglebug.com/6584
535     {
536         "SYNC-HAZARD-WRITE-AFTER-WRITE",
537         "aspect depth during load with loadOp VK_ATTACHMENT_LOAD_OP_DONT_CARE. Access info (usage: "
538         "SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, prior_usage: "
539         "SYNC_IMAGE_LAYOUT_TRANSITION",
540     },
541     {
542         "SYNC-HAZARD-WRITE-AFTER-WRITE",
543         "aspect stencil during load with loadOp VK_ATTACHMENT_LOAD_OP_DONT_CARE. Access info "
544         "(usage: "
545         "SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE",
546     },
547     // http://anglebug.com/5962
548     {
549         "SYNC-HAZARD-WRITE-AFTER-WRITE",
550         "aspect stencil during load with loadOp VK_ATTACHMENT_LOAD_OP_DONT_CARE. Access info "
551         "(usage: "
552         "SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, prior_usage: "
553         "SYNC_IMAGE_LAYOUT_TRANSITION",
554     },
555     {
556         "SYNC-HAZARD-WRITE-AFTER-WRITE",
557         "aspect stencil during load with loadOp VK_ATTACHMENT_LOAD_OP_DONT_CARE. Access info "
558         "(usage: "
559         "SYNC_EARLY_FRAGMENT_TESTS_DEPTH_STENCIL_ATTACHMENT_WRITE, prior_usage: "
560         "SYNC_IMAGE_LAYOUT_TRANSITION",
561     },
562 };
563 
// Verdict on a validation/debug message: surface it to the log, or silently drop it.
enum class DebugMessageReport
{
    Ignore,
    Print,
};
569 
// Returns true when |message| contains any entry of |skippedList| as a substring.
bool IsMessageInSkipList(const char *message,
                         const char *const skippedList[],
                         size_t skippedListSize)
{
    for (size_t i = 0; i < skippedListSize; ++i)
    {
        const char *pattern = skippedList[i];
        if (strstr(message, pattern) != nullptr)
        {
            return true;
        }
    }

    return false;
}
584 
585 // Suppress validation errors that are known.  Returns DebugMessageReport::Ignore in that case.
ShouldReportDebugMessage(RendererVk * renderer,const char * messageId,const char * message)586 DebugMessageReport ShouldReportDebugMessage(RendererVk *renderer,
587                                             const char *messageId,
588                                             const char *message)
589 {
590     if (message == nullptr || messageId == nullptr)
591     {
592         return DebugMessageReport::Print;
593     }
594 
595     // Check with non-syncval messages:
596     const std::vector<const char *> &skippedMessages = renderer->getSkippedValidationMessages();
597     if (IsMessageInSkipList(message, skippedMessages.data(), skippedMessages.size()))
598     {
599         return DebugMessageReport::Ignore;
600     }
601 
602     // Then check with syncval messages:
603     const bool isFramebufferFetchUsed = renderer->isFramebufferFetchUsed();
604 
605     for (const vk::SkippedSyncvalMessage &msg : renderer->getSkippedSyncvalMessages())
606     {
607         if (strstr(messageId, msg.messageId) == nullptr ||
608             strstr(message, msg.messageContents1) == nullptr ||
609             strstr(message, msg.messageContents2) == nullptr)
610         {
611             continue;
612         }
613 
614         // If the error is due to exposing coherent framebuffer fetch (without
615         // VK_EXT_rasterization_order_attachment_access), but framebuffer fetch has not been used by
616         // the application, report it.
617         //
618         // Note that currently syncval doesn't support the
619         // VK_EXT_rasterization_order_attachment_access extension, so the syncval messages would
620         // continue to be produced despite the extension.
621         constexpr bool kSyncValSupportsRasterizationOrderExtension = false;
622         const bool hasRasterizationOrderExtension =
623             renderer->getFeatures().supportsRasterizationOrderAttachmentAccess.enabled &&
624             kSyncValSupportsRasterizationOrderExtension;
625         if (msg.isDueToNonConformantCoherentFramebufferFetch &&
626             (!isFramebufferFetchUsed || hasRasterizationOrderExtension))
627         {
628             return DebugMessageReport::Print;
629         }
630 
631         // Otherwise ignore the message
632         return DebugMessageReport::Ignore;
633     }
634 
635     return DebugMessageReport::Print;
636 }
637 
// Returns a human-readable name for a VkObjectType, for use in debug/log output.
// Unknown or newer enum values fall through to "<Unrecognized>".
const char *GetVkObjectTypeName(VkObjectType type)
{
    switch (type)
    {
        case VK_OBJECT_TYPE_UNKNOWN:
            return "Unknown";
        case VK_OBJECT_TYPE_INSTANCE:
            return "Instance";
        case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
            return "Physical Device";
        case VK_OBJECT_TYPE_DEVICE:
            return "Device";
        case VK_OBJECT_TYPE_QUEUE:
            return "Queue";
        case VK_OBJECT_TYPE_SEMAPHORE:
            return "Semaphore";
        case VK_OBJECT_TYPE_COMMAND_BUFFER:
            return "Command Buffer";
        case VK_OBJECT_TYPE_FENCE:
            return "Fence";
        case VK_OBJECT_TYPE_DEVICE_MEMORY:
            return "Device Memory";
        case VK_OBJECT_TYPE_BUFFER:
            return "Buffer";
        case VK_OBJECT_TYPE_IMAGE:
            return "Image";
        case VK_OBJECT_TYPE_EVENT:
            return "Event";
        case VK_OBJECT_TYPE_QUERY_POOL:
            return "Query Pool";
        case VK_OBJECT_TYPE_BUFFER_VIEW:
            return "Buffer View";
        case VK_OBJECT_TYPE_IMAGE_VIEW:
            return "Image View";
        case VK_OBJECT_TYPE_SHADER_MODULE:
            return "Shader Module";
        case VK_OBJECT_TYPE_PIPELINE_CACHE:
            return "Pipeline Cache";
        case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
            return "Pipeline Layout";
        case VK_OBJECT_TYPE_RENDER_PASS:
            return "Render Pass";
        case VK_OBJECT_TYPE_PIPELINE:
            return "Pipeline";
        case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
            return "Descriptor Set Layout";
        case VK_OBJECT_TYPE_SAMPLER:
            return "Sampler";
        case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
            return "Descriptor Pool";
        case VK_OBJECT_TYPE_DESCRIPTOR_SET:
            return "Descriptor Set";
        case VK_OBJECT_TYPE_FRAMEBUFFER:
            return "Framebuffer";
        case VK_OBJECT_TYPE_COMMAND_POOL:
            return "Command Pool";
        case VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION:
            return "Sampler YCbCr Conversion";
        case VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE:
            return "Descriptor Update Template";
        case VK_OBJECT_TYPE_SURFACE_KHR:
            return "Surface";
        case VK_OBJECT_TYPE_SWAPCHAIN_KHR:
            return "Swapchain";
        case VK_OBJECT_TYPE_DISPLAY_KHR:
            return "Display";
        case VK_OBJECT_TYPE_DISPLAY_MODE_KHR:
            return "Display Mode";
        case VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV:
            return "Indirect Commands Layout";
        case VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT:
            return "Debug Utils Messenger";
        case VK_OBJECT_TYPE_VALIDATION_CACHE_EXT:
            return "Validation Cache";
        case VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV:
            return "Acceleration Structure";
        default:
            return "<Unrecognized>";
    }
}
718 
// Messenger callback registered with VK_EXT_debug_utils.  Formats the validation /
// driver message together with any debug-marker hierarchy and involved objects, then
// routes it to ANGLE's logging.  |userData| is the RendererVk that installed the
// messenger.
VKAPI_ATTR VkBool32 VKAPI_CALL
DebugUtilsMessenger(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                    VkDebugUtilsMessageTypeFlagsEXT messageTypes,
                    const VkDebugUtilsMessengerCallbackDataEXT *callbackData,
                    void *userData)
{
    RendererVk *rendererVk = static_cast<RendererVk *>(userData);

    // See if it's an issue we are aware of and don't want to be spammed about.
    if (ShouldReportDebugMessage(rendererVk, callbackData->pMessageIdName,
                                 callbackData->pMessage) == DebugMessageReport::Ignore)
    {
        return VK_FALSE;
    }

    std::ostringstream log;
    if (callbackData->pMessageIdName)
    {
        log << "[ " << callbackData->pMessageIdName << " ] ";
    }
    log << callbackData->pMessage << std::endl;

    // Aesthetic value based on length of the function name, line number, etc.
    constexpr size_t kStartIndent = 28;

    // Output the debug marker hierarchy under which this error has occured.
    // Each nested label gets one extra space of indentation (indent++).
    size_t indent = kStartIndent;
    if (callbackData->queueLabelCount > 0)
    {
        log << std::string(indent++, ' ') << "<Queue Label Hierarchy:>" << std::endl;
        for (uint32_t i = 0; i < callbackData->queueLabelCount; ++i)
        {
            log << std::string(indent++, ' ') << callbackData->pQueueLabels[i].pLabelName
                << std::endl;
        }
    }
    if (callbackData->cmdBufLabelCount > 0)
    {
        log << std::string(indent++, ' ') << "<Command Buffer Label Hierarchy:>" << std::endl;
        for (uint32_t i = 0; i < callbackData->cmdBufLabelCount; ++i)
        {
            log << std::string(indent++, ' ') << callbackData->pCmdBufLabels[i].pLabelName
                << std::endl;
        }
    }
    // Output the objects involved in this error message.
    if (callbackData->objectCount > 0)
    {
        for (uint32_t i = 0; i < callbackData->objectCount; ++i)
        {
            const char *objectName = callbackData->pObjects[i].pObjectName;
            const char *objectType = GetVkObjectTypeName(callbackData->pObjects[i].objectType);
            uint64_t objectHandle  = callbackData->pObjects[i].objectHandle;
            log << std::string(indent, ' ') << "Object: ";
            if (objectHandle == 0)
            {
                log << "VK_NULL_HANDLE";
            }
            else
            {
                log << "0x" << std::hex << objectHandle << std::dec;
            }
            log << " (type = " << objectType << "(" << callbackData->pObjects[i].objectType << "))";
            if (objectName)
            {
                log << " [" << objectName << "]";
            }
            log << std::endl;
        }
    }

    bool isError    = (messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) != 0;
    std::string msg = log.str();

    // Hand the message to the renderer (presumably retained for later query -- see
    // RendererVk::onNewValidationMessage) before logging it.
    rendererVk->onNewValidationMessage(msg);

    if (isError)
    {
        ERR() << msg;
    }
    else
    {
        WARN() << msg;
    }

    // Per the VK_EXT_debug_utils spec, the callback should return VK_FALSE so the
    // triggering Vulkan call is not aborted.
    return VK_FALSE;
}
806 
807 VKAPI_ATTR void VKAPI_CALL
MemoryReportCallback(const VkDeviceMemoryReportCallbackDataEXT * callbackData,void * userData)808 MemoryReportCallback(const VkDeviceMemoryReportCallbackDataEXT *callbackData, void *userData)
809 {
810     RendererVk *rendererVk = static_cast<RendererVk *>(userData);
811     rendererVk->processMemoryReportCallback(*callbackData);
812 }
813 
ShouldUseValidationLayers(const egl::AttributeMap & attribs)814 bool ShouldUseValidationLayers(const egl::AttributeMap &attribs)
815 {
816 #if defined(ANGLE_ENABLE_VULKAN_VALIDATION_LAYERS_BY_DEFAULT)
817     return ShouldUseDebugLayers(attribs);
818 #else
819     EGLAttrib debugSetting =
820         attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE);
821     return debugSetting == EGL_TRUE;
822 #endif  // defined(ANGLE_ENABLE_VULKAN_VALIDATION_LAYERS_BY_DEFAULT)
823 }
824 
// Caps |current| at |lower|: returns whichever of the two versions is smaller.
gl::Version LimitVersionTo(const gl::Version &current, const gl::Version &lower)
{
    // Equivalent to std::min(current, lower): prefer |lower| only when it is
    // strictly less than |current|.
    return (lower < current) ? lower : current;
}
829 
FencePropertiesCompatibleWithAndroid(const VkExternalFenceProperties & externalFenceProperties)830 [[maybe_unused]] bool FencePropertiesCompatibleWithAndroid(
831     const VkExternalFenceProperties &externalFenceProperties)
832 {
833     // handleType here is the external fence type -
834     // we want type compatible with creating and export/dup() Android FD
835 
836     // Imported handleType that can be exported - need for vkGetFenceFdKHR()
837     if ((externalFenceProperties.exportFromImportedHandleTypes &
838          VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) == 0)
839     {
840         return false;
841     }
842 
843     // HandleTypes which can be specified at creating a fence
844     if ((externalFenceProperties.compatibleHandleTypes &
845          VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR) == 0)
846     {
847         return false;
848     }
849 
850     constexpr VkExternalFenceFeatureFlags kFeatureFlags =
851         (VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR |
852          VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR);
853     if ((externalFenceProperties.externalFenceFeatures & kFeatureFlags) != kFeatureFlags)
854     {
855         return false;
856     }
857 
858     return true;
859 }
860 
SemaphorePropertiesCompatibleWithAndroid(const VkExternalSemaphoreProperties & externalSemaphoreProperties)861 [[maybe_unused]] bool SemaphorePropertiesCompatibleWithAndroid(
862     const VkExternalSemaphoreProperties &externalSemaphoreProperties)
863 {
864     // handleType here is the external semaphore type -
865     // we want type compatible with importing an Android FD
866 
867     constexpr VkExternalSemaphoreFeatureFlags kFeatureFlags =
868         (VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR);
869     if ((externalSemaphoreProperties.externalSemaphoreFeatures & kFeatureFlags) != kFeatureFlags)
870     {
871         return false;
872     }
873 
874     return true;
875 }
876 
// CRC16-CCITT (bit-reflected polynomial 0x8408, zero initial value -- i.e. the
// CRC-16/KERMIT variant) is used for the header before the pipeline cache key data.
uint16_t ComputeCRC16(const uint8_t *data, const size_t size)
{
    constexpr uint16_t kPolynomialCRC16 = 0x8408;
    uint16_t crc                        = 0;

    for (size_t byteIndex = 0; byteIndex < size; ++byteIndex)
    {
        crc = static_cast<uint16_t>(crc ^ data[byteIndex]);
        for (int bit = 0; bit < 8; ++bit)
        {
            // Shift right; if the bit shifted out was set, fold in the polynomial.
            const bool lsbWasSet = (crc & 1) != 0;
            crc                  = static_cast<uint16_t>(crc >> 1);
            if (lsbWasSet)
            {
                crc = static_cast<uint16_t>(crc ^ kPolynomialCRC16);
            }
        }
    }
    return crc;
}
893 
894 // Header data type used for the pipeline cache.
895 ANGLE_ENABLE_STRUCT_PADDING_WARNINGS
896 
897 class CacheDataHeader
898 {
899   public:
setData(uint16_t compressedDataCRC,uint32_t cacheDataSize,uint16_t numChunks,uint16_t chunkIndex)900     void setData(uint16_t compressedDataCRC,
901                  uint32_t cacheDataSize,
902                  uint16_t numChunks,
903                  uint16_t chunkIndex)
904     {
905         mVersion           = kPipelineCacheVersion;
906         mCompressedDataCRC = compressedDataCRC;
907         mCacheDataSize     = cacheDataSize;
908         mNumChunks         = numChunks;
909         mChunkIndex        = chunkIndex;
910     }
911 
getData(uint16_t * versionOut,uint16_t * compressedDataCRCOut,uint32_t * cacheDataSizeOut,size_t * numChunksOut,size_t * chunkIndexOut) const912     void getData(uint16_t *versionOut,
913                  uint16_t *compressedDataCRCOut,
914                  uint32_t *cacheDataSizeOut,
915                  size_t *numChunksOut,
916                  size_t *chunkIndexOut) const
917     {
918         *versionOut           = mVersion;
919         *compressedDataCRCOut = mCompressedDataCRC;
920         *cacheDataSizeOut     = mCacheDataSize;
921         *numChunksOut         = static_cast<size_t>(mNumChunks);
922         *chunkIndexOut        = static_cast<size_t>(mChunkIndex);
923     }
924 
925   private:
926     // For pipeline cache, the values stored in key data has the following order:
927     // {headerVersion, compressedDataCRC, originalCacheSize, numChunks, chunkIndex;
928     // chunkCompressedData}. The header values are used to validate the data. For example, if the
929     // original and compressed sizes are 70000 bytes (68k) and 68841 bytes (67k), the compressed
930     // data will be divided into two chunks: {ver,crc0,70000,2,0;34421 bytes} and
931     // {ver,crc1,70000,2,1;34420 bytes}.
932     // The version is used to keep track of the cache format. Please note that kPipelineCacheVersion
933     // must be incremented by 1 in case of any updates to the cache header or data structure. While
934     // it is possible to modify the fields in the header, it is recommended to keep the version on
935     // top and the same size unless absolutely necessary.
936 
937     uint16_t mVersion;
938     uint16_t mCompressedDataCRC;
939     uint32_t mCacheDataSize;
940     uint16_t mNumChunks;
941     uint16_t mChunkIndex;
942 };
943 
944 ANGLE_DISABLE_STRUCT_PADDING_WARNINGS
945 
// Pack header data for the pipeline cache key data.
// Thin forwarding wrapper over CacheDataHeader::setData so call sites keep the field
// order in one obvious place.
void PackHeaderDataForPipelineCache(uint16_t compressedDataCRC,
                                    uint32_t cacheDataSize,
                                    uint16_t numChunks,
                                    uint16_t chunkIndex,
                                    CacheDataHeader *dataOut)
{
    dataOut->setData(compressedDataCRC, cacheDataSize, numChunks, chunkIndex);
}
955 
// Unpack header data from the pipeline cache key data.
// Thin forwarding wrapper over CacheDataHeader::getData, mirroring
// PackHeaderDataForPipelineCache above.
void UnpackHeaderDataForPipelineCache(CacheDataHeader *data,
                                      uint16_t *versionOut,
                                      uint16_t *compressedDataCRCOut,
                                      uint32_t *cacheDataSizeOut,
                                      size_t *numChunksOut,
                                      size_t *chunkIndexOut)
{
    data->getData(versionOut, compressedDataCRCOut, cacheDataSizeOut, numChunksOut, chunkIndexOut);
}
966 
ComputePipelineCacheVkChunkKey(VkPhysicalDeviceProperties physicalDeviceProperties,const uint8_t chunkIndex,egl::BlobCache::Key * hashOut)967 void ComputePipelineCacheVkChunkKey(VkPhysicalDeviceProperties physicalDeviceProperties,
968                                     const uint8_t chunkIndex,
969                                     egl::BlobCache::Key *hashOut)
970 {
971     std::ostringstream hashStream("ANGLE Pipeline Cache: ", std::ios_base::ate);
972     // Add the pipeline cache UUID to make sure the blob cache always gives a compatible pipeline
973     // cache.  It's not particularly necessary to write it as a hex number as done here, so long as
974     // there is no '\0' in the result.
975     for (const uint32_t c : physicalDeviceProperties.pipelineCacheUUID)
976     {
977         hashStream << std::hex << c;
978     }
979     // Add the vendor and device id too for good measure.
980     hashStream << std::hex << physicalDeviceProperties.vendorID;
981     hashStream << std::hex << physicalDeviceProperties.deviceID;
982 
983     // Add chunkIndex to generate unique key for chunks.
984     hashStream << std::hex << static_cast<uint32_t>(chunkIndex);
985 
986     const std::string &hashString = hashStream.str();
987     angle::base::SHA1HashBytes(reinterpret_cast<const unsigned char *>(hashString.c_str()),
988                                hashString.length(), hashOut->data());
989 }
990 
// Compresses |cacheData| (the raw VkPipelineCache contents) and stores it in the
// display's blob cache, split into chunks of at most kMaxBlobCacheSize (64KB, header
// included) so Android's per-entry blob cache limits are respected.  All failures are
// soft: the function warns and returns, leaving the blob cache unchanged.
void CompressAndStorePipelineCacheVk(VkPhysicalDeviceProperties physicalDeviceProperties,
                                     DisplayVk *displayVk,
                                     ContextVk *contextVk,
                                     const std::vector<uint8_t> &cacheData,
                                     const size_t maxTotalSize)
{
    // Though the pipeline cache will be compressed and divided into several chunks to store in blob
    // cache, the largest total size of blob cache is only 2M in android now, so there is no use to
    // handle big pipeline cache when android will reject it finally.
    if (cacheData.size() >= maxTotalSize)
    {
        // TODO: handle the big pipeline cache. http://anglebug.com/4722
        ANGLE_PERF_WARNING(contextVk->getDebug(), GL_DEBUG_SEVERITY_LOW,
                           "Skip syncing pipeline cache data when it's larger than maxTotalSize.");
        return;
    }

    // To make it possible to store more pipeline cache data, compress the whole pipelineCache.
    angle::MemoryBuffer compressedData;

    if (!egl::CompressBlobCacheData(cacheData.size(), cacheData.data(), &compressedData))
    {
        ANGLE_PERF_WARNING(contextVk->getDebug(), GL_DEBUG_SEVERITY_LOW,
                           "Skip syncing pipeline cache data as it failed compression.");
        return;
    }

    // If the size of compressedData is larger than (kMaxBlobCacheSize - sizeof(numChunks)),
    // the pipelineCache still can't be stored in blob cache. Divide the large compressed
    // pipelineCache into several parts to store seperately. There is no function to
    // query the limit size in android.
    constexpr size_t kMaxBlobCacheSize = 64 * 1024;
    size_t compressedOffset            = 0;

    // Chunk count is derived from the payload space left after the per-chunk header;
    // every chunk but the last carries ceil(size / numChunks) bytes.
    const size_t numChunks = UnsignedCeilDivide(static_cast<unsigned int>(compressedData.size()),
                                                kMaxBlobCacheSize - sizeof(CacheDataHeader));
    ASSERT(numChunks <= UINT16_MAX);
    size_t chunkSize = UnsignedCeilDivide(static_cast<unsigned int>(compressedData.size()),
                                          static_cast<unsigned int>(numChunks));
    uint16_t compressedDataCRC = 0;
    if (kEnableCRCForPipelineCache)
    {
        compressedDataCRC = ComputeCRC16(compressedData.data(), compressedData.size());
    }

    for (size_t chunkIndex = 0; chunkIndex < numChunks; ++chunkIndex)
    {
        if (chunkIndex == numChunks - 1)
        {
            // The last chunk takes whatever remains (may be smaller than chunkSize).
            chunkSize = compressedData.size() - compressedOffset;
        }

        angle::MemoryBuffer keyData;
        if (!keyData.resize(sizeof(CacheDataHeader) + chunkSize))
        {
            ANGLE_PERF_WARNING(contextVk->getDebug(), GL_DEBUG_SEVERITY_LOW,
                               "Skip syncing pipeline cache data due to out of memory.");
            return;
        }

        // Add the header data, followed by the compressed data.
        ASSERT(cacheData.size() <= UINT32_MAX);
        CacheDataHeader headerData = {};
        PackHeaderDataForPipelineCache(compressedDataCRC, static_cast<uint32_t>(cacheData.size()),
                                       static_cast<uint16_t>(numChunks),
                                       static_cast<uint16_t>(chunkIndex), &headerData);
        memcpy(keyData.data(), &headerData, sizeof(CacheDataHeader));
        memcpy(keyData.data() + sizeof(CacheDataHeader), compressedData.data() + compressedOffset,
               chunkSize);
        compressedOffset += chunkSize;

        // Create unique hash key.
        // NOTE(review): ComputePipelineCacheVkChunkKey takes the chunk index as uint8_t
        // while numChunks above is only checked against UINT16_MAX -- confirm chunk
        // counts >= 256 cannot occur, or chunk keys would alias.
        egl::BlobCache::Key chunkCacheHash;
        ComputePipelineCacheVkChunkKey(physicalDeviceProperties, chunkIndex, &chunkCacheHash);

        displayVk->getBlobCache()->putApplication(chunkCacheHash, keyData);
    }
}
1069 
// angle::Closure that captures the pipeline cache contents and runs
// CompressAndStorePipelineCacheVk when invoked (presumably on a worker thread --
// see WaitableCompressEventImpl below; confirm against the dispatch site).
class CompressAndStorePipelineCacheTask : public angle::Closure
{
  public:
    CompressAndStorePipelineCacheTask(DisplayVk *displayVk,
                                      ContextVk *contextVk,
                                      std::vector<uint8_t> &&cacheData,
                                      size_t kMaxTotalSize)
        : mDisplayVk(displayVk),
          mContextVk(contextVk),
          mCacheData(std::move(cacheData)),
          mMaxTotalSize(kMaxTotalSize)
    {}

    // Invoked by the task runner: compresses and stores the captured cache data.
    void operator()() override
    {
        ANGLE_TRACE_EVENT0("gpu.angle", "CompressAndStorePipelineCacheVk");
        CompressAndStorePipelineCacheVk(mContextVk->getRenderer()->getPhysicalDeviceProperties(),
                                        mDisplayVk, mContextVk, mCacheData, mMaxTotalSize);
    }

  private:
    DisplayVk *mDisplayVk;  // Not owned.
    ContextVk *mContextVk;  // Not owned.
    // Pipeline cache contents, moved into the task at construction.
    std::vector<uint8_t> mCacheData;
    size_t mMaxTotalSize;
};
1096 
// WaitableCompressEvent that additionally holds a reference to the compression task,
// presumably to keep the task (and the cache data it owns) alive until the event is
// destroyed -- confirm against how callers wait on the event.
class WaitableCompressEventImpl : public WaitableCompressEvent
{
  public:
    WaitableCompressEventImpl(std::shared_ptr<angle::WaitableEvent> waitableEvent,
                              std::shared_ptr<CompressAndStorePipelineCacheTask> compressTask)
        : WaitableCompressEvent(waitableEvent), mCompressTask(compressTask)
    {}

  private:
    std::shared_ptr<CompressAndStorePipelineCacheTask> mCompressTask;
};
1108 
// Reverse of CompressAndStorePipelineCacheVk: reads chunk 0 from the blob cache to
// learn the chunk count and expected sizes, validates every chunk's header against
// chunk 0, reassembles the compressed stream, optionally CRC-checks it, and
// decompresses it into |uncompressedData|.  Any validation failure returns Continue
// with |*success| left false, so the caller just starts with an empty pipeline cache.
angle::Result GetAndDecompressPipelineCacheVk(VkPhysicalDeviceProperties physicalDeviceProperties,
                                              DisplayVk *displayVk,
                                              angle::MemoryBuffer *uncompressedData,
                                              bool *success)
{
    // Make sure that the bool output is initialized to false.
    *success = false;

    // Compute the hash key of chunkIndex 0 and find the first cache data in blob cache.
    egl::BlobCache::Key chunkCacheHash;
    ComputePipelineCacheVkChunkKey(physicalDeviceProperties, 0, &chunkCacheHash);
    egl::BlobCache::Value keyData;
    size_t keySize = 0;

    if (!displayVk->getBlobCache()->get(displayVk->getScratchBuffer(), chunkCacheHash, &keyData,
                                        &keySize) ||
        keyData.size() < sizeof(CacheDataHeader))
    {
        // Nothing in the cache.
        return angle::Result::Continue;
    }

    // Get the number of chunks and other values from the header for data validation.
    uint16_t cacheVersion;
    uint16_t compressedDataCRC;
    uint32_t uncompressedCacheDataSize;
    size_t numChunks;
    size_t chunkIndex0;

    CacheDataHeader headerData = {};
    memcpy(&headerData, keyData.data(), sizeof(CacheDataHeader));
    UnpackHeaderDataForPipelineCache(&headerData, &cacheVersion, &compressedDataCRC,
                                     &uncompressedCacheDataSize, &numChunks, &chunkIndex0);
    if (cacheVersion == kPipelineCacheVersion)
    {
        // The data must not contain corruption.
        if (chunkIndex0 != 0 || numChunks == 0 || uncompressedCacheDataSize == 0)
        {
            FATAL() << "Unexpected values while unpacking chunk index 0: "
                    << "cacheVersion = " << cacheVersion << ", chunkIndex = " << chunkIndex0
                    << ", numChunks = " << numChunks
                    << ", uncompressedCacheDataSize = " << uncompressedCacheDataSize;
        }
    }
    else
    {
        // Either the header structure has been updated, or the header value has been changed.
        if (cacheVersion > kPipelineCacheVersion + (1 << 8))
        {
            // TODO(abdolrashidi): Data corruption in the version should result in a fatal error.
            // For now, a warning is shown instead, but it should change when the version field is
            // no longer new.
            WARN() << "Existing cache version is significantly greater than the new version"
                      ", possibly due to data corruption: "
                   << "newVersion = " << kPipelineCacheVersion
                   << ", existingVersion = " << cacheVersion;
        }
        else
        {
            WARN() << "Change in cache header version detected: "
                   << "newVersion = " << kPipelineCacheVersion
                   << ", existingVersion = " << cacheVersion;
        }
        // A version mismatch simply abandons the stored cache.
        return angle::Result::Continue;
    }

    size_t chunkSize      = keySize - sizeof(CacheDataHeader);
    size_t compressedSize = 0;

    // Allocate enough memory.
    // chunkSize here is chunk 0's payload size; all chunks but the last are this size,
    // so chunkSize * numChunks is an upper bound for the whole compressed stream.
    angle::MemoryBuffer compressedData;
    ANGLE_VK_CHECK(displayVk, compressedData.resize(chunkSize * numChunks),
                   VK_ERROR_INITIALIZATION_FAILED);

    // To combine the parts of the pipelineCache data.
    for (size_t chunkIndex = 0; chunkIndex < numChunks; ++chunkIndex)
    {
        // Get the unique key by chunkIndex.
        ComputePipelineCacheVkChunkKey(physicalDeviceProperties, chunkIndex, &chunkCacheHash);

        if (!displayVk->getBlobCache()->get(displayVk->getScratchBuffer(), chunkCacheHash, &keyData,
                                            &keySize) ||
            keyData.size() < sizeof(CacheDataHeader))
        {
            // Can't find every part of the cache data.
            WARN() << "Failed to get pipeline cache chunk " << chunkIndex << " of " << numChunks;
            return angle::Result::Continue;
        }

        // Validate the header values and ensure there is enough space to store.
        uint16_t checkCacheVersion;
        uint16_t checkCompressedDataCRC;
        uint32_t checkUncompressedCacheDataSize;
        size_t checkNumChunks;
        size_t checkChunkIndex;

        memcpy(&headerData, keyData.data(), sizeof(CacheDataHeader));
        UnpackHeaderDataForPipelineCache(&headerData, &checkCacheVersion, &checkCompressedDataCRC,
                                         &checkUncompressedCacheDataSize, &checkNumChunks,
                                         &checkChunkIndex);

        // Every chunk's header must agree with chunk 0's and carry its own index.
        chunkSize = keySize - sizeof(CacheDataHeader);
        bool isHeaderDataCorrupted =
            (checkCacheVersion != cacheVersion) || (checkNumChunks != numChunks) ||
            (checkUncompressedCacheDataSize != uncompressedCacheDataSize) ||
            (checkCompressedDataCRC != compressedDataCRC) || (checkChunkIndex != chunkIndex) ||
            (compressedData.size() < compressedSize + chunkSize);
        if (isHeaderDataCorrupted)
        {
            WARN() << "Pipeline cache chunk header corrupted: "
                   << "checkCacheVersion = " << checkCacheVersion
                   << ", cacheVersion = " << cacheVersion << ", checkNumChunks = " << checkNumChunks
                   << ", numChunks = " << numChunks
                   << ", checkUncompressedCacheDataSize = " << checkUncompressedCacheDataSize
                   << ", uncompressedCacheDataSize = " << uncompressedCacheDataSize
                   << ", checkCompressedDataCRC = " << checkCompressedDataCRC
                   << ", compressedDataCRC = " << compressedDataCRC
                   << ", checkChunkIndex = " << checkChunkIndex << ", chunkIndex = " << chunkIndex
                   << ", compressedData.size() = " << compressedData.size()
                   << ", (compressedSize + chunkSize) = " << (compressedSize + chunkSize);
            return angle::Result::Continue;
        }

        memcpy(compressedData.data() + compressedSize, keyData.data() + sizeof(CacheDataHeader),
               chunkSize);
        compressedSize += chunkSize;
    }

    // CRC for compressed data and size for decompressed data should match the values in the header.
    if (kEnableCRCForPipelineCache)
    {
        uint16_t computedCompressedDataCRC = ComputeCRC16(compressedData.data(), compressedSize);
        if (computedCompressedDataCRC != compressedDataCRC)
        {
            if (compressedDataCRC == 0)
            {
                // This could be due to the cache being populated before kEnableCRCForPipelineCache
                // was enabled.
                WARN() << "Expected CRC = " << compressedDataCRC
                       << ", Actual CRC = " << computedCompressedDataCRC;
                return angle::Result::Continue;
            }

            // If the expected CRC is non-zero and does not match the actual CRC from the data,
            // there has been an unexpected data corruption.
            ERR() << "Expected CRC = " << compressedDataCRC
                  << ", Actual CRC = " << computedCompressedDataCRC;

            // NOTE(review): this log line is missing a ", " separator before "numChunks".
            ERR() << "Data extracted from the cache headers: " << std::hex
                  << ", compressedDataCRC = 0x" << compressedDataCRC << "numChunks = 0x"
                  << numChunks << ", uncompressedCacheDataSize = 0x" << uncompressedCacheDataSize;

            FATAL() << "CRC check failed; possible pipeline cache data corruption.";
            return angle::Result::Stop;
        }
    }

    ANGLE_VK_CHECK(
        displayVk,
        egl::DecompressBlobCacheData(compressedData.data(), compressedSize, uncompressedData),
        VK_ERROR_INITIALIZATION_FAILED);

    // The decompressed size must match what chunk 0's header promised.
    if (uncompressedData->size() != uncompressedCacheDataSize)
    {
        WARN() << "Expected uncompressed size = " << uncompressedCacheDataSize
               << ", Actual uncompressed size = " << uncompressedData->size();
        return angle::Result::Continue;
    }

    *success = true;
    return angle::Result::Continue;
}
1281 
1282 // Environment variable (and associated Android property) to enable Vulkan debug-utils markers
1283 constexpr char kEnableDebugMarkersVarName[]      = "ANGLE_ENABLE_DEBUG_MARKERS";
1284 constexpr char kEnableDebugMarkersPropertyName[] = "debug.angle.markers";
1285 
GetShadingRateFromVkExtent(const VkExtent2D & extent)1286 ANGLE_INLINE gl::ShadingRate GetShadingRateFromVkExtent(const VkExtent2D &extent)
1287 {
1288     if (extent.width == 1 && extent.height == 2)
1289     {
1290         return gl::ShadingRate::_1x2;
1291     }
1292     else if (extent.width == 2 && extent.height == 1)
1293     {
1294         return gl::ShadingRate::_2x1;
1295     }
1296     else if (extent.width == 2 && extent.height == 2)
1297     {
1298         return gl::ShadingRate::_2x2;
1299     }
1300     else if (extent.width == 4 && extent.height == 2)
1301     {
1302         return gl::ShadingRate::_4x2;
1303     }
1304     else if (extent.width == 4 && extent.height == 4)
1305     {
1306         return gl::ShadingRate::_4x4;
1307     }
1308 
1309     return gl::ShadingRate::_1x1;
1310 }
1311 }  // namespace
1312 
1313 // OneOffCommandPool implementation.
// Starts out uninitialized; init() must record a valid protection type before command
// buffers are handed out (see the ASSERT in getCommandBuffer).
OneOffCommandPool::OneOffCommandPool() : mProtectionType(vk::ProtectionType::InvalidEnum) {}
1315 
void OneOffCommandPool::init(vk::ProtectionType protectionType)
{
    // Only records the protection type; the VkCommandPool itself is created lazily on
    // first use in getCommandBuffer().  Must not be called once a pool exists.
    ASSERT(!mCommandPool.valid());
    mProtectionType = protectionType;
}
1321 
destroy(VkDevice device)1322 void OneOffCommandPool::destroy(VkDevice device)
1323 {
1324     std::unique_lock<std::mutex> lock(mMutex);
1325     for (PendingOneOffCommands &pending : mPendingCommands)
1326     {
1327         pending.commandBuffer.releaseHandle();
1328     }
1329     mCommandPool.destroy(device);
1330     mProtectionType = vk::ProtectionType::InvalidEnum;
1331 }
1332 
angle::Result OneOffCommandPool::getCommandBuffer(vk::Context *context,
                                                  vk::PrimaryCommandBuffer *commandBufferOut)
{
    // Hands out a primary command buffer that is already in the recording state
    // (begin() is called with ONE_TIME_SUBMIT before returning).
    std::unique_lock<std::mutex> lock(mMutex);

    // Prefer recycling the oldest pending buffer, but only if the GPU has finished
    // with the submission it was last used in.
    if (!mPendingCommands.empty() &&
        context->getRenderer()->hasResourceUseFinished(mPendingCommands.front().use))
    {
        *commandBufferOut = std::move(mPendingCommands.front().commandBuffer);
        mPendingCommands.pop_front();
        ANGLE_VK_TRY(context, commandBufferOut->reset());
    }
    else
    {
        // Lazily create the command pool on first use; init() only recorded the
        // protection type.
        if (!mCommandPool.valid())
        {
            VkCommandPoolCreateInfo createInfo = {};
            createInfo.sType                   = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
            createInfo.flags                   = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT |
                               VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
            ASSERT(mProtectionType == vk::ProtectionType::Unprotected ||
                   mProtectionType == vk::ProtectionType::Protected);
            if (mProtectionType == vk::ProtectionType::Protected)
            {
                createInfo.flags |= VK_COMMAND_POOL_CREATE_PROTECTED_BIT;
            }
            ANGLE_VK_TRY(context, mCommandPool.init(context->getDevice(), createInfo));
        }

        // Allocate a fresh primary command buffer from the pool.
        VkCommandBufferAllocateInfo allocInfo = {};
        allocInfo.sType                       = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
        allocInfo.level                       = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
        allocInfo.commandBufferCount          = 1;
        allocInfo.commandPool                 = mCommandPool.getHandle();

        ANGLE_VK_TRY(context, commandBufferOut->init(context->getDevice(), allocInfo));
    }

    // Begin recording; the buffer is flagged for a single submission, after which it
    // comes back through releaseCommandBuffer().
    VkCommandBufferBeginInfo beginInfo = {};
    beginInfo.sType                    = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    beginInfo.flags                    = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    beginInfo.pInheritanceInfo         = nullptr;
    ANGLE_VK_TRY(context, commandBufferOut->begin(beginInfo));

    return angle::Result::Continue;
}
1379 
releaseCommandBuffer(const QueueSerial & submitQueueSerial,vk::PrimaryCommandBuffer && primary)1380 void OneOffCommandPool::releaseCommandBuffer(const QueueSerial &submitQueueSerial,
1381                                              vk::PrimaryCommandBuffer &&primary)
1382 {
1383     std::unique_lock<std::mutex> lock(mMutex);
1384     mPendingCommands.push_back({vk::ResourceUse(submitQueueSerial), std::move(primary)});
1385 }
1386 
1387 // RendererVk implementation.
// RendererVk implementation.
// Constructor: initializes every scalar/handle member to a safe sentinel value.  Real setup
// (instance, device, queues) happens later in initialize().
RendererVk::RendererVk()
    : mDisplay(nullptr),
      mLibVulkanLibrary(nullptr),
      mCapsInitialized(false),
      mInstanceVersion(0),
      mDeviceVersion(0),
      mInstance(VK_NULL_HANDLE),
      mEnableValidationLayers(false),
      mEnableDebugUtils(false),
      mAngleDebuggerMode(false),
      mEnabledICD(angle::vk::ICD::Default),
      mDebugUtilsMessenger(VK_NULL_HANDLE),
      mPhysicalDevice(VK_NULL_HANDLE),
      mMaxVertexAttribDivisor(1),
      // max() marks "no queue family chosen yet"; a real index is picked in initialize().
      mCurrentQueueFamilyIndex(std::numeric_limits<uint32_t>::max()),
      mMaxVertexAttribStride(0),
      mDefaultUniformBufferSize(kPreferredDefaultUniformBufferSize),
      mDevice(VK_NULL_HANDLE),
      mDeviceLost(false),
      mSuballocationGarbageSizeInBytes(0),
      mSuballocationGarbageDestroyed(0),
      mSuballocationGarbageSizeInBytesCachedAtomic(0),
      // Memory type indices are resolved in initializeMemoryAllocator().
      mCoherentStagingBufferMemoryTypeIndex(kInvalidMemoryTypeIndex),
      mNonCoherentStagingBufferMemoryTypeIndex(kInvalidMemoryTypeIndex),
      mStagingBufferAlignment(1),
      mHostVisibleVertexConversionBufferMemoryTypeIndex(kInvalidMemoryTypeIndex),
      mDeviceLocalVertexConversionBufferMemoryTypeIndex(kInvalidMemoryTypeIndex),
      mVertexConversionBufferAlignment(1),
      mPipelineCacheVkUpdateTimeout(kPipelineCacheVkUpdatePeriod),
      mPipelineCacheSizeAtLastSync(0),
      mPipelineCacheInitialized(false),
      mValidationMessageCount(0),
      mCommandProcessor(this, &mCommandQueue),
      mSupportedVulkanPipelineStageMask(0),
      mSupportedVulkanShaderStageMask(0),
      mMemoryAllocationTracker(MemoryAllocationTracker(this))
{
    // Mark all format property slots as "not yet queried"; they are filled lazily.
    VkFormatProperties invalid = {0, 0, kInvalidFormatFeatureFlags};
    mFormatProperties.fill(invalid);

    // We currently don't have any big-endian devices in the list of supported platforms.  There are
    // a number of places in the Vulkan backend that make this assumption.  This assertion is made
    // early to fail immediately on big-endian platforms.
    ASSERT(IsLittleEndian());
}
1433 
~RendererVk()1434 RendererVk::~RendererVk() {}
1435 
hasSharedGarbage()1436 bool RendererVk::hasSharedGarbage()
1437 {
1438     std::unique_lock<std::mutex> lock(mGarbageMutex);
1439     return !mSharedGarbage.empty() || !mPendingSubmissionGarbage.empty() ||
1440            !mSuballocationGarbage.empty() || !mPendingSubmissionSuballocationGarbage.empty();
1441 }
1442 
onDestroy(vk::Context * context)1443 void RendererVk::onDestroy(vk::Context *context)
1444 {
1445     if (isDeviceLost())
1446     {
1447         handleDeviceLost();
1448     }
1449 
1450     mCommandProcessor.destroy(context);
1451     mCommandQueue.destroy(context);
1452 
1453     // mCommandQueue.destroy should already set "last completed" serials to infinite.
1454     cleanupGarbage();
1455     ASSERT(!hasSharedGarbage());
1456     ASSERT(mOrphanedBufferBlocks.empty());
1457 
1458     for (OneOffCommandPool &oneOffCommandPool : mOneOffCommandPoolMap)
1459     {
1460         oneOffCommandPool.destroy(mDevice);
1461     }
1462 
1463     mPipelineCache.destroy(mDevice);
1464     mSamplerCache.destroy(this);
1465     mYuvConversionCache.destroy(this);
1466     mVkFormatDescriptorCountMap.clear();
1467 
1468     mOutsideRenderPassCommandBufferRecycler.onDestroy();
1469     mRenderPassCommandBufferRecycler.onDestroy();
1470 
1471     mImageMemorySuballocator.destroy(this);
1472     mAllocator.destroy();
1473 
1474     // When the renderer is being destroyed, it is possible to check if all the allocated memory
1475     // throughout the execution has been freed.
1476     mMemoryAllocationTracker.onDestroy();
1477 
1478     if (mDevice)
1479     {
1480         vkDestroyDevice(mDevice, nullptr);
1481         mDevice = VK_NULL_HANDLE;
1482     }
1483 
1484     if (mDebugUtilsMessenger)
1485     {
1486         vkDestroyDebugUtilsMessengerEXT(mInstance, mDebugUtilsMessenger, nullptr);
1487     }
1488 
1489     logCacheStats();
1490 
1491     if (mInstance)
1492     {
1493         vkDestroyInstance(mInstance, nullptr);
1494         mInstance = VK_NULL_HANDLE;
1495     }
1496 
1497     if (mCompressEvent)
1498     {
1499         mCompressEvent->wait();
1500         mCompressEvent.reset();
1501     }
1502 
1503     mMemoryProperties.destroy();
1504     mPhysicalDevice = VK_NULL_HANDLE;
1505 
1506     mEnabledInstanceExtensions.clear();
1507     mEnabledDeviceExtensions.clear();
1508 
1509     ASSERT(!hasSharedGarbage());
1510 
1511     if (mLibVulkanLibrary)
1512     {
1513         angle::CloseSystemLibrary(mLibVulkanLibrary);
1514         mLibVulkanLibrary = nullptr;
1515     }
1516 }
1517 
// Records the device-lost state, then forwards the notification to the display.  The flag is set
// first so that any code reached from the display callback observes isDeviceLost() == true.
void RendererVk::notifyDeviceLost()
{
    mDeviceLost = true;
    mDisplay->notifyDeviceLost();
}
1523 
// Returns whether the Vulkan device has been flagged as lost (see notifyDeviceLost()).
bool RendererVk::isDeviceLost() const
{
    return mDeviceLost;
}
1528 
// Returns whether the Vulkan instance supports at least API version 1.1.
bool RendererVk::isVulkan11Instance() const
{
    return IsVulkan11(mInstanceVersion);
}
1533 
// Returns whether the (effective) device API version is at least Vulkan 1.1.
bool RendererVk::isVulkan11Device() const
{
    return IsVulkan11(mDeviceVersion);
}
1538 
// Enumerates the instance extensions available from the implementation, implicit layers, and the
// given explicit layers; sets the instance-extension-dependent ANGLE features; and populates
// mEnabledInstanceExtensions with the extensions ANGLE will request.  Fails if any requested
// extension is not actually available.
angle::Result RendererVk::enableInstanceExtensions(
    DisplayVk *displayVk,
    const VulkanLayerVector &enabledInstanceLayerNames,
    const char *wsiExtension,
    bool canLoadDebugUtils)
{
    // Enumerate instance extensions that are provided by the vulkan implementation and implicit
    // layers.
    uint32_t instanceExtensionCount = 0;
    {
        ANGLE_SCOPED_DISABLE_LSAN();
        ANGLE_SCOPED_DISABLE_MSAN();
        ANGLE_VK_TRY(displayVk, vkEnumerateInstanceExtensionProperties(
                                    nullptr, &instanceExtensionCount, nullptr));
    }

    std::vector<VkExtensionProperties> instanceExtensionProps(instanceExtensionCount);
    if (instanceExtensionCount > 0)
    {
        ANGLE_SCOPED_DISABLE_LSAN();
        ANGLE_SCOPED_DISABLE_MSAN();
        ANGLE_VK_TRY(displayVk,
                     vkEnumerateInstanceExtensionProperties(nullptr, &instanceExtensionCount,
                                                            instanceExtensionProps.data()));
        // In case fewer items were returned than requested, resize instanceExtensionProps to the
        // number of extensions returned (i.e. instanceExtensionCount).
        instanceExtensionProps.resize(instanceExtensionCount);
    }

    // Enumerate instance extensions that are provided by explicit layers.
    for (const char *layerName : enabledInstanceLayerNames)
    {
        uint32_t previousExtensionCount      = static_cast<uint32_t>(instanceExtensionProps.size());
        uint32_t instanceLayerExtensionCount = 0;
        {
            ANGLE_SCOPED_DISABLE_LSAN();
            ANGLE_SCOPED_DISABLE_MSAN();
            ANGLE_VK_TRY(displayVk, vkEnumerateInstanceExtensionProperties(
                                        layerName, &instanceLayerExtensionCount, nullptr));
        }
        instanceExtensionProps.resize(previousExtensionCount + instanceLayerExtensionCount);
        {
            ANGLE_SCOPED_DISABLE_LSAN();
            ANGLE_SCOPED_DISABLE_MSAN();
            // Append this layer's extensions after the ones already gathered.
            ANGLE_VK_TRY(displayVk, vkEnumerateInstanceExtensionProperties(
                                        layerName, &instanceLayerExtensionCount,
                                        instanceExtensionProps.data() + previousExtensionCount));
        }
        // In case fewer items were returned than requested, resize instanceExtensionProps to the
        // number of extensions returned (i.e. instanceLayerExtensionCount).
        instanceExtensionProps.resize(previousExtensionCount + instanceLayerExtensionCount);
    }

    // Get the list of instance extensions that are available.  Sorted so that ExtensionFound can
    // binary-search below.
    vk::ExtensionNameList instanceExtensionNames;
    if (!instanceExtensionProps.empty())
    {
        for (const VkExtensionProperties &i : instanceExtensionProps)
        {
            instanceExtensionNames.push_back(i.extensionName);
        }
        std::sort(instanceExtensionNames.begin(), instanceExtensionNames.end(), StrLess);
    }

    // Set ANGLE features that depend on instance extensions
    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsSurfaceCapabilities2Extension,
        ExtensionFound(VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME, instanceExtensionNames));

    ANGLE_FEATURE_CONDITION(&mFeatures, supportsSurfaceProtectedCapabilitiesExtension,
                            ExtensionFound(VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME,
                                           instanceExtensionNames));

    // TODO: Validation layer has a bug when vkGetPhysicalDeviceSurfaceFormats2KHR is called
    // on Mock ICD with surface handle set as VK_NULL_HANDLE. http://anglebug.com/7631
    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsSurfacelessQueryExtension,
        ExtensionFound(VK_GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME, instanceExtensionNames) &&
            !isMockICDEnabled());

    // VK_KHR_external_fence_capabilities and VK_KHR_external_semaphore_capabilities are promoted to
    // core in Vulkan 1.1
    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsExternalFenceCapabilities,
        isVulkan11Instance() || ExtensionFound(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME,
                                               instanceExtensionNames));

    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsExternalSemaphoreCapabilities,
        isVulkan11Instance() ||
            ExtensionFound(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME,
                           instanceExtensionNames));

    // On macOS, there is no native Vulkan driver, so we need to enable the
    // portability enumeration extension to allow use of MoltenVK.
    ANGLE_FEATURE_CONDITION(
        &mFeatures, supportsPortabilityEnumeration,
        ExtensionFound(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME, instanceExtensionNames));

    ANGLE_FEATURE_CONDITION(&mFeatures, enablePortabilityEnumeration,
                            mFeatures.supportsPortabilityEnumeration.enabled && IsApple());

    // Enable extensions that could be used
    if (displayVk->isUsingSwapchain())
    {
        mEnabledInstanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
        if (ExtensionFound(VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME, instanceExtensionNames))
        {
            mEnabledInstanceExtensions.push_back(VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME);
        }
    }

    if (wsiExtension)
    {
        mEnabledInstanceExtensions.push_back(wsiExtension);
    }

    // Debug utils requires validation layers to be on, and a loader that can resolve its entry
    // points (see canLoadDebugUtils in initialize()).
    mEnableDebugUtils = canLoadDebugUtils && mEnableValidationLayers &&
                        ExtensionFound(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, instanceExtensionNames);

    if (mEnableDebugUtils)
    {
        mEnabledInstanceExtensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
    }

    if (mFeatures.supportsSurfaceCapabilities2Extension.enabled)
    {
        mEnabledInstanceExtensions.push_back(VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME);
    }

    if (mFeatures.supportsSurfaceProtectedCapabilitiesExtension.enabled)
    {
        mEnabledInstanceExtensions.push_back(VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME);
    }

    if (mFeatures.supportsSurfacelessQueryExtension.enabled)
    {
        mEnabledInstanceExtensions.push_back(VK_GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME);
    }

    if (ExtensionFound(VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME, instanceExtensionNames))
    {
        mEnabledInstanceExtensions.push_back(VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME);
    }

    // These extensions are promoted to core in Vulkan 1.1 and must only be explicitly enabled on
    // a 1.0 instance.
    if (!isVulkan11Instance())
    {
        if (ExtensionFound(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
                           instanceExtensionNames))
        {
            mEnabledInstanceExtensions.push_back(
                VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        }

        if (mFeatures.supportsExternalFenceCapabilities.enabled)
        {
            mEnabledInstanceExtensions.push_back(VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME);
        }

        if (mFeatures.supportsExternalSemaphoreCapabilities.enabled)
        {
            mEnabledInstanceExtensions.push_back(
                VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
        }
    }

    if (mFeatures.enablePortabilityEnumeration.enabled)
    {
        mEnabledInstanceExtensions.push_back(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME);
    }

    // Verify the required extensions are in the extension names set. Fail if not.
    std::sort(mEnabledInstanceExtensions.begin(), mEnabledInstanceExtensions.end(), StrLess);
    ANGLE_VK_TRY(displayVk,
                 VerifyExtensionsPresent(instanceExtensionNames, mEnabledInstanceExtensions));

    return angle::Result::Continue;
}
1717 
// One-time renderer initialization: loads the Vulkan library (when dynamically linked), sets up
// validation layers and instance extensions, creates the VkInstance and optional debug messenger,
// selects a physical device and a graphics+compute queue family, and initializes memory
// properties, the VMA allocator, and the format table.  Device creation is deferred when more
// than one candidate queue family exists (a WindowSurface is needed to pick one).
angle::Result RendererVk::initialize(DisplayVk *displayVk,
                                     egl::Display *display,
                                     const char *wsiExtension,
                                     const char *wsiLayer)
{
    bool canLoadDebugUtils = true;
#if defined(ANGLE_SHARED_LIBVULKAN)
    {
        ANGLE_SCOPED_DISABLE_MSAN();
        mLibVulkanLibrary = angle::vk::OpenLibVulkan();
        ANGLE_VK_CHECK(displayVk, mLibVulkanLibrary, VK_ERROR_INITIALIZATION_FAILED);

        PFN_vkGetInstanceProcAddr vulkanLoaderGetInstanceProcAddr =
            reinterpret_cast<PFN_vkGetInstanceProcAddr>(
                angle::GetLibrarySymbol(mLibVulkanLibrary, "vkGetInstanceProcAddr"));

        // Set all vk* function ptrs
        volkInitializeCustom(vulkanLoaderGetInstanceProcAddr);

        uint32_t ver = volkGetInstanceVersion();
        if (!IsAndroid() && ver < VK_MAKE_VERSION(1, 1, 91))
        {
            // http://crbug.com/1205999 - non-Android Vulkan Loader versions before 1.1.91 have a
            // bug which prevents loading VK_EXT_debug_utils function pointers.
            canLoadDebugUtils = false;
        }
    }
#endif  // defined(ANGLE_SHARED_LIBVULKAN)

    mDisplay                         = display;
    const egl::AttributeMap &attribs = mDisplay->getAttributeMap();
    // Sets up environment variables (validation layers / ICD selection) for the loader.
    angle::vk::ScopedVkLoaderEnvironment scopedEnvironment(ShouldUseValidationLayers(attribs),
                                                           ChooseICDFromAttribs(attribs));
    mEnableValidationLayers = scopedEnvironment.canEnableValidationLayers();
    mEnabledICD             = scopedEnvironment.getEnabledICD();

    // Gather global layer properties.
    uint32_t instanceLayerCount = 0;
    {
        ANGLE_SCOPED_DISABLE_LSAN();
        ANGLE_SCOPED_DISABLE_MSAN();
        ANGLE_VK_TRY(displayVk, vkEnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));
    }

    std::vector<VkLayerProperties> instanceLayerProps(instanceLayerCount);
    if (instanceLayerCount > 0)
    {
        ANGLE_SCOPED_DISABLE_LSAN();
        ANGLE_SCOPED_DISABLE_MSAN();
        ANGLE_VK_TRY(displayVk, vkEnumerateInstanceLayerProperties(&instanceLayerCount,
                                                                   instanceLayerProps.data()));
    }

    VulkanLayerVector enabledInstanceLayerNames;
    if (mEnableValidationLayers)
    {
        // Validation is only kept enabled if the requested layers are actually available.
        bool layersRequested =
            (attribs.get(EGL_PLATFORM_ANGLE_DEBUG_LAYERS_ENABLED_ANGLE, EGL_DONT_CARE) == EGL_TRUE);
        mEnableValidationLayers = GetAvailableValidationLayers(instanceLayerProps, layersRequested,
                                                               &enabledInstanceLayerNames);
    }

    if (wsiLayer)
    {
        enabledInstanceLayerNames.push_back(wsiLayer);
    }

    // vkEnumerateInstanceVersion is a Vulkan 1.1 entry point; resolve it dynamically so a 1.0
    // loader still works.
    auto enumerateInstanceVersion = reinterpret_cast<PFN_vkEnumerateInstanceVersion>(
        vkGetInstanceProcAddr(nullptr, "vkEnumerateInstanceVersion"));

    uint32_t highestApiVersion = mInstanceVersion = VK_API_VERSION_1_0;
    if (enumerateInstanceVersion)
    {
        {
            ANGLE_SCOPED_DISABLE_LSAN();
            ANGLE_SCOPED_DISABLE_MSAN();
            ANGLE_VK_TRY(displayVk, enumerateInstanceVersion(&mInstanceVersion));
        }

        if (isVulkan11Instance())
        {
            // This is the highest version of core Vulkan functionality that ANGLE uses.  Per the
            // Vulkan spec, the application is allowed to specify a higher version than supported by
            // the instance.  ANGLE still respects the *device's* version.
            highestApiVersion = kPreferredVulkanAPIVersion;
        }
    }

    ANGLE_TRY(enableInstanceExtensions(displayVk, enabledInstanceLayerNames, wsiExtension,
                                       canLoadDebugUtils));

    const std::string appName = angle::GetExecutableName();

    mApplicationInfo                    = {};
    mApplicationInfo.sType              = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    mApplicationInfo.pApplicationName   = appName.c_str();
    mApplicationInfo.applicationVersion = 1;
    mApplicationInfo.pEngineName        = "ANGLE";
    mApplicationInfo.engineVersion      = 1;
    mApplicationInfo.apiVersion         = highestApiVersion;

    VkInstanceCreateInfo instanceInfo = {};
    instanceInfo.sType                = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    instanceInfo.flags                = 0;
    instanceInfo.pApplicationInfo     = &mApplicationInfo;

    // Enable requested layers and extensions.
    instanceInfo.enabledExtensionCount = static_cast<uint32_t>(mEnabledInstanceExtensions.size());
    instanceInfo.ppEnabledExtensionNames =
        mEnabledInstanceExtensions.empty() ? nullptr : mEnabledInstanceExtensions.data();

    instanceInfo.enabledLayerCount   = static_cast<uint32_t>(enabledInstanceLayerNames.size());
    instanceInfo.ppEnabledLayerNames = enabledInstanceLayerNames.data();

    // On macOS, there is no native Vulkan driver, so we need to enable the
    // portability enumeration extension to allow use of MoltenVK.
    if (mFeatures.enablePortabilityEnumeration.enabled)
    {
        instanceInfo.flags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
    }

    // http://anglebug.com/7050 - Shader validation caching is broken on Android
    VkValidationFeaturesEXT validationFeatures       = {};
    VkValidationFeatureDisableEXT disabledFeatures[] = {
        VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT};
    if (mEnableValidationLayers && IsAndroid())
    {
        validationFeatures.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
        validationFeatures.disabledValidationFeatureCount = 1;
        validationFeatures.pDisabledValidationFeatures    = disabledFeatures;

        vk::AddToPNextChain(&instanceInfo, &validationFeatures);
    }

    {
        ANGLE_SCOPED_DISABLE_MSAN();
        ANGLE_VK_TRY(displayVk, vkCreateInstance(&instanceInfo, nullptr, &mInstance));
#if defined(ANGLE_SHARED_LIBVULKAN)
        // Load volk if we are linking dynamically
        volkLoadInstance(mInstance);
#endif  // defined(ANGLE_SHARED_LIBVULKAN)

        initInstanceExtensionEntryPoints();
    }

    if (mEnableDebugUtils)
    {
        // Use the newer EXT_debug_utils if it exists.
#if !defined(ANGLE_SHARED_LIBVULKAN)
        InitDebugUtilsEXTFunctions(mInstance);
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)

        // Create the messenger callback.
        VkDebugUtilsMessengerCreateInfoEXT messengerInfo = {};

        constexpr VkDebugUtilsMessageSeverityFlagsEXT kSeveritiesToLog =
            VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT |
            VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;

        constexpr VkDebugUtilsMessageTypeFlagsEXT kMessagesToLog =
            VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
            VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
            VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;

        messengerInfo.sType           = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
        messengerInfo.messageSeverity = kSeveritiesToLog;
        messengerInfo.messageType     = kMessagesToLog;
        messengerInfo.pfnUserCallback = &DebugUtilsMessenger;
        messengerInfo.pUserData       = this;

        ANGLE_VK_TRY(displayVk, vkCreateDebugUtilsMessengerEXT(mInstance, &messengerInfo, nullptr,
                                                               &mDebugUtilsMessenger));
    }

    // vkGetPhysicalDeviceProperties2 is core in 1.1; on a 1.0 instance it is only usable through
    // the KHR extension (resolved below when not using the shared loader).
    if (isVulkan11Instance() ||
        std::find(mEnabledInstanceExtensions.begin(), mEnabledInstanceExtensions.end(),
                  VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) !=
            mEnabledInstanceExtensions.end())
    {
#if !defined(ANGLE_SHARED_LIBVULKAN)
        if (!isVulkan11Instance())
        {
            InitGetPhysicalDeviceProperties2KHRFunctions(mInstance);
        }
#endif  // !defined(ANGLE_SHARED_LIBVULKAN)

        ASSERT(vkGetPhysicalDeviceProperties2KHR);
    }

    uint32_t physicalDeviceCount = 0;
    ANGLE_VK_TRY(displayVk, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount, nullptr));
    ANGLE_VK_CHECK(displayVk, physicalDeviceCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    // TODO(jmadill): Handle multiple physical devices. For now, use the first device.
    std::vector<VkPhysicalDevice> physicalDevices(physicalDeviceCount);
    ANGLE_VK_TRY(displayVk, vkEnumeratePhysicalDevices(mInstance, &physicalDeviceCount,
                                                       physicalDevices.data()));
    uint32_t preferredVendorId =
        static_cast<uint32_t>(attribs.get(EGL_PLATFORM_ANGLE_DEVICE_ID_HIGH_ANGLE, 0));
    uint32_t preferredDeviceId =
        static_cast<uint32_t>(attribs.get(EGL_PLATFORM_ANGLE_DEVICE_ID_LOW_ANGLE, 0));
    ChoosePhysicalDevice(vkGetPhysicalDeviceProperties, physicalDevices, mEnabledICD,
                         preferredVendorId, preferredDeviceId, &mPhysicalDevice,
                         &mPhysicalDeviceProperties);

    // The device version that is assumed by ANGLE is the minimum of the actual device version and
    // the highest it's allowed to use.
    mDeviceVersion = std::min(mPhysicalDeviceProperties.apiVersion, highestApiVersion);

    mGarbageCollectionFlushThreshold =
        static_cast<uint32_t>(mPhysicalDeviceProperties.limits.maxMemoryAllocationCount *
                              kPercentMaxMemoryAllocationCount);
    vkGetPhysicalDeviceFeatures(mPhysicalDevice, &mPhysicalDeviceFeatures);

    // Ensure we can find a graphics queue family.
    uint32_t queueFamilyCount = 0;
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueFamilyCount, nullptr);

    ANGLE_VK_CHECK(displayVk, queueFamilyCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    mQueueFamilyProperties.resize(queueFamilyCount);
    vkGetPhysicalDeviceQueueFamilyProperties(mPhysicalDevice, &queueFamilyCount,
                                             mQueueFamilyProperties.data());

    uint32_t queueFamilyMatchCount = 0;
    // Try first for a protected graphics queue family
    uint32_t firstGraphicsQueueFamily = vk::QueueFamily::FindIndex(
        mQueueFamilyProperties,
        (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_PROTECTED_BIT), 0,
        &queueFamilyMatchCount);
    // else just a graphics queue family
    if (queueFamilyMatchCount == 0)
    {
        firstGraphicsQueueFamily = vk::QueueFamily::FindIndex(
            mQueueFamilyProperties, (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT), 0,
            &queueFamilyMatchCount);
    }
    ANGLE_VK_CHECK(displayVk, queueFamilyMatchCount > 0, VK_ERROR_INITIALIZATION_FAILED);

    // Store the physical device memory properties so we can find the right memory pools.
    mMemoryProperties.init(mPhysicalDevice);
    ANGLE_VK_CHECK(displayVk, mMemoryProperties.getMemoryTypeCount() > 0,
                   VK_ERROR_INITIALIZATION_FAILED);

    // The counters for the memory allocation tracker should be initialized.
    // Each memory allocation could be made in one of the available memory heaps. We initialize the
    // per-heap memory allocation trackers for MemoryAllocationType objects here, after
    // mMemoryProperties has been set up.
    mMemoryAllocationTracker.initMemoryTrackers();

    // If only one queue family, go ahead and initialize the device. If there is more than one
    // queue, we'll have to wait until we see a WindowSurface to know which supports present.
    if (queueFamilyMatchCount == 1)
    {
        ANGLE_TRY(initializeDevice(displayVk, firstGraphicsQueueFamily));
    }

    ANGLE_TRY(initializeMemoryAllocator(displayVk));

    // Initialize the format table.
    mFormatTable.initialize(this, &mNativeTextureCaps);

    setGlobalDebugAnnotator();

    // Null terminate the extension list returned for EGL_VULKAN_INSTANCE_EXTENSIONS_ANGLE.
    mEnabledInstanceExtensions.push_back(nullptr);

    for (vk::ProtectionType protectionType : angle::AllEnums<vk::ProtectionType>())
    {
        mOneOffCommandPoolMap[protectionType].init(protectionType);
    }

    return angle::Result::Continue;
}
1992 
initializeMemoryAllocator(DisplayVk * displayVk)1993 angle::Result RendererVk::initializeMemoryAllocator(DisplayVk *displayVk)
1994 {
1995     // This number matches Chromium and was picked by looking at memory usage of
1996     // Android apps. The allocator will start making blocks at 1/8 the max size
1997     // and builds up block size as needed before capping at the max set here.
1998     mPreferredLargeHeapBlockSize = 4 * 1024 * 1024;
1999 
2000     // Create VMA allocator
2001     ANGLE_VK_TRY(displayVk,
2002                  mAllocator.init(mPhysicalDevice, mDevice, mInstance, mApplicationInfo.apiVersion,
2003                                  mPreferredLargeHeapBlockSize));
2004 
2005     // Figure out the alignment for default buffer allocations
2006     VkBufferCreateInfo createInfo    = {};
2007     createInfo.sType                 = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
2008     createInfo.flags                 = 0;
2009     createInfo.size                  = 4096;
2010     createInfo.usage                 = GetDefaultBufferUsageFlags(this);
2011     createInfo.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;
2012     createInfo.queueFamilyIndexCount = 0;
2013     createInfo.pQueueFamilyIndices   = nullptr;
2014 
2015     vk::DeviceScoped<vk::Buffer> tempBuffer(mDevice);
2016     tempBuffer.get().init(mDevice, createInfo);
2017 
2018     VkMemoryRequirements defaultBufferMemoryRequirements;
2019     tempBuffer.get().getMemoryRequirements(mDevice, &defaultBufferMemoryRequirements);
2020     ASSERT(gl::isPow2(defaultBufferMemoryRequirements.alignment));
2021 
2022     const VkPhysicalDeviceLimits &limitsVk = getPhysicalDeviceProperties().limits;
2023     ASSERT(gl::isPow2(limitsVk.minUniformBufferOffsetAlignment));
2024     ASSERT(gl::isPow2(limitsVk.minStorageBufferOffsetAlignment));
2025     ASSERT(gl::isPow2(limitsVk.minTexelBufferOffsetAlignment));
2026     ASSERT(gl::isPow2(limitsVk.minMemoryMapAlignment));
2027 
2028     mDefaultBufferAlignment =
2029         std::max({static_cast<size_t>(limitsVk.minUniformBufferOffsetAlignment),
2030                   static_cast<size_t>(limitsVk.minStorageBufferOffsetAlignment),
2031                   static_cast<size_t>(limitsVk.minTexelBufferOffsetAlignment),
2032                   static_cast<size_t>(limitsVk.minMemoryMapAlignment),
2033                   static_cast<size_t>(defaultBufferMemoryRequirements.alignment)});
2034 
2035     // Initialize staging buffer memory type index and alignment.
2036     // These buffers will only be used as transfer sources or transfer targets.
2037     createInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
2038     VkMemoryPropertyFlags requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
2039     bool persistentlyMapped             = mFeatures.persistentlyMappedBuffers.enabled;
2040 
2041     // Uncached coherent staging buffer
2042     VkMemoryPropertyFlags preferredFlags = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
2043     ANGLE_VK_TRY(displayVk, mAllocator.findMemoryTypeIndexForBufferInfo(
2044                                 createInfo, requiredFlags, preferredFlags, persistentlyMapped,
2045                                 &mCoherentStagingBufferMemoryTypeIndex));
2046     ASSERT(mCoherentStagingBufferMemoryTypeIndex != kInvalidMemoryTypeIndex);
2047 
2048     // Cached (b/219974369) Non-coherent staging buffer
2049     preferredFlags = VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
2050     ANGLE_VK_TRY(displayVk, mAllocator.findMemoryTypeIndexForBufferInfo(
2051                                 createInfo, requiredFlags, preferredFlags, persistentlyMapped,
2052                                 &mNonCoherentStagingBufferMemoryTypeIndex));
2053     ASSERT(mNonCoherentStagingBufferMemoryTypeIndex != kInvalidMemoryTypeIndex);
2054 
2055     // Alignment
2056     mStagingBufferAlignment =
2057         static_cast<size_t>(mPhysicalDeviceProperties.limits.minMemoryMapAlignment);
2058     ASSERT(gl::isPow2(mPhysicalDeviceProperties.limits.nonCoherentAtomSize));
2059     ASSERT(gl::isPow2(mPhysicalDeviceProperties.limits.optimalBufferCopyOffsetAlignment));
    // Usually minTexelBufferOffsetAlignment is much smaller than nonCoherentAtomSize
2061     ASSERT(gl::isPow2(mPhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment));
2062     mStagingBufferAlignment = std::max(
2063         {mStagingBufferAlignment,
2064          static_cast<size_t>(mPhysicalDeviceProperties.limits.optimalBufferCopyOffsetAlignment),
2065          static_cast<size_t>(mPhysicalDeviceProperties.limits.nonCoherentAtomSize),
2066          static_cast<size_t>(mPhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment)});
2067     ASSERT(gl::isPow2(mStagingBufferAlignment));
2068 
2069     // Device local vertex conversion buffer
2070     createInfo.usage = vk::kVertexBufferUsageFlags;
2071     requiredFlags    = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
2072     preferredFlags   = 0;
2073     ANGLE_VK_TRY(displayVk, mAllocator.findMemoryTypeIndexForBufferInfo(
2074                                 createInfo, requiredFlags, preferredFlags, persistentlyMapped,
2075                                 &mDeviceLocalVertexConversionBufferMemoryTypeIndex));
2076     ASSERT(mDeviceLocalVertexConversionBufferMemoryTypeIndex != kInvalidMemoryTypeIndex);
2077 
2078     // Host visible and non-coherent vertex conversion buffer, which is the same as non-coherent
2079     // staging buffer
2080     mHostVisibleVertexConversionBufferMemoryTypeIndex = mNonCoherentStagingBufferMemoryTypeIndex;
2081 
2082     // We may use compute shader to do conversion, so we must meet
2083     // minStorageBufferOffsetAlignment requirement as well. Also take into account non-coherent
2084     // alignment requirements.
2085     mVertexConversionBufferAlignment = std::max(
2086         {vk::kVertexBufferAlignment,
2087          static_cast<size_t>(mPhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment),
2088          static_cast<size_t>(mPhysicalDeviceProperties.limits.nonCoherentAtomSize),
2089          static_cast<size_t>(defaultBufferMemoryRequirements.alignment)});
2090     ASSERT(gl::isPow2(mVertexConversionBufferAlignment));
2091 
2092     return angle::Result::Continue;
2093 }
2094 
2095 // The following features and properties are not promoted to any core Vulkan versions (up to Vulkan
2096 // 1.3):
2097 //
2098 // - VK_EXT_line_rasterization:                        bresenhamLines (feature)
2099 // - VK_EXT_provoking_vertex:                          provokingVertexLast (feature)
2100 // - VK_EXT_vertex_attribute_divisor:                  vertexAttributeInstanceRateDivisor (feature),
2101 //                                                     maxVertexAttribDivisor (property)
2102 // - VK_EXT_transform_feedback:                        transformFeedback (feature),
2103 //                                                     geometryStreams (feature)
2104 // - VK_EXT_index_type_uint8:                          indexTypeUint8 (feature)
2105 // - VK_EXT_device_memory_report:                      deviceMemoryReport (feature)
2106 // - VK_EXT_multisampled_render_to_single_sampled or
2107 //   VK_GOOGLEX_multisampled_render_to_single_sampled: multisampledRenderToSingleSampled (feature)
2108 // - VK_EXT_image_2d_view_of_3d:                       image2DViewOf3D (feature)
2109 //                                                     sampler2DViewOf3D (feature)
2110 // - VK_EXT_custom_border_color:                       customBorderColors (feature)
2111 //                                                     customBorderColorWithoutFormat (feature)
2112 // - VK_EXT_depth_clamp_zero_one:                      depthClampZeroOne (feature)
2113 // - VK_EXT_depth_clip_enable:                         depthClipEnable (feature)
2114 // - VK_EXT_depth_clip_control:                        depthClipControl (feature)
2115 // - VK_EXT_primitives_generated_query:                primitivesGeneratedQuery (feature),
2116 //                                                     primitivesGeneratedQueryWithRasterizerDiscard
2117 //                                                                                        (property)
2118 // - VK_EXT_primitive_topology_list_restart:           primitiveTopologyListRestart (feature)
2119 // - VK_EXT_graphics_pipeline_library:                 graphicsPipelineLibrary (feature),
2120 //                                                     graphicsPipelineLibraryFastLinking (property)
2121 // - VK_KHR_fragment_shading_rate:                     pipelineFragmentShadingRate (feature)
2122 // - VK_EXT_fragment_shader_interlock:                 fragmentShaderPixelInterlock (feature)
2123 // - VK_EXT_pipeline_robustness:                       pipelineRobustness (feature)
2124 // - VK_EXT_pipeline_protected_access:                 pipelineProtectedAccess (feature)
2125 // - VK_EXT_rasterization_order_attachment_access or
2126 //   VK_ARM_rasterization_order_attachment_access:     rasterizationOrderColorAttachmentAccess
2127 //                                                                                   (feature)
2128 // - VK_EXT_swapchain_maintenance1:                    swapchainMaintenance1 (feature)
// - VK_EXT_legacy_dithering:                          legacyDithering (feature)
2130 // - VK_EXT_physical_device_drm:                       hasPrimary (property),
2131 //                                                     hasRender (property)
2132 //
appendDeviceExtensionFeaturesNotPromoted(const vk::ExtensionNameList & deviceExtensionNames,VkPhysicalDeviceFeatures2KHR * deviceFeatures,VkPhysicalDeviceProperties2 * deviceProperties)2133 void RendererVk::appendDeviceExtensionFeaturesNotPromoted(
2134     const vk::ExtensionNameList &deviceExtensionNames,
2135     VkPhysicalDeviceFeatures2KHR *deviceFeatures,
2136     VkPhysicalDeviceProperties2 *deviceProperties)
2137 {
2138     if (ExtensionFound(VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME, deviceExtensionNames))
2139     {
2140         vk::AddToPNextChain(deviceFeatures, &mLineRasterizationFeatures);
2141     }
2142 
2143     if (ExtensionFound(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME, deviceExtensionNames))
2144     {
2145         vk::AddToPNextChain(deviceFeatures, &mProvokingVertexFeatures);
2146     }
2147 
2148     if (ExtensionFound(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME, deviceExtensionNames))
2149     {
2150         vk::AddToPNextChain(deviceFeatures, &mVertexAttributeDivisorFeatures);
2151         vk::AddToPNextChain(deviceProperties, &mVertexAttributeDivisorProperties);
2152     }
2153 
2154     if (ExtensionFound(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME, deviceExtensionNames))
2155     {
2156         vk::AddToPNextChain(deviceFeatures, &mTransformFeedbackFeatures);
2157     }
2158 
2159     if (ExtensionFound(VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME, deviceExtensionNames))
2160     {
2161         vk::AddToPNextChain(deviceFeatures, &mIndexTypeUint8Features);
2162     }
2163 
2164     if (ExtensionFound(VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME, deviceExtensionNames))
2165     {
2166         vk::AddToPNextChain(deviceFeatures, &mMemoryReportFeatures);
2167     }
2168 
2169     if (ExtensionFound(VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME,
2170                        deviceExtensionNames))
2171     {
2172         vk::AddToPNextChain(deviceFeatures, &mMultisampledRenderToSingleSampledFeatures);
2173     }
2174     else if (ExtensionFound(VK_GOOGLEX_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME,
2175                             deviceExtensionNames))
2176     {
2177         vk::AddToPNextChain(deviceFeatures, &mMultisampledRenderToSingleSampledFeaturesGOOGLEX);
2178     }
2179 
2180     if (ExtensionFound(VK_EXT_IMAGE_2D_VIEW_OF_3D_EXTENSION_NAME, deviceExtensionNames))
2181     {
2182         vk::AddToPNextChain(deviceFeatures, &mImage2dViewOf3dFeatures);
2183     }
2184 
2185     if (ExtensionFound(VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME, deviceExtensionNames))
2186     {
2187         vk::AddToPNextChain(deviceFeatures, &mCustomBorderColorFeatures);
2188     }
2189 
2190     if (ExtensionFound(VK_EXT_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME, deviceExtensionNames))
2191     {
2192         vk::AddToPNextChain(deviceFeatures, &mDepthClampZeroOneFeatures);
2193     }
2194 
2195     if (ExtensionFound(VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME, deviceExtensionNames))
2196     {
2197         vk::AddToPNextChain(deviceFeatures, &mDepthClipEnableFeatures);
2198     }
2199 
2200     if (ExtensionFound(VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME, deviceExtensionNames))
2201     {
2202         vk::AddToPNextChain(deviceFeatures, &mDepthClipControlFeatures);
2203     }
2204 
2205     if (ExtensionFound(VK_EXT_PRIMITIVES_GENERATED_QUERY_EXTENSION_NAME, deviceExtensionNames))
2206     {
2207         vk::AddToPNextChain(deviceFeatures, &mPrimitivesGeneratedQueryFeatures);
2208     }
2209 
2210     if (ExtensionFound(VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME, deviceExtensionNames))
2211     {
2212         vk::AddToPNextChain(deviceFeatures, &mPrimitiveTopologyListRestartFeatures);
2213     }
2214 
2215     if (ExtensionFound(VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME, deviceExtensionNames))
2216     {
2217         vk::AddToPNextChain(deviceFeatures, &mGraphicsPipelineLibraryFeatures);
2218         vk::AddToPNextChain(deviceProperties, &mGraphicsPipelineLibraryProperties);
2219     }
2220 
2221     if (ExtensionFound(VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME, deviceExtensionNames))
2222     {
2223         vk::AddToPNextChain(deviceFeatures, &mFragmentShadingRateFeatures);
2224     }
2225 
2226     if (ExtensionFound(VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME, deviceExtensionNames))
2227     {
2228         vk::AddToPNextChain(deviceFeatures, &mFragmentShaderInterlockFeatures);
2229     }
2230 
2231     if (ExtensionFound(VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME, deviceExtensionNames))
2232     {
2233         vk::AddToPNextChain(deviceFeatures, &mPipelineRobustnessFeatures);
2234     }
2235 
2236     if (ExtensionFound(VK_EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME, deviceExtensionNames))
2237     {
2238         vk::AddToPNextChain(deviceFeatures, &mPipelineProtectedAccessFeatures);
2239     }
2240 
2241     // The EXT and ARM versions are interchangeable. The structs and enums alias each other.
2242     if (ExtensionFound(VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME,
2243                        deviceExtensionNames))
2244     {
2245         vk::AddToPNextChain(deviceFeatures, &mRasterizationOrderAttachmentAccessFeatures);
2246     }
2247     else if (ExtensionFound(VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME,
2248                             deviceExtensionNames))
2249     {
2250         vk::AddToPNextChain(deviceFeatures, &mRasterizationOrderAttachmentAccessFeatures);
2251     }
2252 
2253     if (ExtensionFound(VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME, deviceExtensionNames))
2254     {
2255         vk::AddToPNextChain(deviceFeatures, &mSwapchainMaintenance1Features);
2256     }
2257 
2258     if (ExtensionFound(VK_EXT_LEGACY_DITHERING_EXTENSION_NAME, deviceExtensionNames))
2259     {
2260         vk::AddToPNextChain(deviceFeatures, &mDitheringFeatures);
2261     }
2262 
2263     if (ExtensionFound(VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME, deviceExtensionNames))
2264     {
2265         vk::AddToPNextChain(deviceProperties, &mDrmProperties);
2266     }
2267 }
2268 
2269 // The following features and properties used by ANGLE have been promoted to Vulkan 1.1:
2270 //
2271 // - (unpublished VK_KHR_subgroup):         supportedStages (property),
2272 //                                          supportedOperations (property)
2273 // - (unpublished VK_KHR_protected_memory): protectedMemory (feature)
2274 // - VK_KHR_sampler_ycbcr_conversion:       samplerYcbcrConversion (feature)
2275 // - VK_KHR_multiview:                      multiview (feature),
2276 //                                          maxMultiviewViewCount (property)
2277 //
2278 //
2279 // Note that subgroup and protected memory features and properties came from unpublished extensions
2280 // and are core in Vulkan 1.1.
2281 //
appendDeviceExtensionFeaturesPromotedTo11(const vk::ExtensionNameList & deviceExtensionNames,VkPhysicalDeviceFeatures2KHR * deviceFeatures,VkPhysicalDeviceProperties2 * deviceProperties)2282 void RendererVk::appendDeviceExtensionFeaturesPromotedTo11(
2283     const vk::ExtensionNameList &deviceExtensionNames,
2284     VkPhysicalDeviceFeatures2KHR *deviceFeatures,
2285     VkPhysicalDeviceProperties2 *deviceProperties)
2286 {
2287     if (isVulkan11Device())
2288     {
2289         vk::AddToPNextChain(deviceProperties, &mSubgroupProperties);
2290         vk::AddToPNextChain(deviceFeatures, &mProtectedMemoryFeatures);
2291     }
2292 
2293     if (ExtensionFound(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME, deviceExtensionNames))
2294     {
2295         vk::AddToPNextChain(deviceFeatures, &mSamplerYcbcrConversionFeatures);
2296     }
2297 
2298     if (ExtensionFound(VK_KHR_MULTIVIEW_EXTENSION_NAME, deviceExtensionNames))
2299     {
2300         vk::AddToPNextChain(deviceFeatures, &mMultiviewFeatures);
2301         vk::AddToPNextChain(deviceProperties, &mMultiviewProperties);
2302     }
2303 }
2304 
2305 // The following features and properties used by ANGLE have been promoted to Vulkan 1.2:
2306 //
2307 // - VK_KHR_shader_float16_int8:            shaderFloat16 (feature)
2308 // - VK_KHR_depth_stencil_resolve:          supportedDepthResolveModes (property),
2309 //                                          independentResolveNone (property)
2310 // - VK_KHR_driver_properties:              driverName (property),
2311 //                                          driverID (property)
2312 // - VK_KHR_shader_subgroup_extended_types: shaderSubgroupExtendedTypes (feature)
2313 // - VK_EXT_host_query_reset:               hostQueryReset (feature)
2314 // - VK_KHR_imageless_framebuffer:          imagelessFramebuffer (feature)
2315 // - VK_KHR_timeline_semaphore:             timelineSemaphore (feature)
2316 //
2317 // Note that supportedDepthResolveModes is used just to check if the property struct is populated.
2318 // ANGLE always uses VK_RESOLVE_MODE_SAMPLE_ZERO_BIT for both depth and stencil, and support for
2319 // this bit is mandatory as long as the extension (or Vulkan 1.2) exists.
2320 //
appendDeviceExtensionFeaturesPromotedTo12(const vk::ExtensionNameList & deviceExtensionNames,VkPhysicalDeviceFeatures2KHR * deviceFeatures,VkPhysicalDeviceProperties2 * deviceProperties)2321 void RendererVk::appendDeviceExtensionFeaturesPromotedTo12(
2322     const vk::ExtensionNameList &deviceExtensionNames,
2323     VkPhysicalDeviceFeatures2KHR *deviceFeatures,
2324     VkPhysicalDeviceProperties2 *deviceProperties)
2325 {
2326     if (ExtensionFound(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME, deviceExtensionNames))
2327     {
2328         vk::AddToPNextChain(deviceFeatures, &mShaderFloat16Int8Features);
2329     }
2330 
2331     if (ExtensionFound(VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME, deviceExtensionNames))
2332     {
2333         vk::AddToPNextChain(deviceProperties, &mDepthStencilResolveProperties);
2334     }
2335 
2336     if (ExtensionFound(VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME, deviceExtensionNames))
2337     {
2338         vk::AddToPNextChain(deviceProperties, &mDriverProperties);
2339     }
2340 
2341     if (ExtensionFound(VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME, deviceExtensionNames))
2342     {
2343         vk::AddToPNextChain(deviceFeatures, &mSubgroupExtendedTypesFeatures);
2344     }
2345 
2346     if (ExtensionFound(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME, deviceExtensionNames))
2347     {
2348         vk::AddToPNextChain(deviceFeatures, &mHostQueryResetFeatures);
2349     }
2350 
2351     if (ExtensionFound(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME, deviceExtensionNames))
2352     {
2353         vk::AddToPNextChain(deviceFeatures, &mImagelessFramebufferFeatures);
2354     }
2355 
2356     if (ExtensionFound(VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME, deviceExtensionNames))
2357     {
2358         vk::AddToPNextChain(deviceFeatures, &mTimelineSemaphoreFeatures);
2359     }
2360 }
2361 
2362 // The following features and properties used by ANGLE have been promoted to Vulkan 1.3:
2363 //
2364 // - VK_EXT_pipeline_creation_cache_control: pipelineCreationCacheControl (feature)
2365 // - VK_EXT_extended_dynamic_state:          extendedDynamicState (feature)
2366 // - VK_EXT_extended_dynamic_state2:         extendedDynamicState2 (feature),
2367 //                                           extendedDynamicState2LogicOp (feature)
2368 //
2369 // Note that VK_EXT_extended_dynamic_state2 is partially promoted to Vulkan 1.3.  If ANGLE creates a
2370 // Vulkan 1.3 device, it would still need to enable this extension separately for
2371 // extendedDynamicState2LogicOp.
2372 //
appendDeviceExtensionFeaturesPromotedTo13(const vk::ExtensionNameList & deviceExtensionNames,VkPhysicalDeviceFeatures2KHR * deviceFeatures,VkPhysicalDeviceProperties2 * deviceProperties)2373 void RendererVk::appendDeviceExtensionFeaturesPromotedTo13(
2374     const vk::ExtensionNameList &deviceExtensionNames,
2375     VkPhysicalDeviceFeatures2KHR *deviceFeatures,
2376     VkPhysicalDeviceProperties2 *deviceProperties)
2377 {
2378     if (ExtensionFound(VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME, deviceExtensionNames))
2379     {
2380         vk::AddToPNextChain(deviceFeatures, &mPipelineCreationCacheControlFeatures);
2381     }
2382 
2383     if (ExtensionFound(VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME, deviceExtensionNames))
2384     {
2385         vk::AddToPNextChain(deviceFeatures, &mExtendedDynamicStateFeatures);
2386     }
2387 
2388     if (ExtensionFound(VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME, deviceExtensionNames))
2389     {
2390         vk::AddToPNextChain(deviceFeatures, &mExtendedDynamicState2Features);
2391     }
2392 }
2393 
// Queries the physical device for the features and properties of every extension ANGLE cares
// about.  Each chained struct is zero-initialized first so that if an extension is absent (and
// its struct therefore never chained and never written by the driver), all of its feature
// booleans read as false.
void RendererVk::queryDeviceExtensionFeatures(const vk::ExtensionNameList &deviceExtensionNames)
{
    // Default initialize all extension features to false.
    mPhysicalDevice11Properties       = {};
    mPhysicalDevice11Properties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES;

    mPhysicalDevice11Features       = {};
    mPhysicalDevice11Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;

    mLineRasterizationFeatures = {};
    mLineRasterizationFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT;

    mProvokingVertexFeatures = {};
    mProvokingVertexFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT;

    mVertexAttributeDivisorFeatures = {};
    mVertexAttributeDivisorFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;

    mVertexAttributeDivisorProperties = {};
    mVertexAttributeDivisorProperties.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT;

    mTransformFeedbackFeatures = {};
    mTransformFeedbackFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT;

    mIndexTypeUint8Features       = {};
    mIndexTypeUint8Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT;

    mSubgroupProperties       = {};
    mSubgroupProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;

    mSubgroupExtendedTypesFeatures = {};
    mSubgroupExtendedTypesFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES;

    mMemoryReportFeatures = {};
    mMemoryReportFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT;

    mShaderFloat16Int8Features = {};
    mShaderFloat16Int8Features.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES;

    mDepthStencilResolveProperties = {};
    mDepthStencilResolveProperties.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES;

    mCustomBorderColorFeatures = {};
    mCustomBorderColorFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT;

    // Both variants of multisampled-render-to-single-sampled are initialized; only one is
    // chained at query time (the EXT variant takes precedence).
    mMultisampledRenderToSingleSampledFeatures = {};
    mMultisampledRenderToSingleSampledFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT;

    mMultisampledRenderToSingleSampledFeaturesGOOGLEX = {};
    mMultisampledRenderToSingleSampledFeaturesGOOGLEX.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_GOOGLEX;

    mImage2dViewOf3dFeatures = {};
    mImage2dViewOf3dFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT;

    mMultiviewFeatures       = {};
    mMultiviewFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES;

    mMultiviewProperties       = {};
    mMultiviewProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES;

    mDriverProperties       = {};
    mDriverProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES;

    mSamplerYcbcrConversionFeatures = {};
    mSamplerYcbcrConversionFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES;

    mProtectedMemoryFeatures       = {};
    mProtectedMemoryFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES;

    mHostQueryResetFeatures       = {};
    mHostQueryResetFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT;

    mDepthClampZeroOneFeatures = {};
    mDepthClampZeroOneFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT;

    mDepthClipEnableFeatures = {};
    mDepthClipEnableFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT;

    mDepthClipControlFeatures = {};
    mDepthClipControlFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT;

    mPrimitivesGeneratedQueryFeatures = {};
    mPrimitivesGeneratedQueryFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT;

    mPrimitiveTopologyListRestartFeatures = {};
    mPrimitiveTopologyListRestartFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT;

    mPipelineCreationCacheControlFeatures = {};
    mPipelineCreationCacheControlFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT;

    mExtendedDynamicStateFeatures = {};
    mExtendedDynamicStateFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT;

    mExtendedDynamicState2Features = {};
    mExtendedDynamicState2Features.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT;

    mGraphicsPipelineLibraryFeatures = {};
    mGraphicsPipelineLibraryFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT;

    mGraphicsPipelineLibraryProperties = {};
    mGraphicsPipelineLibraryProperties.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT;

    mFragmentShadingRateFeatures = {};
    mFragmentShadingRateFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR;

    mFragmentShaderInterlockFeatures = {};
    mFragmentShaderInterlockFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT;

    mImagelessFramebufferFeatures = {};
    mImagelessFramebufferFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR;

    mPipelineRobustnessFeatures = {};
    mPipelineRobustnessFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT;

    mPipelineProtectedAccessFeatures = {};
    mPipelineProtectedAccessFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT;

    // Shared by both the EXT and ARM variants of the extension (the structs alias each other).
    mRasterizationOrderAttachmentAccessFeatures = {};
    mRasterizationOrderAttachmentAccessFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT;

    mSwapchainMaintenance1Features = {};
    mSwapchainMaintenance1Features.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT;

    mDitheringFeatures       = {};
    mDitheringFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT;

    mDrmProperties       = {};
    mDrmProperties.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT;

    mTimelineSemaphoreFeatures = {};
    mTimelineSemaphoreFeatures.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR;

    // If the vkGetPhysicalDevice*2KHR entry points were not loaded, nothing can be queried;
    // all structs keep their zero-initialized (all-false) values.
    if (!vkGetPhysicalDeviceProperties2KHR || !vkGetPhysicalDeviceFeatures2KHR)
    {
        return;
    }

    // Query features and properties.
    // Build pNext chains rooted at these stack-local structs, linking in only the member
    // structs whose extensions the device advertises.
    VkPhysicalDeviceFeatures2KHR deviceFeatures = {};
    deviceFeatures.sType                        = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;

    VkPhysicalDeviceProperties2 deviceProperties = {};
    deviceProperties.sType                       = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;

    appendDeviceExtensionFeaturesNotPromoted(deviceExtensionNames, &deviceFeatures,
                                             &deviceProperties);
    appendDeviceExtensionFeaturesPromotedTo11(deviceExtensionNames, &deviceFeatures,
                                              &deviceProperties);
    appendDeviceExtensionFeaturesPromotedTo12(deviceExtensionNames, &deviceFeatures,
                                              &deviceProperties);
    appendDeviceExtensionFeaturesPromotedTo13(deviceExtensionNames, &deviceFeatures,
                                              &deviceProperties);

    vkGetPhysicalDeviceFeatures2KHR(mPhysicalDevice, &deviceFeatures);
    vkGetPhysicalDeviceProperties2KHR(mPhysicalDevice, &deviceProperties);

    // Clean up pNext chains
    // The query chains link the member structs to one another; sever those links so each
    // struct can later be chained independently (e.g. when enabling device features).
    mPhysicalDevice11Properties.pNext                       = nullptr;
    mPhysicalDevice11Features.pNext                         = nullptr;
    mLineRasterizationFeatures.pNext                        = nullptr;
    mMemoryReportFeatures.pNext                             = nullptr;
    mProvokingVertexFeatures.pNext                          = nullptr;
    mVertexAttributeDivisorFeatures.pNext                   = nullptr;
    mVertexAttributeDivisorProperties.pNext                 = nullptr;
    mTransformFeedbackFeatures.pNext                        = nullptr;
    mIndexTypeUint8Features.pNext                           = nullptr;
    mSubgroupProperties.pNext                               = nullptr;
    mSubgroupExtendedTypesFeatures.pNext                    = nullptr;
    mCustomBorderColorFeatures.pNext                        = nullptr;
    mShaderFloat16Int8Features.pNext                        = nullptr;
    mDepthStencilResolveProperties.pNext                    = nullptr;
    mMultisampledRenderToSingleSampledFeatures.pNext        = nullptr;
    mMultisampledRenderToSingleSampledFeaturesGOOGLEX.pNext = nullptr;
    mImage2dViewOf3dFeatures.pNext                          = nullptr;
    mMultiviewFeatures.pNext                                = nullptr;
    mMultiviewProperties.pNext                              = nullptr;
    mDriverProperties.pNext                                 = nullptr;
    mSamplerYcbcrConversionFeatures.pNext                   = nullptr;
    mProtectedMemoryFeatures.pNext                          = nullptr;
    mHostQueryResetFeatures.pNext                           = nullptr;
    mDepthClampZeroOneFeatures.pNext                        = nullptr;
    mDepthClipEnableFeatures.pNext                          = nullptr;
    mDepthClipControlFeatures.pNext                         = nullptr;
    mPrimitivesGeneratedQueryFeatures.pNext                 = nullptr;
    mPrimitiveTopologyListRestartFeatures.pNext             = nullptr;
    mPipelineCreationCacheControlFeatures.pNext             = nullptr;
    mExtendedDynamicStateFeatures.pNext                     = nullptr;
    mExtendedDynamicState2Features.pNext                    = nullptr;
    mGraphicsPipelineLibraryFeatures.pNext                  = nullptr;
    mGraphicsPipelineLibraryProperties.pNext                = nullptr;
    mFragmentShadingRateFeatures.pNext                      = nullptr;
    mFragmentShaderInterlockFeatures.pNext                  = nullptr;
    mImagelessFramebufferFeatures.pNext                     = nullptr;
    mPipelineRobustnessFeatures.pNext                       = nullptr;
    mPipelineProtectedAccessFeatures.pNext                  = nullptr;
    mRasterizationOrderAttachmentAccessFeatures.pNext       = nullptr;
    mSwapchainMaintenance1Features.pNext                    = nullptr;
    mDitheringFeatures.pNext                                = nullptr;
    mDrmProperties.pNext                                    = nullptr;
    mTimelineSemaphoreFeatures.pNext                        = nullptr;
}
2627 
2628 // See comment above appendDeviceExtensionFeaturesNotPromoted.  Additional extensions are enabled
2629 // here which don't have feature structs:
2630 //
2631 // - VK_KHR_shared_presentable_image
2632 // - VK_EXT_memory_budget
2633 // - VK_KHR_incremental_present
2634 // - VK_EXT_queue_family_foreign
2635 // - VK_ANDROID_external_memory_android_hardware_buffer
2636 // - VK_GGP_frame_token
2637 // - VK_KHR_external_memory_fd
2638 // - VK_KHR_external_memory_fuchsia
2639 // - VK_KHR_external_semaphore_fd
2640 // - VK_KHR_external_fence_fd
2641 // - VK_FUCHSIA_external_semaphore
2642 // - VK_EXT_shader_stencil_export
2643 // - VK_EXT_load_store_op_none
2644 // - VK_QCOM_render_pass_store_ops
2645 // - VK_GOOGLE_display_timing
2646 // - VK_EXT_external_memory_dma_buf
2647 // - VK_EXT_image_drm_format_modifier
2648 // - VK_EXT_blend_operation_advanced
2649 // - VK_EXT_full_screen_exclusive
2650 //
enableDeviceExtensionsNotPromoted(const vk::ExtensionNameList & deviceExtensionNames)2651 void RendererVk::enableDeviceExtensionsNotPromoted(
2652     const vk::ExtensionNameList &deviceExtensionNames)
2653 {
2654     if (mFeatures.supportsSharedPresentableImageExtension.enabled)
2655     {
2656         mEnabledDeviceExtensions.push_back(VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME);
2657     }
2658 
2659     if (mFeatures.supportsDepthClampZeroOne.enabled)
2660     {
2661         mEnabledDeviceExtensions.push_back(VK_EXT_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME);
2662         vk::AddToPNextChain(&mEnabledFeatures, &mDepthClampZeroOneFeatures);
2663     }
2664 
2665     if (mFeatures.supportsMemoryBudget.enabled)
2666     {
2667         mEnabledDeviceExtensions.push_back(VK_EXT_MEMORY_BUDGET_EXTENSION_NAME);
2668     }
2669 
2670     if (mFeatures.supportsIncrementalPresent.enabled)
2671     {
2672         mEnabledDeviceExtensions.push_back(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME);
2673     }
2674 
2675 #if defined(ANGLE_PLATFORM_ANDROID)
2676     if (mFeatures.supportsAndroidHardwareBuffer.enabled)
2677     {
2678         mEnabledDeviceExtensions.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
2679         mEnabledDeviceExtensions.push_back(
2680             VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME);
2681     }
2682 #else
2683     ASSERT(!mFeatures.supportsAndroidHardwareBuffer.enabled);
2684 #endif
2685 
2686 #if defined(ANGLE_PLATFORM_GGP)
2687     if (mFeatures.supportsGGPFrameToken.enabled)
2688     {
2689         mEnabledDeviceExtensions.push_back(VK_GGP_FRAME_TOKEN_EXTENSION_NAME);
2690     }
2691 #else
2692     ASSERT(!mFeatures.supportsGGPFrameToken.enabled);
2693 #endif
2694 
2695     if (mFeatures.supportsExternalMemoryFd.enabled)
2696     {
2697         mEnabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
2698     }
2699 
2700     if (mFeatures.supportsExternalMemoryFuchsia.enabled)
2701     {
2702         mEnabledDeviceExtensions.push_back(VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME);
2703     }
2704 
2705     if (mFeatures.supportsExternalSemaphoreFd.enabled)
2706     {
2707         mEnabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
2708     }
2709 
2710     if (mFeatures.supportsExternalFenceFd.enabled)
2711     {
2712         mEnabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME);
2713     }
2714 
2715     if (mFeatures.supportsExternalSemaphoreFuchsia.enabled)
2716     {
2717         mEnabledDeviceExtensions.push_back(VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
2718     }
2719 
2720     if (mFeatures.supportsShaderStencilExport.enabled)
2721     {
2722         mEnabledDeviceExtensions.push_back(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME);
2723     }
2724 
2725     if (mFeatures.supportsRenderPassLoadStoreOpNone.enabled)
2726     {
2727         mEnabledDeviceExtensions.push_back(VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME);
2728     }
2729     else if (mFeatures.supportsRenderPassStoreOpNone.enabled)
2730     {
2731         mEnabledDeviceExtensions.push_back(VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME);
2732     }
2733 
2734     if (mFeatures.supportsTimestampSurfaceAttribute.enabled)
2735     {
2736         mEnabledDeviceExtensions.push_back(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME);
2737     }
2738 
2739     if (mFeatures.bresenhamLineRasterization.enabled)
2740     {
2741         mEnabledDeviceExtensions.push_back(VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME);
2742         vk::AddToPNextChain(&mEnabledFeatures, &mLineRasterizationFeatures);
2743     }
2744 
2745     if (mFeatures.provokingVertex.enabled)
2746     {
2747         mEnabledDeviceExtensions.push_back(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME);
2748         vk::AddToPNextChain(&mEnabledFeatures, &mProvokingVertexFeatures);
2749     }
2750 
2751     if (mVertexAttributeDivisorFeatures.vertexAttributeInstanceRateDivisor)
2752     {
2753         mEnabledDeviceExtensions.push_back(VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME);
2754         vk::AddToPNextChain(&mEnabledFeatures, &mVertexAttributeDivisorFeatures);
2755 
2756         // We only store 8 bit divisor in GraphicsPipelineDesc so capping value & we emulate if
2757         // exceeded
2758         mMaxVertexAttribDivisor =
2759             std::min(mVertexAttributeDivisorProperties.maxVertexAttribDivisor,
2760                      static_cast<uint32_t>(std::numeric_limits<uint8_t>::max()));
2761     }
2762 
2763     if (mFeatures.supportsTransformFeedbackExtension.enabled)
2764     {
2765         mEnabledDeviceExtensions.push_back(VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME);
2766         vk::AddToPNextChain(&mEnabledFeatures, &mTransformFeedbackFeatures);
2767     }
2768 
2769     if (mFeatures.supportsCustomBorderColor.enabled)
2770     {
2771         mEnabledDeviceExtensions.push_back(VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME);
2772         vk::AddToPNextChain(&mEnabledFeatures, &mCustomBorderColorFeatures);
2773     }
2774 
2775     if (mFeatures.supportsIndexTypeUint8.enabled)
2776     {
2777         mEnabledDeviceExtensions.push_back(VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME);
2778         vk::AddToPNextChain(&mEnabledFeatures, &mIndexTypeUint8Features);
2779     }
2780 
2781     if (mFeatures.supportsMultisampledRenderToSingleSampled.enabled)
2782     {
2783         mEnabledDeviceExtensions.push_back(
2784             VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME);
2785         vk::AddToPNextChain(&mEnabledFeatures, &mMultisampledRenderToSingleSampledFeatures);
2786     }
2787 
2788     if (mFeatures.supportsMultisampledRenderToSingleSampledGOOGLEX.enabled)
2789     {
2790         ASSERT(!mFeatures.supportsMultisampledRenderToSingleSampled.enabled);
2791         mEnabledDeviceExtensions.push_back(
2792             VK_GOOGLEX_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME);
2793         vk::AddToPNextChain(&mEnabledFeatures, &mMultisampledRenderToSingleSampledFeaturesGOOGLEX);
2794     }
2795 
2796     if (mFeatures.logMemoryReportCallbacks.enabled || mFeatures.logMemoryReportStats.enabled)
2797     {
2798         ASSERT(mMemoryReportFeatures.deviceMemoryReport);
2799         mEnabledDeviceExtensions.push_back(VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME);
2800     }
2801 
2802     if (mFeatures.supportsExternalMemoryDmaBufAndModifiers.enabled)
2803     {
2804         mEnabledDeviceExtensions.push_back(VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME);
2805         mEnabledDeviceExtensions.push_back(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME);
2806     }
2807 
2808     if (mFeatures.supportsDepthClipControl.enabled)
2809     {
2810         mEnabledDeviceExtensions.push_back(VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME);
2811         vk::AddToPNextChain(&mEnabledFeatures, &mDepthClipControlFeatures);
2812     }
2813 
2814     if (mFeatures.supportsPrimitivesGeneratedQuery.enabled)
2815     {
2816         mEnabledDeviceExtensions.push_back(VK_EXT_PRIMITIVES_GENERATED_QUERY_EXTENSION_NAME);
2817         vk::AddToPNextChain(&mEnabledFeatures, &mPrimitivesGeneratedQueryFeatures);
2818     }
2819 
2820     if (mFeatures.supportsPrimitiveTopologyListRestart.enabled)
2821     {
2822         mEnabledDeviceExtensions.push_back(VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME);
2823         vk::AddToPNextChain(&mEnabledFeatures, &mPrimitiveTopologyListRestartFeatures);
2824     }
2825 
2826     if (mFeatures.supportsBlendOperationAdvanced.enabled)
2827     {
2828         mEnabledDeviceExtensions.push_back(VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME);
2829     }
2830 
2831     if (mFeatures.supportsGraphicsPipelineLibrary.enabled)
2832     {
2833         // VK_EXT_graphics_pipeline_library requires VK_KHR_pipeline_library
2834         ASSERT(ExtensionFound(VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME, deviceExtensionNames));
2835         mEnabledDeviceExtensions.push_back(VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME);
2836 
2837         mEnabledDeviceExtensions.push_back(VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME);
2838         vk::AddToPNextChain(&mEnabledFeatures, &mGraphicsPipelineLibraryFeatures);
2839     }
2840 
2841     if (mFeatures.supportsFragmentShadingRate.enabled)
2842     {
2843         mEnabledDeviceExtensions.push_back(VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME);
2844         vk::AddToPNextChain(&mEnabledFeatures, &mFragmentShadingRateFeatures);
2845     }
2846 
2847     if (mFeatures.supportsFragmentShaderPixelInterlock.enabled)
2848     {
2849         mEnabledDeviceExtensions.push_back(VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME);
2850         vk::AddToPNextChain(&mEnabledFeatures, &mFragmentShaderInterlockFeatures);
2851     }
2852 
2853     if (mFeatures.supportsPipelineRobustness.enabled)
2854     {
2855         mEnabledDeviceExtensions.push_back(VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME);
2856         vk::AddToPNextChain(&mEnabledFeatures, &mPipelineRobustnessFeatures);
2857     }
2858 
2859     if (mFeatures.supportsPipelineProtectedAccess.enabled)
2860     {
2861         mEnabledDeviceExtensions.push_back(VK_EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME);
2862         vk::AddToPNextChain(&mEnabledFeatures, &mPipelineProtectedAccessFeatures);
2863     }
2864 
2865     if (mFeatures.supportsRasterizationOrderAttachmentAccess.enabled)
2866     {
2867         if (ExtensionFound(VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME,
2868                            deviceExtensionNames))
2869         {
2870             mEnabledDeviceExtensions.push_back(
2871                 VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME);
2872         }
2873         else
2874         {
2875             ASSERT(ExtensionFound(VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME,
2876                                   deviceExtensionNames));
2877             mEnabledDeviceExtensions.push_back(
2878                 VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME);
2879         }
2880         vk::AddToPNextChain(&mEnabledFeatures, &mRasterizationOrderAttachmentAccessFeatures);
2881     }
2882 
2883     if (mFeatures.supportsImage2dViewOf3d.enabled)
2884     {
2885         mEnabledDeviceExtensions.push_back(VK_EXT_IMAGE_2D_VIEW_OF_3D_EXTENSION_NAME);
2886         vk::AddToPNextChain(&mEnabledFeatures, &mImage2dViewOf3dFeatures);
2887     }
2888 
2889     if (mFeatures.supportsSwapchainMaintenance1.enabled)
2890     {
2891         mEnabledDeviceExtensions.push_back(VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME);
2892         vk::AddToPNextChain(&mEnabledFeatures, &mSwapchainMaintenance1Features);
2893     }
2894 
2895     if (mFeatures.supportsLegacyDithering.enabled)
2896     {
2897         mEnabledDeviceExtensions.push_back(VK_EXT_LEGACY_DITHERING_EXTENSION_NAME);
2898         vk::AddToPNextChain(&mEnabledFeatures, &mDitheringFeatures);
2899     }
2900 
2901 #if defined(ANGLE_PLATFORM_WINDOWS)
2902     // We only need the VK_EXT_full_screen_exclusive extension if we are opting
2903     // out of it via VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT (i.e. working
2904     // around driver bugs).
2905     if (getFeatures().supportsFullScreenExclusive.enabled &&
2906         getFeatures().forceDisableFullScreenExclusive.enabled)
2907     {
2908         mEnabledDeviceExtensions.push_back(VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME);
2909     }
2910 #endif
2911 }
2912 
2913 // See comment above appendDeviceExtensionFeaturesPromotedTo11.  Additional extensions are enabled
2914 // here which don't have feature structs:
2915 //
2916 // - VK_KHR_get_memory_requirements2
2917 // - VK_KHR_bind_memory2
2918 // - VK_KHR_maintenance1
2919 // - VK_KHR_external_memory
2920 // - VK_KHR_external_semaphore
2921 // - VK_KHR_external_fence
2922 //
enableDeviceExtensionsPromotedTo11(const vk::ExtensionNameList & deviceExtensionNames)2923 void RendererVk::enableDeviceExtensionsPromotedTo11(
2924     const vk::ExtensionNameList &deviceExtensionNames)
2925 {
2926     // OVR_multiview disallows multiview with geometry and tessellation, so don't request these
2927     // features.
2928     mMultiviewFeatures.multiviewGeometryShader            = VK_FALSE;
2929     mMultiviewFeatures.multiviewTessellationShader        = VK_FALSE;
2930     mPhysicalDevice11Features.multiviewGeometryShader     = VK_FALSE;
2931     mPhysicalDevice11Features.multiviewTessellationShader = VK_FALSE;
2932 
2933     // Disable protected memory if not needed as it can introduce overhead
2934     if (!mFeatures.supportsProtectedMemory.enabled)
2935     {
2936         mPhysicalDevice11Features.protectedMemory = VK_FALSE;
2937     }
2938 
2939     if (isVulkan11Device())
2940     {
2941         if (mFeatures.supportsMultiview.enabled)
2942         {
2943             vk::AddToPNextChain(&mEnabledFeatures, &mMultiviewFeatures);
2944         }
2945 
2946         if (mFeatures.supportsYUVSamplerConversion.enabled)
2947         {
2948             vk::AddToPNextChain(&mEnabledFeatures, &mSamplerYcbcrConversionFeatures);
2949         }
2950 
2951         if (mFeatures.supportsProtectedMemory.enabled)
2952         {
2953             vk::AddToPNextChain(&mEnabledFeatures, &mProtectedMemoryFeatures);
2954         }
2955 
2956         return;
2957     }
2958 
2959     if (mFeatures.supportsGetMemoryRequirements2.enabled)
2960     {
2961         mEnabledDeviceExtensions.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
2962     }
2963 
2964     if (ExtensionFound(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, deviceExtensionNames))
2965     {
2966         mEnabledDeviceExtensions.push_back(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
2967     }
2968 
2969     if (mFeatures.supportsBindMemory2.enabled)
2970     {
2971         mEnabledDeviceExtensions.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
2972     }
2973 
2974     if (mFeatures.supportsNegativeViewport.enabled)
2975     {
2976         mEnabledDeviceExtensions.push_back(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
2977     }
2978 
2979     if (mFeatures.supportsAndroidHardwareBuffer.enabled ||
2980         mFeatures.supportsExternalMemoryFd.enabled ||
2981         mFeatures.supportsExternalMemoryFuchsia.enabled)
2982     {
2983         mEnabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
2984     }
2985 
2986     if (mFeatures.supportsExternalSemaphoreFd.enabled ||
2987         mFeatures.supportsExternalSemaphoreFuchsia.enabled)
2988     {
2989         mEnabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
2990     }
2991 
2992     if (mFeatures.supportsExternalFenceFd.enabled)
2993     {
2994         mEnabledDeviceExtensions.push_back(VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME);
2995     }
2996 
2997     if (mFeatures.supportsMultiview.enabled)
2998     {
2999         mEnabledDeviceExtensions.push_back(VK_KHR_MULTIVIEW_EXTENSION_NAME);
3000         vk::AddToPNextChain(&mEnabledFeatures, &mMultiviewFeatures);
3001     }
3002 
3003     if (mFeatures.supportsYUVSamplerConversion.enabled)
3004     {
3005         mEnabledDeviceExtensions.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
3006         vk::AddToPNextChain(&mEnabledFeatures, &mSamplerYcbcrConversionFeatures);
3007     }
3008 
3009     if (mFeatures.supportsProtectedMemory.enabled)
3010     {
3011         vk::AddToPNextChain(&mEnabledFeatures, &mProtectedMemoryFeatures);
3012     }
3013 }
3014 
3015 // See comment above appendDeviceExtensionFeaturesPromotedTo12.  Additional extensions are enabled
3016 // here which don't have feature structs:
3017 //
3018 // - VK_KHR_create_renderpass2
3019 // - VK_KHR_image_format_list
3020 // - VK_KHR_sampler_mirror_clamp_to_edge
3021 //
enableDeviceExtensionsPromotedTo12(const vk::ExtensionNameList & deviceExtensionNames)3022 void RendererVk::enableDeviceExtensionsPromotedTo12(
3023     const vk::ExtensionNameList &deviceExtensionNames)
3024 {
3025     if (mFeatures.supportsRenderpass2.enabled)
3026     {
3027         mEnabledDeviceExtensions.push_back(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME);
3028     }
3029 
3030     if (mFeatures.supportsImageFormatList.enabled)
3031     {
3032         mEnabledDeviceExtensions.push_back(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME);
3033     }
3034 
3035     if (mFeatures.supportsSamplerMirrorClampToEdge.enabled)
3036     {
3037         mEnabledDeviceExtensions.push_back(VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME);
3038     }
3039 
3040     if (mFeatures.supportsDepthStencilResolve.enabled)
3041     {
3042         mEnabledDeviceExtensions.push_back(VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME);
3043     }
3044 
3045     if (mFeatures.allowGenerateMipmapWithCompute.enabled)
3046     {
3047         mEnabledDeviceExtensions.push_back(VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME);
3048         vk::AddToPNextChain(&mEnabledFeatures, &mSubgroupExtendedTypesFeatures);
3049     }
3050 
3051     if (mFeatures.supportsShaderFloat16.enabled)
3052     {
3053         mEnabledDeviceExtensions.push_back(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME);
3054         vk::AddToPNextChain(&mEnabledFeatures, &mShaderFloat16Int8Features);
3055     }
3056 
3057     if (mFeatures.supportsHostQueryReset.enabled)
3058     {
3059         mEnabledDeviceExtensions.push_back(VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME);
3060         vk::AddToPNextChain(&mEnabledFeatures, &mHostQueryResetFeatures);
3061     }
3062 
3063     if (mFeatures.supportsImagelessFramebuffer.enabled)
3064     {
3065         mEnabledDeviceExtensions.push_back(VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME);
3066         vk::AddToPNextChain(&mEnabledFeatures, &mImagelessFramebufferFeatures);
3067     }
3068 
3069     if (mFeatures.supportsTimelineSemaphore.enabled)
3070     {
3071         mEnabledDeviceExtensions.push_back(VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME);
3072         vk::AddToPNextChain(&mEnabledFeatures, &mTimelineSemaphoreFeatures);
3073     }
3074 }
3075 
3076 // See comment above appendDeviceExtensionFeaturesPromotedTo13.
enableDeviceExtensionsPromotedTo13(const vk::ExtensionNameList & deviceExtensionNames)3077 void RendererVk::enableDeviceExtensionsPromotedTo13(
3078     const vk::ExtensionNameList &deviceExtensionNames)
3079 {
3080     if (mFeatures.supportsPipelineCreationCacheControl.enabled)
3081     {
3082         mEnabledDeviceExtensions.push_back(VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME);
3083         vk::AddToPNextChain(&mEnabledFeatures, &mPipelineCreationCacheControlFeatures);
3084     }
3085 
3086     if (mFeatures.supportsPipelineCreationFeedback.enabled)
3087     {
3088         mEnabledDeviceExtensions.push_back(VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME);
3089     }
3090 
3091     if (mFeatures.supportsExtendedDynamicState.enabled)
3092     {
3093         mEnabledDeviceExtensions.push_back(VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME);
3094         vk::AddToPNextChain(&mEnabledFeatures, &mExtendedDynamicStateFeatures);
3095     }
3096 
3097     if (mFeatures.supportsExtendedDynamicState2.enabled)
3098     {
3099         mEnabledDeviceExtensions.push_back(VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME);
3100         vk::AddToPNextChain(&mEnabledFeatures, &mExtendedDynamicState2Features);
3101     }
3102 }
3103 
enableDeviceExtensions(DisplayVk * displayVk,const VulkanLayerVector & enabledDeviceLayerNames)3104 angle::Result RendererVk::enableDeviceExtensions(DisplayVk *displayVk,
3105                                                  const VulkanLayerVector &enabledDeviceLayerNames)
3106 {
3107     // Enumerate device extensions that are provided by the vulkan
3108     // implementation and implicit layers.
3109     uint32_t deviceExtensionCount = 0;
3110     ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(mPhysicalDevice, nullptr,
3111                                                                  &deviceExtensionCount, nullptr));
3112 
3113     // Work-around a race condition in the Android platform during Android start-up, that can cause
3114     // the second call to vkEnumerateDeviceExtensionProperties to have an additional extension.  In
3115     // that case, the second call will return VK_INCOMPLETE.  To work-around that, add 1 to
3116     // deviceExtensionCount and ask for one more extension property than the first call said there
3117     // were.  See: http://anglebug.com/6715 and internal-to-Google bug: b/206733351.
3118     deviceExtensionCount++;
3119     std::vector<VkExtensionProperties> deviceExtensionProps(deviceExtensionCount);
3120     ANGLE_VK_TRY(displayVk,
3121                  vkEnumerateDeviceExtensionProperties(
3122                      mPhysicalDevice, nullptr, &deviceExtensionCount, deviceExtensionProps.data()));
3123     // In case fewer items were returned than requested, resize deviceExtensionProps to the number
3124     // of extensions returned (i.e. deviceExtensionCount).  See: b/208937840
3125     deviceExtensionProps.resize(deviceExtensionCount);
3126 
3127     // Enumerate device extensions that are provided by explicit layers.
3128     for (const char *layerName : enabledDeviceLayerNames)
3129     {
3130         uint32_t previousExtensionCount    = static_cast<uint32_t>(deviceExtensionProps.size());
3131         uint32_t deviceLayerExtensionCount = 0;
3132         ANGLE_VK_TRY(displayVk,
3133                      vkEnumerateDeviceExtensionProperties(mPhysicalDevice, layerName,
3134                                                           &deviceLayerExtensionCount, nullptr));
3135         deviceExtensionProps.resize(previousExtensionCount + deviceLayerExtensionCount);
3136         ANGLE_VK_TRY(displayVk, vkEnumerateDeviceExtensionProperties(
3137                                     mPhysicalDevice, layerName, &deviceLayerExtensionCount,
3138                                     deviceExtensionProps.data() + previousExtensionCount));
3139         // In case fewer items were returned than requested, resize deviceExtensionProps to the
3140         // number of extensions returned (i.e. deviceLayerExtensionCount).
3141         deviceExtensionProps.resize(previousExtensionCount + deviceLayerExtensionCount);
3142     }
3143 
3144     // Get the list of device extensions that are available.
3145     vk::ExtensionNameList deviceExtensionNames;
3146     if (!deviceExtensionProps.empty())
3147     {
3148         ASSERT(deviceExtensionNames.size() <= deviceExtensionProps.size());
3149         for (const VkExtensionProperties &prop : deviceExtensionProps)
3150         {
3151             deviceExtensionNames.push_back(prop.extensionName);
3152         }
3153         std::sort(deviceExtensionNames.begin(), deviceExtensionNames.end(), StrLess);
3154     }
3155 
3156     if (displayVk->isUsingSwapchain())
3157     {
3158         mEnabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
3159     }
3160 
3161     // Query extensions and their features.
3162     queryDeviceExtensionFeatures(deviceExtensionNames);
3163 
3164     // Initialize features and workarounds.
3165     initFeatures(displayVk, deviceExtensionNames);
3166 
3167     // App based feature overrides.
3168     appBasedFeatureOverrides(displayVk, deviceExtensionNames);
3169 
3170     // Enable extensions that could be used
3171     enableDeviceExtensionsNotPromoted(deviceExtensionNames);
3172     enableDeviceExtensionsPromotedTo11(deviceExtensionNames);
3173     enableDeviceExtensionsPromotedTo12(deviceExtensionNames);
3174     enableDeviceExtensionsPromotedTo13(deviceExtensionNames);
3175 
3176     std::sort(mEnabledDeviceExtensions.begin(), mEnabledDeviceExtensions.end(), StrLess);
3177     ANGLE_VK_TRY(displayVk,
3178                  VerifyExtensionsPresent(deviceExtensionNames, mEnabledDeviceExtensions));
3179 
3180     return angle::Result::Continue;
3181 }
3182 
initInstanceExtensionEntryPoints()3183 void RendererVk::initInstanceExtensionEntryPoints()
3184 {
3185 #if !defined(ANGLE_SHARED_LIBVULKAN)
3186     // Instance entry points
3187     if (mFeatures.supportsExternalSemaphoreFd.enabled ||
3188         mFeatures.supportsExternalSemaphoreFuchsia.enabled)
3189     {
3190         InitExternalSemaphoreFdFunctions(mInstance);
3191     }
3192 
3193     if (mFeatures.supportsExternalFenceFd.enabled)
3194     {
3195         InitExternalFenceFdFunctions(mInstance);
3196     }
3197 
3198 #    if defined(ANGLE_PLATFORM_ANDROID)
3199     if (mFeatures.supportsAndroidHardwareBuffer.enabled)
3200     {
3201         InitExternalMemoryHardwareBufferANDROIDFunctions(mInstance);
3202     }
3203 #    endif
3204 
3205     if (!isVulkan11Instance())
3206     {
3207         if (mFeatures.supportsExternalFenceCapabilities.enabled)
3208         {
3209             InitExternalFenceCapabilitiesFunctions(mInstance);
3210         }
3211         if (mFeatures.supportsExternalSemaphoreCapabilities.enabled)
3212         {
3213             InitExternalSemaphoreCapabilitiesFunctions(mInstance);
3214         }
3215     }
3216 #endif
3217 
3218     // For promoted extensions, initialize their entry points from the core version.
3219     initializeInstanceExtensionEntryPointsFromCore();
3220 }
3221 
initDeviceExtensionEntryPoints()3222 void RendererVk::initDeviceExtensionEntryPoints()
3223 {
3224 #if !defined(ANGLE_SHARED_LIBVULKAN)
3225     // Device entry points
3226     if (mFeatures.supportsTransformFeedbackExtension.enabled)
3227     {
3228         InitTransformFeedbackEXTFunctions(mDevice);
3229     }
3230     if (useLogicOpDynamicState())
3231     {
3232         // VK_EXT_extended_dynamic_state2 is only partially core in Vulkan 1.3.  If the logicOp
3233         // dynamic state (only from the extension) is used, need to load the entry points from the
3234         // extension
3235         InitExtendedDynamicState2EXTFunctions(mDevice);
3236     }
3237     if (mFeatures.supportsFragmentShadingRate.enabled)
3238     {
3239         InitFragmentShadingRateKHRDeviceFunction(mDevice);
3240     }
3241     if (mFeatures.supportsTimestampSurfaceAttribute.enabled)
3242     {
3243         InitGetPastPresentationTimingGoogleFunction(mDevice);
3244     }
3245     if (!isVulkan11Device())
3246     {
3247         if (mFeatures.supportsGetMemoryRequirements2.enabled)
3248         {
3249             InitGetMemoryRequirements2KHRFunctions(mDevice);
3250         }
3251         if (mFeatures.supportsBindMemory2.enabled)
3252         {
3253             InitBindMemory2KHRFunctions(mDevice);
3254         }
3255         if (mFeatures.supportsYUVSamplerConversion.enabled)
3256         {
3257             InitSamplerYcbcrKHRFunctions(mDevice);
3258         }
3259     }
3260     // Extensions promoted to Vulkan 1.2
3261     {
3262         if (mFeatures.supportsHostQueryReset.enabled)
3263         {
3264             InitHostQueryResetFunctions(mDevice);
3265         }
3266         if (mFeatures.supportsRenderpass2.enabled)
3267         {
3268             InitRenderPass2KHRFunctions(mDevice);
3269         }
3270     }
3271     // Extensions promoted to Vulkan 1.3
3272     {
3273         if (mFeatures.supportsExtendedDynamicState.enabled)
3274         {
3275             InitExtendedDynamicStateEXTFunctions(mDevice);
3276         }
3277         if (mFeatures.supportsExtendedDynamicState2.enabled)
3278         {
3279             InitExtendedDynamicState2EXTFunctions(mDevice);
3280         }
3281     }
3282 #endif  // !defined(ANGLE_SHARED_LIBVULKAN)
3283 
3284     // For promoted extensions, initialize their entry points from the core version.
3285     initializeDeviceExtensionEntryPointsFromCore();
3286 }
3287 
initializeDevice(DisplayVk * displayVk,uint32_t queueFamilyIndex)3288 angle::Result RendererVk::initializeDevice(DisplayVk *displayVk, uint32_t queueFamilyIndex)
3289 {
3290     uint32_t deviceLayerCount = 0;
3291     ANGLE_VK_TRY(displayVk,
3292                  vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount, nullptr));
3293 
3294     std::vector<VkLayerProperties> deviceLayerProps(deviceLayerCount);
3295     ANGLE_VK_TRY(displayVk, vkEnumerateDeviceLayerProperties(mPhysicalDevice, &deviceLayerCount,
3296                                                              deviceLayerProps.data()));
3297 
3298     VulkanLayerVector enabledDeviceLayerNames;
3299     if (mEnableValidationLayers)
3300     {
3301         mEnableValidationLayers =
3302             GetAvailableValidationLayers(deviceLayerProps, false, &enabledDeviceLayerNames);
3303     }
3304 
3305     const char *wsiLayer = displayVk->getWSILayer();
3306     if (wsiLayer)
3307     {
3308         enabledDeviceLayerNames.push_back(wsiLayer);
3309     }
3310 
3311     mEnabledFeatures       = {};
3312     mEnabledFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
3313 
3314     ANGLE_TRY(enableDeviceExtensions(displayVk, enabledDeviceLayerNames));
3315 
3316     // Used to support cubemap array:
3317     mEnabledFeatures.features.imageCubeArray = mFeatures.supportsImageCubeArray.enabled;
3318     // Used to support framebuffers with multiple attachments:
3319     mEnabledFeatures.features.independentBlend = mPhysicalDeviceFeatures.independentBlend;
3320     // Used to support multi_draw_indirect
3321     mEnabledFeatures.features.multiDrawIndirect = mPhysicalDeviceFeatures.multiDrawIndirect;
3322     mEnabledFeatures.features.drawIndirectFirstInstance =
3323         mPhysicalDeviceFeatures.drawIndirectFirstInstance;
3324     // Used to support robust buffer access, if VK_EXT_pipeline_robustness is not supported.
3325     if (!mFeatures.supportsPipelineRobustness.enabled)
3326     {
3327         mEnabledFeatures.features.robustBufferAccess = mPhysicalDeviceFeatures.robustBufferAccess;
3328     }
3329     // Used to support Anisotropic filtering:
3330     mEnabledFeatures.features.samplerAnisotropy = mPhysicalDeviceFeatures.samplerAnisotropy;
3331     // Used to support wide lines:
3332     mEnabledFeatures.features.wideLines = mPhysicalDeviceFeatures.wideLines;
3333     // Used to emulate transform feedback:
3334     mEnabledFeatures.features.vertexPipelineStoresAndAtomics =
3335         mPhysicalDeviceFeatures.vertexPipelineStoresAndAtomics;
3336     // Used to implement storage buffers and images in the fragment shader:
3337     mEnabledFeatures.features.fragmentStoresAndAtomics =
3338         mPhysicalDeviceFeatures.fragmentStoresAndAtomics;
3339     // Used to emulate the primitives generated query:
3340     mEnabledFeatures.features.pipelineStatisticsQuery =
3341         !mFeatures.supportsPrimitivesGeneratedQuery.enabled &&
3342         mFeatures.supportsPipelineStatisticsQuery.enabled;
3343     // Used to support geometry shaders:
3344     mEnabledFeatures.features.geometryShader = mPhysicalDeviceFeatures.geometryShader;
3345     // Used to support EXT_gpu_shader5:
3346     mEnabledFeatures.features.shaderImageGatherExtended =
3347         mPhysicalDeviceFeatures.shaderImageGatherExtended;
3348     // Used to support EXT_gpu_shader5:
3349     mEnabledFeatures.features.shaderUniformBufferArrayDynamicIndexing =
3350         mPhysicalDeviceFeatures.shaderUniformBufferArrayDynamicIndexing;
3351     mEnabledFeatures.features.shaderSampledImageArrayDynamicIndexing =
3352         mPhysicalDeviceFeatures.shaderSampledImageArrayDynamicIndexing;
3353     // Used to support APPLE_clip_distance
3354     mEnabledFeatures.features.shaderClipDistance = mPhysicalDeviceFeatures.shaderClipDistance;
3355     // Used to support OES_sample_shading
3356     mEnabledFeatures.features.sampleRateShading = mPhysicalDeviceFeatures.sampleRateShading;
3357     // Used to support EXT_depth_clamp and depth clears through draw calls
3358     mEnabledFeatures.features.depthClamp = mPhysicalDeviceFeatures.depthClamp;
3359     // Used to support EXT_polygon_offset_clamp
3360     mEnabledFeatures.features.depthBiasClamp = mPhysicalDeviceFeatures.depthBiasClamp;
3361     // Used to support NV_polygon_mode / ANGLE_polygon_mode
3362     mEnabledFeatures.features.fillModeNonSolid = mPhysicalDeviceFeatures.fillModeNonSolid;
3363     // Used to support EXT_clip_cull_distance
3364     mEnabledFeatures.features.shaderCullDistance = mPhysicalDeviceFeatures.shaderCullDistance;
3365     // Used to support tessellation Shader:
3366     mEnabledFeatures.features.tessellationShader = mPhysicalDeviceFeatures.tessellationShader;
3367     // Used to support EXT_blend_func_extended
3368     mEnabledFeatures.features.dualSrcBlend = mPhysicalDeviceFeatures.dualSrcBlend;
3369     // Used to support ANGLE_logic_op and GLES1
3370     mEnabledFeatures.features.logicOp = mPhysicalDeviceFeatures.logicOp;
3371     // Used to support EXT_multisample_compatibility
3372     mEnabledFeatures.features.alphaToOne = mPhysicalDeviceFeatures.alphaToOne;
3373 
3374     if (!vk::OutsideRenderPassCommandBuffer::ExecutesInline() ||
3375         !vk::RenderPassCommandBuffer::ExecutesInline())
3376     {
3377         mEnabledFeatures.features.inheritedQueries = mPhysicalDeviceFeatures.inheritedQueries;
3378     }
3379 
3380     // Setup device initialization struct
3381     VkDeviceCreateInfo createInfo = {};
3382 
3383     if (mFeatures.logMemoryReportCallbacks.enabled || mFeatures.logMemoryReportStats.enabled)
3384     {
3385         ASSERT(mMemoryReportFeatures.deviceMemoryReport);
3386 
3387         mMemoryReportCallback       = {};
3388         mMemoryReportCallback.sType = VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT;
3389         mMemoryReportCallback.pfnUserCallback = &MemoryReportCallback;
3390         mMemoryReportCallback.pUserData       = this;
3391         vk::AddToPNextChain(&createInfo, &mMemoryReportCallback);
3392     }
3393 
3394     mCurrentQueueFamilyIndex = queueFamilyIndex;
3395 
3396     vk::QueueFamily queueFamily;
3397     queueFamily.initialize(mQueueFamilyProperties[queueFamilyIndex], queueFamilyIndex);
3398     ANGLE_VK_CHECK(displayVk, queueFamily.getDeviceQueueCount() > 0,
3399                    VK_ERROR_INITIALIZATION_FAILED);
3400 
3401     // We enable protected context only if both supportsProtectedMemory and device also supports
3402     // protected. There are cases we have to disable supportsProtectedMemory feature due to driver
3403     // bugs.
3404     bool enableProtectedContent =
3405         queueFamily.supportsProtected() && mFeatures.supportsProtectedMemory.enabled;
3406 
3407     uint32_t queueCount = std::min(queueFamily.getDeviceQueueCount(),
3408                                    static_cast<uint32_t>(egl::ContextPriority::EnumCount));
3409 
3410     uint32_t queueCreateInfoCount              = 1;
3411     VkDeviceQueueCreateInfo queueCreateInfo[1] = {};
3412     queueCreateInfo[0].sType                   = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
3413     queueCreateInfo[0].flags = enableProtectedContent ? VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT : 0;
3414     queueCreateInfo[0].queueFamilyIndex = queueFamilyIndex;
3415     queueCreateInfo[0].queueCount       = queueCount;
3416     queueCreateInfo[0].pQueuePriorities = vk::QueueFamily::kQueuePriorities;
3417 
3418     // Create Device
3419     createInfo.sType                 = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
3420     createInfo.flags                 = 0;
3421     createInfo.queueCreateInfoCount  = queueCreateInfoCount;
3422     createInfo.pQueueCreateInfos     = queueCreateInfo;
3423     createInfo.enabledLayerCount     = static_cast<uint32_t>(enabledDeviceLayerNames.size());
3424     createInfo.ppEnabledLayerNames   = enabledDeviceLayerNames.data();
3425     createInfo.enabledExtensionCount = static_cast<uint32_t>(mEnabledDeviceExtensions.size());
3426     createInfo.ppEnabledExtensionNames =
3427         mEnabledDeviceExtensions.empty() ? nullptr : mEnabledDeviceExtensions.data();
3428     mEnabledDeviceExtensions.push_back(nullptr);
3429 
3430     // Enable core features without assuming VkPhysicalDeviceFeatures2KHR is accepted in the
3431     // pNext chain of VkDeviceCreateInfo.
3432     createInfo.pEnabledFeatures = &mEnabledFeatures.features;
3433 
3434     // Append the feature structs chain to the end of createInfo structs chain.
3435     if (mEnabledFeatures.pNext)
3436     {
3437         vk::AppendToPNextChain(&createInfo, mEnabledFeatures.pNext);
3438     }
3439 
3440     // Create the list of expected VVL messages to suppress.  Done before creating the device, as it
3441     // may also generate messages.
3442     initializeValidationMessageSuppressions();
3443 
3444     ANGLE_VK_TRY(displayVk, vkCreateDevice(mPhysicalDevice, &createInfo, nullptr, &mDevice));
3445 #if defined(ANGLE_SHARED_LIBVULKAN)
3446     // Load volk if we are loading dynamically
3447     volkLoadDevice(mDevice);
3448 #endif  // defined(ANGLE_SHARED_LIBVULKAN)
3449 
3450     initDeviceExtensionEntryPoints();
3451 
3452     vk::DeviceQueueMap graphicsQueueMap =
3453         queueFamily.initializeQueueMap(mDevice, enableProtectedContent, 0, queueCount);
3454 
3455     ANGLE_TRY(mCommandQueue.init(displayVk, graphicsQueueMap));
3456     ANGLE_TRY(mCommandProcessor.init());
3457 
3458     if (mFeatures.forceMaxUniformBufferSize16KB.enabled)
3459     {
3460         mDefaultUniformBufferSize = kMinDefaultUniformBufferSize;
3461     }
3462     // Cap it with the driver limit
3463     mDefaultUniformBufferSize = std::min(
3464         mDefaultUniformBufferSize, getPhysicalDeviceProperties().limits.maxUniformBufferRange);
3465 
3466     // Initialize the vulkan pipeline cache.
3467     {
3468         std::unique_lock<std::mutex> lock(mPipelineCacheMutex);
3469         bool loadedFromBlobCache = false;
3470         ANGLE_TRY(initPipelineCache(displayVk, &mPipelineCache, &loadedFromBlobCache));
3471         if (loadedFromBlobCache)
3472         {
3473             ANGLE_TRY(getPipelineCacheSize(displayVk, &mPipelineCacheSizeAtLastSync));
3474         }
3475     }
3476 
3477     // Track the set of supported pipeline stages.  This is used when issuing image layout
3478     // transitions that cover many stages (such as AllGraphicsReadOnly) to mask out unsupported
3479     // stages, which avoids enumerating every possible combination of stages in the layouts.
3480     VkPipelineStageFlags unsupportedStages = 0;
3481     mSupportedVulkanShaderStageMask =
3482         VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_COMPUTE_BIT;
3483     if (!mPhysicalDeviceFeatures.tessellationShader)
3484     {
3485         unsupportedStages |= VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
3486                              VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT;
3487     }
3488     else
3489     {
3490         mSupportedVulkanShaderStageMask |=
3491             VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
3492     }
3493     if (!mPhysicalDeviceFeatures.geometryShader)
3494     {
3495         unsupportedStages |= VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT;
3496     }
3497     else
3498     {
3499         mSupportedVulkanShaderStageMask |= VK_SHADER_STAGE_GEOMETRY_BIT;
3500     }
3501     mSupportedVulkanPipelineStageMask = ~unsupportedStages;
3502 
3503     // Log the memory heap stats when the device has been initialized (when debugging).
3504     mMemoryAllocationTracker.onDeviceInit();
3505 
3506     return angle::Result::Continue;
3507 }
3508 
initializeValidationMessageSuppressions()3509 void RendererVk::initializeValidationMessageSuppressions()
3510 {
3511     // Build the list of validation errors that are currently expected and should be skipped.
3512     mSkippedValidationMessages.insert(mSkippedValidationMessages.end(), kSkippedMessages,
3513                                       kSkippedMessages + ArraySize(kSkippedMessages));
3514     if (!getFeatures().supportsPrimitiveTopologyListRestart.enabled)
3515     {
3516         mSkippedValidationMessages.insert(
3517             mSkippedValidationMessages.end(), kNoListRestartSkippedMessages,
3518             kNoListRestartSkippedMessages + ArraySize(kNoListRestartSkippedMessages));
3519     }
3520 
3521     // Build the list of syncval errors that are currently expected and should be skipped.
3522     mSkippedSyncvalMessages.insert(mSkippedSyncvalMessages.end(), kSkippedSyncvalMessages,
3523                                    kSkippedSyncvalMessages + ArraySize(kSkippedSyncvalMessages));
3524     if (!getFeatures().supportsRenderPassStoreOpNone.enabled &&
3525         !getFeatures().supportsRenderPassLoadStoreOpNone.enabled)
3526     {
3527         mSkippedSyncvalMessages.insert(mSkippedSyncvalMessages.end(),
3528                                        kSkippedSyncvalMessagesWithoutStoreOpNone,
3529                                        kSkippedSyncvalMessagesWithoutStoreOpNone +
3530                                            ArraySize(kSkippedSyncvalMessagesWithoutStoreOpNone));
3531     }
3532     if (!getFeatures().supportsRenderPassLoadStoreOpNone.enabled)
3533     {
3534         mSkippedSyncvalMessages.insert(
3535             mSkippedSyncvalMessages.end(), kSkippedSyncvalMessagesWithoutLoadStoreOpNone,
3536             kSkippedSyncvalMessagesWithoutLoadStoreOpNone +
3537                 ArraySize(kSkippedSyncvalMessagesWithoutLoadStoreOpNone));
3538     }
3539 }
3540 
// Selects a queue family that can both render (graphics + compute) and present to |surface|.
// If the device is already created, it only verifies the current queue family can present to
// this surface; otherwise it searches all queue families and initializes the device on the
// first suitable one.  On success, the chosen family index is written to |presentQueueOut|.
angle::Result RendererVk::selectPresentQueueForSurface(DisplayVk *displayVk,
                                                       VkSurfaceKHR surface,
                                                       uint32_t *presentQueueOut)
{
    // We've already initialized a device, and can't re-create it unless it's never been used.
    // TODO(jmadill): Handle the re-creation case if necessary.
    if (mDevice != VK_NULL_HANDLE)
    {
        ASSERT(mCurrentQueueFamilyIndex != std::numeric_limits<uint32_t>::max());

        // Check if the current device supports present on this surface.
        VkBool32 supportsPresent = VK_FALSE;
        ANGLE_VK_TRY(displayVk,
                     vkGetPhysicalDeviceSurfaceSupportKHR(mPhysicalDevice, mCurrentQueueFamilyIndex,
                                                          surface, &supportsPresent));

        if (supportsPresent == VK_TRUE)
        {
            *presentQueueOut = mCurrentQueueFamilyIndex;
            return angle::Result::Continue;
        }
        // NOTE: falls through to the search below even though the device already exists; the
        // re-creation case above is not handled, so initializeDevice() may run again.
    }

    // Find a graphics and present queue.
    Optional<uint32_t> newPresentQueue;
    uint32_t queueCount = static_cast<uint32_t>(mQueueFamilyProperties.size());
    constexpr VkQueueFlags kGraphicsAndCompute = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
    for (uint32_t queueIndex = 0; queueIndex < queueCount; ++queueIndex)
    {
        const auto &queueInfo = mQueueFamilyProperties[queueIndex];
        // Only consider families that expose both graphics and compute capability.
        if ((queueInfo.queueFlags & kGraphicsAndCompute) == kGraphicsAndCompute)
        {
            VkBool32 supportsPresent = VK_FALSE;
            ANGLE_VK_TRY(displayVk, vkGetPhysicalDeviceSurfaceSupportKHR(
                                        mPhysicalDevice, queueIndex, surface, &supportsPresent));

            if (supportsPresent == VK_TRUE)
            {
                newPresentQueue = queueIndex;
                break;
            }
        }
    }

    // Fail initialization if no family can both render and present.
    ANGLE_VK_CHECK(displayVk, newPresentQueue.valid(), VK_ERROR_INITIALIZATION_FAILED);
    ANGLE_TRY(initializeDevice(displayVk, newPresentQueue.value()));

    *presentQueueOut = newPresentQueue.value();
    return angle::Result::Continue;
}
3591 
getVendorString() const3592 std::string RendererVk::getVendorString() const
3593 {
3594     return GetVendorString(mPhysicalDeviceProperties.vendorID);
3595 }
3596 
getRendererDescription() const3597 std::string RendererVk::getRendererDescription() const
3598 {
3599     std::stringstream strstr;
3600 
3601     uint32_t apiVersion = mPhysicalDeviceProperties.apiVersion;
3602 
3603     strstr << "Vulkan ";
3604     strstr << VK_VERSION_MAJOR(apiVersion) << ".";
3605     strstr << VK_VERSION_MINOR(apiVersion) << ".";
3606     strstr << VK_VERSION_PATCH(apiVersion);
3607 
3608     strstr << " (";
3609 
3610     // In the case of NVIDIA, deviceName does not necessarily contain "NVIDIA". Add "NVIDIA" so that
3611     // Vulkan end2end tests can be selectively disabled on NVIDIA. TODO(jmadill): should not be
3612     // needed after http://anglebug.com/1874 is fixed and end2end_tests use more sophisticated
3613     // driver detection.
3614     if (mPhysicalDeviceProperties.vendorID == VENDOR_ID_NVIDIA)
3615     {
3616         strstr << GetVendorString(mPhysicalDeviceProperties.vendorID) << " ";
3617     }
3618 
3619     strstr << mPhysicalDeviceProperties.deviceName;
3620     strstr << " (" << gl::FmtHex(mPhysicalDeviceProperties.deviceID) << ")";
3621 
3622     strstr << ")";
3623 
3624     return strstr.str();
3625 }
3626 
getVersionString(bool includeFullVersion) const3627 std::string RendererVk::getVersionString(bool includeFullVersion) const
3628 {
3629     std::stringstream strstr;
3630 
3631     uint32_t driverVersion = mPhysicalDeviceProperties.driverVersion;
3632     std::string driverName = std::string(mDriverProperties.driverName);
3633 
3634     if (!driverName.empty())
3635     {
3636         strstr << driverName;
3637     }
3638     else
3639     {
3640         strstr << GetVendorString(mPhysicalDeviceProperties.vendorID);
3641     }
3642 
3643     if (includeFullVersion)
3644     {
3645         strstr << "-";
3646 
3647         if (mPhysicalDeviceProperties.vendorID == VENDOR_ID_NVIDIA)
3648         {
3649             strstr << ANGLE_VK_VERSION_MAJOR_NVIDIA(driverVersion) << ".";
3650             strstr << ANGLE_VK_VERSION_MINOR_NVIDIA(driverVersion) << ".";
3651             strstr << ANGLE_VK_VERSION_SUB_MINOR_NVIDIA(driverVersion) << ".";
3652             strstr << ANGLE_VK_VERSION_PATCH_NVIDIA(driverVersion);
3653         }
3654         else if (mPhysicalDeviceProperties.vendorID == VENDOR_ID_INTEL && IsWindows())
3655         {
3656             strstr << ANGLE_VK_VERSION_MAJOR_WIN_INTEL(driverVersion) << ".";
3657             strstr << ANGLE_VK_VERSION_MAJOR_WIN_INTEL(driverVersion) << ".";
3658         }
3659         // All other drivers use the Vulkan standard
3660         else
3661         {
3662             strstr << VK_VERSION_MAJOR(driverVersion) << ".";
3663             strstr << VK_VERSION_MINOR(driverVersion) << ".";
3664             strstr << VK_VERSION_PATCH(driverVersion);
3665         }
3666     }
3667 
3668     return strstr.str();
3669 }
3670 
getMaxSupportedESVersion() const3671 gl::Version RendererVk::getMaxSupportedESVersion() const
3672 {
3673     // Current highest supported version
3674     gl::Version maxVersion = gl::Version(3, 2);
3675 
3676     // Early out without downgrading ES version if mock ICD enabled.
3677     // Mock ICD doesn't expose sufficient capabilities yet.
3678     // https://github.com/KhronosGroup/Vulkan-Tools/issues/84
3679     if (isMockICDEnabled())
3680     {
3681         return maxVersion;
3682     }
3683 
3684     // Limit to ES3.1 if there are any blockers for 3.2.
3685     if (!vk::CanSupportGPUShader5EXT(mPhysicalDeviceFeatures) &&
3686         !mFeatures.exposeNonConformantExtensionsAndVersions.enabled)
3687     {
3688         maxVersion = LimitVersionTo(maxVersion, {3, 1});
3689     }
3690 
3691     // TODO: more extension checks for 3.2.  http://anglebug.com/5366
3692     if (!mFeatures.exposeNonConformantExtensionsAndVersions.enabled)
3693     {
3694         maxVersion = LimitVersionTo(maxVersion, {3, 1});
3695     }
3696 
3697     // Limit to ES3.0 if there are any blockers for 3.1.
3698 
3699     // ES3.1 requires at least one atomic counter buffer and four storage buffers in compute.
3700     // Atomic counter buffers are emulated with storage buffers.  For simplicity, we always support
3701     // either none or IMPLEMENTATION_MAX_ATOMIC_COUNTER_BUFFERS atomic counter buffers.  So if
3702     // Vulkan doesn't support at least that many storage buffers in compute, we don't support 3.1.
3703     const uint32_t kMinimumStorageBuffersForES31 =
3704         gl::limits::kMinimumComputeStorageBuffers +
3705         gl::IMPLEMENTATION_MAX_ATOMIC_COUNTER_BUFFER_BINDINGS;
3706     if (mPhysicalDeviceProperties.limits.maxPerStageDescriptorStorageBuffers <
3707         kMinimumStorageBuffersForES31)
3708     {
3709         maxVersion = LimitVersionTo(maxVersion, {3, 0});
3710     }
3711 
3712     // ES3.1 requires at least a maximum offset of at least 2047.
3713     // If the Vulkan implementation can't support that, we cannot support 3.1.
3714     if (mPhysicalDeviceProperties.limits.maxVertexInputAttributeOffset < 2047)
3715     {
3716         maxVersion = LimitVersionTo(maxVersion, {3, 0});
3717     }
3718 
3719     // Limit to ES2.0 if there are any blockers for 3.0.
3720     // TODO: http://anglebug.com/3972 Limit to GLES 2.0 if flat shading can't be emulated
3721 
3722     // Multisample textures (ES3.1) and multisample renderbuffers (ES3.0) require the Vulkan driver
3723     // to support the standard sample locations (in order to pass dEQP tests that check these
3724     // locations).  If the Vulkan implementation can't support that, we cannot support 3.0/3.1.
3725     if (mPhysicalDeviceProperties.limits.standardSampleLocations != VK_TRUE)
3726     {
3727         maxVersion = LimitVersionTo(maxVersion, {2, 0});
3728     }
3729 
3730     // If independentBlend is not supported, we can't have a mix of has-alpha and emulated-alpha
3731     // render targets in a framebuffer.  We also cannot perform masked clears of multiple render
3732     // targets.
3733     if (!mPhysicalDeviceFeatures.independentBlend)
3734     {
3735         maxVersion = LimitVersionTo(maxVersion, {2, 0});
3736     }
3737 
3738     // If the Vulkan transform feedback extension is not present, we use an emulation path that
3739     // requires the vertexPipelineStoresAndAtomics feature. Without the extension or this feature,
3740     // we can't currently support transform feedback.
3741     if (!mFeatures.supportsTransformFeedbackExtension.enabled &&
3742         !mFeatures.emulateTransformFeedback.enabled)
3743     {
3744         maxVersion = LimitVersionTo(maxVersion, {2, 0});
3745     }
3746 
3747     // Limit to GLES 2.0 if maxPerStageDescriptorUniformBuffers is too low.
3748     // Table 6.31 MAX_VERTEX_UNIFORM_BLOCKS minimum value = 12
3749     // Table 6.32 MAX_FRAGMENT_UNIFORM_BLOCKS minimum value = 12
3750     // NOTE: We reserve some uniform buffers for emulation, so use the NativeCaps which takes this
3751     // into account, rather than the physical device maxPerStageDescriptorUniformBuffers limits.
3752     for (gl::ShaderType shaderType : gl::AllShaderTypes())
3753     {
3754         if (static_cast<GLuint>(getNativeCaps().maxShaderUniformBlocks[shaderType]) <
3755             gl::limits::kMinimumShaderUniformBlocks)
3756         {
3757             maxVersion = LimitVersionTo(maxVersion, {2, 0});
3758         }
3759     }
3760 
3761     // Limit to GLES 2.0 if maxVertexOutputComponents is too low.
3762     // Table 6.31 MAX VERTEX OUTPUT COMPONENTS minimum value = 64
3763     // NOTE: We reserve some vertex output components for emulation, so use the NativeCaps which
3764     // takes this into account, rather than the physical device maxVertexOutputComponents limits.
3765     if (static_cast<GLuint>(getNativeCaps().maxVertexOutputComponents) <
3766         gl::limits::kMinimumVertexOutputComponents)
3767     {
3768         maxVersion = LimitVersionTo(maxVersion, {2, 0});
3769     }
3770 
3771     return maxVersion;
3772 }
3773 
getMaxConformantESVersion() const3774 gl::Version RendererVk::getMaxConformantESVersion() const
3775 {
3776     const gl::Version maxSupportedESVersion = getMaxSupportedESVersion();
3777     const bool hasGeometryAndTessSupport =
3778         getNativeExtensions().geometryShaderAny() && getNativeExtensions().tessellationShaderEXT;
3779 
3780     if (!hasGeometryAndTessSupport || !mFeatures.exposeNonConformantExtensionsAndVersions.enabled)
3781     {
3782         return LimitVersionTo(maxSupportedESVersion, {3, 1});
3783     }
3784 
3785     return maxSupportedESVersion;
3786 }
3787 
getDeviceVersion()3788 uint32_t RendererVk::getDeviceVersion()
3789 {
3790     return mDeviceVersion == 0 ? mInstanceVersion : mDeviceVersion;
3791 }
3792 
canSupportFragmentShadingRate(const vk::ExtensionNameList & deviceExtensionNames)3793 bool RendererVk::canSupportFragmentShadingRate(const vk::ExtensionNameList &deviceExtensionNames)
3794 {
3795     // Device needs to support VK_KHR_fragment_shading_rate and specifically
3796     // pipeline fragment shading rate.
3797     if (mFragmentShadingRateFeatures.pipelineFragmentShadingRate != VK_TRUE)
3798     {
3799         return false;
3800     }
3801 
3802     // Init required functions
3803 #if !defined(ANGLE_SHARED_LIBVULKAN)
3804     InitFragmentShadingRateKHRInstanceFunction(mInstance);
3805 #endif  // !defined(ANGLE_SHARED_LIBVULKAN)
3806     ASSERT(vkGetPhysicalDeviceFragmentShadingRatesKHR);
3807 
3808     // Query number of supported shading rates first
3809     uint32_t shadingRatesCount = 0;
3810     VkResult result =
3811         vkGetPhysicalDeviceFragmentShadingRatesKHR(mPhysicalDevice, &shadingRatesCount, nullptr);
3812     ASSERT(result == VK_SUCCESS);
3813     ASSERT(shadingRatesCount > 0);
3814 
3815     std::vector<VkPhysicalDeviceFragmentShadingRateKHR> shadingRates(
3816         shadingRatesCount,
3817         {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR, nullptr, 0, {0, 0}});
3818 
3819     // Query supported shading rates
3820     result = vkGetPhysicalDeviceFragmentShadingRatesKHR(mPhysicalDevice, &shadingRatesCount,
3821                                                         shadingRates.data());
3822     ASSERT(result == VK_SUCCESS);
3823 
3824     // Cache supported fragment shading rates
3825     mSupportedFragmentShadingRates.reset();
3826     for (const VkPhysicalDeviceFragmentShadingRateKHR &shadingRate : shadingRates)
3827     {
3828         if (shadingRate.sampleCounts == 0)
3829         {
3830             continue;
3831         }
3832         mSupportedFragmentShadingRates.set(GetShadingRateFromVkExtent(shadingRate.fragmentSize));
3833     }
3834 
3835     // To implement GL_QCOM_shading_rate extension the Vulkan ICD needs to support at least the
3836     // following shading rates -
3837     //     {1, 1}
3838     //     {1, 2}
3839     //     {2, 1}
3840     //     {2, 2}
3841     return mSupportedFragmentShadingRates.test(gl::ShadingRate::_1x1) &&
3842            mSupportedFragmentShadingRates.test(gl::ShadingRate::_1x2) &&
3843            mSupportedFragmentShadingRates.test(gl::ShadingRate::_2x1) &&
3844            mSupportedFragmentShadingRates.test(gl::ShadingRate::_2x2);
3845 }
3846 
canPreferDeviceLocalMemoryHostVisible(VkPhysicalDeviceType deviceType)3847 bool RendererVk::canPreferDeviceLocalMemoryHostVisible(VkPhysicalDeviceType deviceType)
3848 {
3849     if (deviceType == VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU)
3850     {
3851         const vk::MemoryProperties &memoryProperties = getMemoryProperties();
3852         static constexpr VkMemoryPropertyFlags kHostVisiableDeviceLocalFlags =
3853             VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
3854         VkDeviceSize minHostVisiableDeviceLocalHeapSize = std::numeric_limits<VkDeviceSize>::max();
3855         VkDeviceSize maxDeviceLocalHeapSize             = 0;
3856         for (uint32_t i = 0; i < memoryProperties.getMemoryTypeCount(); ++i)
3857         {
3858             if ((memoryProperties.getMemoryType(i).propertyFlags &
3859                  VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
3860             {
3861                 maxDeviceLocalHeapSize =
3862                     std::max(maxDeviceLocalHeapSize, memoryProperties.getHeapSizeForMemoryType(i));
3863             }
3864             if ((memoryProperties.getMemoryType(i).propertyFlags & kHostVisiableDeviceLocalFlags) ==
3865                 kHostVisiableDeviceLocalFlags)
3866             {
3867                 minHostVisiableDeviceLocalHeapSize =
3868                     std::min(minHostVisiableDeviceLocalHeapSize,
3869                              memoryProperties.getHeapSizeForMemoryType(i));
3870             }
3871         }
3872         return minHostVisiableDeviceLocalHeapSize != std::numeric_limits<VkDeviceSize>::max() &&
3873                minHostVisiableDeviceLocalHeapSize >=
3874                    static_cast<VkDeviceSize>(maxDeviceLocalHeapSize * 0.8);
3875     }
3876     return deviceType != VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU;
3877 }
3878 
initFeatures(DisplayVk * displayVk,const vk::ExtensionNameList & deviceExtensionNames)3879 void RendererVk::initFeatures(DisplayVk *displayVk,
3880                               const vk::ExtensionNameList &deviceExtensionNames)
3881 {
3882     if (displayVk->getState().featuresAllDisabled)
3883     {
3884         ApplyFeatureOverrides(&mFeatures, displayVk->getState());
3885         return;
3886     }
3887 
3888     constexpr uint32_t kPixel2DriverWithRelaxedPrecision        = 0x801EA000;
3889     constexpr uint32_t kPixel4DriverWithWorkingSpecConstSupport = 0x80201000;
3890 
3891     const bool isAMD      = IsAMD(mPhysicalDeviceProperties.vendorID);
3892     const bool isApple    = IsAppleGPU(mPhysicalDeviceProperties.vendorID);
3893     const bool isARM      = IsARM(mPhysicalDeviceProperties.vendorID);
3894     const bool isIntel    = IsIntel(mPhysicalDeviceProperties.vendorID);
3895     const bool isNvidia   = IsNvidia(mPhysicalDeviceProperties.vendorID);
3896     const bool isPowerVR  = IsPowerVR(mPhysicalDeviceProperties.vendorID);
3897     const bool isQualcomm = IsQualcomm(mPhysicalDeviceProperties.vendorID);
3898     const bool isBroadcom = IsBroadcom(mPhysicalDeviceProperties.vendorID);
3899     const bool isSamsung  = IsSamsung(mPhysicalDeviceProperties.vendorID);
3900     const bool isSwiftShader =
3901         IsSwiftshader(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID);
3902 
3903     // MESA Virtio-GPU Venus driver: https://docs.mesa3d.org/drivers/venus.html
3904     const bool isVenus = IsVenus(mDriverProperties.driverID, mPhysicalDeviceProperties.deviceName);
3905 
3906     const bool isGalaxyS23 =
3907         IsGalaxyS23(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID);
3908 
3909     // Distinguish between the open source and proprietary Qualcomm drivers
3910     const bool isQualcommOpenSource =
3911         IsQualcommOpenSource(mPhysicalDeviceProperties.vendorID, mDriverProperties.driverID,
3912                              mPhysicalDeviceProperties.deviceName);
3913     const bool isQualcommProprietary = isQualcomm && !isQualcommOpenSource;
3914 
3915     // Lacking other explicit ways to tell if mali GPU is job manager based or command stream front
3916     // end based, we use maxDrawIndirectCount as equivalent since all JM based has
3917     // maxDrawIndirectCount==1 and all CSF based has maxDrawIndirectCount>1.
3918     bool isMaliJobManagerBasedGPU =
3919         isARM && getPhysicalDeviceProperties().limits.maxDrawIndirectCount <= 1;
3920     // Parse the ARM driver version to be readable/comparable
3921     const ARMDriverVersion armDriverVersion =
3922         ParseARMDriverVersion(mPhysicalDeviceProperties.driverVersion);
3923 
3924     // Identify Google Pixel brand Android devices
3925     const bool isPixel = IsPixel();
3926 
3927     angle::VersionInfo nvidiaVersion;
3928     if (isNvidia)
3929     {
3930         nvidiaVersion = angle::ParseNvidiaDriverVersion(mPhysicalDeviceProperties.driverVersion);
3931     }
3932 
3933     angle::VersionInfo mesaVersion;
3934     if (isIntel && IsLinux())
3935     {
3936         mesaVersion = angle::ParseMesaDriverVersion(mPhysicalDeviceProperties.driverVersion);
3937     }
3938 
3939     // Classify devices based on general architecture:
3940     //
3941     // - IMR (Immediate-Mode Rendering) devices generally progress through draw calls once and use
3942     //   the main GPU memory (accessed through caches) to store intermediate rendering results.
3943     // - TBR (Tile-Based Rendering) devices issue a pre-rendering geometry pass, then run through
3944     //   draw calls once per tile and store intermediate rendering results on the tile cache.
3945     //
3946     // Due to these key architectural differences, some operations improve performance on one while
3947     // deteriorating performance on the other.  ANGLE will accordingly make some decisions based on
3948     // the device architecture for optimal performance on both.
3949     const bool isImmediateModeRenderer = isNvidia || isAMD || isIntel || isSamsung || isSwiftShader;
3950     const bool isTileBasedRenderer     = isARM || isPowerVR || isQualcomm || isBroadcom || isApple;
3951 
3952     // Make sure all known architectures are accounted for.
3953     if (!isImmediateModeRenderer && !isTileBasedRenderer && !isMockICDEnabled())
3954     {
3955         WARN() << "Unknown GPU architecture";
3956     }
3957 
3958     bool supportsNegativeViewport =
3959         isVulkan11Device() ||
3960         ExtensionFound(VK_KHR_MAINTENANCE1_EXTENSION_NAME, deviceExtensionNames);
3961 
3962     ANGLE_FEATURE_CONDITION(&mFeatures, appendAliasedMemoryDecorations, true);
3963 
3964     ANGLE_FEATURE_CONDITION(
3965         &mFeatures, supportsSharedPresentableImageExtension,
3966         ExtensionFound(VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME, deviceExtensionNames));
3967 
3968     ANGLE_FEATURE_CONDITION(
3969         &mFeatures, supportsGetMemoryRequirements2,
3970         isVulkan11Device() ||
3971             ExtensionFound(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, deviceExtensionNames));
3972 
3973     ANGLE_FEATURE_CONDITION(
3974         &mFeatures, supportsBindMemory2,
3975         isVulkan11Device() ||
3976             ExtensionFound(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME, deviceExtensionNames));
3977 
3978     ANGLE_FEATURE_CONDITION(&mFeatures, bresenhamLineRasterization,
3979                             mLineRasterizationFeatures.bresenhamLines == VK_TRUE);
3980 
3981     ANGLE_FEATURE_CONDITION(&mFeatures, provokingVertex,
3982                             mProvokingVertexFeatures.provokingVertexLast == VK_TRUE);
3983 
3984     // http://b/208458772. ARM driver supports this protected memory extension but we are seeing
3985     // excessive load/store unit activity when this extension is enabled, even if not been used.
3986     // Disable this extension on older ARM platforms that don't support
3987     // VK_EXT_pipeline_protected_access.
3988     // http://anglebug.com/7714
3989     ANGLE_FEATURE_CONDITION(
3990         &mFeatures, supportsProtectedMemory,
3991         mProtectedMemoryFeatures.protectedMemory == VK_TRUE &&
3992             (!isARM || mPipelineProtectedAccessFeatures.pipelineProtectedAccess == VK_TRUE));
3993 
3994     ANGLE_FEATURE_CONDITION(&mFeatures, supportsHostQueryReset,
3995                             mHostQueryResetFeatures.hostQueryReset == VK_TRUE);
3996 
3997     // VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL and
3998     // VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL are introduced by
3999     // VK_KHR_maintenance2 and promoted to Vulkan 1.1.  For simplicity, this feature is only enabled
4000     // on Vulkan 1.1.
4001     ANGLE_FEATURE_CONDITION(&mFeatures, supportsMixedReadWriteDepthStencilLayouts,
4002                             isVulkan11Device());
4003 
4004     // VK_EXT_pipeline_creation_feedback is promoted to core in Vulkan 1.3.
4005     ANGLE_FEATURE_CONDITION(
4006         &mFeatures, supportsPipelineCreationFeedback,
4007         ExtensionFound(VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME, deviceExtensionNames));
4008 
4009     // Incomplete implementation on SwiftShader: http://issuetracker.google.com/234439593
4010     ANGLE_FEATURE_CONDITION(
4011         &mFeatures, supportsPipelineCreationCacheControl,
4012         mPipelineCreationCacheControlFeatures.pipelineCreationCacheControl && !isSwiftShader);
4013 
4014     // Note: Protected Swapchains is not determined until we have a VkSurface to query.
4015     // So, vendors should indicate support here so that the protected_content extension
4016     // is enabled.
4017     ANGLE_FEATURE_CONDITION(&mFeatures, supportsSurfaceProtectedSwapchains, IsAndroid());
4018 
4019     // Work around incorrect NVIDIA point size range clamping.
4020     // http://anglebug.com/2970#c10
4021     // Clamp if driver version is:
4022     //   < 430 on Windows
4023     //   < 421 otherwise
4024     ANGLE_FEATURE_CONDITION(&mFeatures, clampPointSize,
4025                             isNvidia && nvidiaVersion.major < uint32_t(IsWindows() ? 430 : 421));
4026 
4027     ANGLE_FEATURE_CONDITION(&mFeatures, supportsDepthClipEnable,
4028                             mDepthClipEnableFeatures.depthClipEnable == VK_TRUE);
4029 
4030     // Vulkan implementations are not required to clamp gl_FragDepth to [0, 1] by default.
4031     ANGLE_FEATURE_CONDITION(&mFeatures, supportsDepthClampZeroOne,
4032                             mDepthClampZeroOneFeatures.depthClampZeroOne == VK_TRUE);
4033 
4034     ANGLE_FEATURE_CONDITION(&mFeatures, clampFragDepth,
4035                             isNvidia && !mFeatures.supportsDepthClampZeroOne.enabled);
4036 
4037     ANGLE_FEATURE_CONDITION(
4038         &mFeatures, supportsRenderpass2,
4039         ExtensionFound(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME, deviceExtensionNames));
4040 
4041     ANGLE_FEATURE_CONDITION(
4042         &mFeatures, supportsIncrementalPresent,
4043         ExtensionFound(VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME, deviceExtensionNames));
4044 
4045 #if defined(ANGLE_PLATFORM_ANDROID)
4046     ANGLE_FEATURE_CONDITION(
4047         &mFeatures, supportsAndroidHardwareBuffer,
4048         IsAndroid() &&
4049             ExtensionFound(VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME,
4050                            deviceExtensionNames) &&
4051             ExtensionFound(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME, deviceExtensionNames));
4052 #endif
4053 
4054 #if defined(ANGLE_PLATFORM_GGP)
4055     ANGLE_FEATURE_CONDITION(
4056         &mFeatures, supportsGGPFrameToken,
4057         ExtensionFound(VK_GGP_FRAME_TOKEN_EXTENSION_NAME, deviceExtensionNames));
4058 #endif
4059 
4060     ANGLE_FEATURE_CONDITION(
4061         &mFeatures, supportsExternalMemoryFd,
4062         ExtensionFound(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME, deviceExtensionNames));
4063 
4064 #if defined(ANGLE_PLATFORM_WINDOWS)
4065     ANGLE_FEATURE_CONDITION(
4066         &mFeatures, supportsFullScreenExclusive,
4067         ExtensionFound(VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME, deviceExtensionNames));
4068 
4069     // On Windows+AMD, drivers before version 0x800106 (2.0.262) would
4070     // implicitly enable VK_EXT_full_screen_exclusive and start returning
4071     // extension-specific error codes in swapchain functions. Since the
4072     // extension was not enabled by ANGLE, it was impossible to handle these
4073     // error codes correctly. On these earlier drivers, we want to explicitly
4074     // enable the extension and opt out of it to avoid seeing those error codes
4075     // entirely.
4076     ANGLE_FEATURE_CONDITION(&mFeatures, forceDisableFullScreenExclusive,
4077                             isAMD && mPhysicalDeviceProperties.driverVersion < 0x800106);
4078 #endif
4079 
4080     ANGLE_FEATURE_CONDITION(
4081         &mFeatures, supportsExternalMemoryFuchsia,
4082         ExtensionFound(VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME, deviceExtensionNames));
4083 
4084     ANGLE_FEATURE_CONDITION(
4085         &mFeatures, supportsFilteringPrecision,
4086         ExtensionFound(VK_GOOGLE_SAMPLER_FILTERING_PRECISION_EXTENSION_NAME, deviceExtensionNames));
4087 
4088     ANGLE_FEATURE_CONDITION(
4089         &mFeatures, supportsExternalSemaphoreFd,
4090         ExtensionFound(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME, deviceExtensionNames));
4091 
4092     ANGLE_FEATURE_CONDITION(
4093         &mFeatures, supportsExternalSemaphoreFuchsia,
4094         ExtensionFound(VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME, deviceExtensionNames));
4095 
4096     ANGLE_FEATURE_CONDITION(
4097         &mFeatures, supportsExternalFenceFd,
4098         ExtensionFound(VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME, deviceExtensionNames));
4099 
4100 #if defined(ANGLE_PLATFORM_ANDROID) || defined(ANGLE_PLATFORM_LINUX)
4101     if (mFeatures.supportsExternalFenceCapabilities.enabled &&
4102         mFeatures.supportsExternalSemaphoreCapabilities.enabled)
4103     {
4104         VkExternalFenceProperties externalFenceProperties = {};
4105         externalFenceProperties.sType = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES;
4106 
4107         VkPhysicalDeviceExternalFenceInfo externalFenceInfo = {};
4108         externalFenceInfo.sType      = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO;
4109         externalFenceInfo.handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
4110 
4111         vkGetPhysicalDeviceExternalFencePropertiesKHR(mPhysicalDevice, &externalFenceInfo,
4112                                                       &externalFenceProperties);
4113 
4114         VkExternalSemaphoreProperties externalSemaphoreProperties = {};
4115         externalSemaphoreProperties.sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES;
4116 
4117         VkPhysicalDeviceExternalSemaphoreInfo externalSemaphoreInfo = {};
4118         externalSemaphoreInfo.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO;
4119         externalSemaphoreInfo.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR;
4120 
4121         vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(mPhysicalDevice, &externalSemaphoreInfo,
4122                                                           &externalSemaphoreProperties);
4123 
4124         ANGLE_FEATURE_CONDITION(
4125             &mFeatures, supportsAndroidNativeFenceSync,
4126             (mFeatures.supportsExternalFenceFd.enabled &&
4127              FencePropertiesCompatibleWithAndroid(externalFenceProperties) &&
4128              mFeatures.supportsExternalSemaphoreFd.enabled &&
4129              SemaphorePropertiesCompatibleWithAndroid(externalSemaphoreProperties)));
4130     }
4131     else
4132     {
4133         ANGLE_FEATURE_CONDITION(&mFeatures, supportsAndroidNativeFenceSync,
4134                                 (mFeatures.supportsExternalFenceFd.enabled &&
4135                                  mFeatures.supportsExternalSemaphoreFd.enabled));
4136     }
4137 #endif  // defined(ANGLE_PLATFORM_ANDROID) || defined(ANGLE_PLATFORM_LINUX)
4138 
4139     ANGLE_FEATURE_CONDITION(
4140         &mFeatures, supportsShaderStencilExport,
4141         ExtensionFound(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, deviceExtensionNames));
4142 
4143     ANGLE_FEATURE_CONDITION(
4144         &mFeatures, supportsRenderPassLoadStoreOpNone,
4145         ExtensionFound(VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME, deviceExtensionNames));
4146 
4147     ANGLE_FEATURE_CONDITION(&mFeatures, disallowMixedDepthStencilLoadOpNoneAndLoad,
4148                             isARM && armDriverVersion < ARMDriverVersion(38, 1, 0));
4149 
4150     ANGLE_FEATURE_CONDITION(
4151         &mFeatures, supportsRenderPassStoreOpNone,
4152         !mFeatures.supportsRenderPassLoadStoreOpNone.enabled &&
4153             ExtensionFound(VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME, deviceExtensionNames));
4154 
4155     ANGLE_FEATURE_CONDITION(&mFeatures, supportsDepthClipControl,
4156                             mDepthClipControlFeatures.depthClipControl == VK_TRUE);
4157 
4158     ANGLE_FEATURE_CONDITION(&mFeatures, supportsPrimitivesGeneratedQuery,
4159                             mPrimitivesGeneratedQueryFeatures.primitivesGeneratedQuery == VK_TRUE);
4160 
4161     ANGLE_FEATURE_CONDITION(
4162         &mFeatures, supportsPrimitiveTopologyListRestart,
4163         mPrimitiveTopologyListRestartFeatures.primitiveTopologyListRestart == VK_TRUE);
4164 
4165     ANGLE_FEATURE_CONDITION(
4166         &mFeatures, supportsBlendOperationAdvanced,
4167         ExtensionFound(VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME, deviceExtensionNames));
4168 
4169     ANGLE_FEATURE_CONDITION(&mFeatures, supportsTransformFeedbackExtension,
4170                             mTransformFeedbackFeatures.transformFeedback == VK_TRUE);
4171 
4172     ANGLE_FEATURE_CONDITION(&mFeatures, supportsGeometryStreamsCapability,
4173                             mTransformFeedbackFeatures.geometryStreams == VK_TRUE);
4174 
4175     ANGLE_FEATURE_CONDITION(&mFeatures, supportsIndexTypeUint8,
4176                             mIndexTypeUint8Features.indexTypeUint8 == VK_TRUE);
4177 
4178     ANGLE_FEATURE_CONDITION(&mFeatures, supportsDepthStencilResolve,
4179                             mFeatures.supportsRenderpass2.enabled &&
4180                                 mDepthStencilResolveProperties.supportedDepthResolveModes != 0);
4181 
4182     ANGLE_FEATURE_CONDITION(
4183         &mFeatures, supportsMultisampledRenderToSingleSampled,
4184         mFeatures.supportsRenderpass2.enabled && mFeatures.supportsDepthStencilResolve.enabled &&
4185             mMultisampledRenderToSingleSampledFeatures.multisampledRenderToSingleSampled ==
4186                 VK_TRUE);
4187 
4188     ANGLE_FEATURE_CONDITION(
4189         &mFeatures, supportsMultisampledRenderToSingleSampledGOOGLEX,
4190         !mFeatures.supportsMultisampledRenderToSingleSampled.enabled &&
4191             mFeatures.supportsRenderpass2.enabled &&
4192             mFeatures.supportsDepthStencilResolve.enabled &&
4193             mMultisampledRenderToSingleSampledFeaturesGOOGLEX.multisampledRenderToSingleSampled ==
4194                 VK_TRUE);
4195 
4196     ANGLE_FEATURE_CONDITION(&mFeatures, supportsImage2dViewOf3d,
4197                             mImage2dViewOf3dFeatures.image2DViewOf3D == VK_TRUE);
4198 
4199     // Note: sampler2DViewOf3D is only useful for supporting EGL_KHR_gl_texture_3D_image.  If the
4200     // VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT added to 3D images measurably hurts sampling
4201     // performance, it might be better to remove support for EGL_KHR_gl_texture_3D_image in favor of
4202     // faster 3D images.
4203     ANGLE_FEATURE_CONDITION(&mFeatures, supportsSampler2dViewOf3d,
4204                             mFeatures.supportsImage2dViewOf3d.enabled &&
4205                                 mImage2dViewOf3dFeatures.sampler2DViewOf3D == VK_TRUE);
4206 
4207     ANGLE_FEATURE_CONDITION(&mFeatures, supportsMultiview, mMultiviewFeatures.multiview == VK_TRUE);
4208 
4209     ANGLE_FEATURE_CONDITION(&mFeatures, emulateTransformFeedback,
4210                             (!mFeatures.supportsTransformFeedbackExtension.enabled &&
4211                              mPhysicalDeviceFeatures.vertexPipelineStoresAndAtomics == VK_TRUE));
4212 
4213     // TODO: http://anglebug.com/5927 - drop dependency on customBorderColorWithoutFormat.
4214     ANGLE_FEATURE_CONDITION(
4215         &mFeatures, supportsCustomBorderColor,
4216         mCustomBorderColorFeatures.customBorderColors == VK_TRUE &&
4217             mCustomBorderColorFeatures.customBorderColorWithoutFormat == VK_TRUE);
4218 
4219     ANGLE_FEATURE_CONDITION(&mFeatures, supportsMultiDrawIndirect,
4220                             mPhysicalDeviceFeatures.multiDrawIndirect == VK_TRUE);
4221 
4222     ANGLE_FEATURE_CONDITION(&mFeatures, perFrameWindowSizeQuery,
4223                             IsAndroid() || isIntel || (IsWindows() && isAMD) || IsFuchsia() ||
4224                                 isSamsung || displayVk->isWayland());
4225 
4226     ANGLE_FEATURE_CONDITION(&mFeatures, padBuffersToMaxVertexAttribStride, isAMD || isSamsung);
4227     mMaxVertexAttribStride = std::min(static_cast<uint32_t>(gl::limits::kMaxVertexAttribStride),
4228                                       mPhysicalDeviceProperties.limits.maxVertexInputBindingStride);
4229 
4230     ANGLE_FEATURE_CONDITION(&mFeatures, forceD16TexFilter, IsAndroid() && isQualcommProprietary);
4231 
4232     ANGLE_FEATURE_CONDITION(&mFeatures, disableFlippingBlitWithCommand,
4233                             IsAndroid() && isQualcommProprietary);
4234 
4235     // Allocation sanitization disabled by default because of a heavyweight implementation
4236     // that can cause OOM and timeouts.
4237     ANGLE_FEATURE_CONDITION(&mFeatures, allocateNonZeroMemory, false);
4238 
4239     // ARM does buffer copy on geometry pipeline, which may create a GPU pipeline bubble that
4240     // prevents vertex shader to overlap with fragment shader on job manager based architecture. For
4241     // now we always choose CPU to do copy on ARM job manager based GPU.
4242     ANGLE_FEATURE_CONDITION(&mFeatures, preferCPUForBufferSubData, isMaliJobManagerBasedGPU);
4243 
4244     // On android, we usually are GPU limited, we try to use CPU to do data copy when other
4245     // conditions are the same. Set to zero will use GPU to do copy. This is subject to further
4246     // tuning for each platform https://issuetracker.google.com/201826021
4247     mMaxCopyBytesUsingCPUWhenPreservingBufferData =
4248         IsAndroid() ? std::numeric_limits<uint32_t>::max() : 0;
4249 
4250     ANGLE_FEATURE_CONDITION(&mFeatures, persistentlyMappedBuffers, true);
4251 
4252     ANGLE_FEATURE_CONDITION(&mFeatures, logMemoryReportCallbacks, false);
4253     ANGLE_FEATURE_CONDITION(&mFeatures, logMemoryReportStats, false);
4254 
4255     ANGLE_FEATURE_CONDITION(
4256         &mFeatures, supportsExternalMemoryDmaBufAndModifiers,
4257         ExtensionFound(VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME, deviceExtensionNames) &&
4258             ExtensionFound(VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME, deviceExtensionNames));
4259 
4260     // Android pre-rotation support can be disabled.
4261     ANGLE_FEATURE_CONDITION(&mFeatures, enablePreRotateSurfaces,
4262                             IsAndroid() && supportsNegativeViewport);
4263 
4264     // http://anglebug.com/3078
4265     ANGLE_FEATURE_CONDITION(
4266         &mFeatures, enablePrecisionQualifiers,
4267         !(IsPixel2(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID) &&
4268           (mPhysicalDeviceProperties.driverVersion < kPixel2DriverWithRelaxedPrecision)) &&
4269             !IsPixel4(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID));
4270 
4271     // http://anglebug.com/7488
4272     ANGLE_FEATURE_CONDITION(&mFeatures, varyingsRequireMatchingPrecisionInSpirv, isPowerVR);
4273 
4274     // IMR devices are less sensitive to the src/dst stage masks in barriers, and behave more
4275     // efficiently when all barriers are aggregated, rather than individually and precisely
4276     // specified.
4277     ANGLE_FEATURE_CONDITION(&mFeatures, preferAggregateBarrierCalls, isImmediateModeRenderer);
4278 
4279     // For IMR devices, it's more efficient to ignore invalidate of framebuffer attachments with
4280     // emulated formats that have extra channels.  For TBR devices, the invalidate will be followed
4281     // by a clear to retain valid values in said extra channels.
4282     ANGLE_FEATURE_CONDITION(&mFeatures, preferSkippingInvalidateForEmulatedFormats,
4283                             isImmediateModeRenderer);
4284 
4285     // Currently disabled by default: http://anglebug.com/4324
4286     ANGLE_FEATURE_CONDITION(&mFeatures, asyncCommandQueue, false);
4287 
4288     ANGLE_FEATURE_CONDITION(&mFeatures, asyncCommandBufferReset, true);
4289 
4290     ANGLE_FEATURE_CONDITION(&mFeatures, supportsYUVSamplerConversion,
4291                             mSamplerYcbcrConversionFeatures.samplerYcbcrConversion != VK_FALSE);
4292 
4293     ANGLE_FEATURE_CONDITION(&mFeatures, supportsShaderFloat16,
4294                             mShaderFloat16Int8Features.shaderFloat16 == VK_TRUE);
4295 
4296     // Prefer driver uniforms over specialization constants in the following:
4297     //
4298     // - Older Qualcomm drivers where specialization constants severely degrade the performance of
4299     //   pipeline creation.  http://issuetracker.google.com/173636783
4300     // - ARM hardware
4301     // - Imagination hardware
4302     // - SwiftShader
4303     //
4304     ANGLE_FEATURE_CONDITION(
4305         &mFeatures, preferDriverUniformOverSpecConst,
4306         (isQualcommProprietary &&
4307          mPhysicalDeviceProperties.driverVersion < kPixel4DriverWithWorkingSpecConstSupport) ||
4308             isARM || isPowerVR || isSwiftShader);
4309 
4310     // The compute shader used to generate mipmaps needs -
4311     // 1. subgroup quad operations in compute shader stage.
4312     // 2. subgroup operations that can use extended types.
4313     // 3. 256-wide workgroup.
4314     //
4315     // Furthermore, VK_IMAGE_USAGE_STORAGE_BIT is detrimental to performance on many platforms, on
4316     // which this path is not enabled.  Platforms that are known to have better performance with
4317     // this path are:
4318     //
4319     // - AMD
4320     // - Nvidia
4321     // - Samsung
4322     //
4323     // Additionally, this path is disabled on buggy drivers:
4324     //
4325     // - AMD/Windows: Unfortunately the trybots use ancient AMD cards and drivers.
4326     const bool supportsSubgroupQuadOpsInComputeShader =
4327         (mSubgroupProperties.supportedStages & VK_SHADER_STAGE_COMPUTE_BIT) &&
4328         (mSubgroupProperties.supportedOperations & VK_SUBGROUP_FEATURE_QUAD_BIT);
4329 
4330     const uint32_t maxComputeWorkGroupInvocations =
4331         mPhysicalDeviceProperties.limits.maxComputeWorkGroupInvocations;
4332 
4333     ANGLE_FEATURE_CONDITION(&mFeatures, allowGenerateMipmapWithCompute,
4334                             supportsSubgroupQuadOpsInComputeShader &&
4335                                 mSubgroupExtendedTypesFeatures.shaderSubgroupExtendedTypes &&
4336                                 maxComputeWorkGroupInvocations >= 256 &&
4337                                 ((isAMD && !IsWindows()) || isNvidia || isSamsung));
4338 
4339     bool isAdreno540 = mPhysicalDeviceProperties.deviceID == angle::kDeviceID_Adreno540;
4340     ANGLE_FEATURE_CONDITION(&mFeatures, forceMaxUniformBufferSize16KB,
4341                             isQualcommProprietary && isAdreno540);
4342 
4343     ANGLE_FEATURE_CONDITION(
4344         &mFeatures, supportsImageFormatList,
4345         ExtensionFound(VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME, deviceExtensionNames));
4346 
4347     ANGLE_FEATURE_CONDITION(
4348         &mFeatures, supportsSamplerMirrorClampToEdge,
4349         ExtensionFound(VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME, deviceExtensionNames));
4350 
4351     // Emulation of GL_EXT_multisampled_render_to_texture is only really useful on tiling hardware,
4352     // but is exposed on any configuration deployed on Android, such as Samsung's AMD-based GPU.
4353     //
4354     // During testing, it was also discovered that emulation triggers bugs on some platforms:
4355     //
4356     // - Swiftshader:
4357     //   * Failure on mac: http://anglebug.com/4937
4358     //   * OOM: http://crbug.com/1263046
4359     // - Intel on windows: http://anglebug.com/5032
4360     // - AMD on windows: http://crbug.com/1132366
4361     //
4362     const bool supportsIndependentDepthStencilResolve =
4363         mFeatures.supportsDepthStencilResolve.enabled &&
4364         mDepthStencilResolveProperties.independentResolveNone == VK_TRUE;
4365     ANGLE_FEATURE_CONDITION(
4366         &mFeatures, enableMultisampledRenderToTexture,
4367         mFeatures.supportsMultisampledRenderToSingleSampled.enabled ||
4368             mFeatures.supportsMultisampledRenderToSingleSampledGOOGLEX.enabled ||
4369             (supportsIndependentDepthStencilResolve && (isTileBasedRenderer || isSamsung)));
4370 
4371     // Currently we enable cube map arrays based on the imageCubeArray Vk feature.
4372     // TODO: Check device caps for full cube map array support. http://anglebug.com/5143
4373     ANGLE_FEATURE_CONDITION(&mFeatures, supportsImageCubeArray,
4374                             mPhysicalDeviceFeatures.imageCubeArray == VK_TRUE);
4375 
4376     ANGLE_FEATURE_CONDITION(&mFeatures, supportsPipelineStatisticsQuery,
4377                             mPhysicalDeviceFeatures.pipelineStatisticsQuery == VK_TRUE);
4378 
4379     // Deferring the glFlush call causes a Manhattan 3.0 perf regression. Let the Qualcomm driver
4380     // opt out of this optimization.
4381     ANGLE_FEATURE_CONDITION(&mFeatures, deferFlushUntilEndRenderPass, !isQualcommProprietary);
4382 
4383     // Android mistakenly destroys the old swapchain when creating a new one.
4384     ANGLE_FEATURE_CONDITION(&mFeatures, waitIdleBeforeSwapchainRecreation, IsAndroid() && isARM);
4385 
4386     // vkCmdClearAttachments races with draw calls on Qualcomm hardware as observed on Pixel2 and
4387     // Pixel4.  https://issuetracker.google.com/issues/166809097
4388     ANGLE_FEATURE_CONDITION(&mFeatures, preferDrawClearOverVkCmdClearAttachments,
4389                             isQualcommProprietary);
4390 
4391     // r32f image emulation is done unconditionally so VK_FORMAT_FEATURE_STORAGE_*_ATOMIC_BIT is not
4392     // required.
4393     ANGLE_FEATURE_CONDITION(&mFeatures, emulateR32fImageAtomicExchange, true);
4394 
4395     // Negative viewports are exposed in the Maintenance1 extension and in core Vulkan 1.1+.
4396     ANGLE_FEATURE_CONDITION(&mFeatures, supportsNegativeViewport, supportsNegativeViewport);
4397 
4398     // Whether non-conformant configurations and extensions should be exposed. Always disable for
4399     // MESA Virtio-GPU Venus driver for production purpose.
4400     ANGLE_FEATURE_CONDITION(&mFeatures, exposeNonConformantExtensionsAndVersions,
4401                             kExposeNonConformantExtensionsAndVersions && !isVenus);
4402 
4403     ANGLE_FEATURE_CONDITION(
4404         &mFeatures, supportsMemoryBudget,
4405         ExtensionFound(VK_EXT_MEMORY_BUDGET_EXTENSION_NAME, deviceExtensionNames));
4406 
4407     // Disabled by default. Only enable it for experimental purpose, as this will cause various
4408     // tests to fail.
4409     ANGLE_FEATURE_CONDITION(&mFeatures, forceFragmentShaderPrecisionHighpToMediump, false);
4410 
4411     // Testing shows that on ARM GPU, doing implicit flush at framebuffer boundary improves
4412     // performance. Most app traces shows frame time reduced and manhattan 3.1 offscreen score
4413     // improves 7%. Disable for MESA Virtio-GPU Venus driver in virtualized environment where
4414     // batching is preferred.
4415     ANGLE_FEATURE_CONDITION(&mFeatures, preferSubmitAtFBOBoundary,
4416                             (isARM || isSwiftShader) && !isVenus);
4417 
4418     // In order to support immutable samplers tied to external formats, we need to overallocate
4419     // descriptor counts for such immutable samplers
4420     ANGLE_FEATURE_CONDITION(&mFeatures, useMultipleDescriptorsForExternalFormats, true);
4421 
4422     // http://anglebug.com/6651
4423     // When creating a surface with the format GL_RGB8, override the format to be GL_RGBA8, since
4424     // Android prevents creating swapchain images with VK_FORMAT_R8G8B8_UNORM.
4425     // Do this for all platforms, since few (none?) IHVs support 24-bit formats with their HW
4426     // natively anyway.
4427     ANGLE_FEATURE_CONDITION(&mFeatures, overrideSurfaceFormatRGB8ToRGBA8, true);
4428 
4429     // We set
4430     //
4431     // - VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT
4432     // - VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT
4433     //
4434     // when this feature is supported and there is framebuffer fetch.  But the
4435     // check for framebuffer fetch is not accurate enough and those bits can
4436     // have great impact on Qualcomm (it only affects the open source driver
4437     // because the proprietary driver does not expose the extension).  Let's
4438     // disable it on Qualcomm.
4439     //
4440     // https://issuetracker.google.com/issues/255837430
4441     ANGLE_FEATURE_CONDITION(
4442         &mFeatures, supportsRasterizationOrderAttachmentAccess,
4443         !isQualcomm &&
4444             mRasterizationOrderAttachmentAccessFeatures.rasterizationOrderColorAttachmentAccess ==
4445                 VK_TRUE);
4446 
4447     // The VK_EXT_surface_maintenance1 and VK_EXT_swapchain_maintenance1 extensions are used for a
4448     // variety of improvements:
4449     //
4450     // - Recycling present semaphores
4451     // - Avoiding swapchain recreation when present modes change
4452     // - Amortizing the cost of memory allocation for swapchain creation over multiple frames
4453     //
4454     ANGLE_FEATURE_CONDITION(&mFeatures, supportsSwapchainMaintenance1,
4455                             mSwapchainMaintenance1Features.swapchainMaintenance1 == VK_TRUE);
4456 
4457     // The VK_EXT_legacy_dithering extension enables dithering support without emulation
4458     // Disable the usage of VK_EXT_legacy_dithering on ARM until the driver bug
4459     // http://issuetracker.google.com/293136916, http://issuetracker.google.com/292282210 are fixed.
4460     ANGLE_FEATURE_CONDITION(&mFeatures, supportsLegacyDithering,
4461                             mDitheringFeatures.legacyDithering == VK_TRUE && !isARM);
4462 
4463     // Applications on Android have come to rely on hardware dithering, and visually regress without
4464     // it.  On desktop GPUs, OpenGL's dithering is a no-op.  The following setting mimics that
4465     // behavior.  Dithering is also currently not enabled on SwiftShader, but can be as needed
4466     // (which would require Chromium and Capture/Replay test expectations updates).
4467     ANGLE_FEATURE_CONDITION(&mFeatures, emulateDithering,
4468                             IsAndroid() && !mFeatures.supportsLegacyDithering.enabled);
4469 
4470     // http://anglebug.com/6872
4471     // On ARM hardware, framebuffer-fetch-like behavior on Vulkan is already coherent, so we can
4472     // expose the coherent version of the GL extension despite unofficial Vulkan support.
4473     ANGLE_FEATURE_CONDITION(
4474         &mFeatures, supportsShaderFramebufferFetch,
4475         (IsAndroid() && isARM) || mFeatures.supportsRasterizationOrderAttachmentAccess.enabled);
4476 
4477     // Important games are not checking supported extensions properly, and are confusing the
4478     // GL_EXT_shader_framebuffer_fetch_non_coherent as the GL_EXT_shader_framebuffer_fetch
4479     // extension.  Therefore, don't enable the extension on Arm and Qualcomm by default.
4480     // https://issuetracker.google.com/issues/186643966
4481     ANGLE_FEATURE_CONDITION(&mFeatures, supportsShaderFramebufferFetchNonCoherent,
4482                             (IsAndroid() && !(isARM || isQualcomm)) || isSwiftShader);
4483 
4484     // On tile-based renderers, breaking the render pass is costly.  Changing into and out of
4485     // framebuffer fetch causes the render pass to break so that the layout of the color attachments
4486     // can be adjusted.  On such hardware, the switch to framebuffer fetch mode is made permanent so
4487     // such render pass breaks don't happen.
4488     ANGLE_FEATURE_CONDITION(&mFeatures, permanentlySwitchToFramebufferFetchMode,
4489                             isTileBasedRenderer);
4490 
4491     // Support EGL_KHR_lock_surface3 extension.
4492     ANGLE_FEATURE_CONDITION(&mFeatures, supportsLockSurfaceExtension, IsAndroid());
4493 
4494     // http://anglebug.com/6878
4495     // Android needs swapbuffers to update image and present to display.
4496     ANGLE_FEATURE_CONDITION(&mFeatures, swapbuffersOnFlushOrFinishWithSingleBuffer, IsAndroid());
4497 
4498     // Workaround a Qualcomm imprecision with dithering
4499     ANGLE_FEATURE_CONDITION(&mFeatures, roundOutputAfterDithering, isQualcomm);
4500 
4501     // GL_KHR_blend_equation_advanced is emulated when the equivalent Vulkan extension is not
4502     // usable.  Additionally, the following platforms don't support INPUT_ATTACHMENT usage for the
4503     // swapchain, so they are excluded:
4504     //
4505     // - Intel
4506     //
4507     // The above platforms are not excluded if behind MESA Virtio-GPU Venus driver since WSI is
4508     // implemented with external memory there.
4509     //
4510     // Without VK_GOOGLE_surfaceless_query, there is no way to automatically deduce this support.
4511     ANGLE_FEATURE_CONDITION(
4512         &mFeatures, emulateAdvancedBlendEquations,
4513         !mFeatures.supportsBlendOperationAdvanced.enabled && (isVenus || !isIntel));
4514 
4515     // http://anglebug.com/6933
4516     // Android expects VkPresentRegionsKHR rectangles with a bottom-left origin, while spec
4517     // states they should have a top-left origin.
4518     ANGLE_FEATURE_CONDITION(&mFeatures, bottomLeftOriginPresentRegionRectangles, IsAndroid());
4519 
4520     // Use VMA for image suballocation.
4521     ANGLE_FEATURE_CONDITION(&mFeatures, useVmaForImageSuballocation, true);
4522 
4523     // Retain debug info in SPIR-V blob.
4524     ANGLE_FEATURE_CONDITION(&mFeatures, retainSPIRVDebugInfo, getEnableValidationLayers());
4525 
4526     // For discrete GPUs, most of device local memory is host invisible. We should not force the
4527     // host visible flag for them and result in allocation failure.
4528     ANGLE_FEATURE_CONDITION(
4529         &mFeatures, preferDeviceLocalMemoryHostVisible,
4530         canPreferDeviceLocalMemoryHostVisible(mPhysicalDeviceProperties.deviceType));
4531 
4532     bool dynamicStateWorks = true;
4533     if (isARM)
4534     {
4535         // Multiple dynamic state issues on ARM have been fixed.
4536         // http://issuetracker.google.com/285124778
4537         // http://issuetracker.google.com/285196249
4538         // http://issuetracker.google.com/286224923
4539         // http://issuetracker.google.com/287318431
4540 
4541         // Use it on drivers/devices known to work.
4542         if (isPixel)
4543         {
4544             // Pixel devices are working after r44
4545             dynamicStateWorks = armDriverVersion >= ARMDriverVersion(44, 0, 0);
4546         }
4547         else
4548         {
4549             // Others should work after r44p1
4550             dynamicStateWorks = armDriverVersion >= ARMDriverVersion(44, 1, 0);
4551         }
4552     }
4553 
4554     ANGLE_FEATURE_CONDITION(
4555         &mFeatures, supportsExtendedDynamicState,
4556         mExtendedDynamicStateFeatures.extendedDynamicState == VK_TRUE && dynamicStateWorks);
4557 
4558     ANGLE_FEATURE_CONDITION(&mFeatures, useVertexInputBindingStrideDynamicState,
4559                             mFeatures.supportsExtendedDynamicState.enabled && dynamicStateWorks);
4560     ANGLE_FEATURE_CONDITION(&mFeatures, useCullModeDynamicState,
4561                             mFeatures.supportsExtendedDynamicState.enabled && dynamicStateWorks);
4562     ANGLE_FEATURE_CONDITION(&mFeatures, useDepthCompareOpDynamicState,
4563                             mFeatures.supportsExtendedDynamicState.enabled);
4564     ANGLE_FEATURE_CONDITION(&mFeatures, useDepthTestEnableDynamicState,
4565                             mFeatures.supportsExtendedDynamicState.enabled);
4566     ANGLE_FEATURE_CONDITION(&mFeatures, useDepthWriteEnableDynamicState,
4567                             mFeatures.supportsExtendedDynamicState.enabled && dynamicStateWorks);
4568     ANGLE_FEATURE_CONDITION(&mFeatures, useFrontFaceDynamicState,
4569                             mFeatures.supportsExtendedDynamicState.enabled);
4570     ANGLE_FEATURE_CONDITION(&mFeatures, useStencilOpDynamicState,
4571                             mFeatures.supportsExtendedDynamicState.enabled);
4572     ANGLE_FEATURE_CONDITION(&mFeatures, useStencilTestEnableDynamicState,
4573                             mFeatures.supportsExtendedDynamicState.enabled);
4574 
4575     ANGLE_FEATURE_CONDITION(
4576         &mFeatures, supportsExtendedDynamicState2,
4577         mExtendedDynamicState2Features.extendedDynamicState2 == VK_TRUE && dynamicStateWorks);
4578 
4579     ANGLE_FEATURE_CONDITION(&mFeatures, usePrimitiveRestartEnableDynamicState,
4580                             mFeatures.supportsExtendedDynamicState2.enabled && dynamicStateWorks);
4581     ANGLE_FEATURE_CONDITION(&mFeatures, useRasterizerDiscardEnableDynamicState,
4582                             mFeatures.supportsExtendedDynamicState2.enabled);
4583     ANGLE_FEATURE_CONDITION(&mFeatures, useDepthBiasEnableDynamicState,
4584                             mFeatures.supportsExtendedDynamicState2.enabled);
4585 
4586     // Disabled on Intel/Mesa due to driver bug (crbug.com/1379201).  This bug is fixed since Mesa
4587     // 22.2.0.
4588     const bool isMesaLessThan22_2 =
4589         mesaVersion.major < 22 || (mesaVersion.major == 22 && mesaVersion.minor < 2);
4590 
4591     ANGLE_FEATURE_CONDITION(
4592         &mFeatures, supportsLogicOpDynamicState,
4593         mFeatures.supportsExtendedDynamicState2.enabled &&
4594             mExtendedDynamicState2Features.extendedDynamicState2LogicOp == VK_TRUE &&
4595             !(IsLinux() && isIntel && isMesaLessThan22_2) && !(IsAndroid() && isGalaxyS23));
4596 
4597     // Support GL_QCOM_shading_rate extension
4598     ANGLE_FEATURE_CONDITION(&mFeatures, supportsFragmentShadingRate,
4599                             canSupportFragmentShadingRate(deviceExtensionNames));
4600 
4601     // We can use the interlock to support GL_ANGLE_shader_pixel_local_storage_coherent.
4602     ANGLE_FEATURE_CONDITION(
4603         &mFeatures, supportsFragmentShaderPixelInterlock,
4604         mFragmentShaderInterlockFeatures.fragmentShaderPixelInterlock == VK_TRUE);
4605 
4606     // Samsung Vulkan driver crashes in vkCmdClearAttachments() when imageless Framebuffer
4607     // is used to begin Secondary Command Buffer before the corresponding vkCmdBeginRenderPass().
4608     ANGLE_FEATURE_CONDITION(&mFeatures, supportsImagelessFramebuffer,
4609                             mImagelessFramebufferFeatures.imagelessFramebuffer == VK_TRUE &&
4610                                 (vk::RenderPassCommandBuffer::ExecutesInline() || !isSamsung));
4611 
4612     // The VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT behavior is used by
4613     // ANGLE, which requires the robustBufferAccess feature to be available.
4614     ANGLE_FEATURE_CONDITION(&mFeatures, supportsPipelineRobustness,
4615                             mPipelineRobustnessFeatures.pipelineRobustness == VK_TRUE &&
4616                                 mPhysicalDeviceFeatures.robustBufferAccess);
4617 
4618     ANGLE_FEATURE_CONDITION(&mFeatures, supportsPipelineProtectedAccess,
4619                             mPipelineProtectedAccessFeatures.pipelineProtectedAccess == VK_TRUE &&
4620                                 mProtectedMemoryFeatures.protectedMemory == VK_TRUE);
4621 
4622     // VK_EXT_graphics_pipeline_library is available on NVIDIA drivers earlier
4623     // than version 531, but there are transient visual glitches with rendering
4624     // on those earlier versions.
4625     ANGLE_FEATURE_CONDITION(&mFeatures, supportsGraphicsPipelineLibrary,
4626                             mGraphicsPipelineLibraryFeatures.graphicsPipelineLibrary == VK_TRUE &&
4627                                 (!isNvidia || nvidiaVersion.major >= 531));
4628 
4629     // The following drivers are known to key the pipeline cache blobs with vertex input and
4630     // fragment output state, causing draw-time pipeline creation to miss the cache regardless of
4631     // warmup:
4632     //
4633     // - ARM drivers
4634     // - Imagination drivers
4635     //
4636     // The following drivers are instead known to _not_ include said state, and hit the cache at
4637     // draw time.
4638     //
4639     // - SwiftShader
4640     // - Open source Qualcomm drivers
4641     //
4642     // The situation is unknown for other drivers.
4643     //
4644     // Additionally, numerous tests that previously never created a Vulkan pipeline fail or crash on
4645     // proprietary Qualcomm drivers when they do during cache warm up.  On Intel/Linux, one trace
4646     // shows flakiness with this.
4647     const bool libraryBlobsAreReusedByMonolithicPipelines = !isARM && !isPowerVR;
4648     ANGLE_FEATURE_CONDITION(&mFeatures, warmUpPipelineCacheAtLink,
4649                             libraryBlobsAreReusedByMonolithicPipelines && !isQualcommProprietary &&
4650                                 !(IsLinux() && isIntel) && !(IsChromeOS() && isSwiftShader));
4651 
4652     // On SwiftShader, no data is retrieved from the pipeline cache, so there is no reason to
4653     // serialize it or put it in the blob cache.
4654     // For Windows Nvidia Vulkan driver older than 520, Vulkan pipeline cache will only generate one
4655     // single huge cache for one process shared by all graphics piplines in the same process, which
4656     // can be huge.
4657     const bool nvVersionLessThan520 = isNvidia && (nvidiaVersion.major < 520u);
4658     ANGLE_FEATURE_CONDITION(&mFeatures, hasEffectivePipelineCacheSerialization,
4659                             !isSwiftShader && !nvVersionLessThan520);
4660 
4661     // When the driver sets graphicsPipelineLibraryFastLinking, it means that monolithic pipelines
4662     // are just a bundle of the libraries, and that there is no benefit in creating monolithic
4663     // pipelines.
4664     //
4665     // Note: for testing purposes, this is enabled on SwiftShader despite the fact that it doesn't
4666     // need it.  This should be undone once there is at least one bot that supports
4667     // VK_EXT_graphics_pipeline_library without graphicsPipelineLibraryFastLinking
4668     ANGLE_FEATURE_CONDITION(
4669         &mFeatures, preferMonolithicPipelinesOverLibraries,
4670         !mGraphicsPipelineLibraryProperties.graphicsPipelineLibraryFastLinking || isSwiftShader);
4671 
4672     // Whether the pipeline caches should merge into the global pipeline cache.  This should only be
4673     // enabled on platforms if:
4674     //
4675     // - VK_EXT_graphics_pipeline_library is not supported.  In that case, only the program's cache
4676     //   used during warm up is merged into the global cache for later monolithic pipeline creation.
4677     // - VK_EXT_graphics_pipeline_library is supported, monolithic pipelines are preferred, and the
4678     //   driver is able to reuse blobs from partial pipelines when creating monolithic pipelines.
4679     ANGLE_FEATURE_CONDITION(&mFeatures, mergeProgramPipelineCachesToGlobalCache,
4680                             !mFeatures.supportsGraphicsPipelineLibrary.enabled ||
4681                                 (mFeatures.preferMonolithicPipelinesOverLibraries.enabled &&
4682                                  libraryBlobsAreReusedByMonolithicPipelines));
4683 
4684     ANGLE_FEATURE_CONDITION(&mFeatures, enableAsyncPipelineCacheCompression, true);
4685 
4686     // Sync monolithic pipelines to the blob cache occasionally on platforms that would benefit from
4687     // it:
4688     //
4689     // - VK_EXT_graphics_pipeline_library is not supported, and the program cache is not warmed up:
4690     //   If the pipeline cache is being warmed up at link time, the blobs corresponding to each
4691     //   program is individually retrieved and stored in the blob cache already.
4692     // - VK_EXT_graphics_pipeline_library is supported, but monolithic pipelines are still prefered,
4693     //   and the cost of syncing the large cache is acceptable.
4694     //
4695     // Otherwise monolithic pipelines are recreated on every run.
4696     const bool hasNoPipelineWarmUp = !mFeatures.supportsGraphicsPipelineLibrary.enabled &&
4697                                      !mFeatures.warmUpPipelineCacheAtLink.enabled;
4698     const bool canSyncLargeMonolithicCache =
4699         mFeatures.supportsGraphicsPipelineLibrary.enabled &&
4700         mFeatures.preferMonolithicPipelinesOverLibraries.enabled &&
4701         (!IsAndroid() || mFeatures.enableAsyncPipelineCacheCompression.enabled);
4702     ANGLE_FEATURE_CONDITION(&mFeatures, syncMonolithicPipelinesToBlobCache,
4703                             mFeatures.hasEffectivePipelineCacheSerialization.enabled &&
4704                                 (hasNoPipelineWarmUp || canSyncLargeMonolithicCache));
4705 
4706     // On ARM, dynamic state for stencil write mask doesn't work correctly in the presence of
4707     // discard or alpha to coverage, if the static state provided when creating the pipeline has a
4708     // value of 0.
4709     ANGLE_FEATURE_CONDITION(&mFeatures, useNonZeroStencilWriteMaskStaticState,
4710                             isARM && armDriverVersion < ARMDriverVersion(43, 0, 0));
4711 
4712     // On ARM, per-sample shading is not enabled despite the presence of a Sample decoration.  As a
4713     // workaround, per-sample shading is inferred by ANGLE and explicitly enabled by the API.
4714     ANGLE_FEATURE_CONDITION(&mFeatures, explicitlyEnablePerSampleShading, isARM);
4715 
4716     ANGLE_FEATURE_CONDITION(&mFeatures, explicitlyCastMediumpFloatTo16Bit, isARM && !isVenus);
4717 
4718     // Force to create swapchain with continuous refresh on shared present. Disabled by default.
4719     // Only enable it on integrations without EGL_FRONT_BUFFER_AUTO_REFRESH_ANDROID passthrough.
4720     ANGLE_FEATURE_CONDITION(&mFeatures, forceContinuousRefreshOnSharedPresent, false);
4721 
4722     // Enable setting frame timestamp surface attribute on Android platform.
4723     // Frame timestamp is enabled by calling into "vkGetPastPresentationTimingGOOGLE"
4724     // which, on Android platforms, makes the necessary ANativeWindow API calls.
4725     ANGLE_FEATURE_CONDITION(&mFeatures, supportsTimestampSurfaceAttribute,
4726                             IsAndroid() && ExtensionFound(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME,
4727                                                           deviceExtensionNames));
4728 
4729     // 1) host vk driver does not natively support ETC format.
4730     // 2) host vk driver supports BC format.
4731     // 3) host vk driver supports subgroup instructions: clustered, shuffle.
4732     //    * This limitation can be removed if necessary.
4733     // 4) host vk driver has maxTexelBufferSize >= 64M.
4734     //    * Usually on desktop device the limit is more than 128M. we may switch to dynamic
4735     //    decide cpu or gpu upload texture based on texture size.
4736     constexpr VkSubgroupFeatureFlags kRequiredSubgroupOp =
4737         VK_SUBGROUP_FEATURE_SHUFFLE_BIT | VK_SUBGROUP_FEATURE_CLUSTERED_BIT;
4738     static constexpr bool kSupportTranscodeEtcToBc = false;
4739     static constexpr uint32_t kMaxTexelBufferSize  = 64 * 1024 * 1024;
4740     const VkPhysicalDeviceLimits &limitsVk         = mPhysicalDeviceProperties.limits;
4741     ANGLE_FEATURE_CONDITION(&mFeatures, supportsComputeTranscodeEtcToBc,
4742                             !mPhysicalDeviceFeatures.textureCompressionETC2 &&
4743                                 kSupportTranscodeEtcToBc &&
4744                                 (mSubgroupProperties.supportedOperations & kRequiredSubgroupOp) ==
4745                                     kRequiredSubgroupOp &&
4746                                 (limitsVk.maxTexelBufferElements >= kMaxTexelBufferSize));
4747 
4748     // http://anglebug.com/7308
4749     // Flushing mutable textures causes flakes in perf tests using Windows/Intel GPU. Failures are
4750     // due to lost context/device.
4751     // http://b/278600575
4752     // Flushing mutable texture is disabled for discrete GPUs to mitigate possible VRAM OOM.
4753     ANGLE_FEATURE_CONDITION(
4754         &mFeatures, mutableMipmapTextureUpload,
4755         canPreferDeviceLocalMemoryHostVisible(mPhysicalDeviceProperties.deviceType));
4756 
4757     // Allow passthrough of EGL colorspace attributes on Android platform and for vendors that
4758     // are known to support wide color gamut.
4759     ANGLE_FEATURE_CONDITION(&mFeatures, eglColorspaceAttributePassthrough,
4760                             IsAndroid() && isSamsung);
4761 
4762     // GBM does not have a VkSurface hence it does not support presentation through a Vulkan queue.
4763     ANGLE_FEATURE_CONDITION(&mFeatures, supportsPresentation, !displayVk->isGBM());
4764 
4765     // For tiled renderer, the renderpass query result may not available until the entire renderpass
4766     // is completed. This may cause a bubble in the application thread waiting result to be
4767     // available. When this feature flag is enabled, we will issue an immediate flush when we detect
4768     // there is switch from query enabled draw to query disabled draw. Since most apps uses bunch of
4769     // query back to back, this should only introduce one extra flush per frame.
4770     // https://issuetracker.google.com/250706693
4771     ANGLE_FEATURE_CONDITION(&mFeatures, preferSubmitOnAnySamplesPassedQueryEnd,
4772                             isTileBasedRenderer);
4773 
4774     // ARM driver appears having a bug that if we did not wait for submission to complete, but call
4775     // vkGetQueryPoolResults(VK_QUERY_RESULT_WAIT_BIT), it may result VK_NOT_READY.
4776     // https://issuetracker.google.com/253522366
4777     //
4778     // Workaround for nvidia earlier version driver which appears having a bug that On older nvidia
4779     // driver, vkGetQueryPoolResult() with VK_QUERY_RESULT_WAIT_BIT may result in incorrect result.
4780     // In that case we force into CPU wait for submission to complete. http://anglebug.com/6692
4781     ANGLE_FEATURE_CONDITION(&mFeatures, forceWaitForSubmissionToCompleteForQueryResult,
4782                             isARM || (isNvidia && nvidiaVersion.major < 470u));
4783 
4784     // Some ARM drivers may not free memory in "vkFreeCommandBuffers()" without
4785     // VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT flag.
4786     ANGLE_FEATURE_CONDITION(&mFeatures, useResetCommandBufferBitForSecondaryPools, isARM);
4787 
4788     // Required to pass android.media.cts.DecodeAccuracyTest with MESA Virtio-GPU Venus driver in
4789     // virtualized environment. https://issuetracker.google.com/246378938
4790     ANGLE_FEATURE_CONDITION(&mFeatures, preferLinearFilterForYUV, isVenus);
4791 
4792     // Intel mesa drivers need depthBiasConstantFactor to be doubled to align with GL.
4793     ANGLE_FEATURE_CONDITION(&mFeatures, doubleDepthBiasConstantFactor, isIntel && !IsWindows());
4794 
4795     // Required to pass android.media.codec.cts.EncodeDecodeTest with MESA Virtio-GPU Venus driver
4796     // in virtualized environment. https://issuetracker.google.com/246218584
4797     ANGLE_FEATURE_CONDITION(&mFeatures, mapUnspecifiedColorSpaceToPassThrough, isVenus);
4798 
4799     ANGLE_FEATURE_CONDITION(&mFeatures, enablePipelineCacheDataCompression, true);
4800 
4801     ANGLE_FEATURE_CONDITION(&mFeatures, supportsTimelineSemaphore,
4802                             mTimelineSemaphoreFeatures.timelineSemaphore == VK_TRUE);
4803 
4804     ApplyFeatureOverrides(&mFeatures, displayVk->getState());
4805 
4806     // Disable memory report feature overrides if extension is not supported.
4807     if ((mFeatures.logMemoryReportCallbacks.enabled || mFeatures.logMemoryReportStats.enabled) &&
4808         !mMemoryReportFeatures.deviceMemoryReport)
4809     {
4810         WARN() << "Disabling the following feature(s) because driver does not support "
4811                   "VK_EXT_device_memory_report extension:";
4812         if (getFeatures().logMemoryReportStats.enabled)
4813         {
4814             WARN() << "\tlogMemoryReportStats";
4815             ANGLE_FEATURE_CONDITION(&mFeatures, logMemoryReportStats, false);
4816         }
4817         if (getFeatures().logMemoryReportCallbacks.enabled)
4818         {
4819             WARN() << "\tlogMemoryReportCallbacks";
4820             ANGLE_FEATURE_CONDITION(&mFeatures, logMemoryReportCallbacks, false);
4821         }
4822     }
4823 }
4824 
appBasedFeatureOverrides(DisplayVk * display,const vk::ExtensionNameList & extensions)4825 void RendererVk::appBasedFeatureOverrides(DisplayVk *display,
4826                                           const vk::ExtensionNameList &extensions)
4827 {
4828     // NOOP for now.
4829 }
4830 
initPipelineCache(DisplayVk * display,vk::PipelineCache * pipelineCache,bool * success)4831 angle::Result RendererVk::initPipelineCache(DisplayVk *display,
4832                                             vk::PipelineCache *pipelineCache,
4833                                             bool *success)
4834 {
4835     angle::MemoryBuffer initialData;
4836     ANGLE_TRY(
4837         GetAndDecompressPipelineCacheVk(mPhysicalDeviceProperties, display, &initialData, success));
4838 
4839     VkPipelineCacheCreateInfo pipelineCacheCreateInfo = {};
4840 
4841     pipelineCacheCreateInfo.sType           = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
4842     pipelineCacheCreateInfo.flags           = 0;
4843     pipelineCacheCreateInfo.initialDataSize = *success ? initialData.size() : 0;
4844     pipelineCacheCreateInfo.pInitialData    = *success ? initialData.data() : nullptr;
4845 
4846     if (display->getRenderer()->getFeatures().supportsPipelineCreationCacheControl.enabled)
4847     {
4848         pipelineCacheCreateInfo.flags |= VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT;
4849     }
4850 
4851     ANGLE_VK_TRY(display, pipelineCache->init(mDevice, pipelineCacheCreateInfo));
4852 
4853     return angle::Result::Continue;
4854 }
4855 
// Returns synchronized access to the renderer-wide pipeline cache, lazily populating it from the
// blob cache on first use.  Thread-safe: all access goes through mPipelineCacheMutex.
angle::Result RendererVk::getPipelineCache(vk::PipelineCacheAccess *pipelineCacheOut)
{
    DisplayVk *displayVk = vk::GetImpl(mDisplay);

    // Note that ANGLE externally synchronizes the pipeline cache, and uses
    // VK_EXT_pipeline_creation_cache_control (where available) to disable internal synchronization.
    std::unique_lock<std::mutex> lock(mPipelineCacheMutex);

    if (!mPipelineCacheInitialized)
    {
        // We should now recreate the pipeline cache with the blob cache pipeline data.
        vk::PipelineCache pCache;
        bool loadedFromBlobCache = false;
        ANGLE_TRY(initPipelineCache(displayVk, &pCache, &loadedFromBlobCache));
        if (loadedFromBlobCache)
        {
            // Merge the newly created pipeline cache into the existing one.
            mPipelineCache.merge(mDevice, 1, pCache.ptr());

            // Record the size at this point so later syncs only happen once the cache has grown.
            ANGLE_TRY(getPipelineCacheSize(displayVk, &mPipelineCacheSizeAtLastSync));
        }

        // The temporary cache is destroyed whether or not it was merged; mPipelineCache is the
        // single long-lived cache from here on.
        mPipelineCacheInitialized = true;
        pCache.destroy(mDevice);
    }

    pipelineCacheOut->init(&mPipelineCache, &mPipelineCacheMutex);
    return angle::Result::Continue;
}
4885 
mergeIntoPipelineCache(const vk::PipelineCache & pipelineCache)4886 angle::Result RendererVk::mergeIntoPipelineCache(const vk::PipelineCache &pipelineCache)
4887 {
4888     vk::PipelineCacheAccess globalCache;
4889     ANGLE_TRY(getPipelineCache(&globalCache));
4890 
4891     globalCache.merge(this, pipelineCache);
4892 
4893     return angle::Result::Continue;
4894 }
4895 
// Returns the native GL capabilities, computing them lazily on first use.
const gl::Caps &RendererVk::getNativeCaps() const
{
    ensureCapsInitialized();
    return mNativeCaps;
}
4901 
// Returns the per-format native texture capabilities, computing them lazily on first use.
const gl::TextureCapsMap &RendererVk::getNativeTextureCaps() const
{
    ensureCapsInitialized();
    return mNativeTextureCaps;
}
4907 
// Returns the set of GL extensions the native backend exposes, computing them lazily on first use.
const gl::Extensions &RendererVk::getNativeExtensions() const
{
    ensureCapsInitialized();
    return mNativeExtensions;
}
4913 
// Returns the native implementation limitations, computing them lazily on first use.
const gl::Limitations &RendererVk::getNativeLimitations() const
{
    ensureCapsInitialized();
    return mNativeLimitations;
}
4919 
// Returns the pixel local storage compile options.  Unlike the other caps getters, this does not
// require ensureCapsInitialized().
const ShPixelLocalStorageOptions &RendererVk::getNativePixelLocalStorageOptions() const
{
    return mNativePLSOptions;
}
4924 
initializeFrontendFeatures(angle::FrontendFeatures * features) const4925 void RendererVk::initializeFrontendFeatures(angle::FrontendFeatures *features) const
4926 {
4927     const bool isSwiftShader =
4928         IsSwiftshader(mPhysicalDeviceProperties.vendorID, mPhysicalDeviceProperties.deviceID);
4929 
4930     // Hopefully-temporary work-around for a crash on SwiftShader.  An Android process is turning
4931     // off GL error checking, and then asking ANGLE to write past the end of a buffer.
4932     // https://issuetracker.google.com/issues/220069903
4933     ANGLE_FEATURE_CONDITION(features, forceGlErrorChecking, (IsAndroid() && isSwiftShader));
4934 
4935     ANGLE_FEATURE_CONDITION(features, cacheCompiledShader, true);
4936 }
4937 
getPipelineCacheSize(DisplayVk * displayVk,size_t * pipelineCacheSizeOut)4938 angle::Result RendererVk::getPipelineCacheSize(DisplayVk *displayVk, size_t *pipelineCacheSizeOut)
4939 {
4940     VkResult result = mPipelineCache.getCacheData(mDevice, pipelineCacheSizeOut, nullptr);
4941     ANGLE_VK_TRY(displayVk, result);
4942 
4943     return angle::Result::Continue;
4944 }
4945 
// Serializes the pipeline cache into the blob cache.  Gated on the
// syncMonolithicPipelinesToBlobCache feature, throttled by a countdown
// (kPipelineCacheVkUpdatePeriod), and skipped when the cache has not grown since the last sync.
angle::Result RendererVk::syncPipelineCacheVk(DisplayVk *displayVk, const gl::Context *context)
{
    ASSERT(mPipelineCache.valid());

    if (!mFeatures.syncMonolithicPipelinesToBlobCache.enabled)
    {
        return angle::Result::Continue;
    }

    // Throttle: only attempt a sync once every kPipelineCacheVkUpdatePeriod calls.
    if (--mPipelineCacheVkUpdateTimeout > 0)
    {
        return angle::Result::Continue;
    }

    mPipelineCacheVkUpdateTimeout = kPipelineCacheVkUpdatePeriod;

    // Skip if the cache has not grown since the last serialization.
    size_t pipelineCacheSize = 0;
    ANGLE_TRY(getPipelineCacheSize(displayVk, &pipelineCacheSize));
    if (pipelineCacheSize <= mPipelineCacheSizeAtLastSync)
    {
        return angle::Result::Continue;
    }
    mPipelineCacheSizeAtLastSync = pipelineCacheSize;

    // Make sure we will receive enough data to hold the pipeline cache header
    // Table 7. Layout for pipeline cache header version VK_PIPELINE_CACHE_HEADER_VERSION_ONE
    const size_t kPipelineCacheHeaderSize = 16 + VK_UUID_SIZE;
    if (pipelineCacheSize < kPipelineCacheHeaderSize)
    {
        // No pipeline cache data to read, so return
        return angle::Result::Continue;
    }

    ContextVk *contextVk = vk::GetImpl(context);

    // Use worker thread pool to complete compression.
    // If the last task hasn't been finished, skip the syncing.
    if (mCompressEvent && !mCompressEvent->isReady())
    {
        ANGLE_PERF_WARNING(contextVk->getDebug(), GL_DEBUG_SEVERITY_LOW,
                           "Skip syncing pipeline cache data when the last task is not ready.");
        return angle::Result::Continue;
    }

    std::vector<uint8_t> pipelineCacheData(pipelineCacheSize);

    // Keep the originally requested size so a VK_INCOMPLETE result can be reported with both
    // values below.
    size_t oldPipelineCacheSize = pipelineCacheSize;
    VkResult result =
        mPipelineCache.getCacheData(mDevice, &pipelineCacheSize, pipelineCacheData.data());
    // We don't need all of the cache data, so just make sure we at least got the header
    // Vulkan Spec 9.6. Pipeline Cache
    // https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/chap9.html#pipelines-cache
    // If pDataSize is less than what is necessary to store this header, nothing will be written to
    // pData and zero will be written to pDataSize.
    // Any data written to pData is valid and can be provided as the pInitialData member of the
    // VkPipelineCacheCreateInfo structure passed to vkCreatePipelineCache.
    if (ANGLE_UNLIKELY(pipelineCacheSize < kPipelineCacheHeaderSize))
    {
        WARN() << "Not enough pipeline cache data read.";
        return angle::Result::Continue;
    }
    else if (ANGLE_UNLIKELY(result == VK_INCOMPLETE))
    {
        // VK_INCOMPLETE is not treated as a failure: per the spec (quoted above), the data that
        // was written is still valid.
        WARN() << "Received VK_INCOMPLETE: Old: " << oldPipelineCacheSize
               << ", New: " << pipelineCacheSize;
    }
    else
    {
        ANGLE_VK_TRY(displayVk, result);
    }

    // If vkGetPipelineCacheData ends up writing fewer bytes than requested, zero out the rest of
    // the buffer to avoid leaking garbage memory.
    ASSERT(pipelineCacheSize <= pipelineCacheData.size());
    if (pipelineCacheSize < pipelineCacheData.size())
    {
        memset(pipelineCacheData.data() + pipelineCacheSize, 0,
               pipelineCacheData.size() - pipelineCacheSize);
    }

    if (mFeatures.enableAsyncPipelineCacheCompression.enabled)
    {
        // zlib compression ratio normally ranges from 2:1 to 5:1. Set kMaxTotalSize to 64M to
        // ensure the size can fit into the 32MB blob cache limit on supported platforms.
        constexpr size_t kMaxTotalSize = 64 * 1024 * 1024;

        // Create task to compress.
        auto compressAndStorePipelineCacheTask =
            std::make_shared<CompressAndStorePipelineCacheTask>(
                displayVk, contextVk, std::move(pipelineCacheData), kMaxTotalSize);
        mCompressEvent = std::make_shared<WaitableCompressEventImpl>(
            context->getWorkerThreadPool()->postWorkerTask(compressAndStorePipelineCacheTask),
            compressAndStorePipelineCacheTask);
    }
    else
    {
        // If enableAsyncPipelineCacheCompression is disabled, to avoid the risk, set kMaxTotalSize
        // to 64k.
        constexpr size_t kMaxTotalSize = 64 * 1024;
        CompressAndStorePipelineCacheVk(mPhysicalDeviceProperties, displayVk, contextVk,
                                        pipelineCacheData, kMaxTotalSize);
    }

    return angle::Result::Continue;
}
5051 
5052 // These functions look at the mandatory format for support, and fallback to querying the device (if
5053 // necessary) to test the availability of the bits.
// True iff all of featureBits are supported for linear-tiled images of the given format.
bool RendererVk::hasLinearImageFormatFeatureBits(angle::FormatID formatID,
                                                 const VkFormatFeatureFlags featureBits) const
{
    return hasFormatFeatureBits<&VkFormatProperties::linearTilingFeatures>(formatID, featureBits);
}
5059 
// Returns the subset of featureBits supported for linear-tiled images of the given format.
VkFormatFeatureFlags RendererVk::getLinearImageFormatFeatureBits(
    angle::FormatID formatID,
    const VkFormatFeatureFlags featureBits) const
{
    return getFormatFeatureBits<&VkFormatProperties::linearTilingFeatures>(formatID, featureBits);
}
5066 
// Returns the subset of featureBits supported for optimal-tiled images of the given format.
VkFormatFeatureFlags RendererVk::getImageFormatFeatureBits(
    angle::FormatID formatID,
    const VkFormatFeatureFlags featureBits) const
{
    return getFormatFeatureBits<&VkFormatProperties::optimalTilingFeatures>(formatID, featureBits);
}
5073 
// True iff all of featureBits are supported for optimal-tiled images of the given format.
bool RendererVk::hasImageFormatFeatureBits(angle::FormatID formatID,
                                           const VkFormatFeatureFlags featureBits) const
{
    return hasFormatFeatureBits<&VkFormatProperties::optimalTilingFeatures>(formatID, featureBits);
}
5079 
// True iff all of featureBits are supported for buffer usage of the given format.
bool RendererVk::hasBufferFormatFeatureBits(angle::FormatID formatID,
                                            const VkFormatFeatureFlags featureBits) const
{
    return hasFormatFeatureBits<&VkFormatProperties::bufferFeatures>(formatID, featureBits);
}
5085 
outputVmaStatString()5086 void RendererVk::outputVmaStatString()
5087 {
5088     // Output the VMA stats string
5089     // This JSON string can be passed to VmaDumpVis.py to generate a visualization of the
5090     // allocations the VMA has performed.
5091     char *statsString;
5092     mAllocator.buildStatsString(&statsString, true);
5093     INFO() << std::endl << statsString << std::endl;
5094     mAllocator.freeStatsString(statsString);
5095 }
5096 
// Submits a one-off primary command buffer, optionally waiting on a semaphore.  A temporary queue
// serial index is allocated for the submission and released when |index| goes out of scope; the
// resulting serial is returned through |queueSerialOut|.  Ownership of |primary| is transferred to
// the per-protection-type one-off command pool for deferred recycling.
angle::Result RendererVk::queueSubmitOneOff(vk::Context *context,
                                            vk::PrimaryCommandBuffer &&primary,
                                            vk::ProtectionType protectionType,
                                            egl::ContextPriority priority,
                                            VkSemaphore waitSemaphore,
                                            VkPipelineStageFlags waitSemaphoreStageMasks,
                                            vk::SubmitPolicy submitPolicy,
                                            QueueSerial *queueSerialOut)
{
    ANGLE_TRACE_EVENT0("gpu.angle", "RendererVk::queueSubmitOneOff");
    // Allocate a one off SerialIndex and generate a QueueSerial and then use it and release the
    // index.
    vk::ScopedQueueSerialIndex index;
    ANGLE_TRY(allocateScopedQueueSerialIndex(&index));
    QueueSerial submitQueueSerial(index.get(), generateQueueSerial(index.get()));

    // Route the submission through the async command processor when enabled; otherwise submit
    // directly on the command queue.
    if (isAsyncCommandQueueEnabled())
    {
        ANGLE_TRY(mCommandProcessor.enqueueSubmitOneOffCommands(
            context, protectionType, priority, primary.getHandle(), waitSemaphore,
            waitSemaphoreStageMasks, submitPolicy, submitQueueSerial));
    }
    else
    {
        ANGLE_TRY(mCommandQueue.queueSubmitOneOff(
            context, protectionType, priority, primary.getHandle(), waitSemaphore,
            waitSemaphoreStageMasks, submitPolicy, submitQueueSerial));
    }

    *queueSerialOut = submitQueueSerial;
    // |primary| may be invalid (e.g. semaphore-only submissions); only hand valid buffers back to
    // the pool, tagged with the serial so they are recycled once the GPU is done.
    if (primary.valid())
    {
        mOneOffCommandPoolMap[protectionType].releaseCommandBuffer(submitQueueSerial,
                                                                   std::move(primary));
    }

    ANGLE_TRY(mCommandQueue.postSubmitCheck(context));

    return angle::Result::Continue;
}
5137 
queueSubmitWaitSemaphore(vk::Context * context,egl::ContextPriority priority,const vk::Semaphore & waitSemaphore,VkPipelineStageFlags waitSemaphoreStageMasks,QueueSerial submitQueueSerial)5138 angle::Result RendererVk::queueSubmitWaitSemaphore(vk::Context *context,
5139                                                    egl::ContextPriority priority,
5140                                                    const vk::Semaphore &waitSemaphore,
5141                                                    VkPipelineStageFlags waitSemaphoreStageMasks,
5142                                                    QueueSerial submitQueueSerial)
5143 {
5144     if (isAsyncCommandQueueEnabled())
5145     {
5146         ANGLE_TRY(mCommandProcessor.enqueueSubmitOneOffCommands(
5147             context, vk::ProtectionType::Unprotected, priority, VK_NULL_HANDLE,
5148             waitSemaphore.getHandle(), waitSemaphoreStageMasks, vk::SubmitPolicy::AllowDeferred,
5149             submitQueueSerial));
5150     }
5151     else
5152     {
5153         ANGLE_TRY(mCommandQueue.queueSubmitOneOff(
5154             context, vk::ProtectionType::Unprotected, priority, VK_NULL_HANDLE,
5155             waitSemaphore.getHandle(), waitSemaphoreStageMasks, vk::SubmitPolicy::AllowDeferred,
5156             submitQueueSerial));
5157     }
5158 
5159     return angle::Result::Continue;
5160 }
5161 
// Returns the subset of |featureBits| that the given format supports, for either the linear-
// or optimal-tiling feature set (selected by the |features| member-pointer template argument).
// Format properties are queried from the device lazily and cached in mFormatProperties
// (necessarily a mutable member, since this method is const).
template <VkFormatFeatureFlags VkFormatProperties::*features>
VkFormatFeatureFlags RendererVk::getFormatFeatureBits(angle::FormatID formatID,
                                                      const VkFormatFeatureFlags featureBits) const
{
    ASSERT(formatID != angle::FormatID::NONE);
    VkFormatProperties &deviceProperties = mFormatProperties[formatID];

    // bufferFeatures == kInvalidFormatFeatureFlags marks an entry that has not been queried yet.
    if (deviceProperties.bufferFeatures == kInvalidFormatFeatureFlags)
    {
        // If we don't have the actual device features, see if the requested features are mandatory.
        // If so, there's no need to query the device.
        const VkFormatProperties &mandatoryProperties = vk::GetMandatoryFormatSupport(formatID);
        if (IsMaskFlagSet(mandatoryProperties.*features, featureBits))
        {
            // Note: the cache entry is intentionally left unpopulated on this fast path.
            return featureBits;
        }

        VkFormat vkFormat = vk::GetVkFormatFromFormatID(formatID);
        ASSERT(vkFormat != VK_FORMAT_UNDEFINED);

        // Otherwise query the format features and cache it.
        vkGetPhysicalDeviceFormatProperties(mPhysicalDevice, vkFormat, &deviceProperties);
        // Workaround for some Android devices that don't indicate filtering
        // support on D16_UNORM and they should.
        if (mFeatures.forceD16TexFilter.enabled && vkFormat == VK_FORMAT_D16_UNORM)
        {
            deviceProperties.*features |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
        }
    }

    return deviceProperties.*features & featureBits;
}
5194 
5195 template <VkFormatFeatureFlags VkFormatProperties::*features>
hasFormatFeatureBits(angle::FormatID formatID,const VkFormatFeatureFlags featureBits) const5196 bool RendererVk::hasFormatFeatureBits(angle::FormatID formatID,
5197                                       const VkFormatFeatureFlags featureBits) const
5198 {
5199     return IsMaskFlagSet(getFormatFeatureBits<features>(formatID, featureBits), featureBits);
5200 }
5201 
haveSameFormatFeatureBits(angle::FormatID formatID1,angle::FormatID formatID2) const5202 bool RendererVk::haveSameFormatFeatureBits(angle::FormatID formatID1,
5203                                            angle::FormatID formatID2) const
5204 {
5205     if (formatID1 == angle::FormatID::NONE || formatID2 == angle::FormatID::NONE)
5206     {
5207         return false;
5208     }
5209 
5210     constexpr VkFormatFeatureFlags kImageUsageFeatureBits =
5211         VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT | VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT |
5212         VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
5213 
5214     VkFormatFeatureFlags fmt1LinearFeatureBits =
5215         getLinearImageFormatFeatureBits(formatID1, kImageUsageFeatureBits);
5216     VkFormatFeatureFlags fmt1OptimalFeatureBits =
5217         getImageFormatFeatureBits(formatID1, kImageUsageFeatureBits);
5218 
5219     return hasLinearImageFormatFeatureBits(formatID2, fmt1LinearFeatureBits) &&
5220            hasImageFormatFeatureBits(formatID2, fmt1OptimalFeatureBits);
5221 }
5222 
addBufferBlockToOrphanList(vk::BufferBlock * block)5223 void RendererVk::addBufferBlockToOrphanList(vk::BufferBlock *block)
5224 {
5225     std::unique_lock<std::mutex> lock(mGarbageMutex);
5226     mOrphanedBufferBlocks.emplace_back(block);
5227 }
5228 
pruneOrphanedBufferBlocks()5229 void RendererVk::pruneOrphanedBufferBlocks()
5230 {
5231     for (auto iter = mOrphanedBufferBlocks.begin(); iter != mOrphanedBufferBlocks.end();)
5232     {
5233         if (!(*iter)->isEmpty())
5234         {
5235             ++iter;
5236             continue;
5237         }
5238         (*iter)->destroy(this);
5239         iter = mOrphanedBufferBlocks.erase(iter);
5240     }
5241 }
5242 
// Destroys all garbage whose GPU work has completed.  Each list is processed front-to-back
// and processing stops at the first entry that cannot be destroyed yet.  Also prunes orphaned
// buffer blocks and refreshes the lock-free cached suballocation-garbage size.
void RendererVk::cleanupGarbage()
{
    std::unique_lock<std::mutex> lock(mGarbageMutex);

    // Clean up general garbages
    while (!mSharedGarbage.empty())
    {
        vk::SharedGarbage &garbage = mSharedGarbage.front();
        if (!garbage.destroyIfComplete(this))
        {
            // This entry is still in use; stop instead of scanning the rest of the queue.
            break;
        }
        mSharedGarbage.pop();
    }

    // Clean up suballocation garbages
    VkDeviceSize suballocationBytesDestroyed = 0;
    while (!mSuballocationGarbage.empty())
    {
        vk::SharedBufferSuballocationGarbage &garbage = mSuballocationGarbage.front();
        // Capture the size before destroyIfComplete potentially frees the suballocation.
        VkDeviceSize garbageSize                      = garbage.getSize();
        if (!garbage.destroyIfComplete(this))
        {
            break;
        }
        // Actually destroyed.
        mSuballocationGarbage.pop();
        suballocationBytesDestroyed += garbageSize;
    }
    mSuballocationGarbageDestroyed += suballocationBytesDestroyed;
    mSuballocationGarbageSizeInBytes -= suballocationBytesDestroyed;

    // Note: do this after clean up mSuballocationGarbage so that we will have more chances to find
    // orphaned blocks being empty.
    if (!mOrphanedBufferBlocks.empty())
    {
        pruneOrphanedBufferBlocks();
    }

    // Cache the value with atomic variable for access without mGarbageMutex lock.
    mSuballocationGarbageSizeInBytesCachedAtomic.store(mSuballocationGarbageSizeInBytes,
                                                       std::memory_order_release);
}
5286 
// Moves garbage whose associated work has now been submitted from the "pending submission"
// lists onto the regular garbage lists (where cleanupGarbage can destroy it once complete).
// Entries still pending submission are kept, preserving their relative order.
void RendererVk::cleanupPendingSubmissionGarbage()
{
    std::unique_lock<std::mutex> lock(mGarbageMutex);

    // Check if pending garbage is still pending. If not, move them to the garbage list.
    vk::SharedGarbageList pendingGarbage;
    while (!mPendingSubmissionGarbage.empty())
    {
        vk::SharedGarbage &garbage = mPendingSubmissionGarbage.front();
        if (garbage.hasResourceUseSubmitted(this))
        {
            mSharedGarbage.push(std::move(garbage));
        }
        else
        {
            // Still pending; re-queue into a temporary list to preserve order.
            pendingGarbage.push(std::move(garbage));
        }
        mPendingSubmissionGarbage.pop();
    }
    if (!pendingGarbage.empty())
    {
        mPendingSubmissionGarbage = std::move(pendingGarbage);
    }

    // Same processing for suballocation garbage; additionally track the byte count of garbage
    // that graduated to the destroyable list.
    vk::SharedBufferSuballocationGarbageList pendingSuballocationGarbage;
    while (!mPendingSubmissionSuballocationGarbage.empty())
    {
        vk::SharedBufferSuballocationGarbage &suballocationGarbage =
            mPendingSubmissionSuballocationGarbage.front();
        if (suballocationGarbage.hasResourceUseSubmitted(this))
        {
            mSuballocationGarbageSizeInBytes += suballocationGarbage.getSize();
            mSuballocationGarbage.push(std::move(suballocationGarbage));
        }
        else
        {
            pendingSuballocationGarbage.push(std::move(suballocationGarbage));
        }
        mPendingSubmissionSuballocationGarbage.pop();
    }
    if (!pendingSuballocationGarbage.empty())
    {
        mPendingSubmissionSuballocationGarbage = std::move(pendingSuballocationGarbage);
    }
}
5332 
onNewValidationMessage(const std::string & message)5333 void RendererVk::onNewValidationMessage(const std::string &message)
5334 {
5335     mLastValidationMessage = message;
5336     ++mValidationMessageCount;
5337 }
5338 
onFramebufferFetchUsed()5339 void RendererVk::onFramebufferFetchUsed()
5340 {
5341     mIsFramebufferFetchUsed = true;
5342 }
5343 
getAndClearLastValidationMessage(uint32_t * countSinceLastClear)5344 std::string RendererVk::getAndClearLastValidationMessage(uint32_t *countSinceLastClear)
5345 {
5346     *countSinceLastClear    = mValidationMessageCount;
5347     mValidationMessageCount = 0;
5348 
5349     return std::move(mLastValidationMessage);
5350 }
5351 
getMaxFenceWaitTimeNs() const5352 uint64_t RendererVk::getMaxFenceWaitTimeNs() const
5353 {
5354     constexpr uint64_t kMaxFenceWaitTimeNs = std::numeric_limits<uint64_t>::max();
5355 
5356     return kMaxFenceWaitTimeNs;
5357 }
5358 
// Chooses and installs the process-wide debug annotator, under the global debug mutex.
void RendererVk::setGlobalDebugAnnotator()
{
    // Install one of two DebugAnnotator classes:
    //
    // 1) The global class enables basic ANGLE debug functionality (e.g. Vulkan validation errors
    //    will cause dEQP tests to fail).
    //
    // 2) The DebugAnnotatorVk class processes OpenGL ES commands that the application uses.  It is
    //    installed for the following purposes:
    //
    //    1) To enable calling the vkCmd*DebugUtilsLabelEXT functions in order to communicate to
    //       debuggers (e.g. AGI) the OpenGL ES commands that the application uses.  In addition to
    //       simply installing DebugAnnotatorVk, also enable calling vkCmd*DebugUtilsLabelEXT.
    //
    //    2) To enable logging to Android logcat the OpenGL ES commands that the application uses.
    bool installDebugAnnotatorVk = false;

    // Enable calling the vkCmd*DebugUtilsLabelEXT functions if the vkCmd*DebugUtilsLabelEXT
    // functions exist, and if the kEnableDebugMarkersVarName environment variable is set.
    if (vkCmdBeginDebugUtilsLabelEXT)
    {
        // Use the GetAndSet variant to improve future lookup times
        std::string enabled = angle::GetAndSetEnvironmentVarOrUnCachedAndroidProperty(
            kEnableDebugMarkersVarName, kEnableDebugMarkersPropertyName);
        // Any non-empty value other than "0" turns debugger mode on.
        if (!enabled.empty() && enabled.compare("0") != 0)
        {
            mAngleDebuggerMode      = true;
            installDebugAnnotatorVk = true;
        }
    }
#if defined(ANGLE_ENABLE_TRACE_ANDROID_LOGCAT)
    // Only install DebugAnnotatorVk to log all API commands to Android's logcat.
    // Note: this build flag unconditionally overrides the environment-variable decision above.
    installDebugAnnotatorVk = true;
#endif

    {
        // Annotator installation is global state; serialize with the debug mutex.
        std::unique_lock<std::mutex> lock(gl::GetDebugMutex());
        if (installDebugAnnotatorVk)
        {
            gl::InitializeDebugAnnotations(&mAnnotator);
        }
        else
        {
            mDisplay->setGlobalDebugAnnotator();
        }
    }
}
5406 
// When Vulkan is loaded through the shared volk loader, (re)load instance- and device-level
// entry points if the currently loaded handles differ from this renderer's, then refresh the
// extension entry points that alias core functions.  No-op in non-shared-loader builds.
void RendererVk::reloadVolkIfNeeded() const
{
#if defined(ANGLE_SHARED_LIBVULKAN)
    // Only reload if another instance's entry points are currently loaded.
    if ((mInstance != VK_NULL_HANDLE) && (volkGetLoadedInstance() != mInstance))
    {
        volkLoadInstance(mInstance);
    }

    // Likewise for the device-level entry points.
    if ((mDevice != VK_NULL_HANDLE) && (volkGetLoadedDevice() != mDevice))
    {
        volkLoadDevice(mDevice);
    }

    initializeInstanceExtensionEntryPointsFromCore();
    initializeDeviceExtensionEntryPointsFromCore();
#endif  // defined(ANGLE_SHARED_LIBVULKAN)
}
5424 
initializeInstanceExtensionEntryPointsFromCore() const5425 void RendererVk::initializeInstanceExtensionEntryPointsFromCore() const
5426 {
5427     if (isVulkan11Instance())
5428     {
5429         InitGetPhysicalDeviceProperties2KHRFunctionsFromCore();
5430         if (mFeatures.supportsExternalFenceCapabilities.enabled)
5431         {
5432             InitExternalFenceCapabilitiesFunctionsFromCore();
5433         }
5434         if (mFeatures.supportsExternalSemaphoreCapabilities.enabled)
5435         {
5436             InitExternalSemaphoreCapabilitiesFunctionsFromCore();
5437         }
5438     }
5439 }
5440 
initializeDeviceExtensionEntryPointsFromCore() const5441 void RendererVk::initializeDeviceExtensionEntryPointsFromCore() const
5442 {
5443     if (isVulkan11Device())
5444     {
5445         if (mFeatures.supportsGetMemoryRequirements2.enabled)
5446         {
5447             InitGetMemoryRequirements2KHRFunctionsFromCore();
5448         }
5449         if (mFeatures.supportsBindMemory2.enabled)
5450         {
5451             InitBindMemory2KHRFunctionsFromCore();
5452         }
5453         if (mFeatures.supportsYUVSamplerConversion.enabled)
5454         {
5455             InitSamplerYcbcrKHRFunctionsFromCore();
5456         }
5457     }
5458 }
5459 
// Submits the flushed work for the given protection type and priority, optionally signaling
// |signalSemaphore| and attaching |externalFence| to the submission.  The submission either
// happens inline or is enqueued on the async command processor thread.
angle::Result RendererVk::submitCommands(vk::Context *context,
                                         vk::ProtectionType protectionType,
                                         egl::ContextPriority contextPriority,
                                         const vk::Semaphore *signalSemaphore,
                                         const vk::SharedExternalFence *externalFence,
                                         const QueueSerial &submitQueueSerial)
{
    ASSERT(signalSemaphore == nullptr || signalSemaphore->valid());
    // Translate the optional semaphore into a raw handle for the queue APIs.
    const VkSemaphore signalVkSemaphore =
        signalSemaphore ? signalSemaphore->getHandle() : VK_NULL_HANDLE;

    // Copy the shared external fence (if provided) so ownership can be moved into the
    // submission below.
    vk::SharedExternalFence externalFenceCopy;
    if (externalFence != nullptr)
    {
        externalFenceCopy = *externalFence;
    }

    if (isAsyncCommandQueueEnabled())
    {
        ANGLE_TRY(mCommandProcessor.enqueueSubmitCommands(
            context, protectionType, contextPriority, signalVkSemaphore,
            std::move(externalFenceCopy), submitQueueSerial));
    }
    else
    {
        ANGLE_TRY(mCommandQueue.submitCommands(context, protectionType, contextPriority,
                                               signalVkSemaphore, std::move(externalFenceCopy),
                                               submitQueueSerial));
    }

    // Note: post-submit bookkeeping runs on mCommandQueue directly in both modes.
    ANGLE_TRY(mCommandQueue.postSubmitCheck(context));

    return angle::Result::Continue;
}
5494 
// Establishes an execution dependency from work submitted at srcContextPriority to subsequent
// work at dstContextPriority: the last source-priority submission signals a semaphore, and an
// empty destination-priority submission waits on it.  |index| is the serial index used to
// generate queue serials for all submissions made here.
angle::Result RendererVk::submitPriorityDependency(vk::Context *context,
                                                   vk::ProtectionTypes protectionTypes,
                                                   egl::ContextPriority srcContextPriority,
                                                   egl::ContextPriority dstContextPriority,
                                                   SerialIndex index)
{
    // Scoped so the semaphore is released back to the renderer when done.
    vk::RendererScoped<vk::ReleasableResource<vk::Semaphore>> semaphore(this);
    ANGLE_VK_TRY(context, semaphore.get().get().init(mDevice));

    // First, submit already flushed commands / wait semaphores into the source Priority VkQueue.
    // Commands that are in the Secondary Command Buffers will be flushed into the new VkQueue.

    // Submit commands and attach Signal Semaphore.
    ASSERT(protectionTypes.any());
    while (protectionTypes.any())
    {
        vk::ProtectionType protectionType = protectionTypes.first();
        protectionTypes.reset(protectionType);

        QueueSerial queueSerial(index, generateQueueSerial(index));
        // Submit semaphore only if this is the last submission (all into the same VkQueue).
        const vk::Semaphore *signalSemaphore = nullptr;
        if (protectionTypes.none())
        {
            // Update QueueSerial to collect semaphore using the latest possible queueSerial.
            semaphore.get().setQueueSerial(queueSerial);
            signalSemaphore = &semaphore.get().get();
        }
        ANGLE_TRY(submitCommands(context, protectionType, srcContextPriority, signalSemaphore,
                                 nullptr, queueSerial));
    }

    // Submit only Wait Semaphore into the destination Priority (VkQueue).
    QueueSerial queueSerial(index, generateQueueSerial(index));
    semaphore.get().setQueueSerial(queueSerial);
    ANGLE_TRY(queueSubmitWaitSemaphore(context, dstContextPriority, semaphore.get().get(),
                                       VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, queueSerial));

    return angle::Result::Continue;
}
5535 
handleDeviceLost()5536 void RendererVk::handleDeviceLost()
5537 {
5538     if (isAsyncCommandQueueEnabled())
5539     {
5540         mCommandProcessor.handleDeviceLost(this);
5541     }
5542     else
5543     {
5544         mCommandQueue.handleDeviceLost(this);
5545     }
5546 }
5547 
finishResourceUse(vk::Context * context,const vk::ResourceUse & use)5548 angle::Result RendererVk::finishResourceUse(vk::Context *context, const vk::ResourceUse &use)
5549 {
5550     if (isAsyncCommandQueueEnabled())
5551     {
5552         ANGLE_TRY(mCommandProcessor.waitForResourceUseToBeSubmitted(context, use));
5553     }
5554     return mCommandQueue.finishResourceUse(context, use, getMaxFenceWaitTimeNs());
5555 }
5556 
finishQueueSerial(vk::Context * context,const QueueSerial & queueSerial)5557 angle::Result RendererVk::finishQueueSerial(vk::Context *context, const QueueSerial &queueSerial)
5558 {
5559     ASSERT(queueSerial.valid());
5560     if (isAsyncCommandQueueEnabled())
5561     {
5562         ANGLE_TRY(mCommandProcessor.waitForQueueSerialToBeSubmitted(context, queueSerial));
5563     }
5564     return mCommandQueue.finishQueueSerial(context, queueSerial, getMaxFenceWaitTimeNs());
5565 }
5566 
waitForResourceUseToFinishWithUserTimeout(vk::Context * context,const vk::ResourceUse & use,uint64_t timeout,VkResult * result)5567 angle::Result RendererVk::waitForResourceUseToFinishWithUserTimeout(vk::Context *context,
5568                                                                     const vk::ResourceUse &use,
5569                                                                     uint64_t timeout,
5570                                                                     VkResult *result)
5571 {
5572     ANGLE_TRACE_EVENT0("gpu.angle", "RendererVk::waitForResourceUseToFinishWithUserTimeout");
5573     if (isAsyncCommandQueueEnabled())
5574     {
5575         ANGLE_TRY(mCommandProcessor.waitForResourceUseToBeSubmitted(context, use));
5576     }
5577     return mCommandQueue.waitForResourceUseToFinishWithUserTimeout(context, use, timeout, result);
5578 }
5579 
flushWaitSemaphores(vk::ProtectionType protectionType,egl::ContextPriority priority,std::vector<VkSemaphore> && waitSemaphores,std::vector<VkPipelineStageFlags> && waitSemaphoreStageMasks)5580 angle::Result RendererVk::flushWaitSemaphores(
5581     vk::ProtectionType protectionType,
5582     egl::ContextPriority priority,
5583     std::vector<VkSemaphore> &&waitSemaphores,
5584     std::vector<VkPipelineStageFlags> &&waitSemaphoreStageMasks)
5585 {
5586     ANGLE_TRACE_EVENT0("gpu.angle", "RendererVk::flushWaitSemaphores");
5587     if (isAsyncCommandQueueEnabled())
5588     {
5589         ANGLE_TRY(mCommandProcessor.enqueueFlushWaitSemaphores(protectionType, priority,
5590                                                                std::move(waitSemaphores),
5591                                                                std::move(waitSemaphoreStageMasks)));
5592     }
5593     else
5594     {
5595         mCommandQueue.flushWaitSemaphores(protectionType, priority, std::move(waitSemaphores),
5596                                           std::move(waitSemaphoreStageMasks));
5597     }
5598 
5599     return angle::Result::Continue;
5600 }
5601 
flushRenderPassCommands(vk::Context * context,vk::ProtectionType protectionType,egl::ContextPriority priority,const vk::RenderPass & renderPass,vk::RenderPassCommandBufferHelper ** renderPassCommands)5602 angle::Result RendererVk::flushRenderPassCommands(
5603     vk::Context *context,
5604     vk::ProtectionType protectionType,
5605     egl::ContextPriority priority,
5606     const vk::RenderPass &renderPass,
5607     vk::RenderPassCommandBufferHelper **renderPassCommands)
5608 {
5609     ANGLE_TRACE_EVENT0("gpu.angle", "RendererVk::flushRenderPassCommands");
5610     if (isAsyncCommandQueueEnabled())
5611     {
5612         ANGLE_TRY(mCommandProcessor.enqueueFlushRenderPassCommands(
5613             context, protectionType, priority, renderPass, renderPassCommands));
5614     }
5615     else
5616     {
5617         ANGLE_TRY(mCommandQueue.flushRenderPassCommands(context, protectionType, priority,
5618                                                         renderPass, renderPassCommands));
5619     }
5620 
5621     return angle::Result::Continue;
5622 }
5623 
flushOutsideRPCommands(vk::Context * context,vk::ProtectionType protectionType,egl::ContextPriority priority,vk::OutsideRenderPassCommandBufferHelper ** outsideRPCommands)5624 angle::Result RendererVk::flushOutsideRPCommands(
5625     vk::Context *context,
5626     vk::ProtectionType protectionType,
5627     egl::ContextPriority priority,
5628     vk::OutsideRenderPassCommandBufferHelper **outsideRPCommands)
5629 {
5630     ANGLE_TRACE_EVENT0("gpu.angle", "RendererVk::flushOutsideRPCommands");
5631     if (isAsyncCommandQueueEnabled())
5632     {
5633         ANGLE_TRY(mCommandProcessor.enqueueFlushOutsideRPCommands(context, protectionType, priority,
5634                                                                   outsideRPCommands));
5635     }
5636     else
5637     {
5638         ANGLE_TRY(mCommandQueue.flushOutsideRPCommands(context, protectionType, priority,
5639                                                        outsideRPCommands));
5640     }
5641 
5642     return angle::Result::Continue;
5643 }
5644 
queuePresent(vk::Context * context,egl::ContextPriority priority,const VkPresentInfoKHR & presentInfo,vk::SwapchainStatus * swapchainStatus)5645 void RendererVk::queuePresent(vk::Context *context,
5646                               egl::ContextPriority priority,
5647                               const VkPresentInfoKHR &presentInfo,
5648                               vk::SwapchainStatus *swapchainStatus)
5649 {
5650     if (isAsyncCommandQueueEnabled())
5651     {
5652         mCommandProcessor.enqueuePresent(priority, presentInfo, swapchainStatus);
5653         // lastPresentResult should always VK_SUCCESS when isPending is true
5654         ASSERT(!swapchainStatus->isPending || swapchainStatus->lastPresentResult == VK_SUCCESS);
5655     }
5656     else
5657     {
5658         mCommandQueue.queuePresent(priority, presentInfo, swapchainStatus);
5659         ASSERT(!swapchainStatus->isPending);
5660     }
5661 
5662     if (getFeatures().logMemoryReportStats.enabled)
5663     {
5664         mMemoryReport.logMemoryReportStats();
5665     }
5666 }
5667 
5668 template <typename CommandBufferHelperT, typename RecyclerT>
getCommandBufferImpl(vk::Context * context,vk::SecondaryCommandPool * commandPool,vk::SecondaryCommandMemoryAllocator * commandsAllocator,RecyclerT * recycler,CommandBufferHelperT ** commandBufferHelperOut)5669 angle::Result RendererVk::getCommandBufferImpl(
5670     vk::Context *context,
5671     vk::SecondaryCommandPool *commandPool,
5672     vk::SecondaryCommandMemoryAllocator *commandsAllocator,
5673     RecyclerT *recycler,
5674     CommandBufferHelperT **commandBufferHelperOut)
5675 {
5676     return recycler->getCommandBufferHelper(context, commandPool, commandsAllocator,
5677                                             commandBufferHelperOut);
5678 }
5679 
getOutsideRenderPassCommandBufferHelper(vk::Context * context,vk::SecondaryCommandPool * commandPool,vk::SecondaryCommandMemoryAllocator * commandsAllocator,vk::OutsideRenderPassCommandBufferHelper ** commandBufferHelperOut)5680 angle::Result RendererVk::getOutsideRenderPassCommandBufferHelper(
5681     vk::Context *context,
5682     vk::SecondaryCommandPool *commandPool,
5683     vk::SecondaryCommandMemoryAllocator *commandsAllocator,
5684     vk::OutsideRenderPassCommandBufferHelper **commandBufferHelperOut)
5685 {
5686     ANGLE_TRACE_EVENT0("gpu.angle", "RendererVk::getOutsideRenderPassCommandBufferHelper");
5687     return getCommandBufferImpl(context, commandPool, commandsAllocator,
5688                                 &mOutsideRenderPassCommandBufferRecycler, commandBufferHelperOut);
5689 }
5690 
getRenderPassCommandBufferHelper(vk::Context * context,vk::SecondaryCommandPool * commandPool,vk::SecondaryCommandMemoryAllocator * commandsAllocator,vk::RenderPassCommandBufferHelper ** commandBufferHelperOut)5691 angle::Result RendererVk::getRenderPassCommandBufferHelper(
5692     vk::Context *context,
5693     vk::SecondaryCommandPool *commandPool,
5694     vk::SecondaryCommandMemoryAllocator *commandsAllocator,
5695     vk::RenderPassCommandBufferHelper **commandBufferHelperOut)
5696 {
5697     ANGLE_TRACE_EVENT0("gpu.angle", "RendererVk::getRenderPassCommandBufferHelper");
5698     return getCommandBufferImpl(context, commandPool, commandsAllocator,
5699                                 &mRenderPassCommandBufferRecycler, commandBufferHelperOut);
5700 }
5701 
recycleOutsideRenderPassCommandBufferHelper(vk::OutsideRenderPassCommandBufferHelper ** commandBuffer)5702 void RendererVk::recycleOutsideRenderPassCommandBufferHelper(
5703     vk::OutsideRenderPassCommandBufferHelper **commandBuffer)
5704 {
5705     ANGLE_TRACE_EVENT0("gpu.angle", "RendererVk::recycleOutsideRenderPassCommandBufferHelper");
5706     mOutsideRenderPassCommandBufferRecycler.recycleCommandBufferHelper(commandBuffer);
5707 }
5708 
recycleRenderPassCommandBufferHelper(vk::RenderPassCommandBufferHelper ** commandBuffer)5709 void RendererVk::recycleRenderPassCommandBufferHelper(
5710     vk::RenderPassCommandBufferHelper **commandBuffer)
5711 {
5712     ANGLE_TRACE_EVENT0("gpu.angle", "RendererVk::recycleRenderPassCommandBufferHelper");
5713     mRenderPassCommandBufferRecycler.recycleCommandBufferHelper(commandBuffer);
5714 }
5715 
logCacheStats() const5716 void RendererVk::logCacheStats() const
5717 {
5718     if (!vk::kOutputCumulativePerfCounters)
5719     {
5720         return;
5721     }
5722 
5723     std::unique_lock<std::mutex> localLock(mCacheStatsMutex);
5724 
5725     int cacheType = 0;
5726     INFO() << "Vulkan object cache hit ratios: ";
5727     for (const CacheStats &stats : mVulkanCacheStats)
5728     {
5729         INFO() << "    CacheType " << cacheType++ << ": " << stats.getHitRatio();
5730     }
5731 }
5732 
getFormatDescriptorCountForVkFormat(ContextVk * contextVk,VkFormat format,uint32_t * descriptorCountOut)5733 angle::Result RendererVk::getFormatDescriptorCountForVkFormat(ContextVk *contextVk,
5734                                                               VkFormat format,
5735                                                               uint32_t *descriptorCountOut)
5736 {
5737     if (mVkFormatDescriptorCountMap.count(format) == 0)
5738     {
5739         // Query device for descriptor count with basic values for most of
5740         // VkPhysicalDeviceImageFormatInfo2 members.
5741         VkPhysicalDeviceImageFormatInfo2 imageFormatInfo = {};
5742         imageFormatInfo.sType  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
5743         imageFormatInfo.format = format;
5744         imageFormatInfo.type   = VK_IMAGE_TYPE_2D;
5745         imageFormatInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
5746         imageFormatInfo.usage  = VK_IMAGE_USAGE_SAMPLED_BIT;
5747         imageFormatInfo.flags  = 0;
5748 
5749         VkImageFormatProperties imageFormatProperties                            = {};
5750         VkSamplerYcbcrConversionImageFormatProperties ycbcrImageFormatProperties = {};
5751         ycbcrImageFormatProperties.sType =
5752             VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES;
5753 
5754         VkImageFormatProperties2 imageFormatProperties2 = {};
5755         imageFormatProperties2.sType                 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2;
5756         imageFormatProperties2.pNext                 = &ycbcrImageFormatProperties;
5757         imageFormatProperties2.imageFormatProperties = imageFormatProperties;
5758 
5759         ANGLE_VK_TRY(contextVk, vkGetPhysicalDeviceImageFormatProperties2(
5760                                     mPhysicalDevice, &imageFormatInfo, &imageFormatProperties2));
5761 
5762         mVkFormatDescriptorCountMap[format] =
5763             ycbcrImageFormatProperties.combinedImageSamplerDescriptorCount;
5764     }
5765 
5766     ASSERT(descriptorCountOut);
5767     *descriptorCountOut = mVkFormatDescriptorCountMap[format];
5768     return angle::Result::Continue;
5769 }
5770 
getFormatDescriptorCountForExternalFormat(ContextVk * contextVk,uint64_t format,uint32_t * descriptorCountOut)5771 angle::Result RendererVk::getFormatDescriptorCountForExternalFormat(ContextVk *contextVk,
5772                                                                     uint64_t format,
5773                                                                     uint32_t *descriptorCountOut)
5774 {
5775     ASSERT(descriptorCountOut);
5776 
5777     // TODO: need to query for external formats as well once spec is fixed. http://anglebug.com/6141
5778     ANGLE_VK_CHECK(contextVk, getFeatures().useMultipleDescriptorsForExternalFormats.enabled,
5779                    VK_ERROR_INCOMPATIBLE_DRIVER);
5780 
5781     // Vulkan spec has a gap in that there is no mechanism available to query the immutable
5782     // sampler descriptor count of an external format. For now, return a default value.
5783     constexpr uint32_t kExternalFormatDefaultDescriptorCount = 4;
5784     *descriptorCountOut = kExternalFormatDefaultDescriptorCount;
5785     return angle::Result::Continue;
5786 }
5787 
onAllocateHandle(vk::HandleType handleType)5788 void RendererVk::onAllocateHandle(vk::HandleType handleType)
5789 {
5790     std::unique_lock<std::mutex> localLock(mActiveHandleCountsMutex);
5791     mActiveHandleCounts.onAllocate(handleType);
5792 }
5793 
onDeallocateHandle(vk::HandleType handleType)5794 void RendererVk::onDeallocateHandle(vk::HandleType handleType)
5795 {
5796     std::unique_lock<std::mutex> localLock(mActiveHandleCountsMutex);
5797     mActiveHandleCounts.onDeallocate(handleType);
5798 }
5799 
getPreferedBufferBlockSize(uint32_t memoryTypeIndex) const5800 VkDeviceSize RendererVk::getPreferedBufferBlockSize(uint32_t memoryTypeIndex) const
5801 {
5802     // Try not to exceed 1/64 of heap size to begin with.
5803     const VkDeviceSize heapSize = getMemoryProperties().getHeapSizeForMemoryType(memoryTypeIndex);
5804     return std::min(heapSize / 64, mPreferredLargeHeapBlockSize);
5805 }
5806 
allocateScopedQueueSerialIndex(vk::ScopedQueueSerialIndex * indexOut)5807 angle::Result RendererVk::allocateScopedQueueSerialIndex(vk::ScopedQueueSerialIndex *indexOut)
5808 {
5809     SerialIndex index;
5810     ANGLE_TRY(allocateQueueSerialIndex(&index));
5811     indexOut->init(index, &mQueueSerialIndexAllocator);
5812     return angle::Result::Continue;
5813 }
5814 
allocateQueueSerialIndex(SerialIndex * serialIndexOut)5815 angle::Result RendererVk::allocateQueueSerialIndex(SerialIndex *serialIndexOut)
5816 {
5817     *serialIndexOut = mQueueSerialIndexAllocator.allocate();
5818     if (*serialIndexOut == kInvalidQueueSerialIndex)
5819     {
5820         return angle::Result::Stop;
5821     }
5822     return angle::Result::Continue;
5823 }
5824 
// Returns a previously allocated queue serial index to the allocator for reuse.
void RendererVk::releaseQueueSerialIndex(SerialIndex index)
{
    mQueueSerialIndexAllocator.release(index);
}
5829 
// Thin forwarder to the command queue: finishes one command batch (waiting at most
// getMaxFenceWaitTimeNs()) and cleans up after it.  *anyBatchCleaned reports whether
// a batch was actually retired.
angle::Result RendererVk::finishOneCommandBatchAndCleanup(vk::Context *context,
                                                          bool *anyBatchCleaned)
{
    return mCommandQueue.finishOneCommandBatchAndCleanup(context, getMaxFenceWaitTimeNs(),
                                                         anyBatchCleaned);
}
5836 
// static
// Returns the human-readable name of a VkObjectType value (delegates to the shared
// vk_utils helper).
const char *RendererVk::GetVulkanObjectTypeName(VkObjectType type)
{
    return GetVkObjectTypeName(type);
}
5842 
5843 namespace vk
5844 {
ImageMemorySuballocator::ImageMemorySuballocator() {}
ImageMemorySuballocator::~ImageMemorySuballocator() {}

// Intentionally empty: there are no suballocator-owned resources to release here.
// NOTE(review): the renderer parameter is unused; presumably kept so destroy() matches
// the destroy(RendererVk *) signature used elsewhere — confirm against callers.
void ImageMemorySuballocator::destroy(RendererVk *renderer) {}
5849 
// Allocates device memory for |image| through VMA and binds it, with two fallback
// stages when the initial allocation fails:
//   1) Repeatedly finish one in-flight command batch (freeing its garbage) and retry,
//      while progress is being made.
//   2) Drop the DEVICE_LOCAL requirement and retry once (slower host-visible memory).
// On final failure, the pending allocation is recorded with the memory allocation
// tracker and the VkResult is returned.  On success, outputs the allocation, the
// allocated memory's property flags, memory type index and size, and notifies the
// renderer's memory accounting.
VkResult ImageMemorySuballocator::allocateAndBindMemory(Context *context,
                                                        Image *image,
                                                        const VkImageCreateInfo *imageCreateInfo,
                                                        VkMemoryPropertyFlags requiredFlags,
                                                        VkMemoryPropertyFlags preferredFlags,
                                                        MemoryAllocationType memoryAllocationType,
                                                        Allocation *allocationOut,
                                                        VkMemoryPropertyFlags *memoryFlagsOut,
                                                        uint32_t *memoryTypeIndexOut,
                                                        VkDeviceSize *sizeOut)
{
    ASSERT(image && image->valid());
    ASSERT(allocationOut && !allocationOut->valid());
    RendererVk *renderer       = context->getRenderer();
    const Allocator &allocator = renderer->getAllocator();

    // Large images get a dedicated allocation instead of a suballocation.
    VkMemoryRequirements memoryRequirements;
    image->getMemoryRequirements(renderer->getDevice(), &memoryRequirements);
    bool allocateDedicatedMemory =
        memoryRequirements.size >= kImageSizeThresholdForDedicatedMemoryAllocation;

    // Allocate and bind memory for the image. Try allocating on the device first. If unsuccessful,
    // it is possible to retry allocation after cleaning the garbage.
    VkResult result;
    bool anyBatchCleaned             = false;
    uint32_t batchesWaitedAndCleaned = 0;

    // Stage 1: retry loop.  Exits when the allocation succeeds, or when finishing a
    // batch freed nothing (no further progress is possible).
    do
    {
        result = vma::AllocateAndBindMemoryForImage(
            allocator.getHandle(), &image->mHandle, requiredFlags, preferredFlags,
            allocateDedicatedMemory, &allocationOut->mHandle, memoryTypeIndexOut, sizeOut);

        if (result != VK_SUCCESS)
        {
            // If there is an error in command batch finish, a device OOM error will be returned.
            if (renderer->finishOneCommandBatchAndCleanup(context, &anyBatchCleaned) ==
                angle::Result::Stop)
            {
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }

            if (anyBatchCleaned)
            {
                batchesWaitedAndCleaned++;
            }
        }
    } while (result != VK_SUCCESS && anyBatchCleaned);

    if (batchesWaitedAndCleaned > 0)
    {
        INFO() << "Initial allocation failed. Waited for " << batchesWaitedAndCleaned
               << " commands to finish and free garbage | Allocation result: "
               << ((result == VK_SUCCESS) ? "SUCCESS" : "FAIL");
    }

    // Stage 2: if there is still no space for the new allocation, the allocation may still be made
    // outside the device, although it will result in performance penalty.
    if (result != VK_SUCCESS)
    {
        requiredFlags &= (~VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
        result = vma::AllocateAndBindMemoryForImage(
            allocator.getHandle(), &image->mHandle, requiredFlags, preferredFlags,
            allocateDedicatedMemory, &allocationOut->mHandle, memoryTypeIndexOut, sizeOut);

        INFO()
            << "Allocation failed. Removed the DEVICE_LOCAL bit requirement | Allocation result: "
            << ((result == VK_SUCCESS) ? "SUCCESS" : "FAIL");
    }

    // At the end, if all available options fail, we should return the appropriate out-of-memory
    // error.
    if (result != VK_SUCCESS)
    {
        // Record the failed memory allocation (best-effort: only if a matching memory
        // type index can still be determined for the failed request).
        uint32_t pendingMemoryTypeIndex;
        if (vma::FindMemoryTypeIndexForImageInfo(
                allocator.getHandle(), imageCreateInfo, requiredFlags, preferredFlags,
                allocateDedicatedMemory, &pendingMemoryTypeIndex) == VK_SUCCESS)
        {
            renderer->getMemoryAllocationTracker()->setPendingMemoryAlloc(
                memoryAllocationType, memoryRequirements.size, pendingMemoryTypeIndex);
        }

        return result;
    }

    // We need to get the property flags of the allocated memory.
    *memoryFlagsOut =
        renderer->getMemoryProperties().getMemoryType(*memoryTypeIndexOut).propertyFlags;
    // Detect the case where DEVICE_LOCAL was preferred but the allocation landed in
    // non-device-local memory, and count it as a fallback for the perf counters.
    if ((~(*memoryFlagsOut) & preferredFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
    {
        // For images allocated here, although allocation is preferred on the device, it is not
        // required.
        ASSERT((requiredFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == 0);
        renderer->getMemoryAllocationTracker()->compareExpectedFlagsWithAllocatedFlags(
            requiredFlags, preferredFlags, *memoryFlagsOut,
            reinterpret_cast<void *>(allocationOut->getHandle()));
        context->getPerfCounters().deviceMemoryImageAllocationFallbacks++;
    }

    renderer->onMemoryAlloc(memoryAllocationType, *sizeOut, *memoryTypeIndexOut,
                            allocationOut->getHandle());
    return VK_SUCCESS;
}
5955 
mapMemoryAndInitWithNonZeroValue(RendererVk * renderer,Allocation * allocation,VkDeviceSize size,int value,VkMemoryPropertyFlags flags)5956 VkResult ImageMemorySuballocator::mapMemoryAndInitWithNonZeroValue(RendererVk *renderer,
5957                                                                    Allocation *allocation,
5958                                                                    VkDeviceSize size,
5959                                                                    int value,
5960                                                                    VkMemoryPropertyFlags flags)
5961 {
5962     ASSERT(allocation && allocation->valid());
5963     const Allocator &allocator = renderer->getAllocator();
5964 
5965     void *mappedMemoryData;
5966     VkResult result = vma::MapMemory(allocator.getHandle(), allocation->mHandle, &mappedMemoryData);
5967     if (result != VK_SUCCESS)
5968     {
5969         return result;
5970     }
5971 
5972     memset(mappedMemoryData, value, static_cast<size_t>(size));
5973     vma::UnmapMemory(allocator.getHandle(), allocation->mHandle);
5974 
5975     // If the memory type is not host coherent, we perform an explicit flush.
5976     if ((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
5977     {
5978         vma::FlushAllocation(allocator.getHandle(), allocation->mHandle, 0, VK_WHOLE_SIZE);
5979     }
5980 
5981     return VK_SUCCESS;
5982 }
5983 
5984 }  // namespace vk
5985 }  // namespace rx
5986